| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 4.992, |
| "eval_steps": 500, |
| "global_step": 390, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.0128, |
| "grad_norm": 6.455806688672532, |
| "learning_rate": 1.0256410256410257e-06, |
| "loss": 1.0316, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.0256, |
| "grad_norm": 6.3584318209329265, |
| "learning_rate": 2.0512820512820513e-06, |
| "loss": 1.0063, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.0384, |
| "grad_norm": 6.27601957590265, |
| "learning_rate": 3.0769230769230774e-06, |
| "loss": 1.019, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.0512, |
| "grad_norm": 5.633561959867806, |
| "learning_rate": 4.102564102564103e-06, |
| "loss": 0.9994, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.064, |
| "grad_norm": 4.168257171686445, |
| "learning_rate": 5.128205128205128e-06, |
| "loss": 0.9575, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.0768, |
| "grad_norm": 2.7379628063731998, |
| "learning_rate": 6.153846153846155e-06, |
| "loss": 0.9572, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.0896, |
| "grad_norm": 2.5053177560943887, |
| "learning_rate": 7.17948717948718e-06, |
| "loss": 0.9424, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.1024, |
| "grad_norm": 4.253893464411118, |
| "learning_rate": 8.205128205128205e-06, |
| "loss": 0.9186, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.1152, |
| "grad_norm": 4.349687589283012, |
| "learning_rate": 9.230769230769232e-06, |
| "loss": 0.9434, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.128, |
| "grad_norm": 4.086114442629868, |
| "learning_rate": 1.0256410256410256e-05, |
| "loss": 0.9009, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.1408, |
| "grad_norm": 3.5542371555285537, |
| "learning_rate": 1.1282051282051283e-05, |
| "loss": 0.8844, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.1536, |
| "grad_norm": 2.3660399950585864, |
| "learning_rate": 1.230769230769231e-05, |
| "loss": 0.8672, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.1664, |
| "grad_norm": 1.463822846330684, |
| "learning_rate": 1.3333333333333333e-05, |
| "loss": 0.8248, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.1792, |
| "grad_norm": 1.6085823891800346, |
| "learning_rate": 1.435897435897436e-05, |
| "loss": 0.7911, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.192, |
| "grad_norm": 1.251506150307515, |
| "learning_rate": 1.5384615384615387e-05, |
| "loss": 0.8038, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.2048, |
| "grad_norm": 1.2138517226169034, |
| "learning_rate": 1.641025641025641e-05, |
| "loss": 0.7948, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.2176, |
| "grad_norm": 1.0115516940069706, |
| "learning_rate": 1.7435897435897438e-05, |
| "loss": 0.7719, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.2304, |
| "grad_norm": 0.9967586365102533, |
| "learning_rate": 1.8461538461538465e-05, |
| "loss": 0.7766, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.2432, |
| "grad_norm": 0.9748420422414402, |
| "learning_rate": 1.9487179487179488e-05, |
| "loss": 0.757, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.256, |
| "grad_norm": 0.9745033899149671, |
| "learning_rate": 2.0512820512820512e-05, |
| "loss": 0.7388, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.2688, |
| "grad_norm": 0.7953897403498978, |
| "learning_rate": 2.153846153846154e-05, |
| "loss": 0.7582, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.2816, |
| "grad_norm": 0.67039020165351, |
| "learning_rate": 2.2564102564102566e-05, |
| "loss": 0.721, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.2944, |
| "grad_norm": 0.7476588826201536, |
| "learning_rate": 2.3589743589743593e-05, |
| "loss": 0.7254, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.3072, |
| "grad_norm": 0.851933767925763, |
| "learning_rate": 2.461538461538462e-05, |
| "loss": 0.72, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.32, |
| "grad_norm": 1.050103153251014, |
| "learning_rate": 2.5641025641025646e-05, |
| "loss": 0.7221, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.3328, |
| "grad_norm": 0.9569659066942513, |
| "learning_rate": 2.6666666666666667e-05, |
| "loss": 0.7271, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.3456, |
| "grad_norm": 1.0180903946753956, |
| "learning_rate": 2.7692307692307694e-05, |
| "loss": 0.6983, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.3584, |
| "grad_norm": 0.9834048884701009, |
| "learning_rate": 2.871794871794872e-05, |
| "loss": 0.7135, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.3712, |
| "grad_norm": 1.0073676867959942, |
| "learning_rate": 2.9743589743589747e-05, |
| "loss": 0.7113, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.384, |
| "grad_norm": 0.7994115171324645, |
| "learning_rate": 3.0769230769230774e-05, |
| "loss": 0.7019, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.3968, |
| "grad_norm": 0.8419690903250425, |
| "learning_rate": 3.1794871794871795e-05, |
| "loss": 0.6884, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.4096, |
| "grad_norm": 0.9649544417885602, |
| "learning_rate": 3.282051282051282e-05, |
| "loss": 0.6883, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.4224, |
| "grad_norm": 0.9482176195571902, |
| "learning_rate": 3.384615384615385e-05, |
| "loss": 0.6922, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.4352, |
| "grad_norm": 1.2054232359185417, |
| "learning_rate": 3.4871794871794875e-05, |
| "loss": 0.6842, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.448, |
| "grad_norm": 0.8304160439310835, |
| "learning_rate": 3.58974358974359e-05, |
| "loss": 0.6849, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.4608, |
| "grad_norm": 0.7788944301557317, |
| "learning_rate": 3.692307692307693e-05, |
| "loss": 0.6808, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.4736, |
| "grad_norm": 0.9211808519940944, |
| "learning_rate": 3.794871794871795e-05, |
| "loss": 0.6816, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.4864, |
| "grad_norm": 1.141755499672058, |
| "learning_rate": 3.8974358974358976e-05, |
| "loss": 0.6771, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.4992, |
| "grad_norm": 1.2582244397210618, |
| "learning_rate": 4e-05, |
| "loss": 0.6694, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.512, |
| "grad_norm": 1.213751424428597, |
| "learning_rate": 3.9999198907597046e-05, |
| "loss": 0.6798, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.5248, |
| "grad_norm": 1.072160256932152, |
| "learning_rate": 3.9996795694563096e-05, |
| "loss": 0.6817, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.5376, |
| "grad_norm": 1.0276409716576014, |
| "learning_rate": 3.999279055341771e-05, |
| "loss": 0.6856, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.5504, |
| "grad_norm": 0.9611453357798541, |
| "learning_rate": 3.998718380500971e-05, |
| "loss": 0.6655, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.5632, |
| "grad_norm": 1.4420963670568259, |
| "learning_rate": 3.997997589849145e-05, |
| "loss": 0.6697, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.576, |
| "grad_norm": 0.7531021939810414, |
| "learning_rate": 3.9971167411282835e-05, |
| "loss": 0.668, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.5888, |
| "grad_norm": 1.3611499420758055, |
| "learning_rate": 3.99607590490251e-05, |
| "loss": 0.6726, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.6016, |
| "grad_norm": 0.7676770155990247, |
| "learning_rate": 3.9948751645524235e-05, |
| "loss": 0.6728, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.6144, |
| "grad_norm": 1.0672519923351085, |
| "learning_rate": 3.9935146162684206e-05, |
| "loss": 0.6539, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.6272, |
| "grad_norm": 0.9360474930296524, |
| "learning_rate": 3.9919943690429906e-05, |
| "loss": 0.6615, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.64, |
| "grad_norm": 0.9115905560265815, |
| "learning_rate": 3.9903145446619837e-05, |
| "loss": 0.6737, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.6528, |
| "grad_norm": 0.9376645251323517, |
| "learning_rate": 3.9884752776948564e-05, |
| "loss": 0.6509, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.6656, |
| "grad_norm": 0.9909901658839197, |
| "learning_rate": 3.9864767154838864e-05, |
| "loss": 0.6613, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.6784, |
| "grad_norm": 0.9219417875130282, |
| "learning_rate": 3.9843190181323744e-05, |
| "loss": 0.6518, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.6912, |
| "grad_norm": 0.8877846456269147, |
| "learning_rate": 3.982002358491817e-05, |
| "loss": 0.6687, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.704, |
| "grad_norm": 0.8164231309596293, |
| "learning_rate": 3.979526922148058e-05, |
| "loss": 0.6441, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.7168, |
| "grad_norm": 0.7957660461983604, |
| "learning_rate": 3.9768929074064206e-05, |
| "loss": 0.6528, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.7296, |
| "grad_norm": 1.1236119183964566, |
| "learning_rate": 3.9741005252758255e-05, |
| "loss": 0.6545, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.7424, |
| "grad_norm": 0.9479944170480047, |
| "learning_rate": 3.971149999451886e-05, |
| "loss": 0.6562, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.7552, |
| "grad_norm": 0.626548034871867, |
| "learning_rate": 3.9680415662989806e-05, |
| "loss": 0.6558, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.768, |
| "grad_norm": 1.0298462713659975, |
| "learning_rate": 3.9647754748313294e-05, |
| "loss": 0.6539, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.7808, |
| "grad_norm": 0.8077874189369452, |
| "learning_rate": 3.96135198669304e-05, |
| "loss": 0.6793, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.7936, |
| "grad_norm": 0.740398264505655, |
| "learning_rate": 3.957771376137144e-05, |
| "loss": 0.6522, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.8064, |
| "grad_norm": 0.6750195443469825, |
| "learning_rate": 3.954033930003634e-05, |
| "loss": 0.665, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.8192, |
| "grad_norm": 0.7647643575597695, |
| "learning_rate": 3.9501399476964806e-05, |
| "loss": 0.6413, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.832, |
| "grad_norm": 1.0572104788097436, |
| "learning_rate": 3.946089741159648e-05, |
| "loss": 0.6669, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.8448, |
| "grad_norm": 1.1325165717999284, |
| "learning_rate": 3.9418836348521045e-05, |
| "loss": 0.6227, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.8576, |
| "grad_norm": 0.7775720377493975, |
| "learning_rate": 3.937521965721831e-05, |
| "loss": 0.6501, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.8704, |
| "grad_norm": 0.44099236187160734, |
| "learning_rate": 3.933005083178828e-05, |
| "loss": 0.6446, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.8832, |
| "grad_norm": 0.6272417464679025, |
| "learning_rate": 3.928333349067125e-05, |
| "loss": 0.6545, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.896, |
| "grad_norm": 0.8503735452053673, |
| "learning_rate": 3.923507137635792e-05, |
| "loss": 0.6513, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.9088, |
| "grad_norm": 1.333730016691775, |
| "learning_rate": 3.9185268355089606e-05, |
| "loss": 0.6548, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.9216, |
| "grad_norm": 0.6253427770084057, |
| "learning_rate": 3.913392841654851e-05, |
| "loss": 0.6416, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.9344, |
| "grad_norm": 0.7920186893055566, |
| "learning_rate": 3.9081055673538093e-05, |
| "loss": 0.6457, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.9472, |
| "grad_norm": 1.6339368533950605, |
| "learning_rate": 3.902665436165364e-05, |
| "loss": 0.6484, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.96, |
| "grad_norm": 0.5544575336714153, |
| "learning_rate": 3.897072883894291e-05, |
| "loss": 0.6438, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.9728, |
| "grad_norm": 1.974943623020223, |
| "learning_rate": 3.8913283585557054e-05, |
| "loss": 0.6444, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.9856, |
| "grad_norm": 0.9821828355476883, |
| "learning_rate": 3.885432320339167e-05, |
| "loss": 0.6424, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.9984, |
| "grad_norm": 2.663484130789532, |
| "learning_rate": 3.879385241571817e-05, |
| "loss": 0.6477, |
| "step": 78 |
| }, |
| { |
| "epoch": 1.0112, |
| "grad_norm": 4.941382266435169, |
| "learning_rate": 3.873187606680543e-05, |
| "loss": 1.183, |
| "step": 79 |
| }, |
| { |
| "epoch": 1.024, |
| "grad_norm": 0.7995378733417778, |
| "learning_rate": 3.866839912153168e-05, |
| "loss": 0.59, |
| "step": 80 |
| }, |
| { |
| "epoch": 1.0368, |
| "grad_norm": 1.7720817417042583, |
| "learning_rate": 3.860342666498677e-05, |
| "loss": 0.6096, |
| "step": 81 |
| }, |
| { |
| "epoch": 1.0496, |
| "grad_norm": 1.26620382611777, |
| "learning_rate": 3.853696390206484e-05, |
| "loss": 0.5998, |
| "step": 82 |
| }, |
| { |
| "epoch": 1.0624, |
| "grad_norm": 1.4754633366791623, |
| "learning_rate": 3.846901615704734e-05, |
| "loss": 0.5792, |
| "step": 83 |
| }, |
| { |
| "epoch": 1.0752, |
| "grad_norm": 1.2481412136313652, |
| "learning_rate": 3.839958887317649e-05, |
| "loss": 0.5849, |
| "step": 84 |
| }, |
| { |
| "epoch": 1.088, |
| "grad_norm": 1.1376920215674418, |
| "learning_rate": 3.832868761221926e-05, |
| "loss": 0.5971, |
| "step": 85 |
| }, |
| { |
| "epoch": 1.1008, |
| "grad_norm": 1.1320442541159759, |
| "learning_rate": 3.825631805402182e-05, |
| "loss": 0.6035, |
| "step": 86 |
| }, |
| { |
| "epoch": 1.1136, |
| "grad_norm": 1.0232067338865352, |
| "learning_rate": 3.818248599605448e-05, |
| "loss": 0.6091, |
| "step": 87 |
| }, |
| { |
| "epoch": 1.1264, |
| "grad_norm": 1.0202253176884335, |
| "learning_rate": 3.810719735294731e-05, |
| "loss": 0.58, |
| "step": 88 |
| }, |
| { |
| "epoch": 1.1392, |
| "grad_norm": 0.6918802575336643, |
| "learning_rate": 3.8030458156016326e-05, |
| "loss": 0.568, |
| "step": 89 |
| }, |
| { |
| "epoch": 1.152, |
| "grad_norm": 0.9802445576062785, |
| "learning_rate": 3.795227455278029e-05, |
| "loss": 0.5868, |
| "step": 90 |
| }, |
| { |
| "epoch": 1.1648, |
| "grad_norm": 0.6331372584047888, |
| "learning_rate": 3.787265280646825e-05, |
| "loss": 0.583, |
| "step": 91 |
| }, |
| { |
| "epoch": 1.1776, |
| "grad_norm": 0.8631768926664705, |
| "learning_rate": 3.7791599295517825e-05, |
| "loss": 0.5861, |
| "step": 92 |
| }, |
| { |
| "epoch": 1.1904, |
| "grad_norm": 0.6615914830584512, |
| "learning_rate": 3.7709120513064196e-05, |
| "loss": 0.576, |
| "step": 93 |
| }, |
| { |
| "epoch": 1.2032, |
| "grad_norm": 0.800863810967628, |
| "learning_rate": 3.762522306641998e-05, |
| "loss": 0.5875, |
| "step": 94 |
| }, |
| { |
| "epoch": 1.216, |
| "grad_norm": 0.6580379221698681, |
| "learning_rate": 3.7539913676545874e-05, |
| "loss": 0.5921, |
| "step": 95 |
| }, |
| { |
| "epoch": 1.2288000000000001, |
| "grad_norm": 0.785879346898882, |
| "learning_rate": 3.745319917751229e-05, |
| "loss": 0.5748, |
| "step": 96 |
| }, |
| { |
| "epoch": 1.2416, |
| "grad_norm": 0.6096781399430284, |
| "learning_rate": 3.736508651595188e-05, |
| "loss": 0.59, |
| "step": 97 |
| }, |
| { |
| "epoch": 1.2544, |
| "grad_norm": 0.6853611392115616, |
| "learning_rate": 3.727558275050301e-05, |
| "loss": 0.5897, |
| "step": 98 |
| }, |
| { |
| "epoch": 1.2671999999999999, |
| "grad_norm": 0.5845789885416388, |
| "learning_rate": 3.718469505124434e-05, |
| "loss": 0.5869, |
| "step": 99 |
| }, |
| { |
| "epoch": 1.28, |
| "grad_norm": 0.6403201084831845, |
| "learning_rate": 3.709243069912041e-05, |
| "loss": 0.5817, |
| "step": 100 |
| }, |
| { |
| "epoch": 1.2928, |
| "grad_norm": 0.8028194459196691, |
| "learning_rate": 3.699879708535838e-05, |
| "loss": 0.588, |
| "step": 101 |
| }, |
| { |
| "epoch": 1.3056, |
| "grad_norm": 0.7762362375714167, |
| "learning_rate": 3.69038017108759e-05, |
| "loss": 0.5813, |
| "step": 102 |
| }, |
| { |
| "epoch": 1.3184, |
| "grad_norm": 0.5011274095834896, |
| "learning_rate": 3.680745218568026e-05, |
| "loss": 0.5811, |
| "step": 103 |
| }, |
| { |
| "epoch": 1.3312, |
| "grad_norm": 0.6540273787471301, |
| "learning_rate": 3.6709756228258735e-05, |
| "loss": 0.5645, |
| "step": 104 |
| }, |
| { |
| "epoch": 1.3439999999999999, |
| "grad_norm": 0.48135368825756675, |
| "learning_rate": 3.6610721664960236e-05, |
| "loss": 0.6093, |
| "step": 105 |
| }, |
| { |
| "epoch": 1.3568, |
| "grad_norm": 0.5457309582236752, |
| "learning_rate": 3.65103564293684e-05, |
| "loss": 0.5732, |
| "step": 106 |
| }, |
| { |
| "epoch": 1.3696, |
| "grad_norm": 0.43409957473161764, |
| "learning_rate": 3.640866856166601e-05, |
| "loss": 0.5858, |
| "step": 107 |
| }, |
| { |
| "epoch": 1.3824, |
| "grad_norm": 0.6416881133504726, |
| "learning_rate": 3.6305666207990886e-05, |
| "loss": 0.5818, |
| "step": 108 |
| }, |
| { |
| "epoch": 1.3952, |
| "grad_norm": 0.6620870091612782, |
| "learning_rate": 3.6201357619783336e-05, |
| "loss": 0.5754, |
| "step": 109 |
| }, |
| { |
| "epoch": 1.408, |
| "grad_norm": 0.41657718891348167, |
| "learning_rate": 3.609575115312511e-05, |
| "loss": 0.5933, |
| "step": 110 |
| }, |
| { |
| "epoch": 1.4208, |
| "grad_norm": 0.6336040794251142, |
| "learning_rate": 3.598885526807003e-05, |
| "loss": 0.5724, |
| "step": 111 |
| }, |
| { |
| "epoch": 1.4336, |
| "grad_norm": 0.4627677166330687, |
| "learning_rate": 3.5880678527966224e-05, |
| "loss": 0.5716, |
| "step": 112 |
| }, |
| { |
| "epoch": 1.4464000000000001, |
| "grad_norm": 0.5680979429728227, |
| "learning_rate": 3.577122959877017e-05, |
| "loss": 0.5781, |
| "step": 113 |
| }, |
| { |
| "epoch": 1.4592, |
| "grad_norm": 0.6426557810505764, |
| "learning_rate": 3.566051724835245e-05, |
| "loss": 0.5893, |
| "step": 114 |
| }, |
| { |
| "epoch": 1.472, |
| "grad_norm": 0.6851542652536005, |
| "learning_rate": 3.554855034579532e-05, |
| "loss": 0.5967, |
| "step": 115 |
| }, |
| { |
| "epoch": 1.4848, |
| "grad_norm": 0.7823470379961811, |
| "learning_rate": 3.5435337860682304e-05, |
| "loss": 0.5705, |
| "step": 116 |
| }, |
| { |
| "epoch": 1.4976, |
| "grad_norm": 0.45695030313416946, |
| "learning_rate": 3.532088886237956e-05, |
| "loss": 0.5788, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.5104, |
| "grad_norm": 0.8111261846226543, |
| "learning_rate": 3.520521251930941e-05, |
| "loss": 0.5911, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.5232, |
| "grad_norm": 0.6168312996627079, |
| "learning_rate": 3.5088318098215805e-05, |
| "loss": 0.5807, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.536, |
| "grad_norm": 0.49043925134838406, |
| "learning_rate": 3.497021496342203e-05, |
| "loss": 0.5831, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.5488, |
| "grad_norm": 0.6270161616203219, |
| "learning_rate": 3.485091257608047e-05, |
| "loss": 0.5751, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.5615999999999999, |
| "grad_norm": 0.39170508583909547, |
| "learning_rate": 3.473042049341474e-05, |
| "loss": 0.5613, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.5744, |
| "grad_norm": 0.522661971617142, |
| "learning_rate": 3.4608748367954064e-05, |
| "loss": 0.5717, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.5872000000000002, |
| "grad_norm": 0.5257015468025916, |
| "learning_rate": 3.4485905946759965e-05, |
| "loss": 0.5798, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.6, |
| "grad_norm": 0.47688461562895773, |
| "learning_rate": 3.4361903070645484e-05, |
| "loss": 0.5757, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.6128, |
| "grad_norm": 0.4351215404541433, |
| "learning_rate": 3.423674967338681e-05, |
| "loss": 0.5776, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.6256, |
| "grad_norm": 0.6051250233610711, |
| "learning_rate": 3.411045578092754e-05, |
| "loss": 0.6019, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.6383999999999999, |
| "grad_norm": 0.6232000529993532, |
| "learning_rate": 3.398303151057543e-05, |
| "loss": 0.5904, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.6512, |
| "grad_norm": 0.40200857504745496, |
| "learning_rate": 3.385448707019199e-05, |
| "loss": 0.5782, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.6640000000000001, |
| "grad_norm": 0.49383447796538155, |
| "learning_rate": 3.372483275737468e-05, |
| "loss": 0.5649, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.6768, |
| "grad_norm": 0.4397035126378412, |
| "learning_rate": 3.359407895863199e-05, |
| "loss": 0.579, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.6896, |
| "grad_norm": 0.47236407026339106, |
| "learning_rate": 3.34622361485514e-05, |
| "loss": 0.591, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.7024, |
| "grad_norm": 0.42274762850501924, |
| "learning_rate": 3.332931488896029e-05, |
| "loss": 0.574, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.7151999999999998, |
| "grad_norm": 0.5543584627415455, |
| "learning_rate": 3.319532582807977e-05, |
| "loss": 0.5895, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.728, |
| "grad_norm": 0.42076772007078395, |
| "learning_rate": 3.30602796996717e-05, |
| "loss": 0.5691, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.7408000000000001, |
| "grad_norm": 0.5069394232619601, |
| "learning_rate": 3.2924187322178865e-05, |
| "loss": 0.5884, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.7536, |
| "grad_norm": 0.4038923709639309, |
| "learning_rate": 3.278705959785821e-05, |
| "loss": 0.5411, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.7664, |
| "grad_norm": 0.5133335475939002, |
| "learning_rate": 3.2648907511907544e-05, |
| "loss": 0.6003, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.7792, |
| "grad_norm": 0.39682004790635145, |
| "learning_rate": 3.250974213158555e-05, |
| "loss": 0.5524, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.792, |
| "grad_norm": 0.5296349936034231, |
| "learning_rate": 3.23695746053251e-05, |
| "loss": 0.581, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.8048, |
| "grad_norm": 0.5996083460308522, |
| "learning_rate": 3.222841616184025e-05, |
| "loss": 0.5717, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.8176, |
| "grad_norm": 0.5413798728390828, |
| "learning_rate": 3.208627810922665e-05, |
| "loss": 0.5788, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.8304, |
| "grad_norm": 0.5175466976647898, |
| "learning_rate": 3.194317183405573e-05, |
| "loss": 0.5904, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.8432, |
| "grad_norm": 0.4682093401048202, |
| "learning_rate": 3.1799108800462466e-05, |
| "loss": 0.5642, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.8559999999999999, |
| "grad_norm": 0.47551566805255907, |
| "learning_rate": 3.1654100549227024e-05, |
| "loss": 0.5824, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.8688, |
| "grad_norm": 0.4318024287468201, |
| "learning_rate": 3.1508158696850275e-05, |
| "loss": 0.5857, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.8816000000000002, |
| "grad_norm": 0.420772862898559, |
| "learning_rate": 3.136129493462312e-05, |
| "loss": 0.5474, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.8944, |
| "grad_norm": 0.48689725875900847, |
| "learning_rate": 3.121352102768998e-05, |
| "loss": 0.5799, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.9072, |
| "grad_norm": 0.43171984330104424, |
| "learning_rate": 3.106484881410628e-05, |
| "loss": 0.5766, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.92, |
| "grad_norm": 0.5738204026591428, |
| "learning_rate": 3.091529020389009e-05, |
| "loss": 0.5673, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.9327999999999999, |
| "grad_norm": 0.5224543547495724, |
| "learning_rate": 3.076485717806808e-05, |
| "loss": 0.581, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.9456, |
| "grad_norm": 0.48031966263105436, |
| "learning_rate": 3.061356178771564e-05, |
| "loss": 0.5571, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.9584000000000001, |
| "grad_norm": 0.5100900921302143, |
| "learning_rate": 3.0461416152991555e-05, |
| "loss": 0.5794, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.9712, |
| "grad_norm": 0.4832266559009672, |
| "learning_rate": 3.0308432462167045e-05, |
| "loss": 0.5719, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.984, |
| "grad_norm": 0.49697395866229604, |
| "learning_rate": 3.015462297064936e-05, |
| "loss": 0.595, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.9968, |
| "grad_norm": 0.5025796919566643, |
| "learning_rate": 3.0000000000000004e-05, |
| "loss": 0.5512, |
| "step": 156 |
| }, |
| { |
| "epoch": 2.0096, |
| "grad_norm": 1.0942861291092771, |
| "learning_rate": 2.98445759369477e-05, |
| "loss": 1.0569, |
| "step": 157 |
| }, |
| { |
| "epoch": 2.0224, |
| "grad_norm": 1.1203214893028626, |
| "learning_rate": 2.9688363232396056e-05, |
| "loss": 0.4969, |
| "step": 158 |
| }, |
| { |
| "epoch": 2.0352, |
| "grad_norm": 0.7092678871521405, |
| "learning_rate": 2.9531374400426158e-05, |
| "loss": 0.5063, |
| "step": 159 |
| }, |
| { |
| "epoch": 2.048, |
| "grad_norm": 0.7519363641433118, |
| "learning_rate": 2.9373622017294075e-05, |
| "loss": 0.5289, |
| "step": 160 |
| }, |
| { |
| "epoch": 2.0608, |
| "grad_norm": 0.753356480873723, |
| "learning_rate": 2.9215118720423375e-05, |
| "loss": 0.4947, |
| "step": 161 |
| }, |
| { |
| "epoch": 2.0736, |
| "grad_norm": 0.5426401035171584, |
| "learning_rate": 2.9055877207392752e-05, |
| "loss": 0.4962, |
| "step": 162 |
| }, |
| { |
| "epoch": 2.0864, |
| "grad_norm": 0.707369552029798, |
| "learning_rate": 2.8895910234918828e-05, |
| "loss": 0.5074, |
| "step": 163 |
| }, |
| { |
| "epoch": 2.0992, |
| "grad_norm": 0.5497395140252154, |
| "learning_rate": 2.873523061783426e-05, |
| "loss": 0.5197, |
| "step": 164 |
| }, |
| { |
| "epoch": 2.112, |
| "grad_norm": 0.6185549300177703, |
| "learning_rate": 2.8573851228061084e-05, |
| "loss": 0.5023, |
| "step": 165 |
| }, |
| { |
| "epoch": 2.1248, |
| "grad_norm": 0.580497252773744, |
| "learning_rate": 2.8411784993579633e-05, |
| "loss": 0.5072, |
| "step": 166 |
| }, |
| { |
| "epoch": 2.1376, |
| "grad_norm": 0.5409738558428386, |
| "learning_rate": 2.8249044897392814e-05, |
| "loss": 0.4982, |
| "step": 167 |
| }, |
| { |
| "epoch": 2.1504, |
| "grad_norm": 0.43534172535797905, |
| "learning_rate": 2.80856439764861e-05, |
| "loss": 0.4771, |
| "step": 168 |
| }, |
| { |
| "epoch": 2.1632, |
| "grad_norm": 0.4601930566955383, |
| "learning_rate": 2.792159532078314e-05, |
| "loss": 0.4929, |
| "step": 169 |
| }, |
| { |
| "epoch": 2.176, |
| "grad_norm": 0.4320507312702627, |
| "learning_rate": 2.77569120720971e-05, |
| "loss": 0.5226, |
| "step": 170 |
| }, |
| { |
| "epoch": 2.1888, |
| "grad_norm": 0.48502043578579096, |
| "learning_rate": 2.7591607423077932e-05, |
| "loss": 0.499, |
| "step": 171 |
| }, |
| { |
| "epoch": 2.2016, |
| "grad_norm": 0.4859522614450258, |
| "learning_rate": 2.7425694616155474e-05, |
| "loss": 0.5045, |
| "step": 172 |
| }, |
| { |
| "epoch": 2.2144, |
| "grad_norm": 0.49391904855284163, |
| "learning_rate": 2.7259186942478656e-05, |
| "loss": 0.4968, |
| "step": 173 |
| }, |
| { |
| "epoch": 2.2272, |
| "grad_norm": 0.4325613973134699, |
| "learning_rate": 2.7092097740850712e-05, |
| "loss": 0.5024, |
| "step": 174 |
| }, |
| { |
| "epoch": 2.24, |
| "grad_norm": 0.3873002923244623, |
| "learning_rate": 2.692444039666066e-05, |
| "loss": 0.5131, |
| "step": 175 |
| }, |
| { |
| "epoch": 2.2528, |
| "grad_norm": 0.41706430806430034, |
| "learning_rate": 2.6756228340810946e-05, |
| "loss": 0.4776, |
| "step": 176 |
| }, |
| { |
| "epoch": 2.2656, |
| "grad_norm": 0.3502080718383108, |
| "learning_rate": 2.6587475048641596e-05, |
| "loss": 0.5073, |
| "step": 177 |
| }, |
| { |
| "epoch": 2.2784, |
| "grad_norm": 0.42777947299044994, |
| "learning_rate": 2.6418194038850634e-05, |
| "loss": 0.4961, |
| "step": 178 |
| }, |
| { |
| "epoch": 2.2912, |
| "grad_norm": 0.38837734226091947, |
| "learning_rate": 2.624839887241115e-05, |
| "loss": 0.5049, |
| "step": 179 |
| }, |
| { |
| "epoch": 2.304, |
| "grad_norm": 0.3670279921533217, |
| "learning_rate": 2.607810315148494e-05, |
| "loss": 0.4865, |
| "step": 180 |
| }, |
| { |
| "epoch": 2.3168, |
| "grad_norm": 0.39350246399828265, |
| "learning_rate": 2.5907320518332827e-05, |
| "loss": 0.5026, |
| "step": 181 |
| }, |
| { |
| "epoch": 2.3296, |
| "grad_norm": 0.3650725578145916, |
| "learning_rate": 2.5736064654221808e-05, |
| "loss": 0.5152, |
| "step": 182 |
| }, |
| { |
| "epoch": 2.3424, |
| "grad_norm": 0.3585014755905389, |
| "learning_rate": 2.5564349278329056e-05, |
| "loss": 0.477, |
| "step": 183 |
| }, |
| { |
| "epoch": 2.3552, |
| "grad_norm": 0.4033941898356016, |
| "learning_rate": 2.539218814664288e-05, |
| "loss": 0.5262, |
| "step": 184 |
| }, |
| { |
| "epoch": 2.368, |
| "grad_norm": 0.4119980141819767, |
| "learning_rate": 2.521959505086075e-05, |
| "loss": 0.4792, |
| "step": 185 |
| }, |
| { |
| "epoch": 2.3808, |
| "grad_norm": 0.38231858641717104, |
| "learning_rate": 2.5046583817284437e-05, |
| "loss": 0.4831, |
| "step": 186 |
| }, |
| { |
| "epoch": 2.3936, |
| "grad_norm": 0.39842931794253045, |
| "learning_rate": 2.487316830571244e-05, |
| "loss": 0.5192, |
| "step": 187 |
| }, |
| { |
| "epoch": 2.4064, |
| "grad_norm": 0.35082108346987945, |
| "learning_rate": 2.4699362408329646e-05, |
| "loss": 0.4981, |
| "step": 188 |
| }, |
| { |
| "epoch": 2.4192, |
| "grad_norm": 0.37643446548871795, |
| "learning_rate": 2.4525180048594452e-05, |
| "loss": 0.5059, |
| "step": 189 |
| }, |
| { |
| "epoch": 2.432, |
| "grad_norm": 0.46134742515544525, |
| "learning_rate": 2.435063518012335e-05, |
| "loss": 0.5068, |
| "step": 190 |
| }, |
| { |
| "epoch": 2.4448, |
| "grad_norm": 0.31260136088701385, |
| "learning_rate": 2.4175741785573177e-05, |
| "loss": 0.4882, |
| "step": 191 |
| }, |
| { |
| "epoch": 2.4576000000000002, |
| "grad_norm": 0.4080202121089193, |
| "learning_rate": 2.4000513875520892e-05, |
| "loss": 0.4889, |
| "step": 192 |
| }, |
| { |
| "epoch": 2.4704, |
| "grad_norm": 0.3951121417796483, |
| "learning_rate": 2.3824965487341247e-05, |
| "loss": 0.5041, |
| "step": 193 |
| }, |
| { |
| "epoch": 2.4832, |
| "grad_norm": 0.4414864009705837, |
| "learning_rate": 2.3649110684082258e-05, |
| "loss": 0.5, |
| "step": 194 |
| }, |
| { |
| "epoch": 2.496, |
| "grad_norm": 0.36475828248511283, |
| "learning_rate": 2.3472963553338614e-05, |
| "loss": 0.4911, |
| "step": 195 |
| }, |
| { |
| "epoch": 2.5088, |
| "grad_norm": 0.3681976840329648, |
| "learning_rate": 2.3296538206123134e-05, |
| "loss": 0.4974, |
| "step": 196 |
| }, |
| { |
| "epoch": 2.5216, |
| "grad_norm": 0.4408783566439563, |
| "learning_rate": 2.311984877573636e-05, |
| "loss": 0.5057, |
| "step": 197 |
| }, |
| { |
| "epoch": 2.5343999999999998, |
| "grad_norm": 0.2998572928867727, |
| "learning_rate": 2.2942909416634326e-05, |
| "loss": 0.5128, |
| "step": 198 |
| }, |
| { |
| "epoch": 2.5472, |
| "grad_norm": 0.4093518980642346, |
| "learning_rate": 2.2765734303294666e-05, |
| "loss": 0.5151, |
| "step": 199 |
| }, |
| { |
| "epoch": 2.56, |
| "grad_norm": 0.38801526961333443, |
| "learning_rate": 2.2588337629081107e-05, |
| "loss": 0.4786, |
| "step": 200 |
| }, |
| { |
| "epoch": 2.5728, |
| "grad_norm": 0.3069570187353235, |
| "learning_rate": 2.2410733605106462e-05, |
| "loss": 0.4941, |
| "step": 201 |
| }, |
| { |
| "epoch": 2.5856, |
| "grad_norm": 0.3712540930355693, |
| "learning_rate": 2.2232936459094158e-05, |
| "loss": 0.5228, |
| "step": 202 |
| }, |
| { |
| "epoch": 2.5984, |
| "grad_norm": 0.35360170691882187, |
| "learning_rate": 2.205496043423849e-05, |
| "loss": 0.4948, |
| "step": 203 |
| }, |
| { |
| "epoch": 2.6112, |
| "grad_norm": 0.2977082345525892, |
| "learning_rate": 2.1876819788063586e-05, |
| "loss": 0.4809, |
| "step": 204 |
| }, |
| { |
| "epoch": 2.624, |
| "grad_norm": 0.32965077669737364, |
| "learning_rate": 2.16985287912813e-05, |
| "loss": 0.5145, |
| "step": 205 |
| }, |
| { |
| "epoch": 2.6368, |
| "grad_norm": 0.3343456972774645, |
| "learning_rate": 2.1520101726647922e-05, |
| "loss": 0.505, |
| "step": 206 |
| }, |
| { |
| "epoch": 2.6496, |
| "grad_norm": 0.3159281951261117, |
| "learning_rate": 2.1341552887820048e-05, |
| "loss": 0.4877, |
| "step": 207 |
| }, |
| { |
| "epoch": 2.6624, |
| "grad_norm": 0.3724089659208682, |
| "learning_rate": 2.1162896578209517e-05, |
| "loss": 0.5022, |
| "step": 208 |
| }, |
| { |
| "epoch": 2.6752000000000002, |
| "grad_norm": 0.3938799705682781, |
| "learning_rate": 2.0984147109837564e-05, |
| "loss": 0.4937, |
| "step": 209 |
| }, |
| { |
| "epoch": 2.6879999999999997, |
| "grad_norm": 0.32624238141968814, |
| "learning_rate": 2.0805318802188307e-05, |
| "loss": 0.5074, |
| "step": 210 |
| }, |
| { |
| "epoch": 2.7008, |
| "grad_norm": 0.32607377269848736, |
| "learning_rate": 2.0626425981061608e-05, |
| "loss": 0.5063, |
| "step": 211 |
| }, |
| { |
| "epoch": 2.7136, |
| "grad_norm": 0.33016534479694615, |
| "learning_rate": 2.0447482977425465e-05, |
| "loss": 0.4901, |
| "step": 212 |
| }, |
| { |
| "epoch": 2.7264, |
| "grad_norm": 0.3220830396065913, |
| "learning_rate": 2.0268504126267952e-05, |
| "loss": 0.4913, |
| "step": 213 |
| }, |
| { |
| "epoch": 2.7392, |
| "grad_norm": 0.33650418658391196, |
| "learning_rate": 2.008950376544887e-05, |
| "loss": 0.5169, |
| "step": 214 |
| }, |
| { |
| "epoch": 2.752, |
| "grad_norm": 0.31364559213449145, |
| "learning_rate": 1.9910496234551132e-05, |
| "loss": 0.4862, |
| "step": 215 |
| }, |
| { |
| "epoch": 2.7648, |
| "grad_norm": 0.35159923424406647, |
| "learning_rate": 1.9731495873732055e-05, |
| "loss": 0.5215, |
| "step": 216 |
| }, |
| { |
| "epoch": 2.7776, |
| "grad_norm": 0.350127339673872, |
| "learning_rate": 1.9552517022574542e-05, |
| "loss": 0.4974, |
| "step": 217 |
| }, |
| { |
| "epoch": 2.7904, |
| "grad_norm": 0.3309668325479677, |
| "learning_rate": 1.93735740189384e-05, |
| "loss": 0.4947, |
| "step": 218 |
| }, |
| { |
| "epoch": 2.8032, |
| "grad_norm": 0.3890538321973108, |
| "learning_rate": 1.9194681197811703e-05, |
| "loss": 0.5292, |
| "step": 219 |
| }, |
| { |
| "epoch": 2.816, |
| "grad_norm": 0.304625863899522, |
| "learning_rate": 1.901585289016244e-05, |
| "loss": 0.4897, |
| "step": 220 |
| }, |
| { |
| "epoch": 2.8288, |
| "grad_norm": 0.36889928691140406, |
| "learning_rate": 1.8837103421790486e-05, |
| "loss": 0.5319, |
| "step": 221 |
| }, |
| { |
| "epoch": 2.8416, |
| "grad_norm": 0.32072648363389383, |
| "learning_rate": 1.8658447112179952e-05, |
| "loss": 0.4795, |
| "step": 222 |
| }, |
| { |
| "epoch": 2.8544, |
| "grad_norm": 1.5334556064043223, |
| "learning_rate": 1.8479898273352084e-05, |
| "loss": 0.512, |
| "step": 223 |
| }, |
| { |
| "epoch": 2.8672, |
| "grad_norm": 0.32011564920171937, |
| "learning_rate": 1.83014712087187e-05, |
| "loss": 0.472, |
| "step": 224 |
| }, |
| { |
| "epoch": 2.88, |
| "grad_norm": 0.3654491843646982, |
| "learning_rate": 1.8123180211936417e-05, |
| "loss": 0.5271, |
| "step": 225 |
| }, |
| { |
| "epoch": 2.8928000000000003, |
| "grad_norm": 0.2947574558063537, |
| "learning_rate": 1.794503956576152e-05, |
| "loss": 0.4804, |
| "step": 226 |
| }, |
| { |
| "epoch": 2.9055999999999997, |
| "grad_norm": 0.3638823371729275, |
| "learning_rate": 1.776706354090585e-05, |
| "loss": 0.5218, |
| "step": 227 |
| }, |
| { |
| "epoch": 2.9184, |
| "grad_norm": 0.3208139595881512, |
| "learning_rate": 1.758926639489354e-05, |
| "loss": 0.5259, |
| "step": 228 |
| }, |
| { |
| "epoch": 2.9312, |
| "grad_norm": 0.31957527909719796, |
| "learning_rate": 1.7411662370918893e-05, |
| "loss": 0.49, |
| "step": 229 |
| }, |
| { |
| "epoch": 2.944, |
| "grad_norm": 0.31359088379829064, |
| "learning_rate": 1.7234265696705344e-05, |
| "loss": 0.5195, |
| "step": 230 |
| }, |
| { |
| "epoch": 2.9568, |
| "grad_norm": 1.1655499021539601, |
| "learning_rate": 1.7057090583365678e-05, |
| "loss": 0.4975, |
| "step": 231 |
| }, |
| { |
| "epoch": 2.9696, |
| "grad_norm": 0.39715740402723715, |
| "learning_rate": 1.6880151224263646e-05, |
| "loss": 0.4893, |
| "step": 232 |
| }, |
| { |
| "epoch": 2.9824, |
| "grad_norm": 0.35280843733532735, |
| "learning_rate": 1.6703461793876876e-05, |
| "loss": 0.5173, |
| "step": 233 |
| }, |
| { |
| "epoch": 2.9952, |
| "grad_norm": 0.3559930169592642, |
| "learning_rate": 1.6527036446661396e-05, |
| "loss": 0.4812, |
| "step": 234 |
| }, |
| { |
| "epoch": 3.008, |
| "grad_norm": 0.7231980902652241, |
| "learning_rate": 1.635088931591775e-05, |
| "loss": 0.9067, |
| "step": 235 |
| }, |
| { |
| "epoch": 3.0208, |
| "grad_norm": 0.5034560975349548, |
| "learning_rate": 1.6175034512658753e-05, |
| "loss": 0.4311, |
| "step": 236 |
| }, |
| { |
| "epoch": 3.0336, |
| "grad_norm": 0.4368081123116843, |
| "learning_rate": 1.5999486124479115e-05, |
| "loss": 0.4352, |
| "step": 237 |
| }, |
| { |
| "epoch": 3.0464, |
| "grad_norm": 0.6460103786782603, |
| "learning_rate": 1.5824258214426833e-05, |
| "loss": 0.4447, |
| "step": 238 |
| }, |
| { |
| "epoch": 3.0592, |
| "grad_norm": 0.5604762648113057, |
| "learning_rate": 1.5649364819876655e-05, |
| "loss": 0.4459, |
| "step": 239 |
| }, |
| { |
| "epoch": 3.072, |
| "grad_norm": 0.4659688108810321, |
| "learning_rate": 1.547481995140556e-05, |
| "loss": 0.4242, |
| "step": 240 |
| }, |
| { |
| "epoch": 3.0848, |
| "grad_norm": 0.5562504118798672, |
| "learning_rate": 1.5300637591670357e-05, |
| "loss": 0.44, |
| "step": 241 |
| }, |
| { |
| "epoch": 3.0976, |
| "grad_norm": 0.3783187935203311, |
| "learning_rate": 1.5126831694287564e-05, |
| "loss": 0.4269, |
| "step": 242 |
| }, |
| { |
| "epoch": 3.1104, |
| "grad_norm": 0.5092961964016923, |
| "learning_rate": 1.4953416182715566e-05, |
| "loss": 0.4262, |
| "step": 243 |
| }, |
| { |
| "epoch": 3.1232, |
| "grad_norm": 0.44649585580545287, |
| "learning_rate": 1.478040494913926e-05, |
| "loss": 0.4497, |
| "step": 244 |
| }, |
| { |
| "epoch": 3.136, |
| "grad_norm": 0.41274305264763705, |
| "learning_rate": 1.460781185335713e-05, |
| "loss": 0.4172, |
| "step": 245 |
| }, |
| { |
| "epoch": 3.1488, |
| "grad_norm": 0.5213506905074292, |
| "learning_rate": 1.443565072167095e-05, |
| "loss": 0.4531, |
| "step": 246 |
| }, |
| { |
| "epoch": 3.1616, |
| "grad_norm": 0.3666854421866173, |
| "learning_rate": 1.4263935345778202e-05, |
| "loss": 0.4049, |
| "step": 247 |
| }, |
| { |
| "epoch": 3.1744, |
| "grad_norm": 0.43103085237184946, |
| "learning_rate": 1.409267948166718e-05, |
| "loss": 0.4483, |
| "step": 248 |
| }, |
| { |
| "epoch": 3.1872, |
| "grad_norm": 0.352188864622588, |
| "learning_rate": 1.3921896848515064e-05, |
| "loss": 0.4269, |
| "step": 249 |
| }, |
| { |
| "epoch": 3.2, |
| "grad_norm": 0.3934477385172981, |
| "learning_rate": 1.3751601127588849e-05, |
| "loss": 0.4291, |
| "step": 250 |
| }, |
| { |
| "epoch": 3.2128, |
| "grad_norm": 0.37685856219419706, |
| "learning_rate": 1.3581805961149371e-05, |
| "loss": 0.4484, |
| "step": 251 |
| }, |
| { |
| "epoch": 3.2256, |
| "grad_norm": 0.37721604742051845, |
| "learning_rate": 1.341252495135841e-05, |
| "loss": 0.4273, |
| "step": 252 |
| }, |
| { |
| "epoch": 3.2384, |
| "grad_norm": 0.32492704599266914, |
| "learning_rate": 1.324377165918906e-05, |
| "loss": 0.4257, |
| "step": 253 |
| }, |
| { |
| "epoch": 3.2512, |
| "grad_norm": 0.3693100483401799, |
| "learning_rate": 1.3075559603339354e-05, |
| "loss": 0.4354, |
| "step": 254 |
| }, |
| { |
| "epoch": 3.2640000000000002, |
| "grad_norm": 0.3787891582540323, |
| "learning_rate": 1.2907902259149287e-05, |
| "loss": 0.4389, |
| "step": 255 |
| }, |
| { |
| "epoch": 3.2768, |
| "grad_norm": 0.34605983599487977, |
| "learning_rate": 1.274081305752135e-05, |
| "loss": 0.4342, |
| "step": 256 |
| }, |
| { |
| "epoch": 3.2896, |
| "grad_norm": 0.42900570051436154, |
| "learning_rate": 1.2574305383844528e-05, |
| "loss": 0.4297, |
| "step": 257 |
| }, |
| { |
| "epoch": 3.3024, |
| "grad_norm": 0.29955937843112695, |
| "learning_rate": 1.2408392576922075e-05, |
| "loss": 0.4192, |
| "step": 258 |
| }, |
| { |
| "epoch": 3.3152, |
| "grad_norm": 0.3868524850348689, |
| "learning_rate": 1.2243087927902905e-05, |
| "loss": 0.4211, |
| "step": 259 |
| }, |
| { |
| "epoch": 3.328, |
| "grad_norm": 0.3558484048797036, |
| "learning_rate": 1.2078404679216864e-05, |
| "loss": 0.4379, |
| "step": 260 |
| }, |
| { |
| "epoch": 3.3407999999999998, |
| "grad_norm": 0.3807498715101144, |
| "learning_rate": 1.1914356023513904e-05, |
| "loss": 0.4284, |
| "step": 261 |
| }, |
| { |
| "epoch": 3.3536, |
| "grad_norm": 0.30985472150031523, |
| "learning_rate": 1.1750955102607193e-05, |
| "loss": 0.4311, |
| "step": 262 |
| }, |
| { |
| "epoch": 3.3664, |
| "grad_norm": 0.36061598013651475, |
| "learning_rate": 1.1588215006420374e-05, |
| "loss": 0.4457, |
| "step": 263 |
| }, |
| { |
| "epoch": 3.3792, |
| "grad_norm": 0.28201169754914196, |
| "learning_rate": 1.1426148771938915e-05, |
| "loss": 0.4256, |
| "step": 264 |
| }, |
| { |
| "epoch": 3.392, |
| "grad_norm": 0.3311788452757313, |
| "learning_rate": 1.1264769382165748e-05, |
| "loss": 0.4217, |
| "step": 265 |
| }, |
| { |
| "epoch": 3.4048, |
| "grad_norm": 0.5540609151275562, |
| "learning_rate": 1.110408976508118e-05, |
| "loss": 0.4397, |
| "step": 266 |
| }, |
| { |
| "epoch": 3.4176, |
| "grad_norm": 6.832443750094971, |
| "learning_rate": 1.094412279260726e-05, |
| "loss": 0.4437, |
| "step": 267 |
| }, |
| { |
| "epoch": 3.4304, |
| "grad_norm": 0.4466069899969703, |
| "learning_rate": 1.0784881279576635e-05, |
| "loss": 0.4332, |
| "step": 268 |
| }, |
| { |
| "epoch": 3.4432, |
| "grad_norm": 0.2768477569237576, |
| "learning_rate": 1.0626377982705929e-05, |
| "loss": 0.4285, |
| "step": 269 |
| }, |
| { |
| "epoch": 3.456, |
| "grad_norm": 0.35203192444621906, |
| "learning_rate": 1.0468625599573842e-05, |
| "loss": 0.4213, |
| "step": 270 |
| }, |
| { |
| "epoch": 3.4688, |
| "grad_norm": 0.3597046552499742, |
| "learning_rate": 1.0311636767603952e-05, |
| "loss": 0.4374, |
| "step": 271 |
| }, |
| { |
| "epoch": 3.4816, |
| "grad_norm": 0.3325530857201198, |
| "learning_rate": 1.0155424063052306e-05, |
| "loss": 0.4219, |
| "step": 272 |
| }, |
| { |
| "epoch": 3.4944, |
| "grad_norm": 0.3853480285752419, |
| "learning_rate": 1.0000000000000006e-05, |
| "loss": 0.4229, |
| "step": 273 |
| }, |
| { |
| "epoch": 3.5072, |
| "grad_norm": 0.30159057588033156, |
| "learning_rate": 9.84537702935065e-06, |
| "loss": 0.4324, |
| "step": 274 |
| }, |
| { |
| "epoch": 3.52, |
| "grad_norm": 0.3847830760236813, |
| "learning_rate": 9.691567537832964e-06, |
| "loss": 0.4325, |
| "step": 275 |
| }, |
| { |
| "epoch": 3.5328, |
| "grad_norm": 0.31767472597893276, |
| "learning_rate": 9.538583847008452e-06, |
| "loss": 0.4228, |
| "step": 276 |
| }, |
| { |
| "epoch": 3.5456, |
| "grad_norm": 0.2977997235495541, |
| "learning_rate": 9.386438212284372e-06, |
| "loss": 0.438, |
| "step": 277 |
| }, |
| { |
| "epoch": 3.5584, |
| "grad_norm": 0.30218898899305674, |
| "learning_rate": 9.235142821931928e-06, |
| "loss": 0.4203, |
| "step": 278 |
| }, |
| { |
| "epoch": 3.5712, |
| "grad_norm": 0.27407360658766305, |
| "learning_rate": 9.084709796109907e-06, |
| "loss": 0.4457, |
| "step": 279 |
| }, |
| { |
| "epoch": 3.584, |
| "grad_norm": 0.28664639374216394, |
| "learning_rate": 8.93515118589373e-06, |
| "loss": 0.4288, |
| "step": 280 |
| }, |
| { |
| "epoch": 3.5968, |
| "grad_norm": 0.29135021472808337, |
| "learning_rate": 8.786478972310023e-06, |
| "loss": 0.4316, |
| "step": 281 |
| }, |
| { |
| "epoch": 3.6096, |
| "grad_norm": 0.23942524410753038, |
| "learning_rate": 8.638705065376887e-06, |
| "loss": 0.4228, |
| "step": 282 |
| }, |
| { |
| "epoch": 3.6224, |
| "grad_norm": 0.2610404113372267, |
| "learning_rate": 8.491841303149728e-06, |
| "loss": 0.4356, |
| "step": 283 |
| }, |
| { |
| "epoch": 3.6352, |
| "grad_norm": 0.26152990417585215, |
| "learning_rate": 8.345899450772975e-06, |
| "loss": 0.43, |
| "step": 284 |
| }, |
| { |
| "epoch": 3.648, |
| "grad_norm": 0.25792082318056, |
| "learning_rate": 8.200891199537549e-06, |
| "loss": 0.4137, |
| "step": 285 |
| }, |
| { |
| "epoch": 3.6608, |
| "grad_norm": 0.2814226776112004, |
| "learning_rate": 8.056828165944282e-06, |
| "loss": 0.4345, |
| "step": 286 |
| }, |
| { |
| "epoch": 3.6736, |
| "grad_norm": 0.24079584409939367, |
| "learning_rate": 7.913721890773354e-06, |
| "loss": 0.4295, |
| "step": 287 |
| }, |
| { |
| "epoch": 3.6864, |
| "grad_norm": 0.24764066775640362, |
| "learning_rate": 7.771583838159756e-06, |
| "loss": 0.439, |
| "step": 288 |
| }, |
| { |
| "epoch": 3.6992000000000003, |
| "grad_norm": 0.25293795245606016, |
| "learning_rate": 7.630425394674903e-06, |
| "loss": 0.4176, |
| "step": 289 |
| }, |
| { |
| "epoch": 3.7119999999999997, |
| "grad_norm": 0.25567663000353846, |
| "learning_rate": 7.49025786841445e-06, |
| "loss": 0.4448, |
| "step": 290 |
| }, |
| { |
| "epoch": 3.7248, |
| "grad_norm": 0.2557748628132262, |
| "learning_rate": 7.3510924880924575e-06, |
| "loss": 0.4143, |
| "step": 291 |
| }, |
| { |
| "epoch": 3.7376, |
| "grad_norm": 0.26348617282981335, |
| "learning_rate": 7.212940402141808e-06, |
| "loss": 0.4341, |
| "step": 292 |
| }, |
| { |
| "epoch": 3.7504, |
| "grad_norm": 0.26100409390539614, |
| "learning_rate": 7.075812677821145e-06, |
| "loss": 0.4364, |
| "step": 293 |
| }, |
| { |
| "epoch": 3.7632, |
| "grad_norm": 0.2691727892889049, |
| "learning_rate": 6.939720300328303e-06, |
| "loss": 0.4171, |
| "step": 294 |
| }, |
| { |
| "epoch": 3.776, |
| "grad_norm": 0.257825726884648, |
| "learning_rate": 6.8046741719202385e-06, |
| "loss": 0.4407, |
| "step": 295 |
| }, |
| { |
| "epoch": 3.7888, |
| "grad_norm": 0.2316927863965696, |
| "learning_rate": 6.67068511103971e-06, |
| "loss": 0.4372, |
| "step": 296 |
| }, |
| { |
| "epoch": 3.8016, |
| "grad_norm": 0.25125850891129237, |
| "learning_rate": 6.537763851448593e-06, |
| "loss": 0.4151, |
| "step": 297 |
| }, |
| { |
| "epoch": 3.8144, |
| "grad_norm": 0.23884683068419738, |
| "learning_rate": 6.4059210413680175e-06, |
| "loss": 0.4134, |
| "step": 298 |
| }, |
| { |
| "epoch": 3.8272, |
| "grad_norm": 0.22498688360615401, |
| "learning_rate": 6.275167242625331e-06, |
| "loss": 0.4137, |
| "step": 299 |
| }, |
| { |
| "epoch": 3.84, |
| "grad_norm": 0.23545994713685783, |
| "learning_rate": 6.145512929808013e-06, |
| "loss": 0.4279, |
| "step": 300 |
| }, |
| { |
| "epoch": 3.8528000000000002, |
| "grad_norm": 0.24709602404173453, |
| "learning_rate": 6.016968489424572e-06, |
| "loss": 0.4324, |
| "step": 301 |
| }, |
| { |
| "epoch": 3.8656, |
| "grad_norm": 0.22722612778685342, |
| "learning_rate": 5.889544219072465e-06, |
| "loss": 0.4148, |
| "step": 302 |
| }, |
| { |
| "epoch": 3.8784, |
| "grad_norm": 0.2276563993650903, |
| "learning_rate": 5.7632503266131925e-06, |
| "loss": 0.428, |
| "step": 303 |
| }, |
| { |
| "epoch": 3.8912, |
| "grad_norm": 0.24042359633073718, |
| "learning_rate": 5.638096929354522e-06, |
| "loss": 0.4307, |
| "step": 304 |
| }, |
| { |
| "epoch": 3.904, |
| "grad_norm": 0.2184993596582375, |
| "learning_rate": 5.514094053240035e-06, |
| "loss": 0.412, |
| "step": 305 |
| }, |
| { |
| "epoch": 3.9168, |
| "grad_norm": 0.23158938650439423, |
| "learning_rate": 5.39125163204594e-06, |
| "loss": 0.4254, |
| "step": 306 |
| }, |
| { |
| "epoch": 3.9295999999999998, |
| "grad_norm": 0.2400100801267434, |
| "learning_rate": 5.269579506585259e-06, |
| "loss": 0.4328, |
| "step": 307 |
| }, |
| { |
| "epoch": 3.9424, |
| "grad_norm": 0.22379826927352578, |
| "learning_rate": 5.149087423919541e-06, |
| "loss": 0.4498, |
| "step": 308 |
| }, |
| { |
| "epoch": 3.9552, |
| "grad_norm": 0.2175943729139426, |
| "learning_rate": 5.029785036577976e-06, |
| "loss": 0.4241, |
| "step": 309 |
| }, |
| { |
| "epoch": 3.968, |
| "grad_norm": 0.24142393569400988, |
| "learning_rate": 4.911681901784198e-06, |
| "loss": 0.4397, |
| "step": 310 |
| }, |
| { |
| "epoch": 3.9808, |
| "grad_norm": 0.2277954625839659, |
| "learning_rate": 4.794787480690597e-06, |
| "loss": 0.4156, |
| "step": 311 |
| }, |
| { |
| "epoch": 3.9936, |
| "grad_norm": 0.21963572957000518, |
| "learning_rate": 4.679111137620442e-06, |
| "loss": 0.4332, |
| "step": 312 |
| }, |
| { |
| "epoch": 4.0064, |
| "grad_norm": 0.7453095594065533, |
| "learning_rate": 4.5646621393177e-06, |
| "loss": 0.7858, |
| "step": 313 |
| }, |
| { |
| "epoch": 4.0192, |
| "grad_norm": 0.3623525450413221, |
| "learning_rate": 4.451449654204685e-06, |
| "loss": 0.4057, |
| "step": 314 |
| }, |
| { |
| "epoch": 4.032, |
| "grad_norm": 0.29588229171753117, |
| "learning_rate": 4.339482751647557e-06, |
| "loss": 0.3755, |
| "step": 315 |
| }, |
| { |
| "epoch": 4.0448, |
| "grad_norm": 0.3163251340108107, |
| "learning_rate": 4.228770401229824e-06, |
| "loss": 0.3829, |
| "step": 316 |
| }, |
| { |
| "epoch": 4.0576, |
| "grad_norm": 0.3065162031868669, |
| "learning_rate": 4.119321472033779e-06, |
| "loss": 0.3984, |
| "step": 317 |
| }, |
| { |
| "epoch": 4.0704, |
| "grad_norm": 0.3400715690966586, |
| "learning_rate": 4.011144731929981e-06, |
| "loss": 0.3754, |
| "step": 318 |
| }, |
| { |
| "epoch": 4.0832, |
| "grad_norm": 0.32985853310217333, |
| "learning_rate": 3.904248846874894e-06, |
| "loss": 0.3703, |
| "step": 319 |
| }, |
| { |
| "epoch": 4.096, |
| "grad_norm": 0.3393092530537702, |
| "learning_rate": 3.7986423802166705e-06, |
| "loss": 0.3987, |
| "step": 320 |
| }, |
| { |
| "epoch": 4.1088, |
| "grad_norm": 0.2859983479785996, |
| "learning_rate": 3.694333792009115e-06, |
| "loss": 0.381, |
| "step": 321 |
| }, |
| { |
| "epoch": 4.1216, |
| "grad_norm": 0.2603753990950839, |
| "learning_rate": 3.5913314383339937e-06, |
| "loss": 0.3842, |
| "step": 322 |
| }, |
| { |
| "epoch": 4.1344, |
| "grad_norm": 0.27353545934037826, |
| "learning_rate": 3.4896435706316e-06, |
| "loss": 0.3951, |
| "step": 323 |
| }, |
| { |
| "epoch": 4.1472, |
| "grad_norm": 0.2976771126638214, |
| "learning_rate": 3.3892783350397675e-06, |
| "loss": 0.3638, |
| "step": 324 |
| }, |
| { |
| "epoch": 4.16, |
| "grad_norm": 0.2979409629661625, |
| "learning_rate": 3.290243771741275e-06, |
| "loss": 0.4115, |
| "step": 325 |
| }, |
| { |
| "epoch": 4.1728, |
| "grad_norm": 0.24959324888076445, |
| "learning_rate": 3.1925478143197418e-06, |
| "loss": 0.3815, |
| "step": 326 |
| }, |
| { |
| "epoch": 4.1856, |
| "grad_norm": 0.25345576845513385, |
| "learning_rate": 3.0961982891241083e-06, |
| "loss": 0.3773, |
| "step": 327 |
| }, |
| { |
| "epoch": 4.1984, |
| "grad_norm": 0.2556273873961127, |
| "learning_rate": 3.001202914641628e-06, |
| "loss": 0.3958, |
| "step": 328 |
| }, |
| { |
| "epoch": 4.2112, |
| "grad_norm": 0.26751038831185286, |
| "learning_rate": 2.907569300879596e-06, |
| "loss": 0.373, |
| "step": 329 |
| }, |
| { |
| "epoch": 4.224, |
| "grad_norm": 0.2642931307211567, |
| "learning_rate": 2.815304948755664e-06, |
| "loss": 0.3754, |
| "step": 330 |
| }, |
| { |
| "epoch": 4.2368, |
| "grad_norm": 0.2556489340286757, |
| "learning_rate": 2.7244172494969978e-06, |
| "loss": 0.3946, |
| "step": 331 |
| }, |
| { |
| "epoch": 4.2496, |
| "grad_norm": 0.22787664846961111, |
| "learning_rate": 2.6349134840481294e-06, |
| "loss": 0.3816, |
| "step": 332 |
| }, |
| { |
| "epoch": 4.2624, |
| "grad_norm": 0.23950933032081287, |
| "learning_rate": 2.546800822487714e-06, |
| "loss": 0.3879, |
| "step": 333 |
| }, |
| { |
| "epoch": 4.2752, |
| "grad_norm": 0.24496434324273658, |
| "learning_rate": 2.4600863234541338e-06, |
| "loss": 0.3913, |
| "step": 334 |
| }, |
| { |
| "epoch": 4.288, |
| "grad_norm": 0.24368645441312148, |
| "learning_rate": 2.374776933580025e-06, |
| "loss": 0.3767, |
| "step": 335 |
| }, |
| { |
| "epoch": 4.3008, |
| "grad_norm": 0.2327625767748398, |
| "learning_rate": 2.2908794869358044e-06, |
| "loss": 0.3781, |
| "step": 336 |
| }, |
| { |
| "epoch": 4.3136, |
| "grad_norm": 0.22176499332523203, |
| "learning_rate": 2.2084007044821764e-06, |
| "loss": 0.3843, |
| "step": 337 |
| }, |
| { |
| "epoch": 4.3264, |
| "grad_norm": 0.22707367923274444, |
| "learning_rate": 2.127347193531757e-06, |
| "loss": 0.3896, |
| "step": 338 |
| }, |
| { |
| "epoch": 4.3392, |
| "grad_norm": 0.23024969123081462, |
| "learning_rate": 2.0477254472197237e-06, |
| "loss": 0.3713, |
| "step": 339 |
| }, |
| { |
| "epoch": 4.352, |
| "grad_norm": 0.22965164941152233, |
| "learning_rate": 1.96954184398368e-06, |
| "loss": 0.387, |
| "step": 340 |
| }, |
| { |
| "epoch": 4.3648, |
| "grad_norm": 0.22295693061640967, |
| "learning_rate": 1.8928026470526917e-06, |
| "loss": 0.3816, |
| "step": 341 |
| }, |
| { |
| "epoch": 4.3776, |
| "grad_norm": 0.21915300930601672, |
| "learning_rate": 1.817514003945524e-06, |
| "loss": 0.3935, |
| "step": 342 |
| }, |
| { |
| "epoch": 4.3904, |
| "grad_norm": 0.20845120406783407, |
| "learning_rate": 1.743681945978184e-06, |
| "loss": 0.3684, |
| "step": 343 |
| }, |
| { |
| "epoch": 4.4032, |
| "grad_norm": 0.21830097067368961, |
| "learning_rate": 1.6713123877807413e-06, |
| "loss": 0.3949, |
| "step": 344 |
| }, |
| { |
| "epoch": 4.416, |
| "grad_norm": 0.2319295084970909, |
| "learning_rate": 1.6004111268235156e-06, |
| "loss": 0.3861, |
| "step": 345 |
| }, |
| { |
| "epoch": 4.4288, |
| "grad_norm": 0.2252688795708634, |
| "learning_rate": 1.5309838429526714e-06, |
| "loss": 0.3822, |
| "step": 346 |
| }, |
| { |
| "epoch": 4.4416, |
| "grad_norm": 0.21942449224589794, |
| "learning_rate": 1.4630360979351644e-06, |
| "loss": 0.4034, |
| "step": 347 |
| }, |
| { |
| "epoch": 4.4544, |
| "grad_norm": 0.22032239874191956, |
| "learning_rate": 1.396573335013236e-06, |
| "loss": 0.379, |
| "step": 348 |
| }, |
| { |
| "epoch": 4.4672, |
| "grad_norm": 0.21751705116878547, |
| "learning_rate": 1.3316008784683265e-06, |
| "loss": 0.3882, |
| "step": 349 |
| }, |
| { |
| "epoch": 4.48, |
| "grad_norm": 0.20693329192460425, |
| "learning_rate": 1.2681239331945695e-06, |
| "loss": 0.3717, |
| "step": 350 |
| }, |
| { |
| "epoch": 4.4928, |
| "grad_norm": 0.20856743603119338, |
| "learning_rate": 1.2061475842818337e-06, |
| "loss": 0.3733, |
| "step": 351 |
| }, |
| { |
| "epoch": 4.5056, |
| "grad_norm": 0.2250156081858486, |
| "learning_rate": 1.1456767966083393e-06, |
| "loss": 0.3992, |
| "step": 352 |
| }, |
| { |
| "epoch": 4.5184, |
| "grad_norm": 0.20967206248158893, |
| "learning_rate": 1.086716414442952e-06, |
| "loss": 0.3932, |
| "step": 353 |
| }, |
| { |
| "epoch": 4.5312, |
| "grad_norm": 0.21304814301631292, |
| "learning_rate": 1.0292711610570904e-06, |
| "loss": 0.384, |
| "step": 354 |
| }, |
| { |
| "epoch": 4.5440000000000005, |
| "grad_norm": 0.21347126689517681, |
| "learning_rate": 9.733456383463658e-07, |
| "loss": 0.3774, |
| "step": 355 |
| }, |
| { |
| "epoch": 4.5568, |
| "grad_norm": 0.2144707901587489, |
| "learning_rate": 9.189443264619102e-07, |
| "loss": 0.3902, |
| "step": 356 |
| }, |
| { |
| "epoch": 4.5696, |
| "grad_norm": 0.21283483621641808, |
| "learning_rate": 8.660715834514977e-07, |
| "loss": 0.38, |
| "step": 357 |
| }, |
| { |
| "epoch": 4.5824, |
| "grad_norm": 0.20874683219822784, |
| "learning_rate": 8.147316449103959e-07, |
| "loss": 0.3766, |
| "step": 358 |
| }, |
| { |
| "epoch": 4.5952, |
| "grad_norm": 0.2106890163018537, |
| "learning_rate": 7.649286236420806e-07, |
| "loss": 0.3992, |
| "step": 359 |
| }, |
| { |
| "epoch": 4.608, |
| "grad_norm": 0.20561612581088098, |
| "learning_rate": 7.166665093287539e-07, |
| "loss": 0.3875, |
| "step": 360 |
| }, |
| { |
| "epoch": 4.6208, |
| "grad_norm": 0.20439781434323565, |
| "learning_rate": 6.69949168211721e-07, |
| "loss": 0.3829, |
| "step": 361 |
| }, |
| { |
| "epoch": 4.6336, |
| "grad_norm": 0.2681421275066011, |
| "learning_rate": 6.247803427816945e-07, |
| "loss": 0.4002, |
| "step": 362 |
| }, |
| { |
| "epoch": 4.6464, |
| "grad_norm": 0.21204428712133977, |
| "learning_rate": 5.811636514789598e-07, |
| "loss": 0.3782, |
| "step": 363 |
| }, |
| { |
| "epoch": 4.6592, |
| "grad_norm": 0.20647473756251203, |
| "learning_rate": 5.391025884035239e-07, |
| "loss": 0.3687, |
| "step": 364 |
| }, |
| { |
| "epoch": 4.672, |
| "grad_norm": 0.21537782712985679, |
| "learning_rate": 4.986005230351954e-07, |
| "loss": 0.3877, |
| "step": 365 |
| }, |
| { |
| "epoch": 4.6848, |
| "grad_norm": 0.20929612677212645, |
| "learning_rate": 4.5966069996365993e-07, |
| "loss": 0.3881, |
| "step": 366 |
| }, |
| { |
| "epoch": 4.6975999999999996, |
| "grad_norm": 0.20469289983672867, |
| "learning_rate": 4.22286238628562e-07, |
| "loss": 0.3782, |
| "step": 367 |
| }, |
| { |
| "epoch": 4.7104, |
| "grad_norm": 0.20501899025609002, |
| "learning_rate": 3.8648013306960664e-07, |
| "loss": 0.363, |
| "step": 368 |
| }, |
| { |
| "epoch": 4.7232, |
| "grad_norm": 0.20999132281758767, |
| "learning_rate": 3.522452516867048e-07, |
| "loss": 0.392, |
| "step": 369 |
| }, |
| { |
| "epoch": 4.736, |
| "grad_norm": 0.2074595523353208, |
| "learning_rate": 3.1958433701019697e-07, |
| "loss": 0.3824, |
| "step": 370 |
| }, |
| { |
| "epoch": 4.7488, |
| "grad_norm": 0.20946142382537822, |
| "learning_rate": 2.8850000548115155e-07, |
| "loss": 0.3833, |
| "step": 371 |
| }, |
| { |
| "epoch": 4.7616, |
| "grad_norm": 0.20538052481360528, |
| "learning_rate": 2.5899474724174313e-07, |
| "loss": 0.3728, |
| "step": 372 |
| }, |
| { |
| "epoch": 4.7744, |
| "grad_norm": 0.21061747295792488, |
| "learning_rate": 2.3107092593579905e-07, |
| "loss": 0.3963, |
| "step": 373 |
| }, |
| { |
| "epoch": 4.7872, |
| "grad_norm": 0.20832891708472415, |
| "learning_rate": 2.0473077851942858e-07, |
| "loss": 0.3678, |
| "step": 374 |
| }, |
| { |
| "epoch": 4.8, |
| "grad_norm": 0.21826074453447222, |
| "learning_rate": 1.799764150818306e-07, |
| "loss": 0.3896, |
| "step": 375 |
| }, |
| { |
| "epoch": 4.8128, |
| "grad_norm": 0.20430725766989333, |
| "learning_rate": 1.5680981867625566e-07, |
| "loss": 0.3842, |
| "step": 376 |
| }, |
| { |
| "epoch": 4.8256, |
| "grad_norm": 0.20976252169539303, |
| "learning_rate": 1.3523284516113955e-07, |
| "loss": 0.3753, |
| "step": 377 |
| }, |
| { |
| "epoch": 4.8384, |
| "grad_norm": 0.20149655479697867, |
| "learning_rate": 1.1524722305144231e-07, |
| "loss": 0.3836, |
| "step": 378 |
| }, |
| { |
| "epoch": 4.8512, |
| "grad_norm": 0.2094211498322376, |
| "learning_rate": 9.685455338016347e-08, |
| "loss": 0.386, |
| "step": 379 |
| }, |
| { |
| "epoch": 4.864, |
| "grad_norm": 0.2010445971986087, |
| "learning_rate": 8.005630957010014e-08, |
| "loss": 0.3792, |
| "step": 380 |
| }, |
| { |
| "epoch": 4.8768, |
| "grad_norm": 0.2055018594838335, |
| "learning_rate": 6.485383731580142e-08, |
| "loss": 0.3959, |
| "step": 381 |
| }, |
| { |
| "epoch": 4.8896, |
| "grad_norm": 0.20749079229469425, |
| "learning_rate": 5.1248354475768034e-08, |
| "loss": 0.3768, |
| "step": 382 |
| }, |
| { |
| "epoch": 4.9024, |
| "grad_norm": 0.2002473418624163, |
| "learning_rate": 3.924095097489922e-08, |
| "loss": 0.3918, |
| "step": 383 |
| }, |
| { |
| "epoch": 4.9152000000000005, |
| "grad_norm": 0.1966493907159764, |
| "learning_rate": 2.8832588717164766e-08, |
| "loss": 0.3755, |
| "step": 384 |
| }, |
| { |
| "epoch": 4.928, |
| "grad_norm": 0.20815227965413274, |
| "learning_rate": 2.0024101508555604e-08, |
| "loss": 0.3894, |
| "step": 385 |
| }, |
| { |
| "epoch": 4.9408, |
| "grad_norm": 0.20441975721928723, |
| "learning_rate": 1.281619499029274e-08, |
| "loss": 0.3768, |
| "step": 386 |
| }, |
| { |
| "epoch": 4.9536, |
| "grad_norm": 0.21525302379268882, |
| "learning_rate": 7.209446582292501e-09, |
| "loss": 0.389, |
| "step": 387 |
| }, |
| { |
| "epoch": 4.9664, |
| "grad_norm": 0.19748050149463348, |
| "learning_rate": 3.2043054369057523e-09, |
| "loss": 0.3797, |
| "step": 388 |
| }, |
| { |
| "epoch": 4.9792, |
| "grad_norm": 0.20299006619692414, |
| "learning_rate": 8.010924029533406e-10, |
| "loss": 0.3827, |
| "step": 389 |
| }, |
| { |
| "epoch": 4.992, |
| "grad_norm": 0.20924765505845955, |
| "learning_rate": 0.0, |
| "loss": 0.3776, |
| "step": 390 |
| }, |
| { |
| "epoch": 4.992, |
| "step": 390, |
| "total_flos": 1.7434270791410647e+18, |
| "train_loss": 0.529817401445829, |
| "train_runtime": 20813.8025, |
| "train_samples_per_second": 2.402, |
| "train_steps_per_second": 0.019 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 390, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.7434270791410647e+18, |
| "train_batch_size": 1, |
| "trial_name": null, |
| "trial_params": null |
| } |