| { |
| "best_metric": null, |
| "best_model_checkpoint": null, |
| "epoch": 5.954545454545455, |
| "eval_steps": 500, |
| "global_step": 702, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.008522727272727272, |
| "grad_norm": 35.49958801269531, |
| "learning_rate": 5.0000000000000004e-08, |
| "loss": 4.6143, |
| "step": 1 |
| }, |
| { |
| "epoch": 0.017045454545454544, |
| "grad_norm": 35.543556213378906, |
| "learning_rate": 1.0000000000000001e-07, |
| "loss": 4.6719, |
| "step": 2 |
| }, |
| { |
| "epoch": 0.02556818181818182, |
| "grad_norm": 34.12852096557617, |
| "learning_rate": 1.5000000000000002e-07, |
| "loss": 4.5546, |
| "step": 3 |
| }, |
| { |
| "epoch": 0.03409090909090909, |
| "grad_norm": 33.610572814941406, |
| "learning_rate": 2.0000000000000002e-07, |
| "loss": 4.4919, |
| "step": 4 |
| }, |
| { |
| "epoch": 0.04261363636363636, |
| "grad_norm": 34.532169342041016, |
| "learning_rate": 2.5000000000000004e-07, |
| "loss": 4.6095, |
| "step": 5 |
| }, |
| { |
| "epoch": 0.05113636363636364, |
| "grad_norm": 34.2357063293457, |
| "learning_rate": 3.0000000000000004e-07, |
| "loss": 4.585, |
| "step": 6 |
| }, |
| { |
| "epoch": 0.05965909090909091, |
| "grad_norm": 34.94609832763672, |
| "learning_rate": 3.5000000000000004e-07, |
| "loss": 4.5911, |
| "step": 7 |
| }, |
| { |
| "epoch": 0.06818181818181818, |
| "grad_norm": 34.79508590698242, |
| "learning_rate": 4.0000000000000003e-07, |
| "loss": 4.6294, |
| "step": 8 |
| }, |
| { |
| "epoch": 0.07670454545454546, |
| "grad_norm": 35.18478775024414, |
| "learning_rate": 4.5000000000000003e-07, |
| "loss": 4.6568, |
| "step": 9 |
| }, |
| { |
| "epoch": 0.08522727272727272, |
| "grad_norm": 33.75633239746094, |
| "learning_rate": 5.000000000000001e-07, |
| "loss": 4.4504, |
| "step": 10 |
| }, |
| { |
| "epoch": 0.09375, |
| "grad_norm": 34.20966339111328, |
| "learning_rate": 5.5e-07, |
| "loss": 4.5005, |
| "step": 11 |
| }, |
| { |
| "epoch": 0.10227272727272728, |
| "grad_norm": 33.20008087158203, |
| "learning_rate": 6.000000000000001e-07, |
| "loss": 4.4297, |
| "step": 12 |
| }, |
| { |
| "epoch": 0.11079545454545454, |
| "grad_norm": 33.53578567504883, |
| "learning_rate": 6.5e-07, |
| "loss": 4.4136, |
| "step": 13 |
| }, |
| { |
| "epoch": 0.11931818181818182, |
| "grad_norm": 31.951068878173828, |
| "learning_rate": 7.000000000000001e-07, |
| "loss": 4.3065, |
| "step": 14 |
| }, |
| { |
| "epoch": 0.1278409090909091, |
| "grad_norm": 30.890714645385742, |
| "learning_rate": 7.5e-07, |
| "loss": 4.2433, |
| "step": 15 |
| }, |
| { |
| "epoch": 0.13636363636363635, |
| "grad_norm": 29.448890686035156, |
| "learning_rate": 8.000000000000001e-07, |
| "loss": 4.0981, |
| "step": 16 |
| }, |
| { |
| "epoch": 0.14488636363636365, |
| "grad_norm": 26.769498825073242, |
| "learning_rate": 8.500000000000001e-07, |
| "loss": 3.9818, |
| "step": 17 |
| }, |
| { |
| "epoch": 0.1534090909090909, |
| "grad_norm": 25.418458938598633, |
| "learning_rate": 9.000000000000001e-07, |
| "loss": 3.8568, |
| "step": 18 |
| }, |
| { |
| "epoch": 0.16193181818181818, |
| "grad_norm": 24.099462509155273, |
| "learning_rate": 9.500000000000001e-07, |
| "loss": 3.7139, |
| "step": 19 |
| }, |
| { |
| "epoch": 0.17045454545454544, |
| "grad_norm": 22.487621307373047, |
| "learning_rate": 1.0000000000000002e-06, |
| "loss": 3.6967, |
| "step": 20 |
| }, |
| { |
| "epoch": 0.17897727272727273, |
| "grad_norm": 20.72856330871582, |
| "learning_rate": 1.0500000000000001e-06, |
| "loss": 3.5031, |
| "step": 21 |
| }, |
| { |
| "epoch": 0.1875, |
| "grad_norm": 19.552040100097656, |
| "learning_rate": 1.1e-06, |
| "loss": 3.4201, |
| "step": 22 |
| }, |
| { |
| "epoch": 0.19602272727272727, |
| "grad_norm": 18.033971786499023, |
| "learning_rate": 1.1500000000000002e-06, |
| "loss": 3.1842, |
| "step": 23 |
| }, |
| { |
| "epoch": 0.20454545454545456, |
| "grad_norm": 18.865802764892578, |
| "learning_rate": 1.2000000000000002e-06, |
| "loss": 3.1982, |
| "step": 24 |
| }, |
| { |
| "epoch": 0.21306818181818182, |
| "grad_norm": 18.849502563476562, |
| "learning_rate": 1.25e-06, |
| "loss": 2.9954, |
| "step": 25 |
| }, |
| { |
| "epoch": 0.2215909090909091, |
| "grad_norm": 19.714330673217773, |
| "learning_rate": 1.3e-06, |
| "loss": 2.8763, |
| "step": 26 |
| }, |
| { |
| "epoch": 0.23011363636363635, |
| "grad_norm": 20.26412010192871, |
| "learning_rate": 1.3500000000000002e-06, |
| "loss": 2.7259, |
| "step": 27 |
| }, |
| { |
| "epoch": 0.23863636363636365, |
| "grad_norm": 19.212318420410156, |
| "learning_rate": 1.4000000000000001e-06, |
| "loss": 2.6099, |
| "step": 28 |
| }, |
| { |
| "epoch": 0.2471590909090909, |
| "grad_norm": 16.80523681640625, |
| "learning_rate": 1.45e-06, |
| "loss": 2.4482, |
| "step": 29 |
| }, |
| { |
| "epoch": 0.2556818181818182, |
| "grad_norm": 14.624052047729492, |
| "learning_rate": 1.5e-06, |
| "loss": 2.2364, |
| "step": 30 |
| }, |
| { |
| "epoch": 0.26420454545454547, |
| "grad_norm": 14.759950637817383, |
| "learning_rate": 1.5500000000000002e-06, |
| "loss": 2.2159, |
| "step": 31 |
| }, |
| { |
| "epoch": 0.2727272727272727, |
| "grad_norm": 14.904441833496094, |
| "learning_rate": 1.6000000000000001e-06, |
| "loss": 2.0206, |
| "step": 32 |
| }, |
| { |
| "epoch": 0.28125, |
| "grad_norm": 15.03490161895752, |
| "learning_rate": 1.6500000000000003e-06, |
| "loss": 1.8725, |
| "step": 33 |
| }, |
| { |
| "epoch": 0.2897727272727273, |
| "grad_norm": 15.70709228515625, |
| "learning_rate": 1.7000000000000002e-06, |
| "loss": 1.8046, |
| "step": 34 |
| }, |
| { |
| "epoch": 0.29829545454545453, |
| "grad_norm": 15.637526512145996, |
| "learning_rate": 1.75e-06, |
| "loss": 1.6198, |
| "step": 35 |
| }, |
| { |
| "epoch": 0.3068181818181818, |
| "grad_norm": 14.345853805541992, |
| "learning_rate": 1.8000000000000001e-06, |
| "loss": 1.4247, |
| "step": 36 |
| }, |
| { |
| "epoch": 0.3153409090909091, |
| "grad_norm": 14.281502723693848, |
| "learning_rate": 1.85e-06, |
| "loss": 1.2859, |
| "step": 37 |
| }, |
| { |
| "epoch": 0.32386363636363635, |
| "grad_norm": 13.567434310913086, |
| "learning_rate": 1.9000000000000002e-06, |
| "loss": 1.1452, |
| "step": 38 |
| }, |
| { |
| "epoch": 0.33238636363636365, |
| "grad_norm": 13.128920555114746, |
| "learning_rate": 1.9500000000000004e-06, |
| "loss": 1.0036, |
| "step": 39 |
| }, |
| { |
| "epoch": 0.3409090909090909, |
| "grad_norm": 12.954020500183105, |
| "learning_rate": 2.0000000000000003e-06, |
| "loss": 0.851, |
| "step": 40 |
| }, |
| { |
| "epoch": 0.3494318181818182, |
| "grad_norm": 12.533946990966797, |
| "learning_rate": 2.05e-06, |
| "loss": 0.715, |
| "step": 41 |
| }, |
| { |
| "epoch": 0.35795454545454547, |
| "grad_norm": 11.564764022827148, |
| "learning_rate": 2.1000000000000002e-06, |
| "loss": 0.5987, |
| "step": 42 |
| }, |
| { |
| "epoch": 0.3664772727272727, |
| "grad_norm": 10.383822441101074, |
| "learning_rate": 2.15e-06, |
| "loss": 0.4834, |
| "step": 43 |
| }, |
| { |
| "epoch": 0.375, |
| "grad_norm": 8.901700973510742, |
| "learning_rate": 2.2e-06, |
| "loss": 0.3808, |
| "step": 44 |
| }, |
| { |
| "epoch": 0.3835227272727273, |
| "grad_norm": 7.737320423126221, |
| "learning_rate": 2.25e-06, |
| "loss": 0.2975, |
| "step": 45 |
| }, |
| { |
| "epoch": 0.39204545454545453, |
| "grad_norm": 5.334733963012695, |
| "learning_rate": 2.3000000000000004e-06, |
| "loss": 0.2261, |
| "step": 46 |
| }, |
| { |
| "epoch": 0.4005681818181818, |
| "grad_norm": 3.3499741554260254, |
| "learning_rate": 2.35e-06, |
| "loss": 0.1854, |
| "step": 47 |
| }, |
| { |
| "epoch": 0.4090909090909091, |
| "grad_norm": 2.4037742614746094, |
| "learning_rate": 2.4000000000000003e-06, |
| "loss": 0.1532, |
| "step": 48 |
| }, |
| { |
| "epoch": 0.41761363636363635, |
| "grad_norm": 1.8914185762405396, |
| "learning_rate": 2.4500000000000003e-06, |
| "loss": 0.1372, |
| "step": 49 |
| }, |
| { |
| "epoch": 0.42613636363636365, |
| "grad_norm": 1.7124507427215576, |
| "learning_rate": 2.5e-06, |
| "loss": 0.136, |
| "step": 50 |
| }, |
| { |
| "epoch": 0.4346590909090909, |
| "grad_norm": 1.242527961730957, |
| "learning_rate": 2.55e-06, |
| "loss": 0.1238, |
| "step": 51 |
| }, |
| { |
| "epoch": 0.4431818181818182, |
| "grad_norm": 0.9835780262947083, |
| "learning_rate": 2.6e-06, |
| "loss": 0.1192, |
| "step": 52 |
| }, |
| { |
| "epoch": 0.45170454545454547, |
| "grad_norm": 1.0163497924804688, |
| "learning_rate": 2.6500000000000005e-06, |
| "loss": 0.1175, |
| "step": 53 |
| }, |
| { |
| "epoch": 0.4602272727272727, |
| "grad_norm": 0.8837094306945801, |
| "learning_rate": 2.7000000000000004e-06, |
| "loss": 0.1104, |
| "step": 54 |
| }, |
| { |
| "epoch": 0.46875, |
| "grad_norm": 0.6102741956710815, |
| "learning_rate": 2.7500000000000004e-06, |
| "loss": 0.0986, |
| "step": 55 |
| }, |
| { |
| "epoch": 0.4772727272727273, |
| "grad_norm": 0.85715252161026, |
| "learning_rate": 2.8000000000000003e-06, |
| "loss": 0.1083, |
| "step": 56 |
| }, |
| { |
| "epoch": 0.48579545454545453, |
| "grad_norm": 0.9692059755325317, |
| "learning_rate": 2.85e-06, |
| "loss": 0.0994, |
| "step": 57 |
| }, |
| { |
| "epoch": 0.4943181818181818, |
| "grad_norm": 0.5620752573013306, |
| "learning_rate": 2.9e-06, |
| "loss": 0.0909, |
| "step": 58 |
| }, |
| { |
| "epoch": 0.5028409090909091, |
| "grad_norm": 0.5377550721168518, |
| "learning_rate": 2.95e-06, |
| "loss": 0.087, |
| "step": 59 |
| }, |
| { |
| "epoch": 0.5113636363636364, |
| "grad_norm": 0.7260486483573914, |
| "learning_rate": 3e-06, |
| "loss": 0.0949, |
| "step": 60 |
| }, |
| { |
| "epoch": 0.5198863636363636, |
| "grad_norm": 0.5636699199676514, |
| "learning_rate": 3.05e-06, |
| "loss": 0.0884, |
| "step": 61 |
| }, |
| { |
| "epoch": 0.5284090909090909, |
| "grad_norm": 0.3729614019393921, |
| "learning_rate": 3.1000000000000004e-06, |
| "loss": 0.0771, |
| "step": 62 |
| }, |
| { |
| "epoch": 0.5369318181818182, |
| "grad_norm": 0.5472120046615601, |
| "learning_rate": 3.1500000000000003e-06, |
| "loss": 0.0887, |
| "step": 63 |
| }, |
| { |
| "epoch": 0.5454545454545454, |
| "grad_norm": 0.4915490448474884, |
| "learning_rate": 3.2000000000000003e-06, |
| "loss": 0.0826, |
| "step": 64 |
| }, |
| { |
| "epoch": 0.5539772727272727, |
| "grad_norm": 0.4123076796531677, |
| "learning_rate": 3.2500000000000002e-06, |
| "loss": 0.0834, |
| "step": 65 |
| }, |
| { |
| "epoch": 0.5625, |
| "grad_norm": 0.32767826318740845, |
| "learning_rate": 3.3000000000000006e-06, |
| "loss": 0.0765, |
| "step": 66 |
| }, |
| { |
| "epoch": 0.5710227272727273, |
| "grad_norm": 0.34970155358314514, |
| "learning_rate": 3.3500000000000005e-06, |
| "loss": 0.0832, |
| "step": 67 |
| }, |
| { |
| "epoch": 0.5795454545454546, |
| "grad_norm": 0.38679587841033936, |
| "learning_rate": 3.4000000000000005e-06, |
| "loss": 0.0766, |
| "step": 68 |
| }, |
| { |
| "epoch": 0.5880681818181818, |
| "grad_norm": 0.5464550852775574, |
| "learning_rate": 3.45e-06, |
| "loss": 0.0748, |
| "step": 69 |
| }, |
| { |
| "epoch": 0.5965909090909091, |
| "grad_norm": 0.3545376658439636, |
| "learning_rate": 3.5e-06, |
| "loss": 0.0759, |
| "step": 70 |
| }, |
| { |
| "epoch": 0.6051136363636364, |
| "grad_norm": 0.3532780110836029, |
| "learning_rate": 3.5500000000000003e-06, |
| "loss": 0.0728, |
| "step": 71 |
| }, |
| { |
| "epoch": 0.6136363636363636, |
| "grad_norm": 0.5024192929267883, |
| "learning_rate": 3.6000000000000003e-06, |
| "loss": 0.081, |
| "step": 72 |
| }, |
| { |
| "epoch": 0.6221590909090909, |
| "grad_norm": 0.3845844268798828, |
| "learning_rate": 3.65e-06, |
| "loss": 0.0689, |
| "step": 73 |
| }, |
| { |
| "epoch": 0.6306818181818182, |
| "grad_norm": 0.46010249853134155, |
| "learning_rate": 3.7e-06, |
| "loss": 0.0757, |
| "step": 74 |
| }, |
| { |
| "epoch": 0.6392045454545454, |
| "grad_norm": 0.36048972606658936, |
| "learning_rate": 3.7500000000000005e-06, |
| "loss": 0.0758, |
| "step": 75 |
| }, |
| { |
| "epoch": 0.6477272727272727, |
| "grad_norm": 0.2978876829147339, |
| "learning_rate": 3.8000000000000005e-06, |
| "loss": 0.0747, |
| "step": 76 |
| }, |
| { |
| "epoch": 0.65625, |
| "grad_norm": 0.30899888277053833, |
| "learning_rate": 3.85e-06, |
| "loss": 0.0788, |
| "step": 77 |
| }, |
| { |
| "epoch": 0.6647727272727273, |
| "grad_norm": 0.45000651478767395, |
| "learning_rate": 3.900000000000001e-06, |
| "loss": 0.0672, |
| "step": 78 |
| }, |
| { |
| "epoch": 0.6732954545454546, |
| "grad_norm": 0.2621772587299347, |
| "learning_rate": 3.95e-06, |
| "loss": 0.0719, |
| "step": 79 |
| }, |
| { |
| "epoch": 0.6818181818181818, |
| "grad_norm": 0.28618085384368896, |
| "learning_rate": 4.000000000000001e-06, |
| "loss": 0.0744, |
| "step": 80 |
| }, |
| { |
| "epoch": 0.6903409090909091, |
| "grad_norm": 0.40642479062080383, |
| "learning_rate": 4.05e-06, |
| "loss": 0.0743, |
| "step": 81 |
| }, |
| { |
| "epoch": 0.6988636363636364, |
| "grad_norm": 0.3514344096183777, |
| "learning_rate": 4.1e-06, |
| "loss": 0.0708, |
| "step": 82 |
| }, |
| { |
| "epoch": 0.7073863636363636, |
| "grad_norm": 0.426798939704895, |
| "learning_rate": 4.15e-06, |
| "loss": 0.073, |
| "step": 83 |
| }, |
| { |
| "epoch": 0.7159090909090909, |
| "grad_norm": 0.29413217306137085, |
| "learning_rate": 4.2000000000000004e-06, |
| "loss": 0.0732, |
| "step": 84 |
| }, |
| { |
| "epoch": 0.7244318181818182, |
| "grad_norm": 0.37668099999427795, |
| "learning_rate": 4.25e-06, |
| "loss": 0.0664, |
| "step": 85 |
| }, |
| { |
| "epoch": 0.7329545454545454, |
| "grad_norm": 0.3696061372756958, |
| "learning_rate": 4.3e-06, |
| "loss": 0.0649, |
| "step": 86 |
| }, |
| { |
| "epoch": 0.7414772727272727, |
| "grad_norm": 0.311988890171051, |
| "learning_rate": 4.350000000000001e-06, |
| "loss": 0.0689, |
| "step": 87 |
| }, |
| { |
| "epoch": 0.75, |
| "grad_norm": 0.3180747628211975, |
| "learning_rate": 4.4e-06, |
| "loss": 0.0746, |
| "step": 88 |
| }, |
| { |
| "epoch": 0.7585227272727273, |
| "grad_norm": 0.46045729517936707, |
| "learning_rate": 4.450000000000001e-06, |
| "loss": 0.0763, |
| "step": 89 |
| }, |
| { |
| "epoch": 0.7670454545454546, |
| "grad_norm": 0.3566094934940338, |
| "learning_rate": 4.5e-06, |
| "loss": 0.0676, |
| "step": 90 |
| }, |
| { |
| "epoch": 0.7755681818181818, |
| "grad_norm": 0.35632985830307007, |
| "learning_rate": 4.5500000000000005e-06, |
| "loss": 0.0677, |
| "step": 91 |
| }, |
| { |
| "epoch": 0.7840909090909091, |
| "grad_norm": 0.27962526679039, |
| "learning_rate": 4.600000000000001e-06, |
| "loss": 0.0689, |
| "step": 92 |
| }, |
| { |
| "epoch": 0.7926136363636364, |
| "grad_norm": 0.4532427191734314, |
| "learning_rate": 4.65e-06, |
| "loss": 0.0724, |
| "step": 93 |
| }, |
| { |
| "epoch": 0.8011363636363636, |
| "grad_norm": 0.363337904214859, |
| "learning_rate": 4.7e-06, |
| "loss": 0.0708, |
| "step": 94 |
| }, |
| { |
| "epoch": 0.8096590909090909, |
| "grad_norm": 0.3065521717071533, |
| "learning_rate": 4.75e-06, |
| "loss": 0.0713, |
| "step": 95 |
| }, |
| { |
| "epoch": 0.8181818181818182, |
| "grad_norm": 0.24705548584461212, |
| "learning_rate": 4.800000000000001e-06, |
| "loss": 0.0683, |
| "step": 96 |
| }, |
| { |
| "epoch": 0.8267045454545454, |
| "grad_norm": 0.5038250684738159, |
| "learning_rate": 4.85e-06, |
| "loss": 0.0738, |
| "step": 97 |
| }, |
| { |
| "epoch": 0.8352272727272727, |
| "grad_norm": 0.2972690761089325, |
| "learning_rate": 4.9000000000000005e-06, |
| "loss": 0.0684, |
| "step": 98 |
| }, |
| { |
| "epoch": 0.84375, |
| "grad_norm": 0.40811270475387573, |
| "learning_rate": 4.95e-06, |
| "loss": 0.066, |
| "step": 99 |
| }, |
| { |
| "epoch": 0.8522727272727273, |
| "grad_norm": 0.3925577998161316, |
| "learning_rate": 5e-06, |
| "loss": 0.0692, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.8607954545454546, |
| "grad_norm": 0.32043716311454773, |
| "learning_rate": 4.999965957943338e-06, |
| "loss": 0.0652, |
| "step": 101 |
| }, |
| { |
| "epoch": 0.8693181818181818, |
| "grad_norm": 0.26025497913360596, |
| "learning_rate": 4.999863832700438e-06, |
| "loss": 0.0634, |
| "step": 102 |
| }, |
| { |
| "epoch": 0.8778409090909091, |
| "grad_norm": 0.5605457425117493, |
| "learning_rate": 4.999693627052545e-06, |
| "loss": 0.0686, |
| "step": 103 |
| }, |
| { |
| "epoch": 0.8863636363636364, |
| "grad_norm": 0.48003220558166504, |
| "learning_rate": 4.9994553456349785e-06, |
| "loss": 0.0707, |
| "step": 104 |
| }, |
| { |
| "epoch": 0.8948863636363636, |
| "grad_norm": 0.22346197068691254, |
| "learning_rate": 4.99914899493701e-06, |
| "loss": 0.067, |
| "step": 105 |
| }, |
| { |
| "epoch": 0.9034090909090909, |
| "grad_norm": 0.45966312289237976, |
| "learning_rate": 4.998774583301685e-06, |
| "loss": 0.0663, |
| "step": 106 |
| }, |
| { |
| "epoch": 0.9119318181818182, |
| "grad_norm": 0.38695356249809265, |
| "learning_rate": 4.998332120925598e-06, |
| "loss": 0.0714, |
| "step": 107 |
| }, |
| { |
| "epoch": 0.9204545454545454, |
| "grad_norm": 0.35708868503570557, |
| "learning_rate": 4.997821619858614e-06, |
| "loss": 0.0632, |
| "step": 108 |
| }, |
| { |
| "epoch": 0.9289772727272727, |
| "grad_norm": 0.2699528932571411, |
| "learning_rate": 4.9972430940035355e-06, |
| "loss": 0.0649, |
| "step": 109 |
| }, |
| { |
| "epoch": 0.9375, |
| "grad_norm": 0.5043486952781677, |
| "learning_rate": 4.9965965591157314e-06, |
| "loss": 0.0715, |
| "step": 110 |
| }, |
| { |
| "epoch": 0.9460227272727273, |
| "grad_norm": 0.4046338200569153, |
| "learning_rate": 4.995882032802703e-06, |
| "loss": 0.0657, |
| "step": 111 |
| }, |
| { |
| "epoch": 0.9545454545454546, |
| "grad_norm": 0.3543775677680969, |
| "learning_rate": 4.995099534523608e-06, |
| "loss": 0.0698, |
| "step": 112 |
| }, |
| { |
| "epoch": 0.9630681818181818, |
| "grad_norm": 0.3295219838619232, |
| "learning_rate": 4.994249085588725e-06, |
| "loss": 0.0719, |
| "step": 113 |
| }, |
| { |
| "epoch": 0.9715909090909091, |
| "grad_norm": 0.3691340982913971, |
| "learning_rate": 4.993330709158879e-06, |
| "loss": 0.0689, |
| "step": 114 |
| }, |
| { |
| "epoch": 0.9801136363636364, |
| "grad_norm": 0.4219862222671509, |
| "learning_rate": 4.9923444302448095e-06, |
| "loss": 0.0644, |
| "step": 115 |
| }, |
| { |
| "epoch": 0.9886363636363636, |
| "grad_norm": 0.33437392115592957, |
| "learning_rate": 4.991290275706486e-06, |
| "loss": 0.074, |
| "step": 116 |
| }, |
| { |
| "epoch": 0.9971590909090909, |
| "grad_norm": 0.22970478236675262, |
| "learning_rate": 4.990168274252379e-06, |
| "loss": 0.0639, |
| "step": 117 |
| }, |
| { |
| "epoch": 1.0, |
| "grad_norm": 0.22970478236675262, |
| "learning_rate": 4.988978456438678e-06, |
| "loss": 0.0718, |
| "step": 118 |
| }, |
| { |
| "epoch": 1.0085227272727273, |
| "grad_norm": 0.7316553592681885, |
| "learning_rate": 4.98772085466846e-06, |
| "loss": 0.062, |
| "step": 119 |
| }, |
| { |
| "epoch": 1.0170454545454546, |
| "grad_norm": 0.27226850390434265, |
| "learning_rate": 4.986395503190805e-06, |
| "loss": 0.0583, |
| "step": 120 |
| }, |
| { |
| "epoch": 1.0255681818181819, |
| "grad_norm": 0.20066767930984497, |
| "learning_rate": 4.9850024380998655e-06, |
| "loss": 0.0624, |
| "step": 121 |
| }, |
| { |
| "epoch": 1.0340909090909092, |
| "grad_norm": 0.26274779438972473, |
| "learning_rate": 4.9835416973338815e-06, |
| "loss": 0.0641, |
| "step": 122 |
| }, |
| { |
| "epoch": 1.0426136363636365, |
| "grad_norm": 0.3893499970436096, |
| "learning_rate": 4.982013320674149e-06, |
| "loss": 0.066, |
| "step": 123 |
| }, |
| { |
| "epoch": 1.0511363636363635, |
| "grad_norm": 0.3454163670539856, |
| "learning_rate": 4.980417349743936e-06, |
| "loss": 0.061, |
| "step": 124 |
| }, |
| { |
| "epoch": 1.0596590909090908, |
| "grad_norm": 0.26956701278686523, |
| "learning_rate": 4.97875382800735e-06, |
| "loss": 0.0641, |
| "step": 125 |
| }, |
| { |
| "epoch": 1.0681818181818181, |
| "grad_norm": 0.4147774279117584, |
| "learning_rate": 4.97702280076815e-06, |
| "loss": 0.0599, |
| "step": 126 |
| }, |
| { |
| "epoch": 1.0767045454545454, |
| "grad_norm": 0.2839421033859253, |
| "learning_rate": 4.9752243151685185e-06, |
| "loss": 0.0669, |
| "step": 127 |
| }, |
| { |
| "epoch": 1.0852272727272727, |
| "grad_norm": 0.41760239005088806, |
| "learning_rate": 4.973358420187776e-06, |
| "loss": 0.0661, |
| "step": 128 |
| }, |
| { |
| "epoch": 1.09375, |
| "grad_norm": 0.29833659529685974, |
| "learning_rate": 4.9714251666410426e-06, |
| "loss": 0.0672, |
| "step": 129 |
| }, |
| { |
| "epoch": 1.1022727272727273, |
| "grad_norm": 0.2806449830532074, |
| "learning_rate": 4.969424607177861e-06, |
| "loss": 0.0589, |
| "step": 130 |
| }, |
| { |
| "epoch": 1.1107954545454546, |
| "grad_norm": 0.33876338601112366, |
| "learning_rate": 4.967356796280756e-06, |
| "loss": 0.0647, |
| "step": 131 |
| }, |
| { |
| "epoch": 1.1193181818181819, |
| "grad_norm": 0.2331860363483429, |
| "learning_rate": 4.96522179026376e-06, |
| "loss": 0.0633, |
| "step": 132 |
| }, |
| { |
| "epoch": 1.1278409090909092, |
| "grad_norm": 0.22244933247566223, |
| "learning_rate": 4.963019647270866e-06, |
| "loss": 0.0618, |
| "step": 133 |
| }, |
| { |
| "epoch": 1.1363636363636362, |
| "grad_norm": 0.24649232625961304, |
| "learning_rate": 4.960750427274458e-06, |
| "loss": 0.0565, |
| "step": 134 |
| }, |
| { |
| "epoch": 1.1448863636363638, |
| "grad_norm": 0.23454727232456207, |
| "learning_rate": 4.958414192073665e-06, |
| "loss": 0.0607, |
| "step": 135 |
| }, |
| { |
| "epoch": 1.1534090909090908, |
| "grad_norm": 0.28441867232322693, |
| "learning_rate": 4.956011005292693e-06, |
| "loss": 0.0548, |
| "step": 136 |
| }, |
| { |
| "epoch": 1.1619318181818181, |
| "grad_norm": 0.26571962237358093, |
| "learning_rate": 4.9535409323790726e-06, |
| "loss": 0.0596, |
| "step": 137 |
| }, |
| { |
| "epoch": 1.1704545454545454, |
| "grad_norm": 0.2692795991897583, |
| "learning_rate": 4.951004040601898e-06, |
| "loss": 0.0572, |
| "step": 138 |
| }, |
| { |
| "epoch": 1.1789772727272727, |
| "grad_norm": 0.35209110379219055, |
| "learning_rate": 4.9484003990499785e-06, |
| "loss": 0.0657, |
| "step": 139 |
| }, |
| { |
| "epoch": 1.1875, |
| "grad_norm": 0.2726614773273468, |
| "learning_rate": 4.945730078629965e-06, |
| "loss": 0.0561, |
| "step": 140 |
| }, |
| { |
| "epoch": 1.1960227272727273, |
| "grad_norm": 0.2439083456993103, |
| "learning_rate": 4.942993152064415e-06, |
| "loss": 0.0594, |
| "step": 141 |
| }, |
| { |
| "epoch": 1.2045454545454546, |
| "grad_norm": 0.23282821476459503, |
| "learning_rate": 4.940189693889819e-06, |
| "loss": 0.0598, |
| "step": 142 |
| }, |
| { |
| "epoch": 1.2130681818181819, |
| "grad_norm": 0.22507373988628387, |
| "learning_rate": 4.937319780454559e-06, |
| "loss": 0.0632, |
| "step": 143 |
| }, |
| { |
| "epoch": 1.2215909090909092, |
| "grad_norm": 0.35792839527130127, |
| "learning_rate": 4.934383489916843e-06, |
| "loss": 0.0626, |
| "step": 144 |
| }, |
| { |
| "epoch": 1.2301136363636362, |
| "grad_norm": 0.3073280453681946, |
| "learning_rate": 4.931380902242564e-06, |
| "loss": 0.0558, |
| "step": 145 |
| }, |
| { |
| "epoch": 1.2386363636363638, |
| "grad_norm": 0.2743082642555237, |
| "learning_rate": 4.928312099203131e-06, |
| "loss": 0.0638, |
| "step": 146 |
| }, |
| { |
| "epoch": 1.2471590909090908, |
| "grad_norm": 0.2522992491722107, |
| "learning_rate": 4.925177164373237e-06, |
| "loss": 0.0599, |
| "step": 147 |
| }, |
| { |
| "epoch": 1.2556818181818181, |
| "grad_norm": 0.25230202078819275, |
| "learning_rate": 4.921976183128585e-06, |
| "loss": 0.0579, |
| "step": 148 |
| }, |
| { |
| "epoch": 1.2642045454545454, |
| "grad_norm": 0.24467583000659943, |
| "learning_rate": 4.918709242643563e-06, |
| "loss": 0.0607, |
| "step": 149 |
| }, |
| { |
| "epoch": 1.2727272727272727, |
| "grad_norm": 0.2191130518913269, |
| "learning_rate": 4.915376431888871e-06, |
| "loss": 0.0622, |
| "step": 150 |
| }, |
| { |
| "epoch": 1.28125, |
| "grad_norm": 0.26959559321403503, |
| "learning_rate": 4.911977841629092e-06, |
| "loss": 0.0568, |
| "step": 151 |
| }, |
| { |
| "epoch": 1.2897727272727273, |
| "grad_norm": 0.25139617919921875, |
| "learning_rate": 4.908513564420231e-06, |
| "loss": 0.0607, |
| "step": 152 |
| }, |
| { |
| "epoch": 1.2982954545454546, |
| "grad_norm": 0.27705878019332886, |
| "learning_rate": 4.904983694607183e-06, |
| "loss": 0.0607, |
| "step": 153 |
| }, |
| { |
| "epoch": 1.3068181818181819, |
| "grad_norm": 0.2860431373119354, |
| "learning_rate": 4.9013883283211705e-06, |
| "loss": 0.0572, |
| "step": 154 |
| }, |
| { |
| "epoch": 1.3153409090909092, |
| "grad_norm": 0.27946561574935913, |
| "learning_rate": 4.897727563477123e-06, |
| "loss": 0.054, |
| "step": 155 |
| }, |
| { |
| "epoch": 1.3238636363636362, |
| "grad_norm": 0.25392627716064453, |
| "learning_rate": 4.894001499771015e-06, |
| "loss": 0.0623, |
| "step": 156 |
| }, |
| { |
| "epoch": 1.3323863636363638, |
| "grad_norm": 0.24209532141685486, |
| "learning_rate": 4.890210238677141e-06, |
| "loss": 0.0592, |
| "step": 157 |
| }, |
| { |
| "epoch": 1.3409090909090908, |
| "grad_norm": 0.23799730837345123, |
| "learning_rate": 4.886353883445363e-06, |
| "loss": 0.0545, |
| "step": 158 |
| }, |
| { |
| "epoch": 1.3494318181818181, |
| "grad_norm": 0.20330385863780975, |
| "learning_rate": 4.88243253909829e-06, |
| "loss": 0.0582, |
| "step": 159 |
| }, |
| { |
| "epoch": 1.3579545454545454, |
| "grad_norm": 0.2390514761209488, |
| "learning_rate": 4.878446312428424e-06, |
| "loss": 0.0558, |
| "step": 160 |
| }, |
| { |
| "epoch": 1.3664772727272727, |
| "grad_norm": 0.3132871687412262, |
| "learning_rate": 4.874395311995248e-06, |
| "loss": 0.0522, |
| "step": 161 |
| }, |
| { |
| "epoch": 1.375, |
| "grad_norm": 0.2441999763250351, |
| "learning_rate": 4.870279648122271e-06, |
| "loss": 0.0632, |
| "step": 162 |
| }, |
| { |
| "epoch": 1.3835227272727273, |
| "grad_norm": 0.3468320369720459, |
| "learning_rate": 4.866099432894023e-06, |
| "loss": 0.0544, |
| "step": 163 |
| }, |
| { |
| "epoch": 1.3920454545454546, |
| "grad_norm": 0.2637532949447632, |
| "learning_rate": 4.8618547801530045e-06, |
| "loss": 0.0508, |
| "step": 164 |
| }, |
| { |
| "epoch": 1.4005681818181819, |
| "grad_norm": 0.25950244069099426, |
| "learning_rate": 4.857545805496581e-06, |
| "loss": 0.0535, |
| "step": 165 |
| }, |
| { |
| "epoch": 1.4090909090909092, |
| "grad_norm": 0.24120447039604187, |
| "learning_rate": 4.853172626273841e-06, |
| "loss": 0.0563, |
| "step": 166 |
| }, |
| { |
| "epoch": 1.4176136363636362, |
| "grad_norm": 0.2234521359205246, |
| "learning_rate": 4.848735361582396e-06, |
| "loss": 0.0526, |
| "step": 167 |
| }, |
| { |
| "epoch": 1.4261363636363638, |
| "grad_norm": 0.24657292664051056, |
| "learning_rate": 4.844234132265139e-06, |
| "loss": 0.0596, |
| "step": 168 |
| }, |
| { |
| "epoch": 1.4346590909090908, |
| "grad_norm": 0.4169978201389313, |
| "learning_rate": 4.839669060906952e-06, |
| "loss": 0.0569, |
| "step": 169 |
| }, |
| { |
| "epoch": 1.4431818181818181, |
| "grad_norm": 0.24876588582992554, |
| "learning_rate": 4.835040271831371e-06, |
| "loss": 0.0555, |
| "step": 170 |
| }, |
| { |
| "epoch": 1.4517045454545454, |
| "grad_norm": 0.2905561923980713, |
| "learning_rate": 4.830347891097193e-06, |
| "loss": 0.0518, |
| "step": 171 |
| }, |
| { |
| "epoch": 1.4602272727272727, |
| "grad_norm": 0.32290127873420715, |
| "learning_rate": 4.8255920464950545e-06, |
| "loss": 0.0544, |
| "step": 172 |
| }, |
| { |
| "epoch": 1.46875, |
| "grad_norm": 0.3065219223499298, |
| "learning_rate": 4.820772867543939e-06, |
| "loss": 0.0563, |
| "step": 173 |
| }, |
| { |
| "epoch": 1.4772727272727273, |
| "grad_norm": 0.3081221878528595, |
| "learning_rate": 4.8158904854876555e-06, |
| "loss": 0.0595, |
| "step": 174 |
| }, |
| { |
| "epoch": 1.4857954545454546, |
| "grad_norm": 0.3171551823616028, |
| "learning_rate": 4.810945033291267e-06, |
| "loss": 0.0509, |
| "step": 175 |
| }, |
| { |
| "epoch": 1.4943181818181819, |
| "grad_norm": 0.46660706400871277, |
| "learning_rate": 4.805936645637463e-06, |
| "loss": 0.0575, |
| "step": 176 |
| }, |
| { |
| "epoch": 1.5028409090909092, |
| "grad_norm": 0.3173319399356842, |
| "learning_rate": 4.800865458922899e-06, |
| "loss": 0.0579, |
| "step": 177 |
| }, |
| { |
| "epoch": 1.5113636363636362, |
| "grad_norm": 0.2553943693637848, |
| "learning_rate": 4.795731611254473e-06, |
| "loss": 0.0602, |
| "step": 178 |
| }, |
| { |
| "epoch": 1.5198863636363638, |
| "grad_norm": 0.39231568574905396, |
| "learning_rate": 4.790535242445573e-06, |
| "loss": 0.0606, |
| "step": 179 |
| }, |
| { |
| "epoch": 1.5284090909090908, |
| "grad_norm": 0.2721359133720398, |
| "learning_rate": 4.7852764940122636e-06, |
| "loss": 0.0545, |
| "step": 180 |
| }, |
| { |
| "epoch": 1.5369318181818183, |
| "grad_norm": 0.2545195519924164, |
| "learning_rate": 4.779955509169435e-06, |
| "loss": 0.0546, |
| "step": 181 |
| }, |
| { |
| "epoch": 1.5454545454545454, |
| "grad_norm": 0.3155561685562134, |
| "learning_rate": 4.7745724328269e-06, |
| "loss": 0.0543, |
| "step": 182 |
| }, |
| { |
| "epoch": 1.5539772727272727, |
| "grad_norm": 0.24933341145515442, |
| "learning_rate": 4.769127411585452e-06, |
| "loss": 0.0571, |
| "step": 183 |
| }, |
| { |
| "epoch": 1.5625, |
| "grad_norm": 0.2886108458042145, |
| "learning_rate": 4.763620593732867e-06, |
| "loss": 0.0521, |
| "step": 184 |
| }, |
| { |
| "epoch": 1.5710227272727273, |
| "grad_norm": 0.23579014837741852, |
| "learning_rate": 4.75805212923987e-06, |
| "loss": 0.0551, |
| "step": 185 |
| }, |
| { |
| "epoch": 1.5795454545454546, |
| "grad_norm": 0.2821877896785736, |
| "learning_rate": 4.752422169756048e-06, |
| "loss": 0.0494, |
| "step": 186 |
| }, |
| { |
| "epoch": 1.5880681818181817, |
| "grad_norm": 0.28472304344177246, |
| "learning_rate": 4.746730868605721e-06, |
| "loss": 0.0524, |
| "step": 187 |
| }, |
| { |
| "epoch": 1.5965909090909092, |
| "grad_norm": 0.30284813046455383, |
| "learning_rate": 4.7409783807837654e-06, |
| "loss": 0.0519, |
| "step": 188 |
| }, |
| { |
| "epoch": 1.6051136363636362, |
| "grad_norm": 0.2568075656890869, |
| "learning_rate": 4.735164862951395e-06, |
| "loss": 0.0532, |
| "step": 189 |
| }, |
| { |
| "epoch": 1.6136363636363638, |
| "grad_norm": 0.28199082612991333, |
| "learning_rate": 4.729290473431892e-06, |
| "loss": 0.0578, |
| "step": 190 |
| }, |
| { |
| "epoch": 1.6221590909090908, |
| "grad_norm": 0.3818947970867157, |
| "learning_rate": 4.723355372206297e-06, |
| "loss": 0.0552, |
| "step": 191 |
| }, |
| { |
| "epoch": 1.6306818181818183, |
| "grad_norm": 0.26314595341682434, |
| "learning_rate": 4.717359720909053e-06, |
| "loss": 0.0505, |
| "step": 192 |
| }, |
| { |
| "epoch": 1.6392045454545454, |
| "grad_norm": 0.2616359293460846, |
| "learning_rate": 4.7113036828235995e-06, |
| "loss": 0.0511, |
| "step": 193 |
| }, |
| { |
| "epoch": 1.6477272727272727, |
| "grad_norm": 0.33692309260368347, |
| "learning_rate": 4.705187422877931e-06, |
| "loss": 0.0554, |
| "step": 194 |
| }, |
| { |
| "epoch": 1.65625, |
| "grad_norm": 0.30861467123031616, |
| "learning_rate": 4.6990111076401e-06, |
| "loss": 0.0485, |
| "step": 195 |
| }, |
| { |
| "epoch": 1.6647727272727273, |
| "grad_norm": 0.251370370388031, |
| "learning_rate": 4.692774905313687e-06, |
| "loss": 0.0454, |
| "step": 196 |
| }, |
| { |
| "epoch": 1.6732954545454546, |
| "grad_norm": 0.33005693554878235, |
| "learning_rate": 4.686478985733212e-06, |
| "loss": 0.0509, |
| "step": 197 |
| }, |
| { |
| "epoch": 1.6818181818181817, |
| "grad_norm": 0.39172685146331787, |
| "learning_rate": 4.68012352035952e-06, |
| "loss": 0.0552, |
| "step": 198 |
| }, |
| { |
| "epoch": 1.6903409090909092, |
| "grad_norm": 0.5113612413406372, |
| "learning_rate": 4.673708682275097e-06, |
| "loss": 0.0537, |
| "step": 199 |
| }, |
| { |
| "epoch": 1.6988636363636362, |
| "grad_norm": 0.27660125494003296, |
| "learning_rate": 4.667234646179368e-06, |
| "loss": 0.0463, |
| "step": 200 |
| }, |
| { |
| "epoch": 1.7073863636363638, |
| "grad_norm": 0.38760706782341003, |
| "learning_rate": 4.660701588383939e-06, |
| "loss": 0.0496, |
| "step": 201 |
| }, |
| { |
| "epoch": 1.7159090909090908, |
| "grad_norm": 0.3487601578235626, |
| "learning_rate": 4.654109686807787e-06, |
| "loss": 0.0498, |
| "step": 202 |
| }, |
| { |
| "epoch": 1.7244318181818183, |
| "grad_norm": 0.2964484393596649, |
| "learning_rate": 4.647459120972422e-06, |
| "loss": 0.0501, |
| "step": 203 |
| }, |
| { |
| "epoch": 1.7329545454545454, |
| "grad_norm": 0.3318927586078644, |
| "learning_rate": 4.640750071996995e-06, |
| "loss": 0.0535, |
| "step": 204 |
| }, |
| { |
| "epoch": 1.7414772727272727, |
| "grad_norm": 0.3152824640274048, |
| "learning_rate": 4.633982722593367e-06, |
| "loss": 0.0537, |
| "step": 205 |
| }, |
| { |
| "epoch": 1.75, |
| "grad_norm": 0.3267940878868103, |
| "learning_rate": 4.62715725706113e-06, |
| "loss": 0.0434, |
| "step": 206 |
| }, |
| { |
| "epoch": 1.7585227272727273, |
| "grad_norm": 0.32321202754974365, |
| "learning_rate": 4.620273861282593e-06, |
| "loss": 0.055, |
| "step": 207 |
| }, |
| { |
| "epoch": 1.7670454545454546, |
| "grad_norm": 0.3501484990119934, |
| "learning_rate": 4.613332722717714e-06, |
| "loss": 0.0458, |
| "step": 208 |
| }, |
| { |
| "epoch": 1.7755681818181817, |
| "grad_norm": 0.29107236862182617, |
| "learning_rate": 4.606334030399e-06, |
| "loss": 0.0496, |
| "step": 209 |
| }, |
| { |
| "epoch": 1.7840909090909092, |
| "grad_norm": 0.3194922208786011, |
| "learning_rate": 4.599277974926355e-06, |
| "loss": 0.0532, |
| "step": 210 |
| }, |
| { |
| "epoch": 1.7926136363636362, |
| "grad_norm": 0.2896871864795685, |
| "learning_rate": 4.592164748461892e-06, |
| "loss": 0.0482, |
| "step": 211 |
| }, |
| { |
| "epoch": 1.8011363636363638, |
| "grad_norm": 0.2559587061405182, |
| "learning_rate": 4.584994544724695e-06, |
| "loss": 0.0474, |
| "step": 212 |
| }, |
| { |
| "epoch": 1.8096590909090908, |
| "grad_norm": 0.3188915252685547, |
| "learning_rate": 4.577767558985552e-06, |
| "loss": 0.0454, |
| "step": 213 |
| }, |
| { |
| "epoch": 1.8181818181818183, |
| "grad_norm": 0.31643223762512207, |
| "learning_rate": 4.57048398806163e-06, |
| "loss": 0.0503, |
| "step": 214 |
| }, |
| { |
| "epoch": 1.8267045454545454, |
| "grad_norm": 0.3251838684082031, |
| "learning_rate": 4.563144030311114e-06, |
| "loss": 0.0484, |
| "step": 215 |
| }, |
| { |
| "epoch": 1.8352272727272727, |
| "grad_norm": 0.26603975892066956, |
| "learning_rate": 4.555747885627812e-06, |
| "loss": 0.0442, |
| "step": 216 |
| }, |
| { |
| "epoch": 1.84375, |
| "grad_norm": 0.4656536877155304, |
| "learning_rate": 4.548295755435703e-06, |
| "loss": 0.0428, |
| "step": 217 |
| }, |
| { |
| "epoch": 1.8522727272727273, |
| "grad_norm": 0.32816216349601746, |
| "learning_rate": 4.540787842683459e-06, |
| "loss": 0.0455, |
| "step": 218 |
| }, |
| { |
| "epoch": 1.8607954545454546, |
| "grad_norm": 0.31960371136665344, |
| "learning_rate": 4.533224351838914e-06, |
| "loss": 0.0454, |
| "step": 219 |
| }, |
| { |
| "epoch": 1.8693181818181817, |
| "grad_norm": 0.33228686451911926, |
| "learning_rate": 4.525605488883493e-06, |
| "loss": 0.0413, |
| "step": 220 |
| }, |
| { |
| "epoch": 1.8778409090909092, |
| "grad_norm": 0.271839439868927, |
| "learning_rate": 4.517931461306609e-06, |
| "loss": 0.0462, |
| "step": 221 |
| }, |
| { |
| "epoch": 1.8863636363636362, |
| "grad_norm": 0.3333345949649811, |
| "learning_rate": 4.510202478100008e-06, |
| "loss": 0.0458, |
| "step": 222 |
| }, |
| { |
| "epoch": 1.8948863636363638, |
| "grad_norm": 0.2923465371131897, |
| "learning_rate": 4.502418749752076e-06, |
| "loss": 0.0407, |
| "step": 223 |
| }, |
| { |
| "epoch": 1.9034090909090908, |
| "grad_norm": 0.34088781476020813, |
| "learning_rate": 4.494580488242109e-06, |
| "loss": 0.0422, |
| "step": 224 |
| }, |
| { |
| "epoch": 1.9119318181818183, |
| "grad_norm": 0.3535335958003998, |
| "learning_rate": 4.486687907034544e-06, |
| "loss": 0.0454, |
| "step": 225 |
| }, |
| { |
| "epoch": 1.9204545454545454, |
| "grad_norm": 0.32973766326904297, |
| "learning_rate": 4.478741221073136e-06, |
| "loss": 0.0429, |
| "step": 226 |
| }, |
| { |
| "epoch": 1.9289772727272727, |
| "grad_norm": 0.3509384095668793, |
| "learning_rate": 4.470740646775113e-06, |
| "loss": 0.0436, |
| "step": 227 |
| }, |
| { |
| "epoch": 1.9375, |
| "grad_norm": 0.2931392788887024, |
| "learning_rate": 4.462686402025277e-06, |
| "loss": 0.0466, |
| "step": 228 |
| }, |
| { |
| "epoch": 1.9460227272727273, |
| "grad_norm": 0.30415666103363037, |
| "learning_rate": 4.454578706170075e-06, |
| "loss": 0.0416, |
| "step": 229 |
| }, |
| { |
| "epoch": 1.9545454545454546, |
| "grad_norm": 0.3433127701282501, |
| "learning_rate": 4.446417780011618e-06, |
| "loss": 0.0424, |
| "step": 230 |
| }, |
| { |
| "epoch": 1.9630681818181817, |
| "grad_norm": 0.352159708738327, |
| "learning_rate": 4.4382038458016764e-06, |
| "loss": 0.0404, |
| "step": 231 |
| }, |
| { |
| "epoch": 1.9715909090909092, |
| "grad_norm": 0.27307572960853577, |
| "learning_rate": 4.42993712723562e-06, |
| "loss": 0.0429, |
| "step": 232 |
| }, |
| { |
| "epoch": 1.9801136363636362, |
| "grad_norm": 0.2662625312805176, |
| "learning_rate": 4.4216178494463305e-06, |
| "loss": 0.0425, |
| "step": 233 |
| }, |
| { |
| "epoch": 1.9886363636363638, |
| "grad_norm": 0.29612037539482117, |
| "learning_rate": 4.413246238998069e-06, |
| "loss": 0.0463, |
| "step": 234 |
| }, |
| { |
| "epoch": 1.9971590909090908, |
| "grad_norm": 0.33151501417160034, |
| "learning_rate": 4.404822523880305e-06, |
| "loss": 0.0441, |
| "step": 235 |
| }, |
| { |
| "epoch": 2.0, |
| "grad_norm": 0.33151501417160034, |
| "learning_rate": 4.396346933501508e-06, |
| "loss": 0.0494, |
| "step": 236 |
| }, |
| { |
| "epoch": 2.008522727272727, |
| "grad_norm": 0.8211016058921814, |
| "learning_rate": 4.3878196986829015e-06, |
| "loss": 0.0394, |
| "step": 237 |
| }, |
| { |
| "epoch": 2.0170454545454546, |
| "grad_norm": 0.28689032793045044, |
| "learning_rate": 4.379241051652174e-06, |
| "loss": 0.0379, |
| "step": 238 |
| }, |
| { |
| "epoch": 2.0255681818181817, |
| "grad_norm": 0.3541617691516876, |
| "learning_rate": 4.370611226037155e-06, |
| "loss": 0.0363, |
| "step": 239 |
| }, |
| { |
| "epoch": 2.034090909090909, |
| "grad_norm": 0.32633963227272034, |
| "learning_rate": 4.361930456859455e-06, |
| "loss": 0.0403, |
| "step": 240 |
| }, |
| { |
| "epoch": 2.0426136363636362, |
| "grad_norm": 0.30213046073913574, |
| "learning_rate": 4.353198980528063e-06, |
| "loss": 0.0393, |
| "step": 241 |
| }, |
| { |
| "epoch": 2.0511363636363638, |
| "grad_norm": 0.3540145456790924, |
| "learning_rate": 4.3444170348329095e-06, |
| "loss": 0.035, |
| "step": 242 |
| }, |
| { |
| "epoch": 2.059659090909091, |
| "grad_norm": 0.2990974485874176, |
| "learning_rate": 4.335584858938388e-06, |
| "loss": 0.0346, |
| "step": 243 |
| }, |
| { |
| "epoch": 2.0681818181818183, |
| "grad_norm": 0.4325309991836548, |
| "learning_rate": 4.326702693376844e-06, |
| "loss": 0.0351, |
| "step": 244 |
| }, |
| { |
| "epoch": 2.0767045454545454, |
| "grad_norm": 0.31114035844802856, |
| "learning_rate": 4.317770780042025e-06, |
| "loss": 0.0341, |
| "step": 245 |
| }, |
| { |
| "epoch": 2.085227272727273, |
| "grad_norm": 0.30371102690696716, |
| "learning_rate": 4.308789362182492e-06, |
| "loss": 0.0373, |
| "step": 246 |
| }, |
| { |
| "epoch": 2.09375, |
| "grad_norm": 0.40639927983283997, |
| "learning_rate": 4.2997586843949905e-06, |
| "loss": 0.0333, |
| "step": 247 |
| }, |
| { |
| "epoch": 2.102272727272727, |
| "grad_norm": 0.33129221200942993, |
| "learning_rate": 4.290678992617797e-06, |
| "loss": 0.0323, |
| "step": 248 |
| }, |
| { |
| "epoch": 2.1107954545454546, |
| "grad_norm": 0.3084648549556732, |
| "learning_rate": 4.28155053412402e-06, |
| "loss": 0.0355, |
| "step": 249 |
| }, |
| { |
| "epoch": 2.1193181818181817, |
| "grad_norm": 0.3743211328983307, |
| "learning_rate": 4.2723735575148585e-06, |
| "loss": 0.0367, |
| "step": 250 |
| }, |
| { |
| "epoch": 2.127840909090909, |
| "grad_norm": 0.354165256023407, |
| "learning_rate": 4.263148312712838e-06, |
| "loss": 0.0293, |
| "step": 251 |
| }, |
| { |
| "epoch": 2.1363636363636362, |
| "grad_norm": 0.37607696652412415, |
| "learning_rate": 4.253875050955005e-06, |
| "loss": 0.0318, |
| "step": 252 |
| }, |
| { |
| "epoch": 2.1448863636363638, |
| "grad_norm": 0.33469027280807495, |
| "learning_rate": 4.2445540247860805e-06, |
| "loss": 0.038, |
| "step": 253 |
| }, |
| { |
| "epoch": 2.153409090909091, |
| "grad_norm": 0.32785046100616455, |
| "learning_rate": 4.2351854880515856e-06, |
| "loss": 0.0303, |
| "step": 254 |
| }, |
| { |
| "epoch": 2.1619318181818183, |
| "grad_norm": 0.41809284687042236, |
| "learning_rate": 4.2257696958909255e-06, |
| "loss": 0.0339, |
| "step": 255 |
| }, |
| { |
| "epoch": 2.1704545454545454, |
| "grad_norm": 0.3557347059249878, |
| "learning_rate": 4.216306904730448e-06, |
| "loss": 0.0347, |
| "step": 256 |
| }, |
| { |
| "epoch": 2.178977272727273, |
| "grad_norm": 0.3383937478065491, |
| "learning_rate": 4.206797372276447e-06, |
| "loss": 0.0384, |
| "step": 257 |
| }, |
| { |
| "epoch": 2.1875, |
| "grad_norm": 0.339476615190506, |
| "learning_rate": 4.197241357508159e-06, |
| "loss": 0.0351, |
| "step": 258 |
| }, |
| { |
| "epoch": 2.196022727272727, |
| "grad_norm": 0.4095429480075836, |
| "learning_rate": 4.187639120670702e-06, |
| "loss": 0.0339, |
| "step": 259 |
| }, |
| { |
| "epoch": 2.2045454545454546, |
| "grad_norm": 0.3317232131958008, |
| "learning_rate": 4.177990923267986e-06, |
| "loss": 0.0363, |
| "step": 260 |
| }, |
| { |
| "epoch": 2.2130681818181817, |
| "grad_norm": 0.4111759066581726, |
| "learning_rate": 4.168297028055599e-06, |
| "loss": 0.0369, |
| "step": 261 |
| }, |
| { |
| "epoch": 2.221590909090909, |
| "grad_norm": 0.4489494860172272, |
| "learning_rate": 4.158557699033644e-06, |
| "loss": 0.0312, |
| "step": 262 |
| }, |
| { |
| "epoch": 2.2301136363636362, |
| "grad_norm": 0.3993566930294037, |
| "learning_rate": 4.148773201439553e-06, |
| "loss": 0.0306, |
| "step": 263 |
| }, |
| { |
| "epoch": 2.2386363636363638, |
| "grad_norm": 0.32782599329948425, |
| "learning_rate": 4.138943801740865e-06, |
| "loss": 0.0329, |
| "step": 264 |
| }, |
| { |
| "epoch": 2.247159090909091, |
| "grad_norm": 0.26705044507980347, |
| "learning_rate": 4.129069767627963e-06, |
| "loss": 0.0341, |
| "step": 265 |
| }, |
| { |
| "epoch": 2.2556818181818183, |
| "grad_norm": 0.32875779271125793, |
| "learning_rate": 4.119151368006793e-06, |
| "loss": 0.0301, |
| "step": 266 |
| }, |
| { |
| "epoch": 2.2642045454545454, |
| "grad_norm": 0.42058393359184265, |
| "learning_rate": 4.109188872991529e-06, |
| "loss": 0.0346, |
| "step": 267 |
| }, |
| { |
| "epoch": 2.2727272727272725, |
| "grad_norm": 0.44719040393829346, |
| "learning_rate": 4.099182553897228e-06, |
| "loss": 0.0297, |
| "step": 268 |
| }, |
| { |
| "epoch": 2.28125, |
| "grad_norm": 0.35350751876831055, |
| "learning_rate": 4.089132683232437e-06, |
| "loss": 0.0285, |
| "step": 269 |
| }, |
| { |
| "epoch": 2.2897727272727275, |
| "grad_norm": 0.5518717169761658, |
| "learning_rate": 4.0790395346917674e-06, |
| "loss": 0.0333, |
| "step": 270 |
| }, |
| { |
| "epoch": 2.2982954545454546, |
| "grad_norm": 0.4138563573360443, |
| "learning_rate": 4.068903383148448e-06, |
| "loss": 0.0385, |
| "step": 271 |
| }, |
| { |
| "epoch": 2.3068181818181817, |
| "grad_norm": 0.406649112701416, |
| "learning_rate": 4.058724504646834e-06, |
| "loss": 0.0298, |
| "step": 272 |
| }, |
| { |
| "epoch": 2.315340909090909, |
| "grad_norm": 0.3303579092025757, |
| "learning_rate": 4.0485031763948935e-06, |
| "loss": 0.0339, |
| "step": 273 |
| }, |
| { |
| "epoch": 2.3238636363636362, |
| "grad_norm": 0.35349851846694946, |
| "learning_rate": 4.038239676756654e-06, |
| "loss": 0.0308, |
| "step": 274 |
| }, |
| { |
| "epoch": 2.3323863636363638, |
| "grad_norm": 0.4232323467731476, |
| "learning_rate": 4.027934285244624e-06, |
| "loss": 0.0307, |
| "step": 275 |
| }, |
| { |
| "epoch": 2.340909090909091, |
| "grad_norm": 0.35764482617378235, |
| "learning_rate": 4.017587282512181e-06, |
| "loss": 0.0296, |
| "step": 276 |
| }, |
| { |
| "epoch": 2.3494318181818183, |
| "grad_norm": 0.3608599603176117, |
| "learning_rate": 4.007198950345929e-06, |
| "loss": 0.0346, |
| "step": 277 |
| }, |
| { |
| "epoch": 2.3579545454545454, |
| "grad_norm": 0.530514121055603, |
| "learning_rate": 3.996769571658022e-06, |
| "loss": 0.0293, |
| "step": 278 |
| }, |
| { |
| "epoch": 2.3664772727272725, |
| "grad_norm": 0.3813832998275757, |
| "learning_rate": 3.9862994304784585e-06, |
| "loss": 0.0279, |
| "step": 279 |
| }, |
| { |
| "epoch": 2.375, |
| "grad_norm": 0.48923951387405396, |
| "learning_rate": 3.975788811947351e-06, |
| "loss": 0.0284, |
| "step": 280 |
| }, |
| { |
| "epoch": 2.3835227272727275, |
| "grad_norm": 0.3939470946788788, |
| "learning_rate": 3.965238002307156e-06, |
| "loss": 0.0315, |
| "step": 281 |
| }, |
| { |
| "epoch": 2.3920454545454546, |
| "grad_norm": 0.2854841649532318, |
| "learning_rate": 3.9546472888948825e-06, |
| "loss": 0.0288, |
| "step": 282 |
| }, |
| { |
| "epoch": 2.4005681818181817, |
| "grad_norm": 0.39930015802383423, |
| "learning_rate": 3.944016960134262e-06, |
| "loss": 0.0304, |
| "step": 283 |
| }, |
| { |
| "epoch": 2.409090909090909, |
| "grad_norm": 0.4333280026912689, |
| "learning_rate": 3.933347305527898e-06, |
| "loss": 0.0282, |
| "step": 284 |
| }, |
| { |
| "epoch": 2.4176136363636362, |
| "grad_norm": 0.34740084409713745, |
| "learning_rate": 3.922638615649381e-06, |
| "loss": 0.0313, |
| "step": 285 |
| }, |
| { |
| "epoch": 2.4261363636363638, |
| "grad_norm": 0.4125167727470398, |
| "learning_rate": 3.911891182135371e-06, |
| "loss": 0.0301, |
| "step": 286 |
| }, |
| { |
| "epoch": 2.434659090909091, |
| "grad_norm": 0.350911945104599, |
| "learning_rate": 3.9011052976776635e-06, |
| "loss": 0.0255, |
| "step": 287 |
| }, |
| { |
| "epoch": 2.4431818181818183, |
| "grad_norm": 0.2700820863246918, |
| "learning_rate": 3.890281256015207e-06, |
| "loss": 0.0296, |
| "step": 288 |
| }, |
| { |
| "epoch": 2.4517045454545454, |
| "grad_norm": 0.3823346197605133, |
| "learning_rate": 3.879419351926115e-06, |
| "loss": 0.0279, |
| "step": 289 |
| }, |
| { |
| "epoch": 2.4602272727272725, |
| "grad_norm": 0.4576987326145172, |
| "learning_rate": 3.868519881219631e-06, |
| "loss": 0.0298, |
| "step": 290 |
| }, |
| { |
| "epoch": 2.46875, |
| "grad_norm": 0.319879412651062, |
| "learning_rate": 3.857583140728075e-06, |
| "loss": 0.0313, |
| "step": 291 |
| }, |
| { |
| "epoch": 2.4772727272727275, |
| "grad_norm": 0.36349669098854065, |
| "learning_rate": 3.8466094282987575e-06, |
| "loss": 0.0266, |
| "step": 292 |
| }, |
| { |
| "epoch": 2.4857954545454546, |
| "grad_norm": 0.3291328251361847, |
| "learning_rate": 3.835599042785872e-06, |
| "loss": 0.0292, |
| "step": 293 |
| }, |
| { |
| "epoch": 2.4943181818181817, |
| "grad_norm": 0.36020705103874207, |
| "learning_rate": 3.824552284042351e-06, |
| "loss": 0.0295, |
| "step": 294 |
| }, |
| { |
| "epoch": 2.502840909090909, |
| "grad_norm": 0.3137778341770172, |
| "learning_rate": 3.8134694529117045e-06, |
| "loss": 0.0244, |
| "step": 295 |
| }, |
| { |
| "epoch": 2.5113636363636362, |
| "grad_norm": 0.36281993985176086, |
| "learning_rate": 3.802350851219826e-06, |
| "loss": 0.0272, |
| "step": 296 |
| }, |
| { |
| "epoch": 2.5198863636363638, |
| "grad_norm": 0.3342132270336151, |
| "learning_rate": 3.79119678176677e-06, |
| "loss": 0.0288, |
| "step": 297 |
| }, |
| { |
| "epoch": 2.528409090909091, |
| "grad_norm": 0.45444363355636597, |
| "learning_rate": 3.7800075483185073e-06, |
| "loss": 0.0282, |
| "step": 298 |
| }, |
| { |
| "epoch": 2.5369318181818183, |
| "grad_norm": 0.4871613085269928, |
| "learning_rate": 3.7687834555986537e-06, |
| "loss": 0.0334, |
| "step": 299 |
| }, |
| { |
| "epoch": 2.5454545454545454, |
| "grad_norm": 0.49083876609802246, |
| "learning_rate": 3.7575248092801686e-06, |
| "loss": 0.0229, |
| "step": 300 |
| }, |
| { |
| "epoch": 2.5539772727272725, |
| "grad_norm": 0.3823300004005432, |
| "learning_rate": 3.7462319159770344e-06, |
| "loss": 0.023, |
| "step": 301 |
| }, |
| { |
| "epoch": 2.5625, |
| "grad_norm": 0.38027557730674744, |
| "learning_rate": 3.734905083235901e-06, |
| "loss": 0.025, |
| "step": 302 |
| }, |
| { |
| "epoch": 2.5710227272727275, |
| "grad_norm": 0.42017418146133423, |
| "learning_rate": 3.723544619527714e-06, |
| "loss": 0.03, |
| "step": 303 |
| }, |
| { |
| "epoch": 2.5795454545454546, |
| "grad_norm": 0.4550953209400177, |
| "learning_rate": 3.712150834239313e-06, |
| "loss": 0.0301, |
| "step": 304 |
| }, |
| { |
| "epoch": 2.5880681818181817, |
| "grad_norm": 0.4527112543582916, |
| "learning_rate": 3.7007240376650054e-06, |
| "loss": 0.0235, |
| "step": 305 |
| }, |
| { |
| "epoch": 2.596590909090909, |
| "grad_norm": 0.36789700388908386, |
| "learning_rate": 3.6892645409981166e-06, |
| "loss": 0.0231, |
| "step": 306 |
| }, |
| { |
| "epoch": 2.6051136363636362, |
| "grad_norm": 0.6067688465118408, |
| "learning_rate": 3.6777726563225147e-06, |
| "loss": 0.0256, |
| "step": 307 |
| }, |
| { |
| "epoch": 2.6136363636363638, |
| "grad_norm": 0.35337361693382263, |
| "learning_rate": 3.6662486966041104e-06, |
| "loss": 0.0244, |
| "step": 308 |
| }, |
| { |
| "epoch": 2.622159090909091, |
| "grad_norm": 0.4097234010696411, |
| "learning_rate": 3.654692975682336e-06, |
| "loss": 0.0262, |
| "step": 309 |
| }, |
| { |
| "epoch": 2.6306818181818183, |
| "grad_norm": 0.5017573833465576, |
| "learning_rate": 3.6431058082615966e-06, |
| "loss": 0.0245, |
| "step": 310 |
| }, |
| { |
| "epoch": 2.6392045454545454, |
| "grad_norm": 0.40696877241134644, |
| "learning_rate": 3.6314875099027e-06, |
| "loss": 0.023, |
| "step": 311 |
| }, |
| { |
| "epoch": 2.6477272727272725, |
| "grad_norm": 0.35261037945747375, |
| "learning_rate": 3.619838397014263e-06, |
| "loss": 0.0232, |
| "step": 312 |
| }, |
| { |
| "epoch": 2.65625, |
| "grad_norm": 0.3590414524078369, |
| "learning_rate": 3.6081587868440944e-06, |
| "loss": 0.0242, |
| "step": 313 |
| }, |
| { |
| "epoch": 2.6647727272727275, |
| "grad_norm": 0.37196213006973267, |
| "learning_rate": 3.5964489974705553e-06, |
| "loss": 0.0222, |
| "step": 314 |
| }, |
| { |
| "epoch": 2.6732954545454546, |
| "grad_norm": 0.3922335207462311, |
| "learning_rate": 3.5847093477938955e-06, |
| "loss": 0.0236, |
| "step": 315 |
| }, |
| { |
| "epoch": 2.6818181818181817, |
| "grad_norm": 0.3914213180541992, |
| "learning_rate": 3.5729401575275724e-06, |
| "loss": 0.025, |
| "step": 316 |
| }, |
| { |
| "epoch": 2.690340909090909, |
| "grad_norm": 0.4622058570384979, |
| "learning_rate": 3.561141747189538e-06, |
| "loss": 0.0252, |
| "step": 317 |
| }, |
| { |
| "epoch": 2.6988636363636362, |
| "grad_norm": 0.5096047520637512, |
| "learning_rate": 3.5493144380935155e-06, |
| "loss": 0.0242, |
| "step": 318 |
| }, |
| { |
| "epoch": 2.7073863636363638, |
| "grad_norm": 0.574635922908783, |
| "learning_rate": 3.537458552340247e-06, |
| "loss": 0.0224, |
| "step": 319 |
| }, |
| { |
| "epoch": 2.715909090909091, |
| "grad_norm": 0.6871437430381775, |
| "learning_rate": 3.5255744128087175e-06, |
| "loss": 0.0271, |
| "step": 320 |
| }, |
| { |
| "epoch": 2.7244318181818183, |
| "grad_norm": 0.48472297191619873, |
| "learning_rate": 3.5136623431473704e-06, |
| "loss": 0.0246, |
| "step": 321 |
| }, |
| { |
| "epoch": 2.7329545454545454, |
| "grad_norm": 0.5469712018966675, |
| "learning_rate": 3.501722667765286e-06, |
| "loss": 0.0252, |
| "step": 322 |
| }, |
| { |
| "epoch": 2.7414772727272725, |
| "grad_norm": 0.4378078877925873, |
| "learning_rate": 3.489755711823348e-06, |
| "loss": 0.0277, |
| "step": 323 |
| }, |
| { |
| "epoch": 2.75, |
| "grad_norm": 0.4401378035545349, |
| "learning_rate": 3.47776180122539e-06, |
| "loss": 0.0235, |
| "step": 324 |
| }, |
| { |
| "epoch": 2.7585227272727275, |
| "grad_norm": 0.6726410388946533, |
| "learning_rate": 3.4657412626093204e-06, |
| "loss": 0.0207, |
| "step": 325 |
| }, |
| { |
| "epoch": 2.7670454545454546, |
| "grad_norm": 0.3676104247570038, |
| "learning_rate": 3.4536944233382248e-06, |
| "loss": 0.0196, |
| "step": 326 |
| }, |
| { |
| "epoch": 2.7755681818181817, |
| "grad_norm": 0.4041842222213745, |
| "learning_rate": 3.4416216114914493e-06, |
| "loss": 0.0275, |
| "step": 327 |
| }, |
| { |
| "epoch": 2.784090909090909, |
| "grad_norm": 0.7207781076431274, |
| "learning_rate": 3.429523155855672e-06, |
| "loss": 0.0243, |
| "step": 328 |
| }, |
| { |
| "epoch": 2.7926136363636362, |
| "grad_norm": 0.6210029125213623, |
| "learning_rate": 3.417399385915941e-06, |
| "loss": 0.0207, |
| "step": 329 |
| }, |
| { |
| "epoch": 2.8011363636363638, |
| "grad_norm": 0.49545082449913025, |
| "learning_rate": 3.405250631846708e-06, |
| "loss": 0.0243, |
| "step": 330 |
| }, |
| { |
| "epoch": 2.809659090909091, |
| "grad_norm": 0.5395477414131165, |
| "learning_rate": 3.393077224502832e-06, |
| "loss": 0.0227, |
| "step": 331 |
| }, |
| { |
| "epoch": 2.8181818181818183, |
| "grad_norm": 0.4645976126194, |
| "learning_rate": 3.3808794954105716e-06, |
| "loss": 0.0192, |
| "step": 332 |
| }, |
| { |
| "epoch": 2.8267045454545454, |
| "grad_norm": 0.3877430558204651, |
| "learning_rate": 3.368657776758555e-06, |
| "loss": 0.0219, |
| "step": 333 |
| }, |
| { |
| "epoch": 2.8352272727272725, |
| "grad_norm": 0.5611310005187988, |
| "learning_rate": 3.3564124013887324e-06, |
| "loss": 0.0241, |
| "step": 334 |
| }, |
| { |
| "epoch": 2.84375, |
| "grad_norm": 0.4889032244682312, |
| "learning_rate": 3.3441437027873135e-06, |
| "loss": 0.0181, |
| "step": 335 |
| }, |
| { |
| "epoch": 2.8522727272727275, |
| "grad_norm": 0.47838833928108215, |
| "learning_rate": 3.331852015075685e-06, |
| "loss": 0.0192, |
| "step": 336 |
| }, |
| { |
| "epoch": 2.8607954545454546, |
| "grad_norm": 0.4922442138195038, |
| "learning_rate": 3.3195376730013107e-06, |
| "loss": 0.0197, |
| "step": 337 |
| }, |
| { |
| "epoch": 2.8693181818181817, |
| "grad_norm": 0.3534923493862152, |
| "learning_rate": 3.3072010119286156e-06, |
| "loss": 0.0221, |
| "step": 338 |
| }, |
| { |
| "epoch": 2.877840909090909, |
| "grad_norm": 0.4476766884326935, |
| "learning_rate": 3.294842367829851e-06, |
| "loss": 0.0167, |
| "step": 339 |
| }, |
| { |
| "epoch": 2.8863636363636362, |
| "grad_norm": 0.4423317313194275, |
| "learning_rate": 3.2824620772759475e-06, |
| "loss": 0.0209, |
| "step": 340 |
| }, |
| { |
| "epoch": 2.8948863636363638, |
| "grad_norm": 0.3954547345638275, |
| "learning_rate": 3.270060477427347e-06, |
| "loss": 0.0209, |
| "step": 341 |
| }, |
| { |
| "epoch": 2.903409090909091, |
| "grad_norm": 0.47028860449790955, |
| "learning_rate": 3.257637906024822e-06, |
| "loss": 0.0189, |
| "step": 342 |
| }, |
| { |
| "epoch": 2.9119318181818183, |
| "grad_norm": 0.42390963435173035, |
| "learning_rate": 3.2451947013802746e-06, |
| "loss": 0.015, |
| "step": 343 |
| }, |
| { |
| "epoch": 2.9204545454545454, |
| "grad_norm": 0.4463144838809967, |
| "learning_rate": 3.2327312023675287e-06, |
| "loss": 0.0177, |
| "step": 344 |
| }, |
| { |
| "epoch": 2.9289772727272725, |
| "grad_norm": 0.43879300355911255, |
| "learning_rate": 3.2202477484130947e-06, |
| "loss": 0.0145, |
| "step": 345 |
| }, |
| { |
| "epoch": 2.9375, |
| "grad_norm": 0.36773353815078735, |
| "learning_rate": 3.20774467948693e-06, |
| "loss": 0.0186, |
| "step": 346 |
| }, |
| { |
| "epoch": 2.9460227272727275, |
| "grad_norm": 0.45109617710113525, |
| "learning_rate": 3.19522233609318e-06, |
| "loss": 0.0153, |
| "step": 347 |
| }, |
| { |
| "epoch": 2.9545454545454546, |
| "grad_norm": 0.48943161964416504, |
| "learning_rate": 3.1826810592609036e-06, |
| "loss": 0.0186, |
| "step": 348 |
| }, |
| { |
| "epoch": 2.9630681818181817, |
| "grad_norm": 0.5407648682594299, |
| "learning_rate": 3.170121190534785e-06, |
| "loss": 0.0173, |
| "step": 349 |
| }, |
| { |
| "epoch": 2.971590909090909, |
| "grad_norm": 0.5848131775856018, |
| "learning_rate": 3.157543071965835e-06, |
| "loss": 0.013, |
| "step": 350 |
| }, |
| { |
| "epoch": 2.9801136363636362, |
| "grad_norm": 0.46287980675697327, |
| "learning_rate": 3.1449470461020738e-06, |
| "loss": 0.0194, |
| "step": 351 |
| }, |
| { |
| "epoch": 2.9886363636363638, |
| "grad_norm": 0.655013382434845, |
| "learning_rate": 3.132333455979202e-06, |
| "loss": 0.0176, |
| "step": 352 |
| }, |
| { |
| "epoch": 2.997159090909091, |
| "grad_norm": 0.7326741218566895, |
| "learning_rate": 3.11970264511126e-06, |
| "loss": 0.0154, |
| "step": 353 |
| }, |
| { |
| "epoch": 3.0, |
| "grad_norm": 0.9463636875152588, |
| "learning_rate": 3.107054957481271e-06, |
| "loss": 0.0142, |
| "step": 354 |
| }, |
| { |
| "epoch": 3.008522727272727, |
| "grad_norm": 0.8092179894447327, |
| "learning_rate": 3.0943907375318733e-06, |
| "loss": 0.0178, |
| "step": 355 |
| }, |
| { |
| "epoch": 3.0170454545454546, |
| "grad_norm": 0.43350231647491455, |
| "learning_rate": 3.0817103301559422e-06, |
| "loss": 0.0163, |
| "step": 356 |
| }, |
| { |
| "epoch": 3.0255681818181817, |
| "grad_norm": 0.3987981379032135, |
| "learning_rate": 3.069014080687195e-06, |
| "loss": 0.0153, |
| "step": 357 |
| }, |
| { |
| "epoch": 3.034090909090909, |
| "grad_norm": 0.3733361065387726, |
| "learning_rate": 3.056302334890786e-06, |
| "loss": 0.012, |
| "step": 358 |
| }, |
| { |
| "epoch": 3.0426136363636362, |
| "grad_norm": 0.5638909339904785, |
| "learning_rate": 3.043575438953893e-06, |
| "loss": 0.0128, |
| "step": 359 |
| }, |
| { |
| "epoch": 3.0511363636363638, |
| "grad_norm": 0.5339016914367676, |
| "learning_rate": 3.030833739476285e-06, |
| "loss": 0.0142, |
| "step": 360 |
| }, |
| { |
| "epoch": 3.059659090909091, |
| "grad_norm": 0.5681723356246948, |
| "learning_rate": 3.018077583460886e-06, |
| "loss": 0.0144, |
| "step": 361 |
| }, |
| { |
| "epoch": 3.0681818181818183, |
| "grad_norm": 0.5017768144607544, |
| "learning_rate": 3.0053073183043257e-06, |
| "loss": 0.0137, |
| "step": 362 |
| }, |
| { |
| "epoch": 3.0767045454545454, |
| "grad_norm": 1.0726104974746704, |
| "learning_rate": 2.9925232917874764e-06, |
| "loss": 0.013, |
| "step": 363 |
| }, |
| { |
| "epoch": 3.085227272727273, |
| "grad_norm": 0.5261201858520508, |
| "learning_rate": 2.979725852065981e-06, |
| "loss": 0.0122, |
| "step": 364 |
| }, |
| { |
| "epoch": 3.09375, |
| "grad_norm": 0.5007719993591309, |
| "learning_rate": 2.966915347660775e-06, |
| "loss": 0.0123, |
| "step": 365 |
| }, |
| { |
| "epoch": 3.102272727272727, |
| "grad_norm": 0.5095813870429993, |
| "learning_rate": 2.9540921274485913e-06, |
| "loss": 0.0133, |
| "step": 366 |
| }, |
| { |
| "epoch": 3.1107954545454546, |
| "grad_norm": 0.4466419816017151, |
| "learning_rate": 2.941256540652462e-06, |
| "loss": 0.0094, |
| "step": 367 |
| }, |
| { |
| "epoch": 3.1193181818181817, |
| "grad_norm": 0.9082984328269958, |
| "learning_rate": 2.9284089368322044e-06, |
| "loss": 0.0155, |
| "step": 368 |
| }, |
| { |
| "epoch": 3.127840909090909, |
| "grad_norm": 0.4232461750507355, |
| "learning_rate": 2.915549665874905e-06, |
| "loss": 0.0103, |
| "step": 369 |
| }, |
| { |
| "epoch": 3.1363636363636362, |
| "grad_norm": 0.736997663974762, |
| "learning_rate": 2.9026790779853877e-06, |
| "loss": 0.0153, |
| "step": 370 |
| }, |
| { |
| "epoch": 3.1448863636363638, |
| "grad_norm": 0.5333961844444275, |
| "learning_rate": 2.8897975236766784e-06, |
| "loss": 0.0122, |
| "step": 371 |
| }, |
| { |
| "epoch": 3.153409090909091, |
| "grad_norm": 0.610040545463562, |
| "learning_rate": 2.876905353760459e-06, |
| "loss": 0.0121, |
| "step": 372 |
| }, |
| { |
| "epoch": 3.1619318181818183, |
| "grad_norm": 0.3609560430049896, |
| "learning_rate": 2.864002919337513e-06, |
| "loss": 0.0106, |
| "step": 373 |
| }, |
| { |
| "epoch": 3.1704545454545454, |
| "grad_norm": 0.5127677917480469, |
| "learning_rate": 2.8510905717881615e-06, |
| "loss": 0.0146, |
| "step": 374 |
| }, |
| { |
| "epoch": 3.178977272727273, |
| "grad_norm": 0.6335748434066772, |
| "learning_rate": 2.838168662762699e-06, |
| "loss": 0.0133, |
| "step": 375 |
| }, |
| { |
| "epoch": 3.1875, |
| "grad_norm": 0.5320702195167542, |
| "learning_rate": 2.8252375441718137e-06, |
| "loss": 0.0108, |
| "step": 376 |
| }, |
| { |
| "epoch": 3.196022727272727, |
| "grad_norm": 0.49305084347724915, |
| "learning_rate": 2.812297568177002e-06, |
| "loss": 0.0107, |
| "step": 377 |
| }, |
| { |
| "epoch": 3.2045454545454546, |
| "grad_norm": 0.5496407747268677, |
| "learning_rate": 2.7993490871809808e-06, |
| "loss": 0.0109, |
| "step": 378 |
| }, |
| { |
| "epoch": 3.2130681818181817, |
| "grad_norm": 0.4906885623931885, |
| "learning_rate": 2.7863924538180904e-06, |
| "loss": 0.0123, |
| "step": 379 |
| }, |
| { |
| "epoch": 3.221590909090909, |
| "grad_norm": 0.5412434339523315, |
| "learning_rate": 2.773428020944687e-06, |
| "loss": 0.0128, |
| "step": 380 |
| }, |
| { |
| "epoch": 3.2301136363636362, |
| "grad_norm": 0.6251363754272461, |
| "learning_rate": 2.7604561416295366e-06, |
| "loss": 0.0117, |
| "step": 381 |
| }, |
| { |
| "epoch": 3.2386363636363638, |
| "grad_norm": 0.517668604850769, |
| "learning_rate": 2.747477169144202e-06, |
| "loss": 0.0113, |
| "step": 382 |
| }, |
| { |
| "epoch": 3.247159090909091, |
| "grad_norm": 0.5211403965950012, |
| "learning_rate": 2.7344914569534154e-06, |
| "loss": 0.0103, |
| "step": 383 |
| }, |
| { |
| "epoch": 3.2556818181818183, |
| "grad_norm": 0.797484278678894, |
| "learning_rate": 2.721499358705458e-06, |
| "loss": 0.0111, |
| "step": 384 |
| }, |
| { |
| "epoch": 3.2642045454545454, |
| "grad_norm": 0.6931593418121338, |
| "learning_rate": 2.708501228222523e-06, |
| "loss": 0.0115, |
| "step": 385 |
| }, |
| { |
| "epoch": 3.2727272727272725, |
| "grad_norm": 0.515566349029541, |
| "learning_rate": 2.695497419491089e-06, |
| "loss": 0.0114, |
| "step": 386 |
| }, |
| { |
| "epoch": 3.28125, |
| "grad_norm": 0.48849251866340637, |
| "learning_rate": 2.682488286652269e-06, |
| "loss": 0.01, |
| "step": 387 |
| }, |
| { |
| "epoch": 3.2897727272727275, |
| "grad_norm": 0.6838280558586121, |
| "learning_rate": 2.6694741839921734e-06, |
| "loss": 0.011, |
| "step": 388 |
| }, |
| { |
| "epoch": 3.2982954545454546, |
| "grad_norm": 0.4109753966331482, |
| "learning_rate": 2.656455465932259e-06, |
| "loss": 0.007, |
| "step": 389 |
| }, |
| { |
| "epoch": 3.3068181818181817, |
| "grad_norm": 0.4408889412879944, |
| "learning_rate": 2.6434324870196746e-06, |
| "loss": 0.0096, |
| "step": 390 |
| }, |
| { |
| "epoch": 3.315340909090909, |
| "grad_norm": 0.4267784059047699, |
| "learning_rate": 2.63040560191761e-06, |
| "loss": 0.009, |
| "step": 391 |
| }, |
| { |
| "epoch": 3.3238636363636362, |
| "grad_norm": 0.42103955149650574, |
| "learning_rate": 2.617375165395634e-06, |
| "loss": 0.0084, |
| "step": 392 |
| }, |
| { |
| "epoch": 3.3323863636363638, |
| "grad_norm": 0.4938065707683563, |
| "learning_rate": 2.6043415323200333e-06, |
| "loss": 0.0084, |
| "step": 393 |
| }, |
| { |
| "epoch": 3.340909090909091, |
| "grad_norm": 0.9540116190910339, |
| "learning_rate": 2.591305057644148e-06, |
| "loss": 0.0123, |
| "step": 394 |
| }, |
| { |
| "epoch": 3.3494318181818183, |
| "grad_norm": 0.5479457974433899, |
| "learning_rate": 2.5782660963987054e-06, |
| "loss": 0.01, |
| "step": 395 |
| }, |
| { |
| "epoch": 3.3579545454545454, |
| "grad_norm": 0.44472524523735046, |
| "learning_rate": 2.5652250036821522e-06, |
| "loss": 0.0066, |
| "step": 396 |
| }, |
| { |
| "epoch": 3.3664772727272725, |
| "grad_norm": 1.0710281133651733, |
| "learning_rate": 2.552182134650982e-06, |
| "loss": 0.0109, |
| "step": 397 |
| }, |
| { |
| "epoch": 3.375, |
| "grad_norm": 0.6171219944953918, |
| "learning_rate": 2.5391378445100646e-06, |
| "loss": 0.0054, |
| "step": 398 |
| }, |
| { |
| "epoch": 3.3835227272727275, |
| "grad_norm": 0.5634967684745789, |
| "learning_rate": 2.526092488502971e-06, |
| "loss": 0.0091, |
| "step": 399 |
| }, |
| { |
| "epoch": 3.3920454545454546, |
| "grad_norm": 0.5629643797874451, |
| "learning_rate": 2.5130464219022994e-06, |
| "loss": 0.0079, |
| "step": 400 |
| }, |
| { |
| "epoch": 3.4005681818181817, |
| "grad_norm": 0.45441868901252747, |
| "learning_rate": 2.5e-06, |
| "loss": 0.0084, |
| "step": 401 |
| }, |
| { |
| "epoch": 3.409090909090909, |
| "grad_norm": 0.7561835646629333, |
| "learning_rate": 2.4869535780977023e-06, |
| "loss": 0.0148, |
| "step": 402 |
| }, |
| { |
| "epoch": 3.4176136363636362, |
| "grad_norm": 0.42049774527549744, |
| "learning_rate": 2.47390751149703e-06, |
| "loss": 0.0071, |
| "step": 403 |
| }, |
| { |
| "epoch": 3.4261363636363638, |
| "grad_norm": 0.4444538652896881, |
| "learning_rate": 2.460862155489936e-06, |
| "loss": 0.0075, |
| "step": 404 |
| }, |
| { |
| "epoch": 3.434659090909091, |
| "grad_norm": 0.6667734980583191, |
| "learning_rate": 2.447817865349018e-06, |
| "loss": 0.0098, |
| "step": 405 |
| }, |
| { |
| "epoch": 3.4431818181818183, |
| "grad_norm": 0.6603801250457764, |
| "learning_rate": 2.4347749963178486e-06, |
| "loss": 0.0079, |
| "step": 406 |
| }, |
| { |
| "epoch": 3.4517045454545454, |
| "grad_norm": 0.5178464651107788, |
| "learning_rate": 2.421733903601296e-06, |
| "loss": 0.0065, |
| "step": 407 |
| }, |
| { |
| "epoch": 3.4602272727272725, |
| "grad_norm": 0.47047677636146545, |
| "learning_rate": 2.408694942355853e-06, |
| "loss": 0.0084, |
| "step": 408 |
| }, |
| { |
| "epoch": 3.46875, |
| "grad_norm": 0.4323576092720032, |
| "learning_rate": 2.3956584676799676e-06, |
| "loss": 0.0068, |
| "step": 409 |
| }, |
| { |
| "epoch": 3.4772727272727275, |
| "grad_norm": 0.45331141352653503, |
| "learning_rate": 2.3826248346043664e-06, |
| "loss": 0.008, |
| "step": 410 |
| }, |
| { |
| "epoch": 3.4857954545454546, |
| "grad_norm": 0.4945763051509857, |
| "learning_rate": 2.369594398082391e-06, |
| "loss": 0.0088, |
| "step": 411 |
| }, |
| { |
| "epoch": 3.4943181818181817, |
| "grad_norm": 0.4799608588218689, |
| "learning_rate": 2.356567512980326e-06, |
| "loss": 0.0046, |
| "step": 412 |
| }, |
| { |
| "epoch": 3.502840909090909, |
| "grad_norm": 0.5917950868606567, |
| "learning_rate": 2.343544534067742e-06, |
| "loss": 0.0064, |
| "step": 413 |
| }, |
| { |
| "epoch": 3.5113636363636362, |
| "grad_norm": 0.5370076894760132, |
| "learning_rate": 2.3305258160078274e-06, |
| "loss": 0.0061, |
| "step": 414 |
| }, |
| { |
| "epoch": 3.5198863636363638, |
| "grad_norm": 0.5539790391921997, |
| "learning_rate": 2.317511713347731e-06, |
| "loss": 0.0057, |
| "step": 415 |
| }, |
| { |
| "epoch": 3.528409090909091, |
| "grad_norm": 0.5244786739349365, |
| "learning_rate": 2.304502580508912e-06, |
| "loss": 0.0077, |
| "step": 416 |
| }, |
| { |
| "epoch": 3.5369318181818183, |
| "grad_norm": 0.5492494106292725, |
| "learning_rate": 2.291498771777478e-06, |
| "loss": 0.008, |
| "step": 417 |
| }, |
| { |
| "epoch": 3.5454545454545454, |
| "grad_norm": 0.5254555940628052, |
| "learning_rate": 2.278500641294543e-06, |
| "loss": 0.007, |
| "step": 418 |
| }, |
| { |
| "epoch": 3.5539772727272725, |
| "grad_norm": 0.7219369411468506, |
| "learning_rate": 2.2655085430465855e-06, |
| "loss": 0.0055, |
| "step": 419 |
| }, |
| { |
| "epoch": 3.5625, |
| "grad_norm": 0.8314915895462036, |
| "learning_rate": 2.252522830855798e-06, |
| "loss": 0.0068, |
| "step": 420 |
| }, |
| { |
| "epoch": 3.5710227272727275, |
| "grad_norm": 0.4091542363166809, |
| "learning_rate": 2.239543858370464e-06, |
| "loss": 0.004, |
| "step": 421 |
| }, |
| { |
| "epoch": 3.5795454545454546, |
| "grad_norm": 0.6792344450950623, |
| "learning_rate": 2.2265719790553147e-06, |
| "loss": 0.0059, |
| "step": 422 |
| }, |
| { |
| "epoch": 3.5880681818181817, |
| "grad_norm": 0.4202214777469635, |
| "learning_rate": 2.2136075461819104e-06, |
| "loss": 0.0077, |
| "step": 423 |
| }, |
| { |
| "epoch": 3.596590909090909, |
| "grad_norm": 0.6968201398849487, |
| "learning_rate": 2.2006509128190196e-06, |
| "loss": 0.0086, |
| "step": 424 |
| }, |
| { |
| "epoch": 3.6051136363636362, |
| "grad_norm": 0.6859596371650696, |
| "learning_rate": 2.1877024318229987e-06, |
| "loss": 0.0055, |
| "step": 425 |
| }, |
| { |
| "epoch": 3.6136363636363638, |
| "grad_norm": 0.5254549980163574, |
| "learning_rate": 2.1747624558281867e-06, |
| "loss": 0.0076, |
| "step": 426 |
| }, |
| { |
| "epoch": 3.622159090909091, |
| "grad_norm": 0.38539767265319824, |
| "learning_rate": 2.1618313372373016e-06, |
| "loss": 0.0058, |
| "step": 427 |
| }, |
| { |
| "epoch": 3.6306818181818183, |
| "grad_norm": 0.5018851161003113, |
| "learning_rate": 2.1489094282118393e-06, |
| "loss": 0.0063, |
| "step": 428 |
| }, |
| { |
| "epoch": 3.6392045454545454, |
| "grad_norm": 0.2640945017337799, |
| "learning_rate": 2.1359970806624886e-06, |
| "loss": 0.0033, |
| "step": 429 |
| }, |
| { |
| "epoch": 3.6477272727272725, |
| "grad_norm": 0.5288704037666321, |
| "learning_rate": 2.1230946462395412e-06, |
| "loss": 0.0081, |
| "step": 430 |
| }, |
| { |
| "epoch": 3.65625, |
| "grad_norm": 0.5245815515518188, |
| "learning_rate": 2.1102024763233224e-06, |
| "loss": 0.0053, |
| "step": 431 |
| }, |
| { |
| "epoch": 3.6647727272727275, |
| "grad_norm": 0.4336767792701721, |
| "learning_rate": 2.0973209220146135e-06, |
| "loss": 0.0057, |
| "step": 432 |
| }, |
| { |
| "epoch": 3.6732954545454546, |
| "grad_norm": 0.6342284679412842, |
| "learning_rate": 2.084450334125096e-06, |
| "loss": 0.0066, |
| "step": 433 |
| }, |
| { |
| "epoch": 3.6818181818181817, |
| "grad_norm": 0.44606292247772217, |
| "learning_rate": 2.071591063167797e-06, |
| "loss": 0.0067, |
| "step": 434 |
| }, |
| { |
| "epoch": 3.690340909090909, |
| "grad_norm": 0.519180953502655, |
| "learning_rate": 2.0587434593475385e-06, |
| "loss": 0.0046, |
| "step": 435 |
| }, |
| { |
| "epoch": 3.6988636363636362, |
| "grad_norm": 0.44359397888183594, |
| "learning_rate": 2.045907872551409e-06, |
| "loss": 0.0052, |
| "step": 436 |
| }, |
| { |
| "epoch": 3.7073863636363638, |
| "grad_norm": 0.43811115622520447, |
| "learning_rate": 2.033084652339226e-06, |
| "loss": 0.0048, |
| "step": 437 |
| }, |
| { |
| "epoch": 3.715909090909091, |
| "grad_norm": 0.5960409045219421, |
| "learning_rate": 2.0202741479340193e-06, |
| "loss": 0.0063, |
| "step": 438 |
| }, |
| { |
| "epoch": 3.7244318181818183, |
| "grad_norm": 0.4575386345386505, |
| "learning_rate": 2.0074767082125244e-06, |
| "loss": 0.0052, |
| "step": 439 |
| }, |
| { |
| "epoch": 3.7329545454545454, |
| "grad_norm": 0.6704768538475037, |
| "learning_rate": 1.9946926816956743e-06, |
| "loss": 0.0075, |
| "step": 440 |
| }, |
| { |
| "epoch": 3.7414772727272725, |
| "grad_norm": 0.44066664576530457, |
| "learning_rate": 1.9819224165391145e-06, |
| "loss": 0.004, |
| "step": 441 |
| }, |
| { |
| "epoch": 3.75, |
| "grad_norm": 0.5848504304885864, |
| "learning_rate": 1.969166260523717e-06, |
| "loss": 0.005, |
| "step": 442 |
| }, |
| { |
| "epoch": 3.7585227272727275, |
| "grad_norm": 0.43681785464286804, |
| "learning_rate": 1.956424561046108e-06, |
| "loss": 0.0041, |
| "step": 443 |
| }, |
| { |
| "epoch": 3.7670454545454546, |
| "grad_norm": 0.31628116965293884, |
| "learning_rate": 1.9436976651092143e-06, |
| "loss": 0.0026, |
| "step": 444 |
| }, |
| { |
| "epoch": 3.7755681818181817, |
| "grad_norm": 0.4530532658100128, |
| "learning_rate": 1.9309859193128055e-06, |
| "loss": 0.0052, |
| "step": 445 |
| }, |
| { |
| "epoch": 3.784090909090909, |
| "grad_norm": 0.258119136095047, |
| "learning_rate": 1.918289669844058e-06, |
| "loss": 0.0025, |
| "step": 446 |
| }, |
| { |
| "epoch": 3.7926136363636362, |
| "grad_norm": 0.40686145424842834, |
| "learning_rate": 1.905609262468128e-06, |
| "loss": 0.005, |
| "step": 447 |
| }, |
| { |
| "epoch": 3.8011363636363638, |
| "grad_norm": 0.4893140196800232, |
| "learning_rate": 1.8929450425187298e-06, |
| "loss": 0.0033, |
| "step": 448 |
| }, |
| { |
| "epoch": 3.809659090909091, |
| "grad_norm": 0.8415848612785339, |
| "learning_rate": 1.880297354888741e-06, |
| "loss": 0.0044, |
| "step": 449 |
| }, |
| { |
| "epoch": 3.8181818181818183, |
| "grad_norm": 0.21848903596401215, |
| "learning_rate": 1.8676665440207982e-06, |
| "loss": 0.0015, |
| "step": 450 |
| }, |
| { |
| "epoch": 3.8267045454545454, |
| "grad_norm": 0.7620615363121033, |
| "learning_rate": 1.8550529538979273e-06, |
| "loss": 0.0069, |
| "step": 451 |
| }, |
| { |
| "epoch": 3.8352272727272725, |
| "grad_norm": 0.35021477937698364, |
| "learning_rate": 1.8424569280341653e-06, |
| "loss": 0.004, |
| "step": 452 |
| }, |
| { |
| "epoch": 3.84375, |
| "grad_norm": 0.477241575717926, |
| "learning_rate": 1.8298788094652158e-06, |
| "loss": 0.0052, |
| "step": 453 |
| }, |
| { |
| "epoch": 3.8522727272727275, |
| "grad_norm": 0.3984035849571228, |
| "learning_rate": 1.817318940739098e-06, |
| "loss": 0.0029, |
| "step": 454 |
| }, |
| { |
| "epoch": 3.8607954545454546, |
| "grad_norm": 0.3424110412597656, |
| "learning_rate": 1.8047776639068204e-06, |
| "loss": 0.0044, |
| "step": 455 |
| }, |
| { |
| "epoch": 3.8693181818181817, |
| "grad_norm": 1.0182089805603027, |
| "learning_rate": 1.7922553205130708e-06, |
| "loss": 0.0027, |
| "step": 456 |
| }, |
| { |
| "epoch": 3.877840909090909, |
| "grad_norm": 0.4957254230976105, |
| "learning_rate": 1.7797522515869062e-06, |
| "loss": 0.0028, |
| "step": 457 |
| }, |
| { |
| "epoch": 3.8863636363636362, |
| "grad_norm": 0.42411452531814575, |
| "learning_rate": 1.767268797632472e-06, |
| "loss": 0.0032, |
| "step": 458 |
| }, |
| { |
| "epoch": 3.8948863636363638, |
| "grad_norm": 0.3485550582408905, |
| "learning_rate": 1.7548052986197258e-06, |
| "loss": 0.0032, |
| "step": 459 |
| }, |
| { |
| "epoch": 3.903409090909091, |
| "grad_norm": 0.3125132918357849, |
| "learning_rate": 1.7423620939751787e-06, |
| "loss": 0.0017, |
| "step": 460 |
| }, |
| { |
| "epoch": 3.9119318181818183, |
| "grad_norm": 0.7568768262863159, |
| "learning_rate": 1.7299395225726533e-06, |
| "loss": 0.006, |
| "step": 461 |
| }, |
| { |
| "epoch": 3.9204545454545454, |
| "grad_norm": 0.7975472807884216, |
| "learning_rate": 1.7175379227240524e-06, |
| "loss": 0.0044, |
| "step": 462 |
| }, |
| { |
| "epoch": 3.9289772727272725, |
| "grad_norm": 0.5542371273040771, |
| "learning_rate": 1.7051576321701493e-06, |
| "loss": 0.0034, |
| "step": 463 |
| }, |
| { |
| "epoch": 3.9375, |
| "grad_norm": 0.3784295916557312, |
| "learning_rate": 1.6927989880713852e-06, |
| "loss": 0.0024, |
| "step": 464 |
| }, |
| { |
| "epoch": 3.9460227272727275, |
| "grad_norm": 0.7081852555274963, |
| "learning_rate": 1.680462326998689e-06, |
| "loss": 0.0047, |
| "step": 465 |
| }, |
| { |
| "epoch": 3.9545454545454546, |
| "grad_norm": 0.268210232257843, |
| "learning_rate": 1.6681479849243153e-06, |
| "loss": 0.0019, |
| "step": 466 |
| }, |
| { |
| "epoch": 3.9630681818181817, |
| "grad_norm": 0.8834506273269653, |
| "learning_rate": 1.6558562972126869e-06, |
| "loss": 0.0025, |
| "step": 467 |
| }, |
| { |
| "epoch": 3.971590909090909, |
| "grad_norm": 0.8946535587310791, |
| "learning_rate": 1.6435875986112685e-06, |
| "loss": 0.0035, |
| "step": 468 |
| }, |
| { |
| "epoch": 3.9801136363636362, |
| "grad_norm": 0.37636420130729675, |
| "learning_rate": 1.631342223241446e-06, |
| "loss": 0.0035, |
| "step": 469 |
| }, |
| { |
| "epoch": 3.9886363636363638, |
| "grad_norm": 0.25573107600212097, |
| "learning_rate": 1.6191205045894283e-06, |
| "loss": 0.0021, |
| "step": 470 |
| }, |
| { |
| "epoch": 3.997159090909091, |
| "grad_norm": 0.6776405572891235, |
| "learning_rate": 1.6069227754971683e-06, |
| "loss": 0.0048, |
| "step": 471 |
| }, |
| { |
| "epoch": 4.0, |
| "grad_norm": 0.6776405572891235, |
| "learning_rate": 1.594749368153292e-06, |
| "loss": 0.0021, |
| "step": 472 |
| }, |
| { |
| "epoch": 4.0085227272727275, |
| "grad_norm": 0.7230031490325928, |
| "learning_rate": 1.5826006140840593e-06, |
| "loss": 0.0017, |
| "step": 473 |
| }, |
| { |
| "epoch": 4.017045454545454, |
| "grad_norm": 0.28904300928115845, |
| "learning_rate": 1.570476844144329e-06, |
| "loss": 0.0012, |
| "step": 474 |
| }, |
| { |
| "epoch": 4.025568181818182, |
| "grad_norm": 0.2574220895767212, |
| "learning_rate": 1.5583783885085513e-06, |
| "loss": 0.0015, |
| "step": 475 |
| }, |
| { |
| "epoch": 4.034090909090909, |
| "grad_norm": 0.27897921204566956, |
| "learning_rate": 1.5463055766617763e-06, |
| "loss": 0.002, |
| "step": 476 |
| }, |
| { |
| "epoch": 4.042613636363637, |
| "grad_norm": 0.32508671283721924, |
| "learning_rate": 1.5342587373906798e-06, |
| "loss": 0.0009, |
| "step": 477 |
| }, |
| { |
| "epoch": 4.051136363636363, |
| "grad_norm": 0.30587589740753174, |
| "learning_rate": 1.5222381987746104e-06, |
| "loss": 0.0019, |
| "step": 478 |
| }, |
| { |
| "epoch": 4.059659090909091, |
| "grad_norm": 0.5016082525253296, |
| "learning_rate": 1.5102442881766533e-06, |
| "loss": 0.0012, |
| "step": 479 |
| }, |
| { |
| "epoch": 4.068181818181818, |
| "grad_norm": 0.28716009855270386, |
| "learning_rate": 1.4982773322347144e-06, |
| "loss": 0.0015, |
| "step": 480 |
| }, |
| { |
| "epoch": 4.076704545454546, |
| "grad_norm": 0.1832437366247177, |
| "learning_rate": 1.48633765685263e-06, |
| "loss": 0.0023, |
| "step": 481 |
| }, |
| { |
| "epoch": 4.0852272727272725, |
| "grad_norm": 0.35543736815452576, |
| "learning_rate": 1.4744255871912825e-06, |
| "loss": 0.0024, |
| "step": 482 |
| }, |
| { |
| "epoch": 4.09375, |
| "grad_norm": 0.38490474224090576, |
| "learning_rate": 1.4625414476597545e-06, |
| "loss": 0.003, |
| "step": 483 |
| }, |
| { |
| "epoch": 4.1022727272727275, |
| "grad_norm": 0.17410258948802948, |
| "learning_rate": 1.4506855619064847e-06, |
| "loss": 0.0009, |
| "step": 484 |
| }, |
| { |
| "epoch": 4.110795454545454, |
| "grad_norm": 0.30948376655578613, |
| "learning_rate": 1.4388582528104628e-06, |
| "loss": 0.0012, |
| "step": 485 |
| }, |
| { |
| "epoch": 4.119318181818182, |
| "grad_norm": 0.41439351439476013, |
| "learning_rate": 1.4270598424724291e-06, |
| "loss": 0.0017, |
| "step": 486 |
| }, |
| { |
| "epoch": 4.127840909090909, |
| "grad_norm": 1.3294129371643066, |
| "learning_rate": 1.415290652206105e-06, |
| "loss": 0.0019, |
| "step": 487 |
| }, |
| { |
| "epoch": 4.136363636363637, |
| "grad_norm": 0.2371089905500412, |
| "learning_rate": 1.4035510025294463e-06, |
| "loss": 0.0023, |
| "step": 488 |
| }, |
| { |
| "epoch": 4.144886363636363, |
| "grad_norm": 0.9173463582992554, |
| "learning_rate": 1.3918412131559073e-06, |
| "loss": 0.0019, |
| "step": 489 |
| }, |
| { |
| "epoch": 4.153409090909091, |
| "grad_norm": 0.9328559041023254, |
| "learning_rate": 1.380161602985738e-06, |
| "loss": 0.002, |
| "step": 490 |
| }, |
| { |
| "epoch": 4.161931818181818, |
| "grad_norm": 0.2602047026157379, |
| "learning_rate": 1.3685124900973013e-06, |
| "loss": 0.0013, |
| "step": 491 |
| }, |
| { |
| "epoch": 4.170454545454546, |
| "grad_norm": 0.19252748787403107, |
| "learning_rate": 1.3568941917384038e-06, |
| "loss": 0.0008, |
| "step": 492 |
| }, |
| { |
| "epoch": 4.1789772727272725, |
| "grad_norm": 0.259917289018631, |
| "learning_rate": 1.3453070243176646e-06, |
| "loss": 0.002, |
| "step": 493 |
| }, |
| { |
| "epoch": 4.1875, |
| "grad_norm": 0.4336180090904236, |
| "learning_rate": 1.3337513033958904e-06, |
| "loss": 0.0012, |
| "step": 494 |
| }, |
| { |
| "epoch": 4.1960227272727275, |
| "grad_norm": 0.3246527910232544, |
| "learning_rate": 1.3222273436774857e-06, |
| "loss": 0.0009, |
| "step": 495 |
| }, |
| { |
| "epoch": 4.204545454545454, |
| "grad_norm": 0.6794546246528625, |
| "learning_rate": 1.310735459001884e-06, |
| "loss": 0.001, |
| "step": 496 |
| }, |
| { |
| "epoch": 4.213068181818182, |
| "grad_norm": 0.3761320114135742, |
| "learning_rate": 1.2992759623349946e-06, |
| "loss": 0.0015, |
| "step": 497 |
| }, |
| { |
| "epoch": 4.221590909090909, |
| "grad_norm": 0.37112557888031006, |
| "learning_rate": 1.2878491657606874e-06, |
| "loss": 0.002, |
| "step": 498 |
| }, |
| { |
| "epoch": 4.230113636363637, |
| "grad_norm": 0.29353010654449463, |
| "learning_rate": 1.2764553804722868e-06, |
| "loss": 0.0005, |
| "step": 499 |
| }, |
| { |
| "epoch": 4.238636363636363, |
| "grad_norm": 0.16569066047668457, |
| "learning_rate": 1.2650949167640997e-06, |
| "loss": 0.0006, |
| "step": 500 |
| }, |
| { |
| "epoch": 4.247159090909091, |
| "grad_norm": 0.2684345543384552, |
| "learning_rate": 1.2537680840229665e-06, |
| "loss": 0.0018, |
| "step": 501 |
| }, |
| { |
| "epoch": 4.255681818181818, |
| "grad_norm": 1.2519632577896118, |
| "learning_rate": 1.2424751907198312e-06, |
| "loss": 0.001, |
| "step": 502 |
| }, |
| { |
| "epoch": 4.264204545454546, |
| "grad_norm": 0.4186049997806549, |
| "learning_rate": 1.2312165444013473e-06, |
| "loss": 0.0013, |
| "step": 503 |
| }, |
| { |
| "epoch": 4.2727272727272725, |
| "grad_norm": 0.32475754618644714, |
| "learning_rate": 1.219992451681494e-06, |
| "loss": 0.0009, |
| "step": 504 |
| }, |
| { |
| "epoch": 4.28125, |
| "grad_norm": 1.013559341430664, |
| "learning_rate": 1.2088032182332307e-06, |
| "loss": 0.0019, |
| "step": 505 |
| }, |
| { |
| "epoch": 4.2897727272727275, |
| "grad_norm": 0.459043949842453, |
| "learning_rate": 1.1976491487801747e-06, |
| "loss": 0.001, |
| "step": 506 |
| }, |
| { |
| "epoch": 4.298295454545454, |
| "grad_norm": 0.5227071642875671, |
| "learning_rate": 1.1865305470882955e-06, |
| "loss": 0.0014, |
| "step": 507 |
| }, |
| { |
| "epoch": 4.306818181818182, |
| "grad_norm": 0.23663417994976044, |
| "learning_rate": 1.17544771595765e-06, |
| "loss": 0.001, |
| "step": 508 |
| }, |
| { |
| "epoch": 4.315340909090909, |
| "grad_norm": 0.2578096091747284, |
| "learning_rate": 1.1644009572141293e-06, |
| "loss": 0.0012, |
| "step": 509 |
| }, |
| { |
| "epoch": 4.323863636363637, |
| "grad_norm": 0.9903965592384338, |
| "learning_rate": 1.1533905717012425e-06, |
| "loss": 0.002, |
| "step": 510 |
| }, |
| { |
| "epoch": 4.332386363636363, |
| "grad_norm": 0.34067901968955994, |
| "learning_rate": 1.1424168592719257e-06, |
| "loss": 0.0012, |
| "step": 511 |
| }, |
| { |
| "epoch": 4.340909090909091, |
| "grad_norm": 0.11968725919723511, |
| "learning_rate": 1.1314801187803687e-06, |
| "loss": 0.0003, |
| "step": 512 |
| }, |
| { |
| "epoch": 4.349431818181818, |
| "grad_norm": 0.12175023555755615, |
| "learning_rate": 1.1205806480738852e-06, |
| "loss": 0.0008, |
| "step": 513 |
| }, |
| { |
| "epoch": 4.357954545454546, |
| "grad_norm": 0.18433789908885956, |
| "learning_rate": 1.109718743984794e-06, |
| "loss": 0.0014, |
| "step": 514 |
| }, |
| { |
| "epoch": 4.3664772727272725, |
| "grad_norm": 0.17771068215370178, |
| "learning_rate": 1.0988947023223374e-06, |
| "loss": 0.0008, |
| "step": 515 |
| }, |
| { |
| "epoch": 4.375, |
| "grad_norm": 0.26217710971832275, |
| "learning_rate": 1.0881088178646291e-06, |
| "loss": 0.0011, |
| "step": 516 |
| }, |
| { |
| "epoch": 4.3835227272727275, |
| "grad_norm": 0.22049932181835175, |
| "learning_rate": 1.0773613843506201e-06, |
| "loss": 0.0007, |
| "step": 517 |
| }, |
| { |
| "epoch": 4.392045454545454, |
| "grad_norm": 0.3353421688079834, |
| "learning_rate": 1.0666526944721017e-06, |
| "loss": 0.0012, |
| "step": 518 |
| }, |
| { |
| "epoch": 4.400568181818182, |
| "grad_norm": 0.5478553771972656, |
| "learning_rate": 1.0559830398657387e-06, |
| "loss": 0.002, |
| "step": 519 |
| }, |
| { |
| "epoch": 4.409090909090909, |
| "grad_norm": 0.1836828738451004, |
| "learning_rate": 1.0453527111051183e-06, |
| "loss": 0.0018, |
| "step": 520 |
| }, |
| { |
| "epoch": 4.417613636363637, |
| "grad_norm": 2.7253522872924805, |
| "learning_rate": 1.0347619976928447e-06, |
| "loss": 0.0004, |
| "step": 521 |
| }, |
| { |
| "epoch": 4.426136363636363, |
| "grad_norm": 0.1142822802066803, |
| "learning_rate": 1.0242111880526495e-06, |
| "loss": 0.0007, |
| "step": 522 |
| }, |
| { |
| "epoch": 4.434659090909091, |
| "grad_norm": 0.27105048298835754, |
| "learning_rate": 1.013700569521542e-06, |
| "loss": 0.0008, |
| "step": 523 |
| }, |
| { |
| "epoch": 4.443181818181818, |
| "grad_norm": 0.3742014467716217, |
| "learning_rate": 1.0032304283419792e-06, |
| "loss": 0.0007, |
| "step": 524 |
| }, |
| { |
| "epoch": 4.451704545454546, |
| "grad_norm": 0.2388819307088852, |
| "learning_rate": 9.92801049654071e-07, |
| "loss": 0.002, |
| "step": 525 |
| }, |
| { |
| "epoch": 4.4602272727272725, |
| "grad_norm": 0.2067682445049286, |
| "learning_rate": 9.824127174878196e-07, |
| "loss": 0.0009, |
| "step": 526 |
| }, |
| { |
| "epoch": 4.46875, |
| "grad_norm": 0.2894744277000427, |
| "learning_rate": 9.72065714755377e-07, |
| "loss": 0.0009, |
| "step": 527 |
| }, |
| { |
| "epoch": 4.4772727272727275, |
| "grad_norm": 0.07446195930242538, |
| "learning_rate": 9.617603232433475e-07, |
| "loss": 0.0006, |
| "step": 528 |
| }, |
| { |
| "epoch": 4.485795454545454, |
| "grad_norm": 1.2312819957733154, |
| "learning_rate": 9.514968236051081e-07, |
| "loss": 0.0011, |
| "step": 529 |
| }, |
| { |
| "epoch": 4.494318181818182, |
| "grad_norm": 0.15863823890686035, |
| "learning_rate": 9.412754953531664e-07, |
| "loss": 0.0009, |
| "step": 530 |
| }, |
| { |
| "epoch": 4.502840909090909, |
| "grad_norm": 0.6933857798576355, |
| "learning_rate": 9.310966168515528e-07, |
| "loss": 0.0007, |
| "step": 531 |
| }, |
| { |
| "epoch": 4.511363636363637, |
| "grad_norm": 0.09022935479879379, |
| "learning_rate": 9.209604653082326e-07, |
| "loss": 0.0007, |
| "step": 532 |
| }, |
| { |
| "epoch": 4.519886363636363, |
| "grad_norm": 0.07025769352912903, |
| "learning_rate": 9.108673167675635e-07, |
| "loss": 0.0003, |
| "step": 533 |
| }, |
| { |
| "epoch": 4.528409090909091, |
| "grad_norm": 0.10223375260829926, |
| "learning_rate": 9.008174461027724e-07, |
| "loss": 0.0005, |
| "step": 534 |
| }, |
| { |
| "epoch": 4.536931818181818, |
| "grad_norm": 0.15842217206954956, |
| "learning_rate": 8.908111270084718e-07, |
| "loss": 0.0003, |
| "step": 535 |
| }, |
| { |
| "epoch": 4.545454545454545, |
| "grad_norm": 0.16270683705806732, |
| "learning_rate": 8.808486319932083e-07, |
| "loss": 0.0008, |
| "step": 536 |
| }, |
| { |
| "epoch": 4.5539772727272725, |
| "grad_norm": 0.07918278872966766, |
| "learning_rate": 8.709302323720367e-07, |
| "loss": 0.0003, |
| "step": 537 |
| }, |
| { |
| "epoch": 4.5625, |
| "grad_norm": 0.09812464565038681, |
| "learning_rate": 8.610561982591356e-07, |
| "loss": 0.0005, |
| "step": 538 |
| }, |
| { |
| "epoch": 4.5710227272727275, |
| "grad_norm": 0.25634539127349854, |
| "learning_rate": 8.512267985604475e-07, |
| "loss": 0.0009, |
| "step": 539 |
| }, |
| { |
| "epoch": 4.579545454545455, |
| "grad_norm": 0.13367760181427002, |
| "learning_rate": 8.414423009663564e-07, |
| "loss": 0.0005, |
| "step": 540 |
| }, |
| { |
| "epoch": 4.588068181818182, |
| "grad_norm": 0.09154052287340164, |
| "learning_rate": 8.317029719444017e-07, |
| "loss": 0.0006, |
| "step": 541 |
| }, |
| { |
| "epoch": 4.596590909090909, |
| "grad_norm": 0.18064351379871368, |
| "learning_rate": 8.220090767320138e-07, |
| "loss": 0.0005, |
| "step": 542 |
| }, |
| { |
| "epoch": 4.605113636363637, |
| "grad_norm": 0.10706845670938492, |
| "learning_rate": 8.123608793292987e-07, |
| "loss": 0.0008, |
| "step": 543 |
| }, |
| { |
| "epoch": 4.613636363636363, |
| "grad_norm": 0.1352279931306839, |
| "learning_rate": 8.027586424918413e-07, |
| "loss": 0.0005, |
| "step": 544 |
| }, |
| { |
| "epoch": 4.622159090909091, |
| "grad_norm": 0.23175151646137238, |
| "learning_rate": 7.932026277235533e-07, |
| "loss": 0.0008, |
| "step": 545 |
| }, |
| { |
| "epoch": 4.630681818181818, |
| "grad_norm": 0.13066157698631287, |
| "learning_rate": 7.836930952695535e-07, |
| "loss": 0.0008, |
| "step": 546 |
| }, |
| { |
| "epoch": 4.639204545454545, |
| "grad_norm": 0.1292756050825119, |
| "learning_rate": 7.74230304109074e-07, |
| "loss": 0.0005, |
| "step": 547 |
| }, |
| { |
| "epoch": 4.6477272727272725, |
| "grad_norm": 0.07149559259414673, |
| "learning_rate": 7.648145119484152e-07, |
| "loss": 0.0003, |
| "step": 548 |
| }, |
| { |
| "epoch": 4.65625, |
| "grad_norm": 0.1683271825313568, |
| "learning_rate": 7.554459752139204e-07, |
| "loss": 0.0009, |
| "step": 549 |
| }, |
| { |
| "epoch": 4.6647727272727275, |
| "grad_norm": 0.10963233560323715, |
| "learning_rate": 7.461249490449954e-07, |
| "loss": 0.0007, |
| "step": 550 |
| }, |
| { |
| "epoch": 4.673295454545455, |
| "grad_norm": 0.1800011694431305, |
| "learning_rate": 7.368516872871622e-07, |
| "loss": 0.0008, |
| "step": 551 |
| }, |
| { |
| "epoch": 4.681818181818182, |
| "grad_norm": 0.12762947380542755, |
| "learning_rate": 7.276264424851426e-07, |
| "loss": 0.0004, |
| "step": 552 |
| }, |
| { |
| "epoch": 4.690340909090909, |
| "grad_norm": 0.08830232918262482, |
| "learning_rate": 7.184494658759808e-07, |
| "loss": 0.0004, |
| "step": 553 |
| }, |
| { |
| "epoch": 4.698863636363637, |
| "grad_norm": 0.149992436170578, |
| "learning_rate": 7.093210073822027e-07, |
| "loss": 0.001, |
| "step": 554 |
| }, |
| { |
| "epoch": 4.707386363636363, |
| "grad_norm": 0.09241579473018646, |
| "learning_rate": 7.002413156050109e-07, |
| "loss": 0.0007, |
| "step": 555 |
| }, |
| { |
| "epoch": 4.715909090909091, |
| "grad_norm": 0.11096753925085068, |
| "learning_rate": 6.912106378175098e-07, |
| "loss": 0.0007, |
| "step": 556 |
| }, |
| { |
| "epoch": 4.724431818181818, |
| "grad_norm": 0.10129871964454651, |
| "learning_rate": 6.82229219957975e-07, |
| "loss": 0.0004, |
| "step": 557 |
| }, |
| { |
| "epoch": 4.732954545454545, |
| "grad_norm": 0.06802051514387131, |
| "learning_rate": 6.732973066231563e-07, |
| "loss": 0.0004, |
| "step": 558 |
| }, |
| { |
| "epoch": 4.7414772727272725, |
| "grad_norm": 0.12237358093261719, |
| "learning_rate": 6.644151410616123e-07, |
| "loss": 0.0007, |
| "step": 559 |
| }, |
| { |
| "epoch": 4.75, |
| "grad_norm": 0.15545684099197388, |
| "learning_rate": 6.555829651670912e-07, |
| "loss": 0.0013, |
| "step": 560 |
| }, |
| { |
| "epoch": 4.7585227272727275, |
| "grad_norm": 0.1454661637544632, |
| "learning_rate": 6.468010194719376e-07, |
| "loss": 0.0005, |
| "step": 561 |
| }, |
| { |
| "epoch": 4.767045454545455, |
| "grad_norm": 0.25689929723739624, |
| "learning_rate": 6.380695431405453e-07, |
| "loss": 0.0011, |
| "step": 562 |
| }, |
| { |
| "epoch": 4.775568181818182, |
| "grad_norm": 0.15160903334617615, |
| "learning_rate": 6.29388773962846e-07, |
| "loss": 0.0004, |
| "step": 563 |
| }, |
| { |
| "epoch": 4.784090909090909, |
| "grad_norm": 0.1834772229194641, |
| "learning_rate": 6.207589483478266e-07, |
| "loss": 0.0003, |
| "step": 564 |
| }, |
| { |
| "epoch": 4.792613636363637, |
| "grad_norm": 0.17777854204177856, |
| "learning_rate": 6.121803013170988e-07, |
| "loss": 0.0013, |
| "step": 565 |
| }, |
| { |
| "epoch": 4.801136363636363, |
| "grad_norm": 0.17234425246715546, |
| "learning_rate": 6.036530664984922e-07, |
| "loss": 0.0006, |
| "step": 566 |
| }, |
| { |
| "epoch": 4.809659090909091, |
| "grad_norm": 0.16570787131786346, |
| "learning_rate": 5.951774761196955e-07, |
| "loss": 0.0008, |
| "step": 567 |
| }, |
| { |
| "epoch": 4.818181818181818, |
| "grad_norm": 0.20983879268169403, |
| "learning_rate": 5.867537610019317e-07, |
| "loss": 0.0007, |
| "step": 568 |
| }, |
| { |
| "epoch": 4.826704545454545, |
| "grad_norm": 0.13165390491485596, |
| "learning_rate": 5.783821505536696e-07, |
| "loss": 0.0008, |
| "step": 569 |
| }, |
| { |
| "epoch": 4.8352272727272725, |
| "grad_norm": 0.06456772238016129, |
| "learning_rate": 5.700628727643806e-07, |
| "loss": 0.0004, |
| "step": 570 |
| }, |
| { |
| "epoch": 4.84375, |
| "grad_norm": 0.14952756464481354, |
| "learning_rate": 5.617961541983244e-07, |
| "loss": 0.0007, |
| "step": 571 |
| }, |
| { |
| "epoch": 4.8522727272727275, |
| "grad_norm": 0.1199275404214859, |
| "learning_rate": 5.53582219988382e-07, |
| "loss": 0.0005, |
| "step": 572 |
| }, |
| { |
| "epoch": 4.860795454545455, |
| "grad_norm": 0.18303653597831726, |
| "learning_rate": 5.454212938299256e-07, |
| "loss": 0.0003, |
| "step": 573 |
| }, |
| { |
| "epoch": 4.869318181818182, |
| "grad_norm": 0.1395275741815567, |
| "learning_rate": 5.373135979747226e-07, |
| "loss": 0.0004, |
| "step": 574 |
| }, |
| { |
| "epoch": 4.877840909090909, |
| "grad_norm": 0.18017546832561493, |
| "learning_rate": 5.292593532248877e-07, |
| "loss": 0.0004, |
| "step": 575 |
| }, |
| { |
| "epoch": 4.886363636363637, |
| "grad_norm": 0.08687930554151535, |
| "learning_rate": 5.21258778926865e-07, |
| "loss": 0.0003, |
| "step": 576 |
| }, |
| { |
| "epoch": 4.894886363636363, |
| "grad_norm": 0.206526979804039, |
| "learning_rate": 5.133120929654567e-07, |
| "loss": 0.001, |
| "step": 577 |
| }, |
| { |
| "epoch": 4.903409090909091, |
| "grad_norm": 0.09521065652370453, |
| "learning_rate": 5.054195117578914e-07, |
| "loss": 0.0008, |
| "step": 578 |
| }, |
| { |
| "epoch": 4.911931818181818, |
| "grad_norm": 0.08821254968643188, |
| "learning_rate": 4.975812502479249e-07, |
| "loss": 0.0007, |
| "step": 579 |
| }, |
| { |
| "epoch": 4.920454545454545, |
| "grad_norm": 0.10224071145057678, |
| "learning_rate": 4.897975218999926e-07, |
| "loss": 0.0011, |
| "step": 580 |
| }, |
| { |
| "epoch": 4.9289772727272725, |
| "grad_norm": 0.22748161852359772, |
| "learning_rate": 4.82068538693391e-07, |
| "loss": 0.0006, |
| "step": 581 |
| }, |
| { |
| "epoch": 4.9375, |
| "grad_norm": 0.07090436667203903, |
| "learning_rate": 4.7439451111650685e-07, |
| "loss": 0.0003, |
| "step": 582 |
| }, |
| { |
| "epoch": 4.9460227272727275, |
| "grad_norm": 0.05179463326931, |
| "learning_rate": 4.667756481610866e-07, |
| "loss": 0.0002, |
| "step": 583 |
| }, |
| { |
| "epoch": 4.954545454545455, |
| "grad_norm": 0.04754333943128586, |
| "learning_rate": 4.5921215731654144e-07, |
| "loss": 0.0003, |
| "step": 584 |
| }, |
| { |
| "epoch": 4.963068181818182, |
| "grad_norm": 0.08957211673259735, |
| "learning_rate": 4.51704244564298e-07, |
| "loss": 0.0005, |
| "step": 585 |
| }, |
| { |
| "epoch": 4.971590909090909, |
| "grad_norm": 0.14774544537067413, |
| "learning_rate": 4.4425211437218926e-07, |
| "loss": 0.0008, |
| "step": 586 |
| }, |
| { |
| "epoch": 4.980113636363637, |
| "grad_norm": 0.07764644920825958, |
| "learning_rate": 4.3685596968888686e-07, |
| "loss": 0.0004, |
| "step": 587 |
| }, |
| { |
| "epoch": 4.988636363636363, |
| "grad_norm": 0.0783153623342514, |
| "learning_rate": 4.2951601193837124e-07, |
| "loss": 0.0004, |
| "step": 588 |
| }, |
| { |
| "epoch": 4.997159090909091, |
| "grad_norm": 0.07887904345989227, |
| "learning_rate": 4.2223244101444795e-07, |
| "loss": 0.0004, |
| "step": 589 |
| }, |
| { |
| "epoch": 5.0, |
| "grad_norm": 0.07887904345989227, |
| "learning_rate": 4.150054552753055e-07, |
| "loss": 0.0001, |
| "step": 590 |
| }, |
| { |
| "epoch": 5.0085227272727275, |
| "grad_norm": 0.09780554473400116, |
| "learning_rate": 4.078352515381098e-07, |
| "loss": 0.0006, |
| "step": 591 |
| }, |
| { |
| "epoch": 5.017045454545454, |
| "grad_norm": 0.13108158111572266, |
| "learning_rate": 4.0072202507364543e-07, |
| "loss": 0.0004, |
| "step": 592 |
| }, |
| { |
| "epoch": 5.025568181818182, |
| "grad_norm": 0.12491384893655777, |
| "learning_rate": 3.9366596960100063e-07, |
| "loss": 0.0005, |
| "step": 593 |
| }, |
| { |
| "epoch": 5.034090909090909, |
| "grad_norm": 0.06136389449238777, |
| "learning_rate": 3.866672772822863e-07, |
| "loss": 0.0003, |
| "step": 594 |
| }, |
| { |
| "epoch": 5.042613636363637, |
| "grad_norm": 0.04369005933403969, |
| "learning_rate": 3.797261387174081e-07, |
| "loss": 0.0002, |
| "step": 595 |
| }, |
| { |
| "epoch": 5.051136363636363, |
| "grad_norm": 0.047066204249858856, |
| "learning_rate": 3.728427429388709e-07, |
| "loss": 0.0005, |
| "step": 596 |
| }, |
| { |
| "epoch": 5.059659090909091, |
| "grad_norm": 0.11412976682186127, |
| "learning_rate": 3.6601727740663396e-07, |
| "loss": 0.0003, |
| "step": 597 |
| }, |
| { |
| "epoch": 5.068181818181818, |
| "grad_norm": 0.06502807140350342, |
| "learning_rate": 3.592499280030057e-07, |
| "loss": 0.0005, |
| "step": 598 |
| }, |
| { |
| "epoch": 5.076704545454546, |
| "grad_norm": 0.0838172435760498, |
| "learning_rate": 3.5254087902757835e-07, |
| "loss": 0.0002, |
| "step": 599 |
| }, |
| { |
| "epoch": 5.0852272727272725, |
| "grad_norm": 0.06359241157770157, |
| "learning_rate": 3.458903131922134e-07, |
| "loss": 0.0003, |
| "step": 600 |
| }, |
| { |
| "epoch": 5.09375, |
| "grad_norm": 0.06850689649581909, |
| "learning_rate": 3.3929841161606165e-07, |
| "loss": 0.0003, |
| "step": 601 |
| }, |
| { |
| "epoch": 5.1022727272727275, |
| "grad_norm": 0.0888059064745903, |
| "learning_rate": 3.3276535382063184e-07, |
| "loss": 0.0004, |
| "step": 602 |
| }, |
| { |
| "epoch": 5.110795454545454, |
| "grad_norm": 0.08776720613241196, |
| "learning_rate": 3.262913177249044e-07, |
| "loss": 0.0006, |
| "step": 603 |
| }, |
| { |
| "epoch": 5.119318181818182, |
| "grad_norm": 0.07846678793430328, |
| "learning_rate": 3.1987647964048075e-07, |
| "loss": 0.0004, |
| "step": 604 |
| }, |
| { |
| "epoch": 5.127840909090909, |
| "grad_norm": 0.07031604647636414, |
| "learning_rate": 3.135210142667874e-07, |
| "loss": 0.0003, |
| "step": 605 |
| }, |
| { |
| "epoch": 5.136363636363637, |
| "grad_norm": 0.04869723320007324, |
| "learning_rate": 3.07225094686314e-07, |
| "loss": 0.0003, |
| "step": 606 |
| }, |
| { |
| "epoch": 5.144886363636363, |
| "grad_norm": 0.09217824041843414, |
| "learning_rate": 3.009888923599003e-07, |
| "loss": 0.0003, |
| "step": 607 |
| }, |
| { |
| "epoch": 5.153409090909091, |
| "grad_norm": 0.07268112152814865, |
| "learning_rate": 2.9481257712206974e-07, |
| "loss": 0.0004, |
| "step": 608 |
| }, |
| { |
| "epoch": 5.161931818181818, |
| "grad_norm": 0.10733587294816971, |
| "learning_rate": 2.886963171764004e-07, |
| "loss": 0.0007, |
| "step": 609 |
| }, |
| { |
| "epoch": 5.170454545454546, |
| "grad_norm": 0.09093357622623444, |
| "learning_rate": 2.8264027909094715e-07, |
| "loss": 0.0004, |
| "step": 610 |
| }, |
| { |
| "epoch": 5.1789772727272725, |
| "grad_norm": 0.0977114662528038, |
| "learning_rate": 2.7664462779370293e-07, |
| "loss": 0.0004, |
| "step": 611 |
| }, |
| { |
| "epoch": 5.1875, |
| "grad_norm": 0.07888097316026688, |
| "learning_rate": 2.707095265681081e-07, |
| "loss": 0.0003, |
| "step": 612 |
| }, |
| { |
| "epoch": 5.1960227272727275, |
| "grad_norm": 0.06542014330625534, |
| "learning_rate": 2.648351370486052e-07, |
| "loss": 0.0007, |
| "step": 613 |
| }, |
| { |
| "epoch": 5.204545454545454, |
| "grad_norm": 0.12074026465415955, |
| "learning_rate": 2.5902161921623454e-07, |
| "loss": 0.0004, |
| "step": 614 |
| }, |
| { |
| "epoch": 5.213068181818182, |
| "grad_norm": 0.06304474920034409, |
| "learning_rate": 2.5326913139427924e-07, |
| "loss": 0.0004, |
| "step": 615 |
| }, |
| { |
| "epoch": 5.221590909090909, |
| "grad_norm": 0.1032005101442337, |
| "learning_rate": 2.4757783024395244e-07, |
| "loss": 0.0007, |
| "step": 616 |
| }, |
| { |
| "epoch": 5.230113636363637, |
| "grad_norm": 0.09077300131320953, |
| "learning_rate": 2.4194787076013025e-07, |
| "loss": 0.0004, |
| "step": 617 |
| }, |
| { |
| "epoch": 5.238636363636363, |
| "grad_norm": 0.046764302998781204, |
| "learning_rate": 2.3637940626713346e-07, |
| "loss": 0.0003, |
| "step": 618 |
| }, |
| { |
| "epoch": 5.247159090909091, |
| "grad_norm": 0.14630404114723206, |
| "learning_rate": 2.308725884145488e-07, |
| "loss": 0.0013, |
| "step": 619 |
| }, |
| { |
| "epoch": 5.255681818181818, |
| "grad_norm": 0.1074412390589714, |
| "learning_rate": 2.254275671731007e-07, |
| "loss": 0.0002, |
| "step": 620 |
| }, |
| { |
| "epoch": 5.264204545454546, |
| "grad_norm": 0.03694937378168106, |
| "learning_rate": 2.2004449083056574e-07, |
| "loss": 0.0004, |
| "step": 621 |
| }, |
| { |
| "epoch": 5.2727272727272725, |
| "grad_norm": 0.06688185781240463, |
| "learning_rate": 2.14723505987737e-07, |
| "loss": 0.0002, |
| "step": 622 |
| }, |
| { |
| "epoch": 5.28125, |
| "grad_norm": 0.03325579687952995, |
| "learning_rate": 2.0946475755442758e-07, |
| "loss": 0.0006, |
| "step": 623 |
| }, |
| { |
| "epoch": 5.2897727272727275, |
| "grad_norm": 0.0761151984333992, |
| "learning_rate": 2.0426838874552713e-07, |
| "loss": 0.0005, |
| "step": 624 |
| }, |
| { |
| "epoch": 5.298295454545454, |
| "grad_norm": 0.11030630767345428, |
| "learning_rate": 1.9913454107710172e-07, |
| "loss": 0.0002, |
| "step": 625 |
| }, |
| { |
| "epoch": 5.306818181818182, |
| "grad_norm": 0.036464713513851166, |
| "learning_rate": 1.9406335436253727e-07, |
| "loss": 0.0003, |
| "step": 626 |
| }, |
| { |
| "epoch": 5.315340909090909, |
| "grad_norm": 0.057171326130628586, |
| "learning_rate": 1.890549667087338e-07, |
| "loss": 0.0002, |
| "step": 627 |
| }, |
| { |
| "epoch": 5.323863636363637, |
| "grad_norm": 0.06663847714662552, |
| "learning_rate": 1.8410951451234533e-07, |
| "loss": 0.0002, |
| "step": 628 |
| }, |
| { |
| "epoch": 5.332386363636363, |
| "grad_norm": 0.04272397607564926, |
| "learning_rate": 1.7922713245606183e-07, |
| "loss": 0.0003, |
| "step": 629 |
| }, |
| { |
| "epoch": 5.340909090909091, |
| "grad_norm": 0.05067570507526398, |
| "learning_rate": 1.7440795350494588e-07, |
| "loss": 0.0003, |
| "step": 630 |
| }, |
| { |
| "epoch": 5.349431818181818, |
| "grad_norm": 0.11823557317256927, |
| "learning_rate": 1.6965210890280693e-07, |
| "loss": 0.0005, |
| "step": 631 |
| }, |
| { |
| "epoch": 5.357954545454546, |
| "grad_norm": 0.1108795702457428, |
| "learning_rate": 1.649597281686302e-07, |
| "loss": 0.0006, |
| "step": 632 |
| }, |
| { |
| "epoch": 5.3664772727272725, |
| "grad_norm": 0.0415506586432457, |
| "learning_rate": 1.6033093909304853e-07, |
| "loss": 0.0004, |
| "step": 633 |
| }, |
| { |
| "epoch": 5.375, |
| "grad_norm": 0.08474580943584442, |
| "learning_rate": 1.5576586773486198e-07, |
| "loss": 0.0003, |
| "step": 634 |
| }, |
| { |
| "epoch": 5.3835227272727275, |
| "grad_norm": 0.48341888189315796, |
| "learning_rate": 1.5126463841760475e-07, |
| "loss": 0.0009, |
| "step": 635 |
| }, |
| { |
| "epoch": 5.392045454545454, |
| "grad_norm": 0.09822040796279907, |
| "learning_rate": 1.4682737372615968e-07, |
| "loss": 0.0004, |
| "step": 636 |
| }, |
| { |
| "epoch": 5.400568181818182, |
| "grad_norm": 0.06642526388168335, |
| "learning_rate": 1.4245419450341913e-07, |
| "loss": 0.0004, |
| "step": 637 |
| }, |
| { |
| "epoch": 5.409090909090909, |
| "grad_norm": 0.12337509542703629, |
| "learning_rate": 1.3814521984699597e-07, |
| "loss": 0.0006, |
| "step": 638 |
| }, |
| { |
| "epoch": 5.417613636363637, |
| "grad_norm": 0.10305451601743698, |
| "learning_rate": 1.339005671059765e-07, |
| "loss": 0.0004, |
| "step": 639 |
| }, |
| { |
| "epoch": 5.426136363636363, |
| "grad_norm": 0.11694454401731491, |
| "learning_rate": 1.297203518777293e-07, |
| "loss": 0.0005, |
| "step": 640 |
| }, |
| { |
| "epoch": 5.434659090909091, |
| "grad_norm": 0.051395516842603683, |
| "learning_rate": 1.2560468800475262e-07, |
| "loss": 0.0002, |
| "step": 641 |
| }, |
| { |
| "epoch": 5.443181818181818, |
| "grad_norm": 0.04204337298870087, |
| "learning_rate": 1.2155368757157644e-07, |
| "loss": 0.0001, |
| "step": 642 |
| }, |
| { |
| "epoch": 5.451704545454546, |
| "grad_norm": 0.01427415944635868, |
| "learning_rate": 1.1756746090171051e-07, |
| "loss": 0.0002, |
| "step": 643 |
| }, |
| { |
| "epoch": 5.4602272727272725, |
| "grad_norm": 0.063359834253788, |
| "learning_rate": 1.1364611655463737e-07, |
| "loss": 0.0003, |
| "step": 644 |
| }, |
| { |
| "epoch": 5.46875, |
| "grad_norm": 0.05776561051607132, |
| "learning_rate": 1.097897613228588e-07, |
| "loss": 0.0004, |
| "step": 645 |
| }, |
| { |
| "epoch": 5.4772727272727275, |
| "grad_norm": 0.0897364616394043, |
| "learning_rate": 1.0599850022898539e-07, |
| "loss": 0.0002, |
| "step": 646 |
| }, |
| { |
| "epoch": 5.485795454545454, |
| "grad_norm": 0.020489100366830826, |
| "learning_rate": 1.0227243652287639e-07, |
| "loss": 0.0003, |
| "step": 647 |
| }, |
| { |
| "epoch": 5.494318181818182, |
| "grad_norm": 0.08238087594509125, |
| "learning_rate": 9.861167167883046e-08, |
| "loss": 0.0002, |
| "step": 648 |
| }, |
| { |
| "epoch": 5.502840909090909, |
| "grad_norm": 0.08249715715646744, |
| "learning_rate": 9.50163053928177e-08, |
| "loss": 0.0003, |
| "step": 649 |
| }, |
| { |
| "epoch": 5.511363636363637, |
| "grad_norm": 0.04850064963102341, |
| "learning_rate": 9.148643557976955e-08, |
| "loss": 0.0006, |
| "step": 650 |
| }, |
| { |
| "epoch": 5.519886363636363, |
| "grad_norm": 0.10203363001346588, |
| "learning_rate": 8.802215837090817e-08, |
| "loss": 0.0006, |
| "step": 651 |
| }, |
| { |
| "epoch": 5.528409090909091, |
| "grad_norm": 0.07185014337301254, |
| "learning_rate": 8.462356811112987e-08, |
| "loss": 0.0005, |
| "step": 652 |
| }, |
| { |
| "epoch": 5.536931818181818, |
| "grad_norm": 0.09434297680854797, |
| "learning_rate": 8.129075735643698e-08, |
| "loss": 0.0005, |
| "step": 653 |
| }, |
| { |
| "epoch": 5.545454545454545, |
| "grad_norm": 0.07415787875652313, |
| "learning_rate": 7.802381687141537e-08, |
| "loss": 0.0002, |
| "step": 654 |
| }, |
| { |
| "epoch": 5.5539772727272725, |
| "grad_norm": 0.14693333208560944, |
| "learning_rate": 7.482283562676357e-08, |
| "loss": 0.0002, |
| "step": 655 |
| }, |
| { |
| "epoch": 5.5625, |
| "grad_norm": 0.04635694622993469, |
| "learning_rate": 7.168790079686932e-08, |
| "loss": 0.0004, |
| "step": 656 |
| }, |
| { |
| "epoch": 5.5710227272727275, |
| "grad_norm": 0.09312669187784195, |
| "learning_rate": 6.861909775743609e-08, |
| "loss": 0.0002, |
| "step": 657 |
| }, |
| { |
| "epoch": 5.579545454545455, |
| "grad_norm": 0.07916014641523361, |
| "learning_rate": 6.561651008315739e-08, |
| "loss": 0.0005, |
| "step": 658 |
| }, |
| { |
| "epoch": 5.588068181818182, |
| "grad_norm": 0.1338101327419281, |
| "learning_rate": 6.268021954544095e-08, |
| "loss": 0.0003, |
| "step": 659 |
| }, |
| { |
| "epoch": 5.596590909090909, |
| "grad_norm": 0.01953204907476902, |
| "learning_rate": 5.981030611018235e-08, |
| "loss": 0.0004, |
| "step": 660 |
| }, |
| { |
| "epoch": 5.605113636363637, |
| "grad_norm": 0.05372335761785507, |
| "learning_rate": 5.700684793558536e-08, |
| "loss": 0.0005, |
| "step": 661 |
| }, |
| { |
| "epoch": 5.613636363636363, |
| "grad_norm": 0.14678236842155457, |
| "learning_rate": 5.426992137003623e-08, |
| "loss": 0.0006, |
| "step": 662 |
| }, |
| { |
| "epoch": 5.622159090909091, |
| "grad_norm": 0.09216056019067764, |
| "learning_rate": 5.15996009500222e-08, |
| "loss": 0.0007, |
| "step": 663 |
| }, |
| { |
| "epoch": 5.630681818181818, |
| "grad_norm": 0.11501898616552353, |
| "learning_rate": 4.899595939810237e-08, |
| "loss": 0.0002, |
| "step": 664 |
| }, |
| { |
| "epoch": 5.639204545454545, |
| "grad_norm": 0.06315010786056519, |
| "learning_rate": 4.645906762092756e-08, |
| "loss": 0.0002, |
| "step": 665 |
| }, |
| { |
| "epoch": 5.6477272727272725, |
| "grad_norm": 0.06256142258644104, |
| "learning_rate": 4.3988994707308274e-08, |
| "loss": 0.0004, |
| "step": 666 |
| }, |
| { |
| "epoch": 5.65625, |
| "grad_norm": 0.035255659371614456, |
| "learning_rate": 4.158580792633482e-08, |
| "loss": 0.0002, |
| "step": 667 |
| }, |
| { |
| "epoch": 5.6647727272727275, |
| "grad_norm": 0.05211144685745239, |
| "learning_rate": 3.92495727255432e-08, |
| "loss": 0.0005, |
| "step": 668 |
| }, |
| { |
| "epoch": 5.673295454545455, |
| "grad_norm": 0.1178005188703537, |
| "learning_rate": 3.6980352729134025e-08, |
| "loss": 0.0004, |
| "step": 669 |
| }, |
| { |
| "epoch": 5.681818181818182, |
| "grad_norm": 0.04155606031417847, |
| "learning_rate": 3.4778209736240633e-08, |
| "loss": 0.0003, |
| "step": 670 |
| }, |
| { |
| "epoch": 5.690340909090909, |
| "grad_norm": 0.059223588556051254, |
| "learning_rate": 3.26432037192434e-08, |
| "loss": 0.0002, |
| "step": 671 |
| }, |
| { |
| "epoch": 5.698863636363637, |
| "grad_norm": 0.05430597439408302, |
| "learning_rate": 3.057539282213973e-08, |
| "loss": 0.0006, |
| "step": 672 |
| }, |
| { |
| "epoch": 5.707386363636363, |
| "grad_norm": 0.14908038079738617, |
| "learning_rate": 2.8574833358957776e-08, |
| "loss": 0.0007, |
| "step": 673 |
| }, |
| { |
| "epoch": 5.715909090909091, |
| "grad_norm": 0.09642591327428818, |
| "learning_rate": 2.6641579812224373e-08, |
| "loss": 0.0001, |
| "step": 674 |
| }, |
| { |
| "epoch": 5.724431818181818, |
| "grad_norm": 0.025506554171442986, |
| "learning_rate": 2.477568483148146e-08, |
| "loss": 0.0006, |
| "step": 675 |
| }, |
| { |
| "epoch": 5.732954545454545, |
| "grad_norm": 0.13444463908672333, |
| "learning_rate": 2.2977199231850323e-08, |
| "loss": 0.0003, |
| "step": 676 |
| }, |
| { |
| "epoch": 5.7414772727272725, |
| "grad_norm": 0.07416418194770813, |
| "learning_rate": 2.124617199265072e-08, |
| "loss": 0.0002, |
| "step": 677 |
| }, |
| { |
| "epoch": 5.75, |
| "grad_norm": 0.10049674659967422, |
| "learning_rate": 1.9582650256064206e-08, |
| "loss": 0.0002, |
| "step": 678 |
| }, |
| { |
| "epoch": 5.7585227272727275, |
| "grad_norm": 0.10891962051391602, |
| "learning_rate": 1.7986679325851518e-08, |
| "loss": 0.0005, |
| "step": 679 |
| }, |
| { |
| "epoch": 5.767045454545455, |
| "grad_norm": 0.021479619666934013, |
| "learning_rate": 1.6458302666119142e-08, |
| "loss": 0.0002, |
| "step": 680 |
| }, |
| { |
| "epoch": 5.775568181818182, |
| "grad_norm": 0.027055837213993073, |
| "learning_rate": 1.4997561900135238e-08, |
| "loss": 0.0004, |
| "step": 681 |
| }, |
| { |
| "epoch": 5.784090909090909, |
| "grad_norm": 0.06013903394341469, |
| "learning_rate": 1.3604496809195289e-08, |
| "loss": 0.0003, |
| "step": 682 |
| }, |
| { |
| "epoch": 5.792613636363637, |
| "grad_norm": 0.07452274113893509, |
| "learning_rate": 1.2279145331540177e-08, |
| "loss": 0.0004, |
| "step": 683 |
| }, |
| { |
| "epoch": 5.801136363636363, |
| "grad_norm": 0.0601140633225441, |
| "learning_rate": 1.1021543561322012e-08, |
| "loss": 0.0001, |
| "step": 684 |
| }, |
| { |
| "epoch": 5.809659090909091, |
| "grad_norm": 0.025853119790554047, |
| "learning_rate": 9.831725747621035e-09, |
| "loss": 0.0002, |
| "step": 685 |
| }, |
| { |
| "epoch": 5.818181818181818, |
| "grad_norm": 0.04668550565838814, |
| "learning_rate": 8.709724293513855e-09, |
| "loss": 0.0002, |
| "step": 686 |
| }, |
| { |
| "epoch": 5.826704545454545, |
| "grad_norm": 0.07281884551048279, |
| "learning_rate": 7.655569755190272e-09, |
| "loss": 0.0003, |
| "step": 687 |
| }, |
| { |
| "epoch": 5.8352272727272725, |
| "grad_norm": 0.02255946397781372, |
| "learning_rate": 6.66929084112089e-09, |
| "loss": 0.0001, |
| "step": 688 |
| }, |
| { |
| "epoch": 5.84375, |
| "grad_norm": 0.043406061828136444, |
| "learning_rate": 5.7509144112757875e-09, |
| "loss": 0.0005, |
| "step": 689 |
| }, |
| { |
| "epoch": 5.8522727272727275, |
| "grad_norm": 0.06619398295879364, |
| "learning_rate": 4.900465476393168e-09, |
| "loss": 0.0004, |
| "step": 690 |
| }, |
| { |
| "epoch": 5.860795454545455, |
| "grad_norm": 0.14031286537647247, |
| "learning_rate": 4.117967197297401e-09, |
| "loss": 0.0005, |
| "step": 691 |
| }, |
| { |
| "epoch": 5.869318181818182, |
| "grad_norm": 0.09792155772447586, |
| "learning_rate": 3.4034408842695264e-09, |
| "loss": 0.0004, |
| "step": 692 |
| }, |
| { |
| "epoch": 5.877840909090909, |
| "grad_norm": 0.065652035176754, |
| "learning_rate": 2.75690599646522e-09, |
| "loss": 0.0002, |
| "step": 693 |
| }, |
| { |
| "epoch": 5.886363636363637, |
| "grad_norm": 0.038954220712184906, |
| "learning_rate": 2.1783801413866044e-09, |
| "loss": 0.0005, |
| "step": 694 |
| }, |
| { |
| "epoch": 5.894886363636363, |
| "grad_norm": 0.0737239271402359, |
| "learning_rate": 1.6678790744015238e-09, |
| "loss": 0.0003, |
| "step": 695 |
| }, |
| { |
| "epoch": 5.903409090909091, |
| "grad_norm": 0.3166946768760681, |
| "learning_rate": 1.2254166983152737e-09, |
| "loss": 0.0004, |
| "step": 696 |
| }, |
| { |
| "epoch": 5.911931818181818, |
| "grad_norm": 0.058832280337810516, |
| "learning_rate": 8.510050629909062e-10, |
| "loss": 0.0003, |
| "step": 697 |
| }, |
| { |
| "epoch": 5.920454545454545, |
| "grad_norm": 0.047831758856773376, |
| "learning_rate": 5.446543650219905e-10, |
| "loss": 0.0002, |
| "step": 698 |
| }, |
| { |
| "epoch": 5.9289772727272725, |
| "grad_norm": 0.028987539932131767, |
| "learning_rate": 3.0637294745533566e-10, |
| "loss": 0.0001, |
| "step": 699 |
| }, |
| { |
| "epoch": 5.9375, |
| "grad_norm": 0.05544570833444595, |
| "learning_rate": 1.3616729956228425e-10, |
| "loss": 0.0003, |
| "step": 700 |
| }, |
| { |
| "epoch": 5.9460227272727275, |
| "grad_norm": 0.0382600799202919, |
| "learning_rate": 3.4042056662741964e-11, |
| "loss": 0.0003, |
| "step": 701 |
| }, |
| { |
| "epoch": 5.954545454545455, |
| "grad_norm": 0.09347274154424667, |
| "learning_rate": 0.0, |
| "loss": 0.0003, |
| "step": 702 |
| } |
| ], |
| "logging_steps": 1, |
| "max_steps": 702, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 6, |
| "save_steps": 117, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.5639931739764163e+18, |
| "train_batch_size": 4, |
| "trial_name": null, |
| "trial_params": null |
| } |
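The record above is self-describing: per-step entries live in `log_history` (each carrying `step`, `epoch`, `loss`, `learning_rate`, and `grad_norm`), while the trailing fields (`logging_steps`, `max_steps`, `save_steps`, `num_train_epochs`, `train_batch_size`, `total_flos`) summarize the run. As a minimal sketch of how such a record could be inspected offline, assuming it is saved under the conventional name `trainer_state.json` and that `matplotlib` is available (neither the filename nor the plotting dependency is part of the record itself), the snippet below loads the JSON and plots the logged training loss against the step counter alongside the learning-rate schedule.

```python
# Hedged sketch: inspect a saved trainer-state record offline.
# Assumptions: the file is stored as "trainer_state.json" and matplotlib is installed.
import json

import matplotlib.pyplot as plt

with open("trainer_state.json", "r", encoding="utf-8") as f:
    state = json.load(f)

# Keep only entries that carry a training loss (every logged step in this record does).
entries = [e for e in state["log_history"] if "loss" in e]
steps = [e["step"] for e in entries]
losses = [e["loss"] for e in entries]
lrs = [e["learning_rate"] for e in entries]

fig, ax_loss = plt.subplots()
ax_loss.plot(steps, losses, label="train loss")
ax_loss.set_xlabel("step")
ax_loss.set_ylabel("loss")

ax_lr = ax_loss.twinx()  # second y-axis for the learning-rate schedule
ax_lr.plot(steps, lrs, color="tab:orange", label="learning rate")
ax_lr.set_ylabel("learning rate")

fig.tight_layout()
fig.savefig("training_curves.png")
```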