{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 0.9971590909090909,
  "eval_steps": 500,
  "global_step": 117,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.008522727272727272,
      "grad_norm": 35.49958801269531,
      "learning_rate": 5.0000000000000004e-08,
      "loss": 4.6143,
      "step": 1
    },
    {
      "epoch": 0.017045454545454544,
      "grad_norm": 35.543556213378906,
      "learning_rate": 1.0000000000000001e-07,
      "loss": 4.6719,
      "step": 2
    },
    {
      "epoch": 0.02556818181818182,
      "grad_norm": 34.12852096557617,
      "learning_rate": 1.5000000000000002e-07,
      "loss": 4.5546,
      "step": 3
    },
    {
      "epoch": 0.03409090909090909,
      "grad_norm": 33.610572814941406,
      "learning_rate": 2.0000000000000002e-07,
      "loss": 4.4919,
      "step": 4
    },
    {
      "epoch": 0.04261363636363636,
      "grad_norm": 34.532169342041016,
      "learning_rate": 2.5000000000000004e-07,
      "loss": 4.6095,
      "step": 5
    },
    {
      "epoch": 0.05113636363636364,
      "grad_norm": 34.2357063293457,
      "learning_rate": 3.0000000000000004e-07,
      "loss": 4.585,
      "step": 6
    },
    {
      "epoch": 0.05965909090909091,
      "grad_norm": 34.94609832763672,
      "learning_rate": 3.5000000000000004e-07,
      "loss": 4.5911,
      "step": 7
    },
    {
      "epoch": 0.06818181818181818,
      "grad_norm": 34.79508590698242,
      "learning_rate": 4.0000000000000003e-07,
      "loss": 4.6294,
      "step": 8
    },
    {
      "epoch": 0.07670454545454546,
      "grad_norm": 35.18478775024414,
      "learning_rate": 4.5000000000000003e-07,
      "loss": 4.6568,
      "step": 9
    },
    {
      "epoch": 0.08522727272727272,
      "grad_norm": 33.75633239746094,
      "learning_rate": 5.000000000000001e-07,
      "loss": 4.4504,
      "step": 10
    },
    {
      "epoch": 0.09375,
      "grad_norm": 34.20966339111328,
      "learning_rate": 5.5e-07,
      "loss": 4.5005,
      "step": 11
    },
    {
      "epoch": 0.10227272727272728,
      "grad_norm": 33.20008087158203,
      "learning_rate": 6.000000000000001e-07,
      "loss": 4.4297,
      "step": 12
    },
    {
      "epoch": 0.11079545454545454,
      "grad_norm": 33.53578567504883,
      "learning_rate": 6.5e-07,
      "loss": 4.4136,
      "step": 13
    },
    {
      "epoch": 0.11931818181818182,
      "grad_norm": 31.951068878173828,
      "learning_rate": 7.000000000000001e-07,
      "loss": 4.3065,
      "step": 14
    },
    {
      "epoch": 0.1278409090909091,
      "grad_norm": 30.890714645385742,
      "learning_rate": 7.5e-07,
      "loss": 4.2433,
      "step": 15
    },
    {
      "epoch": 0.13636363636363635,
      "grad_norm": 29.448890686035156,
      "learning_rate": 8.000000000000001e-07,
      "loss": 4.0981,
      "step": 16
    },
    {
      "epoch": 0.14488636363636365,
      "grad_norm": 26.769498825073242,
      "learning_rate": 8.500000000000001e-07,
      "loss": 3.9818,
      "step": 17
    },
    {
      "epoch": 0.1534090909090909,
      "grad_norm": 25.418458938598633,
      "learning_rate": 9.000000000000001e-07,
      "loss": 3.8568,
      "step": 18
    },
    {
      "epoch": 0.16193181818181818,
      "grad_norm": 24.099462509155273,
      "learning_rate": 9.500000000000001e-07,
      "loss": 3.7139,
      "step": 19
    },
    {
      "epoch": 0.17045454545454544,
      "grad_norm": 22.487621307373047,
      "learning_rate": 1.0000000000000002e-06,
      "loss": 3.6967,
      "step": 20
    },
    {
      "epoch": 0.17897727272727273,
      "grad_norm": 20.72856330871582,
      "learning_rate": 1.0500000000000001e-06,
      "loss": 3.5031,
      "step": 21
    },
    {
      "epoch": 0.1875,
      "grad_norm": 19.552040100097656,
      "learning_rate": 1.1e-06,
      "loss": 3.4201,
      "step": 22
    },
    {
      "epoch": 0.19602272727272727,
      "grad_norm": 18.033971786499023,
      "learning_rate": 1.1500000000000002e-06,
      "loss": 3.1842,
      "step": 23
    },
    {
      "epoch": 0.20454545454545456,
      "grad_norm": 18.865802764892578,
      "learning_rate": 1.2000000000000002e-06,
      "loss": 3.1982,
      "step": 24
    },
    {
      "epoch": 0.21306818181818182,
      "grad_norm": 18.849502563476562,
      "learning_rate": 1.25e-06,
      "loss": 2.9954,
      "step": 25
    },
    {
      "epoch": 0.2215909090909091,
      "grad_norm": 19.714330673217773,
      "learning_rate": 1.3e-06,
      "loss": 2.8763,
      "step": 26
    },
    {
      "epoch": 0.23011363636363635,
      "grad_norm": 20.26412010192871,
      "learning_rate": 1.3500000000000002e-06,
      "loss": 2.7259,
      "step": 27
    },
    {
      "epoch": 0.23863636363636365,
      "grad_norm": 19.212318420410156,
      "learning_rate": 1.4000000000000001e-06,
      "loss": 2.6099,
      "step": 28
    },
    {
      "epoch": 0.2471590909090909,
      "grad_norm": 16.80523681640625,
      "learning_rate": 1.45e-06,
      "loss": 2.4482,
      "step": 29
    },
    {
      "epoch": 0.2556818181818182,
      "grad_norm": 14.624052047729492,
      "learning_rate": 1.5e-06,
      "loss": 2.2364,
      "step": 30
    },
    {
      "epoch": 0.26420454545454547,
      "grad_norm": 14.759950637817383,
      "learning_rate": 1.5500000000000002e-06,
      "loss": 2.2159,
      "step": 31
    },
    {
      "epoch": 0.2727272727272727,
      "grad_norm": 14.904441833496094,
      "learning_rate": 1.6000000000000001e-06,
      "loss": 2.0206,
      "step": 32
    },
    {
      "epoch": 0.28125,
      "grad_norm": 15.03490161895752,
      "learning_rate": 1.6500000000000003e-06,
      "loss": 1.8725,
      "step": 33
    },
    {
      "epoch": 0.2897727272727273,
      "grad_norm": 15.70709228515625,
      "learning_rate": 1.7000000000000002e-06,
      "loss": 1.8046,
      "step": 34
    },
    {
      "epoch": 0.29829545454545453,
      "grad_norm": 15.637526512145996,
      "learning_rate": 1.75e-06,
      "loss": 1.6198,
      "step": 35
    },
    {
      "epoch": 0.3068181818181818,
      "grad_norm": 14.345853805541992,
      "learning_rate": 1.8000000000000001e-06,
      "loss": 1.4247,
      "step": 36
    },
    {
      "epoch": 0.3153409090909091,
      "grad_norm": 14.281502723693848,
      "learning_rate": 1.85e-06,
      "loss": 1.2859,
      "step": 37
    },
    {
      "epoch": 0.32386363636363635,
      "grad_norm": 13.567434310913086,
      "learning_rate": 1.9000000000000002e-06,
      "loss": 1.1452,
      "step": 38
    },
    {
      "epoch": 0.33238636363636365,
      "grad_norm": 13.128920555114746,
      "learning_rate": 1.9500000000000004e-06,
      "loss": 1.0036,
      "step": 39
    },
    {
      "epoch": 0.3409090909090909,
      "grad_norm": 12.954020500183105,
      "learning_rate": 2.0000000000000003e-06,
      "loss": 0.851,
      "step": 40
    },
    {
      "epoch": 0.3494318181818182,
      "grad_norm": 12.533946990966797,
      "learning_rate": 2.05e-06,
      "loss": 0.715,
      "step": 41
    },
    {
      "epoch": 0.35795454545454547,
      "grad_norm": 11.564764022827148,
      "learning_rate": 2.1000000000000002e-06,
      "loss": 0.5987,
      "step": 42
    },
    {
      "epoch": 0.3664772727272727,
      "grad_norm": 10.383822441101074,
      "learning_rate": 2.15e-06,
      "loss": 0.4834,
      "step": 43
    },
    {
      "epoch": 0.375,
      "grad_norm": 8.901700973510742,
      "learning_rate": 2.2e-06,
      "loss": 0.3808,
      "step": 44
    },
    {
      "epoch": 0.3835227272727273,
      "grad_norm": 7.737320423126221,
      "learning_rate": 2.25e-06,
      "loss": 0.2975,
      "step": 45
    },
    {
      "epoch": 0.39204545454545453,
      "grad_norm": 5.334733963012695,
      "learning_rate": 2.3000000000000004e-06,
      "loss": 0.2261,
      "step": 46
    },
    {
      "epoch": 0.4005681818181818,
      "grad_norm": 3.3499741554260254,
      "learning_rate": 2.35e-06,
      "loss": 0.1854,
      "step": 47
    },
    {
      "epoch": 0.4090909090909091,
      "grad_norm": 2.4037742614746094,
      "learning_rate": 2.4000000000000003e-06,
      "loss": 0.1532,
      "step": 48
    },
    {
      "epoch": 0.41761363636363635,
      "grad_norm": 1.8914185762405396,
      "learning_rate": 2.4500000000000003e-06,
      "loss": 0.1372,
      "step": 49
    },
    {
      "epoch": 0.42613636363636365,
      "grad_norm": 1.7124507427215576,
      "learning_rate": 2.5e-06,
      "loss": 0.136,
      "step": 50
    },
    {
      "epoch": 0.4346590909090909,
      "grad_norm": 1.242527961730957,
      "learning_rate": 2.55e-06,
      "loss": 0.1238,
      "step": 51
    },
    {
      "epoch": 0.4431818181818182,
      "grad_norm": 0.9835780262947083,
      "learning_rate": 2.6e-06,
      "loss": 0.1192,
      "step": 52
    },
    {
      "epoch": 0.45170454545454547,
      "grad_norm": 1.0163497924804688,
      "learning_rate": 2.6500000000000005e-06,
      "loss": 0.1175,
      "step": 53
    },
    {
      "epoch": 0.4602272727272727,
      "grad_norm": 0.8837094306945801,
      "learning_rate": 2.7000000000000004e-06,
      "loss": 0.1104,
      "step": 54
    },
    {
      "epoch": 0.46875,
      "grad_norm": 0.6102741956710815,
      "learning_rate": 2.7500000000000004e-06,
      "loss": 0.0986,
      "step": 55
    },
    {
      "epoch": 0.4772727272727273,
      "grad_norm": 0.85715252161026,
      "learning_rate": 2.8000000000000003e-06,
      "loss": 0.1083,
      "step": 56
    },
    {
      "epoch": 0.48579545454545453,
      "grad_norm": 0.9692059755325317,
      "learning_rate": 2.85e-06,
      "loss": 0.0994,
      "step": 57
    },
    {
      "epoch": 0.4943181818181818,
      "grad_norm": 0.5620752573013306,
      "learning_rate": 2.9e-06,
      "loss": 0.0909,
      "step": 58
    },
    {
      "epoch": 0.5028409090909091,
      "grad_norm": 0.5377550721168518,
      "learning_rate": 2.95e-06,
      "loss": 0.087,
      "step": 59
    },
    {
      "epoch": 0.5113636363636364,
      "grad_norm": 0.7260486483573914,
      "learning_rate": 3e-06,
      "loss": 0.0949,
      "step": 60
    },
    {
      "epoch": 0.5198863636363636,
      "grad_norm": 0.5636699199676514,
      "learning_rate": 3.05e-06,
      "loss": 0.0884,
      "step": 61
    },
    {
      "epoch": 0.5284090909090909,
      "grad_norm": 0.3729614019393921,
      "learning_rate": 3.1000000000000004e-06,
      "loss": 0.0771,
      "step": 62
    },
    {
      "epoch": 0.5369318181818182,
      "grad_norm": 0.5472120046615601,
      "learning_rate": 3.1500000000000003e-06,
      "loss": 0.0887,
      "step": 63
    },
    {
      "epoch": 0.5454545454545454,
      "grad_norm": 0.4915490448474884,
      "learning_rate": 3.2000000000000003e-06,
      "loss": 0.0826,
      "step": 64
    },
    {
      "epoch": 0.5539772727272727,
      "grad_norm": 0.4123076796531677,
      "learning_rate": 3.2500000000000002e-06,
      "loss": 0.0834,
      "step": 65
    },
    {
      "epoch": 0.5625,
      "grad_norm": 0.32767826318740845,
      "learning_rate": 3.3000000000000006e-06,
      "loss": 0.0765,
      "step": 66
    },
    {
      "epoch": 0.5710227272727273,
      "grad_norm": 0.34970155358314514,
      "learning_rate": 3.3500000000000005e-06,
      "loss": 0.0832,
      "step": 67
    },
    {
      "epoch": 0.5795454545454546,
      "grad_norm": 0.38679587841033936,
      "learning_rate": 3.4000000000000005e-06,
      "loss": 0.0766,
      "step": 68
    },
    {
      "epoch": 0.5880681818181818,
      "grad_norm": 0.5464550852775574,
      "learning_rate": 3.45e-06,
      "loss": 0.0748,
      "step": 69
    },
    {
      "epoch": 0.5965909090909091,
      "grad_norm": 0.3545376658439636,
      "learning_rate": 3.5e-06,
      "loss": 0.0759,
      "step": 70
    },
    {
      "epoch": 0.6051136363636364,
      "grad_norm": 0.3532780110836029,
      "learning_rate": 3.5500000000000003e-06,
      "loss": 0.0728,
      "step": 71
    },
    {
      "epoch": 0.6136363636363636,
      "grad_norm": 0.5024192929267883,
      "learning_rate": 3.6000000000000003e-06,
      "loss": 0.081,
      "step": 72
    },
    {
      "epoch": 0.6221590909090909,
      "grad_norm": 0.3845844268798828,
      "learning_rate": 3.65e-06,
      "loss": 0.0689,
      "step": 73
    },
    {
      "epoch": 0.6306818181818182,
      "grad_norm": 0.46010249853134155,
      "learning_rate": 3.7e-06,
      "loss": 0.0757,
      "step": 74
    },
    {
      "epoch": 0.6392045454545454,
      "grad_norm": 0.36048972606658936,
      "learning_rate": 3.7500000000000005e-06,
      "loss": 0.0758,
      "step": 75
    },
    {
      "epoch": 0.6477272727272727,
      "grad_norm": 0.2978876829147339,
      "learning_rate": 3.8000000000000005e-06,
      "loss": 0.0747,
      "step": 76
    },
    {
      "epoch": 0.65625,
      "grad_norm": 0.30899888277053833,
      "learning_rate": 3.85e-06,
      "loss": 0.0788,
      "step": 77
    },
    {
      "epoch": 0.6647727272727273,
      "grad_norm": 0.45000651478767395,
      "learning_rate": 3.900000000000001e-06,
      "loss": 0.0672,
      "step": 78
    },
    {
      "epoch": 0.6732954545454546,
      "grad_norm": 0.2621772587299347,
      "learning_rate": 3.95e-06,
      "loss": 0.0719,
      "step": 79
    },
    {
      "epoch": 0.6818181818181818,
      "grad_norm": 0.28618085384368896,
      "learning_rate": 4.000000000000001e-06,
      "loss": 0.0744,
      "step": 80
    },
    {
      "epoch": 0.6903409090909091,
      "grad_norm": 0.40642479062080383,
      "learning_rate": 4.05e-06,
      "loss": 0.0743,
      "step": 81
    },
    {
      "epoch": 0.6988636363636364,
      "grad_norm": 0.3514344096183777,
      "learning_rate": 4.1e-06,
      "loss": 0.0708,
      "step": 82
    },
    {
      "epoch": 0.7073863636363636,
      "grad_norm": 0.426798939704895,
      "learning_rate": 4.15e-06,
      "loss": 0.073,
      "step": 83
    },
    {
      "epoch": 0.7159090909090909,
      "grad_norm": 0.29413217306137085,
      "learning_rate": 4.2000000000000004e-06,
      "loss": 0.0732,
      "step": 84
    },
    {
      "epoch": 0.7244318181818182,
      "grad_norm": 0.37668099999427795,
      "learning_rate": 4.25e-06,
      "loss": 0.0664,
      "step": 85
    },
    {
      "epoch": 0.7329545454545454,
      "grad_norm": 0.3696061372756958,
      "learning_rate": 4.3e-06,
      "loss": 0.0649,
      "step": 86
    },
    {
      "epoch": 0.7414772727272727,
      "grad_norm": 0.311988890171051,
      "learning_rate": 4.350000000000001e-06,
      "loss": 0.0689,
      "step": 87
    },
    {
      "epoch": 0.75,
      "grad_norm": 0.3180747628211975,
      "learning_rate": 4.4e-06,
      "loss": 0.0746,
      "step": 88
    },
    {
      "epoch": 0.7585227272727273,
      "grad_norm": 0.46045729517936707,
      "learning_rate": 4.450000000000001e-06,
      "loss": 0.0763,
      "step": 89
    },
    {
      "epoch": 0.7670454545454546,
      "grad_norm": 0.3566094934940338,
      "learning_rate": 4.5e-06,
      "loss": 0.0676,
      "step": 90
    },
    {
      "epoch": 0.7755681818181818,
      "grad_norm": 0.35632985830307007,
      "learning_rate": 4.5500000000000005e-06,
      "loss": 0.0677,
      "step": 91
    },
    {
      "epoch": 0.7840909090909091,
      "grad_norm": 0.27962526679039,
      "learning_rate": 4.600000000000001e-06,
      "loss": 0.0689,
      "step": 92
    },
    {
      "epoch": 0.7926136363636364,
      "grad_norm": 0.4532427191734314,
      "learning_rate": 4.65e-06,
      "loss": 0.0724,
      "step": 93
    },
    {
      "epoch": 0.8011363636363636,
      "grad_norm": 0.363337904214859,
      "learning_rate": 4.7e-06,
      "loss": 0.0708,
      "step": 94
    },
    {
      "epoch": 0.8096590909090909,
      "grad_norm": 0.3065521717071533,
      "learning_rate": 4.75e-06,
      "loss": 0.0713,
      "step": 95
    },
    {
      "epoch": 0.8181818181818182,
      "grad_norm": 0.24705548584461212,
      "learning_rate": 4.800000000000001e-06,
      "loss": 0.0683,
      "step": 96
    },
    {
      "epoch": 0.8267045454545454,
      "grad_norm": 0.5038250684738159,
      "learning_rate": 4.85e-06,
      "loss": 0.0738,
      "step": 97
    },
    {
      "epoch": 0.8352272727272727,
      "grad_norm": 0.2972690761089325,
      "learning_rate": 4.9000000000000005e-06,
      "loss": 0.0684,
      "step": 98
    },
    {
      "epoch": 0.84375,
      "grad_norm": 0.40811270475387573,
      "learning_rate": 4.95e-06,
      "loss": 0.066,
      "step": 99
    },
    {
      "epoch": 0.8522727272727273,
      "grad_norm": 0.3925577998161316,
      "learning_rate": 5e-06,
      "loss": 0.0692,
      "step": 100
    },
    {
      "epoch": 0.8607954545454546,
      "grad_norm": 0.32043716311454773,
      "learning_rate": 4.999965957943338e-06,
      "loss": 0.0652,
      "step": 101
    },
    {
      "epoch": 0.8693181818181818,
      "grad_norm": 0.26025497913360596,
      "learning_rate": 4.999863832700438e-06,
      "loss": 0.0634,
      "step": 102
    },
    {
      "epoch": 0.8778409090909091,
      "grad_norm": 0.5605457425117493,
      "learning_rate": 4.999693627052545e-06,
      "loss": 0.0686,
      "step": 103
    },
    {
      "epoch": 0.8863636363636364,
      "grad_norm": 0.48003220558166504,
      "learning_rate": 4.9994553456349785e-06,
      "loss": 0.0707,
      "step": 104
    },
    {
      "epoch": 0.8948863636363636,
      "grad_norm": 0.22346197068691254,
      "learning_rate": 4.99914899493701e-06,
      "loss": 0.067,
      "step": 105
    },
    {
      "epoch": 0.9034090909090909,
      "grad_norm": 0.45966312289237976,
      "learning_rate": 4.998774583301685e-06,
      "loss": 0.0663,
      "step": 106
    },
    {
      "epoch": 0.9119318181818182,
      "grad_norm": 0.38695356249809265,
      "learning_rate": 4.998332120925598e-06,
      "loss": 0.0714,
      "step": 107
    },
    {
      "epoch": 0.9204545454545454,
      "grad_norm": 0.35708868503570557,
      "learning_rate": 4.997821619858614e-06,
      "loss": 0.0632,
      "step": 108
    },
    {
      "epoch": 0.9289772727272727,
      "grad_norm": 0.2699528932571411,
      "learning_rate": 4.9972430940035355e-06,
      "loss": 0.0649,
      "step": 109
    },
    {
      "epoch": 0.9375,
      "grad_norm": 0.5043486952781677,
      "learning_rate": 4.9965965591157314e-06,
      "loss": 0.0715,
      "step": 110
    },
    {
      "epoch": 0.9460227272727273,
      "grad_norm": 0.4046338200569153,
      "learning_rate": 4.995882032802703e-06,
      "loss": 0.0657,
      "step": 111
    },
    {
      "epoch": 0.9545454545454546,
      "grad_norm": 0.3543775677680969,
      "learning_rate": 4.995099534523608e-06,
      "loss": 0.0698,
      "step": 112
    },
    {
      "epoch": 0.9630681818181818,
      "grad_norm": 0.3295219838619232,
      "learning_rate": 4.994249085588725e-06,
      "loss": 0.0719,
      "step": 113
    },
    {
      "epoch": 0.9715909090909091,
      "grad_norm": 0.3691340982913971,
      "learning_rate": 4.993330709158879e-06,
      "loss": 0.0689,
      "step": 114
    },
    {
      "epoch": 0.9801136363636364,
      "grad_norm": 0.4219862222671509,
      "learning_rate": 4.9923444302448095e-06,
      "loss": 0.0644,
      "step": 115
    },
    {
      "epoch": 0.9886363636363636,
      "grad_norm": 0.33437392115592957,
      "learning_rate": 4.991290275706486e-06,
      "loss": 0.074,
      "step": 116
    },
    {
      "epoch": 0.9971590909090909,
      "grad_norm": 0.22970478236675262,
      "learning_rate": 4.990168274252379e-06,
      "loss": 0.0639,
      "step": 117
    }
  ],
  "logging_steps": 1,
  "max_steps": 702,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 6,
  "save_steps": 117,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 2.620967314689884e+17,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}
|
|