{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.986175115207373,
  "eval_steps": 500,
  "global_step": 216,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.013824884792626729,
      "grad_norm": 3.8321747022193033,
      "learning_rate": 4.5454545454545457e-07,
      "loss": 0.4904,
      "step": 1
    },
    {
      "epoch": 0.027649769585253458,
      "grad_norm": 4.166445470011081,
      "learning_rate": 9.090909090909091e-07,
      "loss": 0.4586,
      "step": 2
    },
    {
      "epoch": 0.041474654377880185,
      "grad_norm": 3.8454000093459273,
      "learning_rate": 1.3636363636363636e-06,
      "loss": 0.4491,
      "step": 3
    },
    {
      "epoch": 0.055299539170506916,
      "grad_norm": 3.9389330204801944,
      "learning_rate": 1.8181818181818183e-06,
      "loss": 0.495,
      "step": 4
    },
    {
      "epoch": 0.06912442396313365,
      "grad_norm": 3.503496575080232,
      "learning_rate": 2.2727272727272728e-06,
      "loss": 0.4518,
      "step": 5
    },
    {
      "epoch": 0.08294930875576037,
      "grad_norm": 2.5778889382942296,
      "learning_rate": 2.7272727272727272e-06,
      "loss": 0.4245,
      "step": 6
    },
    {
      "epoch": 0.0967741935483871,
      "grad_norm": 2.6973870749604654,
      "learning_rate": 3.181818181818182e-06,
      "loss": 0.4485,
      "step": 7
    },
    {
      "epoch": 0.11059907834101383,
      "grad_norm": 2.087801488195196,
      "learning_rate": 3.6363636363636366e-06,
      "loss": 0.4126,
      "step": 8
    },
    {
      "epoch": 0.12442396313364056,
      "grad_norm": 1.937230458954793,
      "learning_rate": 4.0909090909090915e-06,
      "loss": 0.3782,
      "step": 9
    },
    {
      "epoch": 0.1382488479262673,
      "grad_norm": 1.4844954637777827,
      "learning_rate": 4.5454545454545455e-06,
      "loss": 0.4114,
      "step": 10
    },
    {
      "epoch": 0.15207373271889402,
      "grad_norm": 1.6047087443414494,
      "learning_rate": 5e-06,
      "loss": 0.4018,
      "step": 11
    },
    {
      "epoch": 0.16589861751152074,
      "grad_norm": 1.2519442814402522,
      "learning_rate": 5.4545454545454545e-06,
      "loss": 0.3739,
      "step": 12
    },
    {
      "epoch": 0.17972350230414746,
      "grad_norm": 1.11650299203384,
      "learning_rate": 5.90909090909091e-06,
      "loss": 0.3516,
      "step": 13
    },
    {
      "epoch": 0.1935483870967742,
      "grad_norm": 1.3867124167107647,
      "learning_rate": 6.363636363636364e-06,
      "loss": 0.3873,
      "step": 14
    },
    {
      "epoch": 0.2073732718894009,
      "grad_norm": 1.6171625319611096,
      "learning_rate": 6.818181818181818e-06,
      "loss": 0.3635,
      "step": 15
    },
    {
      "epoch": 0.22119815668202766,
      "grad_norm": 1.4344025164842837,
      "learning_rate": 7.272727272727273e-06,
      "loss": 0.3538,
      "step": 16
    },
    {
      "epoch": 0.2350230414746544,
      "grad_norm": 1.2769447355987351,
      "learning_rate": 7.727272727272727e-06,
      "loss": 0.3322,
      "step": 17
    },
    {
      "epoch": 0.2488479262672811,
      "grad_norm": 1.1544799589527472,
      "learning_rate": 8.181818181818183e-06,
      "loss": 0.3175,
      "step": 18
    },
    {
      "epoch": 0.2626728110599078,
      "grad_norm": 0.9340143537679371,
      "learning_rate": 8.636363636363637e-06,
      "loss": 0.2943,
      "step": 19
    },
    {
      "epoch": 0.2764976958525346,
      "grad_norm": 0.8350563937099799,
      "learning_rate": 9.090909090909091e-06,
      "loss": 0.3269,
      "step": 20
    },
    {
      "epoch": 0.2903225806451613,
      "grad_norm": 0.9427953311385877,
      "learning_rate": 9.545454545454547e-06,
      "loss": 0.3665,
      "step": 21
    },
    {
      "epoch": 0.30414746543778803,
      "grad_norm": 0.8571337937972116,
      "learning_rate": 1e-05,
      "loss": 0.3081,
      "step": 22
    },
    {
      "epoch": 0.31797235023041476,
      "grad_norm": 0.9574970030840929,
      "learning_rate": 9.999344418328161e-06,
      "loss": 0.3216,
      "step": 23
    },
    {
      "epoch": 0.3317972350230415,
      "grad_norm": 0.8240000968027297,
      "learning_rate": 9.997377845227577e-06,
      "loss": 0.322,
      "step": 24
    },
    {
      "epoch": 0.3456221198156682,
      "grad_norm": 0.7442732006332352,
      "learning_rate": 9.994100796397954e-06,
      "loss": 0.3406,
      "step": 25
    },
    {
      "epoch": 0.35944700460829493,
      "grad_norm": 0.862489472956441,
      "learning_rate": 9.98951413118856e-06,
      "loss": 0.342,
      "step": 26
    },
    {
      "epoch": 0.37327188940092165,
      "grad_norm": 0.7897584822354033,
      "learning_rate": 9.983619052372847e-06,
      "loss": 0.3228,
      "step": 27
    },
    {
      "epoch": 0.3870967741935484,
      "grad_norm": 0.8247735964866483,
      "learning_rate": 9.97641710583307e-06,
      "loss": 0.34,
      "step": 28
    },
    {
      "epoch": 0.4009216589861751,
      "grad_norm": 0.6877859922485577,
      "learning_rate": 9.96791018015489e-06,
      "loss": 0.3624,
      "step": 29
    },
    {
      "epoch": 0.4147465437788018,
      "grad_norm": 0.6854976819111457,
      "learning_rate": 9.958100506132127e-06,
      "loss": 0.3362,
      "step": 30
    },
    {
      "epoch": 0.42857142857142855,
      "grad_norm": 0.6780848305353676,
      "learning_rate": 9.946990656181782e-06,
      "loss": 0.3306,
      "step": 31
    },
    {
      "epoch": 0.4423963133640553,
      "grad_norm": 0.8635579814520753,
      "learning_rate": 9.934583543669454e-06,
      "loss": 0.333,
      "step": 32
    },
    {
      "epoch": 0.45622119815668205,
      "grad_norm": 0.7095969477530761,
      "learning_rate": 9.920882422145372e-06,
      "loss": 0.3297,
      "step": 33
    },
    {
      "epoch": 0.4700460829493088,
      "grad_norm": 0.6554896662064854,
      "learning_rate": 9.905890884491196e-06,
      "loss": 0.3474,
      "step": 34
    },
    {
      "epoch": 0.4838709677419355,
      "grad_norm": 0.7233953981673558,
      "learning_rate": 9.889612861977855e-06,
      "loss": 0.3366,
      "step": 35
    },
    {
      "epoch": 0.4976958525345622,
      "grad_norm": 0.7808564766414073,
      "learning_rate": 9.872052623234632e-06,
      "loss": 0.3247,
      "step": 36
    },
    {
      "epoch": 0.511520737327189,
      "grad_norm": 0.7662284906743654,
      "learning_rate": 9.853214773129796e-06,
      "loss": 0.3035,
      "step": 37
    },
    {
      "epoch": 0.5253456221198156,
      "grad_norm": 0.7589864104505978,
      "learning_rate": 9.833104251563058e-06,
      "loss": 0.3222,
      "step": 38
    },
    {
      "epoch": 0.5391705069124424,
      "grad_norm": 0.7364643638285358,
      "learning_rate": 9.811726332170153e-06,
      "loss": 0.3557,
      "step": 39
    },
    {
      "epoch": 0.5529953917050692,
      "grad_norm": 0.7228711702399517,
      "learning_rate": 9.789086620939936e-06,
      "loss": 0.3392,
      "step": 40
    },
    {
      "epoch": 0.5668202764976958,
      "grad_norm": 0.6418796946575632,
      "learning_rate": 9.765191054744305e-06,
      "loss": 0.3305,
      "step": 41
    },
    {
      "epoch": 0.5806451612903226,
      "grad_norm": 0.7360519892248771,
      "learning_rate": 9.740045899781353e-06,
      "loss": 0.3387,
      "step": 42
    },
    {
      "epoch": 0.5944700460829493,
      "grad_norm": 0.7117878571151136,
      "learning_rate": 9.713657749932172e-06,
      "loss": 0.378,
      "step": 43
    },
    {
      "epoch": 0.6082949308755761,
      "grad_norm": 0.6562391589488417,
      "learning_rate": 9.68603352503172e-06,
      "loss": 0.3808,
      "step": 44
    },
    {
      "epoch": 0.6221198156682027,
      "grad_norm": 0.7011857576729418,
      "learning_rate": 9.657180469054213e-06,
      "loss": 0.3561,
      "step": 45
    },
    {
      "epoch": 0.6359447004608295,
      "grad_norm": 0.6588644804163393,
      "learning_rate": 9.627106148213521e-06,
      "loss": 0.3224,
      "step": 46
    },
    {
      "epoch": 0.6497695852534562,
      "grad_norm": 0.6413771673007567,
      "learning_rate": 9.595818448979061e-06,
      "loss": 0.3502,
      "step": 47
    },
    {
      "epoch": 0.663594470046083,
      "grad_norm": 0.6132130927693553,
      "learning_rate": 9.563325576007702e-06,
      "loss": 0.3172,
      "step": 48
    },
    {
      "epoch": 0.6774193548387096,
      "grad_norm": 0.7562554878178558,
      "learning_rate": 9.529636049992235e-06,
      "loss": 0.3078,
      "step": 49
    },
    {
      "epoch": 0.6912442396313364,
      "grad_norm": 0.7025632258958434,
      "learning_rate": 9.494758705426978e-06,
      "loss": 0.3378,
      "step": 50
    },
    {
      "epoch": 0.7050691244239631,
      "grad_norm": 0.6580901818465655,
      "learning_rate": 9.458702688291072e-06,
      "loss": 0.3749,
      "step": 51
    },
    {
      "epoch": 0.7188940092165899,
      "grad_norm": 0.837525662904706,
      "learning_rate": 9.421477453650118e-06,
      "loss": 0.3646,
      "step": 52
    },
    {
      "epoch": 0.7327188940092166,
      "grad_norm": 0.6988946242836359,
      "learning_rate": 9.38309276317674e-06,
      "loss": 0.2952,
      "step": 53
    },
    {
      "epoch": 0.7465437788018433,
      "grad_norm": 0.7088548576538783,
      "learning_rate": 9.343558682590757e-06,
      "loss": 0.3402,
      "step": 54
    },
    {
      "epoch": 0.7603686635944701,
      "grad_norm": 0.6031364330217948,
      "learning_rate": 9.302885579019626e-06,
      "loss": 0.3157,
      "step": 55
    },
    {
      "epoch": 0.7741935483870968,
      "grad_norm": 0.6869976637076074,
      "learning_rate": 9.261084118279846e-06,
      "loss": 0.3724,
      "step": 56
    },
    {
      "epoch": 0.7880184331797235,
      "grad_norm": 0.6362516594505776,
      "learning_rate": 9.218165262080024e-06,
      "loss": 0.2915,
      "step": 57
    },
    {
      "epoch": 0.8018433179723502,
      "grad_norm": 0.6133826251571223,
      "learning_rate": 9.174140265146356e-06,
      "loss": 0.3374,
      "step": 58
    },
    {
      "epoch": 0.815668202764977,
      "grad_norm": 0.5552231441277784,
      "learning_rate": 9.129020672271283e-06,
      "loss": 0.3119,
      "step": 59
    },
    {
      "epoch": 0.8294930875576036,
      "grad_norm": 0.5344163684333119,
      "learning_rate": 9.082818315286054e-06,
      "loss": 0.3139,
      "step": 60
    },
    {
      "epoch": 0.8433179723502304,
      "grad_norm": 0.5908367481260651,
      "learning_rate": 9.035545309958048e-06,
      "loss": 0.3391,
      "step": 61
    },
    {
      "epoch": 0.8571428571428571,
      "grad_norm": 0.6049797134002246,
      "learning_rate": 8.987214052813605e-06,
      "loss": 0.3385,
      "step": 62
    },
    {
      "epoch": 0.8709677419354839,
      "grad_norm": 0.632151936446925,
      "learning_rate": 8.937837217887273e-06,
      "loss": 0.3216,
      "step": 63
    },
    {
      "epoch": 0.8847926267281107,
      "grad_norm": 0.573287253380456,
      "learning_rate": 8.887427753398249e-06,
      "loss": 0.2931,
      "step": 64
    },
    {
      "epoch": 0.8986175115207373,
      "grad_norm": 0.5852983805141764,
      "learning_rate": 8.83599887835493e-06,
      "loss": 0.3172,
      "step": 65
    },
    {
      "epoch": 0.9124423963133641,
      "grad_norm": 0.6363174322514683,
      "learning_rate": 8.783564079088478e-06,
      "loss": 0.3882,
      "step": 66
    },
    {
      "epoch": 0.9262672811059908,
      "grad_norm": 0.6351465289963182,
      "learning_rate": 8.730137105716231e-06,
      "loss": 0.2963,
      "step": 67
    },
    {
      "epoch": 0.9400921658986175,
      "grad_norm": 0.5424528987366075,
      "learning_rate": 8.675731968536004e-06,
      "loss": 0.2824,
      "step": 68
    },
    {
      "epoch": 0.9539170506912442,
      "grad_norm": 0.5850480108903799,
      "learning_rate": 8.620362934352109e-06,
      "loss": 0.3439,
      "step": 69
    },
    {
      "epoch": 0.967741935483871,
      "grad_norm": 0.5924509545505863,
      "learning_rate": 8.564044522734147e-06,
      "loss": 0.345,
      "step": 70
    },
    {
      "epoch": 0.9815668202764977,
      "grad_norm": 0.6356218187245817,
      "learning_rate": 8.506791502209497e-06,
      "loss": 0.3383,
      "step": 71
    },
    {
      "epoch": 0.9953917050691244,
      "grad_norm": 0.5255727075687244,
      "learning_rate": 8.448618886390523e-06,
      "loss": 0.31,
      "step": 72
    },
    {
      "epoch": 1.0092165898617511,
      "grad_norm": 1.075353564084177,
      "learning_rate": 8.389541930037516e-06,
      "loss": 0.4918,
      "step": 73
    },
    {
      "epoch": 1.023041474654378,
      "grad_norm": 0.5411851622437062,
      "learning_rate": 8.329576125058406e-06,
      "loss": 0.284,
      "step": 74
    },
    {
      "epoch": 1.0368663594470047,
      "grad_norm": 0.5438469424766922,
      "learning_rate": 8.268737196446264e-06,
      "loss": 0.2914,
      "step": 75
    },
    {
      "epoch": 1.0506912442396312,
      "grad_norm": 0.5420670331882803,
      "learning_rate": 8.207041098155701e-06,
      "loss": 0.2723,
      "step": 76
    },
    {
      "epoch": 1.064516129032258,
      "grad_norm": 0.6001856622683356,
      "learning_rate": 8.144504008919224e-06,
      "loss": 0.3119,
      "step": 77
    },
    {
      "epoch": 1.0783410138248848,
      "grad_norm": 0.5657652505048809,
      "learning_rate": 8.081142328004638e-06,
      "loss": 0.2598,
      "step": 78
    },
    {
      "epoch": 1.0921658986175116,
      "grad_norm": 0.5575714850260662,
      "learning_rate": 8.016972670914624e-06,
      "loss": 0.322,
      "step": 79
    },
    {
      "epoch": 1.1059907834101383,
      "grad_norm": 0.6083958309172656,
      "learning_rate": 7.952011865029614e-06,
      "loss": 0.2509,
      "step": 80
    },
    {
      "epoch": 1.119815668202765,
      "grad_norm": 0.5266548143398665,
      "learning_rate": 7.886276945195098e-06,
      "loss": 0.2206,
      "step": 81
    },
    {
      "epoch": 1.1336405529953917,
      "grad_norm": 0.5687307229026926,
      "learning_rate": 7.819785149254534e-06,
      "loss": 0.2954,
      "step": 82
    },
    {
      "epoch": 1.1474654377880185,
      "grad_norm": 0.5640940659138813,
      "learning_rate": 7.752553913529019e-06,
      "loss": 0.2628,
      "step": 83
    },
    {
      "epoch": 1.1612903225806452,
      "grad_norm": 0.6680289489599975,
      "learning_rate": 7.68460086824492e-06,
      "loss": 0.3206,
      "step": 84
    },
    {
      "epoch": 1.1751152073732718,
      "grad_norm": 0.5283769448858802,
      "learning_rate": 7.61594383291065e-06,
      "loss": 0.2363,
      "step": 85
    },
    {
      "epoch": 1.1889400921658986,
      "grad_norm": 0.6482385383082108,
      "learning_rate": 7.546600811643816e-06,
      "loss": 0.3353,
      "step": 86
    },
    {
      "epoch": 1.2027649769585254,
      "grad_norm": 0.5264348159834544,
      "learning_rate": 7.476589988449939e-06,
      "loss": 0.2618,
      "step": 87
    },
    {
      "epoch": 1.2165898617511521,
      "grad_norm": 0.5512488141503382,
      "learning_rate": 7.405929722454026e-06,
      "loss": 0.2639,
      "step": 88
    },
    {
      "epoch": 1.230414746543779,
      "grad_norm": 0.5093764966098362,
      "learning_rate": 7.334638543086203e-06,
      "loss": 0.2477,
      "step": 89
    },
    {
      "epoch": 1.2442396313364055,
      "grad_norm": 0.5160996147207031,
      "learning_rate": 7.262735145222696e-06,
      "loss": 0.2437,
      "step": 90
    },
    {
      "epoch": 1.2580645161290323,
      "grad_norm": 0.5507115707918743,
      "learning_rate": 7.190238384283413e-06,
      "loss": 0.2625,
      "step": 91
    },
    {
      "epoch": 1.271889400921659,
      "grad_norm": 0.5596845582953057,
      "learning_rate": 7.117167271287453e-06,
      "loss": 0.2838,
      "step": 92
    },
    {
      "epoch": 1.2857142857142856,
      "grad_norm": 0.5261309041679847,
      "learning_rate": 7.043540967867782e-06,
      "loss": 0.2596,
      "step": 93
    },
    {
      "epoch": 1.2995391705069124,
      "grad_norm": 0.606787789098388,
      "learning_rate": 6.969378781246436e-06,
      "loss": 0.295,
      "step": 94
    },
    {
      "epoch": 1.3133640552995391,
      "grad_norm": 0.5525493866624152,
      "learning_rate": 6.894700159171535e-06,
      "loss": 0.2478,
      "step": 95
    },
    {
      "epoch": 1.327188940092166,
      "grad_norm": 0.4948582007424209,
      "learning_rate": 6.819524684817439e-06,
      "loss": 0.2375,
      "step": 96
    },
    {
      "epoch": 1.3410138248847927,
      "grad_norm": 0.5910117531529238,
      "learning_rate": 6.743872071649411e-06,
      "loss": 0.293,
      "step": 97
    },
    {
      "epoch": 1.3548387096774195,
      "grad_norm": 0.5642544177081138,
      "learning_rate": 6.667762158254104e-06,
      "loss": 0.2759,
      "step": 98
    },
    {
      "epoch": 1.368663594470046,
      "grad_norm": 0.5640787140131098,
      "learning_rate": 6.591214903137221e-06,
      "loss": 0.2919,
      "step": 99
    },
    {
      "epoch": 1.3824884792626728,
      "grad_norm": 0.627883249036726,
      "learning_rate": 6.514250379489754e-06,
      "loss": 0.3236,
      "step": 100
    },
    {
      "epoch": 1.3963133640552996,
      "grad_norm": 0.5777799162176069,
      "learning_rate": 6.436888769924142e-06,
      "loss": 0.2599,
      "step": 101
    },
    {
      "epoch": 1.4101382488479262,
      "grad_norm": 0.5575789159801893,
      "learning_rate": 6.3591503611817155e-06,
      "loss": 0.2881,
      "step": 102
    },
    {
      "epoch": 1.423963133640553,
      "grad_norm": 0.5454339637824865,
      "learning_rate": 6.281055538812861e-06,
      "loss": 0.2528,
      "step": 103
    },
    {
      "epoch": 1.4377880184331797,
      "grad_norm": 0.5785302727121883,
      "learning_rate": 6.202624781831269e-06,
      "loss": 0.2586,
      "step": 104
    },
    {
      "epoch": 1.4516129032258065,
      "grad_norm": 0.5795551004814405,
      "learning_rate": 6.123878657343648e-06,
      "loss": 0.2933,
      "step": 105
    },
    {
      "epoch": 1.4654377880184333,
      "grad_norm": 0.5562868873733925,
      "learning_rate": 6.044837815156377e-06,
      "loss": 0.2682,
      "step": 106
    },
    {
      "epoch": 1.4792626728110598,
      "grad_norm": 0.5788310033089493,
      "learning_rate": 5.965522982360441e-06,
      "loss": 0.2574,
      "step": 107
    },
    {
      "epoch": 1.4930875576036866,
      "grad_norm": 0.5239597133970942,
      "learning_rate": 5.885954957896115e-06,
      "loss": 0.256,
      "step": 108
    },
    {
      "epoch": 1.5069124423963134,
      "grad_norm": 0.5873986776610259,
      "learning_rate": 5.806154607098799e-06,
      "loss": 0.3367,
      "step": 109
    },
    {
      "epoch": 1.52073732718894,
      "grad_norm": 0.573952411610799,
      "learning_rate": 5.726142856227453e-06,
      "loss": 0.2801,
      "step": 110
    },
    {
      "epoch": 1.5345622119815667,
      "grad_norm": 0.5754764716263057,
      "learning_rate": 5.645940686977033e-06,
      "loss": 0.2512,
      "step": 111
    },
    {
      "epoch": 1.5483870967741935,
      "grad_norm": 0.5612364021450903,
      "learning_rate": 5.5655691309764225e-06,
      "loss": 0.2938,
      "step": 112
    },
    {
      "epoch": 1.5622119815668203,
      "grad_norm": 0.5214616228011532,
      "learning_rate": 5.485049264273241e-06,
      "loss": 0.2598,
      "step": 113
    },
    {
      "epoch": 1.576036866359447,
      "grad_norm": 0.5442190086345122,
      "learning_rate": 5.404402201807022e-06,
      "loss": 0.2587,
      "step": 114
    },
    {
      "epoch": 1.5898617511520738,
      "grad_norm": 0.6223597688467173,
      "learning_rate": 5.323649091872179e-06,
      "loss": 0.2917,
      "step": 115
    },
    {
      "epoch": 1.6036866359447006,
      "grad_norm": 0.5267088823599309,
      "learning_rate": 5.242811110572243e-06,
      "loss": 0.2357,
      "step": 116
    },
    {
      "epoch": 1.6175115207373272,
      "grad_norm": 0.5310849049848287,
      "learning_rate": 5.161909456266781e-06,
      "loss": 0.2259,
      "step": 117
    },
    {
      "epoch": 1.631336405529954,
      "grad_norm": 0.5327550485763282,
      "learning_rate": 5.080965344012509e-06,
      "loss": 0.2918,
      "step": 118
    },
    {
      "epoch": 1.6451612903225805,
      "grad_norm": 0.48433766437116366,
      "learning_rate": 5e-06,
      "loss": 0.2057,
      "step": 119
    },
    {
      "epoch": 1.6589861751152073,
      "grad_norm": 0.5201772106049566,
      "learning_rate": 4.919034655987493e-06,
      "loss": 0.2469,
      "step": 120
    },
    {
      "epoch": 1.672811059907834,
      "grad_norm": 0.5728059995080512,
      "learning_rate": 4.838090543733222e-06,
      "loss": 0.2977,
      "step": 121
    },
    {
      "epoch": 1.6866359447004609,
      "grad_norm": 0.5451785777689631,
      "learning_rate": 4.757188889427761e-06,
      "loss": 0.2664,
      "step": 122
    },
    {
      "epoch": 1.7004608294930876,
      "grad_norm": 0.55325878163931,
      "learning_rate": 4.6763509081278215e-06,
      "loss": 0.2938,
      "step": 123
    },
    {
      "epoch": 1.7142857142857144,
      "grad_norm": 0.5482322683176598,
      "learning_rate": 4.59559779819298e-06,
      "loss": 0.2918,
      "step": 124
    },
    {
      "epoch": 1.728110599078341,
      "grad_norm": 0.5317903391669024,
      "learning_rate": 4.51495073572676e-06,
      "loss": 0.2593,
      "step": 125
    },
    {
      "epoch": 1.7419354838709677,
      "grad_norm": 0.5947363067321887,
      "learning_rate": 4.434430869023579e-06,
      "loss": 0.2565,
      "step": 126
    },
    {
      "epoch": 1.7557603686635943,
      "grad_norm": 0.5166626884834731,
      "learning_rate": 4.3540593130229695e-06,
      "loss": 0.2542,
      "step": 127
    },
    {
      "epoch": 1.769585253456221,
      "grad_norm": 0.5716701172322802,
      "learning_rate": 4.27385714377255e-06,
      "loss": 0.2809,
      "step": 128
    },
    {
      "epoch": 1.7834101382488479,
      "grad_norm": 0.548457756610785,
      "learning_rate": 4.1938453929012014e-06,
      "loss": 0.2656,
      "step": 129
    },
    {
      "epoch": 1.7972350230414746,
      "grad_norm": 0.5101224008525175,
      "learning_rate": 4.1140450421038865e-06,
      "loss": 0.2602,
      "step": 130
    },
    {
      "epoch": 1.8110599078341014,
      "grad_norm": 0.5688581933379532,
      "learning_rate": 4.034477017639561e-06,
      "loss": 0.2708,
      "step": 131
    },
    {
      "epoch": 1.8248847926267282,
      "grad_norm": 0.5608572985897213,
      "learning_rate": 3.955162184843625e-06,
      "loss": 0.2728,
      "step": 132
    },
    {
      "epoch": 1.838709677419355,
      "grad_norm": 0.60913972327474,
      "learning_rate": 3.8761213426563546e-06,
      "loss": 0.2763,
      "step": 133
    },
    {
      "epoch": 1.8525345622119815,
      "grad_norm": 0.5498604689214065,
      "learning_rate": 3.7973752181687336e-06,
      "loss": 0.2574,
      "step": 134
    },
    {
      "epoch": 1.8663594470046083,
      "grad_norm": 0.6459792319529313,
      "learning_rate": 3.7189444611871383e-06,
      "loss": 0.2768,
      "step": 135
    },
    {
      "epoch": 1.8801843317972349,
      "grad_norm": 0.6375472108520018,
      "learning_rate": 3.6408496388182857e-06,
      "loss": 0.3172,
      "step": 136
    },
    {
      "epoch": 1.8940092165898617,
      "grad_norm": 0.5323139638568577,
      "learning_rate": 3.5631112300758595e-06,
      "loss": 0.2375,
      "step": 137
    },
    {
      "epoch": 1.9078341013824884,
      "grad_norm": 0.5704026138530017,
      "learning_rate": 3.4857496205102475e-06,
      "loss": 0.2889,
      "step": 138
    },
    {
      "epoch": 1.9216589861751152,
      "grad_norm": 0.5751626829380007,
      "learning_rate": 3.4087850968627823e-06,
      "loss": 0.2668,
      "step": 139
    },
    {
      "epoch": 1.935483870967742,
      "grad_norm": 0.5176599089594356,
      "learning_rate": 3.3322378417458985e-06,
      "loss": 0.2675,
      "step": 140
    },
    {
      "epoch": 1.9493087557603688,
      "grad_norm": 0.605437544433559,
      "learning_rate": 3.2561279283505888e-06,
      "loss": 0.2711,
      "step": 141
    },
    {
      "epoch": 1.9631336405529956,
      "grad_norm": 0.5219033376075164,
      "learning_rate": 3.180475315182563e-06,
      "loss": 0.2665,
      "step": 142
    },
    {
      "epoch": 1.976958525345622,
      "grad_norm": 0.6188916507167108,
      "learning_rate": 3.1052998408284664e-06,
      "loss": 0.2708,
      "step": 143
    },
    {
      "epoch": 1.9907834101382489,
      "grad_norm": 0.5752086490165061,
      "learning_rate": 3.0306212187535653e-06,
      "loss": 0.2798,
      "step": 144
    },
    {
      "epoch": 2.0046082949308754,
      "grad_norm": 0.9737614239717218,
      "learning_rate": 2.9564590321322206e-06,
      "loss": 0.3732,
      "step": 145
    },
    {
      "epoch": 2.0184331797235022,
      "grad_norm": 0.6171348142735417,
      "learning_rate": 2.882832728712551e-06,
      "loss": 0.2203,
      "step": 146
    },
    {
      "epoch": 2.032258064516129,
      "grad_norm": 0.5478831551166238,
      "learning_rate": 2.8097616157165886e-06,
      "loss": 0.196,
      "step": 147
    },
    {
      "epoch": 2.046082949308756,
      "grad_norm": 0.5300984312964834,
      "learning_rate": 2.7372648547773063e-06,
      "loss": 0.2146,
      "step": 148
    },
    {
      "epoch": 2.0599078341013826,
      "grad_norm": 0.527691094200599,
      "learning_rate": 2.665361456913797e-06,
      "loss": 0.2404,
      "step": 149
    },
    {
      "epoch": 2.0737327188940093,
      "grad_norm": 0.5203329902273544,
      "learning_rate": 2.594070277545975e-06,
      "loss": 0.2051,
      "step": 150
    },
    {
      "epoch": 2.087557603686636,
      "grad_norm": 0.5500390166449323,
      "learning_rate": 2.5234100115500643e-06,
      "loss": 0.2648,
      "step": 151
    },
    {
      "epoch": 2.1013824884792625,
      "grad_norm": 0.4893779390529743,
      "learning_rate": 2.4533991883561868e-06,
      "loss": 0.2245,
      "step": 152
    },
    {
      "epoch": 2.1152073732718892,
      "grad_norm": 0.5441384122952133,
      "learning_rate": 2.38405616708935e-06,
      "loss": 0.2162,
      "step": 153
    },
    {
      "epoch": 2.129032258064516,
      "grad_norm": 0.6239869837559884,
      "learning_rate": 2.315399131755081e-06,
      "loss": 0.3197,
      "step": 154
    },
    {
      "epoch": 2.142857142857143,
      "grad_norm": 0.5137901138193296,
      "learning_rate": 2.2474460864709825e-06,
      "loss": 0.2537,
      "step": 155
    },
    {
      "epoch": 2.1566820276497696,
      "grad_norm": 0.5666929612913303,
      "learning_rate": 2.1802148507454675e-06,
      "loss": 0.245,
      "step": 156
    },
    {
      "epoch": 2.1705069124423964,
      "grad_norm": 0.522249344639658,
      "learning_rate": 2.1137230548049042e-06,
      "loss": 0.2074,
      "step": 157
    },
    {
      "epoch": 2.184331797235023,
      "grad_norm": 0.5427924264706615,
      "learning_rate": 2.0479881349703885e-06,
      "loss": 0.1891,
      "step": 158
    },
    {
      "epoch": 2.19815668202765,
      "grad_norm": 0.5395578092838311,
      "learning_rate": 1.983027329085377e-06,
      "loss": 0.2141,
      "step": 159
    },
    {
      "epoch": 2.2119815668202767,
      "grad_norm": 0.6203214366115929,
      "learning_rate": 1.9188576719953635e-06,
      "loss": 0.2395,
      "step": 160
    },
    {
      "epoch": 2.225806451612903,
      "grad_norm": 0.5561057678969209,
      "learning_rate": 1.8554959910807773e-06,
      "loss": 0.2325,
      "step": 161
    },
    {
      "epoch": 2.23963133640553,
      "grad_norm": 0.4975297831721625,
      "learning_rate": 1.7929589018443016e-06,
      "loss": 0.2189,
      "step": 162
    },
    {
      "epoch": 2.2534562211981566,
      "grad_norm": 0.51442024085124,
      "learning_rate": 1.7312628035537388e-06,
      "loss": 0.1941,
      "step": 163
    },
    {
      "epoch": 2.2672811059907834,
      "grad_norm": 0.5689991394989393,
      "learning_rate": 1.6704238749415958e-06,
      "loss": 0.2274,
      "step": 164
    },
    {
      "epoch": 2.28110599078341,
      "grad_norm": 0.5545628564372191,
      "learning_rate": 1.6104580699624839e-06,
      "loss": 0.2582,
      "step": 165
    },
    {
      "epoch": 2.294930875576037,
      "grad_norm": 0.5532036253850108,
      "learning_rate": 1.5513811136094786e-06,
      "loss": 0.2518,
      "step": 166
    },
    {
      "epoch": 2.3087557603686637,
      "grad_norm": 0.4892665339324161,
      "learning_rate": 1.4932084977905043e-06,
      "loss": 0.1982,
      "step": 167
    },
    {
      "epoch": 2.3225806451612905,
      "grad_norm": 0.551947647990794,
      "learning_rate": 1.4359554772658551e-06,
      "loss": 0.2607,
      "step": 168
    },
    {
      "epoch": 2.3364055299539173,
      "grad_norm": 0.5151354916631968,
      "learning_rate": 1.3796370656478936e-06,
      "loss": 0.2072,
      "step": 169
    },
    {
      "epoch": 2.3502304147465436,
      "grad_norm": 0.5685893753938921,
      "learning_rate": 1.3242680314639995e-06,
      "loss": 0.2143,
      "step": 170
    },
    {
      "epoch": 2.3640552995391704,
      "grad_norm": 0.4880909328371241,
      "learning_rate": 1.2698628942837698e-06,
      "loss": 0.1718,
      "step": 171
    },
    {
      "epoch": 2.377880184331797,
      "grad_norm": 0.493208956793857,
      "learning_rate": 1.2164359209115235e-06,
      "loss": 0.1923,
      "step": 172
    },
    {
      "epoch": 2.391705069124424,
      "grad_norm": 0.5582991832891734,
      "learning_rate": 1.164001121645069e-06,
      "loss": 0.2524,
      "step": 173
    },
    {
      "epoch": 2.4055299539170507,
      "grad_norm": 0.505450721310959,
      "learning_rate": 1.1125722466017547e-06,
      "loss": 0.2125,
      "step": 174
    },
    {
      "epoch": 2.4193548387096775,
      "grad_norm": 0.5255011925428328,
      "learning_rate": 1.062162782112729e-06,
      "loss": 0.2456,
      "step": 175
    },
    {
      "epoch": 2.4331797235023043,
      "grad_norm": 0.4989323482397167,
      "learning_rate": 1.012785947186397e-06,
      "loss": 0.1834,
      "step": 176
    },
    {
      "epoch": 2.447004608294931,
      "grad_norm": 0.51435324430214,
      "learning_rate": 9.644546900419533e-07,
      "loss": 0.2056,
      "step": 177
    },
    {
      "epoch": 2.460829493087558,
      "grad_norm": 0.4941625397188085,
      "learning_rate": 9.171816847139447e-07,
      "loss": 0.2044,
      "step": 178
    },
    {
      "epoch": 2.474654377880184,
      "grad_norm": 0.5742862761852323,
      "learning_rate": 8.709793277287182e-07,
      "loss": 0.2334,
      "step": 179
    },
    {
      "epoch": 2.488479262672811,
      "grad_norm": 0.45964093874741846,
      "learning_rate": 8.258597348536452e-07,
      "loss": 0.1885,
      "step": 180
    },
    {
      "epoch": 2.5023041474654377,
      "grad_norm": 0.5736994982571209,
      "learning_rate": 7.818347379199781e-07,
      "loss": 0.2601,
      "step": 181
    },
    {
      "epoch": 2.5161290322580645,
      "grad_norm": 0.4946177722077173,
      "learning_rate": 7.389158817201541e-07,
      "loss": 0.1937,
      "step": 182
    },
    {
      "epoch": 2.5299539170506913,
      "grad_norm": 0.5484333491191469,
      "learning_rate": 6.971144209803738e-07,
      "loss": 0.2487,
      "step": 183
    },
    {
      "epoch": 2.543778801843318,
      "grad_norm": 0.5338015925288597,
      "learning_rate": 6.564413174092443e-07,
      "loss": 0.2134,
      "step": 184
    },
    {
      "epoch": 2.557603686635945,
      "grad_norm": 0.4987205048741818,
      "learning_rate": 6.16907236823262e-07,
      "loss": 0.2115,
      "step": 185
    },
    {
      "epoch": 2.571428571428571,
      "grad_norm": 0.5116461576836463,
      "learning_rate": 5.785225463498828e-07,
      "loss": 0.2161,
      "step": 186
    },
    {
      "epoch": 2.5852534562211984,
      "grad_norm": 0.5549460042068343,
      "learning_rate": 5.412973117089288e-07,
      "loss": 0.249,
      "step": 187
    },
    {
      "epoch": 2.5990783410138247,
      "grad_norm": 0.4625486356955096,
      "learning_rate": 5.05241294573024e-07,
      "loss": 0.1794,
      "step": 188
    },
    {
      "epoch": 2.6129032258064515,
      "grad_norm": 0.5601735455591889,
      "learning_rate": 4.7036395000776556e-07,
      "loss": 0.2483,
      "step": 189
    },
    {
      "epoch": 2.6267281105990783,
      "grad_norm": 0.4873480362305034,
      "learning_rate": 4.3667442399229985e-07,
      "loss": 0.1891,
      "step": 190
    },
    {
      "epoch": 2.640552995391705,
      "grad_norm": 0.5036174624248448,
      "learning_rate": 4.041815510209396e-07,
      "loss": 0.2294,
      "step": 191
    },
    {
      "epoch": 2.654377880184332,
      "grad_norm": 0.5288779116171689,
      "learning_rate": 3.728938517864794e-07,
      "loss": 0.2278,
      "step": 192
    },
    {
      "epoch": 2.6682027649769586,
      "grad_norm": 0.5090457286467068,
      "learning_rate": 3.4281953094578877e-07,
      "loss": 0.2144,
      "step": 193
    },
    {
      "epoch": 2.6820276497695854,
      "grad_norm": 0.5256240057085287,
      "learning_rate": 3.1396647496828245e-07,
      "loss": 0.2517,
      "step": 194
    },
    {
      "epoch": 2.6958525345622117,
      "grad_norm": 0.5629758036361966,
      "learning_rate": 2.8634225006782867e-07,
      "loss": 0.2611,
      "step": 195
    },
    {
      "epoch": 2.709677419354839,
      "grad_norm": 0.5114348887925635,
      "learning_rate": 2.599541002186479e-07,
      "loss": 0.2059,
      "step": 196
    },
    {
      "epoch": 2.7235023041474653,
      "grad_norm": 0.467691586903513,
      "learning_rate": 2.3480894525569564e-07,
      "loss": 0.1805,
      "step": 197
    },
    {
      "epoch": 2.737327188940092,
      "grad_norm": 0.5136153625365067,
      "learning_rate": 2.109133790600648e-07,
      "loss": 0.2479,
      "step": 198
    },
    {
      "epoch": 2.751152073732719,
      "grad_norm": 0.521711267048856,
      "learning_rate": 1.8827366782984913e-07,
      "loss": 0.2388,
      "step": 199
    },
    {
      "epoch": 2.7649769585253456,
      "grad_norm": 0.5264204516952466,
      "learning_rate": 1.6689574843694433e-07,
      "loss": 0.2238,
      "step": 200
    },
    {
      "epoch": 2.7788018433179724,
      "grad_norm": 0.5123667240282413,
      "learning_rate": 1.4678522687020414e-07,
      "loss": 0.1927,
      "step": 201
    },
    {
      "epoch": 2.792626728110599,
      "grad_norm": 0.5109045161700035,
      "learning_rate": 1.2794737676536993e-07,
      "loss": 0.219,
      "step": 202
    },
    {
      "epoch": 2.806451612903226,
      "grad_norm": 0.5412780752972016,
      "learning_rate": 1.1038713802214718e-07,
      "loss": 0.2258,
      "step": 203
    },
    {
      "epoch": 2.8202764976958523,
      "grad_norm": 0.5001657996677221,
      "learning_rate": 9.410911550880474e-08,
      "loss": 0.2272,
      "step": 204
    },
    {
      "epoch": 2.8341013824884795,
      "grad_norm": 0.4830345984951105,
      "learning_rate": 7.911757785462882e-08,
      "loss": 0.1845,
      "step": 205
    },
    {
      "epoch": 2.847926267281106,
      "grad_norm": 0.4754821970835569,
      "learning_rate": 6.54164563305465e-08,
      "loss": 0.1772,
      "step": 206
    },
    {
      "epoch": 2.8617511520737327,
      "grad_norm": 0.5180006446663228,
      "learning_rate": 5.3009343818219985e-08,
      "loss": 0.2429,
      "step": 207
    },
    {
      "epoch": 2.8755760368663594,
      "grad_norm": 0.4922784862533948,
      "learning_rate": 4.189949386787462e-08,
      "loss": 0.2,
      "step": 208
    },
    {
      "epoch": 2.889400921658986,
      "grad_norm": 0.5733544961657199,
      "learning_rate": 3.2089819845111946e-08,
      "loss": 0.2812,
      "step": 209
    },
    {
      "epoch": 2.903225806451613,
      "grad_norm": 0.5752601209401671,
      "learning_rate": 2.358289416693027e-08,
      "loss": 0.2579,
      "step": 210
    },
    {
      "epoch": 2.9170506912442398,
      "grad_norm": 0.4757128270112064,
      "learning_rate": 1.6380947627153143e-08,
      "loss": 0.2217,
      "step": 211
    },
    {
      "epoch": 2.9308755760368665,
      "grad_norm": 0.5139378693377304,
      "learning_rate": 1.0485868811441757e-08,
      "loss": 0.2273,
      "step": 212
    },
    {
      "epoch": 2.944700460829493,
      "grad_norm": 0.5102846639395582,
      "learning_rate": 5.899203602046655e-09,
      "loss": 0.2158,
      "step": 213
    },
    {
      "epoch": 2.9585253456221197,
      "grad_norm": 0.5057982261906225,
      "learning_rate": 2.6221547724253337e-09,
      "loss": 0.2063,
      "step": 214
    },
    {
      "epoch": 2.9723502304147464,
      "grad_norm": 0.5645707694001406,
      "learning_rate": 6.555816718389896e-10,
      "loss": 0.2588,
      "step": 215
    },
    {
      "epoch": 2.986175115207373,
      "grad_norm": 0.48505546530830695,
      "learning_rate": 0.0,
      "loss": 0.2179,
      "step": 216
    },
    {
      "epoch": 2.986175115207373,
      "step": 216,
      "total_flos": 32031101919232.0,
      "train_loss": 0.28372171659160544,
      "train_runtime": 1010.9103,
      "train_samples_per_second": 20.539,
      "train_steps_per_second": 0.214
    }
  ],
  "logging_steps": 1,
  "max_steps": 216,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 32031101919232.0,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}