{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.984802431610943,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.004052684903748734,
      "grad_norm": 6.167648898391557,
      "learning_rate": 3.2520325203252037e-07,
      "loss": 1.0318,
      "step": 1
    },
    {
      "epoch": 0.008105369807497468,
      "grad_norm": 6.347812495179655,
      "learning_rate": 6.504065040650407e-07,
      "loss": 1.056,
      "step": 2
    },
    {
      "epoch": 0.0121580547112462,
      "grad_norm": 6.39086695112969,
      "learning_rate": 9.75609756097561e-07,
      "loss": 1.0683,
      "step": 3
    },
    {
      "epoch": 0.016210739614994935,
      "grad_norm": 5.868504359059768,
      "learning_rate": 1.3008130081300815e-06,
      "loss": 1.0039,
      "step": 4
    },
    {
      "epoch": 0.020263424518743668,
      "grad_norm": 5.95258603492728,
      "learning_rate": 1.6260162601626018e-06,
      "loss": 1.0091,
      "step": 5
    },
    {
      "epoch": 0.0243161094224924,
      "grad_norm": 5.686737380141647,
      "learning_rate": 1.951219512195122e-06,
      "loss": 1.0146,
      "step": 6
    },
    {
      "epoch": 0.028368794326241134,
      "grad_norm": 4.845316721839779,
      "learning_rate": 2.2764227642276426e-06,
      "loss": 1.0164,
      "step": 7
    },
    {
      "epoch": 0.03242147922998987,
      "grad_norm": 4.437111552313984,
      "learning_rate": 2.601626016260163e-06,
      "loss": 0.9559,
      "step": 8
    },
    {
      "epoch": 0.0364741641337386,
      "grad_norm": 2.331650380020354,
      "learning_rate": 2.926829268292683e-06,
      "loss": 0.9222,
      "step": 9
    },
    {
      "epoch": 0.040526849037487336,
      "grad_norm": 2.0732459621608426,
      "learning_rate": 3.2520325203252037e-06,
      "loss": 0.9407,
      "step": 10
    },
    {
      "epoch": 0.044579533941236066,
      "grad_norm": 2.0234048735325945,
      "learning_rate": 3.577235772357724e-06,
      "loss": 0.9354,
      "step": 11
    },
    {
      "epoch": 0.0486322188449848,
      "grad_norm": 4.215254282831698,
      "learning_rate": 3.902439024390244e-06,
      "loss": 0.9022,
      "step": 12
    },
    {
      "epoch": 0.05268490374873354,
      "grad_norm": 4.5868341762511715,
      "learning_rate": 4.227642276422765e-06,
      "loss": 0.9349,
      "step": 13
    },
    {
      "epoch": 0.05673758865248227,
      "grad_norm": 4.433058184612854,
      "learning_rate": 4.552845528455285e-06,
      "loss": 0.9253,
      "step": 14
    },
    {
      "epoch": 0.060790273556231005,
      "grad_norm": 4.266424597073819,
      "learning_rate": 4.8780487804878055e-06,
      "loss": 0.8974,
      "step": 15
    },
    {
      "epoch": 0.06484295845997974,
      "grad_norm": 3.1480482357604633,
      "learning_rate": 5.203252032520326e-06,
      "loss": 0.8728,
      "step": 16
    },
    {
      "epoch": 0.06889564336372847,
      "grad_norm": 3.041705383649145,
      "learning_rate": 5.528455284552846e-06,
      "loss": 0.8685,
      "step": 17
    },
    {
      "epoch": 0.0729483282674772,
      "grad_norm": 2.6233469042456554,
      "learning_rate": 5.853658536585366e-06,
      "loss": 0.8515,
      "step": 18
    },
    {
      "epoch": 0.07700101317122594,
      "grad_norm": 2.163027022931509,
      "learning_rate": 6.178861788617887e-06,
      "loss": 0.8091,
      "step": 19
    },
    {
      "epoch": 0.08105369807497467,
      "grad_norm": 1.8410606107564447,
      "learning_rate": 6.504065040650407e-06,
      "loss": 0.824,
      "step": 20
    },
    {
      "epoch": 0.0851063829787234,
      "grad_norm": 1.5676195426530077,
      "learning_rate": 6.829268292682928e-06,
      "loss": 0.8095,
      "step": 21
    },
    {
      "epoch": 0.08915906788247213,
      "grad_norm": 1.6591355070204865,
      "learning_rate": 7.154471544715448e-06,
      "loss": 0.8001,
      "step": 22
    },
    {
      "epoch": 0.09321175278622088,
      "grad_norm": 1.7062461112445475,
      "learning_rate": 7.4796747967479676e-06,
      "loss": 0.762,
      "step": 23
    },
    {
      "epoch": 0.0972644376899696,
      "grad_norm": 1.6812635640813298,
      "learning_rate": 7.804878048780489e-06,
      "loss": 0.7807,
      "step": 24
    },
    {
      "epoch": 0.10131712259371833,
      "grad_norm": 1.1682946514885548,
      "learning_rate": 8.130081300813009e-06,
      "loss": 0.7836,
      "step": 25
    },
    {
      "epoch": 0.10536980749746708,
      "grad_norm": 1.175780955362441,
      "learning_rate": 8.45528455284553e-06,
      "loss": 0.7598,
      "step": 26
    },
    {
      "epoch": 0.1094224924012158,
      "grad_norm": 1.1947293101442185,
      "learning_rate": 8.78048780487805e-06,
      "loss": 0.7324,
      "step": 27
    },
    {
      "epoch": 0.11347517730496454,
      "grad_norm": 1.1439924778893764,
      "learning_rate": 9.10569105691057e-06,
      "loss": 0.7708,
      "step": 28
    },
    {
      "epoch": 0.11752786220871327,
      "grad_norm": 0.8926807480526097,
      "learning_rate": 9.43089430894309e-06,
      "loss": 0.7742,
      "step": 29
    },
    {
      "epoch": 0.12158054711246201,
      "grad_norm": 0.8910934397226351,
      "learning_rate": 9.756097560975611e-06,
      "loss": 0.746,
      "step": 30
    },
    {
      "epoch": 0.12563323201621074,
      "grad_norm": 1.017515700446468,
      "learning_rate": 1.008130081300813e-05,
      "loss": 0.7341,
      "step": 31
    },
    {
      "epoch": 0.12968591691995948,
      "grad_norm": 0.9219012702934337,
      "learning_rate": 1.0406504065040652e-05,
      "loss": 0.7175,
      "step": 32
    },
    {
      "epoch": 0.1337386018237082,
      "grad_norm": 0.7067857437587276,
      "learning_rate": 1.0731707317073172e-05,
      "loss": 0.7532,
      "step": 33
    },
    {
      "epoch": 0.13779128672745694,
      "grad_norm": 0.8234083660446192,
      "learning_rate": 1.1056910569105692e-05,
      "loss": 0.7559,
      "step": 34
    },
    {
      "epoch": 0.14184397163120568,
      "grad_norm": 0.7171954009438662,
      "learning_rate": 1.1382113821138213e-05,
      "loss": 0.7137,
      "step": 35
    },
    {
      "epoch": 0.1458966565349544,
      "grad_norm": 1.8089723082370195,
      "learning_rate": 1.1707317073170731e-05,
      "loss": 0.7554,
      "step": 36
    },
    {
      "epoch": 0.14994934143870314,
      "grad_norm": 0.6242734911079652,
      "learning_rate": 1.2032520325203254e-05,
      "loss": 0.7029,
      "step": 37
    },
    {
      "epoch": 0.1540020263424519,
      "grad_norm": 0.6777927051895012,
      "learning_rate": 1.2357723577235774e-05,
      "loss": 0.7363,
      "step": 38
    },
    {
      "epoch": 0.1580547112462006,
      "grad_norm": 0.642939877950786,
      "learning_rate": 1.2682926829268294e-05,
      "loss": 0.7587,
      "step": 39
    },
    {
      "epoch": 0.16210739614994935,
      "grad_norm": 0.5930362057557689,
      "learning_rate": 1.3008130081300815e-05,
      "loss": 0.7029,
      "step": 40
    },
    {
      "epoch": 0.1661600810536981,
      "grad_norm": 0.6084463235116186,
      "learning_rate": 1.3333333333333333e-05,
      "loss": 0.7199,
      "step": 41
    },
    {
      "epoch": 0.1702127659574468,
      "grad_norm": 0.6053344982178421,
      "learning_rate": 1.3658536585365855e-05,
      "loss": 0.7196,
      "step": 42
    },
    {
      "epoch": 0.17426545086119555,
      "grad_norm": 0.5951326842648802,
      "learning_rate": 1.3983739837398376e-05,
      "loss": 0.6796,
      "step": 43
    },
    {
      "epoch": 0.17831813576494426,
      "grad_norm": 0.5967129397518233,
      "learning_rate": 1.4308943089430896e-05,
      "loss": 0.7163,
      "step": 44
    },
    {
      "epoch": 0.182370820668693,
      "grad_norm": 0.5769137675207878,
      "learning_rate": 1.4634146341463415e-05,
      "loss": 0.6974,
      "step": 45
    },
    {
      "epoch": 0.18642350557244175,
      "grad_norm": 0.5847969054311509,
      "learning_rate": 1.4959349593495935e-05,
      "loss": 0.6994,
      "step": 46
    },
    {
      "epoch": 0.19047619047619047,
      "grad_norm": 0.5888504373368105,
      "learning_rate": 1.528455284552846e-05,
      "loss": 0.7094,
      "step": 47
    },
    {
      "epoch": 0.1945288753799392,
      "grad_norm": 0.6145465354226735,
      "learning_rate": 1.5609756097560978e-05,
      "loss": 0.7169,
      "step": 48
    },
    {
      "epoch": 0.19858156028368795,
      "grad_norm": 0.6394177359489411,
      "learning_rate": 1.5934959349593496e-05,
      "loss": 0.681,
      "step": 49
    },
    {
      "epoch": 0.20263424518743667,
      "grad_norm": 0.6273074421944015,
      "learning_rate": 1.6260162601626018e-05,
      "loss": 0.6929,
      "step": 50
    },
    {
      "epoch": 0.2066869300911854,
      "grad_norm": 0.6349928921016353,
      "learning_rate": 1.6585365853658537e-05,
      "loss": 0.6858,
      "step": 51
    },
    {
      "epoch": 0.21073961499493415,
      "grad_norm": 0.680203067184082,
      "learning_rate": 1.691056910569106e-05,
      "loss": 0.6731,
      "step": 52
    },
    {
      "epoch": 0.21479229989868287,
      "grad_norm": 0.6400603786492668,
      "learning_rate": 1.7235772357723578e-05,
      "loss": 0.6882,
      "step": 53
    },
    {
      "epoch": 0.2188449848024316,
      "grad_norm": 0.7345937234564793,
      "learning_rate": 1.75609756097561e-05,
      "loss": 0.6943,
      "step": 54
    },
    {
      "epoch": 0.22289766970618036,
      "grad_norm": 0.598734191633441,
      "learning_rate": 1.788617886178862e-05,
      "loss": 0.6752,
      "step": 55
    },
    {
      "epoch": 0.22695035460992907,
      "grad_norm": 0.6324661468973752,
      "learning_rate": 1.821138211382114e-05,
      "loss": 0.6975,
      "step": 56
    },
    {
      "epoch": 0.23100303951367782,
      "grad_norm": 0.5859874563149359,
      "learning_rate": 1.8536585365853663e-05,
      "loss": 0.6812,
      "step": 57
    },
    {
      "epoch": 0.23505572441742653,
      "grad_norm": 0.6259138567499695,
      "learning_rate": 1.886178861788618e-05,
      "loss": 0.7042,
      "step": 58
    },
    {
      "epoch": 0.23910840932117527,
      "grad_norm": 0.6111063926683358,
      "learning_rate": 1.91869918699187e-05,
      "loss": 0.6817,
      "step": 59
    },
    {
      "epoch": 0.24316109422492402,
      "grad_norm": 0.7618540826978784,
      "learning_rate": 1.9512195121951222e-05,
      "loss": 0.6929,
      "step": 60
    },
    {
      "epoch": 0.24721377912867273,
      "grad_norm": 0.6426120169369418,
      "learning_rate": 1.983739837398374e-05,
      "loss": 0.7019,
      "step": 61
    },
    {
      "epoch": 0.2512664640324215,
      "grad_norm": 0.719650262344749,
      "learning_rate": 2.016260162601626e-05,
      "loss": 0.7096,
      "step": 62
    },
    {
      "epoch": 0.2553191489361702,
      "grad_norm": 0.5713084777978398,
      "learning_rate": 2.048780487804878e-05,
      "loss": 0.706,
      "step": 63
    },
    {
      "epoch": 0.25937183383991896,
      "grad_norm": 0.7914254335309052,
      "learning_rate": 2.0813008130081303e-05,
      "loss": 0.6981,
      "step": 64
    },
    {
      "epoch": 0.2634245187436677,
      "grad_norm": 0.6243748988190966,
      "learning_rate": 2.1138211382113822e-05,
      "loss": 0.6917,
      "step": 65
    },
    {
      "epoch": 0.2674772036474164,
      "grad_norm": 0.6918979427651847,
      "learning_rate": 2.1463414634146344e-05,
      "loss": 0.6954,
      "step": 66
    },
    {
      "epoch": 0.27152988855116517,
      "grad_norm": 0.6251726885465301,
      "learning_rate": 2.1788617886178863e-05,
      "loss": 0.6932,
      "step": 67
    },
    {
      "epoch": 0.2755825734549139,
      "grad_norm": 0.6289927899059928,
      "learning_rate": 2.2113821138211385e-05,
      "loss": 0.6839,
      "step": 68
    },
    {
      "epoch": 0.2796352583586626,
      "grad_norm": 0.823615779588664,
      "learning_rate": 2.2439024390243907e-05,
      "loss": 0.7047,
      "step": 69
    },
    {
      "epoch": 0.28368794326241137,
      "grad_norm": 0.5798634061506487,
      "learning_rate": 2.2764227642276426e-05,
      "loss": 0.6794,
      "step": 70
    },
    {
      "epoch": 0.2877406281661601,
      "grad_norm": 0.8740507161294093,
      "learning_rate": 2.3089430894308948e-05,
      "loss": 0.6917,
      "step": 71
    },
    {
      "epoch": 0.2917933130699088,
      "grad_norm": 0.6847316537275833,
      "learning_rate": 2.3414634146341463e-05,
      "loss": 0.7188,
      "step": 72
    },
    {
      "epoch": 0.29584599797365757,
      "grad_norm": 0.6717146507509836,
      "learning_rate": 2.3739837398373985e-05,
      "loss": 0.7037,
      "step": 73
    },
    {
      "epoch": 0.2998986828774063,
      "grad_norm": 0.7287082514361286,
      "learning_rate": 2.4065040650406507e-05,
      "loss": 0.6878,
      "step": 74
    },
    {
      "epoch": 0.303951367781155,
      "grad_norm": 0.7402655932944511,
      "learning_rate": 2.4390243902439026e-05,
      "loss": 0.6957,
      "step": 75
    },
    {
      "epoch": 0.3080040526849038,
      "grad_norm": 0.7433179548746084,
      "learning_rate": 2.4715447154471548e-05,
      "loss": 0.6985,
      "step": 76
    },
    {
      "epoch": 0.3120567375886525,
      "grad_norm": 0.6223361686454897,
      "learning_rate": 2.5040650406504066e-05,
      "loss": 0.6781,
      "step": 77
    },
    {
      "epoch": 0.3161094224924012,
      "grad_norm": 0.7141711932588113,
      "learning_rate": 2.536585365853659e-05,
      "loss": 0.6629,
      "step": 78
    },
    {
      "epoch": 0.32016210739615,
      "grad_norm": 0.7514260979136724,
      "learning_rate": 2.569105691056911e-05,
      "loss": 0.6631,
      "step": 79
    },
    {
      "epoch": 0.3242147922998987,
      "grad_norm": 0.932344890017779,
      "learning_rate": 2.601626016260163e-05,
      "loss": 0.649,
      "step": 80
    },
    {
      "epoch": 0.3282674772036474,
      "grad_norm": 0.7445884228457995,
      "learning_rate": 2.634146341463415e-05,
      "loss": 0.6814,
      "step": 81
    },
    {
      "epoch": 0.3323201621073962,
      "grad_norm": 1.033752060115608,
      "learning_rate": 2.6666666666666667e-05,
      "loss": 0.6783,
      "step": 82
    },
    {
      "epoch": 0.3363728470111449,
      "grad_norm": 0.7402027485647631,
      "learning_rate": 2.699186991869919e-05,
      "loss": 0.6797,
      "step": 83
    },
    {
      "epoch": 0.3404255319148936,
      "grad_norm": 0.9693373093096629,
      "learning_rate": 2.731707317073171e-05,
      "loss": 0.7093,
      "step": 84
    },
    {
      "epoch": 0.3444782168186423,
      "grad_norm": 0.8214125176946634,
      "learning_rate": 2.764227642276423e-05,
      "loss": 0.6732,
      "step": 85
    },
    {
      "epoch": 0.3485309017223911,
      "grad_norm": 0.8055834324112491,
      "learning_rate": 2.796747967479675e-05,
      "loss": 0.6481,
      "step": 86
    },
    {
      "epoch": 0.3525835866261398,
      "grad_norm": 0.8709839008437813,
      "learning_rate": 2.829268292682927e-05,
      "loss": 0.6934,
      "step": 87
    },
    {
      "epoch": 0.3566362715298885,
      "grad_norm": 0.7352331686966206,
      "learning_rate": 2.8617886178861792e-05,
      "loss": 0.6984,
      "step": 88
    },
    {
      "epoch": 0.3606889564336373,
      "grad_norm": 0.7988466853595686,
      "learning_rate": 2.8943089430894314e-05,
      "loss": 0.6802,
      "step": 89
    },
    {
      "epoch": 0.364741641337386,
      "grad_norm": 0.7669342573379789,
      "learning_rate": 2.926829268292683e-05,
      "loss": 0.6437,
      "step": 90
    },
    {
      "epoch": 0.36879432624113473,
      "grad_norm": 0.717278327199655,
      "learning_rate": 2.959349593495935e-05,
      "loss": 0.6919,
      "step": 91
    },
    {
      "epoch": 0.3728470111448835,
      "grad_norm": 0.7855973002352786,
      "learning_rate": 2.991869918699187e-05,
      "loss": 0.6922,
      "step": 92
    },
    {
      "epoch": 0.3768996960486322,
      "grad_norm": 0.721690277100239,
      "learning_rate": 3.0243902439024392e-05,
      "loss": 0.6875,
      "step": 93
    },
    {
      "epoch": 0.38095238095238093,
      "grad_norm": 0.7021400060547499,
      "learning_rate": 3.056910569105692e-05,
      "loss": 0.6734,
      "step": 94
    },
    {
      "epoch": 0.3850050658561297,
      "grad_norm": 0.7833322532162378,
      "learning_rate": 3.089430894308943e-05,
      "loss": 0.673,
      "step": 95
    },
    {
      "epoch": 0.3890577507598784,
      "grad_norm": 0.9293861793205748,
      "learning_rate": 3.1219512195121955e-05,
      "loss": 0.6742,
      "step": 96
    },
    {
      "epoch": 0.39311043566362713,
      "grad_norm": 1.0306593508149624,
      "learning_rate": 3.154471544715447e-05,
      "loss": 0.6684,
      "step": 97
    },
    {
      "epoch": 0.3971631205673759,
      "grad_norm": 0.9365742148592389,
      "learning_rate": 3.186991869918699e-05,
      "loss": 0.6833,
      "step": 98
    },
    {
      "epoch": 0.4012158054711246,
      "grad_norm": 0.875421957183987,
      "learning_rate": 3.2195121951219514e-05,
      "loss": 0.6903,
      "step": 99
    },
    {
      "epoch": 0.40526849037487334,
      "grad_norm": 0.8070244209681067,
      "learning_rate": 3.2520325203252037e-05,
      "loss": 0.6965,
      "step": 100
    },
    {
      "epoch": 0.4093211752786221,
      "grad_norm": 1.2072021038272314,
      "learning_rate": 3.284552845528456e-05,
      "loss": 0.6604,
      "step": 101
    },
    {
      "epoch": 0.4133738601823708,
      "grad_norm": 1.0385376009974099,
      "learning_rate": 3.3170731707317074e-05,
      "loss": 0.6637,
      "step": 102
    },
    {
      "epoch": 0.41742654508611954,
      "grad_norm": 1.024204734412347,
      "learning_rate": 3.3495934959349596e-05,
      "loss": 0.6886,
      "step": 103
    },
    {
      "epoch": 0.4214792299898683,
      "grad_norm": 1.0505050096487232,
      "learning_rate": 3.382113821138212e-05,
      "loss": 0.6787,
      "step": 104
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.9571980865461368,
      "learning_rate": 3.414634146341463e-05,
      "loss": 0.6926,
      "step": 105
    },
    {
      "epoch": 0.42958459979736574,
      "grad_norm": 1.0830581671465136,
      "learning_rate": 3.4471544715447155e-05,
      "loss": 0.656,
      "step": 106
    },
    {
      "epoch": 0.4336372847011145,
      "grad_norm": 0.902566169702378,
      "learning_rate": 3.479674796747968e-05,
      "loss": 0.6574,
      "step": 107
    },
    {
      "epoch": 0.4376899696048632,
      "grad_norm": 0.9224309905110888,
      "learning_rate": 3.51219512195122e-05,
      "loss": 0.652,
      "step": 108
    },
    {
      "epoch": 0.44174265450861194,
      "grad_norm": 0.730238002539342,
      "learning_rate": 3.544715447154472e-05,
      "loss": 0.6985,
      "step": 109
    },
    {
      "epoch": 0.4457953394123607,
      "grad_norm": 0.8368493602637782,
      "learning_rate": 3.577235772357724e-05,
      "loss": 0.6627,
      "step": 110
    },
    {
      "epoch": 0.44984802431610943,
      "grad_norm": 0.7509524426843108,
      "learning_rate": 3.609756097560976e-05,
      "loss": 0.6851,
      "step": 111
    },
    {
      "epoch": 0.45390070921985815,
      "grad_norm": 0.7866920759629977,
      "learning_rate": 3.642276422764228e-05,
      "loss": 0.668,
      "step": 112
    },
    {
      "epoch": 0.4579533941236069,
      "grad_norm": 0.6161650184390867,
      "learning_rate": 3.67479674796748e-05,
      "loss": 0.671,
      "step": 113
    },
    {
      "epoch": 0.46200607902735563,
      "grad_norm": 0.8130424855867256,
      "learning_rate": 3.7073170731707325e-05,
      "loss": 0.7099,
      "step": 114
    },
    {
      "epoch": 0.46605876393110435,
      "grad_norm": 1.2842022052356359,
      "learning_rate": 3.739837398373984e-05,
      "loss": 0.6465,
      "step": 115
    },
    {
      "epoch": 0.47011144883485306,
      "grad_norm": 0.876755312175879,
      "learning_rate": 3.772357723577236e-05,
      "loss": 0.6921,
      "step": 116
    },
    {
      "epoch": 0.47416413373860183,
      "grad_norm": 0.8419998518584061,
      "learning_rate": 3.804878048780488e-05,
      "loss": 0.6805,
      "step": 117
    },
    {
      "epoch": 0.47821681864235055,
      "grad_norm": 0.9599677820203032,
      "learning_rate": 3.83739837398374e-05,
      "loss": 0.6978,
      "step": 118
    },
    {
      "epoch": 0.48226950354609927,
      "grad_norm": 0.811924898172815,
      "learning_rate": 3.869918699186992e-05,
      "loss": 0.6739,
      "step": 119
    },
    {
      "epoch": 0.48632218844984804,
      "grad_norm": 0.7688237600464115,
      "learning_rate": 3.9024390243902444e-05,
      "loss": 0.6682,
      "step": 120
    },
    {
      "epoch": 0.49037487335359675,
      "grad_norm": 0.9730471834233246,
      "learning_rate": 3.9349593495934966e-05,
      "loss": 0.6755,
      "step": 121
    },
    {
      "epoch": 0.49442755825734547,
      "grad_norm": 0.9012683974131351,
      "learning_rate": 3.967479674796748e-05,
      "loss": 0.677,
      "step": 122
    },
    {
      "epoch": 0.49848024316109424,
      "grad_norm": 0.8187007741761511,
      "learning_rate": 4e-05,
      "loss": 0.6516,
      "step": 123
    },
    {
      "epoch": 0.502532928064843,
      "grad_norm": 0.9607905484027193,
      "learning_rate": 3.999991946137476e-05,
      "loss": 0.6738,
      "step": 124
    },
    {
      "epoch": 0.5065856129685917,
      "grad_norm": 0.7829361772146529,
      "learning_rate": 3.999967784614766e-05,
      "loss": 0.6568,
      "step": 125
    },
    {
      "epoch": 0.5106382978723404,
      "grad_norm": 0.5811815909928657,
      "learning_rate": 3.9999275156264656e-05,
      "loss": 0.6621,
      "step": 126
    },
    {
      "epoch": 0.5146909827760892,
      "grad_norm": 0.8020123706445362,
      "learning_rate": 3.999871139496895e-05,
      "loss": 0.6638,
      "step": 127
    },
    {
      "epoch": 0.5187436676798379,
      "grad_norm": 0.8275834768265873,
      "learning_rate": 3.9997986566800995e-05,
      "loss": 0.6574,
      "step": 128
    },
    {
      "epoch": 0.5227963525835866,
      "grad_norm": 0.6936890922275888,
      "learning_rate": 3.999710067759846e-05,
      "loss": 0.6796,
      "step": 129
    },
    {
      "epoch": 0.5268490374873354,
      "grad_norm": 0.6258078657526079,
      "learning_rate": 3.999605373449617e-05,
      "loss": 0.6708,
      "step": 130
    },
    {
      "epoch": 0.5309017223910841,
      "grad_norm": 0.8228076819674753,
      "learning_rate": 3.9994845745926075e-05,
      "loss": 0.6981,
      "step": 131
    },
    {
      "epoch": 0.5349544072948328,
      "grad_norm": 0.9133988210943939,
      "learning_rate": 3.999347672161713e-05,
      "loss": 0.6566,
      "step": 132
    },
    {
      "epoch": 0.5390070921985816,
      "grad_norm": 0.9330195279217748,
      "learning_rate": 3.999194667259528e-05,
      "loss": 0.6913,
      "step": 133
    },
    {
      "epoch": 0.5430597771023303,
      "grad_norm": 0.6280869255027735,
      "learning_rate": 3.999025561118334e-05,
      "loss": 0.6616,
      "step": 134
    },
    {
      "epoch": 0.547112462006079,
      "grad_norm": 0.6921256242749602,
      "learning_rate": 3.998840355100086e-05,
      "loss": 0.6735,
      "step": 135
    },
    {
      "epoch": 0.5511651469098278,
      "grad_norm": 0.8024334357178992,
      "learning_rate": 3.998639050696409e-05,
      "loss": 0.6697,
      "step": 136
    },
    {
      "epoch": 0.5552178318135765,
      "grad_norm": 0.7389507705197214,
      "learning_rate": 3.998421649528582e-05,
      "loss": 0.6622,
      "step": 137
    },
    {
      "epoch": 0.5592705167173252,
      "grad_norm": 0.5777396347394138,
      "learning_rate": 3.9981881533475234e-05,
      "loss": 0.6655,
      "step": 138
    },
    {
      "epoch": 0.563323201621074,
      "grad_norm": 0.7202553964392447,
      "learning_rate": 3.997938564033779e-05,
      "loss": 0.6589,
      "step": 139
    },
    {
      "epoch": 0.5673758865248227,
      "grad_norm": 0.759789149926538,
      "learning_rate": 3.9976728835975064e-05,
      "loss": 0.6641,
      "step": 140
    },
    {
      "epoch": 0.5714285714285714,
      "grad_norm": 0.59814266045635,
      "learning_rate": 3.9973911141784605e-05,
      "loss": 0.6854,
      "step": 141
    },
    {
      "epoch": 0.5754812563323202,
      "grad_norm": 0.5694724611486763,
      "learning_rate": 3.997093258045973e-05,
      "loss": 0.6522,
      "step": 142
    },
    {
      "epoch": 0.5795339412360689,
      "grad_norm": 0.7542096179962985,
      "learning_rate": 3.996779317598936e-05,
      "loss": 0.6679,
      "step": 143
    },
    {
      "epoch": 0.5835866261398176,
      "grad_norm": 0.7869600278819503,
      "learning_rate": 3.996449295365782e-05,
      "loss": 0.6554,
      "step": 144
    },
    {
      "epoch": 0.5876393110435664,
      "grad_norm": 0.5140962741677643,
      "learning_rate": 3.996103194004467e-05,
      "loss": 0.6918,
      "step": 145
    },
    {
      "epoch": 0.5916919959473151,
      "grad_norm": 0.6525220791645695,
      "learning_rate": 3.995741016302441e-05,
      "loss": 0.7071,
      "step": 146
    },
    {
      "epoch": 0.5957446808510638,
      "grad_norm": 0.7237180596476807,
      "learning_rate": 3.9953627651766364e-05,
      "loss": 0.674,
      "step": 147
    },
    {
      "epoch": 0.5997973657548126,
      "grad_norm": 0.688475139492635,
      "learning_rate": 3.9949684436734325e-05,
      "loss": 0.6716,
      "step": 148
    },
    {
      "epoch": 0.6038500506585613,
      "grad_norm": 0.6630390900312946,
      "learning_rate": 3.994558054968643e-05,
      "loss": 0.6634,
      "step": 149
    },
    {
      "epoch": 0.60790273556231,
      "grad_norm": 0.6136144334691922,
      "learning_rate": 3.994131602367481e-05,
      "loss": 0.6577,
      "step": 150
    },
    {
      "epoch": 0.6119554204660588,
      "grad_norm": 0.7659164754226812,
      "learning_rate": 3.9936890893045376e-05,
      "loss": 0.6319,
      "step": 151
    },
    {
      "epoch": 0.6160081053698075,
      "grad_norm": 0.5812014201789653,
      "learning_rate": 3.993230519343752e-05,
      "loss": 0.6612,
      "step": 152
    },
    {
      "epoch": 0.6200607902735562,
      "grad_norm": 0.7309971180003236,
      "learning_rate": 3.992755896178383e-05,
      "loss": 0.6586,
      "step": 153
    },
    {
      "epoch": 0.624113475177305,
      "grad_norm": 0.5813157715882152,
      "learning_rate": 3.992265223630981e-05,
      "loss": 0.6562,
      "step": 154
    },
    {
      "epoch": 0.6281661600810537,
      "grad_norm": 0.6645986427497028,
      "learning_rate": 3.991758505653355e-05,
      "loss": 0.6651,
      "step": 155
    },
    {
      "epoch": 0.6322188449848024,
      "grad_norm": 0.9886091944928685,
      "learning_rate": 3.991235746326543e-05,
      "loss": 0.6779,
      "step": 156
    },
    {
      "epoch": 0.6362715298885512,
      "grad_norm": 0.5782719231928911,
      "learning_rate": 3.9906969498607745e-05,
      "loss": 0.6271,
      "step": 157
    },
    {
      "epoch": 0.6403242147923,
      "grad_norm": 0.7693282918298863,
      "learning_rate": 3.990142120595444e-05,
      "loss": 0.6492,
      "step": 158
    },
    {
      "epoch": 0.6443768996960486,
      "grad_norm": 0.7132802863575435,
      "learning_rate": 3.98957126299907e-05,
      "loss": 0.6672,
      "step": 159
    },
    {
      "epoch": 0.6484295845997974,
      "grad_norm": 0.7580819113254795,
      "learning_rate": 3.9889843816692596e-05,
      "loss": 0.6872,
      "step": 160
    },
    {
      "epoch": 0.6524822695035462,
      "grad_norm": 0.5562842586980432,
      "learning_rate": 3.9883814813326766e-05,
      "loss": 0.6267,
      "step": 161
    },
    {
      "epoch": 0.6565349544072948,
      "grad_norm": 0.6357443704569753,
      "learning_rate": 3.9877625668449956e-05,
      "loss": 0.6713,
      "step": 162
    },
    {
      "epoch": 0.6605876393110436,
      "grad_norm": 0.732784578752325,
      "learning_rate": 3.98712764319087e-05,
      "loss": 0.6574,
      "step": 163
    },
    {
      "epoch": 0.6646403242147924,
      "grad_norm": 0.5777641147619963,
      "learning_rate": 3.9864767154838864e-05,
      "loss": 0.634,
      "step": 164
    },
    {
      "epoch": 0.668693009118541,
      "grad_norm": 0.6591021208964697,
      "learning_rate": 3.9858097889665277e-05,
      "loss": 0.6755,
      "step": 165
    },
    {
      "epoch": 0.6727456940222898,
      "grad_norm": 0.7403664776903102,
      "learning_rate": 3.985126869010129e-05,
      "loss": 0.6741,
      "step": 166
    },
    {
      "epoch": 0.6767983789260384,
      "grad_norm": 0.6828761855823623,
      "learning_rate": 3.984427961114833e-05,
      "loss": 0.6484,
      "step": 167
    },
    {
      "epoch": 0.6808510638297872,
      "grad_norm": 0.6767716831514466,
      "learning_rate": 3.9837130709095475e-05,
      "loss": 0.66,
      "step": 168
    },
    {
      "epoch": 0.684903748733536,
      "grad_norm": 0.626744852782807,
      "learning_rate": 3.982982204151901e-05,
      "loss": 0.634,
      "step": 169
    },
    {
      "epoch": 0.6889564336372846,
      "grad_norm": 0.652846517047525,
      "learning_rate": 3.982235366728193e-05,
      "loss": 0.6423,
      "step": 170
    },
    {
      "epoch": 0.6930091185410334,
      "grad_norm": 0.7599733407242161,
      "learning_rate": 3.9814725646533505e-05,
      "loss": 0.6782,
      "step": 171
    },
    {
      "epoch": 0.6970618034447822,
      "grad_norm": 0.572519115158008,
      "learning_rate": 3.9806938040708746e-05,
      "loss": 0.6513,
      "step": 172
    },
    {
      "epoch": 0.7011144883485309,
      "grad_norm": 0.7641784279124824,
      "learning_rate": 3.9798990912527976e-05,
      "loss": 0.6603,
      "step": 173
    },
    {
      "epoch": 0.7051671732522796,
      "grad_norm": 0.6199226283481147,
      "learning_rate": 3.979088432599627e-05,
      "loss": 0.6759,
      "step": 174
    },
    {
      "epoch": 0.7092198581560284,
      "grad_norm": 0.68191145064256,
      "learning_rate": 3.9782618346402964e-05,
      "loss": 0.6924,
      "step": 175
    },
    {
      "epoch": 0.713272543059777,
      "grad_norm": 0.5629591441061328,
      "learning_rate": 3.977419304032111e-05,
      "loss": 0.6372,
      "step": 176
    },
    {
      "epoch": 0.7173252279635258,
      "grad_norm": 0.6913418774675397,
      "learning_rate": 3.976560847560697e-05,
      "loss": 0.6429,
      "step": 177
    },
    {
      "epoch": 0.7213779128672746,
      "grad_norm": 0.6352205498673283,
      "learning_rate": 3.9756864721399456e-05,
      "loss": 0.6447,
      "step": 178
    },
    {
      "epoch": 0.7254305977710233,
      "grad_norm": 0.6253016658929128,
      "learning_rate": 3.974796184811956e-05,
      "loss": 0.6738,
      "step": 179
    },
    {
      "epoch": 0.729483282674772,
      "grad_norm": 0.7907771004863969,
      "learning_rate": 3.973889992746979e-05,
      "loss": 0.6229,
      "step": 180
    },
    {
      "epoch": 0.7335359675785208,
      "grad_norm": 0.6487811809461457,
      "learning_rate": 3.972967903243361e-05,
      "loss": 0.6318,
      "step": 181
    },
    {
      "epoch": 0.7375886524822695,
      "grad_norm": 0.8125769789219001,
      "learning_rate": 3.972029923727486e-05,
      "loss": 0.6888,
      "step": 182
    },
    {
      "epoch": 0.7416413373860182,
      "grad_norm": 0.9406261631608638,
      "learning_rate": 3.971076061753709e-05,
      "loss": 0.6442,
      "step": 183
    },
    {
      "epoch": 0.745694022289767,
      "grad_norm": 0.6385133768437423,
      "learning_rate": 3.9701063250043066e-05,
      "loss": 0.6184,
      "step": 184
    },
    {
      "epoch": 0.7497467071935157,
      "grad_norm": 0.8931792371984586,
      "learning_rate": 3.969120721289402e-05,
      "loss": 0.6594,
      "step": 185
    },
    {
      "epoch": 0.7537993920972644,
      "grad_norm": 0.6374249608356587,
      "learning_rate": 3.9681192585469146e-05,
      "loss": 0.6683,
      "step": 186
    },
    {
      "epoch": 0.7578520770010132,
      "grad_norm": 0.5965250064122172,
      "learning_rate": 3.9671019448424865e-05,
      "loss": 0.6645,
      "step": 187
    },
    {
      "epoch": 0.7619047619047619,
      "grad_norm": 0.5988671497257495,
      "learning_rate": 3.966068788369422e-05,
      "loss": 0.6671,
      "step": 188
    },
    {
      "epoch": 0.7659574468085106,
      "grad_norm": 0.6278398855657148,
      "learning_rate": 3.965019797448622e-05,
      "loss": 0.6551,
      "step": 189
    },
    {
      "epoch": 0.7700101317122594,
      "grad_norm": 0.579108712206603,
      "learning_rate": 3.963954980528515e-05,
      "loss": 0.6608,
      "step": 190
    },
    {
      "epoch": 0.7740628166160081,
      "grad_norm": 0.5810860711926126,
      "learning_rate": 3.9628743461849905e-05,
      "loss": 0.6344,
      "step": 191
    },
    {
      "epoch": 0.7781155015197568,
      "grad_norm": 0.5422220043872537,
      "learning_rate": 3.961777903121329e-05,
      "loss": 0.6099,
      "step": 192
    },
    {
      "epoch": 0.7821681864235056,
      "grad_norm": 0.7045646802018395,
      "learning_rate": 3.960665660168131e-05,
      "loss": 0.6763,
      "step": 193
    },
    {
      "epoch": 0.7862208713272543,
      "grad_norm": 0.5009113693168984,
      "learning_rate": 3.9595376262832485e-05,
      "loss": 0.6299,
      "step": 194
    },
    {
      "epoch": 0.790273556231003,
      "grad_norm": 0.6627456039777364,
      "learning_rate": 3.9583938105517127e-05,
      "loss": 0.6565,
      "step": 195
    },
    {
      "epoch": 0.7943262411347518,
      "grad_norm": 0.535443329284916,
      "learning_rate": 3.957234222185657e-05,
      "loss": 0.6455,
      "step": 196
    },
    {
      "epoch": 0.7983789260385005,
      "grad_norm": 0.5489568643664365,
      "learning_rate": 3.9560588705242474e-05,
      "loss": 0.6735,
      "step": 197
    },
    {
      "epoch": 0.8024316109422492,
      "grad_norm": 0.7386198918443827,
      "learning_rate": 3.954867765033605e-05,
      "loss": 0.698,
      "step": 198
    },
    {
      "epoch": 0.806484295845998,
      "grad_norm": 0.6584025342575558,
      "learning_rate": 3.953660915306728e-05,
      "loss": 0.6545,
      "step": 199
    },
    {
      "epoch": 0.8105369807497467,
      "grad_norm": 0.4567005415859472,
      "learning_rate": 3.952438331063419e-05,
      "loss": 0.6527,
      "step": 200
    },
    {
      "epoch": 0.8145896656534954,
      "grad_norm": 0.5747667611954319,
      "learning_rate": 3.951200022150205e-05,
      "loss": 0.6451,
      "step": 201
    },
    {
      "epoch": 0.8186423505572442,
      "grad_norm": 0.5442160486071941,
      "learning_rate": 3.949945998540253e-05,
      "loss": 0.6732,
      "step": 202
    },
    {
      "epoch": 0.8226950354609929,
      "grad_norm": 0.5962451177713491,
      "learning_rate": 3.9486762703332993e-05,
      "loss": 0.664,
      "step": 203
    },
    {
      "epoch": 0.8267477203647416,
      "grad_norm": 0.6333555709303281,
      "learning_rate": 3.947390847755559e-05,
      "loss": 0.6592,
      "step": 204
    },
    {
      "epoch": 0.8308004052684904,
      "grad_norm": 0.6356985663463997,
      "learning_rate": 3.946089741159648e-05,
      "loss": 0.6553,
      "step": 205
    },
    {
      "epoch": 0.8348530901722391,
      "grad_norm": 0.6255007415051528,
      "learning_rate": 3.944772961024501e-05,
      "loss": 0.6581,
      "step": 206
    },
    {
      "epoch": 0.8389057750759878,
      "grad_norm": 0.4838339169733258,
      "learning_rate": 3.943440517955285e-05,
      "loss": 0.6258,
      "step": 207
    },
    {
      "epoch": 0.8429584599797366,
      "grad_norm": 0.6183837063519475,
      "learning_rate": 3.9420924226833126e-05,
      "loss": 0.649,
      "step": 208
    },
    {
      "epoch": 0.8470111448834853,
      "grad_norm": 0.5468220534611677,
      "learning_rate": 3.9407286860659566e-05,
      "loss": 0.6504,
      "step": 209
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.5198056729443817,
      "learning_rate": 3.9393493190865657e-05,
      "loss": 0.661,
      "step": 210
    },
    {
      "epoch": 0.8551165146909828,
      "grad_norm": 0.49124252014846126,
      "learning_rate": 3.937954332854371e-05,
      "loss": 0.6085,
      "step": 211
    },
    {
      "epoch": 0.8591691995947315,
      "grad_norm": 0.5364391818451996,
      "learning_rate": 3.9365437386044016e-05,
      "loss": 0.6365,
      "step": 212
    },
    {
      "epoch": 0.8632218844984803,
      "grad_norm": 0.6006284773855927,
      "learning_rate": 3.935117547697387e-05,
      "loss": 0.63,
      "step": 213
    },
    {
      "epoch": 0.867274569402229,
      "grad_norm": 0.48364489641596037,
      "learning_rate": 3.933675771619675e-05,
      "loss": 0.6386,
      "step": 214
    },
    {
      "epoch": 0.8713272543059777,
      "grad_norm": 0.679659719677668,
      "learning_rate": 3.932218421983131e-05,
      "loss": 0.6222,
      "step": 215
    },
    {
      "epoch": 0.8753799392097265,
      "grad_norm": 0.5376577238661026,
      "learning_rate": 3.9307455105250484e-05,
      "loss": 0.642,
      "step": 216
    },
    {
      "epoch": 0.8794326241134752,
      "grad_norm": 0.6546326026688126,
      "learning_rate": 3.929257049108054e-05,
      "loss": 0.6743,
      "step": 217
    },
    {
      "epoch": 0.8834853090172239,
      "grad_norm": 0.6525453163129142,
      "learning_rate": 3.927753049720011e-05,
      "loss": 0.6793,
      "step": 218
    },
    {
      "epoch": 0.8875379939209727,
      "grad_norm": 0.5843734165520835,
      "learning_rate": 3.9262335244739234e-05,
      "loss": 0.6465,
      "step": 219
    },
    {
      "epoch": 0.8915906788247214,
      "grad_norm": 0.5198437560394561,
      "learning_rate": 3.92469848560784e-05,
      "loss": 0.6622,
      "step": 220
    },
    {
      "epoch": 0.8956433637284701,
      "grad_norm": 0.5277570367845225,
      "learning_rate": 3.923147945484751e-05,
      "loss": 0.6248,
      "step": 221
    },
    {
      "epoch": 0.8996960486322189,
      "grad_norm": 0.5324262859558979,
      "learning_rate": 3.9215819165924956e-05,
      "loss": 0.6573,
      "step": 222
    },
    {
      "epoch": 0.9037487335359676,
      "grad_norm": 0.6356450491120629,
      "learning_rate": 3.920000411543654e-05,
      "loss": 0.6655,
      "step": 223
    },
    {
      "epoch": 0.9078014184397163,
      "grad_norm": 0.5020304511715131,
      "learning_rate": 3.9184034430754495e-05,
      "loss": 0.6518,
      "step": 224
    },
    {
      "epoch": 0.9118541033434651,
      "grad_norm": 0.6761218155366049,
      "learning_rate": 3.916791024049648e-05,
      "loss": 0.6419,
      "step": 225
    },
    {
      "epoch": 0.9159067882472138,
      "grad_norm": 0.5477798583367588,
      "learning_rate": 3.91516316745245e-05,
      "loss": 0.6521,
      "step": 226
    },
    {
      "epoch": 0.9199594731509625,
      "grad_norm": 0.5235486652857558,
      "learning_rate": 3.913519886394389e-05,
      "loss": 0.654,
      "step": 227
    },
    {
      "epoch": 0.9240121580547113,
      "grad_norm": 0.5318085733561676,
      "learning_rate": 3.911861194110225e-05,
      "loss": 0.602,
      "step": 228
    },
    {
      "epoch": 0.92806484295846,
      "grad_norm": 0.47327149859685425,
      "learning_rate": 3.910187103958837e-05,
      "loss": 0.6208,
      "step": 229
    },
    {
      "epoch": 0.9321175278622087,
      "grad_norm": 0.5774412798693223,
      "learning_rate": 3.908497629423117e-05,
      "loss": 0.6927,
      "step": 230
    },
    {
      "epoch": 0.9361702127659575,
      "grad_norm": 0.5012351161324542,
      "learning_rate": 3.9067927841098614e-05,
      "loss": 0.6608,
      "step": 231
    },
    {
      "epoch": 0.9402228976697061,
      "grad_norm": 0.5654080613042578,
      "learning_rate": 3.9050725817496594e-05,
      "loss": 0.6413,
      "step": 232
    },
    {
      "epoch": 0.9442755825734549,
      "grad_norm": 0.5756049621432757,
      "learning_rate": 3.9033370361967844e-05,
      "loss": 0.6329,
      "step": 233
    },
    {
      "epoch": 0.9483282674772037,
      "grad_norm": 0.5301552288404705,
      "learning_rate": 3.901586161429081e-05,
      "loss": 0.6551,
      "step": 234
    },
    {
      "epoch": 0.9523809523809523,
      "grad_norm": 0.48164756413613646,
      "learning_rate": 3.8998199715478545e-05,
      "loss": 0.6377,
      "step": 235
    },
    {
      "epoch": 0.9564336372847011,
      "grad_norm": 0.5042103283518745,
      "learning_rate": 3.8980384807777564e-05,
      "loss": 0.6416,
      "step": 236
    },
    {
      "epoch": 0.9604863221884499,
      "grad_norm": 0.5240695797874856,
      "learning_rate": 3.896241703466667e-05,
      "loss": 0.6454,
      "step": 237
    },
    {
      "epoch": 0.9645390070921985,
      "grad_norm": 0.480102592406313,
      "learning_rate": 3.894429654085585e-05,
      "loss": 0.6156,
      "step": 238
    },
    {
      "epoch": 0.9685916919959473,
      "grad_norm": 0.5392021228543473,
      "learning_rate": 3.892602347228505e-05,
      "loss": 0.6688,
      "step": 239
    },
    {
      "epoch": 0.9726443768996961,
      "grad_norm": 0.4558541562487634,
      "learning_rate": 3.890759797612307e-05,
      "loss": 0.6392,
      "step": 240
    },
    {
      "epoch": 0.9766970618034447,
      "grad_norm": 0.5509554561610756,
      "learning_rate": 3.888902020076632e-05,
      "loss": 0.6368,
      "step": 241
    },
    {
      "epoch": 0.9807497467071935,
      "grad_norm": 0.4902576886131729,
      "learning_rate": 3.887029029583764e-05,
      "loss": 0.6532,
      "step": 242
    },
    {
      "epoch": 0.9848024316109423,
      "grad_norm": 0.4350141761025598,
      "learning_rate": 3.8851408412185125e-05,
      "loss": 0.6073,
      "step": 243
    },
    {
      "epoch": 0.9888551165146909,
      "grad_norm": 0.44954792033729896,
      "learning_rate": 3.8832374701880855e-05,
      "loss": 0.6045,
      "step": 244
    },
    {
      "epoch": 0.9929078014184397,
      "grad_norm": 0.5310462875194559,
      "learning_rate": 3.881318931821972e-05,
      "loss": 0.6242,
      "step": 245
    },
    {
      "epoch": 0.9969604863221885,
      "grad_norm": 0.5195162896695701,
      "learning_rate": 3.879385241571817e-05,
      "loss": 0.6352,
      "step": 246
    },
    {
      "epoch": 1.0010131712259371,
      "grad_norm": 0.46539903847427483,
      "learning_rate": 3.8774364150112955e-05,
      "loss": 0.5993,
      "step": 247
    },
    {
      "epoch": 1.005065856129686,
      "grad_norm": 0.5721331146510316,
      "learning_rate": 3.8754724678359884e-05,
      "loss": 0.5266,
      "step": 248
    },
    {
      "epoch": 1.0091185410334347,
      "grad_norm": 0.4630175386937948,
      "learning_rate": 3.873493415863256e-05,
      "loss": 0.5135,
      "step": 249
    },
    {
      "epoch": 1.0131712259371835,
      "grad_norm": 0.5971421598448525,
      "learning_rate": 3.871499275032111e-05,
      "loss": 0.5353,
      "step": 250
    },
    {
      "epoch": 1.0172239108409322,
      "grad_norm": 0.5971328548121767,
      "learning_rate": 3.869490061403091e-05,
      "loss": 0.5355,
      "step": 251
    },
    {
      "epoch": 1.0212765957446808,
      "grad_norm": 0.5567927419301527,
      "learning_rate": 3.867465791158124e-05,
      "loss": 0.5244,
      "step": 252
    },
    {
      "epoch": 1.0253292806484295,
      "grad_norm": 0.5883646614816578,
      "learning_rate": 3.865426480600407e-05,
      "loss": 0.5235,
      "step": 253
    },
    {
      "epoch": 1.0293819655521783,
      "grad_norm": 0.5419714506776818,
      "learning_rate": 3.863372146154264e-05,
      "loss": 0.5216,
      "step": 254
    },
    {
      "epoch": 1.033434650455927,
      "grad_norm": 0.6058246665482957,
      "learning_rate": 3.861302804365024e-05,
      "loss": 0.524,
      "step": 255
    },
    {
      "epoch": 1.0374873353596759,
      "grad_norm": 0.5870085341115271,
      "learning_rate": 3.85921847189888e-05,
      "loss": 0.5501,
      "step": 256
    },
    {
      "epoch": 1.0415400202634246,
      "grad_norm": 0.540951531119635,
      "learning_rate": 3.85711916554276e-05,
      "loss": 0.4946,
      "step": 257
    },
    {
      "epoch": 1.0455927051671732,
      "grad_norm": 0.7588521014997948,
      "learning_rate": 3.85500490220419e-05,
      "loss": 0.5275,
      "step": 258
    },
    {
      "epoch": 1.049645390070922,
      "grad_norm": 0.5872215773115123,
      "learning_rate": 3.852875698911154e-05,
      "loss": 0.5137,
      "step": 259
    },
    {
      "epoch": 1.0536980749746707,
      "grad_norm": 0.813939027396721,
      "learning_rate": 3.850731572811963e-05,
      "loss": 0.4953,
      "step": 260
    },
    {
      "epoch": 1.0577507598784195,
      "grad_norm": 0.7157498655664356,
      "learning_rate": 3.848572541175116e-05,
      "loss": 0.5209,
      "step": 261
    },
    {
      "epoch": 1.0618034447821683,
      "grad_norm": 0.5142387405382273,
      "learning_rate": 3.846398621389154e-05,
      "loss": 0.5314,
      "step": 262
    },
    {
      "epoch": 1.0658561296859168,
      "grad_norm": 0.6085930865447006,
      "learning_rate": 3.84420983096253e-05,
      "loss": 0.5222,
      "step": 263
    },
    {
      "epoch": 1.0699088145896656,
      "grad_norm": 0.536939258154577,
      "learning_rate": 3.8420061875234606e-05,
      "loss": 0.5426,
      "step": 264
    },
    {
      "epoch": 1.0739614994934144,
      "grad_norm": 0.5035325296842744,
      "learning_rate": 3.839787708819787e-05,
      "loss": 0.5438,
      "step": 265
    },
    {
      "epoch": 1.0780141843971631,
      "grad_norm": 0.5696625542686482,
      "learning_rate": 3.8375544127188325e-05,
      "loss": 0.5245,
      "step": 266
    },
    {
      "epoch": 1.082066869300912,
      "grad_norm": 0.5099520099300787,
      "learning_rate": 3.8353063172072564e-05,
      "loss": 0.5198,
      "step": 267
    },
    {
      "epoch": 1.0861195542046607,
      "grad_norm": 0.514341466102075,
      "learning_rate": 3.8330434403909105e-05,
      "loss": 0.5285,
      "step": 268
    },
    {
      "epoch": 1.0901722391084094,
      "grad_norm": 0.5269907753925314,
      "learning_rate": 3.8307658004946934e-05,
      "loss": 0.5109,
      "step": 269
    },
    {
      "epoch": 1.094224924012158,
      "grad_norm": 0.48924322825629246,
      "learning_rate": 3.8284734158624046e-05,
      "loss": 0.522,
      "step": 270
    },
    {
      "epoch": 1.0982776089159068,
      "grad_norm": 0.5538876706026832,
      "learning_rate": 3.826166304956594e-05,
      "loss": 0.5051,
      "step": 271
    },
    {
      "epoch": 1.1023302938196555,
      "grad_norm": 0.5794742459368873,
      "learning_rate": 3.8238444863584164e-05,
      "loss": 0.5242,
      "step": 272
    },
    {
      "epoch": 1.1063829787234043,
      "grad_norm": 0.4717872228709027,
      "learning_rate": 3.821507978767479e-05,
      "loss": 0.5191,
      "step": 273
    },
    {
      "epoch": 1.110435663627153,
      "grad_norm": 0.550044522384166,
      "learning_rate": 3.819156801001693e-05,
      "loss": 0.5358,
      "step": 274
    },
    {
      "epoch": 1.1144883485309016,
      "grad_norm": 0.5072038334648188,
      "learning_rate": 3.816790971997121e-05,
      "loss": 0.5351,
      "step": 275
    },
    {
      "epoch": 1.1185410334346504,
      "grad_norm": 0.5807660279909721,
      "learning_rate": 3.8144105108078246e-05,
      "loss": 0.5459,
      "step": 276
    },
    {
      "epoch": 1.1225937183383992,
      "grad_norm": 0.533811678308289,
      "learning_rate": 3.81201543660571e-05,
      "loss": 0.5217,
      "step": 277
    },
    {
      "epoch": 1.126646403242148,
      "grad_norm": 0.7354620768312641,
      "learning_rate": 3.809605768680377e-05,
      "loss": 0.5195,
      "step": 278
    },
    {
      "epoch": 1.1306990881458967,
      "grad_norm": 0.5129443102369141,
      "learning_rate": 3.807181526438958e-05,
      "loss": 0.531,
      "step": 279
    },
    {
      "epoch": 1.1347517730496455,
      "grad_norm": 0.7242399335538623,
      "learning_rate": 3.8047427294059697e-05,
      "loss": 0.5057,
      "step": 280
    },
    {
      "epoch": 1.1388044579533942,
      "grad_norm": 0.6088801848681012,
      "learning_rate": 3.802289397223145e-05,
      "loss": 0.5408,
      "step": 281
    },
    {
      "epoch": 1.1428571428571428,
      "grad_norm": 0.5106813240324434,
      "learning_rate": 3.7998215496492854e-05,
      "loss": 0.5276,
      "step": 282
    },
    {
      "epoch": 1.1469098277608916,
      "grad_norm": 0.6671448117350898,
      "learning_rate": 3.797339206560096e-05,
      "loss": 0.5296,
      "step": 283
    },
    {
      "epoch": 1.1509625126646403,
      "grad_norm": 0.5222406639445356,
      "learning_rate": 3.794842387948027e-05,
      "loss": 0.5185,
      "step": 284
    },
    {
      "epoch": 1.155015197568389,
      "grad_norm": 0.6044830847551335,
      "learning_rate": 3.7923311139221114e-05,
      "loss": 0.5165,
      "step": 285
    },
    {
      "epoch": 1.1590678824721379,
      "grad_norm": 0.5582160585593303,
      "learning_rate": 3.7898054047078054e-05,
      "loss": 0.5223,
      "step": 286
    },
    {
      "epoch": 1.1631205673758864,
      "grad_norm": 0.5517860414043275,
      "learning_rate": 3.787265280646825e-05,
      "loss": 0.5182,
      "step": 287
    },
    {
      "epoch": 1.1671732522796352,
      "grad_norm": 0.5296304576687654,
      "learning_rate": 3.7847107621969786e-05,
      "loss": 0.5124,
      "step": 288
    },
    {
      "epoch": 1.171225937183384,
      "grad_norm": 0.5107525372836029,
      "learning_rate": 3.7821418699320064e-05,
      "loss": 0.5165,
      "step": 289
    },
    {
      "epoch": 1.1752786220871327,
      "grad_norm": 0.6064099923099067,
      "learning_rate": 3.7795586245414145e-05,
      "loss": 0.5364,
      "step": 290
    },
    {
      "epoch": 1.1793313069908815,
      "grad_norm": 0.5327554103630293,
      "learning_rate": 3.776961046830306e-05,
      "loss": 0.5463,
      "step": 291
    },
    {
      "epoch": 1.1833839918946303,
      "grad_norm": 0.5189107655892751,
      "learning_rate": 3.774349157719215e-05,
      "loss": 0.515,
      "step": 292
    },
    {
      "epoch": 1.1874366767983788,
      "grad_norm": 0.5312590699745653,
      "learning_rate": 3.7717229782439365e-05,
      "loss": 0.5314,
      "step": 293
    },
    {
      "epoch": 1.1914893617021276,
      "grad_norm": 0.5371671359053948,
      "learning_rate": 3.769082529555359e-05,
      "loss": 0.5125,
      "step": 294
    },
    {
      "epoch": 1.1955420466058764,
      "grad_norm": 0.5368223525309944,
      "learning_rate": 3.766427832919294e-05,
      "loss": 0.5386,
      "step": 295
    },
    {
      "epoch": 1.1995947315096251,
      "grad_norm": 0.5362709443242086,
      "learning_rate": 3.7637589097163024e-05,
      "loss": 0.5292,
      "step": 296
    },
    {
      "epoch": 1.203647416413374,
      "grad_norm": 0.4905563442824916,
      "learning_rate": 3.761075781441526e-05,
      "loss": 0.4917,
      "step": 297
    },
    {
      "epoch": 1.2077001013171227,
      "grad_norm": 0.5403762197429501,
      "learning_rate": 3.75837846970451e-05,
      "loss": 0.5299,
      "step": 298
    },
    {
      "epoch": 1.2117527862208712,
      "grad_norm": 0.578030864935955,
      "learning_rate": 3.755666996229032e-05,
      "loss": 0.5171,
      "step": 299
    },
    {
      "epoch": 1.21580547112462,
      "grad_norm": 0.43797358524235913,
      "learning_rate": 3.752941382852927e-05,
      "loss": 0.5165,
      "step": 300
    },
    {
      "epoch": 1.2198581560283688,
      "grad_norm": 0.4421969448614822,
      "learning_rate": 3.7502016515279115e-05,
      "loss": 0.513,
      "step": 301
    },
    {
      "epoch": 1.2239108409321175,
      "grad_norm": 0.47690204193648944,
      "learning_rate": 3.7474478243194043e-05,
      "loss": 0.5308,
      "step": 302
    },
    {
      "epoch": 1.2279635258358663,
      "grad_norm": 0.5284829827970645,
      "learning_rate": 3.744679923406351e-05,
      "loss": 0.553,
      "step": 303
    },
    {
      "epoch": 1.232016210739615,
      "grad_norm": 0.48415755978944,
      "learning_rate": 3.741897971081043e-05,
      "loss": 0.5036,
      "step": 304
    },
    {
      "epoch": 1.2360688956433636,
      "grad_norm": 0.5140823736996428,
      "learning_rate": 3.739101989748946e-05,
      "loss": 0.5157,
      "step": 305
    },
    {
      "epoch": 1.2401215805471124,
      "grad_norm": 0.4469363340877741,
      "learning_rate": 3.7362920019285066e-05,
      "loss": 0.536,
      "step": 306
    },
    {
      "epoch": 1.2441742654508612,
      "grad_norm": 0.5903783771695209,
      "learning_rate": 3.73346803025098e-05,
      "loss": 0.5134,
      "step": 307
    },
    {
      "epoch": 1.24822695035461,
      "grad_norm": 0.5098387526445031,
      "learning_rate": 3.730630097460247e-05,
      "loss": 0.5365,
      "step": 308
    },
    {
      "epoch": 1.2522796352583587,
      "grad_norm": 0.5414186717082425,
      "learning_rate": 3.727778226412628e-05,
      "loss": 0.532,
      "step": 309
    },
    {
      "epoch": 1.2563323201621075,
      "grad_norm": 0.6857138230336729,
      "learning_rate": 3.7249124400767006e-05,
      "loss": 0.5418,
      "step": 310
    },
    {
      "epoch": 1.260385005065856,
      "grad_norm": 0.48861001889199024,
      "learning_rate": 3.722032761533114e-05,
      "loss": 0.5304,
      "step": 311
    },
    {
      "epoch": 1.2644376899696048,
      "grad_norm": 0.5676474000363062,
      "learning_rate": 3.719139213974403e-05,
      "loss": 0.5389,
      "step": 312
    },
    {
      "epoch": 1.2684903748733536,
      "grad_norm": 0.5812308154357729,
      "learning_rate": 3.7162318207048006e-05,
      "loss": 0.5244,
      "step": 313
    },
    {
      "epoch": 1.2725430597771024,
      "grad_norm": 0.5203803151565424,
      "learning_rate": 3.713310605140055e-05,
      "loss": 0.5368,
      "step": 314
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.5841171386128461,
      "learning_rate": 3.710375590807233e-05,
      "loss": 0.5267,
      "step": 315
    },
    {
      "epoch": 1.2806484295845997,
      "grad_norm": 0.6451730438846078,
      "learning_rate": 3.7074268013445365e-05,
      "loss": 0.55,
      "step": 316
    },
    {
      "epoch": 1.2847011144883485,
      "grad_norm": 0.6067873745741551,
      "learning_rate": 3.7044642605011114e-05,
      "loss": 0.5299,
      "step": 317
    },
    {
      "epoch": 1.2887537993920972,
      "grad_norm": 0.5034572434610671,
      "learning_rate": 3.701487992136854e-05,
      "loss": 0.5226,
      "step": 318
    },
    {
      "epoch": 1.292806484295846,
      "grad_norm": 0.6086857533591217,
      "learning_rate": 3.69849802022222e-05,
      "loss": 0.5217,
      "step": 319
    },
    {
      "epoch": 1.2968591691995948,
      "grad_norm": 0.5837386215558551,
      "learning_rate": 3.6954943688380334e-05,
      "loss": 0.5394,
      "step": 320
    },
    {
      "epoch": 1.3009118541033435,
      "grad_norm": 0.6215797401214238,
      "learning_rate": 3.692477062175289e-05,
      "loss": 0.5226,
      "step": 321
    },
    {
      "epoch": 1.3049645390070923,
      "grad_norm": 0.5173019410083438,
      "learning_rate": 3.689446124534958e-05,
      "loss": 0.4966,
      "step": 322
    },
    {
      "epoch": 1.3090172239108409,
      "grad_norm": 0.6245045719766499,
      "learning_rate": 3.686401580327799e-05,
      "loss": 0.5448,
      "step": 323
    },
    {
      "epoch": 1.3130699088145896,
      "grad_norm": 0.5193931548337708,
      "learning_rate": 3.683343454074149e-05,
      "loss": 0.514,
      "step": 324
    },
    {
      "epoch": 1.3171225937183384,
      "grad_norm": 0.49935165424524225,
      "learning_rate": 3.6802717704037386e-05,
      "loss": 0.5333,
      "step": 325
    },
    {
      "epoch": 1.3211752786220872,
      "grad_norm": 0.4949988245199749,
      "learning_rate": 3.6771865540554855e-05,
      "loss": 0.4995,
      "step": 326
    },
    {
      "epoch": 1.325227963525836,
      "grad_norm": 0.5411766741565728,
      "learning_rate": 3.674087829877297e-05,
      "loss": 0.5352,
      "step": 327
    },
    {
      "epoch": 1.3292806484295845,
      "grad_norm": 0.5166613605613284,
      "learning_rate": 3.6709756228258735e-05,
      "loss": 0.5243,
      "step": 328
    },
    {
      "epoch": 1.3333333333333333,
      "grad_norm": 0.5065243071725839,
      "learning_rate": 3.667849957966501e-05,
      "loss": 0.5159,
      "step": 329
    },
    {
      "epoch": 1.337386018237082,
      "grad_norm": 0.4874077618925925,
      "learning_rate": 3.6647108604728546e-05,
      "loss": 0.5253,
      "step": 330
    },
    {
      "epoch": 1.3414387031408308,
      "grad_norm": 0.5981950851498741,
      "learning_rate": 3.661558355626795e-05,
      "loss": 0.5678,
      "step": 331
    },
    {
      "epoch": 1.3454913880445796,
      "grad_norm": 0.5463181943305964,
      "learning_rate": 3.658392468818163e-05,
      "loss": 0.5304,
      "step": 332
    },
    {
      "epoch": 1.3495440729483283,
      "grad_norm": 0.48947544639011564,
      "learning_rate": 3.655213225544574e-05,
      "loss": 0.5455,
      "step": 333
    },
    {
      "epoch": 1.3535967578520771,
      "grad_norm": 0.5467682704034038,
      "learning_rate": 3.652020651411218e-05,
      "loss": 0.533,
      "step": 334
    },
    {
      "epoch": 1.3576494427558257,
      "grad_norm": 0.4687856411038806,
      "learning_rate": 3.6488147721306474e-05,
      "loss": 0.5278,
      "step": 335
    },
    {
      "epoch": 1.3617021276595744,
      "grad_norm": 0.4601428281212315,
      "learning_rate": 3.645595613522574e-05,
      "loss": 0.5205,
      "step": 336
    },
    {
      "epoch": 1.3657548125633232,
      "grad_norm": 0.47993191500684906,
      "learning_rate": 3.642363201513657e-05,
      "loss": 0.5049,
      "step": 337
    },
    {
      "epoch": 1.369807497467072,
      "grad_norm": 0.49662918808162565,
      "learning_rate": 3.6391175621373006e-05,
      "loss": 0.5077,
      "step": 338
    },
    {
      "epoch": 1.3738601823708207,
      "grad_norm": 0.4978664008115118,
      "learning_rate": 3.6358587215334355e-05,
      "loss": 0.5336,
      "step": 339
    },
    {
      "epoch": 1.3779128672745693,
      "grad_norm": 0.5490575611293455,
      "learning_rate": 3.632586705948318e-05,
      "loss": 0.537,
      "step": 340
    },
    {
      "epoch": 1.381965552178318,
      "grad_norm": 0.44963038464161614,
| "learning_rate": 3.629301541734311e-05, | |
| "loss": 0.5261, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.3860182370820668, | |
| "grad_norm": 0.5026458300272734, | |
| "learning_rate": 3.626003255349676e-05, | |
| "loss": 0.5196, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.3900709219858156, | |
| "grad_norm": 0.43240902926095, | |
| "learning_rate": 3.622691873358357e-05, | |
| "loss": 0.5067, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.3941236068895644, | |
| "grad_norm": 0.5315792537266139, | |
| "learning_rate": 3.61936742242977e-05, | |
| "loss": 0.5456, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.3981762917933132, | |
| "grad_norm": 0.5614850785234248, | |
| "learning_rate": 3.6160299293385864e-05, | |
| "loss": 0.5318, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.402228976697062, | |
| "grad_norm": 0.4707818666771937, | |
| "learning_rate": 3.612679420964516e-05, | |
| "loss": 0.5028, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.4062816616008105, | |
| "grad_norm": 0.5368451166894669, | |
| "learning_rate": 3.609315924292092e-05, | |
| "loss": 0.5231, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.4103343465045592, | |
| "grad_norm": 0.48375150574836834, | |
| "learning_rate": 3.6059394664104554e-05, | |
| "loss": 0.5362, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.414387031408308, | |
| "grad_norm": 0.5675650103542361, | |
| "learning_rate": 3.602550074513133e-05, | |
| "loss": 0.5209, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.4184397163120568, | |
| "grad_norm": 0.5219281047486538, | |
| "learning_rate": 3.599147775897822e-05, | |
| "loss": 0.5318, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.4224924012158056, | |
| "grad_norm": 0.5329028869460057, | |
| "learning_rate": 3.595732597966167e-05, | |
| "loss": 0.5333, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.426545086119554, | |
| "grad_norm": 0.4672111394685723, | |
| "learning_rate": 3.592304568223542e-05, | |
| "loss": 0.5363, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.4305977710233029, | |
| "grad_norm": 0.4945061168429465, | |
| "learning_rate": 3.588863714278826e-05, | |
| "loss": 0.5335, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.4346504559270516, | |
| "grad_norm": 0.48559926565972317, | |
| "learning_rate": 3.585410063844186e-05, | |
| "loss": 0.5181, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.4387031408308004, | |
| "grad_norm": 0.4652278494777882, | |
| "learning_rate": 3.581943644734846e-05, | |
| "loss": 0.527, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.4427558257345492, | |
| "grad_norm": 0.483349917112747, | |
| "learning_rate": 3.578464484868869e-05, | |
| "loss": 0.5162, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.4468085106382977, | |
| "grad_norm": 0.4962336407265618, | |
| "learning_rate": 3.5749726122669316e-05, | |
| "loss": 0.5305, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.4508611955420467, | |
| "grad_norm": 0.47819057209537086, | |
| "learning_rate": 3.5714680550520943e-05, | |
| "loss": 0.5376, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.4549138804457953, | |
| "grad_norm": 0.5473935166195258, | |
| "learning_rate": 3.5679508414495794e-05, | |
| "loss": 0.5229, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.458966565349544, | |
| "grad_norm": 0.5224230271081634, | |
| "learning_rate": 3.564420999786543e-05, | |
| "loss": 0.5317, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.4630192502532928, | |
| "grad_norm": 0.4834531061601308, | |
| "learning_rate": 3.560878558491842e-05, | |
| "loss": 0.5281, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.4670719351570416, | |
| "grad_norm": 0.49977576562007026, | |
| "learning_rate": 3.5573235460958145e-05, | |
| "loss": 0.5458, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.4711246200607904, | |
| "grad_norm": 0.518900685921013, | |
| "learning_rate": 3.553755991230039e-05, | |
| "loss": 0.5327, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.475177304964539, | |
| "grad_norm": 0.5056168019028707, | |
| "learning_rate": 3.5501759226271144e-05, | |
| "loss": 0.5349, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.4792299898682877, | |
| "grad_norm": 0.5155909978528884, | |
| "learning_rate": 3.546583369120419e-05, | |
| "loss": 0.527, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.4832826747720365, | |
| "grad_norm": 0.4793050247055886, | |
| "learning_rate": 3.5429783596438864e-05, | |
| "loss": 0.5175, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.4873353596757852, | |
| "grad_norm": 0.4868088680443328, | |
| "learning_rate": 3.539360923231766e-05, | |
| "loss": 0.5274, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.491388044579534, | |
| "grad_norm": 0.5482405051288772, | |
| "learning_rate": 3.535731089018394e-05, | |
| "loss": 0.5111, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.4954407294832825, | |
| "grad_norm": 0.4874542015233093, | |
| "learning_rate": 3.532088886237956e-05, | |
| "loss": 0.5166, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.4994934143870315, | |
| "grad_norm": 0.5095607981513338, | |
| "learning_rate": 3.528434344224253e-05, | |
| "loss": 0.5222, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.50354609929078, | |
| "grad_norm": 0.6068683781524951, | |
| "learning_rate": 3.524767492410464e-05, | |
| "loss": 0.5258, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.5075987841945289, | |
| "grad_norm": 0.45995303775894525, | |
| "learning_rate": 3.521088360328908e-05, | |
| "loss": 0.5135, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.5116514690982776, | |
| "grad_norm": 0.5402085099617082, | |
| "learning_rate": 3.517396977610811e-05, | |
| "loss": 0.5412, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.5157041540020262, | |
| "grad_norm": 0.4117225221508349, | |
| "learning_rate": 3.5136933739860595e-05, | |
| "loss": 0.5197, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.5197568389057752, | |
| "grad_norm": 0.5218056071265279, | |
| "learning_rate": 3.509977579282971e-05, | |
| "loss": 0.5418, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.5238095238095237, | |
| "grad_norm": 0.4599303269519447, | |
| "learning_rate": 3.5062496234280424e-05, | |
| "loss": 0.5179, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.5278622087132725, | |
| "grad_norm": 0.5466822045290498, | |
| "learning_rate": 3.502509536445719e-05, | |
| "loss": 0.5246, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.5319148936170213, | |
| "grad_norm": 0.6238225853486538, | |
| "learning_rate": 3.498757348458147e-05, | |
| "loss": 0.5208, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.53596757852077, | |
| "grad_norm": 0.41285087503838497, | |
| "learning_rate": 3.4949930896849324e-05, | |
| "loss": 0.5187, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.5400202634245188, | |
| "grad_norm": 0.5434664565549008, | |
| "learning_rate": 3.491216790442899e-05, | |
| "loss": 0.5235, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.5440729483282674, | |
| "grad_norm": 0.49047921137476336, | |
| "learning_rate": 3.487428481145839e-05, | |
| "loss": 0.5023, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.5481256332320164, | |
| "grad_norm": 0.4792556828302923, | |
| "learning_rate": 3.483628192304278e-05, | |
| "loss": 0.5353, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.552178318135765, | |
| "grad_norm": 0.5966200174699883, | |
| "learning_rate": 3.479815954525219e-05, | |
| "loss": 0.513, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.5562310030395137, | |
| "grad_norm": 0.3972614524729954, | |
| "learning_rate": 3.475991798511899e-05, | |
| "loss": 0.5273, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.5602836879432624, | |
| "grad_norm": 0.4959026982966128, | |
| "learning_rate": 3.4721557550635464e-05, | |
| "loss": 0.4951, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.564336372847011, | |
| "grad_norm": 0.4191241509131936, | |
| "learning_rate": 3.468307855075128e-05, | |
| "loss": 0.5134, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.56838905775076, | |
| "grad_norm": 0.49166278923604667, | |
| "learning_rate": 3.4644481295371005e-05, | |
| "loss": 0.5173, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.5724417426545085, | |
| "grad_norm": 0.4820763565102606, | |
| "learning_rate": 3.460576609535163e-05, | |
| "loss": 0.5058, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.5764944275582573, | |
| "grad_norm": 0.6365641641242409, | |
| "learning_rate": 3.456693326250006e-05, | |
| "loss": 0.5174, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.580547112462006, | |
| "grad_norm": 0.5399204640668317, | |
| "learning_rate": 3.452798310957058e-05, | |
| "loss": 0.5156, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.5845997973657548, | |
| "grad_norm": 0.6929809668016618, | |
| "learning_rate": 3.4488915950262386e-05, | |
| "loss": 0.517, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.5886524822695036, | |
| "grad_norm": 0.6069426281857588, | |
| "learning_rate": 3.4449732099216985e-05, | |
| "loss": 0.5131, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.5927051671732522, | |
| "grad_norm": 0.5443770799697926, | |
| "learning_rate": 3.441043187201574e-05, | |
| "loss": 0.5204, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.5967578520770012, | |
| "grad_norm": 0.5423603378079402, | |
| "learning_rate": 3.437101558517728e-05, | |
| "loss": 0.5346, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.6008105369807497, | |
| "grad_norm": 0.46439672944439964, | |
| "learning_rate": 3.433148355615496e-05, | |
| "loss": 0.5205, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.6048632218844985, | |
| "grad_norm": 0.49156320237624823, | |
| "learning_rate": 3.4291836103334294e-05, | |
| "loss": 0.5415, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.6089159067882473, | |
| "grad_norm": 0.4985362944083813, | |
| "learning_rate": 3.425207354603043e-05, | |
| "loss": 0.5465, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.6129685916919958, | |
| "grad_norm": 0.4239378890163982, | |
| "learning_rate": 3.421219620448553e-05, | |
| "loss": 0.5387, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.6170212765957448, | |
| "grad_norm": 0.523449987542341, | |
| "learning_rate": 3.417220439986623e-05, | |
| "loss": 0.5378, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.6210739614994933, | |
| "grad_norm": 0.5239062023209914, | |
| "learning_rate": 3.4132098454261024e-05, | |
| "loss": 0.504, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.6251266464032421, | |
| "grad_norm": 0.4278704882790379, | |
| "learning_rate": 3.4091878690677676e-05, | |
| "loss": 0.5156, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.6291793313069909, | |
| "grad_norm": 0.6282985524545994, | |
| "learning_rate": 3.405154543304065e-05, | |
| "loss": 0.5336, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.6332320162107397, | |
| "grad_norm": 0.47735699409054044, | |
| "learning_rate": 3.401109900618843e-05, | |
| "loss": 0.5291, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.6372847011144884, | |
| "grad_norm": 0.6370109173024822, | |
| "learning_rate": 3.3970539735870996e-05, | |
| "loss": 0.5403, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.641337386018237, | |
| "grad_norm": 0.43771744904293863, | |
| "learning_rate": 3.392986794874714e-05, | |
| "loss": 0.5192, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.645390070921986, | |
| "grad_norm": 0.5830144740280944, | |
| "learning_rate": 3.388908397238184e-05, | |
| "loss": 0.5479, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.6494427558257345, | |
| "grad_norm": 0.4709545357560335, | |
| "learning_rate": 3.384818813524362e-05, | |
| "loss": 0.5077, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.6534954407294833, | |
| "grad_norm": 0.533680299031102, | |
| "learning_rate": 3.380718076670195e-05, | |
| "loss": 0.5328, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.657548125633232, | |
| "grad_norm": 0.4800510860698552, | |
| "learning_rate": 3.376606219702454e-05, | |
| "loss": 0.5119, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.6616008105369806, | |
| "grad_norm": 0.46192558495254865, | |
| "learning_rate": 3.372483275737468e-05, | |
| "loss": 0.526, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.6656534954407296, | |
| "grad_norm": 0.4278490296945421, | |
| "learning_rate": 3.368349277980861e-05, | |
| "loss": 0.5128, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.6697061803444782, | |
| "grad_norm": 0.5016325219235511, | |
| "learning_rate": 3.3642042597272844e-05, | |
| "loss": 0.5289, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.673758865248227, | |
| "grad_norm": 0.5031776444064212, | |
| "learning_rate": 3.360048254360144e-05, | |
| "loss": 0.5222, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.6778115501519757, | |
| "grad_norm": 0.4854305552137159, | |
| "learning_rate": 3.355881295351336e-05, | |
| "loss": 0.528, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.6818642350557245, | |
| "grad_norm": 0.4733493254723261, | |
| "learning_rate": 3.351703416260975e-05, | |
| "loss": 0.5215, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.6859169199594732, | |
| "grad_norm": 0.46328747017262467, | |
| "learning_rate": 3.347514650737126e-05, | |
| "loss": 0.5148, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.6899696048632218, | |
| "grad_norm": 0.45673457256223376, | |
| "learning_rate": 3.3433150325155295e-05, | |
| "loss": 0.5109, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.6940222897669708, | |
| "grad_norm": 0.45121040157866904, | |
| "learning_rate": 3.339104595419334e-05, | |
| "loss": 0.5307, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.6980749746707193, | |
| "grad_norm": 0.4599716500280817, | |
| "learning_rate": 3.3348833733588204e-05, | |
| "loss": 0.5093, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.702127659574468, | |
| "grad_norm": 0.5244704449142992, | |
| "learning_rate": 3.3306514003311305e-05, | |
| "loss": 0.54, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.7061803444782169, | |
| "grad_norm": 0.49039850604531393, | |
| "learning_rate": 3.326408710419996e-05, | |
| "loss": 0.5146, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.7102330293819654, | |
| "grad_norm": 0.49155803145386423, | |
| "learning_rate": 3.322155337795454e-05, | |
| "loss": 0.5428, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.7142857142857144, | |
| "grad_norm": 0.4499724692021963, | |
| "learning_rate": 3.317891316713587e-05, | |
| "loss": 0.5218, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.718338399189463, | |
| "grad_norm": 0.4440815719938451, | |
| "learning_rate": 3.313616681516231e-05, | |
| "loss": 0.5246, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.7223910840932117, | |
| "grad_norm": 0.42270697927834755, | |
| "learning_rate": 3.309331466630713e-05, | |
| "loss": 0.5328, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.7264437689969605, | |
| "grad_norm": 0.4678406893179575, | |
| "learning_rate": 3.305035706569563e-05, | |
| "loss": 0.501, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.7304964539007093, | |
| "grad_norm": 0.4489905142839489, | |
| "learning_rate": 3.3007294359302433e-05, | |
| "loss": 0.5188, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.734549138804458, | |
| "grad_norm": 0.41365173741361466, | |
| "learning_rate": 3.296412689394864e-05, | |
| "loss": 0.5069, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.7386018237082066, | |
| "grad_norm": 0.46087665919684145, | |
| "learning_rate": 3.292085501729909e-05, | |
| "loss": 0.5444, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.7426545086119554, | |
| "grad_norm": 0.4950981802616613, | |
| "learning_rate": 3.2877479077859534e-05, | |
| "loss": 0.5287, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.7467071935157041, | |
| "grad_norm": 0.4329820173989564, | |
| "learning_rate": 3.283399942497381e-05, | |
| "loss": 0.5366, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.750759878419453, | |
| "grad_norm": 0.5329613098901713, | |
| "learning_rate": 3.279041640882108e-05, | |
| "loss": 0.5197, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.7548125633232017, | |
| "grad_norm": 0.4742677154402732, | |
| "learning_rate": 3.2746730380412964e-05, | |
| "loss": 0.5249, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.7588652482269502, | |
| "grad_norm": 0.47577746586168224, | |
| "learning_rate": 3.2702941691590726e-05, | |
| "loss": 0.4988, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.7629179331306992, | |
| "grad_norm": 0.9851965771168365, | |
| "learning_rate": 3.265905069502244e-05, | |
| "loss": 0.5321, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.7669706180344478, | |
| "grad_norm": 0.4823979888760002, | |
| "learning_rate": 3.261505774420016e-05, | |
| "loss": 0.5189, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.7710233029381965, | |
| "grad_norm": 0.455205134651666, | |
| "learning_rate": 3.257096319343707e-05, | |
| "loss": 0.5296, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.7750759878419453, | |
| "grad_norm": 0.44071369522770004, | |
| "learning_rate": 3.2526767397864614e-05, | |
| "loss": 0.5435, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.7791286727456939, | |
| "grad_norm": 0.6251863510623925, | |
| "learning_rate": 3.248247071342966e-05, | |
| "loss": 0.5292, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.7831813576494429, | |
| "grad_norm": 5.237741963408119, | |
| "learning_rate": 3.243807349689161e-05, | |
| "loss": 0.5072, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.7872340425531914, | |
| "grad_norm": 0.6028285059583506, | |
| "learning_rate": 3.2393576105819544e-05, | |
| "loss": 0.5275, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.7912867274569402, | |
| "grad_norm": 0.45288379869363454, | |
| "learning_rate": 3.2348978898589333e-05, | |
| "loss": 0.5346, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.795339412360689, | |
| "grad_norm": 0.44929836920429317, | |
| "learning_rate": 3.230428223438075e-05, | |
| "loss": 0.5252, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.7993920972644377, | |
| "grad_norm": 0.426956744807085, | |
| "learning_rate": 3.225948647317459e-05, | |
| "loss": 0.5136, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.8034447821681865, | |
| "grad_norm": 0.4560814759161261, | |
| "learning_rate": 3.2214591975749745e-05, | |
| "loss": 0.5271, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.807497467071935, | |
| "grad_norm": 0.4522517163656964, | |
| "learning_rate": 3.216959910368034e-05, | |
| "loss": 0.5181, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.811550151975684, | |
| "grad_norm": 0.498296326590016, | |
| "learning_rate": 3.212450821933277e-05, | |
| "loss": 0.5025, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.8156028368794326, | |
| "grad_norm": 4.4486548801132875, | |
| "learning_rate": 3.207931968586281e-05, | |
| "loss": 0.535, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.8196555217831814, | |
| "grad_norm": 0.575035701044211, | |
| "learning_rate": 3.203403386721272e-05, | |
| "loss": 0.5136, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.8237082066869301, | |
| "grad_norm": 0.4563880710907371, | |
| "learning_rate": 3.1988651128108245e-05, | |
| "loss": 0.553, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.8277608915906787, | |
| "grad_norm": 0.4588178359675035, | |
| "learning_rate": 3.194317183405573e-05, | |
| "loss": 0.522, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.8318135764944277, | |
| "grad_norm": 0.4610151142830939, | |
| "learning_rate": 3.189759635133914e-05, | |
| "loss": 0.5262, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.8358662613981762, | |
| "grad_norm": 0.47106972053347146, | |
| "learning_rate": 3.185192504701718e-05, | |
| "loss": 0.5055, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.839918946301925, | |
| "grad_norm": 0.4996514947110662, | |
| "learning_rate": 3.1806158288920234e-05, | |
| "loss": 0.5278, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.8439716312056738, | |
| "grad_norm": 0.4447332425452821, | |
| "learning_rate": 3.1760296445647477e-05, | |
| "loss": 0.5187, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.8480243161094225, | |
| "grad_norm": 0.436025433182334, | |
| "learning_rate": 3.1714339886563896e-05, | |
| "loss": 0.5103, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.8520770010131713, | |
| "grad_norm": 0.4435911283290316, | |
| "learning_rate": 3.166828898179731e-05, | |
| "loss": 0.5268, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.8561296859169198, | |
| "grad_norm": 0.4755519379825723, | |
| "learning_rate": 3.162214410223536e-05, | |
| "loss": 0.5367, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.8601823708206688, | |
| "grad_norm": 0.43745331518260105, | |
| "learning_rate": 3.157590561952257e-05, | |
| "loss": 0.5054, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.8642350557244174, | |
| "grad_norm": 0.506874812204114, | |
| "learning_rate": 3.152957390605732e-05, | |
| "loss": 0.5467, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.8682877406281662, | |
| "grad_norm": 0.4563562182319296, | |
| "learning_rate": 3.148314933498886e-05, | |
| "loss": 0.5103, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.872340425531915, | |
| "grad_norm": 0.511279090204102, | |
| "learning_rate": 3.143663228021431e-05, | |
| "loss": 0.5159, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.8763931104356635, | |
| "grad_norm": 0.5210563427852847, | |
| "learning_rate": 3.1390023116375624e-05, | |
| "loss": 0.5289, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.8804457953394125, | |
| "grad_norm": 0.4216133282995977, | |
| "learning_rate": 3.134332221885661e-05, | |
| "loss": 0.5073, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.884498480243161, | |
| "grad_norm": 0.5109875296599887, | |
| "learning_rate": 3.129652996377987e-05, | |
| "loss": 0.5374, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.8885511651469098, | |
| "grad_norm": 0.4398826582348227, | |
| "learning_rate": 3.12496467280038e-05, | |
| "loss": 0.5152, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.8926038500506586, | |
| "grad_norm": 0.5990306347700444, | |
| "learning_rate": 3.120267288911952e-05, | |
| "loss": 0.518, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.8966565349544073, | |
| "grad_norm": 0.4678944678458127, | |
| "learning_rate": 3.11556088254479e-05, | |
| "loss": 0.5484, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.900709219858156, | |
| "grad_norm": 0.586414211710391, | |
| "learning_rate": 3.11084549160364e-05, | |
| "loss": 0.543, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.9047619047619047, | |
| "grad_norm": 0.5029556906315458, | |
| "learning_rate": 3.106121154065615e-05, | |
| "loss": 0.5309, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.9088145896656536, | |
| "grad_norm": 0.6188263570301109, | |
| "learning_rate": 3.1013879079798805e-05, | |
| "loss": 0.5239, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.9128672745694022, | |
| "grad_norm": 0.5197232110656221, | |
| "learning_rate": 3.096645791467348e-05, | |
| "loss": 0.5316, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.916919959473151, | |
| "grad_norm": 0.5049144817665914, | |
| "learning_rate": 3.091894842720373e-05, | |
| "loss": 0.507, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.9209726443768997, | |
| "grad_norm": 0.4872513269437638, | |
| "learning_rate": 3.0871351000024425e-05, | |
| "loss": 0.5675, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.9250253292806483, | |
| "grad_norm": 0.523793855618941, | |
| "learning_rate": 3.0823666016478716e-05, | |
| "loss": 0.5095, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.9290780141843973, | |
| "grad_norm": 0.4475698418425971, | |
| "learning_rate": 3.0775893860614896e-05, | |
| "loss": 0.5385, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.9331306990881458, | |
| "grad_norm": 0.5248101484565839, | |
| "learning_rate": 3.0728034917183336e-05, | |
| "loss": 0.5346, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.9371833839918946, | |
| "grad_norm": 0.43479752861596394, | |
| "learning_rate": 3.06800895716334e-05, | |
| "loss": 0.5341, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.9412360688956434, | |
| "grad_norm": 0.4753365099371603, | |
| "learning_rate": 3.063205821011029e-05, | |
| "loss": 0.4991, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.9452887537993921, | |
| "grad_norm": 0.43641949097561217, | |
| "learning_rate": 3.0583941219452016e-05, | |
| "loss": 0.5293, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.949341438703141, | |
| "grad_norm": 0.4318038415988742, | |
| "learning_rate": 3.053573898718618e-05, | |
| "loss": 0.5175, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.9533941236068895, | |
| "grad_norm": 0.43194074981805536, | |
| "learning_rate": 3.0487451901526956e-05, | |
| "loss": 0.5242, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.9574468085106385, | |
| "grad_norm": 0.4352916731275262, | |
| "learning_rate": 3.0439080351371875e-05, | |
| "loss": 0.5114, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.961499493414387, | |
| "grad_norm": 0.5675741468391975, | |
| "learning_rate": 3.0390624726298764e-05, | |
| "loss": 0.5358, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.9655521783181358, | |
| "grad_norm": 0.4164535121026274, | |
| "learning_rate": 3.034208541656255e-05, | |
| "loss": 0.5448, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.9696048632218845, | |
| "grad_norm": 0.5006076462431339, | |
| "learning_rate": 3.029346281309218e-05, | |
| "loss": 0.5083, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.973657548125633, | |
| "grad_norm": 0.45032708293165896, | |
| "learning_rate": 3.0244757307487415e-05, | |
| "loss": 0.5368, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.977710233029382, | |
| "grad_norm": 0.46440525247323666, | |
| "learning_rate": 3.019596929201569e-05, | |
| "loss": 0.5178, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.9817629179331306, | |
| "grad_norm": 0.4370803534776703, | |
| "learning_rate": 3.0147099159608985e-05, | |
| "loss": 0.5368, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.9858156028368794, | |
| "grad_norm": 0.4779198932334461, | |
| "learning_rate": 3.0098147303860616e-05, | |
| "loss": 0.5186, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.9898682877406282, | |
| "grad_norm": 0.3914689882946684, | |
| "learning_rate": 3.0049114119022117e-05, | |
| "loss": 0.5313, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.993920972644377, | |
| "grad_norm": 0.52269164790699, | |
| "learning_rate": 3.0000000000000004e-05, | |
| "loss": 0.5219, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 1.9979736575481257, | |
| "grad_norm": 0.41065354137650834, | |
| "learning_rate": 2.995080534235264e-05, | |
| "loss": 0.5115, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.0020263424518743, | |
| "grad_norm": 0.6232621074004968, | |
| "learning_rate": 2.9901530542287044e-05, | |
| "loss": 0.453, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.0060790273556233, | |
| "grad_norm": 0.8169909853325651, | |
| "learning_rate": 2.9852175996655676e-05, | |
| "loss": 0.3995, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.010131712259372, | |
| "grad_norm": 1.65561844571941, | |
| "learning_rate": 2.980274210295326e-05, | |
| "loss": 0.3993, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.0141843971631204, | |
| "grad_norm": 0.5510670332038249, | |
| "learning_rate": 2.9753229259313578e-05, | |
| "loss": 0.3809, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.0182370820668694, | |
| "grad_norm": 0.5902591145076758, | |
| "learning_rate": 2.9703637864506274e-05, | |
| "loss": 0.388, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.022289766970618, | |
| "grad_norm": 0.581870188636675, | |
| "learning_rate": 2.965396831793362e-05, | |
| "loss": 0.3923, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.026342451874367, | |
| "grad_norm": 0.5122119525731773, | |
| "learning_rate": 2.9604221019627316e-05, | |
| "loss": 0.3924, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.0303951367781155, | |
| "grad_norm": 0.6369001201435273, | |
| "learning_rate": 2.955439637024526e-05, | |
| "loss": 0.4021, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.0344478216818644, | |
| "grad_norm": 0.5607219551159003, | |
| "learning_rate": 2.9504494771068334e-05, | |
| "loss": 0.3863, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.038500506585613, | |
| "grad_norm": 0.5690904246439554, | |
| "learning_rate": 2.9454516623997156e-05, | |
| "loss": 0.3982, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.0425531914893615, | |
| "grad_norm": 0.5606348830350215, | |
| "learning_rate": 2.9404462331548847e-05, | |
| "loss": 0.3895, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.0466058763931105, | |
| "grad_norm": 0.4868023717582465, | |
| "learning_rate": 2.93543322968538e-05, | |
| "loss": 0.3837, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.050658561296859, | |
| "grad_norm": 0.47931255663885713, | |
| "learning_rate": 2.9304126923652428e-05, | |
| "loss": 0.3664, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.054711246200608, | |
| "grad_norm": 0.5223096925986057, | |
| "learning_rate": 2.9253846616291896e-05, | |
| "loss": 0.3757, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.0587639311043566, | |
| "grad_norm": 0.49467389021849756, | |
| "learning_rate": 2.9203491779722896e-05, | |
| "loss": 0.3933, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.062816616008105, | |
| "grad_norm": 0.4782933391638284, | |
| "learning_rate": 2.9153062819496357e-05, | |
| "loss": 0.3704, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.066869300911854, | |
| "grad_norm": 0.45425163866874996, | |
| "learning_rate": 2.9102560141760178e-05, | |
| "loss": 0.3855, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.0709219858156027, | |
| "grad_norm": 0.4672148090714988, | |
| "learning_rate": 2.9051984153256004e-05, | |
| "loss": 0.3812, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.0749746707193517, | |
| "grad_norm": 0.4586673041925491, | |
| "learning_rate": 2.900133526131588e-05, | |
| "loss": 0.3804, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.0790273556231003, | |
| "grad_norm": 0.4470522437231022, | |
| "learning_rate": 2.8950613873859025e-05, | |
| "loss": 0.3584, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.0830800405268493, | |
| "grad_norm": 0.4840466133147679, | |
| "learning_rate": 2.8899820399388515e-05, | |
| "loss": 0.3703, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.087132725430598, | |
| "grad_norm": 0.43699024485975313, | |
| "learning_rate": 2.8848955246988012e-05, | |
| "loss": 0.3697, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.0911854103343464, | |
| "grad_norm": 0.4617029420399355, | |
| "learning_rate": 2.879801882631847e-05, | |
| "loss": 0.3802, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.0952380952380953, | |
| "grad_norm": 0.45405988329926195, | |
| "learning_rate": 2.8747011547614808e-05, | |
| "loss": 0.3998, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.099290780141844, | |
| "grad_norm": 0.44440396091154216, | |
| "learning_rate": 2.8695933821682635e-05, | |
| "loss": 0.37, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.103343465045593, | |
| "grad_norm": 0.4762703853920394, | |
| "learning_rate": 2.864478605989494e-05, | |
| "loss": 0.3718, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.1073961499493414, | |
| "grad_norm": 0.4150592525523646, | |
| "learning_rate": 2.8593568674188765e-05, | |
| "loss": 0.3767, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.11144883485309, | |
| "grad_norm": 0.4725077696341366, | |
| "learning_rate": 2.8542282077061892e-05, | |
| "loss": 0.3848, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.115501519756839, | |
| "grad_norm": 0.42378037803754093, | |
| "learning_rate": 2.8490926681569523e-05, | |
| "loss": 0.3726, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.1195542046605875, | |
| "grad_norm": 0.38885784145222896, | |
| "learning_rate": 2.8439502901320956e-05, | |
| "loss": 0.3885, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.1236068895643365, | |
| "grad_norm": 0.4415263609638792, | |
| "learning_rate": 2.8388011150476237e-05, | |
| "loss": 0.3906, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.127659574468085, | |
| "grad_norm": 0.41640563801208946, | |
| "learning_rate": 2.8336451843742866e-05, | |
| "loss": 0.3598, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.1317122593718336, | |
| "grad_norm": 0.4096956476053644, | |
| "learning_rate": 2.8284825396372387e-05, | |
| "loss": 0.3842, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.1357649442755826, | |
| "grad_norm": 0.4377291728632232, | |
| "learning_rate": 2.8233132224157132e-05, | |
| "loss": 0.3887, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.139817629179331, | |
| "grad_norm": 0.3737306695119243, | |
| "learning_rate": 2.8181372743426805e-05, | |
| "loss": 0.3648, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.14387031408308, | |
| "grad_norm": 0.42100018353041196, | |
| "learning_rate": 2.8129547371045128e-05, | |
| "loss": 0.3744, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.1479229989868287, | |
| "grad_norm": 0.41436306962303654, | |
| "learning_rate": 2.8077656524406534e-05, | |
| "loss": 0.3953, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.1519756838905777, | |
| "grad_norm": 0.3858696987436117, | |
| "learning_rate": 2.802570062143278e-05, | |
| "loss": 0.3765, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.1560283687943262, | |
| "grad_norm": 0.4124078601251155, | |
| "learning_rate": 2.7973680080569555e-05, | |
| "loss": 0.3693, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.160081053698075, | |
| "grad_norm": 0.4266867491757614, | |
| "learning_rate": 2.792159532078314e-05, | |
| "loss": 0.3664, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.164133738601824, | |
| "grad_norm": 0.3978170466321553, | |
| "learning_rate": 2.7869446761557033e-05, | |
| "loss": 0.3816, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.1681864235055723, | |
| "grad_norm": 0.43102734791598346, | |
| "learning_rate": 2.781723482288857e-05, | |
| "loss": 0.3825, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.1722391084093213, | |
| "grad_norm": 0.3901304725972472, | |
| "learning_rate": 2.7764959925285517e-05, | |
| "loss": 0.3788, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.17629179331307, | |
| "grad_norm": 0.4865783173738111, | |
| "learning_rate": 2.771262248976272e-05, | |
| "loss": 0.3857, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.180344478216819, | |
| "grad_norm": 0.3785043876040034, | |
| "learning_rate": 2.7660222937838677e-05, | |
| "loss": 0.3766, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.1843971631205674, | |
| "grad_norm": 0.43871924157027203, | |
| "learning_rate": 2.7607761691532186e-05, | |
| "loss": 0.36, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.188449848024316, | |
| "grad_norm": 0.4032015416906201, | |
| "learning_rate": 2.7555239173358916e-05, | |
| "loss": 0.3858, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.192502532928065, | |
| "grad_norm": 0.42985166613333153, | |
| "learning_rate": 2.7502655806328e-05, | |
| "loss": 0.3801, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.1965552178318135, | |
| "grad_norm": 0.4081432867416322, | |
| "learning_rate": 2.7450012013938648e-05, | |
| "loss": 0.3796, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.2006079027355625, | |
| "grad_norm": 0.4105681252274914, | |
| "learning_rate": 2.739730822017673e-05, | |
| "loss": 0.3871, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.204660587639311, | |
| "grad_norm": 0.4405657516618478, | |
| "learning_rate": 2.7344544849511355e-05, | |
| "loss": 0.3871, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.2087132725430596, | |
| "grad_norm": 0.40420869178467833, | |
| "learning_rate": 2.7291722326891456e-05, | |
| "loss": 0.3947, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.2127659574468086, | |
| "grad_norm": 0.41475963587342235, | |
| "learning_rate": 2.723884107774236e-05, | |
| "loss": 0.3766, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.216818642350557, | |
| "grad_norm": 0.4352827849622015, | |
| "learning_rate": 2.718590152796239e-05, | |
| "loss": 0.3999, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.220871327254306, | |
| "grad_norm": 0.38854387845945204, | |
| "learning_rate": 2.71329041039194e-05, | |
| "loss": 0.3651, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.2249240121580547, | |
| "grad_norm": 0.4350894454142731, | |
| "learning_rate": 2.7079849232447357e-05, | |
| "loss": 0.3852, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.2289766970618032, | |
| "grad_norm": 0.4286420643863448, | |
| "learning_rate": 2.7026737340842895e-05, | |
| "loss": 0.3859, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.2330293819655522, | |
| "grad_norm": 0.40829811576315933, | |
| "learning_rate": 2.697356885686189e-05, | |
| "loss": 0.3897, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.237082066869301, | |
| "grad_norm": 0.4119258956286351, | |
| "learning_rate": 2.6920344208716014e-05, | |
| "loss": 0.396, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.2411347517730498, | |
| "grad_norm": 0.40459874467202434, | |
| "learning_rate": 2.6867063825069252e-05, | |
| "loss": 0.3646, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.2451874366767983, | |
| "grad_norm": 0.44324114811035326, | |
| "learning_rate": 2.6813728135034494e-05, | |
| "loss": 0.3832, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.2492401215805473, | |
| "grad_norm": 0.3738346466608523, | |
| "learning_rate": 2.6760337568170056e-05, | |
| "loss": 0.3819, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.253292806484296, | |
| "grad_norm": 0.45453455890313166, | |
| "learning_rate": 2.6706892554476226e-05, | |
| "loss": 0.3761, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.2573454913880444, | |
| "grad_norm": 0.3943998611110839, | |
| "learning_rate": 2.6653393524391795e-05, | |
| "loss": 0.3882, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.2613981762917934, | |
| "grad_norm": 0.41387653919697004, | |
| "learning_rate": 2.6599840908790592e-05, | |
| "loss": 0.3727, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.265450861195542, | |
| "grad_norm": 0.3986327009516797, | |
| "learning_rate": 2.6546235138978028e-05, | |
| "loss": 0.3634, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.269503546099291, | |
| "grad_norm": 0.48499203758999465, | |
| "learning_rate": 2.6492576646687597e-05, | |
| "loss": 0.3857, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.2735562310030395, | |
| "grad_norm": 0.4226373434122471, | |
| "learning_rate": 2.6438865864077425e-05, | |
| "loss": 0.3711, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.2776089159067885, | |
| "grad_norm": 0.44094398538075524, | |
| "learning_rate": 2.6385103223726766e-05, | |
| "loss": 0.3726, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.281661600810537, | |
| "grad_norm": 0.42810302434452924, | |
| "learning_rate": 2.6331289158632537e-05, | |
| "loss": 0.3849, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.2857142857142856, | |
| "grad_norm": 0.4184503377943871, | |
| "learning_rate": 2.6277424102205817e-05, | |
| "loss": 0.37, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.2897669706180346, | |
| "grad_norm": 0.4432225616112314, | |
| "learning_rate": 2.6223508488268374e-05, | |
| "loss": 0.3696, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.293819655521783, | |
| "grad_norm": 0.38533088289882106, | |
| "learning_rate": 2.6169542751049148e-05, | |
| "loss": 0.3846, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.297872340425532, | |
| "grad_norm": 0.45459020294460795, | |
| "learning_rate": 2.6115527325180754e-05, | |
| "loss": 0.3483, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.3019250253292807, | |
| "grad_norm": 0.3987525159041936, | |
| "learning_rate": 2.606146264569603e-05, | |
| "loss": 0.3797, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.305977710233029, | |
| "grad_norm": 0.3917876397747245, | |
| "learning_rate": 2.6007349148024447e-05, | |
| "loss": 0.3636, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.310030395136778, | |
| "grad_norm": 0.4248791571615101, | |
| "learning_rate": 2.5953187267988694e-05, | |
| "loss": 0.3773, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.3140830800405268, | |
| "grad_norm": 0.39213120685951824, | |
| "learning_rate": 2.5898977441801097e-05, | |
| "loss": 0.375, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.3181357649442758, | |
| "grad_norm": 0.4188292988155098, | |
| "learning_rate": 2.584472010606015e-05, | |
| "loss": 0.3746, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.3221884498480243, | |
| "grad_norm": 0.3963465963177131, | |
| "learning_rate": 2.5790415697746976e-05, | |
| "loss": 0.3682, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.326241134751773, | |
| "grad_norm": 0.42230860830833267, | |
| "learning_rate": 2.5736064654221808e-05, | |
| "loss": 0.3845, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.330293819655522, | |
| "grad_norm": 0.4436008807056159, | |
| "learning_rate": 2.568166741322048e-05, | |
| "loss": 0.3591, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.3343465045592704, | |
| "grad_norm": 0.37066010387279286, | |
| "learning_rate": 2.56272244128509e-05, | |
| "loss": 0.3612, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.3383991894630194, | |
| "grad_norm": 0.4330675968766995, | |
| "learning_rate": 2.55727360915895e-05, | |
| "loss": 0.3867, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.342451874366768, | |
| "grad_norm": 0.40880093907937803, | |
| "learning_rate": 2.5518202888277734e-05, | |
| "loss": 0.3786, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.3465045592705165, | |
| "grad_norm": 0.39795064444132017, | |
| "learning_rate": 2.5463625242118523e-05, | |
| "loss": 0.3813, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.3505572441742655, | |
| "grad_norm": 0.37522536620671515, | |
| "learning_rate": 2.5409003592672723e-05, | |
| "loss": 0.3835, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.354609929078014, | |
| "grad_norm": 0.4479217190524236, | |
| "learning_rate": 2.535433837985559e-05, | |
| "loss": 0.3824, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.358662613981763, | |
| "grad_norm": 0.40930651198571455, | |
| "learning_rate": 2.529963004393324e-05, | |
| "loss": 0.3792, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.3627152988855116, | |
| "grad_norm": 0.36076212305214456, | |
| "learning_rate": 2.524487902551908e-05, | |
| "loss": 0.378, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.3667679837892606, | |
| "grad_norm": 0.42039817052177986, | |
| "learning_rate": 2.519008576557029e-05, | |
| "loss": 0.3687, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.370820668693009, | |
| "grad_norm": 0.37379960596206885, | |
| "learning_rate": 2.5135250705384254e-05, | |
| "loss": 0.3664, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.3748733535967577, | |
| "grad_norm": 0.4467751129733778, | |
| "learning_rate": 2.5080374286595007e-05, | |
| "loss": 0.371, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.3789260385005067, | |
| "grad_norm": 0.4007386534063922, | |
| "learning_rate": 2.5025456951169677e-05, | |
| "loss": 0.3554, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.382978723404255, | |
| "grad_norm": 0.4119635809316113, | |
| "learning_rate": 2.4970499141404942e-05, | |
| "loss": 0.378, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.387031408308004, | |
| "grad_norm": 0.47346681867348134, | |
| "learning_rate": 2.491550129992345e-05, | |
| "loss": 0.3606, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.3910840932117527, | |
| "grad_norm": 0.40564117950013207, | |
| "learning_rate": 2.486046386967024e-05, | |
| "loss": 0.3902, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.3951367781155017, | |
| "grad_norm": 0.4190961363148275, | |
| "learning_rate": 2.4805387293909214e-05, | |
| "loss": 0.3615, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.3991894630192503, | |
| "grad_norm": 0.44932026629769223, | |
| "learning_rate": 2.4750272016219552e-05, | |
| "loss": 0.3552, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.403242147922999, | |
| "grad_norm": 0.4136435376168819, | |
| "learning_rate": 2.4695118480492114e-05, | |
| "loss": 0.3799, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.407294832826748, | |
| "grad_norm": 0.47035995869422675, | |
| "learning_rate": 2.4639927130925898e-05, | |
| "loss": 0.3712, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.4113475177304964, | |
| "grad_norm": 0.4262153307259335, | |
| "learning_rate": 2.458469841202444e-05, | |
| "loss": 0.3665, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.4154002026342454, | |
| "grad_norm": 0.41518180585769826, | |
| "learning_rate": 2.452943276859226e-05, | |
| "loss": 0.3846, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.419452887537994, | |
| "grad_norm": 0.41176549247240596, | |
| "learning_rate": 2.447413064573125e-05, | |
| "loss": 0.39, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.4235055724417425, | |
| "grad_norm": 0.42071307008133824, | |
| "learning_rate": 2.4418792488837095e-05, | |
| "loss": 0.3671, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.4275582573454915, | |
| "grad_norm": 0.39871906594638695, | |
| "learning_rate": 2.4363418743595713e-05, | |
| "loss": 0.3891, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.43161094224924, | |
| "grad_norm": 0.43702924369234947, | |
| "learning_rate": 2.430800985597963e-05, | |
| "loss": 0.3909, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.435663627152989, | |
| "grad_norm": 0.4131901920541675, | |
| "learning_rate": 2.4252566272244415e-05, | |
| "loss": 0.3664, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.4397163120567376, | |
| "grad_norm": 0.4195389982623185, | |
| "learning_rate": 2.4197088438925063e-05, | |
| "loss": 0.365, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.443768996960486, | |
| "grad_norm": 0.42836971921357075, | |
| "learning_rate": 2.4141576802832417e-05, | |
| "loss": 0.3764, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.447821681864235, | |
| "grad_norm": 0.3818961441018969, | |
| "learning_rate": 2.408603181104957e-05, | |
| "loss": 0.3837, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.4518743667679836, | |
| "grad_norm": 0.46811873730436787, | |
| "learning_rate": 2.4030453910928245e-05, | |
| "loss": 0.3775, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.4559270516717326, | |
| "grad_norm": 0.382206133462594, | |
| "learning_rate": 2.397484355008521e-05, | |
| "loss": 0.3741, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.459979736575481, | |
| "grad_norm": 0.4333455945048134, | |
| "learning_rate": 2.3919201176398662e-05, | |
| "loss": 0.3884, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.46403242147923, | |
| "grad_norm": 0.3863605999991453, | |
| "learning_rate": 2.3863527238004633e-05, | |
| "loss": 0.4034, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.4680851063829787, | |
| "grad_norm": 0.3772576840977968, | |
| "learning_rate": 2.380782218329337e-05, | |
| "loss": 0.3851, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.4721377912867273, | |
| "grad_norm": 0.4101064024145213, | |
| "learning_rate": 2.3752086460905725e-05, | |
| "loss": 0.3816, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.4761904761904763, | |
| "grad_norm": 0.40575792529637883, | |
| "learning_rate": 2.3696320519729544e-05, | |
| "loss": 0.3885, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.480243161094225, | |
| "grad_norm": 0.3891190110820612, | |
| "learning_rate": 2.3640524808896045e-05, | |
| "loss": 0.3963, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.484295845997974, | |
| "grad_norm": 0.3824125650383179, | |
| "learning_rate": 2.3584699777776222e-05, | |
| "loss": 0.3775, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.4883485309017224, | |
| "grad_norm": 0.3822018624287662, | |
| "learning_rate": 2.3528845875977195e-05, | |
| "loss": 0.3712, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.4924012158054714, | |
| "grad_norm": 0.3902267787396165, | |
| "learning_rate": 2.3472963553338614e-05, | |
| "loss": 0.372, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.49645390070922, | |
| "grad_norm": 0.3617482440577049, | |
| "learning_rate": 2.341705325992901e-05, | |
| "loss": 0.3703, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.5005065856129685, | |
| "grad_norm": 0.39054685343275136, | |
| "learning_rate": 2.336111544604222e-05, | |
| "loss": 0.3602, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.5045592705167175, | |
| "grad_norm": 0.3735509393891408, | |
| "learning_rate": 2.33051505621937e-05, | |
| "loss": 0.3762, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.508611955420466, | |
| "grad_norm": 0.35661656981457157, | |
| "learning_rate": 2.324915905911693e-05, | |
| "loss": 0.3745, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.512664640324215, | |
| "grad_norm": 0.38820137263090165, | |
| "learning_rate": 2.319314138775977e-05, | |
| "loss": 0.3769, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.5167173252279635, | |
| "grad_norm": 0.35358970141786616, | |
| "learning_rate": 2.3137097999280856e-05, | |
| "loss": 0.3768, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.520770010131712, | |
| "grad_norm": 0.3895221568896916, | |
| "learning_rate": 2.308102934504593e-05, | |
| "loss": 0.3714, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.524822695035461, | |
| "grad_norm": 0.37935152151278373, | |
| "learning_rate": 2.3024935876624222e-05, | |
| "loss": 0.3925, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.5288753799392096, | |
| "grad_norm": 0.3763570799159414, | |
| "learning_rate": 2.2968818045784813e-05, | |
| "loss": 0.377, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.5329280648429586, | |
| "grad_norm": 0.3708096374727324, | |
| "learning_rate": 2.2912676304493006e-05, | |
| "loss": 0.3812, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.536980749746707, | |
| "grad_norm": 0.3819554217877225, | |
| "learning_rate": 2.2856511104906668e-05, | |
| "loss": 0.4035, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.5410334346504557, | |
| "grad_norm": 0.36913526743289365, | |
| "learning_rate": 2.2800322899372586e-05, | |
| "loss": 0.3688, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.5450861195542047, | |
| "grad_norm": 0.3639443937619455, | |
| "learning_rate": 2.2744112140422844e-05, | |
| "loss": 0.3791, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.5491388044579533, | |
| "grad_norm": 0.4149934290729677, | |
| "learning_rate": 2.2687879280771177e-05, | |
| "loss": 0.3958, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.5531914893617023, | |
| "grad_norm": 0.38347523579059295, | |
| "learning_rate": 2.26316247733093e-05, | |
| "loss": 0.3636, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.557244174265451, | |
| "grad_norm": 0.4420509005241039, | |
| "learning_rate": 2.257534907110328e-05, | |
| "loss": 0.3959, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.5612968591691994, | |
| "grad_norm": 0.408905711610069, | |
| "learning_rate": 2.2519052627389882e-05, | |
| "loss": 0.3766, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.5653495440729484, | |
| "grad_norm": 0.37825266042953937, | |
| "learning_rate": 2.246273589557294e-05, | |
| "loss": 0.3849, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.569402228976697, | |
| "grad_norm": 0.43414986147382434, | |
| "learning_rate": 2.240639932921966e-05, | |
| "loss": 0.3883, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.573454913880446, | |
| "grad_norm": 0.3744659398071677, | |
| "learning_rate": 2.2350043382056995e-05, | |
| "loss": 0.3756, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.5775075987841944, | |
| "grad_norm": 0.40986103144476926, | |
| "learning_rate": 2.2293668507968015e-05, | |
| "loss": 0.3681, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.581560283687943, | |
| "grad_norm": 0.3996212073361045, | |
| "learning_rate": 2.2237275160988186e-05, | |
| "loss": 0.3798, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.585612968591692, | |
| "grad_norm": 0.3793763813055075, | |
| "learning_rate": 2.2180863795301787e-05, | |
| "loss": 0.3861, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.589665653495441, | |
| "grad_norm": 0.4005993878505897, | |
| "learning_rate": 2.212443486523819e-05, | |
| "loss": 0.3694, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.5937183383991895, | |
| "grad_norm": 0.3899642945934973, | |
| "learning_rate": 2.2067988825268243e-05, | |
| "loss": 0.3941, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.597771023302938, | |
| "grad_norm": 0.4038712054901919, | |
| "learning_rate": 2.2011526130000596e-05, | |
| "loss": 0.3784, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.601823708206687, | |
| "grad_norm": 0.4120780054958312, | |
| "learning_rate": 2.1955047234178038e-05, | |
| "loss": 0.3906, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.6058763931104356, | |
| "grad_norm": 0.37054887907501416, | |
| "learning_rate": 2.1898552592673825e-05, | |
| "loss": 0.366, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.6099290780141846, | |
| "grad_norm": 0.39411090352456984, | |
| "learning_rate": 2.184204266048803e-05, | |
| "loss": 0.3764, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.613981762917933, | |
| "grad_norm": 0.40547711641747647, | |
| "learning_rate": 2.1785517892743887e-05, | |
| "loss": 0.3779, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.6180344478216817, | |
| "grad_norm": 0.37010071859343574, | |
| "learning_rate": 2.17289787446841e-05, | |
| "loss": 0.4027, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.6220871327254307, | |
| "grad_norm": 0.37799910699161426, | |
| "learning_rate": 2.1672425671667198e-05, | |
| "loss": 0.3659, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.6261398176291793, | |
| "grad_norm": 0.40366355092847167, | |
| "learning_rate": 2.161585912916385e-05, | |
| "loss": 0.3795, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.6301925025329282, | |
| "grad_norm": 0.38310254282681644, | |
| "learning_rate": 2.1559279572753214e-05, | |
| "loss": 0.3594, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.634245187436677, | |
| "grad_norm": 0.3883302189496693, | |
| "learning_rate": 2.1502687458119268e-05, | |
| "loss": 0.3968, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.6382978723404253, | |
| "grad_norm": 0.3735177985957199, | |
| "learning_rate": 2.1446083241047116e-05, | |
| "loss": 0.392, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.6423505572441743, | |
| "grad_norm": 0.36619361662693517, | |
| "learning_rate": 2.1389467377419333e-05, | |
| "loss": 0.3911, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.646403242147923, | |
| "grad_norm": 0.3556681152065816, | |
| "learning_rate": 2.133284032321232e-05, | |
| "loss": 0.3948, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.650455927051672, | |
| "grad_norm": 0.37492067817330393, | |
| "learning_rate": 2.1276202534492566e-05, | |
| "loss": 0.3822, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.6545086119554204, | |
| "grad_norm": 0.3791878754761423, | |
| "learning_rate": 2.121955446741306e-05, | |
| "loss": 0.3557, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.658561296859169, | |
| "grad_norm": 0.41548913876098637, | |
| "learning_rate": 2.1162896578209517e-05, | |
| "loss": 0.3695, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.662613981762918, | |
| "grad_norm": 0.35442545340571024, | |
| "learning_rate": 2.1106229323196813e-05, | |
| "loss": 0.3727, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.6666666666666665, | |
| "grad_norm": 0.3565427539064041, | |
| "learning_rate": 2.1049553158765214e-05, | |
| "loss": 0.3589, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.6707193515704155, | |
| "grad_norm": 0.3746704132058309, | |
| "learning_rate": 2.0992868541376764e-05, | |
| "loss": 0.3974, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.674772036474164, | |
| "grad_norm": 0.3791594215115498, | |
| "learning_rate": 2.093617592756158e-05, | |
| "loss": 0.3575, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.6788247213779126, | |
| "grad_norm": 0.3710386307717811, | |
| "learning_rate": 2.0879475773914167e-05, | |
| "loss": 0.3801, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.6828774062816616, | |
| "grad_norm": 0.36892767257957243, | |
| "learning_rate": 2.082276853708978e-05, | |
| "loss": 0.371, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.6869300911854106, | |
| "grad_norm": 0.39783575774095603, | |
| "learning_rate": 2.076605467380071e-05, | |
| "loss": 0.3858, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.690982776089159, | |
| "grad_norm": 0.3647315679168818, | |
| "learning_rate": 2.0709334640812613e-05, | |
| "loss": 0.4014, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.6950354609929077, | |
| "grad_norm": 0.3484398498485764, | |
| "learning_rate": 2.0652608894940824e-05, | |
| "loss": 0.383, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.6990881458966567, | |
| "grad_norm": 0.4013105776920568, | |
| "learning_rate": 2.0595877893046722e-05, | |
| "loss": 0.3851, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.7031408308004052, | |
| "grad_norm": 0.36057351575739904, | |
| "learning_rate": 2.0539142092033985e-05, | |
| "loss": 0.3689, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.7071935157041542, | |
| "grad_norm": 0.37345619593276425, | |
| "learning_rate": 2.048240194884496e-05, | |
| "loss": 0.3924, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.711246200607903, | |
| "grad_norm": 0.40335554590301, | |
| "learning_rate": 2.042565792045695e-05, | |
| "loss": 0.3775, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.7152988855116513, | |
| "grad_norm": 0.3644864047403148, | |
| "learning_rate": 2.036891046387857e-05, | |
| "loss": 0.371, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.7193515704154003, | |
| "grad_norm": 0.38553621926411713, | |
| "learning_rate": 2.0312160036146036e-05, | |
| "loss": 0.379, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.723404255319149, | |
| "grad_norm": 0.3811039494400777, | |
| "learning_rate": 2.025540709431948e-05, | |
| "loss": 0.3859, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.727456940222898, | |
| "grad_norm": 0.365337052525446, | |
| "learning_rate": 2.0198652095479298e-05, | |
| "loss": 0.3882, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.7315096251266464, | |
| "grad_norm": 0.35817396458331424, | |
| "learning_rate": 2.014189549672245e-05, | |
| "loss": 0.3787, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.735562310030395, | |
| "grad_norm": 0.38239411741221413, | |
| "learning_rate": 2.0085137755158776e-05, | |
| "loss": 0.3418, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.739614994934144, | |
| "grad_norm": 0.36510941017207393, | |
| "learning_rate": 2.0028379327907327e-05, | |
| "loss": 0.3725, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.7436676798378925, | |
| "grad_norm": 0.3898901677976552, | |
| "learning_rate": 1.9971620672092676e-05, | |
| "loss": 0.4074, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.7477203647416415, | |
| "grad_norm": 0.3862996385449035, | |
| "learning_rate": 1.991486224484123e-05, | |
| "loss": 0.3867, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.75177304964539, | |
| "grad_norm": 0.38460961853036985, | |
| "learning_rate": 1.985810450327756e-05, | |
| "loss": 0.3981, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.7558257345491386, | |
| "grad_norm": 0.39507663814375005, | |
| "learning_rate": 1.9801347904520706e-05, | |
| "loss": 0.3751, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.7598784194528876, | |
| "grad_norm": 0.37860774978681155, | |
| "learning_rate": 1.974459290568053e-05, | |
| "loss": 0.3808, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.763931104356636, | |
| "grad_norm": 0.4113473405513068, | |
| "learning_rate": 1.968783996385397e-05, | |
| "loss": 0.3736, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.767983789260385, | |
| "grad_norm": 0.4435353149534309, | |
| "learning_rate": 1.963108953612143e-05, | |
| "loss": 0.38, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.7720364741641337, | |
| "grad_norm": 0.3861534109094039, | |
| "learning_rate": 1.9574342079543056e-05, | |
| "loss": 0.3896, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.7760891590678822, | |
| "grad_norm": 0.42740880012429777, | |
| "learning_rate": 1.9517598051155046e-05, | |
| "loss": 0.3691, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.780141843971631, | |
| "grad_norm": 0.3623737604637641, | |
| "learning_rate": 1.9460857907966025e-05, | |
| "loss": 0.3757, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.78419452887538, | |
| "grad_norm": 0.39156193949987783, | |
| "learning_rate": 1.9404122106953285e-05, | |
| "loss": 0.3793, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.7882472137791288, | |
| "grad_norm": 0.3932330085246938, | |
| "learning_rate": 1.9347391105059176e-05, | |
| "loss": 0.3922, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.7922998986828773, | |
| "grad_norm": 0.3519508323517644, | |
| "learning_rate": 1.92906653591874e-05, | |
| "loss": 0.3823, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.7963525835866263, | |
| "grad_norm": 0.3767444119709347, | |
| "learning_rate": 1.9233945326199295e-05, | |
| "loss": 0.3897, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.800405268490375, | |
| "grad_norm": 0.3728319282448301, | |
| "learning_rate": 1.917723146291022e-05, | |
| "loss": 0.3866, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.804457953394124, | |
| "grad_norm": 0.4102016109415563, | |
| "learning_rate": 1.912052422608584e-05, | |
| "loss": 0.3675, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.8085106382978724, | |
| "grad_norm": 0.34434306116290503, | |
| "learning_rate": 1.9063824072438428e-05, | |
| "loss": 0.3557, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.812563323201621, | |
| "grad_norm": 0.3902517864929646, | |
| "learning_rate": 1.9007131458623246e-05, | |
| "loss": 0.3849, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.81661600810537, | |
| "grad_norm": 0.36256206010063297, | |
| "learning_rate": 1.895044684123479e-05, | |
| "loss": 0.3683, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.8206686930091185, | |
| "grad_norm": 0.38608133916394377, | |
| "learning_rate": 1.8893770676803194e-05, | |
| "loss": 0.3926, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.8247213779128675, | |
| "grad_norm": 0.35827205733960754, | |
| "learning_rate": 1.8837103421790486e-05, | |
| "loss": 0.3635, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.828774062816616, | |
| "grad_norm": 0.3979637154113055, | |
| "learning_rate": 1.8780445532586952e-05, | |
| "loss": 0.3878, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.8328267477203646, | |
| "grad_norm": 0.3841873789489047, | |
| "learning_rate": 1.872379746550743e-05, | |
| "loss": 0.346, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.8368794326241136, | |
| "grad_norm": 0.3427459638042756, | |
| "learning_rate": 1.866715967678769e-05, | |
| "loss": 0.3851, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.840932117527862, | |
| "grad_norm": 0.3979434521075449, | |
| "learning_rate": 1.861053262258067e-05, | |
| "loss": 0.3911, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.844984802431611, | |
| "grad_norm": 0.37381380420731086, | |
| "learning_rate": 1.8553916758952897e-05, | |
| "loss": 0.3763, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.8490374873353597, | |
| "grad_norm": 0.3793688296777444, | |
| "learning_rate": 1.8497312541880735e-05, | |
| "loss": 0.362, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.853090172239108, | |
| "grad_norm": 0.3674791219662531, | |
| "learning_rate": 1.8440720427246786e-05, | |
| "loss": 0.3833, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.857142857142857, | |
| "grad_norm": 0.38145437770490875, | |
| "learning_rate": 1.8384140870836157e-05, | |
| "loss": 0.3714, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.8611955420466058, | |
| "grad_norm": 0.35213060021048614, | |
| "learning_rate": 1.8327574328332806e-05, | |
| "loss": 0.3569, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.8652482269503547, | |
| "grad_norm": 0.34949433069776786, | |
| "learning_rate": 1.8271021255315906e-05, | |
| "loss": 0.3664, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.8693009118541033, | |
| "grad_norm": 0.3709009949572628, | |
| "learning_rate": 1.8214482107256117e-05, | |
| "loss": 0.3799, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.873353596757852, | |
| "grad_norm": 0.37066440498453446, | |
| "learning_rate": 1.8157957339511968e-05, | |
| "loss": 0.3911, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.877406281661601, | |
| "grad_norm": 0.3523618265711624, | |
| "learning_rate": 1.8101447407326182e-05, | |
| "loss": 0.3761, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.8814589665653494, | |
| "grad_norm": 0.38747940968963557, | |
| "learning_rate": 1.8044952765821966e-05, | |
| "loss": 0.3939, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.8855116514690984, | |
| "grad_norm": 0.37472074964057683, | |
| "learning_rate": 1.7988473869999407e-05, | |
| "loss": 0.3681, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.889564336372847, | |
| "grad_norm": 0.38440420798382996, | |
| "learning_rate": 1.7932011174731764e-05, | |
| "loss": 0.3797, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.8936170212765955, | |
| "grad_norm": 0.363196702994756, | |
| "learning_rate": 1.7875565134761817e-05, | |
| "loss": 0.3714, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.8976697061803445, | |
| "grad_norm": 0.3680210862226706, | |
| "learning_rate": 1.7819136204698226e-05, | |
| "loss": 0.3756, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.9017223910840935, | |
| "grad_norm": 0.3819471211574995, | |
| "learning_rate": 1.776272483901182e-05, | |
| "loss": 0.3709, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.905775075987842, | |
| "grad_norm": 0.39754352225387185, | |
| "learning_rate": 1.7706331492031995e-05, | |
| "loss": 0.3685, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.9098277608915906, | |
| "grad_norm": 0.38795236506454567, | |
| "learning_rate": 1.764995661794301e-05, | |
| "loss": 0.3768, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.9138804457953396, | |
| "grad_norm": 0.37447055734407164, | |
| "learning_rate": 1.759360067078035e-05, | |
| "loss": 0.3896, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.917933130699088, | |
| "grad_norm": 0.3659105336937824, | |
| "learning_rate": 1.7537264104427064e-05, | |
| "loss": 0.3777, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.921985815602837, | |
| "grad_norm": 0.35532016905847696, | |
| "learning_rate": 1.748094737261012e-05, | |
| "loss": 0.3906, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.9260385005065856, | |
| "grad_norm": 0.36426913389764304, | |
| "learning_rate": 1.7424650928896726e-05, | |
| "loss": 0.3558, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.930091185410334, | |
| "grad_norm": 0.3390834994043332, | |
| "learning_rate": 1.7368375226690712e-05, | |
| "loss": 0.3826, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.934143870314083, | |
| "grad_norm": 0.38006247429583906, | |
| "learning_rate": 1.731212071922883e-05, | |
| "loss": 0.3499, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.9381965552178317, | |
| "grad_norm": 0.3766950364739606, | |
| "learning_rate": 1.7255887859577156e-05, | |
| "loss": 0.3925, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.9422492401215807, | |
| "grad_norm": 0.3445079726843259, | |
| "learning_rate": 1.7199677100627427e-05, | |
| "loss": 0.3907, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.9463019250253293, | |
| "grad_norm": 0.36767398913176497, | |
| "learning_rate": 1.7143488895093343e-05, | |
| "loss": 0.3748, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.950354609929078, | |
| "grad_norm": 0.36334071602063733, | |
| "learning_rate": 1.7087323695506994e-05, | |
| "loss": 0.3741, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.954407294832827, | |
| "grad_norm": 0.3892231746309117, | |
| "learning_rate": 1.7031181954215194e-05, | |
| "loss": 0.4068, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.9584599797365754, | |
| "grad_norm": 0.3744068704518135, | |
| "learning_rate": 1.6975064123375788e-05, | |
| "loss": 0.4008, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.9625126646403244, | |
| "grad_norm": 0.3527052208533122, | |
| "learning_rate": 1.6918970654954084e-05, | |
| "loss": 0.3608, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.966565349544073, | |
| "grad_norm": 0.34705247102719267, | |
| "learning_rate": 1.686290200071915e-05, | |
| "loss": 0.3741, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.9706180344478215, | |
| "grad_norm": 0.3845768916189244, | |
| "learning_rate": 1.6806858612240234e-05, | |
| "loss": 0.4008, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.9746707193515705, | |
| "grad_norm": 0.36047998183341096, | |
| "learning_rate": 1.6750840940883078e-05, | |
| "loss": 0.3808, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.978723404255319, | |
| "grad_norm": 0.3763867679524316, | |
| "learning_rate": 1.6694849437806305e-05, | |
| "loss": 0.3693, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.982776089159068, | |
| "grad_norm": 0.36441098288125223, | |
| "learning_rate": 1.663888455395778e-05, | |
| "loss": 0.3759, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.9868287740628165, | |
| "grad_norm": 0.3738055776817942, | |
| "learning_rate": 1.6582946740070995e-05, | |
| "loss": 0.3743, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.990881458966565, | |
| "grad_norm": 0.3729511595660726, | |
| "learning_rate": 1.6527036446661396e-05, | |
| "loss": 0.3845, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 2.994934143870314, | |
| "grad_norm": 0.35001643981401726, | |
| "learning_rate": 1.6471154124022818e-05, | |
| "loss": 0.3615, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 2.998986828774063, | |
| "grad_norm": 0.3421336303521715, | |
| "learning_rate": 1.6415300222223788e-05, | |
| "loss": 0.3693, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.0030395136778116, | |
| "grad_norm": 0.5600529519261276, | |
| "learning_rate": 1.6359475191103958e-05, | |
| "loss": 0.2899, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.00709219858156, | |
| "grad_norm": 0.40062655237685113, | |
| "learning_rate": 1.6303679480270466e-05, | |
| "loss": 0.2639, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.011144883485309, | |
| "grad_norm": 0.4904935328441184, | |
| "learning_rate": 1.624791353909428e-05, | |
| "loss": 0.2666, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.0151975683890577, | |
| "grad_norm": 0.7119800692226997, | |
| "learning_rate": 1.619217781670663e-05, | |
| "loss": 0.265, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.0192502532928063, | |
| "grad_norm": 0.42404178935688747, | |
| "learning_rate": 1.6136472761995373e-05, | |
| "loss": 0.2757, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.0233029381965553, | |
| "grad_norm": 0.5232870554733102, | |
| "learning_rate": 1.608079882360134e-05, | |
| "loss": 0.2551, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.027355623100304, | |
| "grad_norm": 0.4974571537460195, | |
| "learning_rate": 1.60251564499148e-05, | |
| "loss": 0.2813, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.031408308004053, | |
| "grad_norm": 0.3708092464792635, | |
| "learning_rate": 1.596954608907176e-05, | |
| "loss": 0.2639, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.0354609929078014, | |
| "grad_norm": 0.47499905270420256, | |
| "learning_rate": 1.591396818895043e-05, | |
| "loss": 0.2779, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.0395136778115504, | |
| "grad_norm": 0.43463722378980485, | |
| "learning_rate": 1.585842319716759e-05, | |
| "loss": 0.2715, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.043566362715299, | |
| "grad_norm": 0.3929820249092284, | |
| "learning_rate": 1.5802911561074944e-05, | |
| "loss": 0.2575, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.0476190476190474, | |
| "grad_norm": 0.40298032506059567, | |
| "learning_rate": 1.5747433727755595e-05, | |
| "loss": 0.2468, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.0516717325227964, | |
| "grad_norm": 0.3795494101135109, | |
| "learning_rate": 1.5691990144020376e-05, | |
| "loss": 0.262, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.055724417426545, | |
| "grad_norm": 0.43527058986853423, | |
| "learning_rate": 1.5636581256404297e-05, | |
| "loss": 0.2493, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.059777102330294, | |
| "grad_norm": 0.38366039855973616, | |
| "learning_rate": 1.558120751116291e-05, | |
| "loss": 0.2499, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.0638297872340425, | |
| "grad_norm": 0.3703456193450308, | |
| "learning_rate": 1.552586935426876e-05, | |
| "loss": 0.259, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.067882472137791, | |
| "grad_norm": 0.38926982776480284, | |
| "learning_rate": 1.547056723140774e-05, | |
| "loss": 0.2686, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.07193515704154, | |
| "grad_norm": 0.3971691022782858, | |
| "learning_rate": 1.5415301587975565e-05, | |
| "loss": 0.274, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.0759878419452886, | |
| "grad_norm": 0.34259308049622955, | |
| "learning_rate": 1.536007286907411e-05, | |
| "loss": 0.2785, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.0800405268490376, | |
| "grad_norm": 0.3519055701287284, | |
| "learning_rate": 1.5304881519507896e-05, | |
| "loss": 0.2685, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.084093211752786, | |
| "grad_norm": 0.3986279325917256, | |
| "learning_rate": 1.5249727983780453e-05, | |
| "loss": 0.2671, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.088145896656535, | |
| "grad_norm": 0.33498139064686766, | |
| "learning_rate": 1.5194612706090786e-05, | |
| "loss": 0.2604, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.0921985815602837, | |
| "grad_norm": 0.35506082326468474, | |
| "learning_rate": 1.5139536130329771e-05, | |
| "loss": 0.2781, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.0962512664640323, | |
| "grad_norm": 0.37338973342339954, | |
| "learning_rate": 1.508449870007656e-05, | |
| "loss": 0.276, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.1003039513677813, | |
| "grad_norm": 0.35216263535692055, | |
| "learning_rate": 1.5029500858595056e-05, | |
| "loss": 0.2752, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.10435663627153, | |
| "grad_norm": 0.379257491254358, | |
| "learning_rate": 1.4974543048830328e-05, | |
| "loss": 0.2728, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.108409321175279, | |
| "grad_norm": 0.34727114309387874, | |
| "learning_rate": 1.4919625713405e-05, | |
| "loss": 0.2642, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.1124620060790273, | |
| "grad_norm": 0.34699442460768515, | |
| "learning_rate": 1.4864749294615756e-05, | |
| "loss": 0.2425, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.116514690982776, | |
| "grad_norm": 0.32785053229340233, | |
| "learning_rate": 1.4809914234429716e-05, | |
| "loss": 0.2658, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.120567375886525, | |
| "grad_norm": 0.33378848976125164, | |
| "learning_rate": 1.4755120974480923e-05, | |
| "loss": 0.2738, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.1246200607902734, | |
| "grad_norm": 0.3218517747604464, | |
| "learning_rate": 1.4700369956066771e-05, | |
| "loss": 0.2364, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.1286727456940224, | |
| "grad_norm": 0.3508861737946556, | |
| "learning_rate": 1.4645661620144413e-05, | |
| "loss": 0.2621, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.132725430597771, | |
| "grad_norm": 0.3278541522766288, | |
| "learning_rate": 1.4590996407327284e-05, | |
| "loss": 0.2533, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.13677811550152, | |
| "grad_norm": 0.34665588248462076, | |
| "learning_rate": 1.4536374757881487e-05, | |
| "loss": 0.2903, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.1408308004052685, | |
| "grad_norm": 0.3341243227978257, | |
| "learning_rate": 1.4481797111722271e-05, | |
| "loss": 0.2604, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.144883485309017, | |
| "grad_norm": 0.32195920903254105, | |
| "learning_rate": 1.4427263908410507e-05, | |
| "loss": 0.2691, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.148936170212766, | |
| "grad_norm": 0.3344948074415684, | |
| "learning_rate": 1.4372775587149108e-05, | |
| "loss": 0.2799, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.1529888551165146, | |
| "grad_norm": 0.3211184436106537, | |
| "learning_rate": 1.4318332586779522e-05, | |
| "loss": 0.2691, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.1570415400202636, | |
| "grad_norm": 0.336496421450049, | |
| "learning_rate": 1.4263935345778202e-05, | |
| "loss": 0.2547, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.161094224924012, | |
| "grad_norm": 0.3320265683727049, | |
| "learning_rate": 1.420958430225303e-05, | |
| "loss": 0.2649, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.1651469098277607, | |
| "grad_norm": 0.3147161518900082, | |
| "learning_rate": 1.415527989393985e-05, | |
| "loss": 0.2811, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.1691995947315097, | |
| "grad_norm": 0.33554847626065776, | |
| "learning_rate": 1.410102255819891e-05, | |
| "loss": 0.2755, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.1732522796352582, | |
| "grad_norm": 0.3219281622858217, | |
| "learning_rate": 1.404681273201131e-05, | |
| "loss": 0.257, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.1773049645390072, | |
| "grad_norm": 0.3307754836566679, | |
| "learning_rate": 1.399265085197556e-05, | |
| "loss": 0.2525, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.181357649442756, | |
| "grad_norm": 0.3368758051951691, | |
| "learning_rate": 1.393853735430398e-05, | |
| "loss": 0.2704, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.1854103343465043, | |
| "grad_norm": 0.3545040034181631, | |
| "learning_rate": 1.3884472674819246e-05, | |
| "loss": 0.2599, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.1894630192502533, | |
| "grad_norm": 0.3387686816478282, | |
| "learning_rate": 1.3830457248950864e-05, | |
| "loss": 0.261, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.193515704154002, | |
| "grad_norm": 0.6260775634296268, | |
| "learning_rate": 1.377649151173163e-05, | |
| "loss": 0.2676, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.197568389057751, | |
| "grad_norm": 0.34774391051268577, | |
| "learning_rate": 1.3722575897794181e-05, | |
| "loss": 0.2649, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.2016210739614994, | |
| "grad_norm": 0.3200340078719376, | |
| "learning_rate": 1.3668710841367472e-05, | |
| "loss": 0.2646, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.2056737588652484, | |
| "grad_norm": 0.33712219582514896, | |
| "learning_rate": 1.361489677627324e-05, | |
| "loss": 0.2702, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.209726443768997, | |
| "grad_norm": 0.34090566307362863, | |
| "learning_rate": 1.3561134135922585e-05, | |
| "loss": 0.2577, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.2137791286727455, | |
| "grad_norm": 0.34090994377651584, | |
| "learning_rate": 1.350742335331241e-05, | |
| "loss": 0.2739, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.2178318135764945, | |
| "grad_norm": 0.32758417576795545, | |
| "learning_rate": 1.345376486102198e-05, | |
| "loss": 0.2365, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.221884498480243, | |
| "grad_norm": 0.31947044505856886, | |
| "learning_rate": 1.3400159091209414e-05, | |
| "loss": 0.2872, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.225937183383992, | |
| "grad_norm": 0.3460900206595306, | |
| "learning_rate": 1.3346606475608216e-05, | |
| "loss": 0.2528, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.2299898682877406, | |
| "grad_norm": 0.3210062034818833, | |
| "learning_rate": 1.3293107445523781e-05, | |
| "loss": 0.2736, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.2340425531914896, | |
| "grad_norm": 0.32619214077310826, | |
| "learning_rate": 1.3239662431829949e-05, | |
| "loss": 0.2599, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.238095238095238, | |
| "grad_norm": 0.3359157797286736, | |
| "learning_rate": 1.3186271864965509e-05, | |
| "loss": 0.2586, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.2421479229989867, | |
| "grad_norm": 0.30941949941469704, | |
| "learning_rate": 1.3132936174930756e-05, | |
| "loss": 0.2785, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.2462006079027357, | |
| "grad_norm": 0.3229983694924969, | |
| "learning_rate": 1.3079655791283995e-05, | |
| "loss": 0.2586, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.2502532928064842, | |
| "grad_norm": 0.33128953406197204, | |
| "learning_rate": 1.3026431143138108e-05, | |
| "loss": 0.2868, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.254305977710233, | |
| "grad_norm": 0.3321507795647326, | |
| "learning_rate": 1.2973262659157114e-05, | |
| "loss": 0.2658, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.2583586626139818, | |
| "grad_norm": 0.3315643951898538, | |
| "learning_rate": 1.2920150767552651e-05, | |
| "loss": 0.2604, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.2624113475177303, | |
| "grad_norm": 0.31291061983277974, | |
| "learning_rate": 1.2867095896080607e-05, | |
| "loss": 0.2604, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.2664640324214793, | |
| "grad_norm": 0.3123892787685489, | |
| "learning_rate": 1.2814098472037612e-05, | |
| "loss": 0.2694, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.270516717325228, | |
| "grad_norm": 0.31219030375062823, | |
| "learning_rate": 1.276115892225764e-05, | |
| "loss": 0.2732, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.274569402228977, | |
| "grad_norm": 0.3225341656832403, | |
| "learning_rate": 1.2708277673108555e-05, | |
| "loss": 0.2536, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.2786220871327254, | |
| "grad_norm": 0.3236606895721125, | |
| "learning_rate": 1.2655455150488649e-05, | |
| "loss": 0.2684, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.282674772036474, | |
| "grad_norm": 0.3345680849985815, | |
| "learning_rate": 1.2602691779823272e-05, | |
| "loss": 0.2501, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.286727456940223, | |
| "grad_norm": 0.3170114262475946, | |
| "learning_rate": 1.2549987986061355e-05, | |
| "loss": 0.2615, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.2907801418439715, | |
| "grad_norm": 0.2977160931511391, | |
| "learning_rate": 1.2497344193672005e-05, | |
| "loss": 0.2716, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.2948328267477205, | |
| "grad_norm": 0.31474378973195855, | |
| "learning_rate": 1.2444760826641092e-05, | |
| "loss": 0.2626, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.298885511651469, | |
| "grad_norm": 0.3148636434779564, | |
| "learning_rate": 1.2392238308467817e-05, | |
| "loss": 0.2498, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.3029381965552176, | |
| "grad_norm": 0.3211908279459109, | |
| "learning_rate": 1.2339777062161326e-05, | |
| "loss": 0.2636, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.3069908814589666, | |
| "grad_norm": 0.3144857442262971, | |
| "learning_rate": 1.2287377510237293e-05, | |
| "loss": 0.2684, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.311043566362715, | |
| "grad_norm": 0.332194708455874, | |
| "learning_rate": 1.2235040074714488e-05, | |
| "loss": 0.2353, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.315096251266464, | |
| "grad_norm": 0.3151369336504755, | |
| "learning_rate": 1.2182765177111434e-05, | |
| "loss": 0.2588, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.3191489361702127, | |
| "grad_norm": 0.30660696690316586, | |
| "learning_rate": 1.213055323844297e-05, | |
| "loss": 0.2545, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.3232016210739617, | |
| "grad_norm": 0.3194962613887905, | |
| "learning_rate": 1.2078404679216864e-05, | |
| "loss": 0.2621, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.32725430597771, | |
| "grad_norm": 0.32662331541891854, | |
| "learning_rate": 1.2026319919430458e-05, | |
| "loss": 0.2705, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.331306990881459, | |
| "grad_norm": 0.31676091474994, | |
| "learning_rate": 1.1974299378567227e-05, | |
| "loss": 0.2577, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.3353596757852078, | |
| "grad_norm": 0.321280032927269, | |
| "learning_rate": 1.1922343475593462e-05, | |
| "loss": 0.267, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.3394123606889563, | |
| "grad_norm": 0.29663169391790173, | |
| "learning_rate": 1.187045262895488e-05, | |
| "loss": 0.2503, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.3434650455927053, | |
| "grad_norm": 0.3132011003523163, | |
| "learning_rate": 1.1818627256573203e-05, | |
| "loss": 0.2631, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.347517730496454, | |
| "grad_norm": 0.35902221639637133, | |
| "learning_rate": 1.1766867775842864e-05, | |
| "loss": 0.2483, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.351570415400203, | |
| "grad_norm": 0.3207823786103355, | |
| "learning_rate": 1.1715174603627615e-05, | |
| "loss": 0.2694, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.3556231003039514, | |
| "grad_norm": 0.32494475598976197, | |
| "learning_rate": 1.1663548156257147e-05, | |
| "loss": 0.2778, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.3596757852077, | |
| "grad_norm": 0.35534253997490584, | |
| "learning_rate": 1.161198884952377e-05, | |
| "loss": 0.2778, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.363728470111449, | |
| "grad_norm": 0.31771795045997614, | |
| "learning_rate": 1.1560497098679056e-05, | |
| "loss": 0.2955, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.3677811550151975, | |
| "grad_norm": 0.3254710910459406, | |
| "learning_rate": 1.1509073318430479e-05, | |
| "loss": 0.2724, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.3718338399189465, | |
| "grad_norm": 0.32610464930335553, | |
| "learning_rate": 1.1457717922938116e-05, | |
| "loss": 0.2572, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.375886524822695, | |
| "grad_norm": 0.32016536013857216, | |
| "learning_rate": 1.1406431325811233e-05, | |
| "loss": 0.2851, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.3799392097264436, | |
| "grad_norm": 0.32613824085952187, | |
| "learning_rate": 1.135521394010506e-05, | |
| "loss": 0.269, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.3839918946301926, | |
| "grad_norm": 0.3176061729200933, | |
| "learning_rate": 1.1304066178317367e-05, | |
| "loss": 0.2678, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.388044579533941, | |
| "grad_norm": 0.3273414850683646, | |
| "learning_rate": 1.1252988452385199e-05, | |
| "loss": 0.2667, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.39209726443769, | |
| "grad_norm": 0.3143731753866215, | |
| "learning_rate": 1.1201981173681536e-05, | |
| "loss": 0.2646, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.3961499493414387, | |
| "grad_norm": 0.3247811205827489, | |
| "learning_rate": 1.1151044753011991e-05, | |
| "loss": 0.2619, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.400202634245187, | |
| "grad_norm": 1.8947263124000093, | |
| "learning_rate": 1.1100179600611491e-05, | |
| "loss": 0.2713, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.404255319148936, | |
| "grad_norm": 0.33637947850863703, | |
| "learning_rate": 1.1049386126140985e-05, | |
| "loss": 0.2602, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.4083080040526847, | |
| "grad_norm": 0.3150752281938797, | |
| "learning_rate": 1.0998664738684128e-05, | |
| "loss": 0.2854, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.4123606889564337, | |
| "grad_norm": 0.3347151860197323, | |
| "learning_rate": 1.0948015846744e-05, | |
| "loss": 0.2474, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.4164133738601823, | |
| "grad_norm": 0.511450944906848, | |
| "learning_rate": 1.0897439858239832e-05, | |
| "loss": 0.2713, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.4204660587639313, | |
| "grad_norm": 0.41419552933404846, | |
| "learning_rate": 1.0846937180503652e-05, | |
| "loss": 0.2689, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.42451874366768, | |
| "grad_norm": 0.3285347976746443, | |
| "learning_rate": 1.0796508220277117e-05, | |
| "loss": 0.2464, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.4285714285714284, | |
| "grad_norm": 0.3083169453724999, | |
| "learning_rate": 1.0746153383708107e-05, | |
| "loss": 0.2532, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.4326241134751774, | |
| "grad_norm": 0.31245259800402425, | |
| "learning_rate": 1.0695873076347579e-05, | |
| "loss": 0.2846, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.436676798378926, | |
| "grad_norm": 0.3032968702113556, | |
| "learning_rate": 1.0645667703146205e-05, | |
| "loss": 0.2708, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.440729483282675, | |
| "grad_norm": 0.3224705636965893, | |
| "learning_rate": 1.0595537668451161e-05, | |
| "loss": 0.2596, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.4447821681864235, | |
| "grad_norm": 0.32835399980698077, | |
| "learning_rate": 1.0545483376002854e-05, | |
| "loss": 0.2932, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.4488348530901725, | |
| "grad_norm": 0.31332341150486, | |
| "learning_rate": 1.0495505228931676e-05, | |
| "loss": 0.289, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.452887537993921, | |
| "grad_norm": 0.30600721413225457, | |
| "learning_rate": 1.044560362975474e-05, | |
| "loss": 0.2744, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.4569402228976696, | |
| "grad_norm": 0.31285740494514, | |
| "learning_rate": 1.0395778980372695e-05, | |
| "loss": 0.2532, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.4609929078014185, | |
| "grad_norm": 0.30720270954326273, | |
| "learning_rate": 1.0346031682066381e-05, | |
| "loss": 0.2742, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.465045592705167, | |
| "grad_norm": 0.3089313609771571, | |
| "learning_rate": 1.0296362135493724e-05, | |
| "loss": 0.257, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.469098277608916, | |
| "grad_norm": 0.29999508361374144, | |
| "learning_rate": 1.0246770740686422e-05, | |
| "loss": 0.2771, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.4731509625126646, | |
| "grad_norm": 0.3068670286580572, | |
| "learning_rate": 1.0197257897046743e-05, | |
| "loss": 0.252, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.477203647416413, | |
| "grad_norm": 0.3058522340242042, | |
| "learning_rate": 1.014782400334433e-05, | |
| "loss": 0.2571, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.481256332320162, | |
| "grad_norm": 0.29985560401095307, | |
| "learning_rate": 1.009846945771296e-05, | |
| "loss": 0.2615, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.4853090172239107, | |
| "grad_norm": 0.314301350699763, | |
| "learning_rate": 1.0049194657647363e-05, | |
| "loss": 0.2604, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.4893617021276597, | |
| "grad_norm": 0.2925623883163185, | |
| "learning_rate": 1.0000000000000006e-05, | |
| "loss": 0.261, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.4934143870314083, | |
| "grad_norm": 0.3029121847837866, | |
| "learning_rate": 9.950885880977891e-06, | |
| "loss": 0.2526, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.497467071935157, | |
| "grad_norm": 0.3135906596148526, | |
| "learning_rate": 9.901852696139382e-06, | |
| "loss": 0.2617, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.501519756838906, | |
| "grad_norm": 0.3135084579948363, | |
| "learning_rate": 9.852900840391027e-06, | |
| "loss": 0.263, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.5055724417426544, | |
| "grad_norm": 0.3096812555810701, | |
| "learning_rate": 9.804030707984313e-06, | |
| "loss": 0.2657, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.5096251266464034, | |
| "grad_norm": 0.31641953443361814, | |
| "learning_rate": 9.755242692512599e-06, | |
| "loss": 0.2426, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.513677811550152, | |
| "grad_norm": 0.33044668134732463, | |
| "learning_rate": 9.70653718690782e-06, | |
| "loss": 0.2383, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.5177304964539005, | |
| "grad_norm": 0.3165205582220823, | |
| "learning_rate": 9.657914583437454e-06, | |
| "loss": 0.253, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.5217831813576495, | |
| "grad_norm": 0.3307205764242631, | |
| "learning_rate": 9.609375273701246e-06, | |
| "loss": 0.2708, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.5258358662613984, | |
| "grad_norm": 0.3066516033471303, | |
| "learning_rate": 9.560919648628133e-06, | |
| "loss": 0.2594, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.529888551165147, | |
| "grad_norm": 0.3246657963078779, | |
| "learning_rate": 9.512548098473047e-06, | |
| "loss": 0.2096, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.5339412360688955, | |
| "grad_norm": 0.3362957974542694, | |
| "learning_rate": 9.464261012813825e-06, | |
| "loss": 0.2759, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.5379939209726445, | |
| "grad_norm": 0.3304485978677205, | |
| "learning_rate": 9.416058780547987e-06, | |
| "loss": 0.2898, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.542046605876393, | |
| "grad_norm": 0.31133390632645674, | |
| "learning_rate": 9.367941789889714e-06, | |
| "loss": 0.2752, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.546099290780142, | |
| "grad_norm": 0.29849994820716225, | |
| "learning_rate": 9.319910428366607e-06, | |
| "loss": 0.2912, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.5501519756838906, | |
| "grad_norm": 0.3208314249649794, | |
| "learning_rate": 9.271965082816667e-06, | |
| "loss": 0.2526, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.554204660587639, | |
| "grad_norm": 0.32125883300988467, | |
| "learning_rate": 9.224106139385111e-06, | |
| "loss": 0.266, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.558257345491388, | |
| "grad_norm": 0.314356104752064, | |
| "learning_rate": 9.176333983521291e-06, | |
| "loss": 0.2352, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.5623100303951367, | |
| "grad_norm": 0.3389958884059575, | |
| "learning_rate": 9.12864899997558e-06, | |
| "loss": 0.2776, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.5663627152988857, | |
| "grad_norm": 0.3273219643483403, | |
| "learning_rate": 9.08105157279628e-06, | |
| "loss": 0.274, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.5704154002026343, | |
| "grad_norm": 0.32617511078993644, | |
| "learning_rate": 9.03354208532653e-06, | |
| "loss": 0.2772, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.574468085106383, | |
| "grad_norm": 0.3145058960162268, | |
| "learning_rate": 8.986120920201205e-06, | |
| "loss": 0.261, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.578520770010132, | |
| "grad_norm": 0.3077975385039152, | |
| "learning_rate": 8.938788459343852e-06, | |
| "loss": 0.2701, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 3.5825734549138804, | |
| "grad_norm": 0.312000193748488, | |
| "learning_rate": 8.8915450839636e-06, | |
| "loss": 0.281, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 3.5866261398176293, | |
| "grad_norm": 0.29972825163373873, | |
| "learning_rate": 8.844391174552116e-06, | |
| "loss": 0.2554, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 3.590678824721378, | |
| "grad_norm": 0.31132922076152514, | |
| "learning_rate": 8.797327110880479e-06, | |
| "loss": 0.2662, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 3.5947315096251264, | |
| "grad_norm": 0.3327088625654375, | |
| "learning_rate": 8.750353271996206e-06, | |
| "loss": 0.2736, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 3.5987841945288754, | |
| "grad_norm": 0.29955418519195676, | |
| "learning_rate": 8.703470036220132e-06, | |
| "loss": 0.2592, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 3.602836879432624, | |
| "grad_norm": 0.31535405206103595, | |
| "learning_rate": 8.656677781143394e-06, | |
| "loss": 0.2697, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 3.606889564336373, | |
| "grad_norm": 0.31500185967340805, | |
| "learning_rate": 8.609976883624377e-06, | |
| "loss": 0.2651, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.6109422492401215, | |
| "grad_norm": 0.2959567535285897, | |
| "learning_rate": 8.563367719785698e-06, | |
| "loss": 0.2584, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.61499493414387, | |
| "grad_norm": 0.29971699468290663, | |
| "learning_rate": 8.516850665011138e-06, | |
| "loss": 0.2778, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.619047619047619, | |
| "grad_norm": 0.2987705236125032, | |
| "learning_rate": 8.47042609394269e-06, | |
| "loss": 0.2522, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.6231003039513676, | |
| "grad_norm": 0.2965735003444721, | |
| "learning_rate": 8.424094380477432e-06, | |
| "loss": 0.2614, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.6271529888551166, | |
| "grad_norm": 0.3197425604522645, | |
| "learning_rate": 8.37785589776465e-06, | |
| "loss": 0.2851, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.631205673758865, | |
| "grad_norm": 0.30693228647403803, | |
| "learning_rate": 8.331711018202694e-06, | |
| "loss": 0.2822, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.6352583586626137, | |
| "grad_norm": 0.3133813075925239, | |
| "learning_rate": 8.285660113436104e-06, | |
| "loss": 0.2558, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.6393110435663627, | |
| "grad_norm": 0.31166912098127175, | |
| "learning_rate": 8.239703554352527e-06, | |
| "loss": 0.2817, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.6433637284701117, | |
| "grad_norm": 0.2960859122048135, | |
| "learning_rate": 8.193841711079775e-06, | |
| "loss": 0.2627, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.6474164133738602, | |
| "grad_norm": 0.30366440571292913, | |
| "learning_rate": 8.148074952982828e-06, | |
| "loss": 0.2439, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.651469098277609, | |
| "grad_norm": 0.3091690452424721, | |
| "learning_rate": 8.102403648660859e-06, | |
| "loss": 0.2659, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.655521783181358, | |
| "grad_norm": 0.315231891692595, | |
| "learning_rate": 8.056828165944282e-06, | |
| "loss": 0.2516, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.6595744680851063, | |
| "grad_norm": 0.3215566760445528, | |
| "learning_rate": 8.011348871891762e-06, | |
| "loss": 0.2734, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.6636271529888553, | |
| "grad_norm": 0.3304114603125728, | |
| "learning_rate": 7.965966132787287e-06, | |
| "loss": 0.2788, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.667679837892604, | |
| "grad_norm": 0.31294537288978014, | |
| "learning_rate": 7.920680314137189e-06, | |
| "loss": 0.2667, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.6717325227963524, | |
| "grad_norm": 0.3137553525432779, | |
| "learning_rate": 7.875491780667246e-06, | |
| "loss": 0.2473, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.6757852077001014, | |
| "grad_norm": 0.3169350317800105, | |
| "learning_rate": 7.830400896319667e-06, | |
| "loss": 0.2751, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.67983789260385, | |
| "grad_norm": 0.32899558223738445, | |
| "learning_rate": 7.785408024250259e-06, | |
| "loss": 0.2455, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.683890577507599, | |
| "grad_norm": 0.3008678539357601, | |
| "learning_rate": 7.74051352682542e-06, | |
| "loss": 0.2541, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.6879432624113475, | |
| "grad_norm": 0.3182914366084159, | |
| "learning_rate": 7.695717765619257e-06, | |
| "loss": 0.2676, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.691995947315096, | |
| "grad_norm": 0.3151022840055354, | |
| "learning_rate": 7.651021101410673e-06, | |
| "loss": 0.2491, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.696048632218845, | |
| "grad_norm": 0.3176289420458808, | |
| "learning_rate": 7.606423894180464e-06, | |
| "loss": 0.2385, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.7001013171225936, | |
| "grad_norm": 0.3119660430144287, | |
| "learning_rate": 7.56192650310839e-06, | |
| "loss": 0.2652, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.7041540020263426, | |
| "grad_norm": 0.31763258325249577, | |
| "learning_rate": 7.517529286570349e-06, | |
| "loss": 0.2549, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.708206686930091, | |
| "grad_norm": 0.30881313545233424, | |
| "learning_rate": 7.473232602135387e-06, | |
| "loss": 0.2518, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.7122593718338397, | |
| "grad_norm": 0.30385436324208165, | |
| "learning_rate": 7.429036806562935e-06, | |
| "loss": 0.2449, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.7163120567375887, | |
| "grad_norm": 0.30982099077160963, | |
| "learning_rate": 7.3849422557998455e-06, | |
| "loss": 0.2488, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.7203647416413372, | |
| "grad_norm": 0.3172414755307915, | |
| "learning_rate": 7.340949304977567e-06, | |
| "loss": 0.2604, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.7244174265450862, | |
| "grad_norm": 0.3298378403525879, | |
| "learning_rate": 7.297058308409282e-06, | |
| "loss": 0.2557, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.728470111448835, | |
| "grad_norm": 0.31675431972545304, | |
| "learning_rate": 7.25326961958704e-06, | |
| "loss": 0.2628, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.7325227963525833, | |
| "grad_norm": 0.32183988345521586, | |
| "learning_rate": 7.209583591178921e-06, | |
| "loss": 0.2701, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.7365754812563323, | |
| "grad_norm": 0.30879046650738046, | |
| "learning_rate": 7.1660005750261925e-06, | |
| "loss": 0.2646, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.7406281661600813, | |
| "grad_norm": 0.3216410098313188, | |
| "learning_rate": 7.1225209221404765e-06, | |
| "loss": 0.254, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.74468085106383, | |
| "grad_norm": 0.3145039362722645, | |
| "learning_rate": 7.079144982700909e-06, | |
| "loss": 0.263, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.7487335359675784, | |
| "grad_norm": 0.29437008791437835, | |
| "learning_rate": 7.0358731060513695e-06, | |
| "loss": 0.2479, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.7527862208713274, | |
| "grad_norm": 0.3044342051730224, | |
| "learning_rate": 6.99270564069757e-06, | |
| "loss": 0.2557, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.756838905775076, | |
| "grad_norm": 0.3024636673356088, | |
| "learning_rate": 6.949642934304375e-06, | |
| "loss": 0.2688, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.760891590678825, | |
| "grad_norm": 0.29203716284985315, | |
| "learning_rate": 6.906685333692871e-06, | |
| "loss": 0.2603, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.7649442755825735, | |
| "grad_norm": 0.30152363046942754, | |
| "learning_rate": 6.86383318483769e-06, | |
| "loss": 0.2774, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.768996960486322, | |
| "grad_norm": 0.29801377764970816, | |
| "learning_rate": 6.821086832864139e-06, | |
| "loss": 0.2532, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.773049645390071, | |
| "grad_norm": 0.29839047371386357, | |
| "learning_rate": 6.77844662204546e-06, | |
| "loss": 0.2796, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.7771023302938196, | |
| "grad_norm": 0.31535956964117756, | |
| "learning_rate": 6.7359128958000455e-06, | |
| "loss": 0.2577, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 3.7811550151975686, | |
| "grad_norm": 0.31044301065692237, | |
| "learning_rate": 6.693485996688695e-06, | |
| "loss": 0.2644, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 3.785207700101317, | |
| "grad_norm": 0.29548251048550833, | |
| "learning_rate": 6.651166266411801e-06, | |
| "loss": 0.2658, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 3.7892603850050657, | |
| "grad_norm": 0.3067785826909148, | |
| "learning_rate": 6.6089540458066725e-06, | |
| "loss": 0.2671, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 3.7933130699088147, | |
| "grad_norm": 0.3066620985068279, | |
| "learning_rate": 6.566849674844711e-06, | |
| "loss": 0.265, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 3.797365754812563, | |
| "grad_norm": 0.32209579521116294, | |
| "learning_rate": 6.524853492628747e-06, | |
| "loss": 0.2502, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 3.801418439716312, | |
| "grad_norm": 0.29942548070936587, | |
| "learning_rate": 6.4829658373902536e-06, | |
| "loss": 0.2801, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.8054711246200608, | |
| "grad_norm": 0.293065519953897, | |
| "learning_rate": 6.441187046486648e-06, | |
| "loss": 0.2481, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.8095238095238093, | |
| "grad_norm": 0.29049131298288855, | |
| "learning_rate": 6.399517456398567e-06, | |
| "loss": 0.2602, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.8135764944275583, | |
| "grad_norm": 0.30369176668732883, | |
| "learning_rate": 6.357957402727164e-06, | |
| "loss": 0.2635, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 3.817629179331307, | |
| "grad_norm": 0.3067513510029192, | |
| "learning_rate": 6.316507220191395e-06, | |
| "loss": 0.2635, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 3.821681864235056, | |
| "grad_norm": 0.3194165398819077, | |
| "learning_rate": 6.275167242625331e-06, | |
| "loss": 0.2487, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 3.8257345491388044, | |
| "grad_norm": 0.30171946107430697, | |
| "learning_rate": 6.233937802975471e-06, | |
| "loss": 0.2444, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 3.829787234042553, | |
| "grad_norm": 0.30196780788185257, | |
| "learning_rate": 6.192819233298046e-06, | |
| "loss": 0.2756, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 3.833839918946302, | |
| "grad_norm": 0.3086260115771585, | |
| "learning_rate": 6.151811864756383e-06, | |
| "loss": 0.2666, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 3.837892603850051, | |
| "grad_norm": 0.30998694081313144, | |
| "learning_rate": 6.1109160276181655e-06, | |
| "loss": 0.2599, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 3.8419452887537995, | |
| "grad_norm": 0.28273260444463266, | |
| "learning_rate": 6.070132051252868e-06, | |
| "loss": 0.2879, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 3.845997973657548, | |
| "grad_norm": 0.29542376626541444, | |
| "learning_rate": 6.0294602641290034e-06, | |
| "loss": 0.2525, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 3.850050658561297, | |
| "grad_norm": 0.31605097595317233, | |
| "learning_rate": 5.988900993811575e-06, | |
| "loss": 0.2538, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.8541033434650456, | |
| "grad_norm": 0.30016612154804717, | |
| "learning_rate": 5.948454566959363e-06, | |
| "loss": 0.2611, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 3.8581560283687946, | |
| "grad_norm": 0.3010782584811012, | |
| "learning_rate": 5.908121309322328e-06, | |
| "loss": 0.2906, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 3.862208713272543, | |
| "grad_norm": 0.2941824997238047, | |
| "learning_rate": 5.867901545738976e-06, | |
| "loss": 0.2292, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 3.8662613981762917, | |
| "grad_norm": 0.3004609017328805, | |
| "learning_rate": 5.827795600133774e-06, | |
| "loss": 0.2464, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 3.8703140830800407, | |
| "grad_norm": 0.30811035529085995, | |
| "learning_rate": 5.787803795514466e-06, | |
| "loss": 0.24, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 3.874366767983789, | |
| "grad_norm": 0.3055434035747132, | |
| "learning_rate": 5.747926453969576e-06, | |
| "loss": 0.2606, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 3.878419452887538, | |
| "grad_norm": 0.2916824121672623, | |
| "learning_rate": 5.708163896665708e-06, | |
| "loss": 0.2868, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 3.8824721377912867, | |
| "grad_norm": 0.2903844656847278, | |
| "learning_rate": 5.668516443845047e-06, | |
| "loss": 0.2613, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 3.8865248226950353, | |
| "grad_norm": 0.2950092733275902, | |
| "learning_rate": 5.6289844148227225e-06, | |
| "loss": 0.2701, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 3.8905775075987843, | |
| "grad_norm": 0.3188163619027849, | |
| "learning_rate": 5.5895681279842615e-06, | |
| "loss": 0.267, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.894630192502533, | |
| "grad_norm": 0.29002937516427496, | |
| "learning_rate": 5.550267900783019e-06, | |
| "loss": 0.2583, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 3.898682877406282, | |
| "grad_norm": 0.2957613648308718, | |
| "learning_rate": 5.511084049737623e-06, | |
| "loss": 0.239, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 3.9027355623100304, | |
| "grad_norm": 0.3078390964841555, | |
| "learning_rate": 5.4720168904294215e-06, | |
| "loss": 0.2719, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 3.906788247213779, | |
| "grad_norm": 0.28757271285603414, | |
| "learning_rate": 5.433066737499948e-06, | |
| "loss": 0.2483, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 3.910840932117528, | |
| "grad_norm": 0.2898065513805054, | |
| "learning_rate": 5.394233904648376e-06, | |
| "loss": 0.2603, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 3.9148936170212765, | |
| "grad_norm": 0.29449781617834325, | |
| "learning_rate": 5.355518704628997e-06, | |
| "loss": 0.2908, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 3.9189463019250255, | |
| "grad_norm": 0.28597036074303583, | |
| "learning_rate": 5.316921449248731e-06, | |
| "loss": 0.2778, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 3.922998986828774, | |
| "grad_norm": 0.2950318496843534, | |
| "learning_rate": 5.278442449364538e-06, | |
| "loss": 0.2609, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 3.9270516717325226, | |
| "grad_norm": 0.2917711197709822, | |
| "learning_rate": 5.240082014881016e-06, | |
| "loss": 0.2737, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 3.9311043566362716, | |
| "grad_norm": 0.29199580236725375, | |
| "learning_rate": 5.201840454747822e-06, | |
| "loss": 0.2592, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.93515704154002, | |
| "grad_norm": 0.30946911933721954, | |
| "learning_rate": 5.163718076957223e-06, | |
| "loss": 0.2593, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 3.939209726443769, | |
| "grad_norm": 0.3108704747518739, | |
| "learning_rate": 5.125715188541609e-06, | |
| "loss": 0.2641, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 3.9432624113475176, | |
| "grad_norm": 0.29861649204673274, | |
| "learning_rate": 5.087832095571021e-06, | |
| "loss": 0.2624, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 3.947315096251266, | |
| "grad_norm": 0.30024732419343236, | |
| "learning_rate": 5.0500691031506766e-06, | |
| "loss": 0.2638, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 3.951367781155015, | |
| "grad_norm": 0.3011498867290003, | |
| "learning_rate": 5.01242651541854e-06, | |
| "loss": 0.2364, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 3.955420466058764, | |
| "grad_norm": 0.3013776701541714, | |
| "learning_rate": 4.974904635542815e-06, | |
| "loss": 0.2454, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 3.9594731509625127, | |
| "grad_norm": 0.2953405798688752, | |
| "learning_rate": 4.937503765719582e-06, | |
| "loss": 0.2598, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 3.9635258358662613, | |
| "grad_norm": 0.29009471131732406, | |
| "learning_rate": 4.900224207170299e-06, | |
| "loss": 0.2526, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 3.9675785207700103, | |
| "grad_norm": 0.28905551812716823, | |
| "learning_rate": 4.8630662601394065e-06, | |
| "loss": 0.2672, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 3.971631205673759, | |
| "grad_norm": 0.31002558655131884, | |
| "learning_rate": 4.8260302238918995e-06, | |
| "loss": 0.2661, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.975683890577508, | |
| "grad_norm": 0.2997941893957553, | |
| "learning_rate": 4.789116396710924e-06, | |
| "loss": 0.2472, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 3.9797365754812564, | |
| "grad_norm": 0.3062649825445047, | |
| "learning_rate": 4.752325075895368e-06, | |
| "loss": 0.2444, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 3.983789260385005, | |
| "grad_norm": 0.2969244274242138, | |
| "learning_rate": 4.715656557757473e-06, | |
| "loss": 0.256, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 3.987841945288754, | |
| "grad_norm": 0.29835911336998316, | |
| "learning_rate": 4.679111137620442e-06, | |
| "loss": 0.2545, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 3.9918946301925025, | |
| "grad_norm": 0.29905085994897906, | |
| "learning_rate": 4.6426891098160585e-06, | |
| "loss": 0.2523, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 3.9959473150962515, | |
| "grad_norm": 0.29747054790618666, | |
| "learning_rate": 4.6063907676823474e-06, | |
| "loss": 0.2755, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 4.0, | |
| "grad_norm": 0.2832719007538571, | |
| "learning_rate": 4.570216403561141e-06, | |
| "loss": 0.2674, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.0040526849037485, | |
| "grad_norm": 0.49212729486349194, | |
| "learning_rate": 4.534166308795815e-06, | |
| "loss": 0.2193, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.008105369807497, | |
| "grad_norm": 0.39151569717974855, | |
| "learning_rate": 4.498240773728859e-06, | |
| "loss": 0.2248, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.0121580547112465, | |
| "grad_norm": 0.30516742770421834, | |
| "learning_rate": 4.462440087699609e-06, | |
| "loss": 0.1939, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.016210739614995, | |
| "grad_norm": 0.31930493556115996, | |
| "learning_rate": 4.426764539041861e-06, | |
| "loss": 0.2327, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.020263424518744, | |
| "grad_norm": 0.3870130938070103, | |
| "learning_rate": 4.391214415081582e-06, | |
| "loss": 0.1895, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.024316109422492, | |
| "grad_norm": 0.415207616450743, | |
| "learning_rate": 4.355790002134579e-06, | |
| "loss": 0.2111, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.028368794326241, | |
| "grad_norm": 0.38268577384210645, | |
| "learning_rate": 4.320491585504207e-06, | |
| "loss": 0.2107, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.03242147922999, | |
| "grad_norm": 0.31157136719358164, | |
| "learning_rate": 4.2853194494790615e-06, | |
| "loss": 0.2083, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.036474164133739, | |
| "grad_norm": 0.31333251481752583, | |
| "learning_rate": 4.250273877330691e-06, | |
| "loss": 0.2241, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.040526849037487, | |
| "grad_norm": 0.34060503424452715, | |
| "learning_rate": 4.215355151311313e-06, | |
| "loss": 0.1918, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.044579533941236, | |
| "grad_norm": 0.3268501430316943, | |
| "learning_rate": 4.180563552651542e-06, | |
| "loss": 0.1882, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.048632218844984, | |
| "grad_norm": 0.3116497051026214, | |
| "learning_rate": 4.145899361558147e-06, | |
| "loss": 0.2213, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.052684903748734, | |
| "grad_norm": 0.27610979982627937, | |
| "learning_rate": 4.111362857211738e-06, | |
| "loss": 0.1917, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.056737588652482, | |
| "grad_norm": 0.29400657452838874, | |
| "learning_rate": 4.076954317764592e-06, | |
| "loss": 0.2092, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.060790273556231, | |
| "grad_norm": 0.3086862509080231, | |
| "learning_rate": 4.042674020338335e-06, | |
| "loss": 0.2114, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.0648429584599794, | |
| "grad_norm": 0.3224024489703029, | |
| "learning_rate": 4.0085222410217835e-06, | |
| "loss": 0.2019, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.068895643363729, | |
| "grad_norm": 0.2705299793101589, | |
| "learning_rate": 3.974499254868674e-06, | |
| "loss": 0.1833, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.072948328267477, | |
| "grad_norm": 0.2673173816058797, | |
| "learning_rate": 3.940605335895451e-06, | |
| "loss": 0.1885, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.077001013171226, | |
| "grad_norm": 0.26974341663165635, | |
| "learning_rate": 3.90684075707908e-06, | |
| "loss": 0.1951, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.0810536980749745, | |
| "grad_norm": 0.25847740455641743, | |
| "learning_rate": 3.8732057903548505e-06, | |
| "loss": 0.207, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.085106382978723, | |
| "grad_norm": 0.28037316702706233, | |
| "learning_rate": 3.8397007066141375e-06, | |
| "loss": 0.2132, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.0891590678824725, | |
| "grad_norm": 0.2617641209902261, | |
| "learning_rate": 3.806325775702304e-06, | |
| "loss": 0.1863, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.093211752786221, | |
| "grad_norm": 0.2586032192754939, | |
| "learning_rate": 3.773081266416434e-06, | |
| "loss": 0.1968, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.09726443768997, | |
| "grad_norm": 0.253973391818422, | |
| "learning_rate": 3.739967446503245e-06, | |
| "loss": 0.2103, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.101317122593718, | |
| "grad_norm": 0.25622263586402133, | |
| "learning_rate": 3.706984582656894e-06, | |
| "loss": 0.2115, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.105369807497467, | |
| "grad_norm": 0.25934097060507016, | |
| "learning_rate": 3.6741329405168237e-06, | |
| "loss": 0.204, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.109422492401216, | |
| "grad_norm": 0.2559893678968738, | |
| "learning_rate": 3.641412784665648e-06, | |
| "loss": 0.195, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.113475177304965, | |
| "grad_norm": 0.25143638788418826, | |
| "learning_rate": 3.608824378627005e-06, | |
| "loss": 0.1981, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.117527862208713, | |
| "grad_norm": 0.2540889791223249, | |
| "learning_rate": 3.5763679848634337e-06, | |
| "loss": 0.2193, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.121580547112462, | |
| "grad_norm": 0.25212538564201203, | |
| "learning_rate": 3.544043864774269e-06, | |
| "loss": 0.2154, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.12563323201621, | |
| "grad_norm": 0.2572787015433609, | |
| "learning_rate": 3.5118522786935282e-06, | |
| "loss": 0.1858, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.12968591691996, | |
| "grad_norm": 0.2679733285844258, | |
| "learning_rate": 3.479793485887819e-06, | |
| "loss": 0.1966, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.133738601823708, | |
| "grad_norm": 0.24839021819849938, | |
| "learning_rate": 3.4478677445542653e-06, | |
| "loss": 0.2105, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.137791286727457, | |
| "grad_norm": 0.25362931971902636, | |
| "learning_rate": 3.4160753118183767e-06, | |
| "loss": 0.2125, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.141843971631205, | |
| "grad_norm": 0.24918882651482377, | |
| "learning_rate": 3.3844164437320527e-06, | |
| "loss": 0.2126, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.145896656534954, | |
| "grad_norm": 0.2399558874876352, | |
| "learning_rate": 3.3528913952714558e-06, | |
| "loss": 0.2343, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.149949341438703, | |
| "grad_norm": 0.24165090346826792, | |
| "learning_rate": 3.321500420335e-06, | |
| "loss": 0.2224, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.154002026342452, | |
| "grad_norm": 0.24893064432398765, | |
| "learning_rate": 3.290243771741275e-06, | |
| "loss": 0.2042, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.1580547112462005, | |
| "grad_norm": 0.25910848130582637, | |
| "learning_rate": 3.2591217012270325e-06, | |
| "loss": 0.1884, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.162107396149949, | |
| "grad_norm": 0.2536423379541269, | |
| "learning_rate": 3.228134459445149e-06, | |
| "loss": 0.1899, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.1661600810536985, | |
| "grad_norm": 0.2661029357187124, | |
| "learning_rate": 3.1972822959626205e-06, | |
| "loss": 0.2094, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.170212765957447, | |
| "grad_norm": 0.27076723986649215, | |
| "learning_rate": 3.166565459258513e-06, | |
| "loss": 0.1944, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.174265450861196, | |
| "grad_norm": 0.2468023705922614, | |
| "learning_rate": 3.1359841967220193e-06, | |
| "loss": 0.2221, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.178318135764944, | |
| "grad_norm": 0.26167615756092766, | |
| "learning_rate": 3.105538754650419e-06, | |
| "loss": 0.2015, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.182370820668693, | |
| "grad_norm": 0.25625855047734064, | |
| "learning_rate": 3.07522937824712e-06, | |
| "loss": 0.2086, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.186423505572442, | |
| "grad_norm": 0.2617685262388744, | |
| "learning_rate": 3.0450563116196697e-06, | |
| "loss": 0.1866, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.190476190476191, | |
| "grad_norm": 0.25373784168402314, | |
| "learning_rate": 3.0150197977778008e-06, | |
| "loss": 0.1918, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.194528875379939, | |
| "grad_norm": 0.24636857695552652, | |
| "learning_rate": 2.985120078631465e-06, | |
| "loss": 0.2092, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.198581560283688, | |
| "grad_norm": 0.2551065406718208, | |
| "learning_rate": 2.9553573949888893e-06, | |
| "loss": 0.2067, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.202634245187436, | |
| "grad_norm": 0.24854021935493217, | |
| "learning_rate": 2.9257319865546384e-06, | |
| "loss": 0.1991, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.206686930091186, | |
| "grad_norm": 0.2837436937534287, | |
| "learning_rate": 2.896244091927678e-06, | |
| "loss": 0.1982, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.210739614994934, | |
| "grad_norm": 0.23814691470808982, | |
| "learning_rate": 2.8668939485994584e-06, | |
| "loss": 0.2094, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.214792299898683, | |
| "grad_norm": 0.25539316516953253, | |
| "learning_rate": 2.837681792951994e-06, | |
| "loss": 0.1664, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.218844984802431, | |
| "grad_norm": 0.2422251662125335, | |
| "learning_rate": 2.808607860255981e-06, | |
| "loss": 0.2151, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.22289766970618, | |
| "grad_norm": 0.25345068857885406, | |
| "learning_rate": 2.7796723846688634e-06, | |
| "loss": 0.207, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.226950354609929, | |
| "grad_norm": 0.2521490357116477, | |
| "learning_rate": 2.7508755992329937e-06, | |
| "loss": 0.1845, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.231003039513678, | |
| "grad_norm": 0.23993585179346516, | |
| "learning_rate": 2.722217735873718e-06, | |
| "loss": 0.1798, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.2350557244174265, | |
| "grad_norm": 0.2508661760227835, | |
| "learning_rate": 2.6936990253975315e-06, | |
| "loss": 0.1885, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.239108409321175, | |
| "grad_norm": 0.23667653270070665, | |
| "learning_rate": 2.665319697490205e-06, | |
| "loss": 0.2049, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.243161094224924, | |
| "grad_norm": 0.23876067477757165, | |
| "learning_rate": 2.637079980714945e-06, | |
| "loss": 0.1769, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.247213779128673, | |
| "grad_norm": 0.2668315076195076, | |
| "learning_rate": 2.6089801025105453e-06, | |
| "loss": 0.182, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.251266464032422, | |
| "grad_norm": 0.2572690531504522, | |
| "learning_rate": 2.581020289189571e-06, | |
| "loss": 0.223, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.25531914893617, | |
| "grad_norm": 0.242066838041828, | |
| "learning_rate": 2.553200765936501e-06, | |
| "loss": 0.2068, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.259371833839919, | |
| "grad_norm": 0.23894822167243368, | |
| "learning_rate": 2.525521756805962e-06, | |
| "loss": 0.2155, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.263424518743667, | |
| "grad_norm": 0.24596692132728049, | |
| "learning_rate": 2.497983484720885e-06, | |
| "loss": 0.2155, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.267477203647417, | |
| "grad_norm": 0.24090147901585304, | |
| "learning_rate": 2.470586171470728e-06, | |
| "loss": 0.1905, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.271529888551165, | |
| "grad_norm": 0.24388436990543663, | |
| "learning_rate": 2.4433300377096836e-06, | |
| "loss": 0.1776, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.275582573454914, | |
| "grad_norm": 0.2423833870219351, | |
| "learning_rate": 2.4162153029549073e-06, | |
| "loss": 0.2132, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.279635258358662, | |
| "grad_norm": 0.24741593036217494, | |
| "learning_rate": 2.3892421855847458e-06, | |
| "loss": 0.1937, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.283687943262412, | |
| "grad_norm": 0.24610474684848563, | |
| "learning_rate": 2.362410902836978e-06, | |
| "loss": 0.2034, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.28774062816616, | |
| "grad_norm": 0.2594899181984098, | |
| "learning_rate": 2.3357216708070653e-06, | |
| "loss": 0.2043, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.291793313069909, | |
| "grad_norm": 0.23216297134289854, | |
| "learning_rate": 2.309174704446411e-06, | |
| "loss": 0.1868, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.295845997973657, | |
| "grad_norm": 0.23712832453290464, | |
| "learning_rate": 2.2827702175606437e-06, | |
| "loss": 0.2054, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 4.299898682877406, | |
| "grad_norm": 0.2507480694582349, | |
| "learning_rate": 2.256508422807855e-06, | |
| "loss": 0.2105, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 4.303951367781155, | |
| "grad_norm": 0.25879269677677746, | |
| "learning_rate": 2.230389531696946e-06, | |
| "loss": 0.1953, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 4.308004052684904, | |
| "grad_norm": 0.24680874528095242, | |
| "learning_rate": 2.204413754585857e-06, | |
| "loss": 0.196, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 4.3120567375886525, | |
| "grad_norm": 0.24200937644511783, | |
| "learning_rate": 2.1785813006799406e-06, | |
| "loss": 0.1873, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 4.316109422492401, | |
| "grad_norm": 0.2511879217821238, | |
| "learning_rate": 2.1528923780302224e-06, | |
| "loss": 0.215, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 4.32016210739615, | |
| "grad_norm": 0.2486178968017722, | |
| "learning_rate": 2.127347193531757e-06, | |
| "loss": 0.1986, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 4.324214792299899, | |
| "grad_norm": 0.24880387265465134, | |
| "learning_rate": 2.101945952921942e-06, | |
| "loss": 0.2047, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 4.328267477203648, | |
| "grad_norm": 0.24814884270733864, | |
| "learning_rate": 2.0766888607788906e-06, | |
| "loss": 0.1882, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 4.332320162107396, | |
| "grad_norm": 0.25376204863570767, | |
| "learning_rate": 2.0515761205197337e-06, | |
| "loss": 0.2145, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 4.336372847011145, | |
| "grad_norm": 0.2507770029501886, | |
| "learning_rate": 2.0266079343990453e-06, | |
| "loss": 0.1933, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 4.340425531914893, | |
| "grad_norm": 0.23553515551574122, | |
| "learning_rate": 2.0017845035071494e-06, | |
| "loss": 0.1851, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 4.344478216818643, | |
| "grad_norm": 0.24081197545377153, | |
| "learning_rate": 1.9771060277685537e-06, | |
| "loss": 0.2022, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 4.348530901722391, | |
| "grad_norm": 0.23164103310692896, | |
| "learning_rate": 1.95257270594031e-06, | |
| "loss": 0.2051, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 4.35258358662614, | |
| "grad_norm": 0.2507897453876342, | |
| "learning_rate": 1.9281847356104188e-06, | |
| "loss": 0.2082, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 4.356636271529888, | |
| "grad_norm": 0.24838376792263012, | |
| "learning_rate": 1.9039423131962365e-06, | |
| "loss": 0.1947, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 4.360688956433638, | |
| "grad_norm": 0.25032303766849684, | |
| "learning_rate": 1.8798456339429027e-06, | |
| "loss": 0.216, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 4.364741641337386, | |
| "grad_norm": 0.24665126935087092, | |
| "learning_rate": 1.8558948919217612e-06, | |
| "loss": 0.1956, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 4.368794326241135, | |
| "grad_norm": 0.2482312631519759, | |
| "learning_rate": 1.8320902800287954e-06, | |
| "loss": 0.1988, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 4.372847011144883, | |
| "grad_norm": 0.24814712740732703, | |
| "learning_rate": 1.8084319899830726e-06, | |
| "loss": 0.2081, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 4.376899696048632, | |
| "grad_norm": 0.24025261815548518, | |
| "learning_rate": 1.7849202123252097e-06, | |
| "loss": 0.2289, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 4.380952380952381, | |
| "grad_norm": 0.23612681803947239, | |
| "learning_rate": 1.7615551364158401e-06, | |
| "loss": 0.2229, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 4.38500506585613, | |
| "grad_norm": 0.2521152153147202, | |
| "learning_rate": 1.738336950434061e-06, | |
| "loss": 0.1934, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 4.3890577507598785, | |
| "grad_norm": 0.25135962207437634, | |
| "learning_rate": 1.715265841375957e-06, | |
| "loss": 0.192, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 4.393110435663627, | |
| "grad_norm": 0.232366600618381, | |
| "learning_rate": 1.6923419950530684e-06, | |
| "loss": 0.2174, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 4.397163120567376, | |
| "grad_norm": 0.2285110789006824, | |
| "learning_rate": 1.6695655960909008e-06, | |
| "loss": 0.1926, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 4.401215805471125, | |
| "grad_norm": 0.2567657347910915, | |
| "learning_rate": 1.646936827927441e-06, | |
| "loss": 0.2225, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 4.405268490374874, | |
| "grad_norm": 0.2487204408107827, | |
| "learning_rate": 1.6244558728116766e-06, | |
| "loss": 0.2065, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 4.409321175278622, | |
| "grad_norm": 0.24564914083082331, | |
| "learning_rate": 1.6021229118021265e-06, | |
| "loss": 0.2261, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 4.413373860182371, | |
| "grad_norm": 0.26121927899026226, | |
| "learning_rate": 1.5799381247653967e-06, | |
| "loss": 0.2, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 4.417426545086119, | |
| "grad_norm": 0.25818201502464255, | |
| "learning_rate": 1.5579016903747013e-06, | |
| "loss": 0.2062, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 4.421479229989869, | |
| "grad_norm": 0.2513170691588368, | |
| "learning_rate": 1.5360137861084656e-06, | |
| "loss": 0.2135, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 4.425531914893617, | |
| "grad_norm": 0.24214669792628418, | |
| "learning_rate": 1.5142745882488475e-06, | |
| "loss": 0.2217, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 4.429584599797366, | |
| "grad_norm": 0.24527753134952718, | |
| "learning_rate": 1.4926842718803691e-06, | |
| "loss": 0.1961, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 4.433637284701114, | |
| "grad_norm": 0.24857828641548574, | |
| "learning_rate": 1.4712430108884657e-06, | |
| "loss": 0.1946, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 4.437689969604863, | |
| "grad_norm": 0.2469834607983773, | |
| "learning_rate": 1.4499509779581078e-06, | |
| "loss": 0.2139, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 4.441742654508612, | |
| "grad_norm": 0.24956970530670186, | |
| "learning_rate": 1.4288083445723988e-06, | |
| "loss": 0.2103, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 4.445795339412361, | |
| "grad_norm": 0.24116317894672204, | |
| "learning_rate": 1.4078152810112045e-06, | |
| "loss": 0.1906, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 4.449848024316109, | |
| "grad_norm": 0.2574432423758979, | |
| "learning_rate": 1.3869719563497697e-06, | |
| "loss": 0.1688, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 4.453900709219858, | |
| "grad_norm": 0.2564190473657212, | |
| "learning_rate": 1.3662785384573663e-06, | |
| "loss": 0.1896, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 4.4579533941236065, | |
| "grad_norm": 0.24289552638331346, | |
| "learning_rate": 1.3457351939959383e-06, | |
| "loss": 0.1919, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 4.462006079027356, | |
| "grad_norm": 0.24724780039080202, | |
| "learning_rate": 1.3253420884187551e-06, | |
| "loss": 0.2216, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 4.4660587639311045, | |
| "grad_norm": 0.2407276399877983, | |
| "learning_rate": 1.3050993859690953e-06, | |
| "loss": 0.1886, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 4.470111448834853, | |
| "grad_norm": 0.25016173271792247, | |
| "learning_rate": 1.2850072496788869e-06, | |
| "loss": 0.1796, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 4.474164133738602, | |
| "grad_norm": 0.2519278638878137, | |
| "learning_rate": 1.2650658413674434e-06, | |
| "loss": 0.2143, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 4.47821681864235, | |
| "grad_norm": 0.2621742889948093, | |
| "learning_rate": 1.2452753216401226e-06, | |
| "loss": 0.215, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 4.4822695035460995, | |
| "grad_norm": 0.24452298022434485, | |
| "learning_rate": 1.2256358498870503e-06, | |
| "loss": 0.1901, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 4.486322188449848, | |
| "grad_norm": 0.2576015579696859, | |
| "learning_rate": 1.2061475842818337e-06, | |
| "loss": 0.2024, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 4.490374873353597, | |
| "grad_norm": 0.24075028414776595, | |
| "learning_rate": 1.1868106817802816e-06, | |
| "loss": 0.222, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 4.494427558257345, | |
| "grad_norm": 0.24831346549243255, | |
| "learning_rate": 1.1676252981191482e-06, | |
| "loss": 0.2066, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 4.498480243161095, | |
| "grad_norm": 0.23841893550956242, | |
| "learning_rate": 1.1485915878148823e-06, | |
| "loss": 0.184, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 4.502532928064843, | |
| "grad_norm": 0.25990573630755864, | |
| "learning_rate": 1.1297097041623584e-06, | |
| "loss": 0.2053, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 4.506585612968592, | |
| "grad_norm": 0.24923901659028547, | |
| "learning_rate": 1.1109797992336847e-06, | |
| "loss": 0.2072, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 4.51063829787234, | |
| "grad_norm": 0.24418909141616818, | |
| "learning_rate": 1.092402023876933e-06, | |
| "loss": 0.2055, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 4.514690982776089, | |
| "grad_norm": 0.24918082373793862, | |
| "learning_rate": 1.0739765277149527e-06, | |
| "loss": 0.2219, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 4.518743667679838, | |
| "grad_norm": 0.23760923628696845, | |
| "learning_rate": 1.0557034591441596e-06, | |
| "loss": 0.1935, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 4.522796352583587, | |
| "grad_norm": 0.24760590364626245, | |
| "learning_rate": 1.0375829653333324e-06, | |
| "loss": 0.1909, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 4.526849037487335, | |
| "grad_norm": 0.24713589140416042, | |
| "learning_rate": 1.0196151922224385e-06, | |
| "loss": 0.2113, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 4.530901722391084, | |
| "grad_norm": 0.26466208575599254, | |
| "learning_rate": 1.0018002845214526e-06, | |
| "loss": 0.2082, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 4.5349544072948325, | |
| "grad_norm": 0.25257223464006556, | |
| "learning_rate": 9.841383857091947e-07, | |
| "loss": 0.1766, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 4.539007092198582, | |
| "grad_norm": 0.25167998725227053, | |
| "learning_rate": 9.666296380321616e-07, | |
| "loss": 0.1998, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 4.54305977710233, | |
| "grad_norm": 0.2416148215010902, | |
| "learning_rate": 9.492741825034124e-07, | |
| "loss": 0.1933, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 4.547112462006079, | |
| "grad_norm": 0.25706651350908744, | |
| "learning_rate": 9.320721589013892e-07, | |
| "loss": 0.1978, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 4.5511651469098275, | |
| "grad_norm": 0.23673547041483708, | |
| "learning_rate": 9.150237057688339e-07, | |
| "loss": 0.1925, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 4.555217831813577, | |
| "grad_norm": 0.24777510116778528, | |
| "learning_rate": 8.981289604116328e-07, | |
| "loss": 0.1922, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 4.5592705167173255, | |
| "grad_norm": 0.2549886668392779, | |
| "learning_rate": 8.813880588977542e-07, | |
| "loss": 0.1737, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 4.563323201621074, | |
| "grad_norm": 0.24941224040718177, | |
| "learning_rate": 8.648011360561126e-07, | |
| "loss": 0.2086, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 4.567375886524823, | |
| "grad_norm": 0.25008647508496457, | |
| "learning_rate": 8.483683254755037e-07, | |
| "loss": 0.202, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 4.571428571428571, | |
| "grad_norm": 0.24304549771387748, | |
| "learning_rate": 8.320897595035227e-07, | |
| "loss": 0.2222, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 4.575481256332321, | |
| "grad_norm": 0.2444987304766229, | |
| "learning_rate": 8.159655692455093e-07, | |
| "loss": 0.194, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 4.579533941236069, | |
| "grad_norm": 0.2604206956706893, | |
| "learning_rate": 7.999958845634648e-07, | |
| "loss": 0.2142, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 4.583586626139818, | |
| "grad_norm": 0.25309715388414655, | |
| "learning_rate": 7.841808340750478e-07, | |
| "loss": 0.2119, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 4.587639311043566, | |
| "grad_norm": 0.24790481602710543, | |
| "learning_rate": 7.685205451524869e-07, | |
| "loss": 0.2086, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 4.591691995947315, | |
| "grad_norm": 0.24983535654820474, | |
| "learning_rate": 7.530151439216027e-07, | |
| "loss": 0.1787, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 4.595744680851064, | |
| "grad_norm": 0.2486891997934812, | |
| "learning_rate": 7.376647552607675e-07, | |
| "loss": 0.1987, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 4.599797365754813, | |
| "grad_norm": 0.2326434459898208, | |
| "learning_rate": 7.224695027998963e-07, | |
| "loss": 0.181, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 4.603850050658561, | |
| "grad_norm": 0.24054877749576015, | |
| "learning_rate": 7.07429508919466e-07, | |
| "loss": 0.2013, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 4.60790273556231, | |
| "grad_norm": 0.24846389224889026, | |
| "learning_rate": 6.925448947495206e-07, | |
| "loss": 0.1944, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 4.611955420466058, | |
| "grad_norm": 0.24660112392711814, | |
| "learning_rate": 6.778157801686936e-07, | |
| "loss": 0.1982, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 4.616008105369808, | |
| "grad_norm": 0.2596039992836043, | |
| "learning_rate": 6.632422838032515e-07, | |
| "loss": 0.2163, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 4.620060790273556, | |
| "grad_norm": 0.24965207189531147, | |
| "learning_rate": 6.488245230261281e-07, | |
| "loss": 0.1899, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 4.624113475177305, | |
| "grad_norm": 0.22996459963899185, | |
| "learning_rate": 6.345626139559868e-07, | |
| "loss": 0.1842, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 4.6281661600810535, | |
| "grad_norm": 0.24344591612958302, | |
| "learning_rate": 6.204566714562866e-07, | |
| "loss": 0.2231, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 4.632218844984802, | |
| "grad_norm": 0.2501881839782138, | |
| "learning_rate": 6.06506809134344e-07, | |
| "loss": 0.1898, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 4.6362715298885515, | |
| "grad_norm": 0.2390439307860063, | |
| "learning_rate": 5.927131393404373e-07, | |
| "loss": 0.1942, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 4.6403242147923, | |
| "grad_norm": 0.23859296216820156, | |
| "learning_rate": 5.790757731668817e-07, | |
| "loss": 0.2083, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 4.644376899696049, | |
| "grad_norm": 0.25156654012233953, | |
| "learning_rate": 5.655948204471507e-07, | |
| "loss": 0.19, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 4.648429584599797, | |
| "grad_norm": 0.24935521485438975, | |
| "learning_rate": 5.522703897549875e-07, | |
| "loss": 0.1851, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 4.652482269503546, | |
| "grad_norm": 0.24987385132029152, | |
| "learning_rate": 5.391025884035239e-07, | |
| "loss": 0.2114, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 4.656534954407295, | |
| "grad_norm": 0.23979057308566046, | |
| "learning_rate": 5.260915224444207e-07, | |
| "loss": 0.2016, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 4.660587639311044, | |
| "grad_norm": 0.24037457816555458, | |
| "learning_rate": 5.132372966670129e-07, | |
| "loss": 0.198, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 4.664640324214792, | |
| "grad_norm": 0.24662597536979464, | |
| "learning_rate": 5.005400145974704e-07, | |
| "loss": 0.1991, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 4.668693009118541, | |
| "grad_norm": 0.2456610686526762, | |
| "learning_rate": 4.879997784979562e-07, | |
| "loss": 0.1987, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 4.672745694022289, | |
| "grad_norm": 0.23818091118705748, | |
| "learning_rate": 4.7561668936580984e-07, | |
| "loss": 0.1857, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 4.676798378926039, | |
| "grad_norm": 0.23870037125444504, | |
| "learning_rate": 4.6339084693272306e-07, | |
| "loss": 0.2015, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 4.680851063829787, | |
| "grad_norm": 0.2426180811096015, | |
| "learning_rate": 4.5132234966395847e-07, | |
| "loss": 0.2294, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 4.684903748733536, | |
| "grad_norm": 0.2444717883510916, | |
| "learning_rate": 4.3941129475752795e-07, | |
| "loss": 0.1949, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 4.688956433637284, | |
| "grad_norm": 0.23867256875477824, | |
| "learning_rate": 4.27657778143431e-07, | |
| "loss": 0.1925, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 4.693009118541033, | |
| "grad_norm": 0.24415027130155562, | |
| "learning_rate": 4.1606189448287757e-07, | |
| "loss": 0.1972, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 4.697061803444782, | |
| "grad_norm": 0.24512888367198166, | |
| "learning_rate": 4.046237371675177e-07, | |
| "loss": 0.1974, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 4.701114488348531, | |
| "grad_norm": 0.2481879364227404, | |
| "learning_rate": 3.9334339831869963e-07, | |
| "loss": 0.2024, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 4.7051671732522795, | |
| "grad_norm": 0.2407595092163603, | |
| "learning_rate": 3.8222096878671955e-07, | |
| "loss": 0.1736, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 4.709219858156028, | |
| "grad_norm": 0.23931711755408194, | |
| "learning_rate": 3.7125653815009545e-07, | |
| "loss": 0.191, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 4.713272543059777, | |
| "grad_norm": 0.24888033224975037, | |
| "learning_rate": 3.6045019471484974e-07, | |
| "loss": 0.207, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 4.717325227963526, | |
| "grad_norm": 0.25125223432810573, | |
| "learning_rate": 3.498020255137813e-07, | |
| "loss": 0.1998, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 4.721377912867275, | |
| "grad_norm": 0.2565517772478762, | |
| "learning_rate": 3.393121163057811e-07, | |
| "loss": 0.1988, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 4.725430597771023, | |
| "grad_norm": 0.2527112386006553, | |
| "learning_rate": 3.289805515751399e-07, | |
| "loss": 0.1969, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 4.729483282674772, | |
| "grad_norm": 0.2447049153289079, | |
| "learning_rate": 3.188074145308573e-07, | |
| "loss": 0.2001, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 4.733535967578521, | |
| "grad_norm": 0.2515366216607944, | |
| "learning_rate": 3.087927871059804e-07, | |
| "loss": 0.2106, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 4.73758865248227, | |
| "grad_norm": 0.25284169932128453, | |
| "learning_rate": 2.989367499569418e-07, | |
| "loss": 0.1921, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 4.741641337386018, | |
| "grad_norm": 2.1158282294024806, | |
| "learning_rate": 2.8923938246290917e-07, | |
| "loss": 0.231, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 4.745694022289767, | |
| "grad_norm": 0.25472104100072057, | |
| "learning_rate": 2.7970076272514804e-07, | |
| "loss": 0.1957, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 4.749746707193515, | |
| "grad_norm": 0.2427726194796699, | |
| "learning_rate": 2.703209675663887e-07, | |
| "loss": 0.1866, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 4.753799392097265, | |
| "grad_norm": 0.25159398007209016, | |
| "learning_rate": 2.6110007253021374e-07, | |
| "loss": 0.1964, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 4.757852077001013, | |
| "grad_norm": 0.24834947573666064, | |
| "learning_rate": 2.520381518804471e-07, | |
| "loss": 0.2278, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 4.761904761904762, | |
| "grad_norm": 0.24389835262032514, | |
| "learning_rate": 2.4313527860054585e-07, | |
| "loss": 0.1955, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 4.76595744680851, | |
| "grad_norm": 0.24040244441202888, | |
| "learning_rate": 2.343915243930317e-07, | |
| "loss": 0.1956, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 4.77001013171226, | |
| "grad_norm": 0.2445642021300155, | |
| "learning_rate": 2.2580695967889367e-07, | |
| "loss": 0.1989, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 4.774062816616008, | |
| "grad_norm": 0.23649243767505182, | |
| "learning_rate": 2.1738165359704189e-07, | |
| "loss": 0.1781, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 4.778115501519757, | |
| "grad_norm": 0.24444891340051922, | |
| "learning_rate": 2.0911567400373257e-07, | |
| "loss": 0.1858, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 4.7821681864235055, | |
| "grad_norm": 0.24272706127774726, | |
| "learning_rate": 2.0100908747202607e-07, | |
| "loss": 0.1956, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 4.786220871327254, | |
| "grad_norm": 0.23992042016885853, | |
| "learning_rate": 1.9306195929125638e-07, | |
| "loss": 0.2221, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 4.7902735562310035, | |
| "grad_norm": 0.24110345991491774, | |
| "learning_rate": 1.8527435346650247e-07, | |
| "loss": 0.185, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 4.794326241134752, | |
| "grad_norm": 0.2549465246377265, | |
| "learning_rate": 1.7764633271807108e-07, | |
| "loss": 0.229, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 4.798378926038501, | |
| "grad_norm": 0.23452676157526703, | |
| "learning_rate": 1.7017795848099262e-07, | |
| "loss": 0.2014, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 4.802431610942249, | |
| "grad_norm": 0.24613568698781021, | |
| "learning_rate": 1.6286929090452596e-07, | |
| "loss": 0.2178, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 4.806484295845998, | |
| "grad_norm": 0.24642812766695082, | |
| "learning_rate": 1.557203888516745e-07, | |
| "loss": 0.206, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 4.810536980749747, | |
| "grad_norm": 0.24480746434735134, | |
| "learning_rate": 1.487313098987131e-07, | |
| "loss": 0.1674, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 4.814589665653496, | |
| "grad_norm": 0.2486845959874826, | |
| "learning_rate": 1.4190211033472402e-07, | |
| "loss": 0.1902, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 4.818642350557244, | |
| "grad_norm": 0.24701531634013274, | |
| "learning_rate": 1.3523284516113955e-07, | |
| "loss": 0.1965, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 4.822695035460993, | |
| "grad_norm": 0.2424354515002044, | |
| "learning_rate": 1.2872356809130682e-07, | |
| "loss": 0.1938, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 4.826747720364741, | |
| "grad_norm": 0.24401263259008535, | |
| "learning_rate": 1.2237433155004807e-07, | |
| "loss": 0.1921, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 4.830800405268491, | |
| "grad_norm": 0.24629042509856183, | |
| "learning_rate": 1.1618518667323886e-07, | |
| "loss": 0.1929, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 4.834853090172239, | |
| "grad_norm": 0.24527351576465922, | |
| "learning_rate": 1.1015618330740385e-07, | |
| "loss": 0.2104, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 4.838905775075988, | |
| "grad_norm": 0.24993420923356927, | |
| "learning_rate": 1.042873700093061e-07, | |
| "loss": 0.1914, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 4.842958459979736, | |
| "grad_norm": 0.24024907494801329, | |
| "learning_rate": 9.857879404556291e-08, | |
| "loss": 0.2269, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 4.847011144883485, | |
| "grad_norm": 0.25262749541050683, | |
| "learning_rate": 9.303050139225722e-08, | |
| "loss": 0.1923, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 4.851063829787234, | |
| "grad_norm": 0.2502138454239054, | |
| "learning_rate": 8.76425367345779e-08, | |
| "loss": 0.1937, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 4.855116514690983, | |
| "grad_norm": 0.2441087087629779, | |
| "learning_rate": 8.241494346644897e-08, | |
| "loss": 0.2177, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 4.8591691995947315, | |
| "grad_norm": 0.2470489245010094, | |
| "learning_rate": 7.734776369019204e-08, | |
| "loss": 0.2337, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 4.86322188449848, | |
| "grad_norm": 0.24277599122670662, | |
| "learning_rate": 7.244103821617332e-08, | |
| "loss": 0.1925, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 4.867274569402229, | |
| "grad_norm": 0.2452426942002828, | |
| "learning_rate": 6.769480656248606e-08, | |
| "loss": 0.2032, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 4.871327254305978, | |
| "grad_norm": 0.2431359346503819, | |
| "learning_rate": 6.310910695462635e-08, | |
| "loss": 0.2136, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 4.875379939209727, | |
| "grad_norm": 0.23683916021804133, | |
| "learning_rate": 5.8683976325191185e-08, | |
| "loss": 0.1942, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 4.879432624113475, | |
| "grad_norm": 0.2350404087337152, | |
| "learning_rate": 5.4419450313571984e-08, | |
| "loss": 0.2163, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 4.883485309017224, | |
| "grad_norm": 0.23309890859786067, | |
| "learning_rate": 5.031556326567488e-08, | |
| "loss": 0.1901, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 4.887537993920972, | |
| "grad_norm": 0.25201277765073227, | |
| "learning_rate": 4.637234823364312e-08, | |
| "loss": 0.2129, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 4.891590678824722, | |
| "grad_norm": 0.2486182262508644, | |
| "learning_rate": 4.258983697558838e-08, | |
| "loss": 0.1876, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 4.89564336372847, | |
| "grad_norm": 0.24556936656999018, | |
| "learning_rate": 3.896805995533548e-08, | |
| "loss": 0.1834, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 4.899696048632219, | |
| "grad_norm": 0.24004525286969983, | |
| "learning_rate": 3.550704634218028e-08, | |
| "loss": 0.1911, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 4.903748733535967, | |
| "grad_norm": 0.24133164614908817, | |
| "learning_rate": 3.2206824010647676e-08, | |
| "loss": 0.1755, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 4.907801418439716, | |
| "grad_norm": 0.25036437458547406, | |
| "learning_rate": 2.9067419540278476e-08, | |
| "loss": 0.196, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 4.911854103343465, | |
| "grad_norm": 0.22127798453587197, | |
| "learning_rate": 2.6088858215400638e-08, | |
| "loss": 0.2032, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 4.915906788247214, | |
| "grad_norm": 0.24441260961978103, | |
| "learning_rate": 2.3271164024940564e-08, | |
| "loss": 0.2098, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 4.919959473150962, | |
| "grad_norm": 0.25003024386568296, | |
| "learning_rate": 2.061435966221881e-08, | |
| "loss": 0.1948, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 4.924012158054711, | |
| "grad_norm": 0.24705718545387664, | |
| "learning_rate": 1.811846652477245e-08, | |
| "loss": 0.195, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 4.92806484295846, | |
| "grad_norm": 0.24203127006993658, | |
| "learning_rate": 1.5783504714184106e-08, | |
| "loss": 0.185, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 4.932117527862209, | |
| "grad_norm": 0.23786465472707058, | |
| "learning_rate": 1.360949303591097e-08, | |
| "loss": 0.2079, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 4.9361702127659575, | |
| "grad_norm": 0.24715321065178558, | |
| "learning_rate": 1.1596448999144916e-08, | |
| "loss": 0.2115, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 4.940222897669706, | |
| "grad_norm": 0.2327120001547924, | |
| "learning_rate": 9.744388816668172e-09, | |
| "loss": 0.209, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 4.944275582573455, | |
| "grad_norm": 0.24613180956749753, | |
| "learning_rate": 8.05332740472009e-09, | |
| "loss": 0.195, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 4.948328267477204, | |
| "grad_norm": 0.23745813993145284, | |
| "learning_rate": 6.523278382872811e-09, | |
| "loss": 0.1935, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 4.9523809523809526, | |
| "grad_norm": 0.2538165192899779, | |
| "learning_rate": 5.15425407393133e-09, | |
| "loss": 0.1985, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 4.956433637284701, | |
| "grad_norm": 0.25351330451303256, | |
| "learning_rate": 3.94626550383137e-09, | |
| "loss": 0.2075, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 4.96048632218845, | |
| "grad_norm": 0.232080317939174, | |
| "learning_rate": 2.899322401546112e-09, | |
| "loss": 0.204, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 4.964539007092198, | |
| "grad_norm": 0.24489195445340936, | |
| "learning_rate": 2.013433199010706e-09, | |
| "loss": 0.228, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 4.968591691995948, | |
| "grad_norm": 0.24726830604237326, | |
| "learning_rate": 1.2886050310556563e-09, | |
| "loss": 0.1984, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 4.972644376899696, | |
| "grad_norm": 0.2637378330237958, | |
| "learning_rate": 7.248437353468695e-10, | |
| "loss": 0.2205, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 4.976697061803445, | |
| "grad_norm": 0.23905412846927215, | |
| "learning_rate": 3.221538523412449e-10, | |
| "loss": 0.2175, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 4.980749746707193, | |
| "grad_norm": 0.23591072567055912, | |
| "learning_rate": 8.053862524670663e-11, | |
| "loss": 0.1928, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 4.984802431610943, | |
| "grad_norm": 0.24535295960413284, | |
| "learning_rate": 0.0, | |
| "loss": 0.1872, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 4.984802431610943, | |
| "step": 1230, | |
| "total_flos": 2.5001335726405714e+18, | |
| "train_loss": 0.4134981281389066, | |
| "train_runtime": 24880.2523, | |
| "train_samples_per_second": 6.345, | |
| "train_steps_per_second": 0.049 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1230, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.5001335726405714e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |