{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 4.989367088607595,
  "eval_steps": 500,
  "global_step": 1230,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.0040547389761784085, "grad_norm": 6.72721516957798, "learning_rate": 3.2520325203252037e-07, "loss": 1.0577, "step": 1 },
    { "epoch": 0.008109477952356817, "grad_norm": 6.46823017577869, "learning_rate": 6.504065040650407e-07, "loss": 1.0413, "step": 2 },
    { "epoch": 0.012164216928535226, "grad_norm": 6.542268116316752, "learning_rate": 9.75609756097561e-07, "loss": 1.0997, "step": 3 },
    { "epoch": 0.016218955904713634, "grad_norm": 6.510945638929656, "learning_rate": 1.3008130081300815e-06, "loss": 1.0543, "step": 4 },
    { "epoch": 0.02027369488089204, "grad_norm": 6.170439325045293, "learning_rate": 1.6260162601626018e-06, "loss": 1.0698, "step": 5 },
    { "epoch": 0.024328433857070453, "grad_norm": 5.957792791790743, "learning_rate": 1.951219512195122e-06, "loss": 1.0583, "step": 6 },
    { "epoch": 0.02838317283324886, "grad_norm": 4.818340065759815, "learning_rate": 2.2764227642276426e-06, "loss": 1.0211, "step": 7 },
    { "epoch": 0.03243791180942727, "grad_norm": 4.500911870744693, "learning_rate": 2.601626016260163e-06, "loss": 1.008, "step": 8 },
    { "epoch": 0.03649265078560568, "grad_norm": 2.569671445653381, "learning_rate": 2.926829268292683e-06, "loss": 0.9531, "step": 9 },
    { "epoch": 0.04054738976178408, "grad_norm": 2.2777855155348594, "learning_rate": 3.2520325203252037e-06, "loss": 0.96, "step": 10 },
    { "epoch": 0.044602128737962494, "grad_norm": 2.1423184761928775, "learning_rate": 3.577235772357724e-06, "loss": 0.9495, "step": 11 },
    { "epoch": 0.048656867714140906, "grad_norm": 4.503299842746655, "learning_rate": 3.902439024390244e-06, "loss": 0.9885, "step": 12 },
    { "epoch": 0.05271160669031931, "grad_norm": 4.449491752371554, "learning_rate": 4.227642276422765e-06, "loss": 0.9298, "step": 13 },
    { "epoch": 0.05676634566649772, "grad_norm": 4.687406736300778, "learning_rate": 4.552845528455285e-06, "loss": 0.9581, "step": 14 },
    { "epoch": 0.060821084642676125, "grad_norm": 4.179566361341069, "learning_rate": 4.8780487804878055e-06, "loss": 0.9229, "step": 15 },
    { "epoch": 0.06487582361885454, "grad_norm": 3.3430878107594646, "learning_rate": 5.203252032520326e-06, "loss": 0.9136, "step": 16 },
    { "epoch": 0.06893056259503294, "grad_norm": 3.1151188640810967, "learning_rate": 5.528455284552846e-06, "loss": 0.8693, "step": 17 },
    { "epoch": 0.07298530157121136, "grad_norm": 2.8364276261767274, "learning_rate": 5.853658536585366e-06, "loss": 0.8641, "step": 18 },
    { "epoch": 0.07704004054738976, "grad_norm": 2.3652276275629927, "learning_rate": 6.178861788617887e-06, "loss": 0.8179, "step": 19 },
    { "epoch": 0.08109477952356817, "grad_norm": 2.058477206053389, "learning_rate": 6.504065040650407e-06, "loss": 0.8681, "step": 20 },
    { "epoch": 0.08514951849974658, "grad_norm": 1.8143531779298494, "learning_rate": 6.829268292682928e-06, "loss": 0.824, "step": 21 },
    { "epoch": 0.08920425747592499, "grad_norm": 1.8171647874407701, "learning_rate": 7.154471544715448e-06, "loss": 0.8199, "step": 22 },
    { "epoch": 0.0932589964521034, "grad_norm": 1.867787038405791, "learning_rate": 7.4796747967479676e-06, "loss": 0.8015, "step": 23 },
    { "epoch": 0.09731373542828181, "grad_norm": 1.8661866727755845, "learning_rate": 7.804878048780489e-06, "loss": 0.8087, "step": 24 },
    { "epoch": 0.10136847440446022, "grad_norm": 1.5187077254983465, "learning_rate": 8.130081300813009e-06, "loss": 0.7823, "step": 25 },
    { "epoch": 0.10542321338063862, "grad_norm": 1.320351830303469, "learning_rate": 8.45528455284553e-06, "loss": 0.8034, "step": 26 },
    { "epoch": 0.10947795235681702, "grad_norm": 1.3710915571363496, "learning_rate": 8.78048780487805e-06, "loss": 0.7643, "step": 27 },
    { "epoch": 0.11353269133299544, "grad_norm": 1.4574618640892707, "learning_rate": 9.10569105691057e-06, "loss": 0.7792, "step": 28 },
    { "epoch": 0.11758743030917385, "grad_norm": 1.161070280425468, "learning_rate": 9.43089430894309e-06, "loss": 0.7906, "step": 29 },
    { "epoch": 0.12164216928535225, "grad_norm": 1.0272009235272497, "learning_rate": 9.756097560975611e-06, "loss": 0.7727, "step": 30 },
    { "epoch": 0.12569690826153065, "grad_norm": 1.05818562644255, "learning_rate": 1.008130081300813e-05, "loss": 0.7917, "step": 31 },
    { "epoch": 0.12975164723770907, "grad_norm": 0.9849077717276795, "learning_rate": 1.0406504065040652e-05, "loss": 0.7549, "step": 32 },
    { "epoch": 0.1338063862138875, "grad_norm": 0.9190339900565674, "learning_rate": 1.0731707317073172e-05, "loss": 0.764, "step": 33 },
    { "epoch": 0.13786112519006588, "grad_norm": 0.7710236672465931, "learning_rate": 1.1056910569105692e-05, "loss": 0.7592, "step": 34 },
    { "epoch": 0.1419158641662443, "grad_norm": 0.8569230882968184, "learning_rate": 1.1382113821138213e-05, "loss": 0.7547, "step": 35 },
    { "epoch": 0.14597060314242272, "grad_norm": 0.8436470681793244, "learning_rate": 1.1707317073170731e-05, "loss": 0.7762, "step": 36 },
    { "epoch": 0.1500253421186011, "grad_norm": 0.6634761360025331, "learning_rate": 1.2032520325203254e-05, "loss": 0.7378, "step": 37 },
    { "epoch": 0.15408008109477953, "grad_norm": 0.7360792589692458, "learning_rate": 1.2357723577235774e-05, "loss": 0.7425, "step": 38 },
    { "epoch": 0.15813482007095794, "grad_norm": 1.0972974069836425, "learning_rate": 1.2682926829268294e-05, "loss": 0.7662, "step": 39 },
    { "epoch": 0.16218955904713633, "grad_norm": 0.7497035326618606, "learning_rate": 1.3008130081300815e-05, "loss": 0.7316, "step": 40 },
    { "epoch": 0.16624429802331475, "grad_norm": 0.7873091636221122, "learning_rate": 1.3333333333333333e-05, "loss": 0.718, "step": 41 },
    { "epoch": 0.17029903699949317, "grad_norm": 0.9796648682801602, "learning_rate": 1.3658536585365855e-05, "loss": 0.7328, "step": 42 },
    { "epoch": 0.17435377597567156, "grad_norm": 0.6688871725941729, "learning_rate": 1.3983739837398376e-05, "loss": 0.7769, "step": 43 },
    { "epoch": 0.17840851495184998, "grad_norm": 0.7403041425526135, "learning_rate": 1.4308943089430896e-05, "loss": 0.7145, "step": 44 },
    { "epoch": 0.1824632539280284, "grad_norm": 0.7090195830408474, "learning_rate": 1.4634146341463415e-05, "loss": 0.7457, "step": 45 },
    { "epoch": 0.1865179929042068, "grad_norm": 0.6589003304523197, "learning_rate": 1.4959349593495935e-05, "loss": 0.7396, "step": 46 },
    { "epoch": 0.1905727318803852, "grad_norm": 0.7300942368681973, "learning_rate": 1.528455284552846e-05, "loss": 0.7171, "step": 47 },
    { "epoch": 0.19462747085656362, "grad_norm": 0.7424528805957988, "learning_rate": 1.5609756097560978e-05, "loss": 0.7282, "step": 48 },
    { "epoch": 0.198682209832742, "grad_norm": 0.6969522479998541, "learning_rate": 1.5934959349593496e-05, "loss": 0.7232, "step": 49 },
    { "epoch": 0.20273694880892043, "grad_norm": 0.7384738914554233, "learning_rate": 1.6260162601626018e-05, "loss": 0.7286, "step": 50 },
    { "epoch": 0.20679168778509882, "grad_norm": 0.7142741837045874, "learning_rate": 1.6585365853658537e-05, "loss": 0.7211, "step": 51 },
    { "epoch": 0.21084642676127724, "grad_norm": 0.7702480778247397, "learning_rate": 1.691056910569106e-05, "loss": 0.7412, "step": 52 },
    { "epoch": 0.21490116573745566, "grad_norm": 0.7672053437668961, "learning_rate": 1.7235772357723578e-05, "loss": 0.7171, "step": 53 },
    { "epoch": 0.21895590471363405, "grad_norm": 0.6837919266541023, "learning_rate": 1.75609756097561e-05, "loss": 0.7211, "step": 54 },
    { "epoch": 0.22301064368981247, "grad_norm": 0.7145738436200555, "learning_rate": 1.788617886178862e-05, "loss": 0.7116, "step": 55 },
    { "epoch": 0.22706538266599088, "grad_norm": 0.6832268719838818, "learning_rate": 1.821138211382114e-05, "loss": 0.6856, "step": 56 },
    { "epoch": 0.23112012164216927, "grad_norm": 0.5845620879910963, "learning_rate": 1.8536585365853663e-05, "loss": 0.7133, "step": 57 },
    { "epoch": 0.2351748606183477, "grad_norm": 0.6355335848331151, "learning_rate": 1.886178861788618e-05, "loss": 0.7093, "step": 58 },
    { "epoch": 0.2392295995945261, "grad_norm": 0.6948749141582691, "learning_rate": 1.91869918699187e-05, "loss": 0.6769, "step": 59 },
    { "epoch": 0.2432843385707045, "grad_norm": 0.6458388030223168, "learning_rate": 1.9512195121951222e-05, "loss": 0.7094, "step": 60 },
    { "epoch": 0.24733907754688292, "grad_norm": 0.7694557577089198, "learning_rate": 1.983739837398374e-05, "loss": 0.702, "step": 61 },
    { "epoch": 0.2513938165230613, "grad_norm": 0.7458684625864561, "learning_rate": 2.016260162601626e-05, "loss": 0.689, "step": 62 },
    { "epoch": 0.2554485554992397, "grad_norm": 0.779798108616507, "learning_rate": 2.048780487804878e-05, "loss": 0.7211, "step": 63 },
    { "epoch": 0.25950329447541814, "grad_norm": 0.7377844562285283, "learning_rate": 2.0813008130081303e-05, "loss": 0.7285, "step": 64 },
    { "epoch": 0.26355803345159656, "grad_norm": 0.6742920634485373, "learning_rate": 2.1138211382113822e-05, "loss": 0.6741, "step": 65 },
    { "epoch": 0.267612772427775, "grad_norm": 0.6913792974161947, "learning_rate": 2.1463414634146344e-05, "loss": 0.7003, "step": 66 },
    { "epoch": 0.27166751140395334, "grad_norm": 0.7630696019471577, "learning_rate": 2.1788617886178863e-05, "loss": 0.6997, "step": 67 },
    { "epoch": 0.27572225038013176, "grad_norm": 0.7890619061259875, "learning_rate": 2.2113821138211385e-05, "loss": 0.6841, "step": 68 },
    { "epoch": 0.2797769893563102, "grad_norm": 0.763429565427008, "learning_rate": 2.2439024390243907e-05, "loss": 0.6909, "step": 69 },
    { "epoch": 0.2838317283324886, "grad_norm": 0.7286382276397224, "learning_rate": 2.2764227642276426e-05, "loss": 0.7088, "step": 70 },
    { "epoch": 0.287886467308667, "grad_norm": 0.7048885671940769, "learning_rate": 2.3089430894308948e-05, "loss": 0.6983, "step": 71 },
    { "epoch": 0.29194120628484543, "grad_norm": 0.9644537757352696, "learning_rate": 2.3414634146341463e-05, "loss": 0.6856, "step": 72 },
    { "epoch": 0.2959959452610238, "grad_norm": 0.7278462733897373, "learning_rate": 2.3739837398373985e-05, "loss": 0.7066, "step": 73 },
    { "epoch": 0.3000506842372022, "grad_norm": 0.8379959872080561, "learning_rate": 2.4065040650406507e-05, "loss": 0.7168, "step": 74 },
    { "epoch": 0.30410542321338063, "grad_norm": 1.0490128911944459, "learning_rate": 2.4390243902439026e-05, "loss": 0.726, "step": 75 },
    { "epoch": 0.30816016218955905, "grad_norm": 0.6734185042288604, "learning_rate": 2.4715447154471548e-05, "loss": 0.6918, "step": 76 },
    { "epoch": 0.31221490116573747, "grad_norm": 1.1451572755516948, "learning_rate": 2.5040650406504066e-05, "loss": 0.709, "step": 77 },
    { "epoch": 0.3162696401419159, "grad_norm": 0.8185756716696622, "learning_rate": 2.536585365853659e-05, "loss": 0.6992, "step": 78 },
    { "epoch": 0.32032437911809425, "grad_norm": 1.0328469721562996, "learning_rate": 2.569105691056911e-05, "loss": 0.7066, "step": 79 },
    { "epoch": 0.32437911809427267, "grad_norm": 1.3444900383346927, "learning_rate": 2.601626016260163e-05, "loss": 0.7102, "step": 80 },
    { "epoch": 0.3284338570704511, "grad_norm": 0.7389552280306783, "learning_rate": 2.634146341463415e-05, "loss": 0.7014, "step": 81 },
    { "epoch": 0.3324885960466295, "grad_norm": 1.0803164134362695, "learning_rate": 2.6666666666666667e-05, "loss": 0.7047, "step": 82 },
    { "epoch": 0.3365433350228079, "grad_norm": 0.7485845858411434, "learning_rate": 2.699186991869919e-05, "loss": 0.6837, "step": 83 },
    { "epoch": 0.34059807399898634, "grad_norm": 0.9760047392445128, "learning_rate": 2.731707317073171e-05, "loss": 0.679, "step": 84 },
    { "epoch": 0.3446528129751647, "grad_norm": 0.7632569083394044, "learning_rate": 2.764227642276423e-05, "loss": 0.698, "step": 85 },
    { "epoch": 0.3487075519513431, "grad_norm": 0.8293891276518387, "learning_rate": 2.796747967479675e-05, "loss": 0.69, "step": 86 },
    { "epoch": 0.35276229092752154, "grad_norm": 0.9739090243740767, "learning_rate": 2.829268292682927e-05, "loss": 0.7075, "step": 87 },
    { "epoch": 0.35681702990369996, "grad_norm": 0.6430347870691953, "learning_rate": 2.8617886178861792e-05, "loss": 0.6846, "step": 88 },
    { "epoch": 0.3608717688798784, "grad_norm": 0.9133982026270139, "learning_rate": 2.8943089430894314e-05, "loss": 0.6879, "step": 89 },
    { "epoch": 0.3649265078560568, "grad_norm": 0.7595798983853993, "learning_rate": 2.926829268292683e-05, "loss": 0.6873, "step": 90 },
    { "epoch": 0.36898124683223515, "grad_norm": 0.8257891758311616, "learning_rate": 2.959349593495935e-05, "loss": 0.6923, "step": 91 },
    { "epoch": 0.3730359858084136, "grad_norm": 0.9024576487657057, "learning_rate": 2.991869918699187e-05, "loss": 0.692, "step": 92 },
    { "epoch": 0.377090724784592, "grad_norm": 0.6466942341342199, "learning_rate": 3.0243902439024392e-05, "loss": 0.6887, "step": 93 },
    { "epoch": 0.3811454637607704, "grad_norm": 0.9232596534235612, "learning_rate": 3.056910569105692e-05, "loss": 0.6741, "step": 94 },
    { "epoch": 0.3852002027369488, "grad_norm": 0.8080656528815979, "learning_rate": 3.089430894308943e-05, "loss": 0.691, "step": 95 },
    { "epoch": 0.38925494171312724, "grad_norm": 1.0058008730398007, "learning_rate": 3.1219512195121955e-05, "loss": 0.6852, "step": 96 },
    { "epoch": 0.3933096806893056, "grad_norm": 1.0280233982249636, "learning_rate": 3.154471544715447e-05, "loss": 0.6911, "step": 97 },
    { "epoch": 0.397364419665484, "grad_norm": 0.7517810818794654, "learning_rate": 3.186991869918699e-05, "loss": 0.6863, "step": 98 },
    { "epoch": 0.40141915864166244, "grad_norm": 1.0758098938437253, "learning_rate": 3.2195121951219514e-05, "loss": 0.683, "step": 99 },
    { "epoch": 0.40547389761784086, "grad_norm": 0.7342481230908802, "learning_rate": 3.2520325203252037e-05, "loss": 0.711, "step": 100 },
    { "epoch": 0.4095286365940193, "grad_norm": 0.9443883063108075, "learning_rate": 3.284552845528456e-05, "loss": 0.6931, "step": 101 },
    { "epoch": 0.41358337557019764, "grad_norm": 0.7967871031804306, "learning_rate": 3.3170731707317074e-05, "loss": 0.6787, "step": 102 },
    { "epoch": 0.41763811454637606, "grad_norm": 0.8211351724614182, "learning_rate": 3.3495934959349596e-05, "loss": 0.699, "step": 103 },
    { "epoch": 0.4216928535225545, "grad_norm": 1.0724463058993667, "learning_rate": 3.382113821138212e-05, "loss": 0.6809, "step": 104 },
    { "epoch": 0.4257475924987329, "grad_norm": 1.1780972529280092, "learning_rate": 3.414634146341463e-05, "loss": 0.6875, "step": 105 },
    { "epoch": 0.4298023314749113, "grad_norm": 0.7402523042919582, "learning_rate": 3.4471544715447155e-05, "loss": 0.6664, "step": 106 },
    { "epoch": 0.43385707045108973, "grad_norm": 1.3308853636327231, "learning_rate": 3.479674796747968e-05, "loss": 0.7151, "step": 107 },
    { "epoch": 0.4379118094272681, "grad_norm": 0.8188608634411022, "learning_rate": 3.51219512195122e-05, "loss": 0.6912, "step": 108 },
    { "epoch": 0.4419665484034465, "grad_norm": 1.315123879380671, "learning_rate": 3.544715447154472e-05, "loss": 0.6941, "step": 109 },
    { "epoch": 0.44602128737962493, "grad_norm": 0.9525459481311808, "learning_rate": 3.577235772357724e-05, "loss": 0.678, "step": 110 },
    { "epoch": 0.45007602635580335, "grad_norm": 2.3901357503401868, "learning_rate": 3.609756097560976e-05, "loss": 0.7146, "step": 111 },
    { "epoch": 0.45413076533198177, "grad_norm": 1.4990202315034267, "learning_rate": 3.642276422764228e-05, "loss": 0.6937, "step": 112 },
    { "epoch": 0.4581855043081602, "grad_norm": 0.8426609396121021, "learning_rate": 3.67479674796748e-05, "loss": 0.6987, "step": 113 },
    { "epoch": 0.46224024328433855, "grad_norm": 1.6349726015218022, "learning_rate": 3.7073170731707325e-05, "loss": 0.6866, "step": 114 },
    { "epoch": 0.46629498226051697, "grad_norm": 1.196905079918351, "learning_rate": 3.739837398373984e-05, "loss": 0.6705, "step": 115 },
    { "epoch": 0.4703497212366954, "grad_norm": 0.9259170416416036, "learning_rate": 3.772357723577236e-05, "loss": 0.7079, "step": 116 },
    { "epoch": 0.4744044602128738, "grad_norm": 1.0180263895462414, "learning_rate": 3.804878048780488e-05, "loss": 0.7203, "step": 117 },
    { "epoch": 0.4784591991890522, "grad_norm": 1.0078834521339362, "learning_rate": 3.83739837398374e-05, "loss": 0.6731, "step": 118 },
    { "epoch": 0.48251393816523064, "grad_norm": 1.5309138577841397, "learning_rate": 3.869918699186992e-05, "loss": 0.6875, "step": 119 },
    { "epoch": 0.486568677141409, "grad_norm": 0.821718488295634, "learning_rate": 3.9024390243902444e-05, "loss": 0.6718, "step": 120 },
    { "epoch": 0.4906234161175874, "grad_norm": 1.7320738012628334, "learning_rate": 3.9349593495934966e-05, "loss": 0.6967, "step": 121 },
    { "epoch": 0.49467815509376584, "grad_norm": 1.28806423710955, "learning_rate": 3.967479674796748e-05, "loss": 0.6951, "step": 122 },
    { "epoch": 0.49873289406994425, "grad_norm": 1.9807858710872799, "learning_rate": 4e-05, "loss": 0.743, "step": 123 },
    { "epoch": 0.5027876330461226, "grad_norm": 1.835357066309503, "learning_rate": 3.999991946137476e-05, "loss": 0.6937, "step": 124 },
    { "epoch": 0.5068423720223011, "grad_norm": 1.2070749708501356, "learning_rate": 3.999967784614766e-05, "loss": 0.7031, "step": 125 },
    { "epoch": 0.5108971109984795, "grad_norm": 4.614800436607694, "learning_rate": 3.9999275156264656e-05, "loss": 0.7107, "step": 126 },
    { "epoch": 0.5149518499746579, "grad_norm": 1.6327098536948497, "learning_rate": 3.999871139496895e-05, "loss": 0.694, "step": 127 },
    { "epoch": 0.5190065889508363, "grad_norm": 1.3013993777388393, "learning_rate": 3.9997986566800995e-05, "loss": 0.6714, "step": 128 },
    { "epoch": 0.5230613279270147, "grad_norm": 1.9289443360186063, "learning_rate": 3.999710067759846e-05, "loss": 0.6753, "step": 129 },
    { "epoch": 0.5271160669031931, "grad_norm": 1.7090956719629866, "learning_rate": 3.999605373449617e-05, "loss": 0.7147, "step": 130 },
    { "epoch": 0.5311708058793715, "grad_norm": 1.0722214564616033, "learning_rate": 3.9994845745926075e-05, "loss": 0.667, "step": 131 },
    { "epoch": 0.53522554485555, "grad_norm": 1.1286918966506552, "learning_rate": 3.999347672161713e-05, "loss": 0.7076, "step": 132 },
    { "epoch": 0.5392802838317283, "grad_norm": 1.8596403138033017, "learning_rate": 3.999194667259528e-05, "loss": 0.7029, "step": 133 },
    { "epoch": 0.5433350228079067, "grad_norm": 0.9980098596120747, "learning_rate": 3.999025561118334e-05, "loss": 0.7003, "step": 134 },
    { "epoch": 0.5473897617840852, "grad_norm": 1.0646233674967533, "learning_rate": 3.998840355100086e-05, "loss": 0.6803, "step": 135 },
    { "epoch": 0.5514445007602635, "grad_norm": 1.1747189909613964, "learning_rate": 3.998639050696409e-05, "loss": 0.6981, "step": 136 },
    { "epoch": 0.555499239736442, "grad_norm": 0.8890022070734527, "learning_rate": 3.998421649528582e-05, "loss": 0.6801, "step": 137 },
    { "epoch": 0.5595539787126204, "grad_norm": 0.9237520878918816, "learning_rate": 3.9981881533475234e-05, "loss": 0.6928, "step": 138 },
    { "epoch": 0.5636087176887988, "grad_norm": 0.689021448921926, "learning_rate": 3.997938564033779e-05, "loss": 0.671, "step": 139 },
    { "epoch": 0.5676634566649772, "grad_norm": 0.9582186141418718, "learning_rate": 3.9976728835975064e-05, "loss": 0.6598, "step": 140 },
    { "epoch": 0.5717181956411556, "grad_norm": 0.7340409097480117, "learning_rate": 3.9973911141784605e-05, "loss": 0.6803, "step": 141 },
    { "epoch": 0.575772934617334, "grad_norm": 0.705798465159733, "learning_rate": 3.997093258045973e-05, "loss": 0.6933, "step": 142 },
    { "epoch": 0.5798276735935124, "grad_norm": 0.7765603061788923, "learning_rate": 3.996779317598936e-05, "loss": 0.6783, "step": 143 },
    { "epoch": 0.5838824125696909, "grad_norm": 0.6238745530139232, "learning_rate": 3.996449295365782e-05, "loss": 0.6601, "step": 144 },
    { "epoch": 0.5879371515458692, "grad_norm": 0.9929072620924326, "learning_rate": 3.996103194004467e-05, "loss": 0.6777, "step": 145 },
    { "epoch": 0.5919918905220476, "grad_norm": 0.7143742837978996, "learning_rate": 3.995741016302441e-05, "loss": 0.6761, "step": 146 },
    { "epoch": 0.5960466294982261, "grad_norm": 0.8774888500952484, "learning_rate": 3.9953627651766364e-05, "loss": 0.6701, "step": 147 },
    { "epoch": 0.6001013684744044, "grad_norm": 0.8343025750986708, "learning_rate": 3.9949684436734325e-05, "loss": 0.706, "step": 148 },
    { "epoch": 0.6041561074505829, "grad_norm": 0.8102723088966813, "learning_rate": 3.994558054968643e-05, "loss": 0.6822, "step": 149 },
    { "epoch": 0.6082108464267613, "grad_norm": 0.9568927730397531, "learning_rate": 3.994131602367481e-05, "loss": 0.7287, "step": 150 },
    { "epoch": 0.6122655854029397, "grad_norm": 0.6015408850621344, "learning_rate": 3.9936890893045376e-05, "loss": 0.6822, "step": 151 },
    { "epoch": 0.6163203243791181, "grad_norm": 1.2001824966184542, "learning_rate": 3.993230519343752e-05, "loss": 0.6833, "step": 152 },
    { "epoch": 0.6203750633552965, "grad_norm": 0.7698179885632005, "learning_rate": 3.992755896178383e-05, "loss": 0.6715, "step": 153 },
    { "epoch": 0.6244298023314749, "grad_norm": 1.0977983341598858, "learning_rate": 3.992265223630981e-05, "loss": 0.6841, "step": 154 },
    { "epoch": 0.6284845413076533, "grad_norm": 0.8188334478534633, "learning_rate": 3.991758505653355e-05, "loss": 0.6792, "step": 155 },
    { "epoch": 0.6325392802838318, "grad_norm": 0.7417262938692606, "learning_rate": 3.991235746326543e-05, "loss": 0.6498, "step": 156 },
    { "epoch": 0.6365940192600101, "grad_norm": 0.7619102560385524, "learning_rate": 3.9906969498607745e-05, "loss": 0.6456, "step": 157 },
    { "epoch": 0.6406487582361885, "grad_norm": 0.6784465965155523, "learning_rate": 3.990142120595444e-05, "loss": 0.6629, "step": 158 },
    { "epoch": 0.644703497212367, "grad_norm": 0.722121613112185, "learning_rate": 3.98957126299907e-05, "loss": 0.6637, "step": 159 },
    { "epoch": 0.6487582361885453, "grad_norm": 0.6447945770108525, "learning_rate": 3.9889843816692596e-05, "loss": 0.6549, "step": 160 },
    { "epoch": 0.6528129751647238, "grad_norm": 0.7035653078450818, "learning_rate": 3.9883814813326766e-05, "loss": 0.6813, "step": 161 },
    { "epoch": 0.6568677141409022, "grad_norm": 0.6624902074090185, "learning_rate": 3.9877625668449956e-05, "loss": 0.6602, "step": 162 },
    { "epoch": 0.6609224531170805, "grad_norm": 0.575113864148145, "learning_rate": 3.98712764319087e-05, "loss": 0.6692, "step": 163 },
    { "epoch": 0.664977192093259, "grad_norm": 0.6722069364099436, "learning_rate": 3.9864767154838864e-05, "loss": 0.668, "step": 164 },
    { "epoch": 0.6690319310694374, "grad_norm": 0.6151763300858886, "learning_rate": 3.9858097889665277e-05, "loss": 0.6727, "step": 165 },
    { "epoch": 0.6730866700456158, "grad_norm": 0.5765462457314585, "learning_rate": 3.985126869010129e-05, "loss": 0.6843, "step": 166 },
    { "epoch": 0.6771414090217942, "grad_norm": 0.8147294856093195, "learning_rate": 3.984427961114833e-05, "loss": 0.6813, "step": 167 },
    { "epoch": 0.6811961479979727, "grad_norm": 0.6653934331750595, "learning_rate": 3.9837130709095475e-05, "loss": 0.6906, "step": 168 },
    { "epoch": 0.685250886974151, "grad_norm": 0.6367427096971082, "learning_rate": 3.982982204151901e-05, "loss": 0.6988, "step": 169 },
    { "epoch": 0.6893056259503294, "grad_norm": 0.599698466328495, "learning_rate": 3.982235366728193e-05, "loss": 0.6692, "step": 170 },
    { "epoch": 0.6933603649265079, "grad_norm": 0.6849336630476502, "learning_rate": 3.9814725646533505e-05, "loss": 0.6536, "step": 171 },
    { "epoch": 0.6974151039026862, "grad_norm": 0.5733465390017347, "learning_rate": 3.9806938040708746e-05, "loss": 0.6961, "step": 172 },
    { "epoch": 0.7014698428788647, "grad_norm": 0.6818706973421986, "learning_rate": 3.9798990912527976e-05, "loss": 0.6856, "step": 173 },
    { "epoch": 0.7055245818550431, "grad_norm": 0.6132588090722377, "learning_rate": 3.979088432599627e-05, "loss": 0.6828, "step": 174 },
    { "epoch": 0.7095793208312214, "grad_norm": 0.5894185438945142, "learning_rate": 3.9782618346402964e-05, "loss": 0.6637, "step": 175 },
    { "epoch": 0.7136340598073999, "grad_norm": 0.6342968095007281, "learning_rate": 3.977419304032111e-05, "loss": 0.6862, "step": 176 },
    { "epoch": 0.7176887987835783, "grad_norm": 0.6172904989559048, "learning_rate": 3.976560847560697e-05, "loss": 0.6754, "step": 177 },
    { "epoch": 0.7217435377597567, "grad_norm": 0.6513584061429492, "learning_rate": 3.9756864721399456e-05, "loss": 0.658, "step": 178 },
    { "epoch": 0.7257982767359351, "grad_norm": 0.564461934281462, "learning_rate": 3.974796184811956e-05, "loss": 0.6626, "step": 179 },
    { "epoch": 0.7298530157121136, "grad_norm": 0.6090058825632219, "learning_rate": 3.973889992746979e-05, "loss": 0.6757, "step": 180 },
    { "epoch": 0.733907754688292, "grad_norm": 0.5585227586359442, "learning_rate": 3.972967903243361e-05, "loss": 0.6717, "step": 181 },
    { "epoch": 0.7379624936644703, "grad_norm": 0.4707154002171439, "learning_rate": 3.972029923727486e-05, "loss": 0.6582, "step": 182 },
    { "epoch": 0.7420172326406488, "grad_norm": 0.544674928057688, "learning_rate": 3.971076061753709e-05, "loss": 0.6474, "step": 183 },
    { "epoch": 0.7460719716168271, "grad_norm": 0.5365234891982672, "learning_rate": 3.9701063250043066e-05, "loss": 0.6707, "step": 184 },
    { "epoch": 0.7501267105930056, "grad_norm": 0.4955924399686322, "learning_rate": 3.969120721289402e-05, "loss": 0.6644, "step": 185 },
    { "epoch": 0.754181449569184, "grad_norm": 0.5444771343256386, "learning_rate": 3.9681192585469146e-05, "loss": 0.6626, "step": 186 },
    { "epoch": 0.7582361885453623, "grad_norm": 0.5711354858382636, "learning_rate": 3.9671019448424865e-05, "loss": 0.6952, "step": 187 },
    { "epoch": 0.7622909275215408, "grad_norm": 0.49033415499089034, "learning_rate": 3.966068788369422e-05, "loss": 0.6643, "step": 188 },
    { "epoch": 0.7663456664977192, "grad_norm": 0.7977718416858243, "learning_rate": 3.965019797448622e-05, "loss": 0.6915, "step": 189 },
    { "epoch": 0.7704004054738977, "grad_norm": 0.508539756624699, "learning_rate": 3.963954980528515e-05, "loss": 0.6494, "step": 190 },
    { "epoch": 0.774455144450076, "grad_norm": 0.5835751436218239, "learning_rate": 3.9628743461849905e-05, "loss": 0.6596, "step": 191 },
    { "epoch": 0.7785098834262545, "grad_norm": 0.49457305443770033, "learning_rate": 3.961777903121329e-05, "loss": 0.6445, "step": 192 },
    { "epoch": 0.7825646224024329, "grad_norm": 0.5395641244777112, "learning_rate": 3.960665660168131e-05, "loss": 0.6867, "step": 193 },
    { "epoch": 0.7866193613786112, "grad_norm": 0.5514382799732869, "learning_rate": 3.9595376262832485e-05, "loss": 0.6863, "step": 194 },
    { "epoch": 0.7906741003547897, "grad_norm": 0.5663218656289281, "learning_rate": 3.9583938105517127e-05, "loss": 0.6692, "step": 195 },
    { "epoch": 0.794728839330968, "grad_norm": 0.5720308948620724, "learning_rate": 3.957234222185657e-05, "loss": 0.667, "step": 196 },
    { "epoch": 0.7987835783071465, "grad_norm": 0.6308084088213408, "learning_rate": 3.9560588705242474e-05, "loss": 0.6626, "step": 197 },
    { "epoch": 0.8028383172833249, "grad_norm": 0.7116301575492819, "learning_rate": 3.954867765033605e-05, "loss": 0.6598, "step": 198 },
    { "epoch": 0.8068930562595032, "grad_norm": 0.6410050012636682, "learning_rate": 3.953660915306728e-05, "loss": 0.6564, "step": 199 },
    { "epoch": 0.8109477952356817, "grad_norm": 0.603593734567084, "learning_rate": 3.952438331063419e-05, "loss": 0.6631, "step": 200 },
    { "epoch": 0.8150025342118601, "grad_norm": 0.6479894849496406, "learning_rate": 3.951200022150205e-05, "loss": 0.6794, "step": 201 },
    { "epoch": 0.8190572731880386, "grad_norm": 0.5743847940643024, "learning_rate": 3.949945998540253e-05, "loss": 0.6757, "step": 202 },
    { "epoch": 0.8231120121642169, "grad_norm": 0.5813175104068687, "learning_rate": 3.9486762703332993e-05, "loss": 0.6915, "step": 203 },
    { "epoch": 0.8271667511403953, "grad_norm": 0.5622095934511784, "learning_rate": 3.947390847755559e-05, "loss": 0.6586, "step": 204 },
    { "epoch": 0.8312214901165738, "grad_norm": 0.5268665217538123, "learning_rate": 3.946089741159648e-05, "loss": 0.6542, "step": 205 },
    { "epoch": 0.8352762290927521, "grad_norm": 0.5409841799868024, "learning_rate": 3.944772961024501e-05, "loss": 0.6562, "step": 206 },
    { "epoch": 0.8393309680689306, "grad_norm": 0.635540461494687, "learning_rate": 3.943440517955285e-05, "loss": 0.669, "step": 207 },
    { "epoch": 0.843385707045109, "grad_norm": 0.6637248171877679, "learning_rate": 3.9420924226833126e-05, "loss": 0.6677, "step": 208 },
    { "epoch": 0.8474404460212874, "grad_norm": 0.6656738896112339, "learning_rate": 3.9407286860659566e-05, "loss": 0.662, "step": 209 },
    { "epoch": 0.8514951849974658, "grad_norm": 0.6871767253499165, "learning_rate": 3.9393493190865657e-05, "loss": 0.6627, "step": 210 },
    { "epoch": 0.8555499239736442, "grad_norm": 0.7622749720064445, "learning_rate": 3.937954332854371e-05, "loss": 0.6558, "step": 211 },
    { "epoch": 0.8596046629498226, "grad_norm": 0.6414294057685311, "learning_rate": 3.9365437386044016e-05, "loss": 0.6754, "step": 212 },
    { "epoch": 0.863659401926001, "grad_norm": 0.7953161065568234, "learning_rate": 3.935117547697387e-05, "loss": 0.6677, "step": 213 },
    { "epoch": 0.8677141409021795, "grad_norm": 0.594375165813602, "learning_rate": 3.933675771619675e-05, "loss": 0.6727, "step": 214 },
    { "epoch": 0.8717688798783578, "grad_norm": 0.7344808009390653, "learning_rate": 3.932218421983131e-05, "loss": 0.681, "step": 215 },
    { "epoch": 0.8758236188545362, "grad_norm": 0.7167765957206945, "learning_rate": 3.9307455105250484e-05, "loss": 0.6759, "step": 216 },
    { "epoch": 0.8798783578307147, "grad_norm": 0.603462550356937, "learning_rate": 3.929257049108054e-05, "loss": 0.6387, "step": 217 },
    { "epoch": 0.883933096806893, "grad_norm": 0.571884635497545, "learning_rate": 3.927753049720011e-05, "loss": 0.6519, "step": 218 },
    { "epoch": 0.8879878357830715, "grad_norm": 0.6559833796555712, "learning_rate": 3.9262335244739234e-05, "loss": 0.6928, "step": 219 },
    { "epoch": 0.8920425747592499, "grad_norm": 0.53928088476035, "learning_rate": 3.92469848560784e-05, "loss": 0.6468, "step": 220 },
    { "epoch": 0.8960973137354283, "grad_norm": 0.6372413119697461, "learning_rate": 3.923147945484751e-05, "loss": 0.6565, "step": 221 },
    { "epoch": 0.9001520527116067, "grad_norm": 0.5878945612482709, "learning_rate": 3.9215819165924956e-05, "loss": 0.6627, "step": 222 },
    { "epoch": 0.9042067916877851, "grad_norm": 0.7502268667930518, "learning_rate": 3.920000411543654e-05, "loss": 0.6728, "step": 223 },
    { "epoch": 0.9082615306639635, "grad_norm": 0.5258230439693216, "learning_rate": 3.9184034430754495e-05, "loss": 0.6403, "step": 224 },
    { "epoch": 0.9123162696401419, "grad_norm": 0.6898307257490985, "learning_rate": 3.916791024049648e-05, "loss": 0.6902, "step": 225 },
    { "epoch": 0.9163710086163204, "grad_norm": 0.550891947167564, "learning_rate": 3.91516316745245e-05, "loss": 0.6747, "step": 226 },
    { "epoch": 0.9204257475924987, "grad_norm": 0.6560712993548744, "learning_rate": 3.913519886394389e-05, "loss": 0.6422, "step": 227 },
    { "epoch": 0.9244804865686771, "grad_norm": 0.5600888821358391, "learning_rate": 3.911861194110225e-05, "loss": 0.6558, "step": 228 },
    { "epoch": 0.9285352255448556, "grad_norm": 0.6302155881324434, "learning_rate": 3.910187103958837e-05, "loss": 0.7023, "step": 229 },
    { "epoch": 0.9325899645210339, "grad_norm": 0.4869967136247146, "learning_rate": 3.908497629423117e-05, "loss": 0.6598, "step": 230 },
    { "epoch": 0.9366447034972124, "grad_norm": 0.6569008883330597, "learning_rate": 3.9067927841098614e-05, "loss": 0.6672, "step": 231 },
    { "epoch": 0.9406994424733908, "grad_norm": 0.5552260012456819, "learning_rate": 3.9050725817496594e-05, "loss": 0.6554, "step": 232 },
    { "epoch": 0.9447541814495691, "grad_norm": 0.5841537947243322, "learning_rate": 3.9033370361967844e-05, "loss": 0.6797, "step": 233 },
    { "epoch": 0.9488089204257476, "grad_norm": 0.5805402020053461, "learning_rate": 3.901586161429081e-05, "loss": 0.6525, "step": 234 },
    { "epoch": 0.952863659401926, "grad_norm": 0.5175954006831186, "learning_rate": 3.8998199715478545e-05, "loss": 0.6587, "step": 235 },
    { "epoch": 0.9569183983781044, "grad_norm": 0.5235935548422253, "learning_rate": 3.8980384807777564e-05, "loss": 0.6731, "step": 236 },
    { "epoch": 0.9609731373542828, "grad_norm": 0.5240565344545104, "learning_rate": 3.896241703466667e-05, "loss": 0.668, "step": 237 },
    { "epoch": 0.9650278763304613, "grad_norm": 0.5566347217339349, "learning_rate": 3.894429654085585e-05, "loss": 0.6512, "step": 238 },
    { "epoch": 0.9690826153066396, "grad_norm": 0.5467691540646846, "learning_rate": 3.892602347228505e-05, "loss": 0.6656, "step": 239 },
    { "epoch": 0.973137354282818, "grad_norm": 0.5643909729738225, "learning_rate": 3.890759797612307e-05, "loss": 0.6786, "step": 240 },
    { "epoch": 0.9771920932589965, "grad_norm": 0.5488436036709752, "learning_rate": 3.888902020076632e-05, "loss": 0.6521, "step": 241 },
    { "epoch": 0.9812468322351748, "grad_norm": 0.5759615663503065, "learning_rate": 3.887029029583764e-05, "loss": 0.6472, "step": 242 },
    { "epoch": 0.9853015712113533, "grad_norm": 0.5164525825813331, "learning_rate": 3.8851408412185125e-05, "loss": 0.6488, "step": 243 },
    { "epoch": 0.9893563101875317, "grad_norm": 0.6821928673060211, "learning_rate": 3.8832374701880855e-05, "loss": 0.6657, "step": 244 },
    { "epoch": 0.99341104916371, "grad_norm": 0.4928621635756573, "learning_rate": 3.881318931821972e-05, "loss": 0.648, "step": 245 },
    { "epoch": 0.9974657881398885, "grad_norm": 0.6463592667023097, "learning_rate": 3.879385241571817e-05, "loss": 0.6684, "step": 246 },
    { "epoch": 1.0030410542321337, "grad_norm": 0.5878757829771379, "learning_rate": 3.8774364150112955e-05, "loss": 0.5734, "step": 247 },
    { "epoch": 1.0070957932083122, "grad_norm": 0.5758660605379867, "learning_rate": 3.8754724678359884e-05, "loss": 0.5413, "step": 248 },
    { "epoch": 1.0111505321844907, "grad_norm": 0.5687600864259809, "learning_rate": 3.873493415863256e-05, "loss": 0.5414, "step": 249 },
    { "epoch": 1.015205271160669, "grad_norm": 0.5857842299726084, "learning_rate": 3.871499275032111e-05, "loss": 0.5532, "step": 250 },
    { "epoch": 1.0192600101368474, "grad_norm": 0.6289664075593226, "learning_rate": 3.869490061403091e-05, "loss": 0.5583, "step": 251 },
    { "epoch": 1.023314749113026, "grad_norm": 0.6235876622520403, "learning_rate": 3.867465791158124e-05, "loss": 0.5648, "step": 252 },
    { "epoch": 1.0273694880892044, "grad_norm": 0.5381281528998375, "learning_rate": 3.865426480600407e-05, "loss": 0.5428, "step": 253 },
    { "epoch": 1.0314242270653826, "grad_norm": 0.5254876407102241, "learning_rate": 3.863372146154264e-05, "loss": 0.5346, "step": 254 },
    { "epoch": 1.035478966041561, "grad_norm": 0.5927676727148181, "learning_rate": 3.861302804365024e-05, "loss": 0.5399, "step": 255 },
    { "epoch": 1.0395337050177396, "grad_norm": 0.49794763930707314, "learning_rate": 3.85921847189888e-05, "loss": 0.5386, "step": 256 },
    { "epoch": 1.0435884439939178, "grad_norm": 0.5549831303164778, "learning_rate": 3.85711916554276e-05, "loss": 0.5547, "step": 257 },
    { "epoch": 1.0476431829700963, "grad_norm": 0.525078912106115, "learning_rate": 3.85500490220419e-05, "loss": 0.5363, "step": 258 },
    { "epoch": 1.0516979219462748, "grad_norm": 0.5418998537277435, "learning_rate": 3.852875698911154e-05, "loss": 0.4941, "step": 259 },
    { "epoch": 1.055752660922453, "grad_norm": 0.5154304920568932, "learning_rate": 3.850731572811963e-05, "loss": 0.4957, "step": 260 },
    { "epoch": 1.0598073998986315, "grad_norm": 0.5810323798109459, "learning_rate": 3.848572541175116e-05, "loss": 0.5211, "step": 261 },
    { "epoch": 1.06386213887481, "grad_norm": 0.5744998068855989, "learning_rate": 3.846398621389154e-05, "loss": 0.5099, "step": 262 },
    { "epoch": 1.0679168778509884, "grad_norm": 0.648411930307603, "learning_rate": 3.84420983096253e-05, "loss": 0.558, "step": 263 },
    { "epoch": 1.0719716168271667, "grad_norm": 0.6270281388129116, "learning_rate": 3.8420061875234606e-05, "loss": 0.5211, "step": 264 },
    { "epoch": 1.0760263558033452, "grad_norm": 0.5064640571790376, "learning_rate": 3.839787708819787e-05, "loss": 0.5245, "step": 265 },
    { "epoch": 1.0800810947795236, "grad_norm": 0.6718727630974323, "learning_rate": 3.8375544127188325e-05, "loss": 0.5385, "step": 266 },
    { "epoch": 1.0841358337557019, "grad_norm": 0.5479619482607734, "learning_rate": 3.8353063172072564e-05, "loss": 0.5481, "step": 267 },
    { "epoch": 1.0881905727318804, "grad_norm": 0.5824318189656096, "learning_rate": 3.8330434403909105e-05, "loss": 0.541, "step": 268 },
    { "epoch": 1.0922453117080588, "grad_norm": 0.594580577223674, "learning_rate": 3.8307658004946934e-05, "loss": 0.5386, "step": 269 },
    { "epoch": 1.0963000506842373, "grad_norm": 0.5971130485367072, "learning_rate": 3.8284734158624046e-05, "loss": 0.5503, "step": 270 },
    { "epoch": 1.1003547896604156, "grad_norm": 0.7838587020785184, "learning_rate": 3.826166304956594e-05, "loss": 0.5187, "step": 271 },
    { "epoch": 1.104409528636594, "grad_norm": 0.5669595496349817, "learning_rate": 3.8238444863584164e-05, "loss": 0.5361, "step": 272 },
    { "epoch": 1.1084642676127725, "grad_norm": 0.7102136463147705, "learning_rate": 3.821507978767479e-05, "loss": 0.5479, "step": 273 },
    { "epoch": 1.1125190065889508, "grad_norm": 0.5871403310820278, "learning_rate": 3.819156801001693e-05, "loss": 0.5419, "step": 274 },
    { "epoch": 1.1165737455651292, "grad_norm": 0.7350609226387498, "learning_rate": 3.816790971997121e-05, "loss": 0.5435, "step": 275 },
    { "epoch": 1.1206284845413077, "grad_norm": 0.5332760748470078, "learning_rate": 3.8144105108078246e-05, "loss": 0.5295, "step": 276 },
    { "epoch": 1.124683223517486, "grad_norm": 0.6980458593997768, "learning_rate": 3.81201543660571e-05, "loss": 0.5415, "step": 277 },
    { "epoch": 1.1287379624936644, "grad_norm": 0.5822645044689777, "learning_rate": 3.809605768680377e-05, "loss": 0.5188, "step": 278 },
    { "epoch": 1.132792701469843, "grad_norm": 0.6945976103783668, "learning_rate": 3.807181526438958e-05, "loss": 0.5256, "step": 279 },
    { "epoch": 1.1368474404460214, "grad_norm": 0.5708085225855689, "learning_rate": 3.8047427294059697e-05, "loss": 0.5484, "step": 280 },
    { "epoch": 1.1409021794221996, "grad_norm": 0.705893306854577, "learning_rate": 3.802289397223145e-05, "loss": 0.5115, "step": 281 },
    { "epoch": 1.144956918398378, "grad_norm": 0.6254079685042802, "learning_rate": 3.7998215496492854e-05, "loss": 0.5297, "step": 282 },
    { "epoch": 1.1490116573745566, "grad_norm": 0.5534058307056284, "learning_rate": 3.797339206560096e-05, "loss": 0.507, "step": 283 },
    { "epoch": 1.1530663963507348, "grad_norm": 0.710682756301194, "learning_rate": 3.794842387948027e-05, "loss": 0.5236, "step": 284 },
    { "epoch": 1.1571211353269133, "grad_norm": 0.5741554664737155, "learning_rate": 3.7923311139221114e-05, "loss": 0.5348, "step": 285 },
    { "epoch": 1.1611758743030918, "grad_norm": 0.5615514090579279, "learning_rate": 3.7898054047078054e-05, "loss": 0.514, "step": 286 },
    { "epoch": 1.1652306132792702, "grad_norm": 0.599102775121823, "learning_rate": 3.787265280646825e-05, "loss": 0.5119, "step": 287 },
    { "epoch": 1.1692853522554485, "grad_norm": 0.5160057202823605, "learning_rate": 3.7847107621969786e-05, "loss": 0.563, "step": 288 },
    { "epoch": 1.173340091231627, "grad_norm": 0.5128746063076736, "learning_rate": 3.7821418699320064e-05, "loss": 0.5173, "step": 289 },
    { "epoch": 1.1773948302078054, "grad_norm": 0.5469712587296043, "learning_rate": 3.7795586245414145e-05, "loss": 0.5493, "step": 290 },
    { "epoch": 1.1814495691839837, "grad_norm": 0.519459832649935, "learning_rate": 3.776961046830306e-05, "loss": 0.5217, "step": 291 },
    { "epoch": 1.1855043081601622, "grad_norm": 0.48836631195223607, "learning_rate": 3.774349157719215e-05, "loss": 0.5253, "step": 292 },
    { "epoch": 1.1895590471363406, "grad_norm": 0.5250048240890355, "learning_rate": 3.7717229782439365e-05, "loss": 0.5176, "step": 293 },
    { "epoch": 1.1936137861125191, "grad_norm": 0.5205090925529059, "learning_rate": 3.769082529555359e-05, "loss": 0.5271, "step": 294 },
    { "epoch": 1.1976685250886974, "grad_norm": 0.6366825317299508, "learning_rate": 3.766427832919294e-05, "loss": 0.5549, "step": 295 },
    { "epoch": 1.2017232640648758, "grad_norm": 0.6167040022733965, "learning_rate": 3.7637589097163024e-05, "loss": 0.5355, "step": 296 },
    { "epoch": 1.2057780030410543, "grad_norm": 0.6366891094836108, "learning_rate": 3.761075781441526e-05, "loss": 0.5227, "step": 297 },
    { "epoch": 1.2098327420172326, "grad_norm": 0.8359561651386813, "learning_rate": 3.75837846970451e-05, "loss": 0.5554, "step": 298 },
    { "epoch": 1.213887480993411, "grad_norm": 0.5294761511513448, "learning_rate": 3.755666996229032e-05, "loss": 0.5136, "step": 299 },
    { "epoch": 1.2179422199695895, "grad_norm": 0.6624181817991323, "learning_rate": 3.752941382852927e-05, "loss": 0.5645, "step": 300 },
    { "epoch": 1.221996958945768, "grad_norm": 0.5877995515862958, "learning_rate": 3.7502016515279115e-05, "loss": 0.5354, "step": 301 },
    { "epoch": 1.2260516979219462, "grad_norm": 0.5875575199591586, "learning_rate": 3.7474478243194043e-05, "loss": 0.5308, "step": 302 },
    { "epoch": 1.2301064368981247, "grad_norm": 0.5614185046630379, "learning_rate": 3.744679923406351e-05, "loss": 0.5529, "step": 303 },
    { "epoch": 1.2341611758743032, "grad_norm": 1.0664359102077074, "learning_rate": 3.741897971081043e-05, "loss": 0.5508, "step": 304 },
    { "epoch": 1.2382159148504814, "grad_norm": 0.6046973911120708, "learning_rate": 3.739101989748946e-05, "loss": 0.5397, "step": 305 },
    { "epoch": 1.24227065382666, "grad_norm": 1.2673124349483489, "learning_rate": 3.7362920019285066e-05, "loss": 0.5705, "step": 306 },
    { "epoch": 1.2463253928028384, "grad_norm": 0.598233704017945, "learning_rate": 3.73346803025098e-05, "loss": 0.5146, "step": 307 },
    { "epoch": 1.2503801317790169, "grad_norm": 0.5651528299876605, "learning_rate": 3.730630097460247e-05, "loss": 0.5261, "step": 308 },
    { "epoch": 1.254434870755195, "grad_norm": 0.5878193006363205, "learning_rate": 3.727778226412628e-05, "loss": 0.5459, "step": 309 },
    { "epoch": 1.2584896097313736, "grad_norm": 0.6599534372703165, "learning_rate": 3.7249124400767006e-05, "loss": 0.5515, "step": 310 },
    { "epoch": 1.2625443487075518, "grad_norm": 0.5382395668177711, "learning_rate": 3.722032761533114e-05, "loss": 0.5447, "step": 311 },
    { "epoch": 1.2665990876837303, "grad_norm": 0.5821648363520998, "learning_rate": 3.719139213974403e-05, "loss": 0.5666, "step": 312 },
    { "epoch": 1.2706538266599088, "grad_norm": 0.5225471012481449, "learning_rate": 3.7162318207048006e-05, "loss": 0.5115, "step": 313 },
    { "epoch": 1.2747085656360873, "grad_norm": 0.5312525144558344, "learning_rate": 3.713310605140055e-05, "loss": 0.5358, "step": 314 },
    { "epoch": 1.2787633046122655, "grad_norm": 0.5694871694762195, "learning_rate": 3.710375590807233e-05, "loss": 0.5293, "step": 315 },
    { "epoch": 1.282818043588444, "grad_norm": 0.5824208302644457, "learning_rate": 3.7074268013445365e-05, "loss": 0.5593, "step": 316 },
    { "epoch": 1.2868727825646225, "grad_norm": 0.5578603594395953, "learning_rate": 3.7044642605011114e-05, "loss": 0.5195, "step": 317 },
    { "epoch": 1.2909275215408007, "grad_norm": 0.5206883755354508, "learning_rate": 3.701487992136854e-05, "loss": 0.5463, "step": 318 },
    { "epoch": 1.2949822605169792, "grad_norm": 0.6322592912350286, "learning_rate": 3.69849802022222e-05, "loss": 0.5381, "step": 319 },
    { "epoch": 1.2990369994931577, "grad_norm": 0.50976856555698, "learning_rate": 3.6954943688380334e-05, "loss": 0.5337, "step": 320 },
    { "epoch": 1.3030917384693361, "grad_norm": 0.5689241990448529, "learning_rate": 3.692477062175289e-05, "loss": 0.5371, "step": 321 },
    { "epoch": 1.3071464774455144, "grad_norm": 0.5858422733430532, "learning_rate": 3.689446124534958e-05, "loss": 0.5185, "step": 322 },
    { "epoch": 1.3112012164216929, "grad_norm": 0.5150980661542439, "learning_rate": 3.686401580327799e-05, "loss": 0.5723, "step": 323 },
    { "epoch": 1.3152559553978713, "grad_norm": 0.6723271773916295, "learning_rate": 3.683343454074149e-05, "loss": 0.5165, "step": 324 },
    { "epoch": 1.3193106943740496, "grad_norm": 0.48189240381178344, "learning_rate": 3.6802717704037386e-05, "loss": 0.541, "step": 325 },
    { "epoch": 1.323365433350228, "grad_norm": 0.5375564443419653, "learning_rate": 3.6771865540554855e-05, "loss": 0.5275, "step": 326 },
    { "epoch": 1.3274201723264065, "grad_norm": 0.5243751094815126, "learning_rate": 3.674087829877297e-05, "loss": 0.5396, "step": 327 },
    { "epoch": 1.331474911302585, "grad_norm": 0.5009438956429274, "learning_rate": 3.6709756228258735e-05, "loss": 0.5393, "step": 328 },
    { "epoch": 1.3355296502787632, "grad_norm": 0.526887141689783, "learning_rate": 3.667849957966501e-05, "loss": 0.5305, "step": 329 },
    { "epoch": 1.3395843892549417, "grad_norm": 0.5728732776288902, "learning_rate": 3.6647108604728546e-05, "loss": 0.5186, "step": 330 },
    { "epoch": 1.3436391282311202, "grad_norm": 0.4878002753317562, "learning_rate": 3.661558355626795e-05, "loss": 0.5384, "step": 331 },
    { "epoch": 1.3476938672072984, "grad_norm": 0.5984357470604207, "learning_rate": 3.658392468818163e-05, "loss": 0.5389, "step": 332 },
    { "epoch": 1.351748606183477, "grad_norm": 0.45746468176179467, "learning_rate": 3.655213225544574e-05, "loss": 0.5452, "step": 333 },
    { "epoch": 1.3558033451596554, "grad_norm": 0.5288815754613176, "learning_rate": 3.652020651411218e-05, "loss": 0.5303, "step": 334 },
    { "epoch": 1.3598580841358339, "grad_norm": 0.5155283035957694, "learning_rate": 3.6488147721306474e-05, "loss": 0.5631, "step": 335 },
    { "epoch": 1.3639128231120121, "grad_norm": 0.5056019808696458, "learning_rate": 3.645595613522574e-05, "loss": 0.5524, "step": 336 },
    { "epoch": 1.3679675620881906, "grad_norm": 0.5278228931185853, "learning_rate": 3.642363201513657e-05, "loss": 0.5084, "step": 337 },
    { "epoch": 1.3720223010643688, "grad_norm": 0.6461715292614261, "learning_rate": 3.6391175621373006e-05, "loss": 0.5464, "step": 338 },
    { "epoch": 1.3760770400405473, "grad_norm": 0.5661481738074413, "learning_rate": 3.6358587215334355e-05, "loss": 0.5185, "step": 339 },
    { "epoch": 1.3801317790167258, "grad_norm": 0.5942531936530784, "learning_rate": 3.632586705948318e-05, "loss": 0.5403, "step": 340 },
| { | |
| "epoch": 1.3841865179929043, | |
| "grad_norm": 0.5850682980282983, | |
| "learning_rate": 3.629301541734311e-05, | |
| "loss": 0.5458, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.3882412569690827, | |
| "grad_norm": 0.5333424760688134, | |
| "learning_rate": 3.626003255349676e-05, | |
| "loss": 0.5263, | |
| "step": 342 | |
| }, | |
| { | |
| "epoch": 1.392295995945261, | |
| "grad_norm": 0.5429442872554063, | |
| "learning_rate": 3.622691873358357e-05, | |
| "loss": 0.5292, | |
| "step": 343 | |
| }, | |
| { | |
| "epoch": 1.3963507349214395, | |
| "grad_norm": 0.47037294092694815, | |
| "learning_rate": 3.61936742242977e-05, | |
| "loss": 0.5229, | |
| "step": 344 | |
| }, | |
| { | |
| "epoch": 1.4004054738976177, | |
| "grad_norm": 0.5942873647263273, | |
| "learning_rate": 3.6160299293385864e-05, | |
| "loss": 0.5458, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 1.4044602128737962, | |
| "grad_norm": 0.5273951524953545, | |
| "learning_rate": 3.612679420964516e-05, | |
| "loss": 0.5393, | |
| "step": 346 | |
| }, | |
| { | |
| "epoch": 1.4085149518499747, | |
| "grad_norm": 0.5195904407948269, | |
| "learning_rate": 3.609315924292092e-05, | |
| "loss": 0.5323, | |
| "step": 347 | |
| }, | |
| { | |
| "epoch": 1.4125696908261531, | |
| "grad_norm": 0.4463577929614556, | |
| "learning_rate": 3.6059394664104554e-05, | |
| "loss": 0.5342, | |
| "step": 348 | |
| }, | |
| { | |
| "epoch": 1.4166244298023316, | |
| "grad_norm": 0.4761680337625318, | |
| "learning_rate": 3.602550074513133e-05, | |
| "loss": 0.5424, | |
| "step": 349 | |
| }, | |
| { | |
| "epoch": 1.4206791687785099, | |
| "grad_norm": 0.5127915314976486, | |
| "learning_rate": 3.599147775897822e-05, | |
| "loss": 0.5422, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 1.4247339077546883, | |
| "grad_norm": 0.4814083880035532, | |
| "learning_rate": 3.595732597966167e-05, | |
| "loss": 0.5411, | |
| "step": 351 | |
| }, | |
| { | |
| "epoch": 1.4287886467308666, | |
| "grad_norm": 0.5511609873924513, | |
| "learning_rate": 3.592304568223542e-05, | |
| "loss": 0.5067, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.432843385707045, | |
| "grad_norm": 0.5328538616416393, | |
| "learning_rate": 3.588863714278826e-05, | |
| "loss": 0.5127, | |
| "step": 353 | |
| }, | |
| { | |
| "epoch": 1.4368981246832235, | |
| "grad_norm": 0.5488266333980802, | |
| "learning_rate": 3.585410063844186e-05, | |
| "loss": 0.5441, | |
| "step": 354 | |
| }, | |
| { | |
| "epoch": 1.440952863659402, | |
| "grad_norm": 0.504600725213016, | |
| "learning_rate": 3.581943644734846e-05, | |
| "loss": 0.5341, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 1.4450076026355803, | |
| "grad_norm": 0.5029063722907915, | |
| "learning_rate": 3.578464484868869e-05, | |
| "loss": 0.5359, | |
| "step": 356 | |
| }, | |
| { | |
| "epoch": 1.4490623416117587, | |
| "grad_norm": 0.4956156311289888, | |
| "learning_rate": 3.5749726122669316e-05, | |
| "loss": 0.5371, | |
| "step": 357 | |
| }, | |
| { | |
| "epoch": 1.4531170805879372, | |
| "grad_norm": 0.4906625786417074, | |
| "learning_rate": 3.5714680550520943e-05, | |
| "loss": 0.5212, | |
| "step": 358 | |
| }, | |
| { | |
| "epoch": 1.4571718195641155, | |
| "grad_norm": 0.5964700062030917, | |
| "learning_rate": 3.5679508414495794e-05, | |
| "loss": 0.5648, | |
| "step": 359 | |
| }, | |
| { | |
| "epoch": 1.461226558540294, | |
| "grad_norm": 0.4516001940398592, | |
| "learning_rate": 3.564420999786543e-05, | |
| "loss": 0.5319, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 1.4652812975164724, | |
| "grad_norm": 0.6184123814858841, | |
| "learning_rate": 3.560878558491842e-05, | |
| "loss": 0.5228, | |
| "step": 361 | |
| }, | |
| { | |
| "epoch": 1.4693360364926509, | |
| "grad_norm": 0.4956415544231763, | |
| "learning_rate": 3.5573235460958145e-05, | |
| "loss": 0.5413, | |
| "step": 362 | |
| }, | |
| { | |
| "epoch": 1.4733907754688291, | |
| "grad_norm": 0.5060737178518595, | |
| "learning_rate": 3.553755991230039e-05, | |
| "loss": 0.5374, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.4774455144450076, | |
| "grad_norm": 0.5207016195924674, | |
| "learning_rate": 3.5501759226271144e-05, | |
| "loss": 0.5308, | |
| "step": 364 | |
| }, | |
| { | |
| "epoch": 1.481500253421186, | |
| "grad_norm": 0.4775863315679151, | |
| "learning_rate": 3.546583369120419e-05, | |
| "loss": 0.5224, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 1.4855549923973643, | |
| "grad_norm": 0.46080617734858675, | |
| "learning_rate": 3.5429783596438864e-05, | |
| "loss": 0.5288, | |
| "step": 366 | |
| }, | |
| { | |
| "epoch": 1.4896097313735428, | |
| "grad_norm": 0.66877228131714, | |
| "learning_rate": 3.539360923231766e-05, | |
| "loss": 0.5241, | |
| "step": 367 | |
| }, | |
| { | |
| "epoch": 1.4936644703497213, | |
| "grad_norm": 0.48274572205584454, | |
| "learning_rate": 3.535731089018394e-05, | |
| "loss": 0.5278, | |
| "step": 368 | |
| }, | |
| { | |
| "epoch": 1.4977192093258997, | |
| "grad_norm": 0.5116727879741129, | |
| "learning_rate": 3.532088886237956e-05, | |
| "loss": 0.5412, | |
| "step": 369 | |
| }, | |
| { | |
| "epoch": 1.501773948302078, | |
| "grad_norm": 0.44640795006887096, | |
| "learning_rate": 3.528434344224253e-05, | |
| "loss": 0.553, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 1.5058286872782565, | |
| "grad_norm": 0.45238109884301936, | |
| "learning_rate": 3.524767492410464e-05, | |
| "loss": 0.512, | |
| "step": 371 | |
| }, | |
| { | |
| "epoch": 1.5098834262544347, | |
| "grad_norm": 0.4870707312978545, | |
| "learning_rate": 3.521088360328908e-05, | |
| "loss": 0.5421, | |
| "step": 372 | |
| }, | |
| { | |
| "epoch": 1.5139381652306132, | |
| "grad_norm": 0.48851249073331915, | |
| "learning_rate": 3.517396977610811e-05, | |
| "loss": 0.553, | |
| "step": 373 | |
| }, | |
| { | |
| "epoch": 1.5179929042067917, | |
| "grad_norm": 0.4956952448579774, | |
| "learning_rate": 3.5136933739860595e-05, | |
| "loss": 0.5423, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.5220476431829701, | |
| "grad_norm": 0.5782921657739899, | |
| "learning_rate": 3.509977579282971e-05, | |
| "loss": 0.5271, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 1.5261023821591486, | |
| "grad_norm": 0.5249983066236462, | |
| "learning_rate": 3.5062496234280424e-05, | |
| "loss": 0.5403, | |
| "step": 376 | |
| }, | |
| { | |
| "epoch": 1.5301571211353269, | |
| "grad_norm": 0.5478128513689535, | |
| "learning_rate": 3.502509536445719e-05, | |
| "loss": 0.5252, | |
| "step": 377 | |
| }, | |
| { | |
| "epoch": 1.5342118601115053, | |
| "grad_norm": 0.6173517839190047, | |
| "learning_rate": 3.498757348458147e-05, | |
| "loss": 0.5515, | |
| "step": 378 | |
| }, | |
| { | |
| "epoch": 1.5382665990876836, | |
| "grad_norm": 0.4641534498295412, | |
| "learning_rate": 3.4949930896849324e-05, | |
| "loss": 0.5149, | |
| "step": 379 | |
| }, | |
| { | |
| "epoch": 1.542321338063862, | |
| "grad_norm": 0.5500271294123625, | |
| "learning_rate": 3.491216790442899e-05, | |
| "loss": 0.5441, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 1.5463760770400405, | |
| "grad_norm": 0.5498120190656972, | |
| "learning_rate": 3.487428481145839e-05, | |
| "loss": 0.5243, | |
| "step": 381 | |
| }, | |
| { | |
| "epoch": 1.550430816016219, | |
| "grad_norm": 0.5885044583121567, | |
| "learning_rate": 3.483628192304278e-05, | |
| "loss": 0.5313, | |
| "step": 382 | |
| }, | |
| { | |
| "epoch": 1.5544855549923975, | |
| "grad_norm": 0.5766803604146636, | |
| "learning_rate": 3.479815954525219e-05, | |
| "loss": 0.5281, | |
| "step": 383 | |
| }, | |
| { | |
| "epoch": 1.5585402939685757, | |
| "grad_norm": 0.5243422140746384, | |
| "learning_rate": 3.475991798511899e-05, | |
| "loss": 0.5459, | |
| "step": 384 | |
| }, | |
| { | |
| "epoch": 1.5625950329447542, | |
| "grad_norm": 0.4944265458657894, | |
| "learning_rate": 3.4721557550635464e-05, | |
| "loss": 0.5276, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.5666497719209325, | |
| "grad_norm": 0.5326287391447962, | |
| "learning_rate": 3.468307855075128e-05, | |
| "loss": 0.5563, | |
| "step": 386 | |
| }, | |
| { | |
| "epoch": 1.570704510897111, | |
| "grad_norm": 2.6035134395372577, | |
| "learning_rate": 3.4644481295371005e-05, | |
| "loss": 0.5476, | |
| "step": 387 | |
| }, | |
| { | |
| "epoch": 1.5747592498732894, | |
| "grad_norm": 0.6258440108575806, | |
| "learning_rate": 3.460576609535163e-05, | |
| "loss": 0.5322, | |
| "step": 388 | |
| }, | |
| { | |
| "epoch": 1.5788139888494679, | |
| "grad_norm": 0.4810662385963842, | |
| "learning_rate": 3.456693326250006e-05, | |
| "loss": 0.5428, | |
| "step": 389 | |
| }, | |
| { | |
| "epoch": 1.5828687278256464, | |
| "grad_norm": 0.5623407303060836, | |
| "learning_rate": 3.452798310957058e-05, | |
| "loss": 0.5571, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 1.5869234668018246, | |
| "grad_norm": 0.44396895174513346, | |
| "learning_rate": 3.4488915950262386e-05, | |
| "loss": 0.5296, | |
| "step": 391 | |
| }, | |
| { | |
| "epoch": 1.590978205778003, | |
| "grad_norm": 0.4958270417824638, | |
| "learning_rate": 3.4449732099216985e-05, | |
| "loss": 0.5147, | |
| "step": 392 | |
| }, | |
| { | |
| "epoch": 1.5950329447541813, | |
| "grad_norm": 0.46159842793845884, | |
| "learning_rate": 3.441043187201574e-05, | |
| "loss": 0.5363, | |
| "step": 393 | |
| }, | |
| { | |
| "epoch": 1.5990876837303598, | |
| "grad_norm": 0.5161643738943553, | |
| "learning_rate": 3.437101558517728e-05, | |
| "loss": 0.5268, | |
| "step": 394 | |
| }, | |
| { | |
| "epoch": 1.6031424227065383, | |
| "grad_norm": 0.4820851667279659, | |
| "learning_rate": 3.433148355615496e-05, | |
| "loss": 0.5133, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 1.6071971616827168, | |
| "grad_norm": 0.48513810682155595, | |
| "learning_rate": 3.4291836103334294e-05, | |
| "loss": 0.5373, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.6112519006588952, | |
| "grad_norm": 0.5181766436927846, | |
| "learning_rate": 3.425207354603043e-05, | |
| "loss": 0.5363, | |
| "step": 397 | |
| }, | |
| { | |
| "epoch": 1.6153066396350735, | |
| "grad_norm": 0.5059590753444507, | |
| "learning_rate": 3.421219620448553e-05, | |
| "loss": 0.554, | |
| "step": 398 | |
| }, | |
| { | |
| "epoch": 1.619361378611252, | |
| "grad_norm": 0.4940994689949252, | |
| "learning_rate": 3.417220439986623e-05, | |
| "loss": 0.5288, | |
| "step": 399 | |
| }, | |
| { | |
| "epoch": 1.6234161175874302, | |
| "grad_norm": 0.5179628733409677, | |
| "learning_rate": 3.4132098454261024e-05, | |
| "loss": 0.5396, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 1.6274708565636087, | |
| "grad_norm": 0.4851519221329909, | |
| "learning_rate": 3.4091878690677676e-05, | |
| "loss": 0.5385, | |
| "step": 401 | |
| }, | |
| { | |
| "epoch": 1.6315255955397872, | |
| "grad_norm": 0.5941817086304318, | |
| "learning_rate": 3.405154543304065e-05, | |
| "loss": 0.5135, | |
| "step": 402 | |
| }, | |
| { | |
| "epoch": 1.6355803345159656, | |
| "grad_norm": 0.561310083920348, | |
| "learning_rate": 3.401109900618843e-05, | |
| "loss": 0.5467, | |
| "step": 403 | |
| }, | |
| { | |
| "epoch": 1.639635073492144, | |
| "grad_norm": 0.5420360500350442, | |
| "learning_rate": 3.3970539735870996e-05, | |
| "loss": 0.5539, | |
| "step": 404 | |
| }, | |
| { | |
| "epoch": 1.6436898124683224, | |
| "grad_norm": 0.5939028717263156, | |
| "learning_rate": 3.392986794874714e-05, | |
| "loss": 0.5272, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 1.6477445514445006, | |
| "grad_norm": 0.5090038493094223, | |
| "learning_rate": 3.388908397238184e-05, | |
| "loss": 0.556, | |
| "step": 406 | |
| }, | |
| { | |
| "epoch": 1.651799290420679, | |
| "grad_norm": 0.5590088456106946, | |
| "learning_rate": 3.384818813524362e-05, | |
| "loss": 0.5307, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.6558540293968576, | |
| "grad_norm": 0.4972841137206524, | |
| "learning_rate": 3.380718076670195e-05, | |
| "loss": 0.5131, | |
| "step": 408 | |
| }, | |
| { | |
| "epoch": 1.659908768373036, | |
| "grad_norm": 0.6111036878491439, | |
| "learning_rate": 3.376606219702454e-05, | |
| "loss": 0.5235, | |
| "step": 409 | |
| }, | |
| { | |
| "epoch": 1.6639635073492145, | |
| "grad_norm": 0.5252236211887534, | |
| "learning_rate": 3.372483275737468e-05, | |
| "loss": 0.5489, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 1.6680182463253928, | |
| "grad_norm": 0.5599101064971196, | |
| "learning_rate": 3.368349277980861e-05, | |
| "loss": 0.5441, | |
| "step": 411 | |
| }, | |
| { | |
| "epoch": 1.6720729853015712, | |
| "grad_norm": 0.5522006831385899, | |
| "learning_rate": 3.3642042597272844e-05, | |
| "loss": 0.5462, | |
| "step": 412 | |
| }, | |
| { | |
| "epoch": 1.6761277242777495, | |
| "grad_norm": 0.5580887951556128, | |
| "learning_rate": 3.360048254360144e-05, | |
| "loss": 0.5558, | |
| "step": 413 | |
| }, | |
| { | |
| "epoch": 1.680182463253928, | |
| "grad_norm": 0.520869625593591, | |
| "learning_rate": 3.355881295351336e-05, | |
| "loss": 0.5222, | |
| "step": 414 | |
| }, | |
| { | |
| "epoch": 1.6842372022301064, | |
| "grad_norm": 0.5003440341412961, | |
| "learning_rate": 3.351703416260975e-05, | |
| "loss": 0.5646, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 1.688291941206285, | |
| "grad_norm": 0.5364881366475122, | |
| "learning_rate": 3.347514650737126e-05, | |
| "loss": 0.5386, | |
| "step": 416 | |
| }, | |
| { | |
| "epoch": 1.6923466801824634, | |
| "grad_norm": 0.4914345427547982, | |
| "learning_rate": 3.3433150325155295e-05, | |
| "loss": 0.535, | |
| "step": 417 | |
| }, | |
| { | |
| "epoch": 1.6964014191586416, | |
| "grad_norm": 0.5432848620089233, | |
| "learning_rate": 3.339104595419334e-05, | |
| "loss": 0.5453, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.70045615813482, | |
| "grad_norm": 0.5458384243945, | |
| "learning_rate": 3.3348833733588204e-05, | |
| "loss": 0.5255, | |
| "step": 419 | |
| }, | |
| { | |
| "epoch": 1.7045108971109983, | |
| "grad_norm": 0.5048768442341492, | |
| "learning_rate": 3.3306514003311305e-05, | |
| "loss": 0.5138, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 1.7085656360871768, | |
| "grad_norm": 0.5830659104068939, | |
| "learning_rate": 3.326408710419996e-05, | |
| "loss": 0.5322, | |
| "step": 421 | |
| }, | |
| { | |
| "epoch": 1.7126203750633553, | |
| "grad_norm": 0.4421170526539174, | |
| "learning_rate": 3.322155337795454e-05, | |
| "loss": 0.5317, | |
| "step": 422 | |
| }, | |
| { | |
| "epoch": 1.7166751140395338, | |
| "grad_norm": 0.46738927578621453, | |
| "learning_rate": 3.317891316713587e-05, | |
| "loss": 0.5268, | |
| "step": 423 | |
| }, | |
| { | |
| "epoch": 1.7207298530157122, | |
| "grad_norm": 0.43954399294717533, | |
| "learning_rate": 3.313616681516231e-05, | |
| "loss": 0.5033, | |
| "step": 424 | |
| }, | |
| { | |
| "epoch": 1.7247845919918905, | |
| "grad_norm": 0.5301730762424381, | |
| "learning_rate": 3.309331466630713e-05, | |
| "loss": 0.5359, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 1.728839330968069, | |
| "grad_norm": 0.4974641831720144, | |
| "learning_rate": 3.305035706569563e-05, | |
| "loss": 0.5369, | |
| "step": 426 | |
| }, | |
| { | |
| "epoch": 1.7328940699442472, | |
| "grad_norm": 0.5085663630067726, | |
| "learning_rate": 3.3007294359302433e-05, | |
| "loss": 0.5121, | |
| "step": 427 | |
| }, | |
| { | |
| "epoch": 1.7369488089204257, | |
| "grad_norm": 0.4781764748466198, | |
| "learning_rate": 3.296412689394864e-05, | |
| "loss": 0.5424, | |
| "step": 428 | |
| }, | |
| { | |
| "epoch": 1.7410035478966042, | |
| "grad_norm": 0.5022048925017011, | |
| "learning_rate": 3.292085501729909e-05, | |
| "loss": 0.5283, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.7450582868727826, | |
| "grad_norm": 0.49529966343334775, | |
| "learning_rate": 3.2877479077859534e-05, | |
| "loss": 0.5245, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 1.7491130258489611, | |
| "grad_norm": 0.47802835185709214, | |
| "learning_rate": 3.283399942497381e-05, | |
| "loss": 0.5233, | |
| "step": 431 | |
| }, | |
| { | |
| "epoch": 1.7531677648251394, | |
| "grad_norm": 0.5510415336443616, | |
| "learning_rate": 3.279041640882108e-05, | |
| "loss": 0.5509, | |
| "step": 432 | |
| }, | |
| { | |
| "epoch": 1.7572225038013178, | |
| "grad_norm": 0.48110439821751527, | |
| "learning_rate": 3.2746730380412964e-05, | |
| "loss": 0.5458, | |
| "step": 433 | |
| }, | |
| { | |
| "epoch": 1.761277242777496, | |
| "grad_norm": 0.5651952979493425, | |
| "learning_rate": 3.2702941691590726e-05, | |
| "loss": 0.538, | |
| "step": 434 | |
| }, | |
| { | |
| "epoch": 1.7653319817536746, | |
| "grad_norm": 0.4415340649904317, | |
| "learning_rate": 3.265905069502244e-05, | |
| "loss": 0.5248, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 1.769386720729853, | |
| "grad_norm": 0.5882088681415918, | |
| "learning_rate": 3.261505774420016e-05, | |
| "loss": 0.5433, | |
| "step": 436 | |
| }, | |
| { | |
| "epoch": 1.7734414597060315, | |
| "grad_norm": 0.8980241511942082, | |
| "learning_rate": 3.257096319343707e-05, | |
| "loss": 0.5716, | |
| "step": 437 | |
| }, | |
| { | |
| "epoch": 1.77749619868221, | |
| "grad_norm": 0.509641809505027, | |
| "learning_rate": 3.2526767397864614e-05, | |
| "loss": 0.5305, | |
| "step": 438 | |
| }, | |
| { | |
| "epoch": 1.7815509376583882, | |
| "grad_norm": 0.4808299965743913, | |
| "learning_rate": 3.248247071342966e-05, | |
| "loss": 0.5261, | |
| "step": 439 | |
| }, | |
| { | |
| "epoch": 1.7856056766345665, | |
| "grad_norm": 0.5334613562199398, | |
| "learning_rate": 3.243807349689161e-05, | |
| "loss": 0.5338, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 1.789660415610745, | |
| "grad_norm": 0.49099683256519816, | |
| "learning_rate": 3.2393576105819544e-05, | |
| "loss": 0.5019, | |
| "step": 441 | |
| }, | |
| { | |
| "epoch": 1.7937151545869234, | |
| "grad_norm": 0.5517096871871732, | |
| "learning_rate": 3.2348978898589333e-05, | |
| "loss": 0.5405, | |
| "step": 442 | |
| }, | |
| { | |
| "epoch": 1.797769893563102, | |
| "grad_norm": 0.4783643584397025, | |
| "learning_rate": 3.230428223438075e-05, | |
| "loss": 0.525, | |
| "step": 443 | |
| }, | |
| { | |
| "epoch": 1.8018246325392804, | |
| "grad_norm": 0.46894895880465415, | |
| "learning_rate": 3.225948647317459e-05, | |
| "loss": 0.52, | |
| "step": 444 | |
| }, | |
| { | |
| "epoch": 1.8058793715154589, | |
| "grad_norm": 0.45510230722092476, | |
| "learning_rate": 3.2214591975749745e-05, | |
| "loss": 0.5417, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 1.809934110491637, | |
| "grad_norm": 0.4816100222096817, | |
| "learning_rate": 3.216959910368034e-05, | |
| "loss": 0.5475, | |
| "step": 446 | |
| }, | |
| { | |
| "epoch": 1.8139888494678154, | |
| "grad_norm": 0.45400023942209033, | |
| "learning_rate": 3.212450821933277e-05, | |
| "loss": 0.5401, | |
| "step": 447 | |
| }, | |
| { | |
| "epoch": 1.8180435884439938, | |
| "grad_norm": 0.4615435336089353, | |
| "learning_rate": 3.207931968586281e-05, | |
| "loss": 0.508, | |
| "step": 448 | |
| }, | |
| { | |
| "epoch": 1.8220983274201723, | |
| "grad_norm": 0.49089976896884613, | |
| "learning_rate": 3.203403386721272e-05, | |
| "loss": 0.5577, | |
| "step": 449 | |
| }, | |
| { | |
| "epoch": 1.8261530663963508, | |
| "grad_norm": 0.5509226939141177, | |
| "learning_rate": 3.1988651128108245e-05, | |
| "loss": 0.5473, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 1.8302078053725293, | |
| "grad_norm": 0.44818948714898676, | |
| "learning_rate": 3.194317183405573e-05, | |
| "loss": 0.5395, | |
| "step": 451 | |
| }, | |
| { | |
| "epoch": 1.8342625443487075, | |
| "grad_norm": 0.5055755261923557, | |
| "learning_rate": 3.189759635133914e-05, | |
| "loss": 0.5512, | |
| "step": 452 | |
| }, | |
| { | |
| "epoch": 1.838317283324886, | |
| "grad_norm": 0.4109031998193964, | |
| "learning_rate": 3.185192504701718e-05, | |
| "loss": 0.5475, | |
| "step": 453 | |
| }, | |
| { | |
| "epoch": 1.8423720223010642, | |
| "grad_norm": 0.5786829193210274, | |
| "learning_rate": 3.1806158288920234e-05, | |
| "loss": 0.525, | |
| "step": 454 | |
| }, | |
| { | |
| "epoch": 1.8464267612772427, | |
| "grad_norm": 0.4486101306631423, | |
| "learning_rate": 3.1760296445647477e-05, | |
| "loss": 0.5645, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 1.8504815002534212, | |
| "grad_norm": 0.5297784092734797, | |
| "learning_rate": 3.1714339886563896e-05, | |
| "loss": 0.5422, | |
| "step": 456 | |
| }, | |
| { | |
| "epoch": 1.8545362392295996, | |
| "grad_norm": 0.49083680759995885, | |
| "learning_rate": 3.166828898179731e-05, | |
| "loss": 0.5123, | |
| "step": 457 | |
| }, | |
| { | |
| "epoch": 1.8585909782057781, | |
| "grad_norm": 0.4864951857743798, | |
| "learning_rate": 3.162214410223536e-05, | |
| "loss": 0.5214, | |
| "step": 458 | |
| }, | |
| { | |
| "epoch": 1.8626457171819564, | |
| "grad_norm": 0.460905485383442, | |
| "learning_rate": 3.157590561952257e-05, | |
| "loss": 0.5104, | |
| "step": 459 | |
| }, | |
| { | |
| "epoch": 1.8667004561581348, | |
| "grad_norm": 0.5165462744307608, | |
| "learning_rate": 3.152957390605732e-05, | |
| "loss": 0.5546, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 1.870755195134313, | |
| "grad_norm": 0.43255359476263083, | |
| "learning_rate": 3.148314933498886e-05, | |
| "loss": 0.5366, | |
| "step": 461 | |
| }, | |
| { | |
| "epoch": 1.8748099341104916, | |
| "grad_norm": 0.48036008287513215, | |
| "learning_rate": 3.143663228021431e-05, | |
| "loss": 0.5402, | |
| "step": 462 | |
| }, | |
| { | |
| "epoch": 1.87886467308667, | |
| "grad_norm": 0.47314553250052255, | |
| "learning_rate": 3.1390023116375624e-05, | |
| "loss": 0.5319, | |
| "step": 463 | |
| }, | |
| { | |
| "epoch": 1.8829194120628485, | |
| "grad_norm": 0.514252925655343, | |
| "learning_rate": 3.134332221885661e-05, | |
| "loss": 0.5347, | |
| "step": 464 | |
| }, | |
| { | |
| "epoch": 1.886974151039027, | |
| "grad_norm": 0.5142539640243031, | |
| "learning_rate": 3.129652996377987e-05, | |
| "loss": 0.5432, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 1.8910288900152052, | |
| "grad_norm": 0.5315747901151392, | |
| "learning_rate": 3.12496467280038e-05, | |
| "loss": 0.5311, | |
| "step": 466 | |
| }, | |
| { | |
| "epoch": 1.8950836289913837, | |
| "grad_norm": 0.44734745454945035, | |
| "learning_rate": 3.120267288911952e-05, | |
| "loss": 0.5279, | |
| "step": 467 | |
| }, | |
| { | |
| "epoch": 1.899138367967562, | |
| "grad_norm": 0.49504605802902835, | |
| "learning_rate": 3.11556088254479e-05, | |
| "loss": 0.5387, | |
| "step": 468 | |
| }, | |
| { | |
| "epoch": 1.9031931069437404, | |
| "grad_norm": 0.5009752421254353, | |
| "learning_rate": 3.11084549160364e-05, | |
| "loss": 0.5406, | |
| "step": 469 | |
| }, | |
| { | |
| "epoch": 1.907247845919919, | |
| "grad_norm": 0.4655278319823632, | |
| "learning_rate": 3.106121154065615e-05, | |
| "loss": 0.5385, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 1.9113025848960974, | |
| "grad_norm": 0.49026279539403034, | |
| "learning_rate": 3.1013879079798805e-05, | |
| "loss": 0.5142, | |
| "step": 471 | |
| }, | |
| { | |
| "epoch": 1.9153573238722759, | |
| "grad_norm": 0.477240083669834, | |
| "learning_rate": 3.096645791467348e-05, | |
| "loss": 0.5172, | |
| "step": 472 | |
| }, | |
| { | |
| "epoch": 1.9194120628484541, | |
| "grad_norm": 0.5549564840556759, | |
| "learning_rate": 3.091894842720373e-05, | |
| "loss": 0.5375, | |
| "step": 473 | |
| }, | |
| { | |
| "epoch": 1.9234668018246326, | |
| "grad_norm": 0.5179330320251824, | |
| "learning_rate": 3.0871351000024425e-05, | |
| "loss": 0.5543, | |
| "step": 474 | |
| }, | |
| { | |
| "epoch": 1.9275215408008108, | |
| "grad_norm": 0.49761554362326116, | |
| "learning_rate": 3.0823666016478716e-05, | |
| "loss": 0.5321, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 1.9315762797769893, | |
| "grad_norm": 0.5209631079429944, | |
| "learning_rate": 3.0775893860614896e-05, | |
| "loss": 0.5348, | |
| "step": 476 | |
| }, | |
| { | |
| "epoch": 1.9356310187531678, | |
| "grad_norm": 0.5078173890091245, | |
| "learning_rate": 3.0728034917183336e-05, | |
| "loss": 0.5303, | |
| "step": 477 | |
| }, | |
| { | |
| "epoch": 1.9396857577293463, | |
| "grad_norm": 0.48133933547201196, | |
| "learning_rate": 3.06800895716334e-05, | |
| "loss": 0.5388, | |
| "step": 478 | |
| }, | |
| { | |
| "epoch": 1.9437404967055247, | |
| "grad_norm": 0.5233547338398804, | |
| "learning_rate": 3.063205821011029e-05, | |
| "loss": 0.5278, | |
| "step": 479 | |
| }, | |
| { | |
| "epoch": 1.947795235681703, | |
| "grad_norm": 0.49342115532441005, | |
| "learning_rate": 3.0583941219452016e-05, | |
| "loss": 0.5438, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 1.9518499746578812, | |
| "grad_norm": 0.4244972146497058, | |
| "learning_rate": 3.053573898718618e-05, | |
| "loss": 0.5315, | |
| "step": 481 | |
| }, | |
| { | |
| "epoch": 1.9559047136340597, | |
| "grad_norm": 0.5229600740799853, | |
| "learning_rate": 3.0487451901526956e-05, | |
| "loss": 0.5305, | |
| "step": 482 | |
| }, | |
| { | |
| "epoch": 1.9599594526102382, | |
| "grad_norm": 0.46883505549131543, | |
| "learning_rate": 3.0439080351371875e-05, | |
| "loss": 0.536, | |
| "step": 483 | |
| }, | |
| { | |
| "epoch": 1.9640141915864167, | |
| "grad_norm": 0.5634634624180843, | |
| "learning_rate": 3.0390624726298764e-05, | |
| "loss": 0.5277, | |
| "step": 484 | |
| }, | |
| { | |
| "epoch": 1.9680689305625951, | |
| "grad_norm": 0.45877238281158705, | |
| "learning_rate": 3.034208541656255e-05, | |
| "loss": 0.548, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 1.9721236695387736, | |
| "grad_norm": 0.507123641273262, | |
| "learning_rate": 3.029346281309218e-05, | |
| "loss": 0.5317, | |
| "step": 486 | |
| }, | |
| { | |
| "epoch": 1.9761784085149519, | |
| "grad_norm": 0.45884827938264094, | |
| "learning_rate": 3.0244757307487415e-05, | |
| "loss": 0.5562, | |
| "step": 487 | |
| }, | |
| { | |
| "epoch": 1.98023314749113, | |
| "grad_norm": 0.4877600804194238, | |
| "learning_rate": 3.019596929201569e-05, | |
| "loss": 0.5574, | |
| "step": 488 | |
| }, | |
| { | |
| "epoch": 1.9842878864673086, | |
| "grad_norm": 0.45635131041346894, | |
| "learning_rate": 3.0147099159608985e-05, | |
| "loss": 0.5204, | |
| "step": 489 | |
| }, | |
| { | |
| "epoch": 1.988342625443487, | |
| "grad_norm": 0.4974258569485135, | |
| "learning_rate": 3.0098147303860616e-05, | |
| "loss": 0.5356, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 1.9923973644196655, | |
| "grad_norm": 0.47201410049283277, | |
| "learning_rate": 3.0049114119022117e-05, | |
| "loss": 0.5333, | |
| "step": 491 | |
| }, | |
| { | |
| "epoch": 1.996452103395844, | |
| "grad_norm": 0.4182359857009549, | |
| "learning_rate": 3.0000000000000004e-05, | |
| "loss": 0.5179, | |
| "step": 492 | |
| }, | |
| { | |
| "epoch": 2.0040506329113925, | |
| "grad_norm": 0.8514659742359979, | |
| "learning_rate": 2.995080534235264e-05, | |
| "loss": 0.4399, | |
| "step": 493 | |
| }, | |
| { | |
| "epoch": 2.008101265822785, | |
| "grad_norm": 0.6349238377917809, | |
| "learning_rate": 2.9901530542287044e-05, | |
| "loss": 0.389, | |
| "step": 494 | |
| }, | |
| { | |
| "epoch": 2.012151898734177, | |
| "grad_norm": 0.9061423615945333, | |
| "learning_rate": 2.9852175996655676e-05, | |
| "loss": 0.3673, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 2.0162025316455696, | |
| "grad_norm": 0.6522084160793656, | |
| "learning_rate": 2.980274210295326e-05, | |
| "loss": 0.3749, | |
| "step": 496 | |
| }, | |
| { | |
| "epoch": 2.020253164556962, | |
| "grad_norm": 0.6077870581138277, | |
| "learning_rate": 2.9753229259313578e-05, | |
| "loss": 0.3551, | |
| "step": 497 | |
| }, | |
| { | |
| "epoch": 2.0243037974683546, | |
| "grad_norm": 0.553831455743028, | |
| "learning_rate": 2.9703637864506274e-05, | |
| "loss": 0.3934, | |
| "step": 498 | |
| }, | |
| { | |
| "epoch": 2.0283544303797467, | |
| "grad_norm": 0.5464039098966776, | |
| "learning_rate": 2.965396831793362e-05, | |
| "loss": 0.3804, | |
| "step": 499 | |
| }, | |
| { | |
| "epoch": 2.0324050632911392, | |
| "grad_norm": 0.5718192802534139, | |
| "learning_rate": 2.9604221019627316e-05, | |
| "loss": 0.3831, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 2.0364556962025318, | |
| "grad_norm": 0.6045329581908979, | |
| "learning_rate": 2.955439637024526e-05, | |
| "loss": 0.3872, | |
| "step": 501 | |
| }, | |
| { | |
| "epoch": 2.0405063291139243, | |
| "grad_norm": 0.5196942241125314, | |
| "learning_rate": 2.9504494771068334e-05, | |
| "loss": 0.3935, | |
| "step": 502 | |
| }, | |
| { | |
| "epoch": 2.0445569620253163, | |
| "grad_norm": 0.5754960964732723, | |
| "learning_rate": 2.9454516623997156e-05, | |
| "loss": 0.3941, | |
| "step": 503 | |
| }, | |
| { | |
| "epoch": 2.048607594936709, | |
| "grad_norm": 0.5752782606552806, | |
| "learning_rate": 2.9404462331548847e-05, | |
| "loss": 0.3729, | |
| "step": 504 | |
| }, | |
| { | |
| "epoch": 2.0526582278481014, | |
| "grad_norm": 0.5621345611474224, | |
| "learning_rate": 2.93543322968538e-05, | |
| "loss": 0.3661, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 2.056708860759494, | |
| "grad_norm": 0.634996690632848, | |
| "learning_rate": 2.9304126923652428e-05, | |
| "loss": 0.3799, | |
| "step": 506 | |
| }, | |
| { | |
| "epoch": 2.060759493670886, | |
| "grad_norm": 0.5718480588242677, | |
| "learning_rate": 2.9253846616291896e-05, | |
| "loss": 0.397, | |
| "step": 507 | |
| }, | |
| { | |
| "epoch": 2.0648101265822785, | |
| "grad_norm": 0.496144593015699, | |
| "learning_rate": 2.9203491779722896e-05, | |
| "loss": 0.3691, | |
| "step": 508 | |
| }, | |
| { | |
| "epoch": 2.068860759493671, | |
| "grad_norm": 0.6145546482098673, | |
| "learning_rate": 2.9153062819496357e-05, | |
| "loss": 0.3838, | |
| "step": 509 | |
| }, | |
| { | |
| "epoch": 2.0729113924050635, | |
| "grad_norm": 0.46273333127496863, | |
| "learning_rate": 2.9102560141760178e-05, | |
| "loss": 0.3921, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 2.0769620253164556, | |
| "grad_norm": 0.4879514365498469, | |
| "learning_rate": 2.9051984153256004e-05, | |
| "loss": 0.3722, | |
| "step": 511 | |
| }, | |
| { | |
| "epoch": 2.081012658227848, | |
| "grad_norm": 0.4688509947375713, | |
| "learning_rate": 2.900133526131588e-05, | |
| "loss": 0.3709, | |
| "step": 512 | |
| }, | |
| { | |
| "epoch": 2.0850632911392406, | |
| "grad_norm": 0.4805031988742675, | |
| "learning_rate": 2.8950613873859025e-05, | |
| "loss": 0.367, | |
| "step": 513 | |
| }, | |
| { | |
| "epoch": 2.089113924050633, | |
| "grad_norm": 0.5194697326094561, | |
| "learning_rate": 2.8899820399388515e-05, | |
| "loss": 0.3781, | |
| "step": 514 | |
| }, | |
| { | |
| "epoch": 2.093164556962025, | |
| "grad_norm": 0.5074432805079186, | |
| "learning_rate": 2.8848955246988012e-05, | |
| "loss": 0.3928, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 2.0972151898734177, | |
| "grad_norm": 0.4957122474905427, | |
| "learning_rate": 2.879801882631847e-05, | |
| "loss": 0.3808, | |
| "step": 516 | |
| }, | |
| { | |
| "epoch": 2.1012658227848102, | |
| "grad_norm": 0.4615971405441044, | |
| "learning_rate": 2.8747011547614808e-05, | |
| "loss": 0.3744, | |
| "step": 517 | |
| }, | |
| { | |
| "epoch": 2.1053164556962027, | |
| "grad_norm": 0.4395519719873443, | |
| "learning_rate": 2.8695933821682635e-05, | |
| "loss": 0.3628, | |
| "step": 518 | |
| }, | |
| { | |
| "epoch": 2.109367088607595, | |
| "grad_norm": 0.4646395339909091, | |
| "learning_rate": 2.864478605989494e-05, | |
| "loss": 0.3815, | |
| "step": 519 | |
| }, | |
| { | |
| "epoch": 2.1134177215189873, | |
| "grad_norm": 0.4436803364537414, | |
| "learning_rate": 2.8593568674188765e-05, | |
| "loss": 0.3877, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 2.11746835443038, | |
| "grad_norm": 0.44069091066092003, | |
| "learning_rate": 2.8542282077061892e-05, | |
| "loss": 0.3986, | |
| "step": 521 | |
| }, | |
| { | |
| "epoch": 2.1215189873417724, | |
| "grad_norm": 0.4292878422341503, | |
| "learning_rate": 2.8490926681569523e-05, | |
| "loss": 0.3846, | |
| "step": 522 | |
| }, | |
| { | |
| "epoch": 2.1255696202531644, | |
| "grad_norm": 0.4180214731459623, | |
| "learning_rate": 2.8439502901320956e-05, | |
| "loss": 0.3721, | |
| "step": 523 | |
| }, | |
| { | |
| "epoch": 2.129620253164557, | |
| "grad_norm": 0.45778394470256106, | |
| "learning_rate": 2.8388011150476237e-05, | |
| "loss": 0.3741, | |
| "step": 524 | |
| }, | |
| { | |
| "epoch": 2.1336708860759495, | |
| "grad_norm": 0.4414769691721126, | |
| "learning_rate": 2.8336451843742866e-05, | |
| "loss": 0.3708, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 2.137721518987342, | |
| "grad_norm": 0.44197762460745843, | |
| "learning_rate": 2.8284825396372387e-05, | |
| "loss": 0.3899, | |
| "step": 526 | |
| }, | |
| { | |
| "epoch": 2.141772151898734, | |
| "grad_norm": 0.46521853549160674, | |
| "learning_rate": 2.8233132224157132e-05, | |
| "loss": 0.396, | |
| "step": 527 | |
| }, | |
| { | |
| "epoch": 2.1458227848101266, | |
| "grad_norm": 0.4415683747545979, | |
| "learning_rate": 2.8181372743426805e-05, | |
| "loss": 0.382, | |
| "step": 528 | |
| }, | |
| { | |
| "epoch": 2.149873417721519, | |
| "grad_norm": 0.5139737887003896, | |
| "learning_rate": 2.8129547371045128e-05, | |
| "loss": 0.3901, | |
| "step": 529 | |
| }, | |
| { | |
| "epoch": 2.1539240506329116, | |
| "grad_norm": 0.45221784610277393, | |
| "learning_rate": 2.8077656524406534e-05, | |
| "loss": 0.371, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 2.1579746835443037, | |
| "grad_norm": 0.4655033671224608, | |
| "learning_rate": 2.802570062143278e-05, | |
| "loss": 0.3961, | |
| "step": 531 | |
| }, | |
| { | |
| "epoch": 2.162025316455696, | |
| "grad_norm": 0.4667442846665353, | |
| "learning_rate": 2.7973680080569555e-05, | |
| "loss": 0.3775, | |
| "step": 532 | |
| }, | |
| { | |
| "epoch": 2.1660759493670887, | |
| "grad_norm": 0.4394568000032489, | |
| "learning_rate": 2.792159532078314e-05, | |
| "loss": 0.367, | |
| "step": 533 | |
| }, | |
| { | |
| "epoch": 2.170126582278481, | |
| "grad_norm": 0.40914103030178883, | |
| "learning_rate": 2.7869446761557033e-05, | |
| "loss": 0.3725, | |
| "step": 534 | |
| }, | |
| { | |
| "epoch": 2.1741772151898733, | |
| "grad_norm": 0.4151878532671488, | |
| "learning_rate": 2.781723482288857e-05, | |
| "loss": 0.3762, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 2.178227848101266, | |
| "grad_norm": 0.4403755362145964, | |
| "learning_rate": 2.7764959925285517e-05, | |
| "loss": 0.38, | |
| "step": 536 | |
| }, | |
| { | |
| "epoch": 2.1822784810126583, | |
| "grad_norm": 0.41790693126486, | |
| "learning_rate": 2.771262248976272e-05, | |
| "loss": 0.3392, | |
| "step": 537 | |
| }, | |
| { | |
| "epoch": 2.186329113924051, | |
| "grad_norm": 0.458924104647922, | |
| "learning_rate": 2.7660222937838677e-05, | |
| "loss": 0.3753, | |
| "step": 538 | |
| }, | |
| { | |
| "epoch": 2.190379746835443, | |
| "grad_norm": 0.43690722027568707, | |
| "learning_rate": 2.7607761691532186e-05, | |
| "loss": 0.3771, | |
| "step": 539 | |
| }, | |
| { | |
| "epoch": 2.1944303797468354, | |
| "grad_norm": 0.44516561175180225, | |
| "learning_rate": 2.7555239173358916e-05, | |
| "loss": 0.3522, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 2.198481012658228, | |
| "grad_norm": 0.44725695226517453, | |
| "learning_rate": 2.7502655806328e-05, | |
| "loss": 0.3914, | |
| "step": 541 | |
| }, | |
| { | |
| "epoch": 2.2025316455696204, | |
| "grad_norm": 0.4025650578072138, | |
| "learning_rate": 2.7450012013938648e-05, | |
| "loss": 0.3767, | |
| "step": 542 | |
| }, | |
| { | |
| "epoch": 2.2065822784810125, | |
| "grad_norm": 0.4768323396497322, | |
| "learning_rate": 2.739730822017673e-05, | |
| "loss": 0.386, | |
| "step": 543 | |
| }, | |
| { | |
| "epoch": 2.210632911392405, | |
| "grad_norm": 0.43000345511115917, | |
| "learning_rate": 2.7344544849511355e-05, | |
| "loss": 0.3899, | |
| "step": 544 | |
| }, | |
| { | |
| "epoch": 2.2146835443037975, | |
| "grad_norm": 0.4134392985344769, | |
| "learning_rate": 2.7291722326891456e-05, | |
| "loss": 0.3814, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 2.21873417721519, | |
| "grad_norm": 0.4379128240932747, | |
| "learning_rate": 2.723884107774236e-05, | |
| "loss": 0.3812, | |
| "step": 546 | |
| }, | |
| { | |
| "epoch": 2.222784810126582, | |
| "grad_norm": 0.4263783441745549, | |
| "learning_rate": 2.718590152796239e-05, | |
| "loss": 0.3707, | |
| "step": 547 | |
| }, | |
| { | |
| "epoch": 2.2268354430379746, | |
| "grad_norm": 0.41135737054801624, | |
| "learning_rate": 2.71329041039194e-05, | |
| "loss": 0.3719, | |
| "step": 548 | |
| }, | |
| { | |
| "epoch": 2.230886075949367, | |
| "grad_norm": 0.4041013134577113, | |
| "learning_rate": 2.7079849232447357e-05, | |
| "loss": 0.3827, | |
| "step": 549 | |
| }, | |
| { | |
| "epoch": 2.2349367088607597, | |
| "grad_norm": 0.4189157852552827, | |
| "learning_rate": 2.7026737340842895e-05, | |
| "loss": 0.3756, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 2.2389873417721518, | |
| "grad_norm": 0.416942921606829, | |
| "learning_rate": 2.697356885686189e-05, | |
| "loss": 0.3697, | |
| "step": 551 | |
| }, | |
| { | |
| "epoch": 2.2430379746835443, | |
| "grad_norm": 0.41674426895198113, | |
| "learning_rate": 2.6920344208716014e-05, | |
| "loss": 0.3605, | |
| "step": 552 | |
| }, | |
| { | |
| "epoch": 2.247088607594937, | |
| "grad_norm": 0.406372058924128, | |
| "learning_rate": 2.6867063825069252e-05, | |
| "loss": 0.3925, | |
| "step": 553 | |
| }, | |
| { | |
| "epoch": 2.2511392405063293, | |
| "grad_norm": 0.4382254459998848, | |
| "learning_rate": 2.6813728135034494e-05, | |
| "loss": 0.3749, | |
| "step": 554 | |
| }, | |
| { | |
| "epoch": 2.2551898734177214, | |
| "grad_norm": 0.4815894076427739, | |
| "learning_rate": 2.6760337568170056e-05, | |
| "loss": 0.3908, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 2.259240506329114, | |
| "grad_norm": 0.45091781178998813, | |
| "learning_rate": 2.6706892554476226e-05, | |
| "loss": 0.3737, | |
| "step": 556 | |
| }, | |
| { | |
| "epoch": 2.2632911392405064, | |
| "grad_norm": 0.49430336208261255, | |
| "learning_rate": 2.6653393524391795e-05, | |
| "loss": 0.3731, | |
| "step": 557 | |
| }, | |
| { | |
| "epoch": 2.267341772151899, | |
| "grad_norm": 0.40777031627160537, | |
| "learning_rate": 2.6599840908790592e-05, | |
| "loss": 0.3851, | |
| "step": 558 | |
| }, | |
| { | |
| "epoch": 2.271392405063291, | |
| "grad_norm": 0.47250510727840067, | |
| "learning_rate": 2.6546235138978028e-05, | |
| "loss": 0.3942, | |
| "step": 559 | |
| }, | |
| { | |
| "epoch": 2.2754430379746835, | |
| "grad_norm": 0.42719932978501135, | |
| "learning_rate": 2.6492576646687597e-05, | |
| "loss": 0.378, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 2.279493670886076, | |
| "grad_norm": 0.4403029109216816, | |
| "learning_rate": 2.6438865864077425e-05, | |
| "loss": 0.3743, | |
| "step": 561 | |
| }, | |
| { | |
| "epoch": 2.2835443037974685, | |
| "grad_norm": 0.4599031556625358, | |
| "learning_rate": 2.6385103223726766e-05, | |
| "loss": 0.3831, | |
| "step": 562 | |
| }, | |
| { | |
| "epoch": 2.2875949367088606, | |
| "grad_norm": 0.47139164013460116, | |
| "learning_rate": 2.6331289158632537e-05, | |
| "loss": 0.3779, | |
| "step": 563 | |
| }, | |
| { | |
| "epoch": 2.291645569620253, | |
| "grad_norm": 0.4362106117279933, | |
| "learning_rate": 2.6277424102205817e-05, | |
| "loss": 0.3801, | |
| "step": 564 | |
| }, | |
| { | |
| "epoch": 2.2956962025316456, | |
| "grad_norm": 0.4602285139217017, | |
| "learning_rate": 2.6223508488268374e-05, | |
| "loss": 0.3884, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 2.299746835443038, | |
| "grad_norm": 0.43036654970037863, | |
| "learning_rate": 2.6169542751049148e-05, | |
| "loss": 0.3922, | |
| "step": 566 | |
| }, | |
| { | |
| "epoch": 2.3037974683544302, | |
| "grad_norm": 0.4034970006165686, | |
| "learning_rate": 2.6115527325180754e-05, | |
| "loss": 0.3822, | |
| "step": 567 | |
| }, | |
| { | |
| "epoch": 2.3078481012658227, | |
| "grad_norm": 0.4409206494805822, | |
| "learning_rate": 2.606146264569603e-05, | |
| "loss": 0.387, | |
| "step": 568 | |
| }, | |
| { | |
| "epoch": 2.3118987341772153, | |
| "grad_norm": 0.4189460471882211, | |
| "learning_rate": 2.6007349148024447e-05, | |
| "loss": 0.3434, | |
| "step": 569 | |
| }, | |
| { | |
| "epoch": 2.3159493670886078, | |
| "grad_norm": 0.4272460465614016, | |
| "learning_rate": 2.5953187267988694e-05, | |
| "loss": 0.3991, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 2.32, | |
| "grad_norm": 0.41491420257245454, | |
| "learning_rate": 2.5898977441801097e-05, | |
| "loss": 0.3749, | |
| "step": 571 | |
| }, | |
| { | |
| "epoch": 2.3240506329113924, | |
| "grad_norm": 0.47338763427153185, | |
| "learning_rate": 2.584472010606015e-05, | |
| "loss": 0.3862, | |
| "step": 572 | |
| }, | |
| { | |
| "epoch": 2.328101265822785, | |
| "grad_norm": 0.4163074684373198, | |
| "learning_rate": 2.5790415697746976e-05, | |
| "loss": 0.3867, | |
| "step": 573 | |
| }, | |
| { | |
| "epoch": 2.3321518987341774, | |
| "grad_norm": 0.471018531067937, | |
| "learning_rate": 2.5736064654221808e-05, | |
| "loss": 0.3544, | |
| "step": 574 | |
| }, | |
| { | |
| "epoch": 2.3362025316455695, | |
| "grad_norm": 0.43442359660927177, | |
| "learning_rate": 2.568166741322048e-05, | |
| "loss": 0.3727, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 2.340253164556962, | |
| "grad_norm": 0.5302753589909951, | |
| "learning_rate": 2.56272244128509e-05, | |
| "loss": 0.3919, | |
| "step": 576 | |
| }, | |
| { | |
| "epoch": 2.3443037974683545, | |
| "grad_norm": 0.4112882784464308, | |
| "learning_rate": 2.55727360915895e-05, | |
| "loss": 0.3657, | |
| "step": 577 | |
| }, | |
| { | |
| "epoch": 2.348354430379747, | |
| "grad_norm": 0.5008569811448608, | |
| "learning_rate": 2.5518202888277734e-05, | |
| "loss": 0.4041, | |
| "step": 578 | |
| }, | |
| { | |
| "epoch": 2.352405063291139, | |
| "grad_norm": 0.4233472626503316, | |
| "learning_rate": 2.5463625242118523e-05, | |
| "loss": 0.3763, | |
| "step": 579 | |
| }, | |
| { | |
| "epoch": 2.3564556962025316, | |
| "grad_norm": 0.48799912544884766, | |
| "learning_rate": 2.5409003592672723e-05, | |
| "loss": 0.3901, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 2.360506329113924, | |
| "grad_norm": 0.4169614626129584, | |
| "learning_rate": 2.535433837985559e-05, | |
| "loss": 0.3729, | |
| "step": 581 | |
| }, | |
| { | |
| "epoch": 2.3645569620253166, | |
| "grad_norm": 0.45754448910417467, | |
| "learning_rate": 2.529963004393324e-05, | |
| "loss": 0.381, | |
| "step": 582 | |
| }, | |
| { | |
| "epoch": 2.3686075949367087, | |
| "grad_norm": 0.43770243940697023, | |
| "learning_rate": 2.524487902551908e-05, | |
| "loss": 0.3585, | |
| "step": 583 | |
| }, | |
| { | |
| "epoch": 2.372658227848101, | |
| "grad_norm": 0.4258747818832008, | |
| "learning_rate": 2.519008576557029e-05, | |
| "loss": 0.3968, | |
| "step": 584 | |
| }, | |
| { | |
| "epoch": 2.3767088607594937, | |
| "grad_norm": 0.4398642474992483, | |
| "learning_rate": 2.5135250705384254e-05, | |
| "loss": 0.3897, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 2.3807594936708862, | |
| "grad_norm": 0.41520773835991953, | |
| "learning_rate": 2.5080374286595007e-05, | |
| "loss": 0.363, | |
| "step": 586 | |
| }, | |
| { | |
| "epoch": 2.3848101265822783, | |
| "grad_norm": 0.4451833148839736, | |
| "learning_rate": 2.5025456951169677e-05, | |
| "loss": 0.378, | |
| "step": 587 | |
| }, | |
| { | |
| "epoch": 2.388860759493671, | |
| "grad_norm": 0.4311479119781443, | |
| "learning_rate": 2.4970499141404942e-05, | |
| "loss": 0.3893, | |
| "step": 588 | |
| }, | |
| { | |
| "epoch": 2.3929113924050633, | |
| "grad_norm": 0.43703915111345937, | |
| "learning_rate": 2.491550129992345e-05, | |
| "loss": 0.3788, | |
| "step": 589 | |
| }, | |
| { | |
| "epoch": 2.396962025316456, | |
| "grad_norm": 0.4176600456529176, | |
| "learning_rate": 2.486046386967024e-05, | |
| "loss": 0.3667, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 2.401012658227848, | |
| "grad_norm": 0.41982478162628195, | |
| "learning_rate": 2.4805387293909214e-05, | |
| "loss": 0.388, | |
| "step": 591 | |
| }, | |
| { | |
| "epoch": 2.4050632911392404, | |
| "grad_norm": 0.39977671557861466, | |
| "learning_rate": 2.4750272016219552e-05, | |
| "loss": 0.3817, | |
| "step": 592 | |
| }, | |
| { | |
| "epoch": 2.409113924050633, | |
| "grad_norm": 0.4345144432237986, | |
| "learning_rate": 2.4695118480492114e-05, | |
| "loss": 0.3804, | |
| "step": 593 | |
| }, | |
| { | |
| "epoch": 2.4131645569620255, | |
| "grad_norm": 0.40733954810248213, | |
| "learning_rate": 2.4639927130925898e-05, | |
| "loss": 0.369, | |
| "step": 594 | |
| }, | |
| { | |
| "epoch": 2.4172151898734175, | |
| "grad_norm": 0.42889376853620476, | |
| "learning_rate": 2.458469841202444e-05, | |
| "loss": 0.3875, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 2.42126582278481, | |
| "grad_norm": 0.4106525470388025, | |
| "learning_rate": 2.452943276859226e-05, | |
| "loss": 0.3803, | |
| "step": 596 | |
| }, | |
| { | |
| "epoch": 2.4253164556962026, | |
| "grad_norm": 0.4177030261720476, | |
| "learning_rate": 2.447413064573125e-05, | |
| "loss": 0.3857, | |
| "step": 597 | |
| }, | |
| { | |
| "epoch": 2.429367088607595, | |
| "grad_norm": 0.39587497567752467, | |
| "learning_rate": 2.4418792488837095e-05, | |
| "loss": 0.3875, | |
| "step": 598 | |
| }, | |
| { | |
| "epoch": 2.433417721518987, | |
| "grad_norm": 0.46353358609537676, | |
| "learning_rate": 2.4363418743595713e-05, | |
| "loss": 0.4015, | |
| "step": 599 | |
| }, | |
| { | |
| "epoch": 2.4374683544303797, | |
| "grad_norm": 0.41487499334008693, | |
| "learning_rate": 2.430800985597963e-05, | |
| "loss": 0.3821, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 2.441518987341772, | |
| "grad_norm": 0.44465171632830636, | |
| "learning_rate": 2.4252566272244415e-05, | |
| "loss": 0.3929, | |
| "step": 601 | |
| }, | |
| { | |
| "epoch": 2.4455696202531647, | |
| "grad_norm": 0.44092919368685135, | |
| "learning_rate": 2.4197088438925063e-05, | |
| "loss": 0.3648, | |
| "step": 602 | |
| }, | |
| { | |
| "epoch": 2.449620253164557, | |
| "grad_norm": 0.44062554747559957, | |
| "learning_rate": 2.4141576802832417e-05, | |
| "loss": 0.3842, | |
| "step": 603 | |
| }, | |
| { | |
| "epoch": 2.4536708860759493, | |
| "grad_norm": 0.4698188980278592, | |
| "learning_rate": 2.408603181104957e-05, | |
| "loss": 0.3902, | |
| "step": 604 | |
| }, | |
| { | |
| "epoch": 2.457721518987342, | |
| "grad_norm": 0.40831461197425645, | |
| "learning_rate": 2.4030453910928245e-05, | |
| "loss": 0.3863, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 2.4617721518987343, | |
| "grad_norm": 0.4869423238423921, | |
| "learning_rate": 2.397484355008521e-05, | |
| "loss": 0.3924, | |
| "step": 606 | |
| }, | |
| { | |
| "epoch": 2.4658227848101264, | |
| "grad_norm": 0.41456226962415793, | |
| "learning_rate": 2.3919201176398662e-05, | |
| "loss": 0.3721, | |
| "step": 607 | |
| }, | |
| { | |
| "epoch": 2.469873417721519, | |
| "grad_norm": 0.4408498532878629, | |
| "learning_rate": 2.3863527238004633e-05, | |
| "loss": 0.3812, | |
| "step": 608 | |
| }, | |
| { | |
| "epoch": 2.4739240506329114, | |
| "grad_norm": 0.4406169288650729, | |
| "learning_rate": 2.380782218329337e-05, | |
| "loss": 0.3747, | |
| "step": 609 | |
| }, | |
| { | |
| "epoch": 2.477974683544304, | |
| "grad_norm": 0.4293197488150918, | |
| "learning_rate": 2.3752086460905725e-05, | |
| "loss": 0.373, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 2.482025316455696, | |
| "grad_norm": 0.4386511917618956, | |
| "learning_rate": 2.3696320519729544e-05, | |
| "loss": 0.3631, | |
| "step": 611 | |
| }, | |
| { | |
| "epoch": 2.4860759493670885, | |
| "grad_norm": 0.4389837360068803, | |
| "learning_rate": 2.3640524808896045e-05, | |
| "loss": 0.3786, | |
| "step": 612 | |
| }, | |
| { | |
| "epoch": 2.490126582278481, | |
| "grad_norm": 0.40594255747539304, | |
| "learning_rate": 2.3584699777776222e-05, | |
| "loss": 0.3743, | |
| "step": 613 | |
| }, | |
| { | |
| "epoch": 2.4941772151898736, | |
| "grad_norm": 0.4330734763237425, | |
| "learning_rate": 2.3528845875977195e-05, | |
| "loss": 0.3874, | |
| "step": 614 | |
| }, | |
| { | |
| "epoch": 2.4982278481012656, | |
| "grad_norm": 0.4394737090135893, | |
| "learning_rate": 2.3472963553338614e-05, | |
| "loss": 0.4031, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 2.502278481012658, | |
| "grad_norm": 0.45205534624227534, | |
| "learning_rate": 2.341705325992901e-05, | |
| "loss": 0.4057, | |
| "step": 616 | |
| }, | |
| { | |
| "epoch": 2.5063291139240507, | |
| "grad_norm": 0.41409128599825973, | |
| "learning_rate": 2.336111544604222e-05, | |
| "loss": 0.3886, | |
| "step": 617 | |
| }, | |
| { | |
| "epoch": 2.510379746835443, | |
| "grad_norm": 0.47581991481530356, | |
| "learning_rate": 2.33051505621937e-05, | |
| "loss": 0.3717, | |
| "step": 618 | |
| }, | |
| { | |
| "epoch": 2.5144303797468357, | |
| "grad_norm": 0.4423291440906091, | |
| "learning_rate": 2.324915905911693e-05, | |
| "loss": 0.3756, | |
| "step": 619 | |
| }, | |
| { | |
| "epoch": 2.5184810126582278, | |
| "grad_norm": 0.47092930287137563, | |
| "learning_rate": 2.319314138775977e-05, | |
| "loss": 0.3763, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 2.5225316455696203, | |
| "grad_norm": 0.4793937399640029, | |
| "learning_rate": 2.3137097999280856e-05, | |
| "loss": 0.3674, | |
| "step": 621 | |
| }, | |
| { | |
| "epoch": 2.526582278481013, | |
| "grad_norm": 0.4043774353406513, | |
| "learning_rate": 2.308102934504593e-05, | |
| "loss": 0.3713, | |
| "step": 622 | |
| }, | |
| { | |
| "epoch": 2.530632911392405, | |
| "grad_norm": 0.4606441949085103, | |
| "learning_rate": 2.3024935876624222e-05, | |
| "loss": 0.3701, | |
| "step": 623 | |
| }, | |
| { | |
| "epoch": 2.5346835443037974, | |
| "grad_norm": 0.46009816105668844, | |
| "learning_rate": 2.2968818045784813e-05, | |
| "loss": 0.3834, | |
| "step": 624 | |
| }, | |
| { | |
| "epoch": 2.53873417721519, | |
| "grad_norm": 0.4119991386896808, | |
| "learning_rate": 2.2912676304493006e-05, | |
| "loss": 0.3779, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 2.5427848101265824, | |
| "grad_norm": 0.4669020052794965, | |
| "learning_rate": 2.2856511104906668e-05, | |
| "loss": 0.3636, | |
| "step": 626 | |
| }, | |
| { | |
| "epoch": 2.546835443037975, | |
| "grad_norm": 0.46354206008873916, | |
| "learning_rate": 2.2800322899372586e-05, | |
| "loss": 0.3742, | |
| "step": 627 | |
| }, | |
| { | |
| "epoch": 2.550886075949367, | |
| "grad_norm": 0.4252775526478063, | |
| "learning_rate": 2.2744112140422844e-05, | |
| "loss": 0.3962, | |
| "step": 628 | |
| }, | |
| { | |
| "epoch": 2.5549367088607595, | |
| "grad_norm": 0.47465774495281027, | |
| "learning_rate": 2.2687879280771177e-05, | |
| "loss": 0.3824, | |
| "step": 629 | |
| }, | |
| { | |
| "epoch": 2.558987341772152, | |
| "grad_norm": 0.421692256516435, | |
| "learning_rate": 2.26316247733093e-05, | |
| "loss": 0.3659, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 2.563037974683544, | |
| "grad_norm": 0.4580246597726559, | |
| "learning_rate": 2.257534907110328e-05, | |
| "loss": 0.3717, | |
| "step": 631 | |
| }, | |
| { | |
| "epoch": 2.5670886075949366, | |
| "grad_norm": 0.4621492621448863, | |
| "learning_rate": 2.2519052627389882e-05, | |
| "loss": 0.3786, | |
| "step": 632 | |
| }, | |
| { | |
| "epoch": 2.571139240506329, | |
| "grad_norm": 0.4752863774681814, | |
| "learning_rate": 2.246273589557294e-05, | |
| "loss": 0.3602, | |
| "step": 633 | |
| }, | |
| { | |
| "epoch": 2.5751898734177217, | |
| "grad_norm": 0.42824990418849496, | |
| "learning_rate": 2.240639932921966e-05, | |
| "loss": 0.385, | |
| "step": 634 | |
| }, | |
| { | |
| "epoch": 2.579240506329114, | |
| "grad_norm": 0.4098358299368642, | |
| "learning_rate": 2.2350043382056995e-05, | |
| "loss": 0.3792, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 2.5832911392405062, | |
| "grad_norm": 0.43435825711162906, | |
| "learning_rate": 2.2293668507968015e-05, | |
| "loss": 0.3784, | |
| "step": 636 | |
| }, | |
| { | |
| "epoch": 2.5873417721518988, | |
| "grad_norm": 0.41667862858882404, | |
| "learning_rate": 2.2237275160988186e-05, | |
| "loss": 0.365, | |
| "step": 637 | |
| }, | |
| { | |
| "epoch": 2.5913924050632913, | |
| "grad_norm": 0.4505303321596465, | |
| "learning_rate": 2.2180863795301787e-05, | |
| "loss": 0.405, | |
| "step": 638 | |
| }, | |
| { | |
| "epoch": 2.5954430379746833, | |
| "grad_norm": 0.4070092134738687, | |
| "learning_rate": 2.212443486523819e-05, | |
| "loss": 0.3821, | |
| "step": 639 | |
| }, | |
| { | |
| "epoch": 2.599493670886076, | |
| "grad_norm": 0.4543491482895516, | |
| "learning_rate": 2.2067988825268243e-05, | |
| "loss": 0.3599, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 2.6035443037974684, | |
| "grad_norm": 0.42360153355796726, | |
| "learning_rate": 2.2011526130000596e-05, | |
| "loss": 0.3969, | |
| "step": 641 | |
| }, | |
| { | |
| "epoch": 2.607594936708861, | |
| "grad_norm": 0.3920020771867113, | |
| "learning_rate": 2.1955047234178038e-05, | |
| "loss": 0.3807, | |
| "step": 642 | |
| }, | |
| { | |
| "epoch": 2.6116455696202534, | |
| "grad_norm": 0.44887307943731053, | |
| "learning_rate": 2.1898552592673825e-05, | |
| "loss": 0.3706, | |
| "step": 643 | |
| }, | |
| { | |
| "epoch": 2.6156962025316455, | |
| "grad_norm": 0.3992780268593902, | |
| "learning_rate": 2.184204266048803e-05, | |
| "loss": 0.3676, | |
| "step": 644 | |
| }, | |
| { | |
| "epoch": 2.619746835443038, | |
| "grad_norm": 0.415901225856746, | |
| "learning_rate": 2.1785517892743887e-05, | |
| "loss": 0.3949, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 2.6237974683544305, | |
| "grad_norm": 0.4090818765960425, | |
| "learning_rate": 2.17289787446841e-05, | |
| "loss": 0.3804, | |
| "step": 646 | |
| }, | |
| { | |
| "epoch": 2.6278481012658226, | |
| "grad_norm": 0.4131800708541863, | |
| "learning_rate": 2.1672425671667198e-05, | |
| "loss": 0.3958, | |
| "step": 647 | |
| }, | |
| { | |
| "epoch": 2.631898734177215, | |
| "grad_norm": 0.4263157909775713, | |
| "learning_rate": 2.161585912916385e-05, | |
| "loss": 0.3798, | |
| "step": 648 | |
| }, | |
| { | |
| "epoch": 2.6359493670886076, | |
| "grad_norm": 0.39291049701338837, | |
| "learning_rate": 2.1559279572753214e-05, | |
| "loss": 0.3945, | |
| "step": 649 | |
| }, | |
| { | |
| "epoch": 2.64, | |
| "grad_norm": 0.3878734683088336, | |
| "learning_rate": 2.1502687458119268e-05, | |
| "loss": 0.3522, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 2.6440506329113926, | |
| "grad_norm": 0.45832450848251055, | |
| "learning_rate": 2.1446083241047116e-05, | |
| "loss": 0.372, | |
| "step": 651 | |
| }, | |
| { | |
| "epoch": 2.6481012658227847, | |
| "grad_norm": 0.39461053125464596, | |
| "learning_rate": 2.1389467377419333e-05, | |
| "loss": 0.3746, | |
| "step": 652 | |
| }, | |
| { | |
| "epoch": 2.6521518987341772, | |
| "grad_norm": 0.3856506637445099, | |
| "learning_rate": 2.133284032321232e-05, | |
| "loss": 0.3887, | |
| "step": 653 | |
| }, | |
| { | |
| "epoch": 2.6562025316455697, | |
| "grad_norm": 0.3960546128671372, | |
| "learning_rate": 2.1276202534492566e-05, | |
| "loss": 0.3653, | |
| "step": 654 | |
| }, | |
| { | |
| "epoch": 2.660253164556962, | |
| "grad_norm": 0.38557165372639396, | |
| "learning_rate": 2.121955446741306e-05, | |
| "loss": 0.3881, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 2.6643037974683543, | |
| "grad_norm": 0.41218011504196184, | |
| "learning_rate": 2.1162896578209517e-05, | |
| "loss": 0.3815, | |
| "step": 656 | |
| }, | |
| { | |
| "epoch": 2.668354430379747, | |
| "grad_norm": 0.42583623566175693, | |
| "learning_rate": 2.1106229323196813e-05, | |
| "loss": 0.4014, | |
| "step": 657 | |
| }, | |
| { | |
| "epoch": 2.6724050632911394, | |
| "grad_norm": 0.3809378393949005, | |
| "learning_rate": 2.1049553158765214e-05, | |
| "loss": 0.3885, | |
| "step": 658 | |
| }, | |
| { | |
| "epoch": 2.676455696202532, | |
| "grad_norm": 0.3906216862453747, | |
| "learning_rate": 2.0992868541376764e-05, | |
| "loss": 0.3659, | |
| "step": 659 | |
| }, | |
| { | |
| "epoch": 2.680506329113924, | |
| "grad_norm": 0.40799850112247493, | |
| "learning_rate": 2.093617592756158e-05, | |
| "loss": 0.4145, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 2.6845569620253165, | |
| "grad_norm": 0.39334696177520273, | |
| "learning_rate": 2.0879475773914167e-05, | |
| "loss": 0.3763, | |
| "step": 661 | |
| }, | |
| { | |
| "epoch": 2.688607594936709, | |
| "grad_norm": 0.4510106953320501, | |
| "learning_rate": 2.082276853708978e-05, | |
| "loss": 0.3749, | |
| "step": 662 | |
| }, | |
| { | |
| "epoch": 2.692658227848101, | |
| "grad_norm": 0.4202402397472803, | |
| "learning_rate": 2.076605467380071e-05, | |
| "loss": 0.3754, | |
| "step": 663 | |
| }, | |
| { | |
| "epoch": 2.6967088607594936, | |
| "grad_norm": 0.439002952818395, | |
| "learning_rate": 2.0709334640812613e-05, | |
| "loss": 0.3868, | |
| "step": 664 | |
| }, | |
| { | |
| "epoch": 2.700759493670886, | |
| "grad_norm": 0.42195715110805065, | |
| "learning_rate": 2.0652608894940824e-05, | |
| "loss": 0.3862, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 2.7048101265822786, | |
| "grad_norm": 0.4124142472125217, | |
| "learning_rate": 2.0595877893046722e-05, | |
| "loss": 0.3726, | |
| "step": 666 | |
| }, | |
| { | |
| "epoch": 2.708860759493671, | |
| "grad_norm": 0.41449142844075326, | |
| "learning_rate": 2.0539142092033985e-05, | |
| "loss": 0.3906, | |
| "step": 667 | |
| }, | |
| { | |
| "epoch": 2.712911392405063, | |
| "grad_norm": 0.41708339342414497, | |
| "learning_rate": 2.048240194884496e-05, | |
| "loss": 0.3925, | |
| "step": 668 | |
| }, | |
| { | |
| "epoch": 2.7169620253164557, | |
| "grad_norm": 0.42672534102130943, | |
| "learning_rate": 2.042565792045695e-05, | |
| "loss": 0.3809, | |
| "step": 669 | |
| }, | |
| { | |
| "epoch": 2.721012658227848, | |
| "grad_norm": 0.3681459036950113, | |
| "learning_rate": 2.036891046387857e-05, | |
| "loss": 0.3856, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 2.7250632911392403, | |
| "grad_norm": 0.44252339116550843, | |
| "learning_rate": 2.0312160036146036e-05, | |
| "loss": 0.3823, | |
| "step": 671 | |
| }, | |
| { | |
| "epoch": 2.729113924050633, | |
| "grad_norm": 0.4079305528932944, | |
| "learning_rate": 2.025540709431948e-05, | |
| "loss": 0.3906, | |
| "step": 672 | |
| }, | |
| { | |
| "epoch": 2.7331645569620253, | |
| "grad_norm": 0.40164959837109615, | |
| "learning_rate": 2.0198652095479298e-05, | |
| "loss": 0.3807, | |
| "step": 673 | |
| }, | |
| { | |
| "epoch": 2.737215189873418, | |
| "grad_norm": 0.38609473494953733, | |
| "learning_rate": 2.014189549672245e-05, | |
| "loss": 0.3746, | |
| "step": 674 | |
| }, | |
| { | |
| "epoch": 2.7412658227848103, | |
| "grad_norm": 1.1350494039209302, | |
| "learning_rate": 2.0085137755158776e-05, | |
| "loss": 0.3928, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 2.7453164556962024, | |
| "grad_norm": 0.4963194170270538, | |
| "learning_rate": 2.0028379327907327e-05, | |
| "loss": 0.3902, | |
| "step": 676 | |
| }, | |
| { | |
| "epoch": 2.749367088607595, | |
| "grad_norm": 0.427327526614533, | |
| "learning_rate": 1.9971620672092676e-05, | |
| "loss": 0.3615, | |
| "step": 677 | |
| }, | |
| { | |
| "epoch": 2.7534177215189874, | |
| "grad_norm": 0.47922896065252446, | |
| "learning_rate": 1.991486224484123e-05, | |
| "loss": 0.4028, | |
| "step": 678 | |
| }, | |
| { | |
| "epoch": 2.7574683544303795, | |
| "grad_norm": 0.441189769787658, | |
| "learning_rate": 1.985810450327756e-05, | |
| "loss": 0.3878, | |
| "step": 679 | |
| }, | |
| { | |
| "epoch": 2.761518987341772, | |
| "grad_norm": 0.41250057929751544, | |
| "learning_rate": 1.9801347904520706e-05, | |
| "loss": 0.3624, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 2.7655696202531646, | |
| "grad_norm": 0.4695529407736641, | |
| "learning_rate": 1.974459290568053e-05, | |
| "loss": 0.3825, | |
| "step": 681 | |
| }, | |
| { | |
| "epoch": 2.769620253164557, | |
| "grad_norm": 0.3966265027802706, | |
| "learning_rate": 1.968783996385397e-05, | |
| "loss": 0.362, | |
| "step": 682 | |
| }, | |
| { | |
| "epoch": 2.7736708860759496, | |
| "grad_norm": 0.4024416401868956, | |
| "learning_rate": 1.963108953612143e-05, | |
| "loss": 0.378, | |
| "step": 683 | |
| }, | |
| { | |
| "epoch": 2.7777215189873417, | |
| "grad_norm": 0.4170506669029113, | |
| "learning_rate": 1.9574342079543056e-05, | |
| "loss": 0.3718, | |
| "step": 684 | |
| }, | |
| { | |
| "epoch": 2.781772151898734, | |
| "grad_norm": 0.4332398218236451, | |
| "learning_rate": 1.9517598051155046e-05, | |
| "loss": 0.3955, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 2.7858227848101267, | |
| "grad_norm": 0.4237296085059462, | |
| "learning_rate": 1.9460857907966025e-05, | |
| "loss": 0.3637, | |
| "step": 686 | |
| }, | |
| { | |
| "epoch": 2.7898734177215188, | |
| "grad_norm": 0.41504591325602147, | |
| "learning_rate": 1.9404122106953285e-05, | |
| "loss": 0.3636, | |
| "step": 687 | |
| }, | |
| { | |
| "epoch": 2.7939240506329113, | |
| "grad_norm": 0.5403379193757628, | |
| "learning_rate": 1.9347391105059176e-05, | |
| "loss": 0.3945, | |
| "step": 688 | |
| }, | |
| { | |
| "epoch": 2.797974683544304, | |
| "grad_norm": 0.4101441159839735, | |
| "learning_rate": 1.92906653591874e-05, | |
| "loss": 0.3827, | |
| "step": 689 | |
| }, | |
| { | |
| "epoch": 2.8020253164556963, | |
| "grad_norm": 0.4234197936780648, | |
| "learning_rate": 1.9233945326199295e-05, | |
| "loss": 0.3752, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 2.806075949367089, | |
| "grad_norm": 0.39637366299076726, | |
| "learning_rate": 1.917723146291022e-05, | |
| "loss": 0.387, | |
| "step": 691 | |
| }, | |
| { | |
| "epoch": 2.810126582278481, | |
| "grad_norm": 0.3931711652317323, | |
| "learning_rate": 1.912052422608584e-05, | |
| "loss": 0.3785, | |
| "step": 692 | |
| }, | |
| { | |
| "epoch": 2.8141772151898734, | |
| "grad_norm": 0.36437457992488465, | |
| "learning_rate": 1.9063824072438428e-05, | |
| "loss": 0.4073, | |
| "step": 693 | |
| }, | |
| { | |
| "epoch": 2.818227848101266, | |
| "grad_norm": 0.3870292079949069, | |
| "learning_rate": 1.9007131458623246e-05, | |
| "loss": 0.3517, | |
| "step": 694 | |
| }, | |
| { | |
| "epoch": 2.822278481012658, | |
| "grad_norm": 0.4138726301877295, | |
| "learning_rate": 1.895044684123479e-05, | |
| "loss": 0.3842, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 2.8263291139240505, | |
| "grad_norm": 0.42067235889024684, | |
| "learning_rate": 1.8893770676803194e-05, | |
| "loss": 0.3746, | |
| "step": 696 | |
| }, | |
| { | |
| "epoch": 2.830379746835443, | |
| "grad_norm": 0.393923361503678, | |
| "learning_rate": 1.8837103421790486e-05, | |
| "loss": 0.3587, | |
| "step": 697 | |
| }, | |
| { | |
| "epoch": 2.8344303797468355, | |
| "grad_norm": 0.43159474269177983, | |
| "learning_rate": 1.8780445532586952e-05, | |
| "loss": 0.3673, | |
| "step": 698 | |
| }, | |
| { | |
| "epoch": 2.838481012658228, | |
| "grad_norm": 0.40439460292257445, | |
| "learning_rate": 1.872379746550743e-05, | |
| "loss": 0.3794, | |
| "step": 699 | |
| }, | |
| { | |
| "epoch": 2.84253164556962, | |
| "grad_norm": 0.40099408509512946, | |
| "learning_rate": 1.866715967678769e-05, | |
| "loss": 0.3983, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 2.8465822784810126, | |
| "grad_norm": 0.4376332719050142, | |
| "learning_rate": 1.861053262258067e-05, | |
| "loss": 0.3653, | |
| "step": 701 | |
| }, | |
| { | |
| "epoch": 2.850632911392405, | |
| "grad_norm": 0.4132331093978461, | |
| "learning_rate": 1.8553916758952897e-05, | |
| "loss": 0.3559, | |
| "step": 702 | |
| }, | |
| { | |
| "epoch": 2.8546835443037972, | |
| "grad_norm": 1.7227765412204687, | |
| "learning_rate": 1.8497312541880735e-05, | |
| "loss": 0.39, | |
| "step": 703 | |
| }, | |
| { | |
| "epoch": 2.8587341772151897, | |
| "grad_norm": 0.4258890416578788, | |
| "learning_rate": 1.8440720427246786e-05, | |
| "loss": 0.3465, | |
| "step": 704 | |
| }, | |
| { | |
| "epoch": 2.8627848101265823, | |
| "grad_norm": 0.4332376304143481, | |
| "learning_rate": 1.8384140870836157e-05, | |
| "loss": 0.3752, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 2.8668354430379748, | |
| "grad_norm": 0.4222924462581683, | |
| "learning_rate": 1.8327574328332806e-05, | |
| "loss": 0.3662, | |
| "step": 706 | |
| }, | |
| { | |
| "epoch": 2.8708860759493673, | |
| "grad_norm": 0.41837856304984805, | |
| "learning_rate": 1.8271021255315906e-05, | |
| "loss": 0.3769, | |
| "step": 707 | |
| }, | |
| { | |
| "epoch": 2.8749367088607594, | |
| "grad_norm": 0.43141509558522906, | |
| "learning_rate": 1.8214482107256117e-05, | |
| "loss": 0.3791, | |
| "step": 708 | |
| }, | |
| { | |
| "epoch": 2.878987341772152, | |
| "grad_norm": 0.417321750318619, | |
| "learning_rate": 1.8157957339511968e-05, | |
| "loss": 0.3874, | |
| "step": 709 | |
| }, | |
| { | |
| "epoch": 2.8830379746835444, | |
| "grad_norm": 0.4078955875821102, | |
| "learning_rate": 1.8101447407326182e-05, | |
| "loss": 0.3747, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 2.8870886075949365, | |
| "grad_norm": 0.4128869979881167, | |
| "learning_rate": 1.8044952765821966e-05, | |
| "loss": 0.3751, | |
| "step": 711 | |
| }, | |
| { | |
| "epoch": 2.891139240506329, | |
| "grad_norm": 0.38726665699784746, | |
| "learning_rate": 1.7988473869999407e-05, | |
| "loss": 0.3993, | |
| "step": 712 | |
| }, | |
| { | |
| "epoch": 2.8951898734177215, | |
| "grad_norm": 0.3839015960360953, | |
| "learning_rate": 1.7932011174731764e-05, | |
| "loss": 0.3855, | |
| "step": 713 | |
| }, | |
| { | |
| "epoch": 2.899240506329114, | |
| "grad_norm": 0.4002774380570917, | |
| "learning_rate": 1.7875565134761817e-05, | |
| "loss": 0.3599, | |
| "step": 714 | |
| }, | |
| { | |
| "epoch": 2.9032911392405065, | |
| "grad_norm": 0.36345814099517143, | |
| "learning_rate": 1.7819136204698226e-05, | |
| "loss": 0.368, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 2.9073417721518986, | |
| "grad_norm": 0.3911740568045173, | |
| "learning_rate": 1.776272483901182e-05, | |
| "loss": 0.3582, | |
| "step": 716 | |
| }, | |
| { | |
| "epoch": 2.911392405063291, | |
| "grad_norm": 0.39753827282648824, | |
| "learning_rate": 1.7706331492031995e-05, | |
| "loss": 0.343, | |
| "step": 717 | |
| }, | |
| { | |
| "epoch": 2.9154430379746836, | |
| "grad_norm": 0.40221455277216717, | |
| "learning_rate": 1.764995661794301e-05, | |
| "loss": 0.4067, | |
| "step": 718 | |
| }, | |
| { | |
| "epoch": 2.9194936708860757, | |
| "grad_norm": 0.4363619105989375, | |
| "learning_rate": 1.759360067078035e-05, | |
| "loss": 0.393, | |
| "step": 719 | |
| }, | |
| { | |
| "epoch": 2.923544303797468, | |
| "grad_norm": 0.3876145888345104, | |
| "learning_rate": 1.7537264104427064e-05, | |
| "loss": 0.3943, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 2.9275949367088607, | |
| "grad_norm": 0.3913732310462001, | |
| "learning_rate": 1.748094737261012e-05, | |
| "loss": 0.3808, | |
| "step": 721 | |
| }, | |
| { | |
| "epoch": 2.9316455696202532, | |
| "grad_norm": 0.39921756291313915, | |
| "learning_rate": 1.7424650928896726e-05, | |
| "loss": 0.3883, | |
| "step": 722 | |
| }, | |
| { | |
| "epoch": 2.9356962025316458, | |
| "grad_norm": 0.3791669443725468, | |
| "learning_rate": 1.7368375226690712e-05, | |
| "loss": 0.3841, | |
| "step": 723 | |
| }, | |
| { | |
| "epoch": 2.939746835443038, | |
| "grad_norm": 0.3849742265295548, | |
| "learning_rate": 1.731212071922883e-05, | |
| "loss": 0.3652, | |
| "step": 724 | |
| }, | |
| { | |
| "epoch": 2.9437974683544303, | |
| "grad_norm": 0.40887517963441394, | |
| "learning_rate": 1.7255887859577156e-05, | |
| "loss": 0.3591, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 2.947848101265823, | |
| "grad_norm": 0.39220754084126147, | |
| "learning_rate": 1.7199677100627427e-05, | |
| "loss": 0.3516, | |
| "step": 726 | |
| }, | |
| { | |
| "epoch": 2.951898734177215, | |
| "grad_norm": 0.41217843010157806, | |
| "learning_rate": 1.7143488895093343e-05, | |
| "loss": 0.3657, | |
| "step": 727 | |
| }, | |
| { | |
| "epoch": 2.9559493670886074, | |
| "grad_norm": 0.3772143885894634, | |
| "learning_rate": 1.7087323695506994e-05, | |
| "loss": 0.4001, | |
| "step": 728 | |
| }, | |
| { | |
| "epoch": 2.96, | |
| "grad_norm": 0.3831326664846604, | |
| "learning_rate": 1.7031181954215194e-05, | |
| "loss": 0.3876, | |
| "step": 729 | |
| }, | |
| { | |
| "epoch": 2.9640506329113925, | |
| "grad_norm": 0.4285310955075387, | |
| "learning_rate": 1.6975064123375788e-05, | |
| "loss": 0.3724, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 2.968101265822785, | |
| "grad_norm": 0.38721629331957647, | |
| "learning_rate": 1.6918970654954084e-05, | |
| "loss": 0.3781, | |
| "step": 731 | |
| }, | |
| { | |
| "epoch": 2.972151898734177, | |
| "grad_norm": 0.3864928134472797, | |
| "learning_rate": 1.686290200071915e-05, | |
| "loss": 0.3777, | |
| "step": 732 | |
| }, | |
| { | |
| "epoch": 2.9762025316455696, | |
| "grad_norm": 0.38979502240280545, | |
| "learning_rate": 1.6806858612240234e-05, | |
| "loss": 0.3842, | |
| "step": 733 | |
| }, | |
| { | |
| "epoch": 2.980253164556962, | |
| "grad_norm": 0.39979176511570574, | |
| "learning_rate": 1.6750840940883078e-05, | |
| "loss": 0.3711, | |
| "step": 734 | |
| }, | |
| { | |
| "epoch": 2.984303797468354, | |
| "grad_norm": 0.3730073375706051, | |
| "learning_rate": 1.6694849437806305e-05, | |
| "loss": 0.3679, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 2.9883544303797467, | |
| "grad_norm": 0.38705054036823866, | |
| "learning_rate": 1.663888455395778e-05, | |
| "loss": 0.3704, | |
| "step": 736 | |
| }, | |
| { | |
| "epoch": 2.992405063291139, | |
| "grad_norm": 0.38611799081772286, | |
| "learning_rate": 1.6582946740070995e-05, | |
| "loss": 0.3671, | |
| "step": 737 | |
| }, | |
| { | |
| "epoch": 2.9964556962025317, | |
| "grad_norm": 0.38796509798672774, | |
| "learning_rate": 1.6527036446661396e-05, | |
| "loss": 0.3718, | |
| "step": 738 | |
| }, | |
| { | |
| "epoch": 3.0005063291139242, | |
| "grad_norm": 0.37948874827646206, | |
| "learning_rate": 1.6471154124022818e-05, | |
| "loss": 0.3772, | |
| "step": 739 | |
| }, | |
| { | |
| "epoch": 3.0045569620253163, | |
| "grad_norm": 0.596791084204251, | |
| "learning_rate": 1.6415300222223788e-05, | |
| "loss": 0.2749, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 3.008607594936709, | |
| "grad_norm": 0.3743619798467806, | |
| "learning_rate": 1.6359475191103958e-05, | |
| "loss": 0.275, | |
| "step": 741 | |
| }, | |
| { | |
| "epoch": 3.0126582278481013, | |
| "grad_norm": 0.5669373737728506, | |
| "learning_rate": 1.6303679480270466e-05, | |
| "loss": 0.2584, | |
| "step": 742 | |
| }, | |
| { | |
| "epoch": 3.016708860759494, | |
| "grad_norm": 0.6758891561553594, | |
| "learning_rate": 1.624791353909428e-05, | |
| "loss": 0.2682, | |
| "step": 743 | |
| }, | |
| { | |
| "epoch": 3.020759493670886, | |
| "grad_norm": 0.4112812875826829, | |
| "learning_rate": 1.619217781670663e-05, | |
| "loss": 0.2685, | |
| "step": 744 | |
| }, | |
| { | |
| "epoch": 3.0248101265822784, | |
| "grad_norm": 0.5582814357935626, | |
| "learning_rate": 1.6136472761995373e-05, | |
| "loss": 0.2529, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 3.028860759493671, | |
| "grad_norm": 0.5839576198760028, | |
| "learning_rate": 1.608079882360134e-05, | |
| "loss": 0.2782, | |
| "step": 746 | |
| }, | |
| { | |
| "epoch": 3.0329113924050635, | |
| "grad_norm": 0.4113209083584426, | |
| "learning_rate": 1.60251564499148e-05, | |
| "loss": 0.2682, | |
| "step": 747 | |
| }, | |
| { | |
| "epoch": 3.0369620253164555, | |
| "grad_norm": 0.4746085930303214, | |
| "learning_rate": 1.596954608907176e-05, | |
| "loss": 0.2738, | |
| "step": 748 | |
| }, | |
| { | |
| "epoch": 3.041012658227848, | |
| "grad_norm": 0.5441305964014203, | |
| "learning_rate": 1.591396818895043e-05, | |
| "loss": 0.2675, | |
| "step": 749 | |
| }, | |
| { | |
| "epoch": 3.0450632911392406, | |
| "grad_norm": 0.4139902728823283, | |
| "learning_rate": 1.585842319716759e-05, | |
| "loss": 0.2831, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 3.049113924050633, | |
| "grad_norm": 0.46714488110479824, | |
| "learning_rate": 1.5802911561074944e-05, | |
| "loss": 0.2559, | |
| "step": 751 | |
| }, | |
| { | |
| "epoch": 3.053164556962025, | |
| "grad_norm": 0.4610049016148163, | |
| "learning_rate": 1.5747433727755595e-05, | |
| "loss": 0.2686, | |
| "step": 752 | |
| }, | |
| { | |
| "epoch": 3.0572151898734177, | |
| "grad_norm": 0.41698704893081256, | |
| "learning_rate": 1.5691990144020376e-05, | |
| "loss": 0.2754, | |
| "step": 753 | |
| }, | |
| { | |
| "epoch": 3.06126582278481, | |
| "grad_norm": 0.40250335198182124, | |
| "learning_rate": 1.5636581256404297e-05, | |
| "loss": 0.2741, | |
| "step": 754 | |
| }, | |
| { | |
| "epoch": 3.0653164556962027, | |
| "grad_norm": 0.4297700299619123, | |
| "learning_rate": 1.558120751116291e-05, | |
| "loss": 0.2636, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 3.0693670886075948, | |
| "grad_norm": 0.3790307629643322, | |
| "learning_rate": 1.552586935426876e-05, | |
| "loss": 0.2629, | |
| "step": 756 | |
| }, | |
| { | |
| "epoch": 3.0734177215189873, | |
| "grad_norm": 0.4007262827931162, | |
| "learning_rate": 1.547056723140774e-05, | |
| "loss": 0.2488, | |
| "step": 757 | |
| }, | |
| { | |
| "epoch": 3.07746835443038, | |
| "grad_norm": 0.3724225042477577, | |
| "learning_rate": 1.5415301587975565e-05, | |
| "loss": 0.2667, | |
| "step": 758 | |
| }, | |
| { | |
| "epoch": 3.0815189873417723, | |
| "grad_norm": 0.3576009291662094, | |
| "learning_rate": 1.536007286907411e-05, | |
| "loss": 0.2429, | |
| "step": 759 | |
| }, | |
| { | |
| "epoch": 3.0855696202531644, | |
| "grad_norm": 1.3270443313498466, | |
| "learning_rate": 1.5304881519507896e-05, | |
| "loss": 0.2908, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 3.089620253164557, | |
| "grad_norm": 0.3973664928458626, | |
| "learning_rate": 1.5249727983780453e-05, | |
| "loss": 0.2667, | |
| "step": 761 | |
| }, | |
| { | |
| "epoch": 3.0936708860759494, | |
| "grad_norm": 0.36113786435375406, | |
| "learning_rate": 1.5194612706090786e-05, | |
| "loss": 0.243, | |
| "step": 762 | |
| }, | |
| { | |
| "epoch": 3.097721518987342, | |
| "grad_norm": 0.35293452717245266, | |
| "learning_rate": 1.5139536130329771e-05, | |
| "loss": 0.2569, | |
| "step": 763 | |
| }, | |
| { | |
| "epoch": 3.101772151898734, | |
| "grad_norm": 0.38725685685440875, | |
| "learning_rate": 1.508449870007656e-05, | |
| "loss": 0.2606, | |
| "step": 764 | |
| }, | |
| { | |
| "epoch": 3.1058227848101265, | |
| "grad_norm": 0.37101523545072473, | |
| "learning_rate": 1.5029500858595056e-05, | |
| "loss": 0.2398, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 3.109873417721519, | |
| "grad_norm": 0.36339484109530806, | |
| "learning_rate": 1.4974543048830328e-05, | |
| "loss": 0.243, | |
| "step": 766 | |
| }, | |
| { | |
| "epoch": 3.1139240506329116, | |
| "grad_norm": 0.40935128420094374, | |
| "learning_rate": 1.4919625713405e-05, | |
| "loss": 0.2768, | |
| "step": 767 | |
| }, | |
| { | |
| "epoch": 3.1179746835443036, | |
| "grad_norm": 0.34270105050856187, | |
| "learning_rate": 1.4864749294615756e-05, | |
| "loss": 0.2924, | |
| "step": 768 | |
| }, | |
| { | |
| "epoch": 3.122025316455696, | |
| "grad_norm": 0.37208155715100555, | |
| "learning_rate": 1.4809914234429716e-05, | |
| "loss": 0.2803, | |
| "step": 769 | |
| }, | |
| { | |
| "epoch": 3.1260759493670887, | |
| "grad_norm": 0.3742273536543627, | |
| "learning_rate": 1.4755120974480923e-05, | |
| "loss": 0.2486, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 3.130126582278481, | |
| "grad_norm": 0.34419303508524507, | |
| "learning_rate": 1.4700369956066771e-05, | |
| "loss": 0.2661, | |
| "step": 771 | |
| }, | |
| { | |
| "epoch": 3.1341772151898732, | |
| "grad_norm": 0.35441114620378494, | |
| "learning_rate": 1.4645661620144413e-05, | |
| "loss": 0.2514, | |
| "step": 772 | |
| }, | |
| { | |
| "epoch": 3.1382278481012658, | |
| "grad_norm": 0.3719625090077885, | |
| "learning_rate": 1.4590996407327284e-05, | |
| "loss": 0.264, | |
| "step": 773 | |
| }, | |
| { | |
| "epoch": 3.1422784810126583, | |
| "grad_norm": 0.33440958360879275, | |
| "learning_rate": 1.4536374757881487e-05, | |
| "loss": 0.268, | |
| "step": 774 | |
| }, | |
| { | |
| "epoch": 3.146329113924051, | |
| "grad_norm": 0.36378541262525144, | |
| "learning_rate": 1.4481797111722271e-05, | |
| "loss": 0.2396, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 3.150379746835443, | |
| "grad_norm": 0.34303575056612884, | |
| "learning_rate": 1.4427263908410507e-05, | |
| "loss": 0.2618, | |
| "step": 776 | |
| }, | |
| { | |
| "epoch": 3.1544303797468354, | |
| "grad_norm": 0.33745475019203186, | |
| "learning_rate": 1.4372775587149108e-05, | |
| "loss": 0.2534, | |
| "step": 777 | |
| }, | |
| { | |
| "epoch": 3.158481012658228, | |
| "grad_norm": 0.3360773666946644, | |
| "learning_rate": 1.4318332586779522e-05, | |
| "loss": 0.271, | |
| "step": 778 | |
| }, | |
| { | |
| "epoch": 3.1625316455696204, | |
| "grad_norm": 0.35439914138463163, | |
| "learning_rate": 1.4263935345778202e-05, | |
| "loss": 0.2699, | |
| "step": 779 | |
| }, | |
| { | |
| "epoch": 3.1665822784810125, | |
| "grad_norm": 0.3394071725567932, | |
| "learning_rate": 1.420958430225303e-05, | |
| "loss": 0.2474, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 3.170632911392405, | |
| "grad_norm": 0.35032742427151126, | |
| "learning_rate": 1.415527989393985e-05, | |
| "loss": 0.2655, | |
| "step": 781 | |
| }, | |
| { | |
| "epoch": 3.1746835443037975, | |
| "grad_norm": 0.32854896275880807, | |
| "learning_rate": 1.410102255819891e-05, | |
| "loss": 0.2795, | |
| "step": 782 | |
| }, | |
| { | |
| "epoch": 3.17873417721519, | |
| "grad_norm": 0.33987120407592275, | |
| "learning_rate": 1.404681273201131e-05, | |
| "loss": 0.2676, | |
| "step": 783 | |
| }, | |
| { | |
| "epoch": 3.182784810126582, | |
| "grad_norm": 0.347708890164154, | |
| "learning_rate": 1.399265085197556e-05, | |
| "loss": 0.2635, | |
| "step": 784 | |
| }, | |
| { | |
| "epoch": 3.1868354430379746, | |
| "grad_norm": 0.3428945202025847, | |
| "learning_rate": 1.393853735430398e-05, | |
| "loss": 0.2585, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 3.190886075949367, | |
| "grad_norm": 0.33905493766893297, | |
| "learning_rate": 1.3884472674819246e-05, | |
| "loss": 0.267, | |
| "step": 786 | |
| }, | |
| { | |
| "epoch": 3.1949367088607596, | |
| "grad_norm": 0.4598968270116413, | |
| "learning_rate": 1.3830457248950864e-05, | |
| "loss": 0.2739, | |
| "step": 787 | |
| }, | |
| { | |
| "epoch": 3.1989873417721517, | |
| "grad_norm": 0.35877644821741234, | |
| "learning_rate": 1.377649151173163e-05, | |
| "loss": 0.2553, | |
| "step": 788 | |
| }, | |
| { | |
| "epoch": 3.2030379746835442, | |
| "grad_norm": 0.3320817894243063, | |
| "learning_rate": 1.3722575897794181e-05, | |
| "loss": 0.251, | |
| "step": 789 | |
| }, | |
| { | |
| "epoch": 3.2070886075949367, | |
| "grad_norm": 0.35152550235910696, | |
| "learning_rate": 1.3668710841367472e-05, | |
| "loss": 0.2641, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 3.2111392405063293, | |
| "grad_norm": 0.3521510610980603, | |
| "learning_rate": 1.361489677627324e-05, | |
| "loss": 0.2902, | |
| "step": 791 | |
| }, | |
| { | |
| "epoch": 3.2151898734177213, | |
| "grad_norm": 0.31889014123417664, | |
| "learning_rate": 1.3561134135922585e-05, | |
| "loss": 0.277, | |
| "step": 792 | |
| }, | |
| { | |
| "epoch": 3.219240506329114, | |
| "grad_norm": 0.35637823460930973, | |
| "learning_rate": 1.350742335331241e-05, | |
| "loss": 0.2725, | |
| "step": 793 | |
| }, | |
| { | |
| "epoch": 3.2232911392405064, | |
| "grad_norm": 0.35794649484589247, | |
| "learning_rate": 1.345376486102198e-05, | |
| "loss": 0.259, | |
| "step": 794 | |
| }, | |
| { | |
| "epoch": 3.227341772151899, | |
| "grad_norm": 0.344872623380073, | |
| "learning_rate": 1.3400159091209414e-05, | |
| "loss": 0.2506, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 3.231392405063291, | |
| "grad_norm": 0.3396139615584749, | |
| "learning_rate": 1.3346606475608216e-05, | |
| "loss": 0.2511, | |
| "step": 796 | |
| }, | |
| { | |
| "epoch": 3.2354430379746835, | |
| "grad_norm": 0.3455099205873177, | |
| "learning_rate": 1.3293107445523781e-05, | |
| "loss": 0.2521, | |
| "step": 797 | |
| }, | |
| { | |
| "epoch": 3.239493670886076, | |
| "grad_norm": 0.3354503393861874, | |
| "learning_rate": 1.3239662431829949e-05, | |
| "loss": 0.2624, | |
| "step": 798 | |
| }, | |
| { | |
| "epoch": 3.2435443037974685, | |
| "grad_norm": 0.3485638615335572, | |
| "learning_rate": 1.3186271864965509e-05, | |
| "loss": 0.2509, | |
| "step": 799 | |
| }, | |
| { | |
| "epoch": 3.2475949367088606, | |
| "grad_norm": 0.335781505732869, | |
| "learning_rate": 1.3132936174930756e-05, | |
| "loss": 0.2553, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 3.251645569620253, | |
| "grad_norm": 0.3338145595130177, | |
| "learning_rate": 1.3079655791283995e-05, | |
| "loss": 0.2657, | |
| "step": 801 | |
| }, | |
| { | |
| "epoch": 3.2556962025316456, | |
| "grad_norm": 0.34741127195352417, | |
| "learning_rate": 1.3026431143138108e-05, | |
| "loss": 0.2451, | |
| "step": 802 | |
| }, | |
| { | |
| "epoch": 3.259746835443038, | |
| "grad_norm": 0.3285673822019449, | |
| "learning_rate": 1.2973262659157114e-05, | |
| "loss": 0.2752, | |
| "step": 803 | |
| }, | |
| { | |
| "epoch": 3.26379746835443, | |
| "grad_norm": 0.33388344256167163, | |
| "learning_rate": 1.2920150767552651e-05, | |
| "loss": 0.2381, | |
| "step": 804 | |
| }, | |
| { | |
| "epoch": 3.2678481012658227, | |
| "grad_norm": 0.33142962343022947, | |
| "learning_rate": 1.2867095896080607e-05, | |
| "loss": 0.2349, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 3.271898734177215, | |
| "grad_norm": 0.3190754524680187, | |
| "learning_rate": 1.2814098472037612e-05, | |
| "loss": 0.2821, | |
| "step": 806 | |
| }, | |
| { | |
| "epoch": 3.2759493670886077, | |
| "grad_norm": 0.32096090705479924, | |
| "learning_rate": 1.276115892225764e-05, | |
| "loss": 0.2785, | |
| "step": 807 | |
| }, | |
| { | |
| "epoch": 3.2800000000000002, | |
| "grad_norm": 0.3280607006667989, | |
| "learning_rate": 1.2708277673108555e-05, | |
| "loss": 0.2513, | |
| "step": 808 | |
| }, | |
| { | |
| "epoch": 3.2840506329113923, | |
| "grad_norm": 0.35596552114047153, | |
| "learning_rate": 1.2655455150488649e-05, | |
| "loss": 0.277, | |
| "step": 809 | |
| }, | |
| { | |
| "epoch": 3.288101265822785, | |
| "grad_norm": 0.9192821929072098, | |
| "learning_rate": 1.2602691779823272e-05, | |
| "loss": 0.2694, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 3.2921518987341774, | |
| "grad_norm": 0.36768239311450734, | |
| "learning_rate": 1.2549987986061355e-05, | |
| "loss": 0.2695, | |
| "step": 811 | |
| }, | |
| { | |
| "epoch": 3.2962025316455694, | |
| "grad_norm": 0.3247528593154617, | |
| "learning_rate": 1.2497344193672005e-05, | |
| "loss": 0.2707, | |
| "step": 812 | |
| }, | |
| { | |
| "epoch": 3.300253164556962, | |
| "grad_norm": 0.35341545440612115, | |
| "learning_rate": 1.2444760826641092e-05, | |
| "loss": 0.2349, | |
| "step": 813 | |
| }, | |
| { | |
| "epoch": 3.3043037974683545, | |
| "grad_norm": 0.3324977073379, | |
| "learning_rate": 1.2392238308467817e-05, | |
| "loss": 0.2457, | |
| "step": 814 | |
| }, | |
| { | |
| "epoch": 3.308354430379747, | |
| "grad_norm": 0.3444773402103227, | |
| "learning_rate": 1.2339777062161326e-05, | |
| "loss": 0.2381, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 3.3124050632911395, | |
| "grad_norm": 0.3265785857952916, | |
| "learning_rate": 1.2287377510237293e-05, | |
| "loss": 0.2604, | |
| "step": 816 | |
| }, | |
| { | |
| "epoch": 3.3164556962025316, | |
| "grad_norm": 0.3520298512170093, | |
| "learning_rate": 1.2235040074714488e-05, | |
| "loss": 0.2551, | |
| "step": 817 | |
| }, | |
| { | |
| "epoch": 3.320506329113924, | |
| "grad_norm": 0.33967816740067275, | |
| "learning_rate": 1.2182765177111434e-05, | |
| "loss": 0.2635, | |
| "step": 818 | |
| }, | |
| { | |
| "epoch": 3.3245569620253166, | |
| "grad_norm": 0.3469211593077021, | |
| "learning_rate": 1.213055323844297e-05, | |
| "loss": 0.2406, | |
| "step": 819 | |
| }, | |
| { | |
| "epoch": 3.3286075949367087, | |
| "grad_norm": 0.36409982342407027, | |
| "learning_rate": 1.2078404679216864e-05, | |
| "loss": 0.2965, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 3.332658227848101, | |
| "grad_norm": 0.35445620973485914, | |
| "learning_rate": 1.2026319919430458e-05, | |
| "loss": 0.2779, | |
| "step": 821 | |
| }, | |
| { | |
| "epoch": 3.3367088607594937, | |
| "grad_norm": 0.3467142083036966, | |
| "learning_rate": 1.1974299378567227e-05, | |
| "loss": 0.2658, | |
| "step": 822 | |
| }, | |
| { | |
| "epoch": 3.340759493670886, | |
| "grad_norm": 0.3225049438747772, | |
| "learning_rate": 1.1922343475593462e-05, | |
| "loss": 0.2956, | |
| "step": 823 | |
| }, | |
| { | |
| "epoch": 3.3448101265822787, | |
| "grad_norm": 0.33111998498364537, | |
| "learning_rate": 1.187045262895488e-05, | |
| "loss": 0.2789, | |
| "step": 824 | |
| }, | |
| { | |
| "epoch": 3.348860759493671, | |
| "grad_norm": 0.3422646532384314, | |
| "learning_rate": 1.1818627256573203e-05, | |
| "loss": 0.2603, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 3.3529113924050633, | |
| "grad_norm": 0.35182123732980874, | |
| "learning_rate": 1.1766867775842864e-05, | |
| "loss": 0.2648, | |
| "step": 826 | |
| }, | |
| { | |
| "epoch": 3.356962025316456, | |
| "grad_norm": 0.3404137775972387, | |
| "learning_rate": 1.1715174603627615e-05, | |
| "loss": 0.2441, | |
| "step": 827 | |
| }, | |
| { | |
| "epoch": 3.361012658227848, | |
| "grad_norm": 0.322826275731296, | |
| "learning_rate": 1.1663548156257147e-05, | |
| "loss": 0.2797, | |
| "step": 828 | |
| }, | |
| { | |
| "epoch": 3.3650632911392404, | |
| "grad_norm": 0.32936390271439575, | |
| "learning_rate": 1.161198884952377e-05, | |
| "loss": 0.2516, | |
| "step": 829 | |
| }, | |
| { | |
| "epoch": 3.369113924050633, | |
| "grad_norm": 0.3203961294386798, | |
| "learning_rate": 1.1560497098679056e-05, | |
| "loss": 0.2557, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 3.3731645569620254, | |
| "grad_norm": 0.3418066517199913, | |
| "learning_rate": 1.1509073318430479e-05, | |
| "loss": 0.2642, | |
| "step": 831 | |
| }, | |
| { | |
| "epoch": 3.377215189873418, | |
| "grad_norm": 0.3429738693246749, | |
| "learning_rate": 1.1457717922938116e-05, | |
| "loss": 0.2606, | |
| "step": 832 | |
| }, | |
| { | |
| "epoch": 3.38126582278481, | |
| "grad_norm": 0.3353771093082826, | |
| "learning_rate": 1.1406431325811233e-05, | |
| "loss": 0.2814, | |
| "step": 833 | |
| }, | |
| { | |
| "epoch": 3.3853164556962025, | |
| "grad_norm": 0.33381460248100353, | |
| "learning_rate": 1.135521394010506e-05, | |
| "loss": 0.2544, | |
| "step": 834 | |
| }, | |
| { | |
| "epoch": 3.389367088607595, | |
| "grad_norm": 0.33260110677968546, | |
| "learning_rate": 1.1304066178317367e-05, | |
| "loss": 0.255, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 3.393417721518987, | |
| "grad_norm": 0.3440022331910399, | |
| "learning_rate": 1.1252988452385199e-05, | |
| "loss": 0.2536, | |
| "step": 836 | |
| }, | |
| { | |
| "epoch": 3.3974683544303796, | |
| "grad_norm": 0.36983894279927965, | |
| "learning_rate": 1.1201981173681536e-05, | |
| "loss": 0.2749, | |
| "step": 837 | |
| }, | |
| { | |
| "epoch": 3.401518987341772, | |
| "grad_norm": 0.32157201310196054, | |
| "learning_rate": 1.1151044753011991e-05, | |
| "loss": 0.2822, | |
| "step": 838 | |
| }, | |
| { | |
| "epoch": 3.4055696202531647, | |
| "grad_norm": 0.3609607935290914, | |
| "learning_rate": 1.1100179600611491e-05, | |
| "loss": 0.2402, | |
| "step": 839 | |
| }, | |
| { | |
| "epoch": 3.409620253164557, | |
| "grad_norm": 0.33477461489221555, | |
| "learning_rate": 1.1049386126140985e-05, | |
| "loss": 0.2589, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 3.4136708860759493, | |
| "grad_norm": 0.34080248593658424, | |
| "learning_rate": 1.0998664738684128e-05, | |
| "loss": 0.2486, | |
| "step": 841 | |
| }, | |
| { | |
| "epoch": 3.4177215189873418, | |
| "grad_norm": 0.3309446565199687, | |
| "learning_rate": 1.0948015846744e-05, | |
| "loss": 0.2688, | |
| "step": 842 | |
| }, | |
| { | |
| "epoch": 3.4217721518987343, | |
| "grad_norm": 0.3143740414721356, | |
| "learning_rate": 1.0897439858239832e-05, | |
| "loss": 0.2744, | |
| "step": 843 | |
| }, | |
| { | |
| "epoch": 3.4258227848101264, | |
| "grad_norm": 0.3314178034728436, | |
| "learning_rate": 1.0846937180503652e-05, | |
| "loss": 0.2546, | |
| "step": 844 | |
| }, | |
| { | |
| "epoch": 3.429873417721519, | |
| "grad_norm": 0.3287977614860553, | |
| "learning_rate": 1.0796508220277117e-05, | |
| "loss": 0.2458, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 3.4339240506329114, | |
| "grad_norm": 0.3355423312588682, | |
| "learning_rate": 1.0746153383708107e-05, | |
| "loss": 0.2739, | |
| "step": 846 | |
| }, | |
| { | |
| "epoch": 3.437974683544304, | |
| "grad_norm": 0.33354755684911136, | |
| "learning_rate": 1.0695873076347579e-05, | |
| "loss": 0.2607, | |
| "step": 847 | |
| }, | |
| { | |
| "epoch": 3.4420253164556964, | |
| "grad_norm": 0.33889693383208186, | |
| "learning_rate": 1.0645667703146205e-05, | |
| "loss": 0.241, | |
| "step": 848 | |
| }, | |
| { | |
| "epoch": 3.4460759493670885, | |
| "grad_norm": 0.33233405465158594, | |
| "learning_rate": 1.0595537668451161e-05, | |
| "loss": 0.243, | |
| "step": 849 | |
| }, | |
| { | |
| "epoch": 3.450126582278481, | |
| "grad_norm": 0.3313170908696168, | |
| "learning_rate": 1.0545483376002854e-05, | |
| "loss": 0.2615, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 3.4541772151898735, | |
| "grad_norm": 0.3505304846832336, | |
| "learning_rate": 1.0495505228931676e-05, | |
| "loss": 0.2358, | |
| "step": 851 | |
| }, | |
| { | |
| "epoch": 3.4582278481012656, | |
| "grad_norm": 0.31732070786780747, | |
| "learning_rate": 1.044560362975474e-05, | |
| "loss": 0.2675, | |
| "step": 852 | |
| }, | |
| { | |
| "epoch": 3.462278481012658, | |
| "grad_norm": 0.3310540127759703, | |
| "learning_rate": 1.0395778980372695e-05, | |
| "loss": 0.2547, | |
| "step": 853 | |
| }, | |
| { | |
| "epoch": 3.4663291139240506, | |
| "grad_norm": 0.3639672971964621, | |
| "learning_rate": 1.0346031682066381e-05, | |
| "loss": 0.2873, | |
| "step": 854 | |
| }, | |
| { | |
| "epoch": 3.470379746835443, | |
| "grad_norm": 0.3362387765306904, | |
| "learning_rate": 1.0296362135493724e-05, | |
| "loss": 0.2638, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 3.4744303797468357, | |
| "grad_norm": 0.3207178162833066, | |
| "learning_rate": 1.0246770740686422e-05, | |
| "loss": 0.2733, | |
| "step": 856 | |
| }, | |
| { | |
| "epoch": 3.4784810126582277, | |
| "grad_norm": 0.3376652406664714, | |
| "learning_rate": 1.0197257897046743e-05, | |
| "loss": 0.2526, | |
| "step": 857 | |
| }, | |
| { | |
| "epoch": 3.4825316455696202, | |
| "grad_norm": 0.35513470677101816, | |
| "learning_rate": 1.014782400334433e-05, | |
| "loss": 0.2546, | |
| "step": 858 | |
| }, | |
| { | |
| "epoch": 3.4865822784810128, | |
| "grad_norm": 0.3425472724515098, | |
| "learning_rate": 1.009846945771296e-05, | |
| "loss": 0.264, | |
| "step": 859 | |
| }, | |
| { | |
| "epoch": 3.490632911392405, | |
| "grad_norm": 0.32566631763620035, | |
| "learning_rate": 1.0049194657647363e-05, | |
| "loss": 0.2339, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 3.4946835443037974, | |
| "grad_norm": 0.3336027231848593, | |
| "learning_rate": 1.0000000000000006e-05, | |
| "loss": 0.2369, | |
| "step": 861 | |
| }, | |
| { | |
| "epoch": 3.49873417721519, | |
| "grad_norm": 0.3415437067338059, | |
| "learning_rate": 9.950885880977891e-06, | |
| "loss": 0.2472, | |
| "step": 862 | |
| }, | |
| { | |
| "epoch": 3.5027848101265824, | |
| "grad_norm": 0.3415662529897534, | |
| "learning_rate": 9.901852696139382e-06, | |
| "loss": 0.2847, | |
| "step": 863 | |
| }, | |
| { | |
| "epoch": 3.506835443037975, | |
| "grad_norm": 0.3400580440728001, | |
| "learning_rate": 9.852900840391027e-06, | |
| "loss": 0.2897, | |
| "step": 864 | |
| }, | |
| { | |
| "epoch": 3.510886075949367, | |
| "grad_norm": 0.3211091122594664, | |
| "learning_rate": 9.804030707984313e-06, | |
| "loss": 0.2555, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 3.5149367088607595, | |
| "grad_norm": 0.32669894847812564, | |
| "learning_rate": 9.755242692512599e-06, | |
| "loss": 0.2479, | |
| "step": 866 | |
| }, | |
| { | |
| "epoch": 3.518987341772152, | |
| "grad_norm": 0.32826980976882586, | |
| "learning_rate": 9.70653718690782e-06, | |
| "loss": 0.2808, | |
| "step": 867 | |
| }, | |
| { | |
| "epoch": 3.523037974683544, | |
| "grad_norm": 0.32101754282768347, | |
| "learning_rate": 9.657914583437454e-06, | |
| "loss": 0.2276, | |
| "step": 868 | |
| }, | |
| { | |
| "epoch": 3.5270886075949366, | |
| "grad_norm": 0.3222837954142063, | |
| "learning_rate": 9.609375273701246e-06, | |
| "loss": 0.2601, | |
| "step": 869 | |
| }, | |
| { | |
| "epoch": 3.531139240506329, | |
| "grad_norm": 0.31807716383970397, | |
| "learning_rate": 9.560919648628133e-06, | |
| "loss": 0.2401, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 3.5351898734177216, | |
| "grad_norm": 0.3463343759520837, | |
| "learning_rate": 9.512548098473047e-06, | |
| "loss": 0.2466, | |
| "step": 871 | |
| }, | |
| { | |
| "epoch": 3.539240506329114, | |
| "grad_norm": 0.3263269928201352, | |
| "learning_rate": 9.464261012813825e-06, | |
| "loss": 0.2763, | |
| "step": 872 | |
| }, | |
| { | |
| "epoch": 3.543291139240506, | |
| "grad_norm": 7.034552630776023, | |
| "learning_rate": 9.416058780547987e-06, | |
| "loss": 0.405, | |
| "step": 873 | |
| }, | |
| { | |
| "epoch": 3.5473417721518987, | |
| "grad_norm": 0.3295153865359594, | |
| "learning_rate": 9.367941789889714e-06, | |
| "loss": 0.2442, | |
| "step": 874 | |
| }, | |
| { | |
| "epoch": 3.5513924050632912, | |
| "grad_norm": 0.3418933618238107, | |
| "learning_rate": 9.319910428366607e-06, | |
| "loss": 0.265, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 3.5554430379746833, | |
| "grad_norm": 0.3328151183992317, | |
| "learning_rate": 9.271965082816667e-06, | |
| "loss": 0.248, | |
| "step": 876 | |
| }, | |
| { | |
| "epoch": 3.559493670886076, | |
| "grad_norm": 0.3306094626190365, | |
| "learning_rate": 9.224106139385111e-06, | |
| "loss": 0.2418, | |
| "step": 877 | |
| }, | |
| { | |
| "epoch": 3.5635443037974683, | |
| "grad_norm": 0.32806962845348664, | |
| "learning_rate": 9.176333983521291e-06, | |
| "loss": 0.2603, | |
| "step": 878 | |
| }, | |
| { | |
| "epoch": 3.567594936708861, | |
| "grad_norm": 0.3301275003671268, | |
| "learning_rate": 9.12864899997558e-06, | |
| "loss": 0.2409, | |
| "step": 879 | |
| }, | |
| { | |
| "epoch": 3.5716455696202534, | |
| "grad_norm": 0.329848726269475, | |
| "learning_rate": 9.08105157279628e-06, | |
| "loss": 0.256, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 3.5756962025316454, | |
| "grad_norm": 0.3395409258655622, | |
| "learning_rate": 9.03354208532653e-06, | |
| "loss": 0.2775, | |
| "step": 881 | |
| }, | |
| { | |
| "epoch": 3.579746835443038, | |
| "grad_norm": 0.33217973977294996, | |
| "learning_rate": 8.986120920201205e-06, | |
| "loss": 0.2466, | |
| "step": 882 | |
| }, | |
| { | |
| "epoch": 3.5837974683544305, | |
| "grad_norm": 0.32309465516487984, | |
| "learning_rate": 8.938788459343852e-06, | |
| "loss": 0.2562, | |
| "step": 883 | |
| }, | |
| { | |
| "epoch": 3.5878481012658225, | |
| "grad_norm": 0.3036419039102078, | |
| "learning_rate": 8.8915450839636e-06, | |
| "loss": 0.2469, | |
| "step": 884 | |
| }, | |
| { | |
| "epoch": 3.591898734177215, | |
| "grad_norm": 0.31160634447617674, | |
| "learning_rate": 8.844391174552116e-06, | |
| "loss": 0.2474, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 3.5959493670886076, | |
| "grad_norm": 0.33212298440016336, | |
| "learning_rate": 8.797327110880479e-06, | |
| "loss": 0.2795, | |
| "step": 886 | |
| }, | |
| { | |
| "epoch": 3.6, | |
| "grad_norm": 0.3221819310821201, | |
| "learning_rate": 8.750353271996206e-06, | |
| "loss": 0.2505, | |
| "step": 887 | |
| }, | |
| { | |
| "epoch": 3.6040506329113926, | |
| "grad_norm": 0.34609774776592417, | |
| "learning_rate": 8.703470036220132e-06, | |
| "loss": 0.2384, | |
| "step": 888 | |
| }, | |
| { | |
| "epoch": 3.6081012658227847, | |
| "grad_norm": 0.3266750966100086, | |
| "learning_rate": 8.656677781143394e-06, | |
| "loss": 0.2479, | |
| "step": 889 | |
| }, | |
| { | |
| "epoch": 3.612151898734177, | |
| "grad_norm": 0.32239102147594784, | |
| "learning_rate": 8.609976883624377e-06, | |
| "loss": 0.2444, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 3.6162025316455697, | |
| "grad_norm": 0.31854634569233964, | |
| "learning_rate": 8.563367719785698e-06, | |
| "loss": 0.2577, | |
| "step": 891 | |
| }, | |
| { | |
| "epoch": 3.620253164556962, | |
| "grad_norm": 0.3242615893314945, | |
| "learning_rate": 8.516850665011138e-06, | |
| "loss": 0.2468, | |
| "step": 892 | |
| }, | |
| { | |
| "epoch": 3.6243037974683543, | |
| "grad_norm": 0.3322918424139079, | |
| "learning_rate": 8.47042609394269e-06, | |
| "loss": 0.2737, | |
| "step": 893 | |
| }, | |
| { | |
| "epoch": 3.628354430379747, | |
| "grad_norm": 0.3166888744758024, | |
| "learning_rate": 8.424094380477432e-06, | |
| "loss": 0.2607, | |
| "step": 894 | |
| }, | |
| { | |
| "epoch": 3.6324050632911393, | |
| "grad_norm": 0.31179191146796253, | |
| "learning_rate": 8.37785589776465e-06, | |
| "loss": 0.2693, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 3.636455696202532, | |
| "grad_norm": 0.3291715961451965, | |
| "learning_rate": 8.331711018202694e-06, | |
| "loss": 0.2604, | |
| "step": 896 | |
| }, | |
| { | |
| "epoch": 3.640506329113924, | |
| "grad_norm": 0.3346112267939724, | |
| "learning_rate": 8.285660113436104e-06, | |
| "loss": 0.2544, | |
| "step": 897 | |
| }, | |
| { | |
| "epoch": 3.6445569620253164, | |
| "grad_norm": 0.33824852742384787, | |
| "learning_rate": 8.239703554352527e-06, | |
| "loss": 0.2615, | |
| "step": 898 | |
| }, | |
| { | |
| "epoch": 3.648607594936709, | |
| "grad_norm": 0.3290725811707173, | |
| "learning_rate": 8.193841711079775e-06, | |
| "loss": 0.2687, | |
| "step": 899 | |
| }, | |
| { | |
| "epoch": 3.652658227848101, | |
| "grad_norm": 0.33961955653639647, | |
| "learning_rate": 8.148074952982828e-06, | |
| "loss": 0.2717, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 3.6567088607594935, | |
| "grad_norm": 0.3115553425491023, | |
| "learning_rate": 8.102403648660859e-06, | |
| "loss": 0.2724, | |
| "step": 901 | |
| }, | |
| { | |
| "epoch": 3.660759493670886, | |
| "grad_norm": 0.3036572787445494, | |
| "learning_rate": 8.056828165944282e-06, | |
| "loss": 0.2709, | |
| "step": 902 | |
| }, | |
| { | |
| "epoch": 3.6648101265822786, | |
| "grad_norm": 0.328819526216983, | |
| "learning_rate": 8.011348871891762e-06, | |
| "loss": 0.2665, | |
| "step": 903 | |
| }, | |
| { | |
| "epoch": 3.668860759493671, | |
| "grad_norm": 0.32333906708281607, | |
| "learning_rate": 7.965966132787287e-06, | |
| "loss": 0.2449, | |
| "step": 904 | |
| }, | |
| { | |
| "epoch": 3.672911392405063, | |
| "grad_norm": 0.3352837269176696, | |
| "learning_rate": 7.920680314137189e-06, | |
| "loss": 0.2705, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 3.6769620253164557, | |
| "grad_norm": 0.31112564557052813, | |
| "learning_rate": 7.875491780667246e-06, | |
| "loss": 0.2593, | |
| "step": 906 | |
| }, | |
| { | |
| "epoch": 3.681012658227848, | |
| "grad_norm": 0.3164141705824694, | |
| "learning_rate": 7.830400896319667e-06, | |
| "loss": 0.2744, | |
| "step": 907 | |
| }, | |
| { | |
| "epoch": 3.6850632911392402, | |
| "grad_norm": 0.3191144278439063, | |
| "learning_rate": 7.785408024250259e-06, | |
| "loss": 0.2722, | |
| "step": 908 | |
| }, | |
| { | |
| "epoch": 3.6891139240506328, | |
| "grad_norm": 0.32986802592082554, | |
| "learning_rate": 7.74051352682542e-06, | |
| "loss": 0.2406, | |
| "step": 909 | |
| }, | |
| { | |
| "epoch": 3.6931645569620253, | |
| "grad_norm": 0.3184330066566083, | |
| "learning_rate": 7.695717765619257e-06, | |
| "loss": 0.2715, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 3.697215189873418, | |
| "grad_norm": 0.30090760551445567, | |
| "learning_rate": 7.651021101410673e-06, | |
| "loss": 0.28, | |
| "step": 911 | |
| }, | |
| { | |
| "epoch": 3.7012658227848103, | |
| "grad_norm": 0.30533829699862847, | |
| "learning_rate": 7.606423894180464e-06, | |
| "loss": 0.2798, | |
| "step": 912 | |
| }, | |
| { | |
| "epoch": 3.7053164556962024, | |
| "grad_norm": 0.31699711436663564, | |
| "learning_rate": 7.56192650310839e-06, | |
| "loss": 0.2754, | |
| "step": 913 | |
| }, | |
| { | |
| "epoch": 3.709367088607595, | |
| "grad_norm": 0.32482823891923296, | |
| "learning_rate": 7.517529286570349e-06, | |
| "loss": 0.254, | |
| "step": 914 | |
| }, | |
| { | |
| "epoch": 3.7134177215189874, | |
| "grad_norm": 0.31427595748544096, | |
| "learning_rate": 7.473232602135387e-06, | |
| "loss": 0.253, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 3.7174683544303795, | |
| "grad_norm": 0.32045818822986416, | |
| "learning_rate": 7.429036806562935e-06, | |
| "loss": 0.2606, | |
| "step": 916 | |
| }, | |
| { | |
| "epoch": 3.721518987341772, | |
| "grad_norm": 0.3303491268711638, | |
| "learning_rate": 7.3849422557998455e-06, | |
| "loss": 0.2563, | |
| "step": 917 | |
| }, | |
| { | |
| "epoch": 3.7255696202531645, | |
| "grad_norm": 0.33649458908875574, | |
| "learning_rate": 7.340949304977567e-06, | |
| "loss": 0.2714, | |
| "step": 918 | |
| }, | |
| { | |
| "epoch": 3.729620253164557, | |
| "grad_norm": 0.3142733069089562, | |
| "learning_rate": 7.297058308409282e-06, | |
| "loss": 0.2639, | |
| "step": 919 | |
| }, | |
| { | |
| "epoch": 3.7336708860759495, | |
| "grad_norm": 0.33326018461813206, | |
| "learning_rate": 7.25326961958704e-06, | |
| "loss": 0.2662, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 3.7377215189873416, | |
| "grad_norm": 0.30958204640505665, | |
| "learning_rate": 7.209583591178921e-06, | |
| "loss": 0.2721, | |
| "step": 921 | |
| }, | |
| { | |
| "epoch": 3.741772151898734, | |
| "grad_norm": 0.3224744714892965, | |
| "learning_rate": 7.1660005750261925e-06, | |
| "loss": 0.2346, | |
| "step": 922 | |
| }, | |
| { | |
| "epoch": 3.7458227848101266, | |
| "grad_norm": 0.32676212409422484, | |
| "learning_rate": 7.1225209221404765e-06, | |
| "loss": 0.2533, | |
| "step": 923 | |
| }, | |
| { | |
| "epoch": 3.7498734177215187, | |
| "grad_norm": 0.32665020044605836, | |
| "learning_rate": 7.079144982700909e-06, | |
| "loss": 0.2515, | |
| "step": 924 | |
| }, | |
| { | |
| "epoch": 3.7539240506329112, | |
| "grad_norm": 0.3128878165016944, | |
| "learning_rate": 7.0358731060513695e-06, | |
| "loss": 0.2828, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 3.7579746835443038, | |
| "grad_norm": 0.312742438714276, | |
| "learning_rate": 6.99270564069757e-06, | |
| "loss": 0.2415, | |
| "step": 926 | |
| }, | |
| { | |
| "epoch": 3.7620253164556963, | |
| "grad_norm": 0.33888126120169765, | |
| "learning_rate": 6.949642934304375e-06, | |
| "loss": 0.2622, | |
| "step": 927 | |
| }, | |
| { | |
| "epoch": 3.766075949367089, | |
| "grad_norm": 0.3341788676777151, | |
| "learning_rate": 6.906685333692871e-06, | |
| "loss": 0.2522, | |
| "step": 928 | |
| }, | |
| { | |
| "epoch": 3.770126582278481, | |
| "grad_norm": 0.32306480628745643, | |
| "learning_rate": 6.86383318483769e-06, | |
| "loss": 0.2538, | |
| "step": 929 | |
| }, | |
| { | |
| "epoch": 3.7741772151898734, | |
| "grad_norm": 0.35206994963337995, | |
| "learning_rate": 6.821086832864139e-06, | |
| "loss": 0.2522, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 3.778227848101266, | |
| "grad_norm": 0.3325883610549248, | |
| "learning_rate": 6.77844662204546e-06, | |
| "loss": 0.2678, | |
| "step": 931 | |
| }, | |
| { | |
| "epoch": 3.782278481012658, | |
| "grad_norm": 0.3247730800981493, | |
| "learning_rate": 6.7359128958000455e-06, | |
| "loss": 0.2559, | |
| "step": 932 | |
| }, | |
| { | |
| "epoch": 3.7863291139240505, | |
| "grad_norm": 0.31778710489835665, | |
| "learning_rate": 6.693485996688695e-06, | |
| "loss": 0.2642, | |
| "step": 933 | |
| }, | |
| { | |
| "epoch": 3.790379746835443, | |
| "grad_norm": 0.31700293089972714, | |
| "learning_rate": 6.651166266411801e-06, | |
| "loss": 0.2557, | |
| "step": 934 | |
| }, | |
| { | |
| "epoch": 3.7944303797468355, | |
| "grad_norm": 0.335717004806811, | |
| "learning_rate": 6.6089540458066725e-06, | |
| "loss": 0.2755, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 3.798481012658228, | |
| "grad_norm": 0.3296377953470558, | |
| "learning_rate": 6.566849674844711e-06, | |
| "loss": 0.2405, | |
| "step": 936 | |
| }, | |
| { | |
| "epoch": 3.80253164556962, | |
| "grad_norm": 0.3165897833899535, | |
| "learning_rate": 6.524853492628747e-06, | |
| "loss": 0.2364, | |
| "step": 937 | |
| }, | |
| { | |
| "epoch": 3.8065822784810126, | |
| "grad_norm": 0.324184524462777, | |
| "learning_rate": 6.4829658373902536e-06, | |
| "loss": 0.2644, | |
| "step": 938 | |
| }, | |
| { | |
| "epoch": 3.810632911392405, | |
| "grad_norm": 0.3215291068279111, | |
| "learning_rate": 6.441187046486648e-06, | |
| "loss": 0.244, | |
| "step": 939 | |
| }, | |
| { | |
| "epoch": 3.814683544303797, | |
| "grad_norm": 0.3196898873599333, | |
| "learning_rate": 6.399517456398567e-06, | |
| "loss": 0.2422, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 3.81873417721519, | |
| "grad_norm": 0.3248229612444475, | |
| "learning_rate": 6.357957402727164e-06, | |
| "loss": 0.2479, | |
| "step": 941 | |
| }, | |
| { | |
| "epoch": 3.8227848101265822, | |
| "grad_norm": 0.3179200740314193, | |
| "learning_rate": 6.316507220191395e-06, | |
| "loss": 0.2592, | |
| "step": 942 | |
| }, | |
| { | |
| "epoch": 3.8268354430379747, | |
| "grad_norm": 0.31487350547600124, | |
| "learning_rate": 6.275167242625331e-06, | |
| "loss": 0.2515, | |
| "step": 943 | |
| }, | |
| { | |
| "epoch": 3.8308860759493673, | |
| "grad_norm": 0.31793330076884496, | |
| "learning_rate": 6.233937802975471e-06, | |
| "loss": 0.2594, | |
| "step": 944 | |
| }, | |
| { | |
| "epoch": 3.8349367088607593, | |
| "grad_norm": 0.3139751020423621, | |
| "learning_rate": 6.192819233298046e-06, | |
| "loss": 0.2705, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 3.838987341772152, | |
| "grad_norm": 0.31683723918513695, | |
| "learning_rate": 6.151811864756383e-06, | |
| "loss": 0.2649, | |
| "step": 946 | |
| }, | |
| { | |
| "epoch": 3.8430379746835444, | |
| "grad_norm": 0.309998678656485, | |
| "learning_rate": 6.1109160276181655e-06, | |
| "loss": 0.2342, | |
| "step": 947 | |
| }, | |
| { | |
| "epoch": 3.847088607594937, | |
| "grad_norm": 0.3241766729835554, | |
| "learning_rate": 6.070132051252868e-06, | |
| "loss": 0.265, | |
| "step": 948 | |
| }, | |
| { | |
| "epoch": 3.8511392405063294, | |
| "grad_norm": 0.3185372798053361, | |
| "learning_rate": 6.0294602641290034e-06, | |
| "loss": 0.2619, | |
| "step": 949 | |
| }, | |
| { | |
| "epoch": 3.8551898734177215, | |
| "grad_norm": 0.30812637821772376, | |
| "learning_rate": 5.988900993811575e-06, | |
| "loss": 0.2808, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 3.859240506329114, | |
| "grad_norm": 0.3158308701665354, | |
| "learning_rate": 5.948454566959363e-06, | |
| "loss": 0.2531, | |
| "step": 951 | |
| }, | |
| { | |
| "epoch": 3.8632911392405065, | |
| "grad_norm": 0.318019991207333, | |
| "learning_rate": 5.908121309322328e-06, | |
| "loss": 0.2519, | |
| "step": 952 | |
| }, | |
| { | |
| "epoch": 3.8673417721518986, | |
| "grad_norm": 0.31150605253875063, | |
| "learning_rate": 5.867901545738976e-06, | |
| "loss": 0.2517, | |
| "step": 953 | |
| }, | |
| { | |
| "epoch": 3.871392405063291, | |
| "grad_norm": 0.3261892215969753, | |
| "learning_rate": 5.827795600133774e-06, | |
| "loss": 0.2687, | |
| "step": 954 | |
| }, | |
| { | |
| "epoch": 3.8754430379746836, | |
| "grad_norm": 0.31779357618329623, | |
| "learning_rate": 5.787803795514466e-06, | |
| "loss": 0.261, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 3.879493670886076, | |
| "grad_norm": 0.3231061100526691, | |
| "learning_rate": 5.747926453969576e-06, | |
| "loss": 0.2542, | |
| "step": 956 | |
| }, | |
| { | |
| "epoch": 3.8835443037974686, | |
| "grad_norm": 0.3229436760909751, | |
| "learning_rate": 5.708163896665708e-06, | |
| "loss": 0.2648, | |
| "step": 957 | |
| }, | |
| { | |
| "epoch": 3.8875949367088607, | |
| "grad_norm": 0.31552205593922894, | |
| "learning_rate": 5.668516443845047e-06, | |
| "loss": 0.2771, | |
| "step": 958 | |
| }, | |
| { | |
| "epoch": 3.891645569620253, | |
| "grad_norm": 0.31422354392116614, | |
| "learning_rate": 5.6289844148227225e-06, | |
| "loss": 0.2554, | |
| "step": 959 | |
| }, | |
| { | |
| "epoch": 3.8956962025316457, | |
| "grad_norm": 0.31696782828662334, | |
| "learning_rate": 5.5895681279842615e-06, | |
| "loss": 0.2726, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 3.899746835443038, | |
| "grad_norm": 0.31732079090620985, | |
| "learning_rate": 5.550267900783019e-06, | |
| "loss": 0.255, | |
| "step": 961 | |
| }, | |
| { | |
| "epoch": 3.9037974683544303, | |
| "grad_norm": 0.3136492167991513, | |
| "learning_rate": 5.511084049737623e-06, | |
| "loss": 0.2592, | |
| "step": 962 | |
| }, | |
| { | |
| "epoch": 3.907848101265823, | |
| "grad_norm": 0.30778638520712104, | |
| "learning_rate": 5.4720168904294215e-06, | |
| "loss": 0.2348, | |
| "step": 963 | |
| }, | |
| { | |
| "epoch": 3.9118987341772153, | |
| "grad_norm": 0.3379906949877354, | |
| "learning_rate": 5.433066737499948e-06, | |
| "loss": 0.2481, | |
| "step": 964 | |
| }, | |
| { | |
| "epoch": 3.915949367088608, | |
| "grad_norm": 0.32021485322534854, | |
| "learning_rate": 5.394233904648376e-06, | |
| "loss": 0.2492, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 3.92, | |
| "grad_norm": 0.30948397177020814, | |
| "learning_rate": 5.355518704628997e-06, | |
| "loss": 0.256, | |
| "step": 966 | |
| }, | |
| { | |
| "epoch": 3.9240506329113924, | |
| "grad_norm": 0.3247904016291046, | |
| "learning_rate": 5.316921449248731e-06, | |
| "loss": 0.2667, | |
| "step": 967 | |
| }, | |
| { | |
| "epoch": 3.928101265822785, | |
| "grad_norm": 0.3210654392478939, | |
| "learning_rate": 5.278442449364538e-06, | |
| "loss": 0.2571, | |
| "step": 968 | |
| }, | |
| { | |
| "epoch": 3.932151898734177, | |
| "grad_norm": 0.31931131942559504, | |
| "learning_rate": 5.240082014881016e-06, | |
| "loss": 0.2349, | |
| "step": 969 | |
| }, | |
| { | |
| "epoch": 3.9362025316455695, | |
| "grad_norm": 0.3155224256183819, | |
| "learning_rate": 5.201840454747822e-06, | |
| "loss": 0.2798, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 3.940253164556962, | |
| "grad_norm": 0.31924396383671494, | |
| "learning_rate": 5.163718076957223e-06, | |
| "loss": 0.2438, | |
| "step": 971 | |
| }, | |
| { | |
| "epoch": 3.9443037974683546, | |
| "grad_norm": 0.3295995565140276, | |
| "learning_rate": 5.125715188541609e-06, | |
| "loss": 0.2418, | |
| "step": 972 | |
| }, | |
| { | |
| "epoch": 3.948354430379747, | |
| "grad_norm": 0.3212526545070598, | |
| "learning_rate": 5.087832095571021e-06, | |
| "loss": 0.2817, | |
| "step": 973 | |
| }, | |
| { | |
| "epoch": 3.952405063291139, | |
| "grad_norm": 0.31368977543121995, | |
| "learning_rate": 5.0500691031506766e-06, | |
| "loss": 0.2433, | |
| "step": 974 | |
| }, | |
| { | |
| "epoch": 3.9564556962025317, | |
| "grad_norm": 0.32020514956326707, | |
| "learning_rate": 5.01242651541854e-06, | |
| "loss": 0.2624, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 3.960506329113924, | |
| "grad_norm": 0.32527273560284603, | |
| "learning_rate": 4.974904635542815e-06, | |
| "loss": 0.266, | |
| "step": 976 | |
| }, | |
| { | |
| "epoch": 3.9645569620253163, | |
| "grad_norm": 0.31623854751394964, | |
| "learning_rate": 4.937503765719582e-06, | |
| "loss": 0.2485, | |
| "step": 977 | |
| }, | |
| { | |
| "epoch": 3.968607594936709, | |
| "grad_norm": 0.32830916613393374, | |
| "learning_rate": 4.900224207170299e-06, | |
| "loss": 0.2518, | |
| "step": 978 | |
| }, | |
| { | |
| "epoch": 3.9726582278481013, | |
| "grad_norm": 0.33429318038902406, | |
| "learning_rate": 4.8630662601394065e-06, | |
| "loss": 0.2708, | |
| "step": 979 | |
| }, | |
| { | |
| "epoch": 3.976708860759494, | |
| "grad_norm": 0.3352124050928428, | |
| "learning_rate": 4.8260302238918995e-06, | |
| "loss": 0.2436, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 3.9807594936708863, | |
| "grad_norm": 0.30952264080535413, | |
| "learning_rate": 4.789116396710924e-06, | |
| "loss": 0.2598, | |
| "step": 981 | |
| }, | |
| { | |
| "epoch": 3.9848101265822784, | |
| "grad_norm": 0.31679038779075197, | |
| "learning_rate": 4.752325075895368e-06, | |
| "loss": 0.2472, | |
| "step": 982 | |
| }, | |
| { | |
| "epoch": 3.988860759493671, | |
| "grad_norm": 0.3258093025382478, | |
| "learning_rate": 4.715656557757473e-06, | |
| "loss": 0.2535, | |
| "step": 983 | |
| }, | |
| { | |
| "epoch": 3.9929113924050634, | |
| "grad_norm": 0.31246740241386456, | |
| "learning_rate": 4.679111137620442e-06, | |
| "loss": 0.2712, | |
| "step": 984 | |
| }, | |
| { | |
| "epoch": 3.9969620253164555, | |
| "grad_norm": 0.31391694986226065, | |
| "learning_rate": 4.6426891098160585e-06, | |
| "loss": 0.2712, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 4.0010126582278485, | |
| "grad_norm": 0.3047249200476688, | |
| "learning_rate": 4.6063907676823474e-06, | |
| "loss": 0.2612, | |
| "step": 986 | |
| }, | |
| { | |
| "epoch": 4.0050632911392405, | |
| "grad_norm": 0.44241131725676663, | |
| "learning_rate": 4.570216403561141e-06, | |
| "loss": 0.197, | |
| "step": 987 | |
| }, | |
| { | |
| "epoch": 4.009113924050633, | |
| "grad_norm": 0.3265966350632202, | |
| "learning_rate": 4.534166308795815e-06, | |
| "loss": 0.2262, | |
| "step": 988 | |
| }, | |
| { | |
| "epoch": 4.013164556962026, | |
| "grad_norm": 0.2738409540777884, | |
| "learning_rate": 4.498240773728859e-06, | |
| "loss": 0.1877, | |
| "step": 989 | |
| }, | |
| { | |
| "epoch": 4.017215189873418, | |
| "grad_norm": 0.3192965982513776, | |
| "learning_rate": 4.462440087699609e-06, | |
| "loss": 0.2029, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 4.02126582278481, | |
| "grad_norm": 0.3467720860959134, | |
| "learning_rate": 4.426764539041861e-06, | |
| "loss": 0.195, | |
| "step": 991 | |
| }, | |
| { | |
| "epoch": 4.025316455696203, | |
| "grad_norm": 0.38845727178963896, | |
| "learning_rate": 4.391214415081582e-06, | |
| "loss": 0.1864, | |
| "step": 992 | |
| }, | |
| { | |
| "epoch": 4.029367088607595, | |
| "grad_norm": 0.40546089067312313, | |
| "learning_rate": 4.355790002134579e-06, | |
| "loss": 0.2231, | |
| "step": 993 | |
| }, | |
| { | |
| "epoch": 4.033417721518988, | |
| "grad_norm": 0.3327439149250507, | |
| "learning_rate": 4.320491585504207e-06, | |
| "loss": 0.2039, | |
| "step": 994 | |
| }, | |
| { | |
| "epoch": 4.03746835443038, | |
| "grad_norm": 0.3238421982711312, | |
| "learning_rate": 4.2853194494790615e-06, | |
| "loss": 0.1985, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 4.041518987341772, | |
| "grad_norm": 0.3401470372748014, | |
| "learning_rate": 4.250273877330691e-06, | |
| "loss": 0.2108, | |
| "step": 996 | |
| }, | |
| { | |
| "epoch": 4.045569620253165, | |
| "grad_norm": 0.3235429429410752, | |
| "learning_rate": 4.215355151311313e-06, | |
| "loss": 0.2019, | |
| "step": 997 | |
| }, | |
| { | |
| "epoch": 4.049620253164557, | |
| "grad_norm": 0.32656106010699437, | |
| "learning_rate": 4.180563552651542e-06, | |
| "loss": 0.1935, | |
| "step": 998 | |
| }, | |
| { | |
| "epoch": 4.053670886075949, | |
| "grad_norm": 0.29409029662095665, | |
| "learning_rate": 4.145899361558147e-06, | |
| "loss": 0.207, | |
| "step": 999 | |
| }, | |
| { | |
| "epoch": 4.057721518987342, | |
| "grad_norm": 0.26806007558622735, | |
| "learning_rate": 4.111362857211738e-06, | |
| "loss": 0.1902, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 4.061772151898734, | |
| "grad_norm": 0.30354614308193606, | |
| "learning_rate": 4.076954317764592e-06, | |
| "loss": 0.196, | |
| "step": 1001 | |
| }, | |
| { | |
| "epoch": 4.065822784810127, | |
| "grad_norm": 0.32670571327561365, | |
| "learning_rate": 4.042674020338335e-06, | |
| "loss": 0.1966, | |
| "step": 1002 | |
| }, | |
| { | |
| "epoch": 4.069873417721519, | |
| "grad_norm": 0.32539979056424606, | |
| "learning_rate": 4.0085222410217835e-06, | |
| "loss": 0.1959, | |
| "step": 1003 | |
| }, | |
| { | |
| "epoch": 4.073924050632911, | |
| "grad_norm": 0.2958411649222161, | |
| "learning_rate": 3.974499254868674e-06, | |
| "loss": 0.1975, | |
| "step": 1004 | |
| }, | |
| { | |
| "epoch": 4.077974683544304, | |
| "grad_norm": 0.273500123498002, | |
| "learning_rate": 3.940605335895451e-06, | |
| "loss": 0.1865, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 4.082025316455696, | |
| "grad_norm": 0.2736251988723321, | |
| "learning_rate": 3.90684075707908e-06, | |
| "loss": 0.197, | |
| "step": 1006 | |
| }, | |
| { | |
| "epoch": 4.086075949367088, | |
| "grad_norm": 0.2598125408563322, | |
| "learning_rate": 3.8732057903548505e-06, | |
| "loss": 0.1662, | |
| "step": 1007 | |
| }, | |
| { | |
| "epoch": 4.090126582278481, | |
| "grad_norm": 0.29269859812924154, | |
| "learning_rate": 3.8397007066141375e-06, | |
| "loss": 0.1888, | |
| "step": 1008 | |
| }, | |
| { | |
| "epoch": 4.094177215189873, | |
| "grad_norm": 0.2858542297591008, | |
| "learning_rate": 3.806325775702304e-06, | |
| "loss": 0.2056, | |
| "step": 1009 | |
| }, | |
| { | |
| "epoch": 4.098227848101266, | |
| "grad_norm": 0.27614885822567387, | |
| "learning_rate": 3.773081266416434e-06, | |
| "loss": 0.1871, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 4.102278481012658, | |
| "grad_norm": 0.26451519490895925, | |
| "learning_rate": 3.739967446503245e-06, | |
| "loss": 0.17, | |
| "step": 1011 | |
| }, | |
| { | |
| "epoch": 4.10632911392405, | |
| "grad_norm": 0.2752467808155445, | |
| "learning_rate": 3.706984582656894e-06, | |
| "loss": 0.203, | |
| "step": 1012 | |
| }, | |
| { | |
| "epoch": 4.110379746835443, | |
| "grad_norm": 0.24611645460669995, | |
| "learning_rate": 3.6741329405168237e-06, | |
| "loss": 0.2059, | |
| "step": 1013 | |
| }, | |
| { | |
| "epoch": 4.114430379746835, | |
| "grad_norm": 0.2497337452771658, | |
| "learning_rate": 3.641412784665648e-06, | |
| "loss": 0.1833, | |
| "step": 1014 | |
| }, | |
| { | |
| "epoch": 4.118481012658227, | |
| "grad_norm": 0.26198152584102985, | |
| "learning_rate": 3.608824378627005e-06, | |
| "loss": 0.1944, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 4.12253164556962, | |
| "grad_norm": 0.2757730992347448, | |
| "learning_rate": 3.5763679848634337e-06, | |
| "loss": 0.2076, | |
| "step": 1016 | |
| }, | |
| { | |
| "epoch": 4.1265822784810124, | |
| "grad_norm": 0.269706650788486, | |
| "learning_rate": 3.544043864774269e-06, | |
| "loss": 0.2121, | |
| "step": 1017 | |
| }, | |
| { | |
| "epoch": 4.130632911392405, | |
| "grad_norm": 0.25455827836776374, | |
| "learning_rate": 3.5118522786935282e-06, | |
| "loss": 0.18, | |
| "step": 1018 | |
| }, | |
| { | |
| "epoch": 4.1346835443037975, | |
| "grad_norm": 0.2596168327415316, | |
| "learning_rate": 3.479793485887819e-06, | |
| "loss": 0.1888, | |
| "step": 1019 | |
| }, | |
| { | |
| "epoch": 4.1387341772151895, | |
| "grad_norm": 0.25123397163434336, | |
| "learning_rate": 3.4478677445542653e-06, | |
| "loss": 0.2149, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 4.1427848101265825, | |
| "grad_norm": 0.25535819854084696, | |
| "learning_rate": 3.4160753118183767e-06, | |
| "loss": 0.2118, | |
| "step": 1021 | |
| }, | |
| { | |
| "epoch": 4.146835443037975, | |
| "grad_norm": 0.2662978614786831, | |
| "learning_rate": 3.3844164437320527e-06, | |
| "loss": 0.1956, | |
| "step": 1022 | |
| }, | |
| { | |
| "epoch": 4.1508860759493675, | |
| "grad_norm": 0.7788986744259833, | |
| "learning_rate": 3.3528913952714558e-06, | |
| "loss": 0.2174, | |
| "step": 1023 | |
| }, | |
| { | |
| "epoch": 4.15493670886076, | |
| "grad_norm": 0.2621996509324186, | |
| "learning_rate": 3.321500420335e-06, | |
| "loss": 0.2098, | |
| "step": 1024 | |
| }, | |
| { | |
| "epoch": 4.158987341772152, | |
| "grad_norm": 0.2654694698925293, | |
| "learning_rate": 3.290243771741275e-06, | |
| "loss": 0.2208, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 4.163037974683545, | |
| "grad_norm": 0.2809389323094826, | |
| "learning_rate": 3.2591217012270325e-06, | |
| "loss": 0.2015, | |
| "step": 1026 | |
| }, | |
| { | |
| "epoch": 4.167088607594937, | |
| "grad_norm": 0.27030541776004285, | |
| "learning_rate": 3.228134459445149e-06, | |
| "loss": 0.2074, | |
| "step": 1027 | |
| }, | |
| { | |
| "epoch": 4.171139240506329, | |
| "grad_norm": 0.2705151656557634, | |
| "learning_rate": 3.1972822959626205e-06, | |
| "loss": 0.2044, | |
| "step": 1028 | |
| }, | |
| { | |
| "epoch": 4.175189873417722, | |
| "grad_norm": 0.3522771785692766, | |
| "learning_rate": 3.166565459258513e-06, | |
| "loss": 0.2073, | |
| "step": 1029 | |
| }, | |
| { | |
| "epoch": 4.179240506329114, | |
| "grad_norm": 0.38156743222081335, | |
| "learning_rate": 3.1359841967220193e-06, | |
| "loss": 0.1956, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 4.183291139240506, | |
| "grad_norm": 0.2631111978718513, | |
| "learning_rate": 3.105538754650419e-06, | |
| "loss": 0.2069, | |
| "step": 1031 | |
| }, | |
| { | |
| "epoch": 4.187341772151899, | |
| "grad_norm": 0.2659998398333357, | |
| "learning_rate": 3.07522937824712e-06, | |
| "loss": 0.2114, | |
| "step": 1032 | |
| }, | |
| { | |
| "epoch": 4.191392405063291, | |
| "grad_norm": 0.2522949585873704, | |
| "learning_rate": 3.0450563116196697e-06, | |
| "loss": 0.2041, | |
| "step": 1033 | |
| }, | |
| { | |
| "epoch": 4.195443037974684, | |
| "grad_norm": 0.2570931118633325, | |
| "learning_rate": 3.0150197977778008e-06, | |
| "loss": 0.2032, | |
| "step": 1034 | |
| }, | |
| { | |
| "epoch": 4.199493670886076, | |
| "grad_norm": 0.24688101620439035, | |
| "learning_rate": 2.985120078631465e-06, | |
| "loss": 0.1921, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 4.203544303797468, | |
| "grad_norm": 0.2527030808501946, | |
| "learning_rate": 2.9553573949888893e-06, | |
| "loss": 0.2032, | |
| "step": 1036 | |
| }, | |
| { | |
| "epoch": 4.207594936708861, | |
| "grad_norm": 0.25449864893348595, | |
| "learning_rate": 2.9257319865546384e-06, | |
| "loss": 0.1647, | |
| "step": 1037 | |
| }, | |
| { | |
| "epoch": 4.211645569620253, | |
| "grad_norm": 0.2528289474002866, | |
| "learning_rate": 2.896244091927678e-06, | |
| "loss": 0.2037, | |
| "step": 1038 | |
| }, | |
| { | |
| "epoch": 4.215696202531646, | |
| "grad_norm": 0.26499414921145725, | |
| "learning_rate": 2.8668939485994584e-06, | |
| "loss": 0.2131, | |
| "step": 1039 | |
| }, | |
| { | |
| "epoch": 4.219746835443038, | |
| "grad_norm": 0.2601209678660675, | |
| "learning_rate": 2.837681792951994e-06, | |
| "loss": 0.224, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 4.22379746835443, | |
| "grad_norm": 0.2675937211703339, | |
| "learning_rate": 2.808607860255981e-06, | |
| "loss": 0.2089, | |
| "step": 1041 | |
| }, | |
| { | |
| "epoch": 4.227848101265823, | |
| "grad_norm": 0.26779146765265943, | |
| "learning_rate": 2.7796723846688634e-06, | |
| "loss": 0.1984, | |
| "step": 1042 | |
| }, | |
| { | |
| "epoch": 4.231898734177215, | |
| "grad_norm": 0.2594427451798492, | |
| "learning_rate": 2.7508755992329937e-06, | |
| "loss": 0.2199, | |
| "step": 1043 | |
| }, | |
| { | |
| "epoch": 4.235949367088607, | |
| "grad_norm": 0.25616734100604494, | |
| "learning_rate": 2.722217735873718e-06, | |
| "loss": 0.1929, | |
| "step": 1044 | |
| }, | |
| { | |
| "epoch": 4.24, | |
| "grad_norm": 0.24737601009852525, | |
| "learning_rate": 2.6936990253975315e-06, | |
| "loss": 0.2041, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 4.244050632911392, | |
| "grad_norm": 0.2643382283989595, | |
| "learning_rate": 2.665319697490205e-06, | |
| "loss": 0.198, | |
| "step": 1046 | |
| }, | |
| { | |
| "epoch": 4.248101265822784, | |
| "grad_norm": 0.26358066831277943, | |
| "learning_rate": 2.637079980714945e-06, | |
| "loss": 0.1789, | |
| "step": 1047 | |
| }, | |
| { | |
| "epoch": 4.252151898734177, | |
| "grad_norm": 0.24440559702895612, | |
| "learning_rate": 2.6089801025105453e-06, | |
| "loss": 0.1651, | |
| "step": 1048 | |
| }, | |
| { | |
| "epoch": 4.256202531645569, | |
| "grad_norm": 0.2393972856157291, | |
| "learning_rate": 2.581020289189571e-06, | |
| "loss": 0.211, | |
| "step": 1049 | |
| }, | |
| { | |
| "epoch": 4.260253164556962, | |
| "grad_norm": 0.259776256729944, | |
| "learning_rate": 2.553200765936501e-06, | |
| "loss": 0.1999, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 4.264303797468354, | |
| "grad_norm": 0.2527333101787348, | |
| "learning_rate": 2.525521756805962e-06, | |
| "loss": 0.1857, | |
| "step": 1051 | |
| }, | |
| { | |
| "epoch": 4.2683544303797465, | |
| "grad_norm": 0.2621938410536013, | |
| "learning_rate": 2.497983484720885e-06, | |
| "loss": 0.2093, | |
| "step": 1052 | |
| }, | |
| { | |
| "epoch": 4.2724050632911394, | |
| "grad_norm": 0.26870903886202435, | |
| "learning_rate": 2.470586171470728e-06, | |
| "loss": 0.1901, | |
| "step": 1053 | |
| }, | |
| { | |
| "epoch": 4.2764556962025315, | |
| "grad_norm": 0.25477152199533687, | |
| "learning_rate": 2.4433300377096836e-06, | |
| "loss": 0.1942, | |
| "step": 1054 | |
| }, | |
| { | |
| "epoch": 4.2805063291139245, | |
| "grad_norm": 0.2582249271042767, | |
| "learning_rate": 2.4162153029549073e-06, | |
| "loss": 0.2061, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 4.2845569620253166, | |
| "grad_norm": 0.2743432462506923, | |
| "learning_rate": 2.3892421855847458e-06, | |
| "loss": 0.2018, | |
| "step": 1056 | |
| }, | |
| { | |
| "epoch": 4.288607594936709, | |
| "grad_norm": 0.2531837157003946, | |
| "learning_rate": 2.362410902836978e-06, | |
| "loss": 0.2012, | |
| "step": 1057 | |
| }, | |
| { | |
| "epoch": 4.292658227848102, | |
| "grad_norm": 0.2556271962399679, | |
| "learning_rate": 2.3357216708070653e-06, | |
| "loss": 0.1946, | |
| "step": 1058 | |
| }, | |
| { | |
| "epoch": 4.296708860759494, | |
| "grad_norm": 0.2612984899381395, | |
| "learning_rate": 2.309174704446411e-06, | |
| "loss": 0.2088, | |
| "step": 1059 | |
| }, | |
| { | |
| "epoch": 4.300759493670886, | |
| "grad_norm": 0.25992092434618197, | |
| "learning_rate": 2.2827702175606437e-06, | |
| "loss": 0.211, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 4.304810126582279, | |
| "grad_norm": 0.25312328290487046, | |
| "learning_rate": 2.256508422807855e-06, | |
| "loss": 0.1951, | |
| "step": 1061 | |
| }, | |
| { | |
| "epoch": 4.308860759493671, | |
| "grad_norm": 0.2553521105102254, | |
| "learning_rate": 2.230389531696946e-06, | |
| "loss": 0.1975, | |
| "step": 1062 | |
| }, | |
| { | |
| "epoch": 4.312911392405063, | |
| "grad_norm": 0.2587245757264993, | |
| "learning_rate": 2.204413754585857e-06, | |
| "loss": 0.1904, | |
| "step": 1063 | |
| }, | |
| { | |
| "epoch": 4.316962025316456, | |
| "grad_norm": 0.2622452162251132, | |
| "learning_rate": 2.1785813006799406e-06, | |
| "loss": 0.1832, | |
| "step": 1064 | |
| }, | |
| { | |
| "epoch": 4.321012658227848, | |
| "grad_norm": 0.2606499579107849, | |
| "learning_rate": 2.1528923780302224e-06, | |
| "loss": 0.2142, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 4.325063291139241, | |
| "grad_norm": 0.24828440558137282, | |
| "learning_rate": 2.127347193531757e-06, | |
| "loss": 0.2134, | |
| "step": 1066 | |
| }, | |
| { | |
| "epoch": 4.329113924050633, | |
| "grad_norm": 0.25789451689875287, | |
| "learning_rate": 2.101945952921942e-06, | |
| "loss": 0.1849, | |
| "step": 1067 | |
| }, | |
| { | |
| "epoch": 4.333164556962025, | |
| "grad_norm": 0.2621031394073994, | |
| "learning_rate": 2.0766888607788906e-06, | |
| "loss": 0.2112, | |
| "step": 1068 | |
| }, | |
| { | |
| "epoch": 4.337215189873418, | |
| "grad_norm": 0.26534879809308376, | |
| "learning_rate": 2.0515761205197337e-06, | |
| "loss": 0.218, | |
| "step": 1069 | |
| }, | |
| { | |
| "epoch": 4.34126582278481, | |
| "grad_norm": 0.2615573838473744, | |
| "learning_rate": 2.0266079343990453e-06, | |
| "loss": 0.1909, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 4.345316455696203, | |
| "grad_norm": 0.26729624004152985, | |
| "learning_rate": 2.0017845035071494e-06, | |
| "loss": 0.2187, | |
| "step": 1071 | |
| }, | |
| { | |
| "epoch": 4.349367088607595, | |
| "grad_norm": 0.25982169370838243, | |
| "learning_rate": 1.9771060277685537e-06, | |
| "loss": 0.241, | |
| "step": 1072 | |
| }, | |
| { | |
| "epoch": 4.353417721518987, | |
| "grad_norm": 0.2510727580117948, | |
| "learning_rate": 1.95257270594031e-06, | |
| "loss": 0.235, | |
| "step": 1073 | |
| }, | |
| { | |
| "epoch": 4.35746835443038, | |
| "grad_norm": 0.25928975119566805, | |
| "learning_rate": 1.9281847356104188e-06, | |
| "loss": 0.1924, | |
| "step": 1074 | |
| }, | |
| { | |
| "epoch": 4.361518987341772, | |
| "grad_norm": 0.26253850141539453, | |
| "learning_rate": 1.9039423131962365e-06, | |
| "loss": 0.2038, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 4.365569620253164, | |
| "grad_norm": 0.2633374084629919, | |
| "learning_rate": 1.8798456339429027e-06, | |
| "loss": 0.2199, | |
| "step": 1076 | |
| }, | |
| { | |
| "epoch": 4.369620253164557, | |
| "grad_norm": 0.268085867203902, | |
| "learning_rate": 1.8558948919217612e-06, | |
| "loss": 0.1946, | |
| "step": 1077 | |
| }, | |
| { | |
| "epoch": 4.373670886075949, | |
| "grad_norm": 0.253733320758435, | |
| "learning_rate": 1.8320902800287954e-06, | |
| "loss": 0.2097, | |
| "step": 1078 | |
| }, | |
| { | |
| "epoch": 4.377721518987342, | |
| "grad_norm": 0.2669154712162767, | |
| "learning_rate": 1.8084319899830726e-06, | |
| "loss": 0.1943, | |
| "step": 1079 | |
| }, | |
| { | |
| "epoch": 4.381772151898734, | |
| "grad_norm": 0.25382443279494127, | |
| "learning_rate": 1.7849202123252097e-06, | |
| "loss": 0.214, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 4.385822784810126, | |
| "grad_norm": 0.259573618715521, | |
| "learning_rate": 1.7615551364158401e-06, | |
| "loss": 0.1921, | |
| "step": 1081 | |
| }, | |
| { | |
| "epoch": 4.389873417721519, | |
| "grad_norm": 0.2602820198263081, | |
| "learning_rate": 1.738336950434061e-06, | |
| "loss": 0.181, | |
| "step": 1082 | |
| }, | |
| { | |
| "epoch": 4.393924050632911, | |
| "grad_norm": 0.2689217864221219, | |
| "learning_rate": 1.715265841375957e-06, | |
| "loss": 0.196, | |
| "step": 1083 | |
| }, | |
| { | |
| "epoch": 4.397974683544303, | |
| "grad_norm": 0.24901160695872324, | |
| "learning_rate": 1.6923419950530684e-06, | |
| "loss": 0.2044, | |
| "step": 1084 | |
| }, | |
| { | |
| "epoch": 4.402025316455696, | |
| "grad_norm": 0.25002610658720037, | |
| "learning_rate": 1.6695655960909008e-06, | |
| "loss": 0.1902, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 4.4060759493670885, | |
| "grad_norm": 0.25754384170720096, | |
| "learning_rate": 1.646936827927441e-06, | |
| "loss": 0.2025, | |
| "step": 1086 | |
| }, | |
| { | |
| "epoch": 4.410126582278481, | |
| "grad_norm": 0.25340960916236643, | |
| "learning_rate": 1.6244558728116766e-06, | |
| "loss": 0.2047, | |
| "step": 1087 | |
| }, | |
| { | |
| "epoch": 4.4141772151898735, | |
| "grad_norm": 0.25521768141931783, | |
| "learning_rate": 1.6021229118021265e-06, | |
| "loss": 0.2165, | |
| "step": 1088 | |
| }, | |
| { | |
| "epoch": 4.418227848101266, | |
| "grad_norm": 0.2501062855806572, | |
| "learning_rate": 1.5799381247653967e-06, | |
| "loss": 0.1865, | |
| "step": 1089 | |
| }, | |
| { | |
| "epoch": 4.4222784810126585, | |
| "grad_norm": 0.2607938935293541, | |
| "learning_rate": 1.5579016903747013e-06, | |
| "loss": 0.1904, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 4.426329113924051, | |
| "grad_norm": 0.25704591235078983, | |
| "learning_rate": 1.5360137861084656e-06, | |
| "loss": 0.1986, | |
| "step": 1091 | |
| }, | |
| { | |
| "epoch": 4.430379746835443, | |
| "grad_norm": 0.24648704314743006, | |
| "learning_rate": 1.5142745882488475e-06, | |
| "loss": 0.189, | |
| "step": 1092 | |
| }, | |
| { | |
| "epoch": 4.434430379746836, | |
| "grad_norm": 0.26206852461848434, | |
| "learning_rate": 1.4926842718803691e-06, | |
| "loss": 0.2169, | |
| "step": 1093 | |
| }, | |
| { | |
| "epoch": 4.438481012658228, | |
| "grad_norm": 0.24055572295585215, | |
| "learning_rate": 1.4712430108884657e-06, | |
| "loss": 0.197, | |
| "step": 1094 | |
| }, | |
| { | |
| "epoch": 4.442531645569621, | |
| "grad_norm": 0.2497119492424784, | |
| "learning_rate": 1.4499509779581078e-06, | |
| "loss": 0.1936, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 4.446582278481013, | |
| "grad_norm": 0.25159642453577336, | |
| "learning_rate": 1.4288083445723988e-06, | |
| "loss": 0.1842, | |
| "step": 1096 | |
| }, | |
| { | |
| "epoch": 4.450632911392405, | |
| "grad_norm": 0.2595686473383348, | |
| "learning_rate": 1.4078152810112045e-06, | |
| "loss": 0.1854, | |
| "step": 1097 | |
| }, | |
| { | |
| "epoch": 4.454683544303798, | |
| "grad_norm": 0.2593917299568197, | |
| "learning_rate": 1.3869719563497697e-06, | |
| "loss": 0.1934, | |
| "step": 1098 | |
| }, | |
| { | |
| "epoch": 4.45873417721519, | |
| "grad_norm": 0.26258270721543425, | |
| "learning_rate": 1.3662785384573663e-06, | |
| "loss": 0.1868, | |
| "step": 1099 | |
| }, | |
| { | |
| "epoch": 4.462784810126582, | |
| "grad_norm": 0.2618893741218843, | |
| "learning_rate": 1.3457351939959383e-06, | |
| "loss": 0.1969, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 4.466835443037975, | |
| "grad_norm": 0.26113182344245556, | |
| "learning_rate": 1.3253420884187551e-06, | |
| "loss": 0.2034, | |
| "step": 1101 | |
| }, | |
| { | |
| "epoch": 4.470886075949367, | |
| "grad_norm": 0.2493228255722369, | |
| "learning_rate": 1.3050993859690953e-06, | |
| "loss": 0.2288, | |
| "step": 1102 | |
| }, | |
| { | |
| "epoch": 4.47493670886076, | |
| "grad_norm": 0.25307855044586813, | |
| "learning_rate": 1.2850072496788869e-06, | |
| "loss": 0.1841, | |
| "step": 1103 | |
| }, | |
| { | |
| "epoch": 4.478987341772152, | |
| "grad_norm": 0.250273955747193, | |
| "learning_rate": 1.2650658413674434e-06, | |
| "loss": 0.1811, | |
| "step": 1104 | |
| }, | |
| { | |
| "epoch": 4.483037974683544, | |
| "grad_norm": 0.2530546259876481, | |
| "learning_rate": 1.2452753216401226e-06, | |
| "loss": 0.2032, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 4.487088607594937, | |
| "grad_norm": 0.27182433189447064, | |
| "learning_rate": 1.2256358498870503e-06, | |
| "loss": 0.1965, | |
| "step": 1106 | |
| }, | |
| { | |
| "epoch": 4.491139240506329, | |
| "grad_norm": 0.24566165376198426, | |
| "learning_rate": 1.2061475842818337e-06, | |
| "loss": 0.1669, | |
| "step": 1107 | |
| }, | |
| { | |
| "epoch": 4.495189873417721, | |
| "grad_norm": 0.2540728448571459, | |
| "learning_rate": 1.1868106817802816e-06, | |
| "loss": 0.2028, | |
| "step": 1108 | |
| }, | |
| { | |
| "epoch": 4.499240506329114, | |
| "grad_norm": 0.2524915126414455, | |
| "learning_rate": 1.1676252981191482e-06, | |
| "loss": 0.1979, | |
| "step": 1109 | |
| }, | |
| { | |
| "epoch": 4.503291139240506, | |
| "grad_norm": 0.26544147011520597, | |
| "learning_rate": 1.1485915878148823e-06, | |
| "loss": 0.2312, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 4.507341772151898, | |
| "grad_norm": 0.2487871584162263, | |
| "learning_rate": 1.1297097041623584e-06, | |
| "loss": 0.2053, | |
| "step": 1111 | |
| }, | |
| { | |
| "epoch": 4.511392405063291, | |
| "grad_norm": 0.2519491475535857, | |
| "learning_rate": 1.1109797992336847e-06, | |
| "loss": 0.2275, | |
| "step": 1112 | |
| }, | |
| { | |
| "epoch": 4.515443037974683, | |
| "grad_norm": 0.2581338019232022, | |
| "learning_rate": 1.092402023876933e-06, | |
| "loss": 0.2044, | |
| "step": 1113 | |
| }, | |
| { | |
| "epoch": 4.519493670886076, | |
| "grad_norm": 0.26641060531169786, | |
| "learning_rate": 1.0739765277149527e-06, | |
| "loss": 0.2092, | |
| "step": 1114 | |
| }, | |
| { | |
| "epoch": 4.523544303797468, | |
| "grad_norm": 0.26353998030588965, | |
| "learning_rate": 1.0557034591441596e-06, | |
| "loss": 0.1921, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 4.52759493670886, | |
| "grad_norm": 0.2510265050845937, | |
| "learning_rate": 1.0375829653333324e-06, | |
| "loss": 0.2067, | |
| "step": 1116 | |
| }, | |
| { | |
| "epoch": 4.531645569620253, | |
| "grad_norm": 0.25943592805793003, | |
| "learning_rate": 1.0196151922224385e-06, | |
| "loss": 0.1768, | |
| "step": 1117 | |
| }, | |
| { | |
| "epoch": 4.535696202531645, | |
| "grad_norm": 0.2630884108152766, | |
| "learning_rate": 1.0018002845214526e-06, | |
| "loss": 0.1807, | |
| "step": 1118 | |
| }, | |
| { | |
| "epoch": 4.539746835443038, | |
| "grad_norm": 0.26324800027858136, | |
| "learning_rate": 9.841383857091947e-07, | |
| "loss": 0.1754, | |
| "step": 1119 | |
| }, | |
| { | |
| "epoch": 4.54379746835443, | |
| "grad_norm": 0.25600705339287205, | |
| "learning_rate": 9.666296380321616e-07, | |
| "loss": 0.2092, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 4.5478481012658225, | |
| "grad_norm": 0.2624629292750856, | |
| "learning_rate": 9.492741825034124e-07, | |
| "loss": 0.1975, | |
| "step": 1121 | |
| }, | |
| { | |
| "epoch": 4.5518987341772155, | |
| "grad_norm": 0.24907389514885084, | |
| "learning_rate": 9.320721589013892e-07, | |
| "loss": 0.192, | |
| "step": 1122 | |
| }, | |
| { | |
| "epoch": 4.5559493670886075, | |
| "grad_norm": 0.25703402155812216, | |
| "learning_rate": 9.150237057688339e-07, | |
| "loss": 0.2098, | |
| "step": 1123 | |
| }, | |
| { | |
| "epoch": 4.5600000000000005, | |
| "grad_norm": 0.25515075367773454, | |
| "learning_rate": 8.981289604116328e-07, | |
| "loss": 0.2117, | |
| "step": 1124 | |
| }, | |
| { | |
| "epoch": 4.564050632911393, | |
| "grad_norm": 0.2678256814668536, | |
| "learning_rate": 8.813880588977542e-07, | |
| "loss": 0.195, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 4.568101265822785, | |
| "grad_norm": 0.25262397656361074, | |
| "learning_rate": 8.648011360561126e-07, | |
| "loss": 0.2008, | |
| "step": 1126 | |
| }, | |
| { | |
| "epoch": 4.572151898734178, | |
| "grad_norm": 0.2541373039734592, | |
| "learning_rate": 8.483683254755037e-07, | |
| "loss": 0.2145, | |
| "step": 1127 | |
| }, | |
| { | |
| "epoch": 4.57620253164557, | |
| "grad_norm": 0.24387272303640745, | |
| "learning_rate": 8.320897595035227e-07, | |
| "loss": 0.1843, | |
| "step": 1128 | |
| }, | |
| { | |
| "epoch": 4.580253164556962, | |
| "grad_norm": 0.24887737193811613, | |
| "learning_rate": 8.159655692455093e-07, | |
| "loss": 0.1959, | |
| "step": 1129 | |
| }, | |
| { | |
| "epoch": 4.584303797468355, | |
| "grad_norm": 0.27308648375147004, | |
| "learning_rate": 7.999958845634648e-07, | |
| "loss": 0.1847, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 4.588354430379747, | |
| "grad_norm": 0.24398006413986845, | |
| "learning_rate": 7.841808340750478e-07, | |
| "loss": 0.2, | |
| "step": 1131 | |
| }, | |
| { | |
| "epoch": 4.592405063291139, | |
| "grad_norm": 0.24717239191212276, | |
| "learning_rate": 7.685205451524869e-07, | |
| "loss": 0.1773, | |
| "step": 1132 | |
| }, | |
| { | |
| "epoch": 4.596455696202532, | |
| "grad_norm": 0.25164180216931303, | |
| "learning_rate": 7.530151439216027e-07, | |
| "loss": 0.2055, | |
| "step": 1133 | |
| }, | |
| { | |
| "epoch": 4.600506329113924, | |
| "grad_norm": 0.264802247610348, | |
| "learning_rate": 7.376647552607675e-07, | |
| "loss": 0.2, | |
| "step": 1134 | |
| }, | |
| { | |
| "epoch": 4.604556962025317, | |
| "grad_norm": 0.2353577136263203, | |
| "learning_rate": 7.224695027998963e-07, | |
| "loss": 0.1785, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 4.608607594936709, | |
| "grad_norm": 0.25532222542925975, | |
| "learning_rate": 7.07429508919466e-07, | |
| "loss": 0.1957, | |
| "step": 1136 | |
| }, | |
| { | |
| "epoch": 4.612658227848101, | |
| "grad_norm": 0.26145666378599247, | |
| "learning_rate": 6.925448947495206e-07, | |
| "loss": 0.2017, | |
| "step": 1137 | |
| }, | |
| { | |
| "epoch": 4.616708860759494, | |
| "grad_norm": 0.24196088866113447, | |
| "learning_rate": 6.778157801686936e-07, | |
| "loss": 0.1751, | |
| "step": 1138 | |
| }, | |
| { | |
| "epoch": 4.620759493670886, | |
| "grad_norm": 0.25880424892800513, | |
| "learning_rate": 6.632422838032515e-07, | |
| "loss": 0.2096, | |
| "step": 1139 | |
| }, | |
| { | |
| "epoch": 4.624810126582279, | |
| "grad_norm": 0.24567283440680557, | |
| "learning_rate": 6.488245230261281e-07, | |
| "loss": 0.1802, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 4.628860759493671, | |
| "grad_norm": 0.2477971838152083, | |
| "learning_rate": 6.345626139559868e-07, | |
| "loss": 0.1774, | |
| "step": 1141 | |
| }, | |
| { | |
| "epoch": 4.632911392405063, | |
| "grad_norm": 0.2497714560754988, | |
| "learning_rate": 6.204566714562866e-07, | |
| "loss": 0.2175, | |
| "step": 1142 | |
| }, | |
| { | |
| "epoch": 4.636962025316456, | |
| "grad_norm": 0.2523938406424071, | |
| "learning_rate": 6.06506809134344e-07, | |
| "loss": 0.2038, | |
| "step": 1143 | |
| }, | |
| { | |
| "epoch": 4.641012658227848, | |
| "grad_norm": 0.26475387315292453, | |
| "learning_rate": 5.927131393404373e-07, | |
| "loss": 0.2144, | |
| "step": 1144 | |
| }, | |
| { | |
| "epoch": 4.64506329113924, | |
| "grad_norm": 0.25058119184284355, | |
| "learning_rate": 5.790757731668817e-07, | |
| "loss": 0.2115, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 4.649113924050633, | |
| "grad_norm": 2.108012870931347, | |
| "learning_rate": 5.655948204471507e-07, | |
| "loss": 0.245, | |
| "step": 1146 | |
| }, | |
| { | |
| "epoch": 4.653164556962025, | |
| "grad_norm": 0.2536618603065724, | |
| "learning_rate": 5.522703897549875e-07, | |
| "loss": 0.2089, | |
| "step": 1147 | |
| }, | |
| { | |
| "epoch": 4.657215189873417, | |
| "grad_norm": 0.2566944988894761, | |
| "learning_rate": 5.391025884035239e-07, | |
| "loss": 0.1958, | |
| "step": 1148 | |
| }, | |
| { | |
| "epoch": 4.66126582278481, | |
| "grad_norm": 0.24443761724587734, | |
| "learning_rate": 5.260915224444207e-07, | |
| "loss": 0.1844, | |
| "step": 1149 | |
| }, | |
| { | |
| "epoch": 4.665316455696202, | |
| "grad_norm": 0.2564917809340935, | |
| "learning_rate": 5.132372966670129e-07, | |
| "loss": 0.2015, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 4.669367088607595, | |
| "grad_norm": 0.24772010095010105, | |
| "learning_rate": 5.005400145974704e-07, | |
| "loss": 0.1815, | |
| "step": 1151 | |
| }, | |
| { | |
| "epoch": 4.673417721518987, | |
| "grad_norm": 0.25911716775552734, | |
| "learning_rate": 4.879997784979562e-07, | |
| "loss": 0.2213, | |
| "step": 1152 | |
| }, | |
| { | |
| "epoch": 4.6774683544303794, | |
| "grad_norm": 0.2524699359966587, | |
| "learning_rate": 4.7561668936580984e-07, | |
| "loss": 0.1924, | |
| "step": 1153 | |
| }, | |
| { | |
| "epoch": 4.681518987341772, | |
| "grad_norm": 0.24929623373696363, | |
| "learning_rate": 4.6339084693272306e-07, | |
| "loss": 0.214, | |
| "step": 1154 | |
| }, | |
| { | |
| "epoch": 4.6855696202531645, | |
| "grad_norm": 0.2683524558300083, | |
| "learning_rate": 4.5132234966395847e-07, | |
| "loss": 0.2059, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 4.689620253164557, | |
| "grad_norm": 0.24047561905779008, | |
| "learning_rate": 4.3941129475752795e-07, | |
| "loss": 0.1928, | |
| "step": 1156 | |
| }, | |
| { | |
| "epoch": 4.6936708860759495, | |
| "grad_norm": 0.2627637410715735, | |
| "learning_rate": 4.27657778143431e-07, | |
| "loss": 0.2049, | |
| "step": 1157 | |
| }, | |
| { | |
| "epoch": 4.697721518987342, | |
| "grad_norm": 0.2663025997674183, | |
| "learning_rate": 4.1606189448287757e-07, | |
| "loss": 0.2097, | |
| "step": 1158 | |
| }, | |
| { | |
| "epoch": 4.7017721518987345, | |
| "grad_norm": 0.2424310718577902, | |
| "learning_rate": 4.046237371675177e-07, | |
| "loss": 0.2157, | |
| "step": 1159 | |
| }, | |
| { | |
| "epoch": 4.705822784810127, | |
| "grad_norm": 0.24475612559285465, | |
| "learning_rate": 3.9334339831869963e-07, | |
| "loss": 0.2077, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 4.709873417721519, | |
| "grad_norm": 0.2458985882504158, | |
| "learning_rate": 3.8222096878671955e-07, | |
| "loss": 0.1873, | |
| "step": 1161 | |
| }, | |
| { | |
| "epoch": 4.713924050632912, | |
| "grad_norm": 0.2505212663682799, | |
| "learning_rate": 3.7125653815009545e-07, | |
| "loss": 0.2151, | |
| "step": 1162 | |
| }, | |
| { | |
| "epoch": 4.717974683544304, | |
| "grad_norm": 0.24842055840912042, | |
| "learning_rate": 3.6045019471484974e-07, | |
| "loss": 0.1751, | |
| "step": 1163 | |
| }, | |
| { | |
| "epoch": 4.722025316455696, | |
| "grad_norm": 0.25703797192125316, | |
| "learning_rate": 3.498020255137813e-07, | |
| "loss": 0.2095, | |
| "step": 1164 | |
| }, | |
| { | |
| "epoch": 4.726075949367089, | |
| "grad_norm": 0.23809600519830923, | |
| "learning_rate": 3.393121163057811e-07, | |
| "loss": 0.1659, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 4.730126582278481, | |
| "grad_norm": 0.24732234230931982, | |
| "learning_rate": 3.289805515751399e-07, | |
| "loss": 0.1677, | |
| "step": 1166 | |
| }, | |
| { | |
| "epoch": 4.734177215189874, | |
| "grad_norm": 0.25506541907425545, | |
| "learning_rate": 3.188074145308573e-07, | |
| "loss": 0.2036, | |
| "step": 1167 | |
| }, | |
| { | |
| "epoch": 4.738227848101266, | |
| "grad_norm": 0.24424433479851657, | |
| "learning_rate": 3.087927871059804e-07, | |
| "loss": 0.2253, | |
| "step": 1168 | |
| }, | |
| { | |
| "epoch": 4.742278481012658, | |
| "grad_norm": 0.2556828833208802, | |
| "learning_rate": 2.989367499569418e-07, | |
| "loss": 0.2024, | |
| "step": 1169 | |
| }, | |
| { | |
| "epoch": 4.746329113924051, | |
| "grad_norm": 0.24532063389595316, | |
| "learning_rate": 2.8923938246290917e-07, | |
| "loss": 0.2023, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 4.750379746835443, | |
| "grad_norm": 0.24175238452529577, | |
| "learning_rate": 2.7970076272514804e-07, | |
| "loss": 0.178, | |
| "step": 1171 | |
| }, | |
| { | |
| "epoch": 4.754430379746836, | |
| "grad_norm": 0.2456035522057982, | |
| "learning_rate": 2.703209675663887e-07, | |
| "loss": 0.1892, | |
| "step": 1172 | |
| }, | |
| { | |
| "epoch": 4.758481012658228, | |
| "grad_norm": 0.25366356707654963, | |
| "learning_rate": 2.6110007253021374e-07, | |
| "loss": 0.1958, | |
| "step": 1173 | |
| }, | |
| { | |
| "epoch": 4.76253164556962, | |
| "grad_norm": 0.24364209726860575, | |
| "learning_rate": 2.520381518804471e-07, | |
| "loss": 0.2062, | |
| "step": 1174 | |
| }, | |
| { | |
| "epoch": 4.766582278481013, | |
| "grad_norm": 0.2532719586223214, | |
| "learning_rate": 2.4313527860054585e-07, | |
| "loss": 0.2087, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 4.770632911392405, | |
| "grad_norm": 0.25072301879287917, | |
| "learning_rate": 2.343915243930317e-07, | |
| "loss": 0.1903, | |
| "step": 1176 | |
| }, | |
| { | |
| "epoch": 4.774683544303797, | |
| "grad_norm": 0.2554022921665841, | |
| "learning_rate": 2.2580695967889367e-07, | |
| "loss": 0.187, | |
| "step": 1177 | |
| }, | |
| { | |
| "epoch": 4.77873417721519, | |
| "grad_norm": 0.25112185507988183, | |
| "learning_rate": 2.1738165359704189e-07, | |
| "loss": 0.1971, | |
| "step": 1178 | |
| }, | |
| { | |
| "epoch": 4.782784810126582, | |
| "grad_norm": 0.26766889986065084, | |
| "learning_rate": 2.0911567400373257e-07, | |
| "loss": 0.1995, | |
| "step": 1179 | |
| }, | |
| { | |
| "epoch": 4.786835443037974, | |
| "grad_norm": 0.2460592150688571, | |
| "learning_rate": 2.0100908747202607e-07, | |
| "loss": 0.1545, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 4.790886075949367, | |
| "grad_norm": 0.24314463310606263, | |
| "learning_rate": 1.9306195929125638e-07, | |
| "loss": 0.1779, | |
| "step": 1181 | |
| }, | |
| { | |
| "epoch": 4.794936708860759, | |
| "grad_norm": 0.2566677738249757, | |
| "learning_rate": 1.8527435346650247e-07, | |
| "loss": 0.2106, | |
| "step": 1182 | |
| }, | |
| { | |
| "epoch": 4.798987341772152, | |
| "grad_norm": 0.25033412557820445, | |
| "learning_rate": 1.7764633271807108e-07, | |
| "loss": 0.1826, | |
| "step": 1183 | |
| }, | |
| { | |
| "epoch": 4.803037974683544, | |
| "grad_norm": 0.24026372312028166, | |
| "learning_rate": 1.7017795848099262e-07, | |
| "loss": 0.1737, | |
| "step": 1184 | |
| }, | |
| { | |
| "epoch": 4.807088607594936, | |
| "grad_norm": 0.2612560648344739, | |
| "learning_rate": 1.6286929090452596e-07, | |
| "loss": 0.2046, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 4.811139240506329, | |
| "grad_norm": 0.24371697897247008, | |
| "learning_rate": 1.557203888516745e-07, | |
| "loss": 0.2067, | |
| "step": 1186 | |
| }, | |
| { | |
| "epoch": 4.815189873417721, | |
| "grad_norm": 0.25740540458714745, | |
| "learning_rate": 1.487313098987131e-07, | |
| "loss": 0.2182, | |
| "step": 1187 | |
| }, | |
| { | |
| "epoch": 4.819240506329114, | |
| "grad_norm": 0.2571647592095984, | |
| "learning_rate": 1.4190211033472402e-07, | |
| "loss": 0.1979, | |
| "step": 1188 | |
| }, | |
| { | |
| "epoch": 4.8232911392405065, | |
| "grad_norm": 0.2383786360630127, | |
| "learning_rate": 1.3523284516113955e-07, | |
| "loss": 0.1934, | |
| "step": 1189 | |
| }, | |
| { | |
| "epoch": 4.8273417721518985, | |
| "grad_norm": 0.2554511057628668, | |
| "learning_rate": 1.2872356809130682e-07, | |
| "loss": 0.1884, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 4.8313924050632915, | |
| "grad_norm": 0.25637506367302954, | |
| "learning_rate": 1.2237433155004807e-07, | |
| "loss": 0.1814, | |
| "step": 1191 | |
| }, | |
| { | |
| "epoch": 4.8354430379746836, | |
| "grad_norm": 0.24815815988146436, | |
| "learning_rate": 1.1618518667323886e-07, | |
| "loss": 0.1785, | |
| "step": 1192 | |
| }, | |
| { | |
| "epoch": 4.839493670886076, | |
| "grad_norm": 0.23917163802828875, | |
| "learning_rate": 1.1015618330740385e-07, | |
| "loss": 0.1957, | |
| "step": 1193 | |
| }, | |
| { | |
| "epoch": 4.843544303797469, | |
| "grad_norm": 0.24775464265333164, | |
| "learning_rate": 1.042873700093061e-07, | |
| "loss": 0.1798, | |
| "step": 1194 | |
| }, | |
| { | |
| "epoch": 4.847594936708861, | |
| "grad_norm": 0.2655339450441836, | |
| "learning_rate": 9.857879404556291e-08, | |
| "loss": 0.2255, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 4.851645569620253, | |
| "grad_norm": 0.2500842331193114, | |
| "learning_rate": 9.303050139225722e-08, | |
| "loss": 0.181, | |
| "step": 1196 | |
| }, | |
| { | |
| "epoch": 4.855696202531646, | |
| "grad_norm": 0.24828085944143033, | |
| "learning_rate": 8.76425367345779e-08, | |
| "loss": 0.1971, | |
| "step": 1197 | |
| }, | |
| { | |
| "epoch": 4.859746835443038, | |
| "grad_norm": 0.25149439671060814, | |
| "learning_rate": 8.241494346644897e-08, | |
| "loss": 0.1873, | |
| "step": 1198 | |
| }, | |
| { | |
| "epoch": 4.863797468354431, | |
| "grad_norm": 0.2504313440873231, | |
| "learning_rate": 7.734776369019204e-08, | |
| "loss": 0.1872, | |
| "step": 1199 | |
| }, | |
| { | |
| "epoch": 4.867848101265823, | |
| "grad_norm": 0.24756551723862585, | |
| "learning_rate": 7.244103821617332e-08, | |
| "loss": 0.203, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 4.871898734177215, | |
| "grad_norm": 0.25459428437084064, | |
| "learning_rate": 6.769480656248606e-08, | |
| "loss": 0.204, | |
| "step": 1201 | |
| }, | |
| { | |
| "epoch": 4.875949367088608, | |
| "grad_norm": 0.23784856196722465, | |
| "learning_rate": 6.310910695462635e-08, | |
| "loss": 0.1866, | |
| "step": 1202 | |
| }, | |
| { | |
| "epoch": 4.88, | |
| "grad_norm": 0.2528393601485236, | |
| "learning_rate": 5.8683976325191185e-08, | |
| "loss": 0.2102, | |
| "step": 1203 | |
| }, | |
| { | |
| "epoch": 4.884050632911393, | |
| "grad_norm": 0.24753672042114178, | |
| "learning_rate": 5.4419450313571984e-08, | |
| "loss": 0.2046, | |
| "step": 1204 | |
| }, | |
| { | |
| "epoch": 4.888101265822785, | |
| "grad_norm": 0.2617709211633615, | |
| "learning_rate": 5.031556326567488e-08, | |
| "loss": 0.1912, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 4.892151898734177, | |
| "grad_norm": 0.24917855199019504, | |
| "learning_rate": 4.637234823364312e-08, | |
| "loss": 0.2113, | |
| "step": 1206 | |
| }, | |
| { | |
| "epoch": 4.89620253164557, | |
| "grad_norm": 0.2585955363410735, | |
| "learning_rate": 4.258983697558838e-08, | |
| "loss": 0.1992, | |
| "step": 1207 | |
| }, | |
| { | |
| "epoch": 4.900253164556962, | |
| "grad_norm": 0.248815513161566, | |
| "learning_rate": 3.896805995533548e-08, | |
| "loss": 0.2291, | |
| "step": 1208 | |
| }, | |
| { | |
| "epoch": 4.904303797468354, | |
| "grad_norm": 0.2492963591007819, | |
| "learning_rate": 3.550704634218028e-08, | |
| "loss": 0.1823, | |
| "step": 1209 | |
| }, | |
| { | |
| "epoch": 4.908354430379747, | |
| "grad_norm": 0.2588460081211886, | |
| "learning_rate": 3.2206824010647676e-08, | |
| "loss": 0.1981, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 4.912405063291139, | |
| "grad_norm": 0.24042186296805768, | |
| "learning_rate": 2.9067419540278476e-08, | |
| "loss": 0.1971, | |
| "step": 1211 | |
| }, | |
| { | |
| "epoch": 4.916455696202531, | |
| "grad_norm": 0.25605520119864716, | |
| "learning_rate": 2.6088858215400638e-08, | |
| "loss": 0.1835, | |
| "step": 1212 | |
| }, | |
| { | |
| "epoch": 4.920506329113924, | |
| "grad_norm": 0.26764246034256634, | |
| "learning_rate": 2.3271164024940564e-08, | |
| "loss": 0.2126, | |
| "step": 1213 | |
| }, | |
| { | |
| "epoch": 4.924556962025316, | |
| "grad_norm": 0.2541211851690507, | |
| "learning_rate": 2.061435966221881e-08, | |
| "loss": 0.1896, | |
| "step": 1214 | |
| }, | |
| { | |
| "epoch": 4.928607594936709, | |
| "grad_norm": 0.24358458062765112, | |
| "learning_rate": 1.811846652477245e-08, | |
| "loss": 0.1832, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 4.932658227848101, | |
| "grad_norm": 0.25294044643795005, | |
| "learning_rate": 1.5783504714184106e-08, | |
| "loss": 0.1907, | |
| "step": 1216 | |
| }, | |
| { | |
| "epoch": 4.936708860759493, | |
| "grad_norm": 0.24353032529399743, | |
| "learning_rate": 1.360949303591097e-08, | |
| "loss": 0.2112, | |
| "step": 1217 | |
| }, | |
| { | |
| "epoch": 4.940759493670886, | |
| "grad_norm": 0.2466978094075244, | |
| "learning_rate": 1.1596448999144916e-08, | |
| "loss": 0.1974, | |
| "step": 1218 | |
| }, | |
| { | |
| "epoch": 4.944810126582278, | |
| "grad_norm": 0.24367181497075122, | |
| "learning_rate": 9.744388816668172e-09, | |
| "loss": 0.171, | |
| "step": 1219 | |
| }, | |
| { | |
| "epoch": 4.948860759493671, | |
| "grad_norm": 0.2625426681324267, | |
| "learning_rate": 8.05332740472009e-09, | |
| "loss": 0.1857, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 4.952911392405063, | |
| "grad_norm": 0.24955477379731322, | |
| "learning_rate": 6.523278382872811e-09, | |
| "loss": 0.1712, | |
| "step": 1221 | |
| }, | |
| { | |
| "epoch": 4.9569620253164555, | |
| "grad_norm": 0.25297319357389847, | |
| "learning_rate": 5.15425407393133e-09, | |
| "loss": 0.1959, | |
| "step": 1222 | |
| }, | |
| { | |
| "epoch": 4.961012658227848, | |
| "grad_norm": 0.2527721545261932, | |
| "learning_rate": 3.94626550383137e-09, | |
| "loss": 0.203, | |
| "step": 1223 | |
| }, | |
| { | |
| "epoch": 4.9650632911392405, | |
| "grad_norm": 0.2424206745400902, | |
| "learning_rate": 2.899322401546112e-09, | |
| "loss": 0.1804, | |
| "step": 1224 | |
| }, | |
| { | |
| "epoch": 4.969113924050633, | |
| "grad_norm": 0.25698019306593506, | |
| "learning_rate": 2.013433199010706e-09, | |
| "loss": 0.1995, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 4.9731645569620255, | |
| "grad_norm": 0.25422585350548893, | |
| "learning_rate": 1.2886050310556563e-09, | |
| "loss": 0.2084, | |
| "step": 1226 | |
| }, | |
| { | |
| "epoch": 4.977215189873418, | |
| "grad_norm": 0.2578738457889016, | |
| "learning_rate": 7.248437353468695e-10, | |
| "loss": 0.1996, | |
| "step": 1227 | |
| }, | |
| { | |
| "epoch": 4.98126582278481, | |
| "grad_norm": 0.23770006713593, | |
| "learning_rate": 3.221538523412449e-10, | |
| "loss": 0.1895, | |
| "step": 1228 | |
| }, | |
| { | |
| "epoch": 4.985316455696203, | |
| "grad_norm": 0.26211143695661016, | |
| "learning_rate": 8.053862524670663e-11, | |
| "loss": 0.1732, | |
| "step": 1229 | |
| }, | |
| { | |
| "epoch": 4.989367088607595, | |
| "grad_norm": 0.24068550557756116, | |
| "learning_rate": 0.0, | |
| "loss": 0.1817, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 4.989367088607595, | |
| "step": 1230, | |
| "total_flos": 2.53664046736153e+18, | |
| "train_loss": 0.16788045876636737, | |
| "train_runtime": 24163.9435, | |
| "train_samples_per_second": 6.539, | |
| "train_steps_per_second": 0.051 | |
| } | |
| ], | |
| "logging_steps": 1, | |
| "max_steps": 1230, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 5, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 2.53664046736153e+18, | |
| "train_batch_size": 1, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
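
The JSON above has the shape of a Hugging Face `transformers` Trainer state log: `log_history` holds one record per optimizer step (`epoch`, `grad_norm`, `learning_rate`, `loss`, `step`), while the last entry and the trailing top-level keys (`total_flos`, `train_runtime`, `max_steps`, `num_train_epochs`, ...) summarize the run. Below is a minimal inspection sketch, assuming the JSON is saved as `trainer_state.json` — the standard filename the Trainer writes next to each checkpoint is an assumption here, so adjust the path to wherever this state actually lives.

```python
import json
from statistics import mean

# Assumed path: the Trainer normally writes this state as
# <output_dir>/trainer_state.json alongside each checkpoint.
with open("trainer_state.json") as f:
    state = json.load(f)

# Per-step records carry a "loss" key; the final entry instead holds
# run-level metrics (train_loss, train_runtime, ...), so filter it out.
steps = [r for r in state["log_history"] if "loss" in r]
summary = state["log_history"][-1]

# Group the fractional per-step epoch values into whole epochs and
# report the mean logged loss for each one.
by_epoch = {}
for r in steps:
    by_epoch.setdefault(int(r["epoch"]), []).append(r["loss"])
for ep in sorted(by_epoch):
    losses = by_epoch[ep]
    print(f"epoch {ep}: mean loss {mean(losses):.4f} over {len(losses)} steps")

print(f'final step {summary["step"]}, '
      f'runtime {summary.get("train_runtime", float("nan")):.0f}s')
```

On this particular log the sketch would print five per-epoch means (the step losses fall from roughly 1.06 at the start of epoch 0 to around 0.2 by epoch 4) followed by the final step count of 1230 and the ~24164 s runtime reported in the summary entry.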