{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.5654032487888285,
  "eval_steps": 500,
  "global_step": 4500,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.005699629524080934,
      "grad_norm": 19.25,
      "learning_rate": 9.982896237172178e-06,
      "loss": 2.0698,
      "step": 10
    },
    {
      "epoch": 0.011399259048161869,
      "grad_norm": 6.96875,
      "learning_rate": 9.963892056252377e-06,
      "loss": 1.5026,
      "step": 20
    },
    {
      "epoch": 0.017098888572242805,
      "grad_norm": 6.5,
      "learning_rate": 9.944887875332574e-06,
      "loss": 1.4607,
      "step": 30
    },
    {
      "epoch": 0.022798518096323737,
      "grad_norm": 6.5625,
      "learning_rate": 9.925883694412771e-06,
      "loss": 1.3855,
      "step": 40
    },
    {
      "epoch": 0.028498147620404674,
      "grad_norm": 6.0625,
      "learning_rate": 9.90687951349297e-06,
      "loss": 1.4154,
      "step": 50
    },
    {
      "epoch": 0.03419777714448561,
      "grad_norm": 5.90625,
      "learning_rate": 9.887875332573167e-06,
      "loss": 1.3327,
      "step": 60
    },
    {
      "epoch": 0.039897406668566546,
      "grad_norm": 12.9375,
      "learning_rate": 9.868871151653364e-06,
      "loss": 1.3797,
      "step": 70
    },
    {
      "epoch": 0.045597036192647475,
      "grad_norm": 6.3125,
      "learning_rate": 9.849866970733563e-06,
      "loss": 1.361,
      "step": 80
    },
    {
      "epoch": 0.05129666571672841,
      "grad_norm": 7.0,
      "learning_rate": 9.83086278981376e-06,
      "loss": 1.4097,
      "step": 90
    },
    {
      "epoch": 0.05699629524080935,
      "grad_norm": 6.46875,
      "learning_rate": 9.811858608893958e-06,
      "loss": 1.3784,
      "step": 100
    },
    {
      "epoch": 0.06269592476489028,
      "grad_norm": 6.09375,
      "learning_rate": 9.792854427974155e-06,
      "loss": 1.3886,
      "step": 110
    },
    {
      "epoch": 0.06839555428897122,
      "grad_norm": 7.0625,
      "learning_rate": 9.773850247054353e-06,
      "loss": 1.2982,
      "step": 120
    },
    {
      "epoch": 0.07409518381305215,
      "grad_norm": 6.0625,
      "learning_rate": 9.75484606613455e-06,
      "loss": 1.3887,
      "step": 130
    },
    {
      "epoch": 0.07979481333713309,
      "grad_norm": 5.46875,
      "learning_rate": 9.735841885214748e-06,
      "loss": 1.3336,
      "step": 140
    },
    {
      "epoch": 0.08549444286121402,
      "grad_norm": 6.78125,
      "learning_rate": 9.716837704294946e-06,
      "loss": 1.3523,
      "step": 150
    },
    {
      "epoch": 0.09119407238529495,
      "grad_norm": 7.96875,
      "learning_rate": 9.697833523375144e-06,
      "loss": 1.3483,
      "step": 160
    },
    {
      "epoch": 0.09689370190937589,
      "grad_norm": 6.96875,
      "learning_rate": 9.678829342455342e-06,
      "loss": 1.3502,
      "step": 170
    },
    {
      "epoch": 0.10259333143345682,
      "grad_norm": 7.0,
      "learning_rate": 9.65982516153554e-06,
      "loss": 1.3553,
      "step": 180
    },
    {
      "epoch": 0.10829296095753776,
      "grad_norm": 6.3125,
      "learning_rate": 9.640820980615736e-06,
      "loss": 1.338,
      "step": 190
    },
    {
      "epoch": 0.1139925904816187,
      "grad_norm": 7.3125,
      "learning_rate": 9.621816799695934e-06,
      "loss": 1.3868,
      "step": 200
    },
    {
      "epoch": 0.11969222000569962,
      "grad_norm": 6.53125,
      "learning_rate": 9.602812618776132e-06,
      "loss": 1.3996,
      "step": 210
    },
    {
      "epoch": 0.12539184952978055,
      "grad_norm": 7.8125,
      "learning_rate": 9.58380843785633e-06,
      "loss": 1.3638,
      "step": 220
    },
    {
      "epoch": 0.1310914790538615,
      "grad_norm": 6.90625,
      "learning_rate": 9.564804256936528e-06,
      "loss": 1.2893,
      "step": 230
    },
    {
      "epoch": 0.13679110857794244,
      "grad_norm": 6.9375,
      "learning_rate": 9.545800076016724e-06,
      "loss": 1.3408,
      "step": 240
    },
    {
      "epoch": 0.14249073810202337,
      "grad_norm": 6.4375,
      "learning_rate": 9.526795895096922e-06,
      "loss": 1.3522,
      "step": 250
    },
    {
      "epoch": 0.1481903676261043,
      "grad_norm": 6.59375,
      "learning_rate": 9.50779171417712e-06,
      "loss": 1.3595,
      "step": 260
    },
    {
      "epoch": 0.15388999715018523,
      "grad_norm": 7.125,
      "learning_rate": 9.488787533257318e-06,
      "loss": 1.381,
      "step": 270
    },
    {
      "epoch": 0.15958962667426618,
      "grad_norm": 7.1875,
      "learning_rate": 9.469783352337516e-06,
      "loss": 1.3109,
      "step": 280
    },
    {
      "epoch": 0.1652892561983471,
      "grad_norm": 7.09375,
      "learning_rate": 9.450779171417712e-06,
      "loss": 1.3793,
      "step": 290
    },
    {
      "epoch": 0.17098888572242804,
      "grad_norm": 7.125,
      "learning_rate": 9.43177499049791e-06,
      "loss": 1.3373,
      "step": 300
    },
    {
      "epoch": 0.17668851524650897,
      "grad_norm": 6.5625,
      "learning_rate": 9.412770809578108e-06,
      "loss": 1.3474,
      "step": 310
    },
    {
      "epoch": 0.1823881447705899,
      "grad_norm": 7.03125,
      "learning_rate": 9.393766628658306e-06,
      "loss": 1.3076,
      "step": 320
    },
    {
      "epoch": 0.18808777429467086,
      "grad_norm": 7.71875,
      "learning_rate": 9.374762447738504e-06,
      "loss": 1.2929,
      "step": 330
    },
    {
      "epoch": 0.19378740381875179,
      "grad_norm": 6.8125,
      "learning_rate": 9.3557582668187e-06,
      "loss": 1.3652,
      "step": 340
    },
    {
      "epoch": 0.19948703334283271,
      "grad_norm": 7.15625,
      "learning_rate": 9.336754085898898e-06,
      "loss": 1.3535,
      "step": 350
    },
    {
      "epoch": 0.20518666286691364,
      "grad_norm": 5.9375,
      "learning_rate": 9.317749904979096e-06,
      "loss": 1.3388,
      "step": 360
    },
    {
      "epoch": 0.21088629239099457,
      "grad_norm": 7.6875,
      "learning_rate": 9.298745724059294e-06,
      "loss": 1.3083,
      "step": 370
    },
    {
      "epoch": 0.21658592191507553,
      "grad_norm": 7.125,
      "learning_rate": 9.279741543139492e-06,
      "loss": 1.3409,
      "step": 380
    },
    {
      "epoch": 0.22228555143915646,
      "grad_norm": 6.78125,
      "learning_rate": 9.26073736221969e-06,
      "loss": 1.3149,
      "step": 390
    },
    {
      "epoch": 0.2279851809632374,
      "grad_norm": 6.3125,
      "learning_rate": 9.241733181299886e-06,
      "loss": 1.3525,
      "step": 400
    },
    {
      "epoch": 0.23368481048731832,
      "grad_norm": 7.34375,
      "learning_rate": 9.222729000380084e-06,
      "loss": 1.3343,
      "step": 410
    },
    {
      "epoch": 0.23938444001139925,
      "grad_norm": 6.875,
      "learning_rate": 9.203724819460282e-06,
      "loss": 1.3098,
      "step": 420
    },
    {
      "epoch": 0.2450840695354802,
      "grad_norm": 5.8125,
      "learning_rate": 9.18472063854048e-06,
      "loss": 1.2878,
      "step": 430
    },
    {
      "epoch": 0.2507836990595611,
      "grad_norm": 6.0625,
      "learning_rate": 9.165716457620676e-06,
      "loss": 1.2681,
      "step": 440
    },
    {
      "epoch": 0.2564833285836421,
      "grad_norm": 7.09375,
      "learning_rate": 9.146712276700876e-06,
      "loss": 1.2974,
      "step": 450
    },
    {
      "epoch": 0.262182958107723,
      "grad_norm": 6.96875,
      "learning_rate": 9.127708095781072e-06,
      "loss": 1.3047,
      "step": 460
    },
    {
      "epoch": 0.26788258763180395,
      "grad_norm": 7.40625,
      "learning_rate": 9.10870391486127e-06,
      "loss": 1.2063,
      "step": 470
    },
    {
      "epoch": 0.2735822171558849,
      "grad_norm": 7.59375,
      "learning_rate": 9.089699733941468e-06,
      "loss": 1.3046,
      "step": 480
    },
    {
      "epoch": 0.2792818466799658,
      "grad_norm": 5.9375,
      "learning_rate": 9.070695553021666e-06,
      "loss": 1.2633,
      "step": 490
    },
    {
      "epoch": 0.28498147620404674,
      "grad_norm": 7.3125,
      "learning_rate": 9.051691372101862e-06,
      "loss": 1.3394,
      "step": 500
    },
    {
      "epoch": 0.29068110572812766,
      "grad_norm": 5.75,
      "learning_rate": 9.032687191182062e-06,
      "loss": 1.2653,
      "step": 510
    },
    {
      "epoch": 0.2963807352522086,
      "grad_norm": 6.6875,
      "learning_rate": 9.013683010262258e-06,
      "loss": 1.2359,
      "step": 520
    },
    {
      "epoch": 0.3020803647762895,
      "grad_norm": 7.21875,
      "learning_rate": 8.994678829342456e-06,
      "loss": 1.2021,
      "step": 530
    },
    {
      "epoch": 0.30777999430037045,
      "grad_norm": 6.8125,
      "learning_rate": 8.975674648422654e-06,
      "loss": 1.233,
      "step": 540
    },
    {
      "epoch": 0.31347962382445144,
      "grad_norm": 7.5,
      "learning_rate": 8.956670467502852e-06,
      "loss": 1.3498,
      "step": 550
    },
    {
      "epoch": 0.31917925334853237,
      "grad_norm": 6.5625,
      "learning_rate": 8.937666286583048e-06,
      "loss": 1.2261,
      "step": 560
    },
    {
      "epoch": 0.3248788828726133,
      "grad_norm": 6.6875,
      "learning_rate": 8.918662105663248e-06,
      "loss": 1.3059,
      "step": 570
    },
    {
      "epoch": 0.3305785123966942,
      "grad_norm": 6.5,
      "learning_rate": 8.899657924743444e-06,
      "loss": 1.241,
      "step": 580
    },
    {
      "epoch": 0.33627814192077515,
      "grad_norm": 7.28125,
      "learning_rate": 8.880653743823642e-06,
      "loss": 1.3466,
      "step": 590
    },
    {
      "epoch": 0.3419777714448561,
      "grad_norm": 6.75,
      "learning_rate": 8.861649562903838e-06,
      "loss": 1.314,
      "step": 600
    },
    {
      "epoch": 0.347677400968937,
      "grad_norm": 6.6875,
      "learning_rate": 8.842645381984038e-06,
      "loss": 1.2967,
      "step": 610
    },
    {
      "epoch": 0.35337703049301794,
      "grad_norm": 7.40625,
      "learning_rate": 8.823641201064234e-06,
      "loss": 1.3309,
      "step": 620
    },
    {
      "epoch": 0.35907666001709887,
      "grad_norm": 7.375,
      "learning_rate": 8.804637020144432e-06,
      "loss": 1.248,
      "step": 630
    },
    {
      "epoch": 0.3647762895411798,
      "grad_norm": 6.8125,
      "learning_rate": 8.78563283922463e-06,
      "loss": 1.2862,
      "step": 640
    },
    {
      "epoch": 0.3704759190652608,
      "grad_norm": 7.3125,
      "learning_rate": 8.766628658304828e-06,
      "loss": 1.2528,
      "step": 650
    },
    {
      "epoch": 0.3761755485893417,
      "grad_norm": 6.75,
      "learning_rate": 8.747624477385024e-06,
      "loss": 1.2739,
      "step": 660
    },
    {
      "epoch": 0.38187517811342264,
      "grad_norm": 7.40625,
      "learning_rate": 8.728620296465224e-06,
      "loss": 1.2521,
      "step": 670
    },
    {
      "epoch": 0.38757480763750357,
      "grad_norm": 7.71875,
      "learning_rate": 8.70961611554542e-06,
      "loss": 1.2068,
      "step": 680
    },
    {
      "epoch": 0.3932744371615845,
      "grad_norm": 5.96875,
      "learning_rate": 8.690611934625618e-06,
      "loss": 1.3239,
      "step": 690
    },
    {
      "epoch": 0.39897406668566543,
      "grad_norm": 7.28125,
      "learning_rate": 8.671607753705816e-06,
      "loss": 1.2795,
      "step": 700
    },
    {
      "epoch": 0.40467369620974636,
      "grad_norm": 6.90625,
      "learning_rate": 8.652603572786014e-06,
      "loss": 1.2848,
      "step": 710
    },
    {
      "epoch": 0.4103733257338273,
      "grad_norm": 6.59375,
      "learning_rate": 8.63359939186621e-06,
      "loss": 1.2491,
      "step": 720
    },
    {
      "epoch": 0.4160729552579082,
      "grad_norm": 6.5625,
      "learning_rate": 8.61459521094641e-06,
      "loss": 1.229,
      "step": 730
    },
    {
      "epoch": 0.42177258478198915,
      "grad_norm": 6.34375,
      "learning_rate": 8.595591030026606e-06,
      "loss": 1.2394,
      "step": 740
    },
    {
      "epoch": 0.42747221430607013,
      "grad_norm": 7.0625,
      "learning_rate": 8.576586849106804e-06,
      "loss": 1.3423,
      "step": 750
    },
    {
      "epoch": 0.43317184383015106,
      "grad_norm": 7.03125,
      "learning_rate": 8.557582668187002e-06,
      "loss": 1.3051,
      "step": 760
    },
    {
      "epoch": 0.438871473354232,
      "grad_norm": 6.21875,
      "learning_rate": 8.5385784872672e-06,
      "loss": 1.2504,
      "step": 770
    },
    {
      "epoch": 0.4445711028783129,
      "grad_norm": 7.5,
      "learning_rate": 8.519574306347396e-06,
      "loss": 1.2255,
      "step": 780
    },
    {
      "epoch": 0.45027073240239385,
      "grad_norm": 7.34375,
      "learning_rate": 8.500570125427594e-06,
      "loss": 1.2367,
      "step": 790
    },
    {
      "epoch": 0.4559703619264748,
      "grad_norm": 6.46875,
      "learning_rate": 8.481565944507792e-06,
      "loss": 1.2855,
      "step": 800
    },
    {
      "epoch": 0.4616699914505557,
      "grad_norm": 6.6875,
      "learning_rate": 8.46256176358799e-06,
      "loss": 1.2875,
      "step": 810
    },
    {
      "epoch": 0.46736962097463663,
      "grad_norm": 6.75,
      "learning_rate": 8.443557582668188e-06,
      "loss": 1.2946,
      "step": 820
    },
    {
      "epoch": 0.47306925049871756,
      "grad_norm": 7.03125,
      "learning_rate": 8.424553401748386e-06,
      "loss": 1.2672,
      "step": 830
    },
    {
      "epoch": 0.4787688800227985,
      "grad_norm": 6.78125,
      "learning_rate": 8.405549220828583e-06,
      "loss": 1.2279,
      "step": 840
    },
    {
      "epoch": 0.4844685095468795,
      "grad_norm": 7.59375,
      "learning_rate": 8.38654503990878e-06,
      "loss": 1.2593,
      "step": 850
    },
    {
      "epoch": 0.4901681390709604,
      "grad_norm": 7.0,
      "learning_rate": 8.367540858988978e-06,
      "loss": 1.2662,
      "step": 860
    },
    {
      "epoch": 0.49586776859504134,
      "grad_norm": 7.8125,
      "learning_rate": 8.348536678069176e-06,
      "loss": 1.2404,
      "step": 870
    },
    {
      "epoch": 0.5015673981191222,
      "grad_norm": 6.21875,
      "learning_rate": 8.329532497149374e-06,
      "loss": 1.3002,
      "step": 880
    },
    {
      "epoch": 0.5072670276432032,
      "grad_norm": 6.5,
      "learning_rate": 8.31052831622957e-06,
      "loss": 1.2786,
      "step": 890
    },
    {
      "epoch": 0.5129666571672842,
      "grad_norm": 7.125,
      "learning_rate": 8.291524135309769e-06,
      "loss": 1.2384,
      "step": 900
    },
    {
      "epoch": 0.518666286691365,
      "grad_norm": 7.375,
      "learning_rate": 8.272519954389967e-06,
      "loss": 1.2383,
      "step": 910
    },
    {
      "epoch": 0.524365916215446,
      "grad_norm": 7.375,
      "learning_rate": 8.253515773470164e-06,
      "loss": 1.2136,
      "step": 920
    },
    {
      "epoch": 0.5300655457395269,
      "grad_norm": 5.84375,
      "learning_rate": 8.234511592550362e-06,
      "loss": 1.2139,
      "step": 930
    },
    {
      "epoch": 0.5357651752636079,
      "grad_norm": 6.8125,
      "learning_rate": 8.215507411630559e-06,
      "loss": 1.2511,
      "step": 940
    },
    {
      "epoch": 0.5414648047876888,
      "grad_norm": 7.65625,
      "learning_rate": 8.196503230710757e-06,
      "loss": 1.2315,
      "step": 950
    },
    {
      "epoch": 0.5471644343117698,
      "grad_norm": 6.875,
      "learning_rate": 8.177499049790955e-06,
      "loss": 1.2691,
      "step": 960
    },
    {
      "epoch": 0.5528640638358506,
      "grad_norm": 6.78125,
      "learning_rate": 8.158494868871153e-06,
      "loss": 1.2067,
      "step": 970
    },
    {
      "epoch": 0.5585636933599316,
      "grad_norm": 6.125,
      "learning_rate": 8.13949068795135e-06,
      "loss": 1.1774,
      "step": 980
    },
    {
      "epoch": 0.5642633228840125,
      "grad_norm": 6.90625,
      "learning_rate": 8.120486507031547e-06,
      "loss": 1.2299,
      "step": 990
    },
    {
      "epoch": 0.5699629524080935,
      "grad_norm": 7.6875,
      "learning_rate": 8.101482326111745e-06,
      "loss": 1.2525,
      "step": 1000
    },
    {
      "epoch": 0.5756625819321745,
      "grad_norm": 8.5625,
      "learning_rate": 8.082478145191943e-06,
      "loss": 1.2287,
      "step": 1010
    },
    {
      "epoch": 0.5813622114562553,
      "grad_norm": 6.09375,
      "learning_rate": 8.06347396427214e-06,
      "loss": 1.2515,
      "step": 1020
    },
    {
      "epoch": 0.5870618409803363,
      "grad_norm": 8.625,
      "learning_rate": 8.044469783352339e-06,
      "loss": 1.228,
      "step": 1030
    },
    {
      "epoch": 0.5927614705044172,
      "grad_norm": 7.53125,
      "learning_rate": 8.025465602432537e-06,
      "loss": 1.2277,
      "step": 1040
    },
    {
      "epoch": 0.5984611000284982,
      "grad_norm": 6.8125,
      "learning_rate": 8.006461421512733e-06,
      "loss": 1.2442,
      "step": 1050
    },
    {
      "epoch": 0.604160729552579,
      "grad_norm": 6.875,
      "learning_rate": 7.98745724059293e-06,
      "loss": 1.177,
      "step": 1060
    },
    {
      "epoch": 0.60986035907666,
      "grad_norm": 5.96875,
      "learning_rate": 7.968453059673129e-06,
      "loss": 1.1878,
      "step": 1070
    },
    {
      "epoch": 0.6155599886007409,
      "grad_norm": 7.3125,
      "learning_rate": 7.949448878753327e-06,
      "loss": 1.285,
      "step": 1080
    },
    {
      "epoch": 0.6212596181248219,
      "grad_norm": 7.0625,
      "learning_rate": 7.930444697833523e-06,
      "loss": 1.2323,
      "step": 1090
    },
    {
      "epoch": 0.6269592476489029,
      "grad_norm": 6.625,
      "learning_rate": 7.911440516913723e-06,
      "loss": 1.1727,
      "step": 1100
    },
    {
      "epoch": 0.6326588771729837,
      "grad_norm": 6.8125,
      "learning_rate": 7.892436335993919e-06,
      "loss": 1.1775,
      "step": 1110
    },
    {
      "epoch": 0.6383585066970647,
      "grad_norm": 7.6875,
      "learning_rate": 7.873432155074117e-06,
      "loss": 1.3263,
      "step": 1120
    },
    {
      "epoch": 0.6440581362211456,
      "grad_norm": 6.9375,
      "learning_rate": 7.854427974154315e-06,
      "loss": 1.1943,
      "step": 1130
    },
    {
      "epoch": 0.6497577657452266,
      "grad_norm": 7.53125,
      "learning_rate": 7.835423793234513e-06,
      "loss": 1.2537,
      "step": 1140
    },
    {
      "epoch": 0.6554573952693075,
      "grad_norm": 6.6875,
      "learning_rate": 7.816419612314709e-06,
      "loss": 1.1514,
      "step": 1150
    },
    {
      "epoch": 0.6611570247933884,
      "grad_norm": 6.4375,
      "learning_rate": 7.797415431394909e-06,
      "loss": 1.1908,
      "step": 1160
    },
    {
      "epoch": 0.6668566543174693,
      "grad_norm": 8.0625,
      "learning_rate": 7.778411250475105e-06,
      "loss": 1.2328,
      "step": 1170
    },
    {
      "epoch": 0.6725562838415503,
      "grad_norm": 7.375,
      "learning_rate": 7.759407069555303e-06,
      "loss": 1.2281,
      "step": 1180
    },
    {
      "epoch": 0.6782559133656312,
      "grad_norm": 7.125,
      "learning_rate": 7.7404028886355e-06,
      "loss": 1.2469,
      "step": 1190
    },
    {
      "epoch": 0.6839555428897122,
      "grad_norm": 7.5625,
      "learning_rate": 7.721398707715699e-06,
      "loss": 1.2444,
      "step": 1200
    },
    {
      "epoch": 0.6896551724137931,
      "grad_norm": 8.0625,
      "learning_rate": 7.702394526795895e-06,
      "loss": 1.3066,
      "step": 1210
    },
    {
      "epoch": 0.695354801937874,
      "grad_norm": 7.6875,
      "learning_rate": 7.683390345876095e-06,
      "loss": 1.2125,
      "step": 1220
    },
    {
      "epoch": 0.701054431461955,
      "grad_norm": 7.78125,
      "learning_rate": 7.664386164956291e-06,
      "loss": 1.2275,
      "step": 1230
    },
    {
      "epoch": 0.7067540609860359,
      "grad_norm": 7.1875,
      "learning_rate": 7.645381984036489e-06,
      "loss": 1.2702,
      "step": 1240
    },
    {
      "epoch": 0.7124536905101169,
      "grad_norm": 7.375,
      "learning_rate": 7.626377803116686e-06,
      "loss": 1.2547,
      "step": 1250
    },
    {
      "epoch": 0.7181533200341977,
      "grad_norm": 7.3125,
      "learning_rate": 7.607373622196884e-06,
      "loss": 1.2192,
      "step": 1260
    },
    {
      "epoch": 0.7238529495582787,
      "grad_norm": 8.0625,
      "learning_rate": 7.588369441277081e-06,
      "loss": 1.1881,
      "step": 1270
    },
    {
      "epoch": 0.7295525790823596,
      "grad_norm": 7.09375,
      "learning_rate": 7.569365260357279e-06,
      "loss": 1.2096,
      "step": 1280
    },
    {
      "epoch": 0.7352522086064406,
      "grad_norm": 6.75,
      "learning_rate": 7.550361079437477e-06,
      "loss": 1.246,
      "step": 1290
    },
    {
      "epoch": 0.7409518381305216,
      "grad_norm": 7.34375,
      "learning_rate": 7.531356898517674e-06,
      "loss": 1.1759,
      "step": 1300
    },
    {
      "epoch": 0.7466514676546024,
      "grad_norm": 6.65625,
      "learning_rate": 7.512352717597872e-06,
      "loss": 1.1641,
      "step": 1310
    },
    {
      "epoch": 0.7523510971786834,
      "grad_norm": 7.5625,
      "learning_rate": 7.49334853667807e-06,
      "loss": 1.2776,
      "step": 1320
    },
    {
      "epoch": 0.7580507267027643,
      "grad_norm": 8.1875,
      "learning_rate": 7.474344355758267e-06,
      "loss": 1.2651,
      "step": 1330
    },
    {
      "epoch": 0.7637503562268453,
      "grad_norm": 6.65625,
      "learning_rate": 7.455340174838464e-06,
      "loss": 1.2367,
      "step": 1340
    },
    {
      "epoch": 0.7694499857509262,
      "grad_norm": 8.125,
      "learning_rate": 7.436335993918663e-06,
      "loss": 1.2126,
      "step": 1350
    },
    {
      "epoch": 0.7751496152750071,
      "grad_norm": 8.4375,
      "learning_rate": 7.41733181299886e-06,
      "loss": 1.1739,
      "step": 1360
    },
    {
      "epoch": 0.780849244799088,
      "grad_norm": 6.875,
      "learning_rate": 7.398327632079057e-06,
      "loss": 1.1959,
      "step": 1370
    },
    {
      "epoch": 0.786548874323169,
      "grad_norm": 7.03125,
      "learning_rate": 7.379323451159256e-06,
      "loss": 1.1734,
      "step": 1380
    },
    {
      "epoch": 0.7922485038472499,
      "grad_norm": 8.8125,
      "learning_rate": 7.360319270239453e-06,
      "loss": 1.2139,
      "step": 1390
    },
    {
      "epoch": 0.7979481333713309,
      "grad_norm": 6.6875,
      "learning_rate": 7.34131508931965e-06,
      "loss": 1.1497,
      "step": 1400
    },
    {
      "epoch": 0.8036477628954118,
      "grad_norm": 8.75,
      "learning_rate": 7.322310908399849e-06,
      "loss": 1.2907,
      "step": 1410
    },
    {
      "epoch": 0.8093473924194927,
      "grad_norm": 7.125,
      "learning_rate": 7.303306727480046e-06,
      "loss": 1.2595,
      "step": 1420
    },
    {
      "epoch": 0.8150470219435737,
      "grad_norm": 7.09375,
      "learning_rate": 7.284302546560243e-06,
      "loss": 1.2449,
      "step": 1430
    },
    {
      "epoch": 0.8207466514676546,
      "grad_norm": 6.75,
      "learning_rate": 7.265298365640442e-06,
      "loss": 1.2571,
      "step": 1440
    },
    {
      "epoch": 0.8264462809917356,
      "grad_norm": 6.8125,
      "learning_rate": 7.246294184720639e-06,
      "loss": 1.2183,
      "step": 1450
    },
    {
      "epoch": 0.8321459105158164,
      "grad_norm": 7.21875,
      "learning_rate": 7.227290003800836e-06,
      "loss": 1.2293,
      "step": 1460
    },
    {
      "epoch": 0.8378455400398974,
      "grad_norm": 7.40625,
      "learning_rate": 7.208285822881035e-06,
      "loss": 1.1618,
      "step": 1470
    },
    {
      "epoch": 0.8435451695639783,
      "grad_norm": 7.1875,
      "learning_rate": 7.189281641961232e-06,
      "loss": 1.1948,
      "step": 1480
    },
    {
      "epoch": 0.8492447990880593,
      "grad_norm": 7.3125,
      "learning_rate": 7.170277461041429e-06,
      "loss": 1.1967,
      "step": 1490
    },
    {
      "epoch": 0.8549444286121403,
      "grad_norm": 5.65625,
      "learning_rate": 7.151273280121628e-06,
      "loss": 1.2639,
      "step": 1500
    },
    {
      "epoch": 0.8606440581362211,
      "grad_norm": 7.53125,
      "learning_rate": 7.132269099201825e-06,
      "loss": 1.2751,
      "step": 1510
    },
    {
      "epoch": 0.8663436876603021,
      "grad_norm": 7.03125,
      "learning_rate": 7.113264918282022e-06,
      "loss": 1.2803,
      "step": 1520
    },
    {
      "epoch": 0.872043317184383,
      "grad_norm": 7.1875,
      "learning_rate": 7.094260737362221e-06,
      "loss": 1.1903,
      "step": 1530
    },
    {
      "epoch": 0.877742946708464,
      "grad_norm": 7.15625,
      "learning_rate": 7.075256556442418e-06,
      "loss": 1.1932,
      "step": 1540
    },
    {
      "epoch": 0.8834425762325449,
      "grad_norm": 7.03125,
      "learning_rate": 7.056252375522615e-06,
      "loss": 1.2517,
      "step": 1550
    },
    {
      "epoch": 0.8891422057566258,
      "grad_norm": 8.0625,
      "learning_rate": 7.037248194602814e-06,
      "loss": 1.2413,
      "step": 1560
    },
    {
      "epoch": 0.8948418352807067,
      "grad_norm": 8.3125,
      "learning_rate": 7.018244013683011e-06,
      "loss": 1.1405,
      "step": 1570
    },
    {
      "epoch": 0.9005414648047877,
      "grad_norm": 7.25,
      "learning_rate": 6.999239832763208e-06,
      "loss": 1.2416,
      "step": 1580
    },
    {
      "epoch": 0.9062410943288686,
      "grad_norm": 7.96875,
      "learning_rate": 6.9802356518434055e-06,
      "loss": 1.195,
      "step": 1590
    },
    {
      "epoch": 0.9119407238529496,
      "grad_norm": 6.84375,
      "learning_rate": 6.961231470923604e-06,
      "loss": 1.2723,
      "step": 1600
    },
    {
      "epoch": 0.9176403533770305,
      "grad_norm": 7.25,
      "learning_rate": 6.942227290003801e-06,
      "loss": 1.2642,
      "step": 1610
    },
    {
      "epoch": 0.9233399829011114,
      "grad_norm": 6.40625,
      "learning_rate": 6.9232231090839985e-06,
      "loss": 1.2173,
      "step": 1620
    },
    {
      "epoch": 0.9290396124251924,
      "grad_norm": 7.0625,
      "learning_rate": 6.904218928164197e-06,
      "loss": 1.2514,
      "step": 1630
    },
    {
      "epoch": 0.9347392419492733,
      "grad_norm": 6.90625,
      "learning_rate": 6.885214747244394e-06,
      "loss": 1.1989,
      "step": 1640
    },
    {
      "epoch": 0.9404388714733543,
      "grad_norm": 8.0625,
      "learning_rate": 6.8662105663245915e-06,
      "loss": 1.2291,
      "step": 1650
    },
    {
      "epoch": 0.9461385009974351,
      "grad_norm": 7.53125,
      "learning_rate": 6.8472063854047895e-06,
      "loss": 1.2637,
      "step": 1660
    },
    {
      "epoch": 0.9518381305215161,
      "grad_norm": 7.3125,
      "learning_rate": 6.828202204484987e-06,
      "loss": 1.1973,
      "step": 1670
    },
    {
      "epoch": 0.957537760045597,
      "grad_norm": 7.3125,
      "learning_rate": 6.8091980235651845e-06,
      "loss": 1.2915,
      "step": 1680
    },
    {
      "epoch": 0.963237389569678,
      "grad_norm": 7.875,
      "learning_rate": 6.7901938426453825e-06,
      "loss": 1.1392,
      "step": 1690
    },
    {
      "epoch": 0.968937019093759,
      "grad_norm": 7.46875,
      "learning_rate": 6.7711896617255804e-06,
      "loss": 1.1384,
      "step": 1700
    },
    {
      "epoch": 0.9746366486178398,
      "grad_norm": 6.84375,
      "learning_rate": 6.7521854808057775e-06,
      "loss": 1.1322,
      "step": 1710
    },
    {
      "epoch": 0.9803362781419208,
      "grad_norm": 7.40625,
      "learning_rate": 6.7331812998859755e-06,
      "loss": 1.2261,
      "step": 1720
    },
    {
      "epoch": 0.9860359076660017,
      "grad_norm": 6.40625,
      "learning_rate": 6.7141771189661735e-06,
      "loss": 1.1714,
      "step": 1730
    },
    {
      "epoch": 0.9917355371900827,
      "grad_norm": 7.28125,
      "learning_rate": 6.6951729380463706e-06,
      "loss": 1.1884,
      "step": 1740
    },
    {
      "epoch": 0.9974351667141635,
      "grad_norm": 7.46875,
      "learning_rate": 6.6761687571265685e-06,
      "loss": 1.28,
      "step": 1750
    },
    {
      "epoch": 1.0034197777144485,
      "grad_norm": 7.53125,
      "learning_rate": 6.657164576206766e-06,
      "loss": 1.3048,
      "step": 1760
    },
    {
      "epoch": 1.0091194072385294,
      "grad_norm": 7.90625,
      "learning_rate": 6.638160395286964e-06,
      "loss": 1.1814,
      "step": 1770
    },
    {
      "epoch": 1.0148190367626104,
      "grad_norm": 7.65625,
      "learning_rate": 6.6191562143671615e-06,
      "loss": 1.2091,
      "step": 1780
    },
    {
      "epoch": 1.0205186662866914,
      "grad_norm": 7.46875,
      "learning_rate": 6.600152033447359e-06,
      "loss": 1.2139,
      "step": 1790
    },
    {
      "epoch": 1.0262182958107724,
      "grad_norm": 8.5,
      "learning_rate": 6.581147852527557e-06,
      "loss": 1.1838,
      "step": 1800
    },
    {
      "epoch": 1.0319179253348532,
      "grad_norm": 7.4375,
      "learning_rate": 6.5621436716077546e-06,
      "loss": 1.2291,
      "step": 1810
    },
    {
      "epoch": 1.0376175548589341,
      "grad_norm": 7.46875,
      "learning_rate": 6.543139490687952e-06,
      "loss": 1.1906,
      "step": 1820
    },
    {
      "epoch": 1.0433171843830151,
      "grad_norm": 6.375,
      "learning_rate": 6.52413530976815e-06,
      "loss": 1.2225,
      "step": 1830
    },
    {
      "epoch": 1.0490168139070961,
      "grad_norm": 6.8125,
      "learning_rate": 6.5051311288483476e-06,
      "loss": 1.1612,
      "step": 1840
    },
    {
      "epoch": 1.0547164434311769,
      "grad_norm": 7.40625,
      "learning_rate": 6.486126947928545e-06,
      "loss": 1.1946,
      "step": 1850
    },
    {
      "epoch": 1.0604160729552579,
      "grad_norm": 7.375,
      "learning_rate": 6.467122767008742e-06,
      "loss": 1.2219,
      "step": 1860
    },
    {
      "epoch": 1.0661157024793388,
      "grad_norm": 8.0625,
      "learning_rate": 6.448118586088941e-06,
      "loss": 1.2461,
      "step": 1870
    },
    {
      "epoch": 1.0718153320034198,
      "grad_norm": 7.09375,
      "learning_rate": 6.429114405169138e-06,
      "loss": 1.1393,
      "step": 1880
    },
    {
      "epoch": 1.0775149615275008,
      "grad_norm": 7.6875,
      "learning_rate": 6.410110224249335e-06,
      "loss": 1.2083,
      "step": 1890
    },
    {
      "epoch": 1.0832145910515816,
      "grad_norm": 6.6875,
      "learning_rate": 6.391106043329534e-06,
      "loss": 1.243,
      "step": 1900
    },
    {
      "epoch": 1.0889142205756626,
      "grad_norm": 6.875,
      "learning_rate": 6.372101862409731e-06,
      "loss": 1.2199,
      "step": 1910
    },
    {
      "epoch": 1.0946138500997435,
      "grad_norm": 6.9375,
      "learning_rate": 6.353097681489928e-06,
      "loss": 1.202,
      "step": 1920
    },
    {
      "epoch": 1.1003134796238245,
      "grad_norm": 7.125,
      "learning_rate": 6.334093500570126e-06,
      "loss": 1.1115,
      "step": 1930
    },
    {
      "epoch": 1.1060131091479053,
      "grad_norm": 7.34375,
      "learning_rate": 6.315089319650324e-06,
      "loss": 1.1845,
      "step": 1940
    },
    {
      "epoch": 1.1117127386719863,
      "grad_norm": 6.40625,
      "learning_rate": 6.296085138730521e-06,
      "loss": 1.1988,
      "step": 1950
    },
    {
      "epoch": 1.1174123681960673,
      "grad_norm": 6.09375,
      "learning_rate": 6.277080957810718e-06,
      "loss": 1.1937,
      "step": 1960
    },
    {
      "epoch": 1.1231119977201482,
      "grad_norm": 7.75,
      "learning_rate": 6.258076776890917e-06,
      "loss": 1.2473,
      "step": 1970
    },
    {
      "epoch": 1.1288116272442292,
      "grad_norm": 6.8125,
      "learning_rate": 6.239072595971114e-06,
      "loss": 1.2198,
      "step": 1980
    },
    {
      "epoch": 1.13451125676831,
      "grad_norm": 9.625,
      "learning_rate": 6.220068415051311e-06,
      "loss": 1.1505,
      "step": 1990
    },
    {
      "epoch": 1.140210886292391,
      "grad_norm": 7.03125,
      "learning_rate": 6.20106423413151e-06,
      "loss": 1.1457,
      "step": 2000
    },
    {
      "epoch": 1.145910515816472,
      "grad_norm": 6.375,
      "learning_rate": 6.182060053211707e-06,
      "loss": 1.1825,
      "step": 2010
    },
    {
      "epoch": 1.151610145340553,
      "grad_norm": 7.65625,
      "learning_rate": 6.163055872291904e-06,
      "loss": 1.1962,
      "step": 2020
    },
    {
      "epoch": 1.1573097748646337,
      "grad_norm": 7.21875,
      "learning_rate": 6.144051691372103e-06,
      "loss": 1.1608,
      "step": 2030
    },
    {
      "epoch": 1.1630094043887147,
      "grad_norm": 7.1875,
      "learning_rate": 6.1250475104523e-06,
      "loss": 1.2188,
      "step": 2040
    },
    {
      "epoch": 1.1687090339127957,
      "grad_norm": 7.46875,
      "learning_rate": 6.106043329532497e-06,
      "loss": 1.177,
      "step": 2050
    },
    {
      "epoch": 1.1744086634368767,
      "grad_norm": 7.125,
      "learning_rate": 6.087039148612696e-06,
      "loss": 1.1917,
      "step": 2060
    },
    {
      "epoch": 1.1801082929609574,
      "grad_norm": 9.75,
      "learning_rate": 6.068034967692893e-06,
      "loss": 1.2057,
      "step": 2070
    },
    {
      "epoch": 1.1858079224850384,
      "grad_norm": 8.1875,
      "learning_rate": 6.04903078677309e-06,
      "loss": 1.1498,
      "step": 2080
    },
    {
      "epoch": 1.1915075520091194,
      "grad_norm": 7.21875,
      "learning_rate": 6.030026605853289e-06,
      "loss": 1.1958,
      "step": 2090
    },
    {
      "epoch": 1.1972071815332004,
      "grad_norm": 9.0625,
      "learning_rate": 6.011022424933486e-06,
      "loss": 1.1512,
      "step": 2100
    },
    {
      "epoch": 1.2029068110572814,
      "grad_norm": 7.34375,
      "learning_rate": 5.992018244013683e-06,
      "loss": 1.144,
      "step": 2110
    },
    {
      "epoch": 1.2086064405813621,
      "grad_norm": 7.1875,
      "learning_rate": 5.973014063093882e-06,
      "loss": 1.0991,
      "step": 2120
    },
    {
      "epoch": 1.2143060701054431,
      "grad_norm": 7.625,
      "learning_rate": 5.954009882174079e-06,
      "loss": 1.2007,
      "step": 2130
    },
    {
      "epoch": 1.220005699629524,
      "grad_norm": 7.3125,
      "learning_rate": 5.935005701254276e-06,
      "loss": 1.183,
      "step": 2140
    },
    {
      "epoch": 1.225705329153605,
      "grad_norm": 7.59375,
      "learning_rate": 5.916001520334475e-06,
      "loss": 1.1851,
      "step": 2150
    },
    {
      "epoch": 1.231404958677686,
      "grad_norm": 7.09375,
      "learning_rate": 5.896997339414672e-06,
      "loss": 1.1716,
      "step": 2160
    },
    {
      "epoch": 1.2371045882017668,
      "grad_norm": 7.59375,
      "learning_rate": 5.877993158494869e-06,
      "loss": 1.1023,
      "step": 2170
    },
    {
      "epoch": 1.2428042177258478,
      "grad_norm": 8.6875,
      "learning_rate": 5.858988977575067e-06,
      "loss": 1.1477,
      "step": 2180
    },
    {
      "epoch": 1.2485038472499288,
      "grad_norm": 7.28125,
      "learning_rate": 5.839984796655265e-06,
      "loss": 1.1417,
      "step": 2190
    },
    {
      "epoch": 1.2542034767740096,
      "grad_norm": 7.59375,
      "learning_rate": 5.820980615735462e-06,
      "loss": 1.2224,
      "step": 2200
    },
    {
      "epoch": 1.2599031062980905,
      "grad_norm": 8.0625,
      "learning_rate": 5.80197643481566e-06,
      "loss": 1.1976,
      "step": 2210
    },
    {
      "epoch": 1.2656027358221715,
      "grad_norm": 8.5,
      "learning_rate": 5.782972253895858e-06,
      "loss": 1.1783,
      "step": 2220
    },
    {
      "epoch": 1.2713023653462525,
      "grad_norm": 6.9375,
      "learning_rate": 5.763968072976055e-06,
      "loss": 1.1445,
      "step": 2230
    },
    {
      "epoch": 1.2770019948703335,
      "grad_norm": 6.53125,
      "learning_rate": 5.744963892056253e-06,
      "loss": 1.102,
      "step": 2240
    },
    {
      "epoch": 1.2827016243944143,
      "grad_norm": 6.5625,
      "learning_rate": 5.725959711136451e-06,
      "loss": 1.1797,
      "step": 2250
    },
    {
      "epoch": 1.2884012539184952,
      "grad_norm": 7.8125,
      "learning_rate": 5.706955530216648e-06,
      "loss": 1.2569,
      "step": 2260
    },
    {
      "epoch": 1.2941008834425762,
      "grad_norm": 8.0625,
      "learning_rate": 5.687951349296845e-06,
      "loss": 1.2034,
      "step": 2270
    },
    {
      "epoch": 1.2998005129666572,
      "grad_norm": 8.9375,
      "learning_rate": 5.668947168377043e-06,
      "loss": 1.1407,
      "step": 2280
    },
    {
      "epoch": 1.3055001424907382,
      "grad_norm": 7.09375,
      "learning_rate": 5.649942987457241e-06,
      "loss": 1.1672,
      "step": 2290
    },
    {
      "epoch": 1.311199772014819,
      "grad_norm": 7.75,
      "learning_rate": 5.630938806537438e-06,
      "loss": 1.2221,
      "step": 2300
    },
    {
      "epoch": 1.3168994015389,
      "grad_norm": 7.25,
      "learning_rate": 5.611934625617636e-06,
      "loss": 1.243,
      "step": 2310
    },
    {
      "epoch": 1.322599031062981,
      "grad_norm": 7.4375,
      "learning_rate": 5.592930444697834e-06,
      "loss": 1.1507,
      "step": 2320
    },
    {
      "epoch": 1.328298660587062,
      "grad_norm": 8.125,
      "learning_rate": 5.573926263778031e-06,
      "loss": 1.1718,
      "step": 2330
    },
    {
      "epoch": 1.333998290111143,
      "grad_norm": 8.3125,
      "learning_rate": 5.554922082858229e-06,
      "loss": 1.1907,
      "step": 2340
    },
    {
      "epoch": 1.3396979196352237,
      "grad_norm": 7.8125,
      "learning_rate": 5.535917901938427e-06,
      "loss": 1.202,
      "step": 2350
    },
    {
      "epoch": 1.3453975491593047,
      "grad_norm": 7.75,
      "learning_rate": 5.516913721018624e-06,
      "loss": 1.1824,
      "step": 2360
    },
    {
      "epoch": 1.3510971786833856,
      "grad_norm": 7.0,
      "learning_rate": 5.497909540098822e-06,
      "loss": 1.1747,
      "step": 2370
    },
    {
      "epoch": 1.3567968082074664,
      "grad_norm": 7.34375,
      "learning_rate": 5.478905359179019e-06,
      "loss": 1.0821,
      "step": 2380
    },
    {
      "epoch": 1.3624964377315474,
      "grad_norm": 6.65625,
      "learning_rate": 5.459901178259217e-06,
      "loss": 1.1109,
      "step": 2390
    },
    {
      "epoch": 1.3681960672556284,
      "grad_norm": 6.75,
      "learning_rate": 5.440896997339415e-06,
      "loss": 1.1939,
      "step": 2400
    },
    {
      "epoch": 1.3738956967797094,
      "grad_norm": 6.59375,
      "learning_rate": 5.421892816419612e-06,
      "loss": 1.1034,
      "step": 2410
    },
    {
      "epoch": 1.3795953263037903,
      "grad_norm": 7.15625,
      "learning_rate": 5.40288863549981e-06,
      "loss": 1.1631,
      "step": 2420
    },
    {
      "epoch": 1.385294955827871,
      "grad_norm": 7.3125,
      "learning_rate": 5.383884454580008e-06,
      "loss": 1.2072,
      "step": 2430
    },
    {
      "epoch": 1.390994585351952,
      "grad_norm": 6.59375,
      "learning_rate": 5.364880273660205e-06,
      "loss": 1.1705,
      "step": 2440
    },
    {
      "epoch": 1.396694214876033,
      "grad_norm": 7.65625,
      "learning_rate": 5.345876092740403e-06,
      "loss": 1.1315,
      "step": 2450
    },
    {
      "epoch": 1.402393844400114,
      "grad_norm": 7.125,
      "learning_rate": 5.326871911820601e-06,
      "loss": 1.1392,
      "step": 2460
    },
    {
      "epoch": 1.408093473924195,
      "grad_norm": 7.90625,
      "learning_rate": 5.3078677309007984e-06,
      "loss": 1.2394,
      "step": 2470
    },
    {
      "epoch": 1.4137931034482758,
      "grad_norm": 7.46875,
      "learning_rate": 5.2888635499809956e-06,
      "loss": 1.1891,
      "step": 2480
    },
    {
      "epoch": 1.4194927329723568,
      "grad_norm": 7.5625,
      "learning_rate": 5.269859369061194e-06,
      "loss": 1.1707,
      "step": 2490
    },
    {
      "epoch": 1.4251923624964378,
      "grad_norm": 7.65625,
      "learning_rate": 5.2508551881413915e-06,
      "loss": 1.1489,
      "step": 2500
    },
    {
      "epoch": 1.4308919920205185,
      "grad_norm": 8.1875,
      "learning_rate": 5.2318510072215886e-06,
      "loss": 1.144,
      "step": 2510
    },
    {
      "epoch": 1.4365916215445997,
      "grad_norm": 7.78125,
      "learning_rate": 5.212846826301787e-06,
      "loss": 1.0845,
      "step": 2520
    },
    {
      "epoch": 1.4422912510686805,
      "grad_norm": 7.65625,
      "learning_rate": 5.1938426453819845e-06,
      "loss": 1.1684,
      "step": 2530
    },
    {
      "epoch": 1.4479908805927615,
      "grad_norm": 8.0,
      "learning_rate": 5.174838464462182e-06,
      "loss": 1.1816,
      "step": 2540
    },
    {
      "epoch": 1.4536905101168425,
      "grad_norm": 7.65625,
      "learning_rate": 5.15583428354238e-06,
      "loss": 1.122,
      "step": 2550
    },
    {
      "epoch": 1.4593901396409232,
      "grad_norm": 6.25,
      "learning_rate": 5.1368301026225775e-06,
      "loss": 1.1084,
      "step": 2560
    },
    {
      "epoch": 1.4650897691650042,
      "grad_norm": 6.71875,
      "learning_rate": 5.117825921702775e-06,
      "loss": 1.162,
      "step": 2570
    },
    {
      "epoch": 1.4707893986890852,
      "grad_norm": 8.0625,
      "learning_rate": 5.098821740782973e-06,
      "loss": 1.1224,
      "step": 2580
    },
    {
      "epoch": 1.4764890282131662,
      "grad_norm": 8.8125,
      "learning_rate": 5.0798175598631705e-06,
      "loss": 1.2061,
      "step": 2590
    },
    {
      "epoch": 1.4821886577372472,
      "grad_norm": 7.78125,
      "learning_rate": 5.060813378943368e-06,
      "loss": 1.1454,
      "step": 2600
    },
    {
      "epoch": 1.487888287261328,
      "grad_norm": 7.625,
      "learning_rate": 5.041809198023565e-06,
      "loss": 1.1524,
      "step": 2610
    },
    {
      "epoch": 1.493587916785409,
      "grad_norm": 8.4375,
      "learning_rate": 5.0228050171037635e-06,
      "loss": 1.1719,
      "step": 2620
    },
    {
      "epoch": 1.49928754630949,
      "grad_norm": 7.28125,
      "learning_rate": 5.003800836183961e-06,
      "loss": 1.2035,
      "step": 2630
    },
    {
      "epoch": 1.5049871758335707,
      "grad_norm": 8.6875,
      "learning_rate": 4.984796655264159e-06,
      "loss": 1.1639,
      "step": 2640
    },
    {
      "epoch": 1.5106868053576519,
      "grad_norm": 7.53125,
      "learning_rate": 4.9657924743443566e-06,
      "loss": 1.2598,
      "step": 2650
    },
    {
      "epoch": 1.5163864348817326,
      "grad_norm": 6.9375,
      "learning_rate": 4.946788293424554e-06,
      "loss": 1.2206,
      "step": 2660
    },
    {
      "epoch": 1.5220860644058136,
      "grad_norm": 8.4375,
      "learning_rate": 4.927784112504752e-06,
      "loss": 1.1556,
      "step": 2670
    },
    {
      "epoch": 1.5277856939298946,
      "grad_norm": 6.65625,
      "learning_rate": 4.90877993158495e-06,
      "loss": 1.1909,
      "step": 2680
    },
    {
      "epoch": 1.5334853234539754,
      "grad_norm": 7.53125,
      "learning_rate": 4.889775750665147e-06,
      "loss": 1.1516,
      "step": 2690
    },
    {
      "epoch": 1.5391849529780566,
      "grad_norm": 8.25,
      "learning_rate": 4.870771569745345e-06,
      "loss": 1.2119,
      "step": 2700
    },
    {
      "epoch": 1.5448845825021373,
      "grad_norm": 6.75,
      "learning_rate": 4.851767388825542e-06,
      "loss": 1.1116,
      "step": 2710
    },
    {
      "epoch": 1.5505842120262183,
      "grad_norm": 7.5625,
      "learning_rate": 4.83276320790574e-06,
      "loss": 1.1612,
      "step": 2720
    },
    {
      "epoch": 1.5562838415502993,
      "grad_norm": 7.6875,
      "learning_rate": 4.813759026985938e-06,
      "loss": 1.1181,
      "step": 2730
    },
    {
      "epoch": 1.56198347107438,
      "grad_norm": 7.8125,
      "learning_rate": 4.794754846066135e-06,
      "loss": 1.1038,
      "step": 2740
    },
    {
      "epoch": 1.567683100598461,
      "grad_norm": 7.78125,
      "learning_rate": 4.775750665146333e-06,
      "loss": 1.1909,
      "step": 2750
    },
    {
      "epoch": 1.573382730122542,
      "grad_norm": 7.75,
      "learning_rate": 4.756746484226531e-06,
      "loss": 1.1964,
      "step": 2760
    },
    {
      "epoch": 1.579082359646623,
      "grad_norm": 6.96875,
      "learning_rate": 4.737742303306728e-06,
      "loss": 1.1298,
      "step": 2770
    },
    {
      "epoch": 1.584781989170704,
      "grad_norm": 6.03125,
      "learning_rate": 4.718738122386926e-06,
      "loss": 1.1795,
      "step": 2780
    },
    {
      "epoch": 1.5904816186947848,
      "grad_norm": 7.15625,
      "learning_rate": 4.699733941467124e-06,
      "loss": 1.1552,
      "step": 2790
    },
    {
      "epoch": 1.5961812482188658,
      "grad_norm": 7.625,
      "learning_rate": 4.680729760547321e-06,
      "loss": 1.1374,
      "step": 2800
    },
    {
      "epoch": 1.6018808777429467,
      "grad_norm": 6.25,
      "learning_rate": 4.661725579627519e-06,
      "loss": 1.1977,
      "step": 2810
    },
    {
      "epoch": 1.6075805072670275,
      "grad_norm": 6.0625,
      "learning_rate": 4.642721398707717e-06,
      "loss": 1.1775,
      "step": 2820
    },
    {
      "epoch": 1.6132801367911087,
      "grad_norm": 7.78125,
      "learning_rate": 4.623717217787914e-06,
      "loss": 1.1719,
      "step": 2830
    },
    {
      "epoch": 1.6189797663151895,
      "grad_norm": 8.0,
      "learning_rate": 4.604713036868112e-06,
      "loss": 1.1505,
      "step": 2840
    },
    {
      "epoch": 1.6246793958392705,
      "grad_norm": 7.0625,
      "learning_rate": 4.585708855948309e-06,
      "loss": 1.1344,
      "step": 2850
    },
    {
      "epoch": 1.6303790253633514,
      "grad_norm": 8.875,
      "learning_rate": 4.566704675028507e-06,
      "loss": 1.1618,
      "step": 2860
    },
    {
      "epoch": 1.6360786548874322,
      "grad_norm": 6.875,
      "learning_rate": 4.547700494108705e-06,
      "loss": 1.1406,
      "step": 2870
    },
    {
      "epoch": 1.6417782844115134,
      "grad_norm": 8.5625,
      "learning_rate": 4.528696313188902e-06,
      "loss": 1.142,
      "step": 2880
    },
    {
      "epoch": 1.6474779139355942,
      "grad_norm": 7.65625,
      "learning_rate": 4.5096921322691e-06,
      "loss": 1.2851,
      "step": 2890
    },
    {
      "epoch": 1.6531775434596752,
      "grad_norm": 7.96875,
      "learning_rate": 4.490687951349297e-06,
      "loss": 1.1699,
      "step": 2900
    },
    {
      "epoch": 1.6588771729837561,
      "grad_norm": 8.0625,
      "learning_rate": 4.471683770429495e-06,
      "loss": 1.2166,
      "step": 2910
    },
    {
      "epoch": 1.664576802507837,
      "grad_norm": 7.125,
      "learning_rate": 4.452679589509693e-06,
      "loss": 1.1723,
      "step": 2920
    },
    {
      "epoch": 1.670276432031918,
      "grad_norm": 8.125,
      "learning_rate": 4.43367540858989e-06,
      "loss": 1.1753,
      "step": 2930
    },
    {
      "epoch": 1.6759760615559989,
      "grad_norm": 8.6875,
      "learning_rate": 4.414671227670088e-06,
      "loss": 1.1986,
      "step": 2940
    },
    {
      "epoch": 1.6816756910800796,
      "grad_norm": 8.25,
      "learning_rate": 4.395667046750285e-06,
      "loss": 1.1255,
      "step": 2950
    },
    {
      "epoch": 1.6873753206041608,
      "grad_norm": 6.6875,
      "learning_rate": 4.376662865830483e-06,
      "loss": 1.0918,
      "step": 2960
    },
    {
      "epoch": 1.6930749501282416,
      "grad_norm": 8.1875,
      "learning_rate": 4.357658684910681e-06,
      "loss": 1.1376,
      "step": 2970
    },
    {
      "epoch": 1.6987745796523226,
      "grad_norm": 7.15625,
      "learning_rate": 4.338654503990878e-06,
      "loss": 1.1703,
      "step": 2980
    },
    {
      "epoch": 1.7044742091764036,
      "grad_norm": 7.6875,
      "learning_rate": 4.319650323071076e-06,
      "loss": 1.2248,
      "step": 2990
    },
    {
      "epoch": 1.7101738387004843,
      "grad_norm": 8.25,
      "learning_rate": 4.300646142151273e-06,
      "loss": 1.1209,
      "step": 3000
    },
    {
      "epoch": 1.7158734682245655,
      "grad_norm": 7.40625,
      "learning_rate": 4.281641961231471e-06,
      "loss": 1.1239,
      "step": 3010
    },
    {
      "epoch": 1.7215730977486463,
      "grad_norm": 7.09375,
      "learning_rate": 4.262637780311669e-06,
      "loss": 1.1752,
      "step": 3020
    },
    {
      "epoch": 1.7272727272727273,
      "grad_norm": 7.625,
      "learning_rate": 4.243633599391866e-06,
      "loss": 1.1553,
      "step": 3030
    },
    {
      "epoch": 1.7329723567968083,
      "grad_norm": 7.3125,
      "learning_rate": 4.224629418472064e-06,
      "loss": 1.1895,
      "step": 3040
    },
    {
      "epoch": 1.738671986320889,
      "grad_norm": 7.96875,
      "learning_rate": 4.205625237552261e-06,
      "loss": 1.2147,
      "step": 3050
    },
    {
      "epoch": 1.74437161584497,
      "grad_norm": 7.6875,
      "learning_rate": 4.186621056632459e-06,
      "loss": 1.1616,
      "step": 3060
    },
    {
      "epoch": 1.750071245369051,
      "grad_norm": 9.0625,
      "learning_rate": 4.167616875712657e-06,
      "loss": 1.2389,
      "step": 3070
    },
    {
      "epoch": 1.755770874893132,
      "grad_norm": 9.3125,
      "learning_rate": 4.148612694792854e-06,
      "loss": 1.2059,
      "step": 3080
    },
    {
      "epoch": 1.761470504417213,
      "grad_norm": 8.125,
      "learning_rate": 4.129608513873052e-06,
      "loss": 1.1757,
      "step": 3090
    },
    {
      "epoch": 1.7671701339412937,
      "grad_norm": 7.34375,
      "learning_rate": 4.11060433295325e-06,
      "loss": 1.1371,
      "step": 3100
    },
    {
      "epoch": 1.7728697634653747,
      "grad_norm": 7.09375,
      "learning_rate": 4.091600152033447e-06,
      "loss": 1.0955,
      "step": 3110
    },
    {
      "epoch": 1.7785693929894557,
      "grad_norm": 6.46875,
      "learning_rate": 4.072595971113645e-06,
      "loss": 1.1645,
      "step": 3120
    },
    {
      "epoch": 1.7842690225135365,
      "grad_norm": 6.28125,
      "learning_rate": 4.053591790193843e-06,
      "loss": 1.1949,
      "step": 3130
    },
    {
      "epoch": 1.7899686520376177,
      "grad_norm": 7.8125,
      "learning_rate": 4.03458760927404e-06,
      "loss": 1.1085,
      "step": 3140
    },
    {
      "epoch": 1.7956682815616984,
      "grad_norm": 8.9375,
      "learning_rate": 4.015583428354238e-06,
      "loss": 1.1411,
      "step": 3150
    },
    {
      "epoch": 1.8013679110857794,
      "grad_norm": 7.8125,
      "learning_rate": 3.996579247434436e-06,
      "loss": 1.2513,
      "step": 3160
    },
    {
      "epoch": 1.8070675406098604,
      "grad_norm": 8.0625,
      "learning_rate": 3.977575066514633e-06,
      "loss": 1.2125,
      "step": 3170
    },
    {
      "epoch": 1.8127671701339412,
      "grad_norm": 7.0,
      "learning_rate": 3.958570885594831e-06,
      "loss": 1.1309,
      "step": 3180
    },
    {
      "epoch": 1.8184667996580224,
      "grad_norm": 7.15625,
      "learning_rate": 3.939566704675028e-06,
      "loss": 1.0975,
      "step": 3190
    },
    {
      "epoch": 1.8241664291821031,
      "grad_norm": 7.0625,
      "learning_rate": 3.920562523755226e-06,
      "loss": 1.1049,
      "step": 3200
    },
    {
      "epoch": 1.8298660587061841,
      "grad_norm": 8.4375,
      "learning_rate": 3.901558342835424e-06,
      "loss": 1.2252,
      "step": 3210
    },
    {
      "epoch": 1.8355656882302651,
      "grad_norm": 8.0,
      "learning_rate": 3.882554161915621e-06,
      "loss": 1.1366,
      "step": 3220
    },
    {
      "epoch": 1.8412653177543459,
      "grad_norm": 6.875,
      "learning_rate": 3.863549980995819e-06,
      "loss": 1.1729,
      "step": 3230
    },
    {
      "epoch": 1.8469649472784269,
      "grad_norm": 6.25,
      "learning_rate": 3.844545800076017e-06,
      "loss": 1.1101,
      "step": 3240
    },
    {
      "epoch": 1.8526645768025078,
      "grad_norm": 8.0625,
      "learning_rate": 3.825541619156214e-06,
      "loss": 1.12,
      "step": 3250
    },
    {
      "epoch": 1.8583642063265886,
      "grad_norm": 7.03125,
      "learning_rate": 3.8065374382364124e-06,
      "loss": 1.1498,
      "step": 3260
    },
    {
      "epoch": 1.8640638358506698,
      "grad_norm": 9.25,
      "learning_rate": 3.7875332573166103e-06,
      "loss": 1.1916,
      "step": 3270
    },
    {
      "epoch": 1.8697634653747506,
      "grad_norm": 8.8125,
      "learning_rate": 3.7685290763968074e-06,
      "loss": 1.1687,
      "step": 3280
    },
    {
      "epoch": 1.8754630948988316,
      "grad_norm": 7.34375,
      "learning_rate": 3.7495248954770054e-06,
      "loss": 1.154,
      "step": 3290
    },
    {
      "epoch": 1.8811627244229125,
      "grad_norm": 7.90625,
      "learning_rate": 3.730520714557203e-06,
      "loss": 1.1521,
      "step": 3300
    },
    {
      "epoch": 1.8868623539469933,
      "grad_norm": 6.0625,
      "learning_rate": 3.7115165336374004e-06,
      "loss": 1.1434,
      "step": 3310
    },
    {
      "epoch": 1.8925619834710745,
      "grad_norm": 7.40625,
      "learning_rate": 3.6925123527175984e-06,
      "loss": 1.136,
      "step": 3320
    },
    {
      "epoch": 1.8982616129951553,
      "grad_norm": 8.1875,
      "learning_rate": 3.673508171797796e-06,
      "loss": 1.2108,
      "step": 3330
    },
    {
      "epoch": 1.9039612425192363,
      "grad_norm": 7.21875,
      "learning_rate": 3.6545039908779935e-06,
      "loss": 1.1596,
      "step": 3340
    },
    {
      "epoch": 1.9096608720433172,
      "grad_norm": 8.375,
      "learning_rate": 3.635499809958191e-06,
      "loss": 1.26,
      "step": 3350
    },
    {
      "epoch": 1.915360501567398,
      "grad_norm": 6.84375,
      "learning_rate": 3.6164956290383885e-06,
      "loss": 1.1222,
      "step": 3360
    },
    {
      "epoch": 1.9210601310914792,
      "grad_norm": 8.6875,
      "learning_rate": 3.5974914481185865e-06,
      "loss": 1.2087,
      "step": 3370
    },
    {
      "epoch": 1.92675976061556,
      "grad_norm": 7.875,
      "learning_rate": 3.578487267198784e-06,
      "loss": 1.2603,
      "step": 3380
    },
    {
      "epoch": 1.932459390139641,
      "grad_norm": 7.0625,
      "learning_rate": 3.5594830862789815e-06,
      "loss": 1.2147,
      "step": 3390
    },
    {
      "epoch": 1.938159019663722,
      "grad_norm": 8.8125,
      "learning_rate": 3.540478905359179e-06,
      "loss": 1.1687,
      "step": 3400
    },
    {
      "epoch": 1.9438586491878027,
      "grad_norm": 7.625,
      "learning_rate": 3.521474724439377e-06,
      "loss": 1.1577,
      "step": 3410
    },
    {
      "epoch": 1.9495582787118837,
      "grad_norm": 7.8125,
      "learning_rate": 3.5024705435195746e-06,
      "loss": 1.1114,
      "step": 3420
    },
    {
      "epoch": 1.9552579082359647,
      "grad_norm": 7.5625,
      "learning_rate": 3.483466362599772e-06,
      "loss": 1.2162,
      "step": 3430
    },
    {
      "epoch": 1.9609575377600454,
      "grad_norm": 6.84375,
      "learning_rate": 3.46446218167997e-06,
      "loss": 1.1566,
      "step": 3440
    },
    {
      "epoch": 1.9666571672841267,
      "grad_norm": 7.21875,
      "learning_rate": 3.445458000760167e-06,
      "loss": 1.1913,
      "step": 3450
    },
    {
      "epoch": 1.9723567968082074,
      "grad_norm": 7.875,
      "learning_rate": 3.426453819840365e-06,
      "loss": 1.1609,
      "step": 3460
    },
    {
      "epoch": 1.9780564263322884,
      "grad_norm": 7.21875,
      "learning_rate": 3.407449638920563e-06,
      "loss": 1.1499,
      "step": 3470
    },
    {
      "epoch": 1.9837560558563694,
      "grad_norm": 8.1875,
      "learning_rate": 3.38844545800076e-06,
      "loss": 1.1342,
      "step": 3480
    },
    {
      "epoch": 1.9894556853804501,
      "grad_norm": 8.75,
      "learning_rate": 3.369441277080958e-06,
      "loss": 1.1574,
      "step": 3490
    },
    {
      "epoch": 1.9951553149045314,
      "grad_norm": 8.125,
      "learning_rate": 3.350437096161156e-06,
      "loss": 1.1451,
      "step": 3500
    },
    {
      "epoch": 2.001139925904816,
      "grad_norm": 8.5,
      "learning_rate": 3.331432915241353e-06,
      "loss": 1.3065,
      "step": 3510
    },
    {
      "epoch": 2.006839555428897,
      "grad_norm": 7.15625,
      "learning_rate": 3.312428734321551e-06,
      "loss": 1.16,
      "step": 3520
    },
    {
      "epoch": 2.012539184952978,
      "grad_norm": 7.96875,
      "learning_rate": 3.2934245534017483e-06,
      "loss": 1.1908,
      "step": 3530
    },
    {
      "epoch": 2.018238814477059,
      "grad_norm": 8.0,
      "learning_rate": 3.2744203724819462e-06,
      "loss": 1.1746,
      "step": 3540
    },
    {
      "epoch": 2.02393844400114,
      "grad_norm": 7.1875,
      "learning_rate": 3.255416191562144e-06,
      "loss": 1.1001,
      "step": 3550
    },
    {
      "epoch": 2.029638073525221,
      "grad_norm": 8.8125,
      "learning_rate": 3.2364120106423413e-06,
      "loss": 1.1319,
      "step": 3560
    },
    {
      "epoch": 2.0353377030493016,
      "grad_norm": 7.6875,
      "learning_rate": 3.2174078297225392e-06,
      "loss": 1.1245,
      "step": 3570
    },
    {
      "epoch": 2.041037332573383,
      "grad_norm": 7.71875,
      "learning_rate": 3.198403648802737e-06,
      "loss": 1.1098,
      "step": 3580
    },
    {
      "epoch": 2.0467369620974636,
      "grad_norm": 8.0,
      "learning_rate": 3.1793994678829343e-06,
      "loss": 1.0988,
      "step": 3590
    },
    {
      "epoch": 2.052436591621545,
      "grad_norm": 6.53125,
      "learning_rate": 3.1603952869631323e-06,
      "loss": 1.1293,
      "step": 3600
    },
    {
      "epoch": 2.0581362211456256,
      "grad_norm": 6.8125,
      "learning_rate": 3.14139110604333e-06,
      "loss": 1.1351,
      "step": 3610
    },
    {
      "epoch": 2.0638358506697063,
      "grad_norm": 8.5,
      "learning_rate": 3.1223869251235273e-06,
      "loss": 1.1508,
      "step": 3620
    },
    {
      "epoch": 2.0695354801937875,
      "grad_norm": 7.75,
      "learning_rate": 3.1033827442037253e-06,
      "loss": 1.1551,
      "step": 3630
    },
    {
      "epoch": 2.0752351097178683,
| "grad_norm": 8.0, | |
| "learning_rate": 3.084378563283923e-06, | |
| "loss": 1.1957, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 2.0809347392419495, | |
| "grad_norm": 7.96875, | |
| "learning_rate": 3.0653743823641203e-06, | |
| "loss": 1.2297, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 2.0866343687660303, | |
| "grad_norm": 6.03125, | |
| "learning_rate": 3.046370201444318e-06, | |
| "loss": 1.1971, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 2.092333998290111, | |
| "grad_norm": 7.78125, | |
| "learning_rate": 3.027366020524516e-06, | |
| "loss": 1.2317, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 2.0980336278141922, | |
| "grad_norm": 6.9375, | |
| "learning_rate": 3.0083618396047134e-06, | |
| "loss": 1.1261, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 2.103733257338273, | |
| "grad_norm": 7.25, | |
| "learning_rate": 2.989357658684911e-06, | |
| "loss": 1.1401, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 2.1094328868623538, | |
| "grad_norm": 7.875, | |
| "learning_rate": 2.9703534777651084e-06, | |
| "loss": 1.22, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 2.115132516386435, | |
| "grad_norm": 8.5625, | |
| "learning_rate": 2.951349296845306e-06, | |
| "loss": 1.1936, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 2.1208321459105157, | |
| "grad_norm": 7.4375, | |
| "learning_rate": 2.932345115925504e-06, | |
| "loss": 1.218, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 2.126531775434597, | |
| "grad_norm": 7.71875, | |
| "learning_rate": 2.9133409350057014e-06, | |
| "loss": 1.1363, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 2.1322314049586777, | |
| "grad_norm": 7.78125, | |
| "learning_rate": 2.894336754085899e-06, | |
| "loss": 1.1258, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 2.1379310344827585, | |
| "grad_norm": 8.25, | |
| "learning_rate": 2.875332573166097e-06, | |
| "loss": 1.1286, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 2.1436306640068397, | |
| "grad_norm": 8.5625, | |
| "learning_rate": 2.856328392246294e-06, | |
| "loss": 1.1456, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 2.1493302935309204, | |
| "grad_norm": 7.3125, | |
| "learning_rate": 2.837324211326492e-06, | |
| "loss": 1.1695, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 2.1550299230550016, | |
| "grad_norm": 7.28125, | |
| "learning_rate": 2.81832003040669e-06, | |
| "loss": 1.1621, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 2.1607295525790824, | |
| "grad_norm": 8.75, | |
| "learning_rate": 2.799315849486887e-06, | |
| "loss": 1.1934, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 2.166429182103163, | |
| "grad_norm": 8.25, | |
| "learning_rate": 2.780311668567085e-06, | |
| "loss": 1.1586, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 2.1721288116272444, | |
| "grad_norm": 7.46875, | |
| "learning_rate": 2.761307487647283e-06, | |
| "loss": 1.1635, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 2.177828441151325, | |
| "grad_norm": 7.625, | |
| "learning_rate": 2.74230330672748e-06, | |
| "loss": 1.1763, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 2.183528070675406, | |
| "grad_norm": 6.625, | |
| "learning_rate": 2.723299125807678e-06, | |
| "loss": 1.1155, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 2.189227700199487, | |
| "grad_norm": 8.625, | |
| "learning_rate": 2.704294944887876e-06, | |
| "loss": 1.24, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 2.194927329723568, | |
| "grad_norm": 7.0, | |
| "learning_rate": 2.685290763968073e-06, | |
| "loss": 1.1379, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 2.200626959247649, | |
| "grad_norm": 7.875, | |
| "learning_rate": 2.666286583048271e-06, | |
| "loss": 1.2076, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 2.20632658877173, | |
| "grad_norm": 7.65625, | |
| "learning_rate": 2.647282402128468e-06, | |
| "loss": 1.1562, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 2.2120262182958106, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 2.628278221208666e-06, | |
| "loss": 1.196, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 2.217725847819892, | |
| "grad_norm": 7.59375, | |
| "learning_rate": 2.609274040288864e-06, | |
| "loss": 1.178, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 2.2234254773439726, | |
| "grad_norm": 8.8125, | |
| "learning_rate": 2.590269859369061e-06, | |
| "loss": 1.1975, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 2.2291251068680538, | |
| "grad_norm": 8.5, | |
| "learning_rate": 2.571265678449259e-06, | |
| "loss": 1.173, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 2.2348247363921345, | |
| "grad_norm": 7.40625, | |
| "learning_rate": 2.5522614975294567e-06, | |
| "loss": 1.1358, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 2.2405243659162153, | |
| "grad_norm": 7.71875, | |
| "learning_rate": 2.533257316609654e-06, | |
| "loss": 1.0825, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 2.2462239954402965, | |
| "grad_norm": 7.0625, | |
| "learning_rate": 2.514253135689852e-06, | |
| "loss": 1.1757, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 2.2519236249643773, | |
| "grad_norm": 9.125, | |
| "learning_rate": 2.4952489547700497e-06, | |
| "loss": 1.1035, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 2.2576232544884585, | |
| "grad_norm": 8.25, | |
| "learning_rate": 2.4762447738502472e-06, | |
| "loss": 1.0908, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 2.2633228840125392, | |
| "grad_norm": 7.09375, | |
| "learning_rate": 2.4572405929304448e-06, | |
| "loss": 1.2189, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 2.26902251353662, | |
| "grad_norm": 7.1875, | |
| "learning_rate": 2.4382364120106423e-06, | |
| "loss": 1.1601, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 2.274722143060701, | |
| "grad_norm": 7.34375, | |
| "learning_rate": 2.4192322310908402e-06, | |
| "loss": 1.1614, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 2.280421772584782, | |
| "grad_norm": 8.0625, | |
| "learning_rate": 2.4002280501710378e-06, | |
| "loss": 1.1479, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 2.286121402108863, | |
| "grad_norm": 7.53125, | |
| "learning_rate": 2.3812238692512353e-06, | |
| "loss": 1.0901, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 2.291821031632944, | |
| "grad_norm": 8.75, | |
| "learning_rate": 2.362219688331433e-06, | |
| "loss": 1.1956, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 2.2975206611570247, | |
| "grad_norm": 7.25, | |
| "learning_rate": 2.343215507411631e-06, | |
| "loss": 1.1444, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 2.303220290681106, | |
| "grad_norm": 7.59375, | |
| "learning_rate": 2.3242113264918283e-06, | |
| "loss": 1.1696, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 2.3089199202051867, | |
| "grad_norm": 7.96875, | |
| "learning_rate": 2.305207145572026e-06, | |
| "loss": 1.1962, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 2.3146195497292674, | |
| "grad_norm": 6.90625, | |
| "learning_rate": 2.286202964652224e-06, | |
| "loss": 1.1512, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 2.3203191792533486, | |
| "grad_norm": 7.5625, | |
| "learning_rate": 2.2671987837324213e-06, | |
| "loss": 1.1005, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 2.3260188087774294, | |
| "grad_norm": 7.3125, | |
| "learning_rate": 2.248194602812619e-06, | |
| "loss": 1.0969, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 2.33171843830151, | |
| "grad_norm": 8.125, | |
| "learning_rate": 2.2291904218928164e-06, | |
| "loss": 1.1486, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 2.3374180678255914, | |
| "grad_norm": 7.46875, | |
| "learning_rate": 2.2101862409730144e-06, | |
| "loss": 1.1292, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 2.343117697349672, | |
| "grad_norm": 7.21875, | |
| "learning_rate": 2.191182060053212e-06, | |
| "loss": 1.1927, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 2.3488173268737533, | |
| "grad_norm": 7.75, | |
| "learning_rate": 2.1721778791334094e-06, | |
| "loss": 1.1041, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 2.354516956397834, | |
| "grad_norm": 7.625, | |
| "learning_rate": 2.1531736982136074e-06, | |
| "loss": 1.1174, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 2.360216585921915, | |
| "grad_norm": 7.15625, | |
| "learning_rate": 2.134169517293805e-06, | |
| "loss": 1.1631, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 2.365916215445996, | |
| "grad_norm": 9.1875, | |
| "learning_rate": 2.1151653363740025e-06, | |
| "loss": 1.1447, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 2.371615844970077, | |
| "grad_norm": 7.25, | |
| "learning_rate": 2.0961611554542e-06, | |
| "loss": 1.1613, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 2.377315474494158, | |
| "grad_norm": 8.25, | |
| "learning_rate": 2.077156974534398e-06, | |
| "loss": 1.143, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 2.383015104018239, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 2.0581527936145955e-06, | |
| "loss": 1.2261, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 2.3887147335423196, | |
| "grad_norm": 9.1875, | |
| "learning_rate": 2.039148612694793e-06, | |
| "loss": 1.1467, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 2.3944143630664008, | |
| "grad_norm": 9.25, | |
| "learning_rate": 2.020144431774991e-06, | |
| "loss": 1.1715, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 2.4001139925904815, | |
| "grad_norm": 7.03125, | |
| "learning_rate": 2.0011402508551885e-06, | |
| "loss": 1.0986, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 2.4058136221145627, | |
| "grad_norm": 7.625, | |
| "learning_rate": 1.982136069935386e-06, | |
| "loss": 1.1284, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 2.4115132516386435, | |
| "grad_norm": 8.0, | |
| "learning_rate": 1.9631318890155836e-06, | |
| "loss": 1.2176, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 2.4172128811627243, | |
| "grad_norm": 7.59375, | |
| "learning_rate": 1.944127708095781e-06, | |
| "loss": 1.1919, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 2.4229125106868055, | |
| "grad_norm": 8.25, | |
| "learning_rate": 1.925123527175979e-06, | |
| "loss": 1.1563, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 2.4286121402108862, | |
| "grad_norm": 7.28125, | |
| "learning_rate": 1.9061193462561764e-06, | |
| "loss": 1.2067, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 2.4343117697349674, | |
| "grad_norm": 7.65625, | |
| "learning_rate": 1.8871151653363743e-06, | |
| "loss": 1.2409, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 2.440011399259048, | |
| "grad_norm": 7.53125, | |
| "learning_rate": 1.8681109844165718e-06, | |
| "loss": 1.1257, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 2.445711028783129, | |
| "grad_norm": 9.0, | |
| "learning_rate": 1.8491068034967694e-06, | |
| "loss": 1.1233, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 2.45141065830721, | |
| "grad_norm": 9.0625, | |
| "learning_rate": 1.8301026225769671e-06, | |
| "loss": 1.1515, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 2.457110287831291, | |
| "grad_norm": 7.40625, | |
| "learning_rate": 1.8110984416571647e-06, | |
| "loss": 1.1561, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 2.462809917355372, | |
| "grad_norm": 9.0, | |
| "learning_rate": 1.7920942607373624e-06, | |
| "loss": 1.1568, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 2.468509546879453, | |
| "grad_norm": 7.5625, | |
| "learning_rate": 1.77309007981756e-06, | |
| "loss": 1.1507, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 2.4742091764035337, | |
| "grad_norm": 9.0, | |
| "learning_rate": 1.7540858988977577e-06, | |
| "loss": 1.2004, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 2.479908805927615, | |
| "grad_norm": 9.8125, | |
| "learning_rate": 1.7350817179779552e-06, | |
| "loss": 1.1062, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 2.4856084354516956, | |
| "grad_norm": 8.5, | |
| "learning_rate": 1.7160775370581527e-06, | |
| "loss": 1.1187, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 2.4913080649757764, | |
| "grad_norm": 11.375, | |
| "learning_rate": 1.6970733561383507e-06, | |
| "loss": 1.2026, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 2.4970076944998576, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 1.6780691752185482e-06, | |
| "loss": 1.154, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 2.5027073240239384, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 1.6590649942987458e-06, | |
| "loss": 1.1298, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 2.508406953548019, | |
| "grad_norm": 7.0625, | |
| "learning_rate": 1.6400608133789437e-06, | |
| "loss": 1.0692, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 2.5141065830721003, | |
| "grad_norm": 8.0, | |
| "learning_rate": 1.6210566324591412e-06, | |
| "loss": 1.1093, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 2.519806212596181, | |
| "grad_norm": 8.25, | |
| "learning_rate": 1.6020524515393388e-06, | |
| "loss": 1.1828, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 2.5255058421202623, | |
| "grad_norm": 7.15625, | |
| "learning_rate": 1.5830482706195363e-06, | |
| "loss": 1.2296, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 2.531205471644343, | |
| "grad_norm": 6.90625, | |
| "learning_rate": 1.564044089699734e-06, | |
| "loss": 1.1317, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 2.536905101168424, | |
| "grad_norm": 8.0, | |
| "learning_rate": 1.5450399087799318e-06, | |
| "loss": 1.1338, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 2.542604730692505, | |
| "grad_norm": 7.09375, | |
| "learning_rate": 1.5260357278601293e-06, | |
| "loss": 1.1304, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 2.548304360216586, | |
| "grad_norm": 8.0, | |
| "learning_rate": 1.507031546940327e-06, | |
| "loss": 1.1257, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 2.554003989740667, | |
| "grad_norm": 7.375, | |
| "learning_rate": 1.4880273660205246e-06, | |
| "loss": 1.1831, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 2.5597036192647478, | |
| "grad_norm": 8.375, | |
| "learning_rate": 1.4690231851007221e-06, | |
| "loss": 1.1555, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 2.5654032487888285, | |
| "grad_norm": 8.1875, | |
| "learning_rate": 1.4500190041809199e-06, | |
| "loss": 1.1995, | |
| "step": 4500 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 5262, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": false | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 1.948703574322053e+17, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |