| { | |
| "best_metric": null, | |
| "best_model_checkpoint": null, | |
| "epoch": 1.0, | |
| "eval_steps": 500, | |
| "global_step": 2424, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.00041254125412541255, | |
| "grad_norm": 25.364087477258124, | |
| "learning_rate": 4.1152263374485605e-08, | |
| "loss": 1.4505, | |
| "step": 1 | |
| }, | |
| { | |
| "epoch": 0.0020627062706270625, | |
| "grad_norm": 23.584120739330842, | |
| "learning_rate": 2.05761316872428e-07, | |
| "loss": 1.4234, | |
| "step": 5 | |
| }, | |
| { | |
| "epoch": 0.004125412541254125, | |
| "grad_norm": 14.928598653810912, | |
| "learning_rate": 4.11522633744856e-07, | |
| "loss": 1.3906, | |
| "step": 10 | |
| }, | |
| { | |
| "epoch": 0.006188118811881188, | |
| "grad_norm": 8.160909185975878, | |
| "learning_rate": 6.17283950617284e-07, | |
| "loss": 1.2594, | |
| "step": 15 | |
| }, | |
| { | |
| "epoch": 0.00825082508250825, | |
| "grad_norm": 10.23630031823055, | |
| "learning_rate": 8.23045267489712e-07, | |
| "loss": 1.1692, | |
| "step": 20 | |
| }, | |
| { | |
| "epoch": 0.010313531353135313, | |
| "grad_norm": 4.891097073750768, | |
| "learning_rate": 1.02880658436214e-06, | |
| "loss": 1.0515, | |
| "step": 25 | |
| }, | |
| { | |
| "epoch": 0.012376237623762377, | |
| "grad_norm": 3.5104107671564515, | |
| "learning_rate": 1.234567901234568e-06, | |
| "loss": 0.9833, | |
| "step": 30 | |
| }, | |
| { | |
| "epoch": 0.014438943894389438, | |
| "grad_norm": 3.531054710795857, | |
| "learning_rate": 1.440329218106996e-06, | |
| "loss": 0.9681, | |
| "step": 35 | |
| }, | |
| { | |
| "epoch": 0.0165016501650165, | |
| "grad_norm": 3.0121852103996622, | |
| "learning_rate": 1.646090534979424e-06, | |
| "loss": 0.9485, | |
| "step": 40 | |
| }, | |
| { | |
| "epoch": 0.018564356435643563, | |
| "grad_norm": 3.069656613093857, | |
| "learning_rate": 1.8518518518518519e-06, | |
| "loss": 0.9428, | |
| "step": 45 | |
| }, | |
| { | |
| "epoch": 0.020627062706270627, | |
| "grad_norm": 3.017501156282318, | |
| "learning_rate": 2.05761316872428e-06, | |
| "loss": 0.9155, | |
| "step": 50 | |
| }, | |
| { | |
| "epoch": 0.02268976897689769, | |
| "grad_norm": 2.9523272195417474, | |
| "learning_rate": 2.263374485596708e-06, | |
| "loss": 0.9189, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.024752475247524754, | |
| "grad_norm": 3.115591318120338, | |
| "learning_rate": 2.469135802469136e-06, | |
| "loss": 0.8972, | |
| "step": 60 | |
| }, | |
| { | |
| "epoch": 0.026815181518151814, | |
| "grad_norm": 3.315252968948637, | |
| "learning_rate": 2.674897119341564e-06, | |
| "loss": 0.9052, | |
| "step": 65 | |
| }, | |
| { | |
| "epoch": 0.028877887788778877, | |
| "grad_norm": 3.0856931589962424, | |
| "learning_rate": 2.880658436213992e-06, | |
| "loss": 0.8861, | |
| "step": 70 | |
| }, | |
| { | |
| "epoch": 0.03094059405940594, | |
| "grad_norm": 3.0575333985043316, | |
| "learning_rate": 3.08641975308642e-06, | |
| "loss": 0.881, | |
| "step": 75 | |
| }, | |
| { | |
| "epoch": 0.033003300330033, | |
| "grad_norm": 3.4245071366441606, | |
| "learning_rate": 3.292181069958848e-06, | |
| "loss": 0.8804, | |
| "step": 80 | |
| }, | |
| { | |
| "epoch": 0.03506600660066007, | |
| "grad_norm": 3.1231813544872074, | |
| "learning_rate": 3.4979423868312762e-06, | |
| "loss": 0.8866, | |
| "step": 85 | |
| }, | |
| { | |
| "epoch": 0.03712871287128713, | |
| "grad_norm": 3.095931330170451, | |
| "learning_rate": 3.7037037037037037e-06, | |
| "loss": 0.8754, | |
| "step": 90 | |
| }, | |
| { | |
| "epoch": 0.039191419141914194, | |
| "grad_norm": 3.129665723198324, | |
| "learning_rate": 3.909465020576132e-06, | |
| "loss": 0.8535, | |
| "step": 95 | |
| }, | |
| { | |
| "epoch": 0.041254125412541254, | |
| "grad_norm": 3.201387303972087, | |
| "learning_rate": 4.11522633744856e-06, | |
| "loss": 0.8763, | |
| "step": 100 | |
| }, | |
| { | |
| "epoch": 0.043316831683168314, | |
| "grad_norm": 3.198510757288822, | |
| "learning_rate": 4.3209876543209875e-06, | |
| "loss": 0.8806, | |
| "step": 105 | |
| }, | |
| { | |
| "epoch": 0.04537953795379538, | |
| "grad_norm": 3.0677019856853622, | |
| "learning_rate": 4.526748971193416e-06, | |
| "loss": 0.853, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.04744224422442244, | |
| "grad_norm": 3.063859140817453, | |
| "learning_rate": 4.732510288065844e-06, | |
| "loss": 0.8482, | |
| "step": 115 | |
| }, | |
| { | |
| "epoch": 0.04950495049504951, | |
| "grad_norm": 3.0863428454414743, | |
| "learning_rate": 4.938271604938272e-06, | |
| "loss": 0.8677, | |
| "step": 120 | |
| }, | |
| { | |
| "epoch": 0.05156765676567657, | |
| "grad_norm": 3.3814586153531554, | |
| "learning_rate": 5.1440329218107e-06, | |
| "loss": 0.8636, | |
| "step": 125 | |
| }, | |
| { | |
| "epoch": 0.05363036303630363, | |
| "grad_norm": 3.142802292993217, | |
| "learning_rate": 5.349794238683128e-06, | |
| "loss": 0.8614, | |
| "step": 130 | |
| }, | |
| { | |
| "epoch": 0.055693069306930694, | |
| "grad_norm": 3.1420236113465694, | |
| "learning_rate": 5.555555555555557e-06, | |
| "loss": 0.8516, | |
| "step": 135 | |
| }, | |
| { | |
| "epoch": 0.057755775577557754, | |
| "grad_norm": 2.994047093745851, | |
| "learning_rate": 5.761316872427984e-06, | |
| "loss": 0.8516, | |
| "step": 140 | |
| }, | |
| { | |
| "epoch": 0.05981848184818482, | |
| "grad_norm": 3.1021850083230937, | |
| "learning_rate": 5.967078189300412e-06, | |
| "loss": 0.8449, | |
| "step": 145 | |
| }, | |
| { | |
| "epoch": 0.06188118811881188, | |
| "grad_norm": 3.2584063367905562, | |
| "learning_rate": 6.17283950617284e-06, | |
| "loss": 0.8274, | |
| "step": 150 | |
| }, | |
| { | |
| "epoch": 0.06394389438943894, | |
| "grad_norm": 3.170049017525715, | |
| "learning_rate": 6.3786008230452675e-06, | |
| "loss": 0.8385, | |
| "step": 155 | |
| }, | |
| { | |
| "epoch": 0.066006600660066, | |
| "grad_norm": 3.4425094609983353, | |
| "learning_rate": 6.584362139917696e-06, | |
| "loss": 0.8563, | |
| "step": 160 | |
| }, | |
| { | |
| "epoch": 0.06806930693069307, | |
| "grad_norm": 2.927681759000597, | |
| "learning_rate": 6.790123456790124e-06, | |
| "loss": 0.8301, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.07013201320132013, | |
| "grad_norm": 3.1321713688594235, | |
| "learning_rate": 6.9958847736625525e-06, | |
| "loss": 0.8301, | |
| "step": 170 | |
| }, | |
| { | |
| "epoch": 0.0721947194719472, | |
| "grad_norm": 3.320556237455856, | |
| "learning_rate": 7.201646090534981e-06, | |
| "loss": 0.8419, | |
| "step": 175 | |
| }, | |
| { | |
| "epoch": 0.07425742574257425, | |
| "grad_norm": 3.441590967273474, | |
| "learning_rate": 7.4074074074074075e-06, | |
| "loss": 0.8311, | |
| "step": 180 | |
| }, | |
| { | |
| "epoch": 0.07632013201320131, | |
| "grad_norm": 3.081130693491232, | |
| "learning_rate": 7.613168724279836e-06, | |
| "loss": 0.8371, | |
| "step": 185 | |
| }, | |
| { | |
| "epoch": 0.07838283828382839, | |
| "grad_norm": 2.929265287656251, | |
| "learning_rate": 7.818930041152263e-06, | |
| "loss": 0.8286, | |
| "step": 190 | |
| }, | |
| { | |
| "epoch": 0.08044554455445545, | |
| "grad_norm": 3.369317740510869, | |
| "learning_rate": 8.024691358024692e-06, | |
| "loss": 0.8141, | |
| "step": 195 | |
| }, | |
| { | |
| "epoch": 0.08250825082508251, | |
| "grad_norm": 3.143333891707379, | |
| "learning_rate": 8.23045267489712e-06, | |
| "loss": 0.8365, | |
| "step": 200 | |
| }, | |
| { | |
| "epoch": 0.08457095709570957, | |
| "grad_norm": 3.085954619911263, | |
| "learning_rate": 8.43621399176955e-06, | |
| "loss": 0.8186, | |
| "step": 205 | |
| }, | |
| { | |
| "epoch": 0.08663366336633663, | |
| "grad_norm": 2.9994310068455183, | |
| "learning_rate": 8.641975308641975e-06, | |
| "loss": 0.8318, | |
| "step": 210 | |
| }, | |
| { | |
| "epoch": 0.0886963696369637, | |
| "grad_norm": 2.992649765505128, | |
| "learning_rate": 8.847736625514404e-06, | |
| "loss": 0.8174, | |
| "step": 215 | |
| }, | |
| { | |
| "epoch": 0.09075907590759076, | |
| "grad_norm": 2.931292838310062, | |
| "learning_rate": 9.053497942386832e-06, | |
| "loss": 0.8192, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 0.09282178217821782, | |
| "grad_norm": 3.1717689547907684, | |
| "learning_rate": 9.25925925925926e-06, | |
| "loss": 0.8162, | |
| "step": 225 | |
| }, | |
| { | |
| "epoch": 0.09488448844884488, | |
| "grad_norm": 3.015985778332564, | |
| "learning_rate": 9.465020576131688e-06, | |
| "loss": 0.8253, | |
| "step": 230 | |
| }, | |
| { | |
| "epoch": 0.09694719471947194, | |
| "grad_norm": 2.8717973335844986, | |
| "learning_rate": 9.670781893004116e-06, | |
| "loss": 0.8314, | |
| "step": 235 | |
| }, | |
| { | |
| "epoch": 0.09900990099009901, | |
| "grad_norm": 3.1365360963455227, | |
| "learning_rate": 9.876543209876543e-06, | |
| "loss": 0.8291, | |
| "step": 240 | |
| }, | |
| { | |
| "epoch": 0.10107260726072607, | |
| "grad_norm": 3.121729098249164, | |
| "learning_rate": 9.9999792514327e-06, | |
| "loss": 0.8185, | |
| "step": 245 | |
| }, | |
| { | |
| "epoch": 0.10313531353135313, | |
| "grad_norm": 2.899940654946387, | |
| "learning_rate": 9.999745832028163e-06, | |
| "loss": 0.815, | |
| "step": 250 | |
| }, | |
| { | |
| "epoch": 0.1051980198019802, | |
| "grad_norm": 2.979183990670769, | |
| "learning_rate": 9.999253069658074e-06, | |
| "loss": 0.7941, | |
| "step": 255 | |
| }, | |
| { | |
| "epoch": 0.10726072607260725, | |
| "grad_norm": 2.915884290090879, | |
| "learning_rate": 9.998500989882627e-06, | |
| "loss": 0.7928, | |
| "step": 260 | |
| }, | |
| { | |
| "epoch": 0.10932343234323433, | |
| "grad_norm": 3.0656511848073045, | |
| "learning_rate": 9.997489631713117e-06, | |
| "loss": 0.8052, | |
| "step": 265 | |
| }, | |
| { | |
| "epoch": 0.11138613861386139, | |
| "grad_norm": 2.779236934182846, | |
| "learning_rate": 9.996219047609943e-06, | |
| "loss": 0.8042, | |
| "step": 270 | |
| }, | |
| { | |
| "epoch": 0.11344884488448845, | |
| "grad_norm": 3.1406451139744553, | |
| "learning_rate": 9.99468930347986e-06, | |
| "loss": 0.7934, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 0.11551155115511551, | |
| "grad_norm": 2.908579072433511, | |
| "learning_rate": 9.99290047867258e-06, | |
| "loss": 0.8036, | |
| "step": 280 | |
| }, | |
| { | |
| "epoch": 0.11757425742574257, | |
| "grad_norm": 2.885133657343927, | |
| "learning_rate": 9.990852665976648e-06, | |
| "loss": 0.7966, | |
| "step": 285 | |
| }, | |
| { | |
| "epoch": 0.11963696369636964, | |
| "grad_norm": 2.8220528871968464, | |
| "learning_rate": 9.98854597161462e-06, | |
| "loss": 0.7872, | |
| "step": 290 | |
| }, | |
| { | |
| "epoch": 0.1216996699669967, | |
| "grad_norm": 3.0815967855392343, | |
| "learning_rate": 9.98598051523758e-06, | |
| "loss": 0.8156, | |
| "step": 295 | |
| }, | |
| { | |
| "epoch": 0.12376237623762376, | |
| "grad_norm": 2.9269865697026427, | |
| "learning_rate": 9.983156429918895e-06, | |
| "loss": 0.797, | |
| "step": 300 | |
| }, | |
| { | |
| "epoch": 0.12582508250825084, | |
| "grad_norm": 3.094829019950906, | |
| "learning_rate": 9.98007386214735e-06, | |
| "loss": 0.7914, | |
| "step": 305 | |
| }, | |
| { | |
| "epoch": 0.12788778877887788, | |
| "grad_norm": 2.9433436175076633, | |
| "learning_rate": 9.976732971819526e-06, | |
| "loss": 0.7811, | |
| "step": 310 | |
| }, | |
| { | |
| "epoch": 0.12995049504950495, | |
| "grad_norm": 2.763941972916272, | |
| "learning_rate": 9.973133932231514e-06, | |
| "loss": 0.7783, | |
| "step": 315 | |
| }, | |
| { | |
| "epoch": 0.132013201320132, | |
| "grad_norm": 2.876279072787343, | |
| "learning_rate": 9.96927693006992e-06, | |
| "loss": 0.7948, | |
| "step": 320 | |
| }, | |
| { | |
| "epoch": 0.13407590759075907, | |
| "grad_norm": 2.951672309789002, | |
| "learning_rate": 9.965162165402194e-06, | |
| "loss": 0.7815, | |
| "step": 325 | |
| }, | |
| { | |
| "epoch": 0.13613861386138615, | |
| "grad_norm": 2.9838776306124726, | |
| "learning_rate": 9.960789851666237e-06, | |
| "loss": 0.7745, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 0.1382013201320132, | |
| "grad_norm": 2.822284810816944, | |
| "learning_rate": 9.956160215659342e-06, | |
| "loss": 0.777, | |
| "step": 335 | |
| }, | |
| { | |
| "epoch": 0.14026402640264027, | |
| "grad_norm": 2.9059843659239637, | |
| "learning_rate": 9.951273497526423e-06, | |
| "loss": 0.7697, | |
| "step": 340 | |
| }, | |
| { | |
| "epoch": 0.14232673267326731, | |
| "grad_norm": 2.870130006561793, | |
| "learning_rate": 9.94612995074756e-06, | |
| "loss": 0.7516, | |
| "step": 345 | |
| }, | |
| { | |
| "epoch": 0.1443894389438944, | |
| "grad_norm": 2.772459191587491, | |
| "learning_rate": 9.94072984212485e-06, | |
| "loss": 0.7643, | |
| "step": 350 | |
| }, | |
| { | |
| "epoch": 0.14645214521452146, | |
| "grad_norm": 2.826949039936636, | |
| "learning_rate": 9.935073451768567e-06, | |
| "loss": 0.7648, | |
| "step": 355 | |
| }, | |
| { | |
| "epoch": 0.1485148514851485, | |
| "grad_norm": 3.0414673762126996, | |
| "learning_rate": 9.929161073082636e-06, | |
| "loss": 0.7781, | |
| "step": 360 | |
| }, | |
| { | |
| "epoch": 0.15057755775577558, | |
| "grad_norm": 2.7722425713109855, | |
| "learning_rate": 9.922993012749413e-06, | |
| "loss": 0.7614, | |
| "step": 365 | |
| }, | |
| { | |
| "epoch": 0.15264026402640263, | |
| "grad_norm": 2.748341237442781, | |
| "learning_rate": 9.916569590713775e-06, | |
| "loss": 0.7762, | |
| "step": 370 | |
| }, | |
| { | |
| "epoch": 0.1547029702970297, | |
| "grad_norm": 3.0368108721146045, | |
| "learning_rate": 9.90989114016652e-06, | |
| "loss": 0.7641, | |
| "step": 375 | |
| }, | |
| { | |
| "epoch": 0.15676567656765678, | |
| "grad_norm": 2.764173640131257, | |
| "learning_rate": 9.902958007527092e-06, | |
| "loss": 0.7237, | |
| "step": 380 | |
| }, | |
| { | |
| "epoch": 0.15882838283828382, | |
| "grad_norm": 2.7786342966630158, | |
| "learning_rate": 9.89577055242561e-06, | |
| "loss": 0.7513, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 0.1608910891089109, | |
| "grad_norm": 2.921540805258061, | |
| "learning_rate": 9.88832914768421e-06, | |
| "loss": 0.7575, | |
| "step": 390 | |
| }, | |
| { | |
| "epoch": 0.16295379537953794, | |
| "grad_norm": 2.7315756627341443, | |
| "learning_rate": 9.880634179297706e-06, | |
| "loss": 0.754, | |
| "step": 395 | |
| }, | |
| { | |
| "epoch": 0.16501650165016502, | |
| "grad_norm": 2.7136927749412734, | |
| "learning_rate": 9.872686046413575e-06, | |
| "loss": 0.7215, | |
| "step": 400 | |
| }, | |
| { | |
| "epoch": 0.1670792079207921, | |
| "grad_norm": 2.783986178315758, | |
| "learning_rate": 9.864485161311242e-06, | |
| "loss": 0.7651, | |
| "step": 405 | |
| }, | |
| { | |
| "epoch": 0.16914191419141913, | |
| "grad_norm": 2.8016707367343887, | |
| "learning_rate": 9.856031949380707e-06, | |
| "loss": 0.7256, | |
| "step": 410 | |
| }, | |
| { | |
| "epoch": 0.1712046204620462, | |
| "grad_norm": 2.9437239528614216, | |
| "learning_rate": 9.847326849100467e-06, | |
| "loss": 0.7228, | |
| "step": 415 | |
| }, | |
| { | |
| "epoch": 0.17326732673267325, | |
| "grad_norm": 2.739229372104028, | |
| "learning_rate": 9.838370312014783e-06, | |
| "loss": 0.7411, | |
| "step": 420 | |
| }, | |
| { | |
| "epoch": 0.17533003300330033, | |
| "grad_norm": 2.747689406028468, | |
| "learning_rate": 9.829162802710246e-06, | |
| "loss": 0.7373, | |
| "step": 425 | |
| }, | |
| { | |
| "epoch": 0.1773927392739274, | |
| "grad_norm": 2.879870332673276, | |
| "learning_rate": 9.819704798791691e-06, | |
| "loss": 0.7376, | |
| "step": 430 | |
| }, | |
| { | |
| "epoch": 0.17945544554455445, | |
| "grad_norm": 3.057579380919091, | |
| "learning_rate": 9.80999679085741e-06, | |
| "loss": 0.7233, | |
| "step": 435 | |
| }, | |
| { | |
| "epoch": 0.18151815181518152, | |
| "grad_norm": 3.100440102401277, | |
| "learning_rate": 9.800039282473719e-06, | |
| "loss": 0.7001, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 0.18358085808580857, | |
| "grad_norm": 2.7070231210764844, | |
| "learning_rate": 9.789832790148822e-06, | |
| "loss": 0.7085, | |
| "step": 445 | |
| }, | |
| { | |
| "epoch": 0.18564356435643564, | |
| "grad_norm": 2.733916165732209, | |
| "learning_rate": 9.77937784330603e-06, | |
| "loss": 0.7012, | |
| "step": 450 | |
| }, | |
| { | |
| "epoch": 0.18770627062706272, | |
| "grad_norm": 2.563129838542594, | |
| "learning_rate": 9.768674984256292e-06, | |
| "loss": 0.7123, | |
| "step": 455 | |
| }, | |
| { | |
| "epoch": 0.18976897689768976, | |
| "grad_norm": 2.74260970646788, | |
| "learning_rate": 9.757724768170074e-06, | |
| "loss": 0.7061, | |
| "step": 460 | |
| }, | |
| { | |
| "epoch": 0.19183168316831684, | |
| "grad_norm": 2.9775386795661607, | |
| "learning_rate": 9.74652776304855e-06, | |
| "loss": 0.7035, | |
| "step": 465 | |
| }, | |
| { | |
| "epoch": 0.19389438943894388, | |
| "grad_norm": 11.7972903732034, | |
| "learning_rate": 9.73508454969415e-06, | |
| "loss": 0.7031, | |
| "step": 470 | |
| }, | |
| { | |
| "epoch": 0.19595709570957096, | |
| "grad_norm": 3.0823242258683203, | |
| "learning_rate": 9.723395721680418e-06, | |
| "loss": 0.7208, | |
| "step": 475 | |
| }, | |
| { | |
| "epoch": 0.19801980198019803, | |
| "grad_norm": 2.984181238427559, | |
| "learning_rate": 9.711461885321247e-06, | |
| "loss": 0.7082, | |
| "step": 480 | |
| }, | |
| { | |
| "epoch": 0.20008250825082508, | |
| "grad_norm": 2.7986743538102505, | |
| "learning_rate": 9.699283659639402e-06, | |
| "loss": 0.6877, | |
| "step": 485 | |
| }, | |
| { | |
| "epoch": 0.20214521452145215, | |
| "grad_norm": 2.8619815735669403, | |
| "learning_rate": 9.68686167633443e-06, | |
| "loss": 0.764, | |
| "step": 490 | |
| }, | |
| { | |
| "epoch": 0.2042079207920792, | |
| "grad_norm": 4.637672201020113, | |
| "learning_rate": 9.67419657974988e-06, | |
| "loss": 0.6984, | |
| "step": 495 | |
| }, | |
| { | |
| "epoch": 0.20627062706270627, | |
| "grad_norm": 2.7511677692038217, | |
| "learning_rate": 9.661289026839889e-06, | |
| "loss": 0.698, | |
| "step": 500 | |
| }, | |
| { | |
| "epoch": 0.20833333333333334, | |
| "grad_norm": 3.1629886058739594, | |
| "learning_rate": 9.648139687135106e-06, | |
| "loss": 0.7025, | |
| "step": 505 | |
| }, | |
| { | |
| "epoch": 0.2103960396039604, | |
| "grad_norm": 2.835404473287634, | |
| "learning_rate": 9.634749242707948e-06, | |
| "loss": 0.6829, | |
| "step": 510 | |
| }, | |
| { | |
| "epoch": 0.21245874587458746, | |
| "grad_norm": 2.6239704035151687, | |
| "learning_rate": 9.62111838813724e-06, | |
| "loss": 0.6772, | |
| "step": 515 | |
| }, | |
| { | |
| "epoch": 0.2145214521452145, | |
| "grad_norm": 2.740976973488732, | |
| "learning_rate": 9.607247830472174e-06, | |
| "loss": 0.6731, | |
| "step": 520 | |
| }, | |
| { | |
| "epoch": 0.21658415841584158, | |
| "grad_norm": 2.735519864410512, | |
| "learning_rate": 9.593138289195634e-06, | |
| "loss": 0.6806, | |
| "step": 525 | |
| }, | |
| { | |
| "epoch": 0.21864686468646866, | |
| "grad_norm": 2.684453002911694, | |
| "learning_rate": 9.578790496186879e-06, | |
| "loss": 0.6962, | |
| "step": 530 | |
| }, | |
| { | |
| "epoch": 0.2207095709570957, | |
| "grad_norm": 2.7970751535674037, | |
| "learning_rate": 9.56420519568358e-06, | |
| "loss": 0.6854, | |
| "step": 535 | |
| }, | |
| { | |
| "epoch": 0.22277227722772278, | |
| "grad_norm": 2.893389666992377, | |
| "learning_rate": 9.549383144243213e-06, | |
| "loss": 0.6713, | |
| "step": 540 | |
| }, | |
| { | |
| "epoch": 0.22483498349834982, | |
| "grad_norm": 2.7169375882464926, | |
| "learning_rate": 9.534325110703814e-06, | |
| "loss": 0.7436, | |
| "step": 545 | |
| }, | |
| { | |
| "epoch": 0.2268976897689769, | |
| "grad_norm": 2.7286325422508537, | |
| "learning_rate": 9.519031876144106e-06, | |
| "loss": 0.6758, | |
| "step": 550 | |
| }, | |
| { | |
| "epoch": 0.22896039603960397, | |
| "grad_norm": 2.8735702079028775, | |
| "learning_rate": 9.503504233842973e-06, | |
| "loss": 0.6657, | |
| "step": 555 | |
| }, | |
| { | |
| "epoch": 0.23102310231023102, | |
| "grad_norm": 3.0387173259825735, | |
| "learning_rate": 9.487742989238318e-06, | |
| "loss": 0.6782, | |
| "step": 560 | |
| }, | |
| { | |
| "epoch": 0.2330858085808581, | |
| "grad_norm": 2.538376376963473, | |
| "learning_rate": 9.471748959885284e-06, | |
| "loss": 0.6776, | |
| "step": 565 | |
| }, | |
| { | |
| "epoch": 0.23514851485148514, | |
| "grad_norm": 2.6593328363095816, | |
| "learning_rate": 9.455522975413846e-06, | |
| "loss": 0.674, | |
| "step": 570 | |
| }, | |
| { | |
| "epoch": 0.2372112211221122, | |
| "grad_norm": 2.7283552755825715, | |
| "learning_rate": 9.439065877485774e-06, | |
| "loss": 0.6533, | |
| "step": 575 | |
| }, | |
| { | |
| "epoch": 0.23927392739273928, | |
| "grad_norm": 2.7031827262094708, | |
| "learning_rate": 9.422378519750978e-06, | |
| "loss": 0.6738, | |
| "step": 580 | |
| }, | |
| { | |
| "epoch": 0.24133663366336633, | |
| "grad_norm": 2.9208550727888554, | |
| "learning_rate": 9.40546176780323e-06, | |
| "loss": 0.6792, | |
| "step": 585 | |
| }, | |
| { | |
| "epoch": 0.2433993399339934, | |
| "grad_norm": 2.8493025404273555, | |
| "learning_rate": 9.388316499135259e-06, | |
| "loss": 0.6728, | |
| "step": 590 | |
| }, | |
| { | |
| "epoch": 0.24546204620462045, | |
| "grad_norm": 2.6354127779313985, | |
| "learning_rate": 9.370943603093235e-06, | |
| "loss": 0.6745, | |
| "step": 595 | |
| }, | |
| { | |
| "epoch": 0.24752475247524752, | |
| "grad_norm": 3.598421971885995, | |
| "learning_rate": 9.353343980830644e-06, | |
| "loss": 0.6581, | |
| "step": 600 | |
| }, | |
| { | |
| "epoch": 0.2495874587458746, | |
| "grad_norm": 2.5853090904417497, | |
| "learning_rate": 9.33551854526154e-06, | |
| "loss": 0.6673, | |
| "step": 605 | |
| }, | |
| { | |
| "epoch": 0.25165016501650167, | |
| "grad_norm": 2.793105077025725, | |
| "learning_rate": 9.31746822101319e-06, | |
| "loss": 0.6637, | |
| "step": 610 | |
| }, | |
| { | |
| "epoch": 0.2537128712871287, | |
| "grad_norm": 2.7270681052691765, | |
| "learning_rate": 9.299193944378112e-06, | |
| "loss": 0.6356, | |
| "step": 615 | |
| }, | |
| { | |
| "epoch": 0.25577557755775576, | |
| "grad_norm": 2.7008720069429115, | |
| "learning_rate": 9.280696663265512e-06, | |
| "loss": 0.6464, | |
| "step": 620 | |
| }, | |
| { | |
| "epoch": 0.25783828382838286, | |
| "grad_norm": 2.671865487166939, | |
| "learning_rate": 9.261977337152107e-06, | |
| "loss": 0.6527, | |
| "step": 625 | |
| }, | |
| { | |
| "epoch": 0.2599009900990099, | |
| "grad_norm": 2.653313117664496, | |
| "learning_rate": 9.243036937032373e-06, | |
| "loss": 0.638, | |
| "step": 630 | |
| }, | |
| { | |
| "epoch": 0.26196369636963696, | |
| "grad_norm": 2.5458051542477502, | |
| "learning_rate": 9.223876445368153e-06, | |
| "loss": 0.6545, | |
| "step": 635 | |
| }, | |
| { | |
| "epoch": 0.264026402640264, | |
| "grad_norm": 2.6965322964778027, | |
| "learning_rate": 9.204496856037718e-06, | |
| "loss": 0.6616, | |
| "step": 640 | |
| }, | |
| { | |
| "epoch": 0.2660891089108911, | |
| "grad_norm": 2.830059150957898, | |
| "learning_rate": 9.184899174284201e-06, | |
| "loss": 0.6492, | |
| "step": 645 | |
| }, | |
| { | |
| "epoch": 0.26815181518151815, | |
| "grad_norm": 2.796697680039645, | |
| "learning_rate": 9.16508441666346e-06, | |
| "loss": 0.6324, | |
| "step": 650 | |
| }, | |
| { | |
| "epoch": 0.2702145214521452, | |
| "grad_norm": 3.091827022287661, | |
| "learning_rate": 9.14505361099134e-06, | |
| "loss": 0.6603, | |
| "step": 655 | |
| }, | |
| { | |
| "epoch": 0.2722772277227723, | |
| "grad_norm": 2.77180371501166, | |
| "learning_rate": 9.124807796290366e-06, | |
| "loss": 0.6265, | |
| "step": 660 | |
| }, | |
| { | |
| "epoch": 0.27433993399339934, | |
| "grad_norm": 2.6247932532421894, | |
| "learning_rate": 9.104348022735853e-06, | |
| "loss": 0.623, | |
| "step": 665 | |
| }, | |
| { | |
| "epoch": 0.2764026402640264, | |
| "grad_norm": 2.909646454868501, | |
| "learning_rate": 9.083675351601417e-06, | |
| "loss": 0.6554, | |
| "step": 670 | |
| }, | |
| { | |
| "epoch": 0.2784653465346535, | |
| "grad_norm": 2.8355016443543795, | |
| "learning_rate": 9.062790855203932e-06, | |
| "loss": 0.6248, | |
| "step": 675 | |
| }, | |
| { | |
| "epoch": 0.28052805280528054, | |
| "grad_norm": 2.845588283250197, | |
| "learning_rate": 9.041695616847915e-06, | |
| "loss": 0.636, | |
| "step": 680 | |
| }, | |
| { | |
| "epoch": 0.2825907590759076, | |
| "grad_norm": 2.5668723810217657, | |
| "learning_rate": 9.020390730769324e-06, | |
| "loss": 0.647, | |
| "step": 685 | |
| }, | |
| { | |
| "epoch": 0.28465346534653463, | |
| "grad_norm": 2.7870591118074515, | |
| "learning_rate": 8.998877302078803e-06, | |
| "loss": 0.6163, | |
| "step": 690 | |
| }, | |
| { | |
| "epoch": 0.28671617161716173, | |
| "grad_norm": 2.8993841087894867, | |
| "learning_rate": 8.97715644670436e-06, | |
| "loss": 0.6308, | |
| "step": 695 | |
| }, | |
| { | |
| "epoch": 0.2887788778877888, | |
| "grad_norm": 2.669040121356427, | |
| "learning_rate": 8.955229291333473e-06, | |
| "loss": 0.6293, | |
| "step": 700 | |
| }, | |
| { | |
| "epoch": 0.2908415841584158, | |
| "grad_norm": 2.6715972640429095, | |
| "learning_rate": 8.933096973354665e-06, | |
| "loss": 0.6414, | |
| "step": 705 | |
| }, | |
| { | |
| "epoch": 0.2929042904290429, | |
| "grad_norm": 2.721741671637072, | |
| "learning_rate": 8.910760640798487e-06, | |
| "loss": 0.6048, | |
| "step": 710 | |
| }, | |
| { | |
| "epoch": 0.29496699669966997, | |
| "grad_norm": 2.694393411500226, | |
| "learning_rate": 8.88822145227798e-06, | |
| "loss": 0.6253, | |
| "step": 715 | |
| }, | |
| { | |
| "epoch": 0.297029702970297, | |
| "grad_norm": 2.7216607887245834, | |
| "learning_rate": 8.865480576928578e-06, | |
| "loss": 0.5975, | |
| "step": 720 | |
| }, | |
| { | |
| "epoch": 0.2990924092409241, | |
| "grad_norm": 2.56544703422047, | |
| "learning_rate": 8.842539194347448e-06, | |
| "loss": 0.6166, | |
| "step": 725 | |
| }, | |
| { | |
| "epoch": 0.30115511551155116, | |
| "grad_norm": 2.687926593730155, | |
| "learning_rate": 8.819398494532328e-06, | |
| "loss": 0.6162, | |
| "step": 730 | |
| }, | |
| { | |
| "epoch": 0.3032178217821782, | |
| "grad_norm": 2.715855361797108, | |
| "learning_rate": 8.796059677819773e-06, | |
| "loss": 0.6159, | |
| "step": 735 | |
| }, | |
| { | |
| "epoch": 0.30528052805280526, | |
| "grad_norm": 2.7177077570753574, | |
| "learning_rate": 8.77252395482291e-06, | |
| "loss": 0.6199, | |
| "step": 740 | |
| }, | |
| { | |
| "epoch": 0.30734323432343236, | |
| "grad_norm": 2.7537980998977596, | |
| "learning_rate": 8.748792546368641e-06, | |
| "loss": 0.5942, | |
| "step": 745 | |
| }, | |
| { | |
| "epoch": 0.3094059405940594, | |
| "grad_norm": 2.7754021446494894, | |
| "learning_rate": 8.72486668343431e-06, | |
| "loss": 0.6204, | |
| "step": 750 | |
| }, | |
| { | |
| "epoch": 0.31146864686468645, | |
| "grad_norm": 2.6145595604114744, | |
| "learning_rate": 8.700747607083851e-06, | |
| "loss": 0.606, | |
| "step": 755 | |
| }, | |
| { | |
| "epoch": 0.31353135313531355, | |
| "grad_norm": 2.5551572752880785, | |
| "learning_rate": 8.676436568403422e-06, | |
| "loss": 0.615, | |
| "step": 760 | |
| }, | |
| { | |
| "epoch": 0.3155940594059406, | |
| "grad_norm": 2.584531027457074, | |
| "learning_rate": 8.651934828436497e-06, | |
| "loss": 0.6094, | |
| "step": 765 | |
| }, | |
| { | |
| "epoch": 0.31765676567656764, | |
| "grad_norm": 2.7585647106017865, | |
| "learning_rate": 8.627243658118466e-06, | |
| "loss": 0.6082, | |
| "step": 770 | |
| }, | |
| { | |
| "epoch": 0.31971947194719474, | |
| "grad_norm": 2.654651285995417, | |
| "learning_rate": 8.602364338210699e-06, | |
| "loss": 0.5939, | |
| "step": 775 | |
| }, | |
| { | |
| "epoch": 0.3217821782178218, | |
| "grad_norm": 2.6787685912910204, | |
| "learning_rate": 8.57729815923412e-06, | |
| "loss": 0.5876, | |
| "step": 780 | |
| }, | |
| { | |
| "epoch": 0.32384488448844884, | |
| "grad_norm": 2.6267227792982357, | |
| "learning_rate": 8.55204642140226e-06, | |
| "loss": 0.599, | |
| "step": 785 | |
| }, | |
| { | |
| "epoch": 0.3259075907590759, | |
| "grad_norm": 2.6802025518663015, | |
| "learning_rate": 8.52661043455382e-06, | |
| "loss": 0.594, | |
| "step": 790 | |
| }, | |
| { | |
| "epoch": 0.327970297029703, | |
| "grad_norm": 2.57110758651426, | |
| "learning_rate": 8.50099151808472e-06, | |
| "loss": 0.5884, | |
| "step": 795 | |
| }, | |
| { | |
| "epoch": 0.33003300330033003, | |
| "grad_norm": 2.99724938552448, | |
| "learning_rate": 8.47519100087967e-06, | |
| "loss": 0.5821, | |
| "step": 800 | |
| }, | |
| { | |
| "epoch": 0.3320957095709571, | |
| "grad_norm": 2.7370257356141616, | |
| "learning_rate": 8.449210221243225e-06, | |
| "loss": 0.5761, | |
| "step": 805 | |
| }, | |
| { | |
| "epoch": 0.3341584158415842, | |
| "grad_norm": 3.0576827360675214, | |
| "learning_rate": 8.42305052683038e-06, | |
| "loss": 0.5824, | |
| "step": 810 | |
| }, | |
| { | |
| "epoch": 0.3362211221122112, | |
| "grad_norm": 2.562013124794496, | |
| "learning_rate": 8.39671327457666e-06, | |
| "loss": 0.5837, | |
| "step": 815 | |
| }, | |
| { | |
| "epoch": 0.33828382838283827, | |
| "grad_norm": 2.7787602704146916, | |
| "learning_rate": 8.370199830627732e-06, | |
| "loss": 0.5924, | |
| "step": 820 | |
| }, | |
| { | |
| "epoch": 0.34034653465346537, | |
| "grad_norm": 2.6149487374961136, | |
| "learning_rate": 8.343511570268541e-06, | |
| "loss": 0.594, | |
| "step": 825 | |
| }, | |
| { | |
| "epoch": 0.3424092409240924, | |
| "grad_norm": 2.7424901857047925, | |
| "learning_rate": 8.316649877851977e-06, | |
| "loss": 0.5696, | |
| "step": 830 | |
| }, | |
| { | |
| "epoch": 0.34447194719471946, | |
| "grad_norm": 2.654634886694336, | |
| "learning_rate": 8.289616146727062e-06, | |
| "loss": 0.5821, | |
| "step": 835 | |
| }, | |
| { | |
| "epoch": 0.3465346534653465, | |
| "grad_norm": 2.5386950387254514, | |
| "learning_rate": 8.262411779166681e-06, | |
| "loss": 0.5722, | |
| "step": 840 | |
| }, | |
| { | |
| "epoch": 0.3485973597359736, | |
| "grad_norm": 2.5882867695406353, | |
| "learning_rate": 8.235038186294836e-06, | |
| "loss": 0.5896, | |
| "step": 845 | |
| }, | |
| { | |
| "epoch": 0.35066006600660066, | |
| "grad_norm": 2.6558091815806226, | |
| "learning_rate": 8.207496788013456e-06, | |
| "loss": 0.5843, | |
| "step": 850 | |
| }, | |
| { | |
| "epoch": 0.3527227722772277, | |
| "grad_norm": 2.766934233648842, | |
| "learning_rate": 8.179789012928747e-06, | |
| "loss": 0.5785, | |
| "step": 855 | |
| }, | |
| { | |
| "epoch": 0.3547854785478548, | |
| "grad_norm": 2.6862925251030427, | |
| "learning_rate": 8.151916298277078e-06, | |
| "loss": 0.5641, | |
| "step": 860 | |
| }, | |
| { | |
| "epoch": 0.35684818481848185, | |
| "grad_norm": 2.6591167593333256, | |
| "learning_rate": 8.123880089850438e-06, | |
| "loss": 0.5696, | |
| "step": 865 | |
| }, | |
| { | |
| "epoch": 0.3589108910891089, | |
| "grad_norm": 2.6326244768332745, | |
| "learning_rate": 8.095681841921441e-06, | |
| "loss": 0.5791, | |
| "step": 870 | |
| }, | |
| { | |
| "epoch": 0.360973597359736, | |
| "grad_norm": 2.65447071424182, | |
| "learning_rate": 8.06732301716789e-06, | |
| "loss": 0.5663, | |
| "step": 875 | |
| }, | |
| { | |
| "epoch": 0.36303630363036304, | |
| "grad_norm": 2.7561863484341207, | |
| "learning_rate": 8.038805086596903e-06, | |
| "loss": 0.572, | |
| "step": 880 | |
| }, | |
| { | |
| "epoch": 0.3650990099009901, | |
| "grad_norm": 2.6239959610961665, | |
| "learning_rate": 8.010129529468614e-06, | |
| "loss": 0.5703, | |
| "step": 885 | |
| }, | |
| { | |
| "epoch": 0.36716171617161714, | |
| "grad_norm": 2.8054930191127627, | |
| "learning_rate": 7.981297833219435e-06, | |
| "loss": 0.5697, | |
| "step": 890 | |
| }, | |
| { | |
| "epoch": 0.36922442244224424, | |
| "grad_norm": 2.556672661006346, | |
| "learning_rate": 7.952311493384916e-06, | |
| "loss": 0.5454, | |
| "step": 895 | |
| }, | |
| { | |
| "epoch": 0.3712871287128713, | |
| "grad_norm": 2.711104263166826, | |
| "learning_rate": 7.923172013522153e-06, | |
| "loss": 0.5477, | |
| "step": 900 | |
| }, | |
| { | |
| "epoch": 0.37334983498349833, | |
| "grad_norm": 2.6047389045795137, | |
| "learning_rate": 7.893880905131807e-06, | |
| "loss": 0.5548, | |
| "step": 905 | |
| }, | |
| { | |
| "epoch": 0.37541254125412543, | |
| "grad_norm": 2.6009558864259077, | |
| "learning_rate": 7.864439687579695e-06, | |
| "loss": 0.5487, | |
| "step": 910 | |
| }, | |
| { | |
| "epoch": 0.3774752475247525, | |
| "grad_norm": 2.776081930223416, | |
| "learning_rate": 7.834849888017979e-06, | |
| "loss": 0.5535, | |
| "step": 915 | |
| }, | |
| { | |
| "epoch": 0.3795379537953795, | |
| "grad_norm": 2.7439984836733227, | |
| "learning_rate": 7.805113041305958e-06, | |
| "loss": 0.5647, | |
| "step": 920 | |
| }, | |
| { | |
| "epoch": 0.3816006600660066, | |
| "grad_norm": 2.621923636099921, | |
| "learning_rate": 7.775230689930445e-06, | |
| "loss": 0.5364, | |
| "step": 925 | |
| }, | |
| { | |
| "epoch": 0.38366336633663367, | |
| "grad_norm": 2.5440712763463296, | |
| "learning_rate": 7.745204383925753e-06, | |
| "loss": 0.5392, | |
| "step": 930 | |
| }, | |
| { | |
| "epoch": 0.3857260726072607, | |
| "grad_norm": 2.6239950992845134, | |
| "learning_rate": 7.715035680793311e-06, | |
| "loss": 0.548, | |
| "step": 935 | |
| }, | |
| { | |
| "epoch": 0.38778877887788776, | |
| "grad_norm": 2.558970533428198, | |
| "learning_rate": 7.684726145420853e-06, | |
| "loss": 0.5481, | |
| "step": 940 | |
| }, | |
| { | |
| "epoch": 0.38985148514851486, | |
| "grad_norm": 2.733683917435511, | |
| "learning_rate": 7.654277350001255e-06, | |
| "loss": 0.5735, | |
| "step": 945 | |
| }, | |
| { | |
| "epoch": 0.3919141914191419, | |
| "grad_norm": 2.4440362195756, | |
| "learning_rate": 7.623690873950988e-06, | |
| "loss": 0.5361, | |
| "step": 950 | |
| }, | |
| { | |
| "epoch": 0.39397689768976896, | |
| "grad_norm": 2.7948133819884347, | |
| "learning_rate": 7.592968303828181e-06, | |
| "loss": 0.532, | |
| "step": 955 | |
| }, | |
| { | |
| "epoch": 0.39603960396039606, | |
| "grad_norm": 2.712027777278135, | |
| "learning_rate": 7.5621112332503325e-06, | |
| "loss": 0.5323, | |
| "step": 960 | |
| }, | |
| { | |
| "epoch": 0.3981023102310231, | |
| "grad_norm": 2.5614258873150724, | |
| "learning_rate": 7.531121262811645e-06, | |
| "loss": 0.532, | |
| "step": 965 | |
| }, | |
| { | |
| "epoch": 0.40016501650165015, | |
| "grad_norm": 2.5258391365444557, | |
| "learning_rate": 7.500000000000001e-06, | |
| "loss": 0.5271, | |
| "step": 970 | |
| }, | |
| { | |
| "epoch": 0.40222772277227725, | |
| "grad_norm": 2.543456979300113, | |
| "learning_rate": 7.468749059113578e-06, | |
| "loss": 0.5168, | |
| "step": 975 | |
| }, | |
| { | |
| "epoch": 0.4042904290429043, | |
| "grad_norm": 2.5683535191631623, | |
| "learning_rate": 7.437370061177116e-06, | |
| "loss": 0.5341, | |
| "step": 980 | |
| }, | |
| { | |
| "epoch": 0.40635313531353134, | |
| "grad_norm": 2.598132530083866, | |
| "learning_rate": 7.40586463385783e-06, | |
| "loss": 0.5278, | |
| "step": 985 | |
| }, | |
| { | |
| "epoch": 0.4084158415841584, | |
| "grad_norm": 2.6824293845426666, | |
| "learning_rate": 7.374234411380987e-06, | |
| "loss": 0.5388, | |
| "step": 990 | |
| }, | |
| { | |
| "epoch": 0.4104785478547855, | |
| "grad_norm": 2.6715849470717576, | |
| "learning_rate": 7.342481034445127e-06, | |
| "loss": 0.5476, | |
| "step": 995 | |
| }, | |
| { | |
| "epoch": 0.41254125412541254, | |
| "grad_norm": 2.676941475984213, | |
| "learning_rate": 7.310606150136965e-06, | |
| "loss": 0.5291, | |
| "step": 1000 | |
| }, | |
| { | |
| "epoch": 0.4146039603960396, | |
| "grad_norm": 2.7331658451074627, | |
| "learning_rate": 7.2786114118459564e-06, | |
| "loss": 0.5277, | |
| "step": 1005 | |
| }, | |
| { | |
| "epoch": 0.4166666666666667, | |
| "grad_norm": 2.6472987943602813, | |
| "learning_rate": 7.246498479178523e-06, | |
| "loss": 0.5221, | |
| "step": 1010 | |
| }, | |
| { | |
| "epoch": 0.41872937293729373, | |
| "grad_norm": 2.586821568507126, | |
| "learning_rate": 7.214269017871981e-06, | |
| "loss": 0.5163, | |
| "step": 1015 | |
| }, | |
| { | |
| "epoch": 0.4207920792079208, | |
| "grad_norm": 2.4588164960210834, | |
| "learning_rate": 7.181924699708127e-06, | |
| "loss": 0.5151, | |
| "step": 1020 | |
| }, | |
| { | |
| "epoch": 0.4228547854785479, | |
| "grad_norm": 2.5130611281830006, | |
| "learning_rate": 7.149467202426525e-06, | |
| "loss": 0.5326, | |
| "step": 1025 | |
| }, | |
| { | |
| "epoch": 0.4249174917491749, | |
| "grad_norm": 2.6743950877544265, | |
| "learning_rate": 7.116898209637478e-06, | |
| "loss": 0.5324, | |
| "step": 1030 | |
| }, | |
| { | |
| "epoch": 0.42698019801980197, | |
| "grad_norm": 2.5588441225148735, | |
| "learning_rate": 7.084219410734701e-06, | |
| "loss": 0.5277, | |
| "step": 1035 | |
| }, | |
| { | |
| "epoch": 0.429042904290429, | |
| "grad_norm": 2.6168950669777935, | |
| "learning_rate": 7.051432500807682e-06, | |
| "loss": 0.4983, | |
| "step": 1040 | |
| }, | |
| { | |
| "epoch": 0.4311056105610561, | |
| "grad_norm": 2.5513680083363677, | |
| "learning_rate": 7.018539180553768e-06, | |
| "loss": 0.5032, | |
| "step": 1045 | |
| }, | |
| { | |
| "epoch": 0.43316831683168316, | |
| "grad_norm": 2.590369266646011, | |
| "learning_rate": 6.985541156189932e-06, | |
| "loss": 0.5151, | |
| "step": 1050 | |
| }, | |
| { | |
| "epoch": 0.4352310231023102, | |
| "grad_norm": 2.5434958160268986, | |
| "learning_rate": 6.952440139364286e-06, | |
| "loss": 0.5231, | |
| "step": 1055 | |
| }, | |
| { | |
| "epoch": 0.4372937293729373, | |
| "grad_norm": 2.630962973641557, | |
| "learning_rate": 6.919237847067282e-06, | |
| "loss": 0.5248, | |
| "step": 1060 | |
| }, | |
| { | |
| "epoch": 0.43935643564356436, | |
| "grad_norm": 2.4427367015076737, | |
| "learning_rate": 6.885936001542658e-06, | |
| "loss": 0.5074, | |
| "step": 1065 | |
| }, | |
| { | |
| "epoch": 0.4414191419141914, | |
| "grad_norm": 2.529834433774531, | |
| "learning_rate": 6.852536330198099e-06, | |
| "loss": 0.52, | |
| "step": 1070 | |
| }, | |
| { | |
| "epoch": 0.4434818481848185, | |
| "grad_norm": 2.5714905566940622, | |
| "learning_rate": 6.819040565515636e-06, | |
| "loss": 0.5273, | |
| "step": 1075 | |
| }, | |
| { | |
| "epoch": 0.44554455445544555, | |
| "grad_norm": 2.4185173202106585, | |
| "learning_rate": 6.785450444961783e-06, | |
| "loss": 0.5058, | |
| "step": 1080 | |
| }, | |
| { | |
| "epoch": 0.4476072607260726, | |
| "grad_norm": 2.6463305618629884, | |
| "learning_rate": 6.751767710897404e-06, | |
| "loss": 0.5186, | |
| "step": 1085 | |
| }, | |
| { | |
| "epoch": 0.44966996699669964, | |
| "grad_norm": 2.4417765175517374, | |
| "learning_rate": 6.71799411048734e-06, | |
| "loss": 0.5119, | |
| "step": 1090 | |
| }, | |
| { | |
| "epoch": 0.45173267326732675, | |
| "grad_norm": 2.4466915489716246, | |
| "learning_rate": 6.684131395609784e-06, | |
| "loss": 0.5091, | |
| "step": 1095 | |
| }, | |
| { | |
| "epoch": 0.4537953795379538, | |
| "grad_norm": 2.5469460375657045, | |
| "learning_rate": 6.650181322765407e-06, | |
| "loss": 0.4972, | |
| "step": 1100 | |
| }, | |
| { | |
| "epoch": 0.45585808580858084, | |
| "grad_norm": 2.55425172030808, | |
| "learning_rate": 6.61614565298624e-06, | |
| "loss": 0.5163, | |
| "step": 1105 | |
| }, | |
| { | |
| "epoch": 0.45792079207920794, | |
| "grad_norm": 2.6047811461540635, | |
| "learning_rate": 6.5820261517443365e-06, | |
| "loss": 0.5068, | |
| "step": 1110 | |
| }, | |
| { | |
| "epoch": 0.459983498349835, | |
| "grad_norm": 2.4314690079287344, | |
| "learning_rate": 6.54782458886019e-06, | |
| "loss": 0.4936, | |
| "step": 1115 | |
| }, | |
| { | |
| "epoch": 0.46204620462046203, | |
| "grad_norm": 2.4496252297971512, | |
| "learning_rate": 6.5135427384109315e-06, | |
| "loss": 0.5055, | |
| "step": 1120 | |
| }, | |
| { | |
| "epoch": 0.46410891089108913, | |
| "grad_norm": 2.4983465170752996, | |
| "learning_rate": 6.479182378638308e-06, | |
| "loss": 0.508, | |
| "step": 1125 | |
| }, | |
| { | |
| "epoch": 0.4661716171617162, | |
| "grad_norm": 2.493954436696378, | |
| "learning_rate": 6.444745291856442e-06, | |
| "loss": 0.5074, | |
| "step": 1130 | |
| }, | |
| { | |
| "epoch": 0.4682343234323432, | |
| "grad_norm": 2.4743870926317104, | |
| "learning_rate": 6.410233264359379e-06, | |
| "loss": 0.4817, | |
| "step": 1135 | |
| }, | |
| { | |
| "epoch": 0.47029702970297027, | |
| "grad_norm": 2.6015106871562717, | |
| "learning_rate": 6.375648086328431e-06, | |
| "loss": 0.503, | |
| "step": 1140 | |
| }, | |
| { | |
| "epoch": 0.47235973597359737, | |
| "grad_norm": 2.4748853313920876, | |
| "learning_rate": 6.340991551739319e-06, | |
| "loss": 0.4898, | |
| "step": 1145 | |
| }, | |
| { | |
| "epoch": 0.4744224422442244, | |
| "grad_norm": 2.571563250995017, | |
| "learning_rate": 6.3062654582691175e-06, | |
| "loss": 0.5038, | |
| "step": 1150 | |
| }, | |
| { | |
| "epoch": 0.47648514851485146, | |
| "grad_norm": 2.56828630674544, | |
| "learning_rate": 6.271471607203006e-06, | |
| "loss": 0.4964, | |
| "step": 1155 | |
| }, | |
| { | |
| "epoch": 0.47854785478547857, | |
| "grad_norm": 2.4472686378385013, | |
| "learning_rate": 6.236611803340829e-06, | |
| "loss": 0.4837, | |
| "step": 1160 | |
| }, | |
| { | |
| "epoch": 0.4806105610561056, | |
| "grad_norm": 2.4926046687862864, | |
| "learning_rate": 6.201687854903492e-06, | |
| "loss": 0.4783, | |
| "step": 1165 | |
| }, | |
| { | |
| "epoch": 0.48267326732673266, | |
| "grad_norm": 2.4605803697540893, | |
| "learning_rate": 6.16670157343915e-06, | |
| "loss": 0.4855, | |
| "step": 1170 | |
| }, | |
| { | |
| "epoch": 0.48473597359735976, | |
| "grad_norm": 2.4830994265099906, | |
| "learning_rate": 6.131654773729255e-06, | |
| "loss": 0.492, | |
| "step": 1175 | |
| }, | |
| { | |
| "epoch": 0.4867986798679868, | |
| "grad_norm": 2.4315421147193184, | |
| "learning_rate": 6.096549273694411e-06, | |
| "loss": 0.4906, | |
| "step": 1180 | |
| }, | |
| { | |
| "epoch": 0.48886138613861385, | |
| "grad_norm": 2.599517694694856, | |
| "learning_rate": 6.061386894300082e-06, | |
| "loss": 0.4818, | |
| "step": 1185 | |
| }, | |
| { | |
| "epoch": 0.4909240924092409, | |
| "grad_norm": 2.5808864192189835, | |
| "learning_rate": 6.026169459462132e-06, | |
| "loss": 0.4882, | |
| "step": 1190 | |
| }, | |
| { | |
| "epoch": 0.492986798679868, | |
| "grad_norm": 2.378628769357962, | |
| "learning_rate": 5.990898795952225e-06, | |
| "loss": 0.4645, | |
| "step": 1195 | |
| }, | |
| { | |
| "epoch": 0.49504950495049505, | |
| "grad_norm": 2.5396332086908906, | |
| "learning_rate": 5.955576733303053e-06, | |
| "loss": 0.4864, | |
| "step": 1200 | |
| }, | |
| { | |
| "epoch": 0.4971122112211221, | |
| "grad_norm": 2.597727268666024, | |
| "learning_rate": 5.920205103713449e-06, | |
| "loss": 0.4801, | |
| "step": 1205 | |
| }, | |
| { | |
| "epoch": 0.4991749174917492, | |
| "grad_norm": 2.5073049896152604, | |
| "learning_rate": 5.884785741953345e-06, | |
| "loss": 0.493, | |
| "step": 1210 | |
| }, | |
| { | |
| "epoch": 0.5012376237623762, | |
| "grad_norm": 2.5498231092866352, | |
| "learning_rate": 5.849320485268597e-06, | |
| "loss": 0.4847, | |
| "step": 1215 | |
| }, | |
| { | |
| "epoch": 0.5033003300330033, | |
| "grad_norm": 2.4434059859729267, | |
| "learning_rate": 5.8138111732856906e-06, | |
| "loss": 0.4816, | |
| "step": 1220 | |
| }, | |
| { | |
| "epoch": 0.5053630363036303, | |
| "grad_norm": 2.427941557179141, | |
| "learning_rate": 5.778259647916309e-06, | |
| "loss": 0.4752, | |
| "step": 1225 | |
| }, | |
| { | |
| "epoch": 0.5074257425742574, | |
| "grad_norm": 2.5066438897899523, | |
| "learning_rate": 5.7426677532618e-06, | |
| "loss": 0.4751, | |
| "step": 1230 | |
| }, | |
| { | |
| "epoch": 0.5094884488448845, | |
| "grad_norm": 2.442917336254435, | |
| "learning_rate": 5.707037335517514e-06, | |
| "loss": 0.4516, | |
| "step": 1235 | |
| }, | |
| { | |
| "epoch": 0.5115511551155115, | |
| "grad_norm": 2.4869384945022066, | |
| "learning_rate": 5.67137024287704e-06, | |
| "loss": 0.4697, | |
| "step": 1240 | |
| }, | |
| { | |
| "epoch": 0.5136138613861386, | |
| "grad_norm": 2.4916982755437016, | |
| "learning_rate": 5.635668325436343e-06, | |
| "loss": 0.4593, | |
| "step": 1245 | |
| }, | |
| { | |
| "epoch": 0.5156765676567657, | |
| "grad_norm": 2.400223999637477, | |
| "learning_rate": 5.599933435097791e-06, | |
| "loss": 0.4627, | |
| "step": 1250 | |
| }, | |
| { | |
| "epoch": 0.5177392739273927, | |
| "grad_norm": 2.5691310342129006, | |
| "learning_rate": 5.564167425474093e-06, | |
| "loss": 0.4773, | |
| "step": 1255 | |
| }, | |
| { | |
| "epoch": 0.5198019801980198, | |
| "grad_norm": 2.5534505692218135, | |
| "learning_rate": 5.528372151792161e-06, | |
| "loss": 0.4757, | |
| "step": 1260 | |
| }, | |
| { | |
| "epoch": 0.5218646864686468, | |
| "grad_norm": 2.3595984093788465, | |
| "learning_rate": 5.492549470796865e-06, | |
| "loss": 0.4598, | |
| "step": 1265 | |
| }, | |
| { | |
| "epoch": 0.5239273927392739, | |
| "grad_norm": 2.484377807128577, | |
| "learning_rate": 5.456701240654726e-06, | |
| "loss": 0.4591, | |
| "step": 1270 | |
| }, | |
| { | |
| "epoch": 0.525990099009901, | |
| "grad_norm": 2.4760013171249966, | |
| "learning_rate": 5.420829320857532e-06, | |
| "loss": 0.4707, | |
| "step": 1275 | |
| }, | |
| { | |
| "epoch": 0.528052805280528, | |
| "grad_norm": 2.462101837921461, | |
| "learning_rate": 5.384935572125882e-06, | |
| "loss": 0.454, | |
| "step": 1280 | |
| }, | |
| { | |
| "epoch": 0.5301155115511551, | |
| "grad_norm": 2.588115117622585, | |
| "learning_rate": 5.349021856312669e-06, | |
| "loss": 0.4678, | |
| "step": 1285 | |
| }, | |
| { | |
| "epoch": 0.5321782178217822, | |
| "grad_norm": 2.5754165982294626, | |
| "learning_rate": 5.3130900363065055e-06, | |
| "loss": 0.4573, | |
| "step": 1290 | |
| }, | |
| { | |
| "epoch": 0.5342409240924092, | |
| "grad_norm": 2.4762653761884064, | |
| "learning_rate": 5.277141975935083e-06, | |
| "loss": 0.4733, | |
| "step": 1295 | |
| }, | |
| { | |
| "epoch": 0.5363036303630363, | |
| "grad_norm": 2.527567420790079, | |
| "learning_rate": 5.24117953986851e-06, | |
| "loss": 0.4678, | |
| "step": 1300 | |
| }, | |
| { | |
| "epoch": 0.5383663366336634, | |
| "grad_norm": 2.55248617117022, | |
| "learning_rate": 5.2052045935225725e-06, | |
| "loss": 0.4676, | |
| "step": 1305 | |
| }, | |
| { | |
| "epoch": 0.5404290429042904, | |
| "grad_norm": 2.569839654328644, | |
| "learning_rate": 5.169219002961987e-06, | |
| "loss": 0.4516, | |
| "step": 1310 | |
| }, | |
| { | |
| "epoch": 0.5424917491749175, | |
| "grad_norm": 2.4340359025464458, | |
| "learning_rate": 5.133224634803594e-06, | |
| "loss": 0.4487, | |
| "step": 1315 | |
| }, | |
| { | |
| "epoch": 0.5445544554455446, | |
| "grad_norm": 2.5185591909324985, | |
| "learning_rate": 5.097223356119538e-06, | |
| "loss": 0.4508, | |
| "step": 1320 | |
| }, | |
| { | |
| "epoch": 0.5466171617161716, | |
| "grad_norm": 2.473389867364878, | |
| "learning_rate": 5.061217034340426e-06, | |
| "loss": 0.4641, | |
| "step": 1325 | |
| }, | |
| { | |
| "epoch": 0.5486798679867987, | |
| "grad_norm": 2.4259356317621585, | |
| "learning_rate": 5.02520753715845e-06, | |
| "loss": 0.4489, | |
| "step": 1330 | |
| }, | |
| { | |
| "epoch": 0.5507425742574258, | |
| "grad_norm": 2.419683877837546, | |
| "learning_rate": 4.989196732430518e-06, | |
| "loss": 0.4523, | |
| "step": 1335 | |
| }, | |
| { | |
| "epoch": 0.5528052805280528, | |
| "grad_norm": 2.5368183953560077, | |
| "learning_rate": 4.953186488081362e-06, | |
| "loss": 0.4575, | |
| "step": 1340 | |
| }, | |
| { | |
| "epoch": 0.5548679867986799, | |
| "grad_norm": 2.6640720825787727, | |
| "learning_rate": 4.9171786720066465e-06, | |
| "loss": 0.4613, | |
| "step": 1345 | |
| }, | |
| { | |
| "epoch": 0.556930693069307, | |
| "grad_norm": 2.3570386936334025, | |
| "learning_rate": 4.881175151976075e-06, | |
| "loss": 0.4533, | |
| "step": 1350 | |
| }, | |
| { | |
| "epoch": 0.558993399339934, | |
| "grad_norm": 2.92205965965078, | |
| "learning_rate": 4.845177795536516e-06, | |
| "loss": 0.4315, | |
| "step": 1355 | |
| }, | |
| { | |
| "epoch": 0.5610561056105611, | |
| "grad_norm": 2.350803495083776, | |
| "learning_rate": 4.809188469915121e-06, | |
| "loss": 0.4611, | |
| "step": 1360 | |
| }, | |
| { | |
| "epoch": 0.5631188118811881, | |
| "grad_norm": 2.4307822932274084, | |
| "learning_rate": 4.773209041922472e-06, | |
| "loss": 0.4502, | |
| "step": 1365 | |
| }, | |
| { | |
| "epoch": 0.5651815181518152, | |
| "grad_norm": 2.471289928751954, | |
| "learning_rate": 4.737241377855751e-06, | |
| "loss": 0.4505, | |
| "step": 1370 | |
| }, | |
| { | |
| "epoch": 0.5672442244224423, | |
| "grad_norm": 2.493867736661699, | |
| "learning_rate": 4.7012873434019296e-06, | |
| "loss": 0.4518, | |
| "step": 1375 | |
| }, | |
| { | |
| "epoch": 0.5693069306930693, | |
| "grad_norm": 2.3665679103622974, | |
| "learning_rate": 4.6653488035409975e-06, | |
| "loss": 0.4303, | |
| "step": 1380 | |
| }, | |
| { | |
| "epoch": 0.5713696369636964, | |
| "grad_norm": 2.642220991131861, | |
| "learning_rate": 4.629427622449217e-06, | |
| "loss": 0.4458, | |
| "step": 1385 | |
| }, | |
| { | |
| "epoch": 0.5734323432343235, | |
| "grad_norm": 2.534010674982419, | |
| "learning_rate": 4.59352566340243e-06, | |
| "loss": 0.4407, | |
| "step": 1390 | |
| }, | |
| { | |
| "epoch": 0.5754950495049505, | |
| "grad_norm": 2.36153551659038, | |
| "learning_rate": 4.557644788679413e-06, | |
| "loss": 0.4343, | |
| "step": 1395 | |
| }, | |
| { | |
| "epoch": 0.5775577557755776, | |
| "grad_norm": 2.535141668952679, | |
| "learning_rate": 4.521786859465263e-06, | |
| "loss": 0.4412, | |
| "step": 1400 | |
| }, | |
| { | |
| "epoch": 0.5796204620462047, | |
| "grad_norm": 2.3990701439475446, | |
| "learning_rate": 4.485953735754872e-06, | |
| "loss": 0.4302, | |
| "step": 1405 | |
| }, | |
| { | |
| "epoch": 0.5816831683168316, | |
| "grad_norm": 2.463873380082242, | |
| "learning_rate": 4.450147276256439e-06, | |
| "loss": 0.4422, | |
| "step": 1410 | |
| }, | |
| { | |
| "epoch": 0.5837458745874587, | |
| "grad_norm": 2.3323366385949953, | |
| "learning_rate": 4.414369338295056e-06, | |
| "loss": 0.4358, | |
| "step": 1415 | |
| }, | |
| { | |
| "epoch": 0.5858085808580858, | |
| "grad_norm": 2.3533697334953874, | |
| "learning_rate": 4.37862177771637e-06, | |
| "loss": 0.4475, | |
| "step": 1420 | |
| }, | |
| { | |
| "epoch": 0.5878712871287128, | |
| "grad_norm": 2.4813703004487655, | |
| "learning_rate": 4.342906448790315e-06, | |
| "loss": 0.4244, | |
| "step": 1425 | |
| }, | |
| { | |
| "epoch": 0.5899339933993399, | |
| "grad_norm": 2.4263959721663686, | |
| "learning_rate": 4.307225204114927e-06, | |
| "loss": 0.4249, | |
| "step": 1430 | |
| }, | |
| { | |
| "epoch": 0.591996699669967, | |
| "grad_norm": 2.3890090123031675, | |
| "learning_rate": 4.271579894520254e-06, | |
| "loss": 0.4374, | |
| "step": 1435 | |
| }, | |
| { | |
| "epoch": 0.594059405940594, | |
| "grad_norm": 2.4162520655229316, | |
| "learning_rate": 4.235972368972343e-06, | |
| "loss": 0.42, | |
| "step": 1440 | |
| }, | |
| { | |
| "epoch": 0.5961221122112211, | |
| "grad_norm": 2.4663434097906416, | |
| "learning_rate": 4.200404474477341e-06, | |
| "loss": 0.4333, | |
| "step": 1445 | |
| }, | |
| { | |
| "epoch": 0.5981848184818482, | |
| "grad_norm": 2.643133094563938, | |
| "learning_rate": 4.16487805598568e-06, | |
| "loss": 0.4383, | |
| "step": 1450 | |
| }, | |
| { | |
| "epoch": 0.6002475247524752, | |
| "grad_norm": 2.369553776540664, | |
| "learning_rate": 4.12939495629638e-06, | |
| "loss": 0.421, | |
| "step": 1455 | |
| }, | |
| { | |
| "epoch": 0.6023102310231023, | |
| "grad_norm": 2.3739230876100277, | |
| "learning_rate": 4.093957015961465e-06, | |
| "loss": 0.4205, | |
| "step": 1460 | |
| }, | |
| { | |
| "epoch": 0.6043729372937293, | |
| "grad_norm": 2.7065139401959857, | |
| "learning_rate": 4.0585660731904855e-06, | |
| "loss": 0.4281, | |
| "step": 1465 | |
| }, | |
| { | |
| "epoch": 0.6064356435643564, | |
| "grad_norm": 2.303889526435788, | |
| "learning_rate": 4.023223963755168e-06, | |
| "loss": 0.4284, | |
| "step": 1470 | |
| }, | |
| { | |
| "epoch": 0.6084983498349835, | |
| "grad_norm": 2.4782043827381974, | |
| "learning_rate": 3.987932520894201e-06, | |
| "loss": 0.4347, | |
| "step": 1475 | |
| }, | |
| { | |
| "epoch": 0.6105610561056105, | |
| "grad_norm": 2.385221087724949, | |
| "learning_rate": 3.9526935752181275e-06, | |
| "loss": 0.4189, | |
| "step": 1480 | |
| }, | |
| { | |
| "epoch": 0.6126237623762376, | |
| "grad_norm": 2.4606523274098397, | |
| "learning_rate": 3.917508954614401e-06, | |
| "loss": 0.431, | |
| "step": 1485 | |
| }, | |
| { | |
| "epoch": 0.6146864686468647, | |
| "grad_norm": 2.3670999832545405, | |
| "learning_rate": 3.882380484152567e-06, | |
| "loss": 0.4284, | |
| "step": 1490 | |
| }, | |
| { | |
| "epoch": 0.6167491749174917, | |
| "grad_norm": 2.476043722834365, | |
| "learning_rate": 3.847309985989593e-06, | |
| "loss": 0.4241, | |
| "step": 1495 | |
| }, | |
| { | |
| "epoch": 0.6188118811881188, | |
| "grad_norm": 2.37313210444145, | |
| "learning_rate": 3.8122992792753534e-06, | |
| "loss": 0.4314, | |
| "step": 1500 | |
| }, | |
| { | |
| "epoch": 0.6208745874587459, | |
| "grad_norm": 2.382925261546362, | |
| "learning_rate": 3.777350180058264e-06, | |
| "loss": 0.4257, | |
| "step": 1505 | |
| }, | |
| { | |
| "epoch": 0.6229372937293729, | |
| "grad_norm": 2.2462536557274766, | |
| "learning_rate": 3.7424645011910847e-06, | |
| "loss": 0.4076, | |
| "step": 1510 | |
| }, | |
| { | |
| "epoch": 0.625, | |
| "grad_norm": 2.3505024535992733, | |
| "learning_rate": 3.707644052236887e-06, | |
| "loss": 0.3984, | |
| "step": 1515 | |
| }, | |
| { | |
| "epoch": 0.6270627062706271, | |
| "grad_norm": 2.4771391837897516, | |
| "learning_rate": 3.672890639375184e-06, | |
| "loss": 0.4172, | |
| "step": 1520 | |
| }, | |
| { | |
| "epoch": 0.6291254125412541, | |
| "grad_norm": 2.3721712805814197, | |
| "learning_rate": 3.6382060653082434e-06, | |
| "loss": 0.4097, | |
| "step": 1525 | |
| }, | |
| { | |
| "epoch": 0.6311881188118812, | |
| "grad_norm": 2.4306878354961454, | |
| "learning_rate": 3.6035921291675815e-06, | |
| "loss": 0.4076, | |
| "step": 1530 | |
| }, | |
| { | |
| "epoch": 0.6332508250825083, | |
| "grad_norm": 2.492688891911326, | |
| "learning_rate": 3.569050626420636e-06, | |
| "loss": 0.4253, | |
| "step": 1535 | |
| }, | |
| { | |
| "epoch": 0.6353135313531353, | |
| "grad_norm": 2.5661742951165585, | |
| "learning_rate": 3.5345833487776404e-06, | |
| "loss": 0.4238, | |
| "step": 1540 | |
| }, | |
| { | |
| "epoch": 0.6373762376237624, | |
| "grad_norm": 2.331662853575241, | |
| "learning_rate": 3.500192084098677e-06, | |
| "loss": 0.4183, | |
| "step": 1545 | |
| }, | |
| { | |
| "epoch": 0.6394389438943895, | |
| "grad_norm": 2.497643635749368, | |
| "learning_rate": 3.4658786163009416e-06, | |
| "loss": 0.4246, | |
| "step": 1550 | |
| }, | |
| { | |
| "epoch": 0.6415016501650165, | |
| "grad_norm": 2.5479933583941663, | |
| "learning_rate": 3.4316447252662142e-06, | |
| "loss": 0.4153, | |
| "step": 1555 | |
| }, | |
| { | |
| "epoch": 0.6435643564356436, | |
| "grad_norm": 2.455261221854015, | |
| "learning_rate": 3.3974921867485238e-06, | |
| "loss": 0.4206, | |
| "step": 1560 | |
| }, | |
| { | |
| "epoch": 0.6456270627062707, | |
| "grad_norm": 2.6087745317564712, | |
| "learning_rate": 3.3634227722820496e-06, | |
| "loss": 0.4078, | |
| "step": 1565 | |
| }, | |
| { | |
| "epoch": 0.6476897689768977, | |
| "grad_norm": 2.3621897623815302, | |
| "learning_rate": 3.3294382490892226e-06, | |
| "loss": 0.4306, | |
| "step": 1570 | |
| }, | |
| { | |
| "epoch": 0.6497524752475248, | |
| "grad_norm": 2.3806385163770067, | |
| "learning_rate": 3.2955403799890567e-06, | |
| "loss": 0.3937, | |
| "step": 1575 | |
| }, | |
| { | |
| "epoch": 0.6518151815181518, | |
| "grad_norm": 2.3247110625837863, | |
| "learning_rate": 3.261730923305717e-06, | |
| "loss": 0.4038, | |
| "step": 1580 | |
| }, | |
| { | |
| "epoch": 0.6538778877887789, | |
| "grad_norm": 2.375164955670525, | |
| "learning_rate": 3.2280116327773028e-06, | |
| "loss": 0.3958, | |
| "step": 1585 | |
| }, | |
| { | |
| "epoch": 0.655940594059406, | |
| "grad_norm": 2.4926818374164674, | |
| "learning_rate": 3.194384257464884e-06, | |
| "loss": 0.3996, | |
| "step": 1590 | |
| }, | |
| { | |
| "epoch": 0.658003300330033, | |
| "grad_norm": 2.495497820744883, | |
| "learning_rate": 3.160850541661779e-06, | |
| "loss": 0.3989, | |
| "step": 1595 | |
| }, | |
| { | |
| "epoch": 0.6600660066006601, | |
| "grad_norm": 2.4045930607358392, | |
| "learning_rate": 3.127412224803068e-06, | |
| "loss": 0.4063, | |
| "step": 1600 | |
| }, | |
| { | |
| "epoch": 0.6621287128712872, | |
| "grad_norm": 2.454505000870822, | |
| "learning_rate": 3.094071041375375e-06, | |
| "loss": 0.3978, | |
| "step": 1605 | |
| }, | |
| { | |
| "epoch": 0.6641914191419142, | |
| "grad_norm": 2.415988215095147, | |
| "learning_rate": 3.060828720826889e-06, | |
| "loss": 0.401, | |
| "step": 1610 | |
| }, | |
| { | |
| "epoch": 0.6662541254125413, | |
| "grad_norm": 2.4233639484301945, | |
| "learning_rate": 3.0276869874776632e-06, | |
| "loss": 0.4037, | |
| "step": 1615 | |
| }, | |
| { | |
| "epoch": 0.6683168316831684, | |
| "grad_norm": 2.4871754101710923, | |
| "learning_rate": 2.994647560430167e-06, | |
| "loss": 0.3957, | |
| "step": 1620 | |
| }, | |
| { | |
| "epoch": 0.6703795379537953, | |
| "grad_norm": 2.3870434326246475, | |
| "learning_rate": 2.961712153480118e-06, | |
| "loss": 0.406, | |
| "step": 1625 | |
| }, | |
| { | |
| "epoch": 0.6724422442244224, | |
| "grad_norm": 2.291964987725719, | |
| "learning_rate": 2.9288824750275803e-06, | |
| "loss": 0.4039, | |
| "step": 1630 | |
| }, | |
| { | |
| "epoch": 0.6745049504950495, | |
| "grad_norm": 2.3781197855311613, | |
| "learning_rate": 2.896160227988357e-06, | |
| "loss": 0.392, | |
| "step": 1635 | |
| }, | |
| { | |
| "epoch": 0.6765676567656765, | |
| "grad_norm": 2.419352678270268, | |
| "learning_rate": 2.8635471097056423e-06, | |
| "loss": 0.3948, | |
| "step": 1640 | |
| }, | |
| { | |
| "epoch": 0.6786303630363036, | |
| "grad_norm": 2.350196159609065, | |
| "learning_rate": 2.8310448118619967e-06, | |
| "loss": 0.3962, | |
| "step": 1645 | |
| }, | |
| { | |
| "epoch": 0.6806930693069307, | |
| "grad_norm": 2.359226335753887, | |
| "learning_rate": 2.7986550203915807e-06, | |
| "loss": 0.4032, | |
| "step": 1650 | |
| }, | |
| { | |
| "epoch": 0.6827557755775577, | |
| "grad_norm": 2.4118762465962558, | |
| "learning_rate": 2.7663794153927165e-06, | |
| "loss": 0.4063, | |
| "step": 1655 | |
| }, | |
| { | |
| "epoch": 0.6848184818481848, | |
| "grad_norm": 2.409985641673926, | |
| "learning_rate": 2.7342196710407337e-06, | |
| "loss": 0.3873, | |
| "step": 1660 | |
| }, | |
| { | |
| "epoch": 0.6868811881188119, | |
| "grad_norm": 2.3453850159224916, | |
| "learning_rate": 2.7021774555011214e-06, | |
| "loss": 0.3935, | |
| "step": 1665 | |
| }, | |
| { | |
| "epoch": 0.6889438943894389, | |
| "grad_norm": 2.38810093387651, | |
| "learning_rate": 2.6702544308430122e-06, | |
| "loss": 0.3948, | |
| "step": 1670 | |
| }, | |
| { | |
| "epoch": 0.691006600660066, | |
| "grad_norm": 2.4973217878271763, | |
| "learning_rate": 2.6384522529529542e-06, | |
| "loss": 0.3922, | |
| "step": 1675 | |
| }, | |
| { | |
| "epoch": 0.693069306930693, | |
| "grad_norm": 2.4765249088902044, | |
| "learning_rate": 2.6067725714490307e-06, | |
| "loss": 0.3971, | |
| "step": 1680 | |
| }, | |
| { | |
| "epoch": 0.6951320132013201, | |
| "grad_norm": 2.3774272170618076, | |
| "learning_rate": 2.5752170295952856e-06, | |
| "loss": 0.3845, | |
| "step": 1685 | |
| }, | |
| { | |
| "epoch": 0.6971947194719472, | |
| "grad_norm": 2.321619743547469, | |
| "learning_rate": 2.5437872642164818e-06, | |
| "loss": 0.3976, | |
| "step": 1690 | |
| }, | |
| { | |
| "epoch": 0.6992574257425742, | |
| "grad_norm": 2.4409520235076516, | |
| "learning_rate": 2.5124849056132094e-06, | |
| "loss": 0.4011, | |
| "step": 1695 | |
| }, | |
| { | |
| "epoch": 0.7013201320132013, | |
| "grad_norm": 2.3383182088114416, | |
| "learning_rate": 2.4813115774773046e-06, | |
| "loss": 0.3937, | |
| "step": 1700 | |
| }, | |
| { | |
| "epoch": 0.7033828382838284, | |
| "grad_norm": 2.297945871228005, | |
| "learning_rate": 2.4502688968076416e-06, | |
| "loss": 0.3734, | |
| "step": 1705 | |
| }, | |
| { | |
| "epoch": 0.7054455445544554, | |
| "grad_norm": 2.326552654875149, | |
| "learning_rate": 2.4193584738262426e-06, | |
| "loss": 0.378, | |
| "step": 1710 | |
| }, | |
| { | |
| "epoch": 0.7075082508250825, | |
| "grad_norm": 2.584655230215262, | |
| "learning_rate": 2.388581911894767e-06, | |
| "loss": 0.3768, | |
| "step": 1715 | |
| }, | |
| { | |
| "epoch": 0.7095709570957096, | |
| "grad_norm": 2.3738045585977177, | |
| "learning_rate": 2.357940807431339e-06, | |
| "loss": 0.3969, | |
| "step": 1720 | |
| }, | |
| { | |
| "epoch": 0.7116336633663366, | |
| "grad_norm": 2.393762942918226, | |
| "learning_rate": 2.3274367498277246e-06, | |
| "loss": 0.3981, | |
| "step": 1725 | |
| }, | |
| { | |
| "epoch": 0.7136963696369637, | |
| "grad_norm": 2.5701720220513935, | |
| "learning_rate": 2.2970713213669127e-06, | |
| "loss": 0.3977, | |
| "step": 1730 | |
| }, | |
| { | |
| "epoch": 0.7157590759075908, | |
| "grad_norm": 2.3432875976560337, | |
| "learning_rate": 2.266846097141026e-06, | |
| "loss": 0.3853, | |
| "step": 1735 | |
| }, | |
| { | |
| "epoch": 0.7178217821782178, | |
| "grad_norm": 2.266576155932609, | |
| "learning_rate": 2.2367626449696168e-06, | |
| "loss": 0.3873, | |
| "step": 1740 | |
| }, | |
| { | |
| "epoch": 0.7198844884488449, | |
| "grad_norm": 2.4100813326697113, | |
| "learning_rate": 2.206822525318352e-06, | |
| "loss": 0.3796, | |
| "step": 1745 | |
| }, | |
| { | |
| "epoch": 0.721947194719472, | |
| "grad_norm": 2.3097612430692824, | |
| "learning_rate": 2.1770272912180577e-06, | |
| "loss": 0.3828, | |
| "step": 1750 | |
| }, | |
| { | |
| "epoch": 0.724009900990099, | |
| "grad_norm": 2.381872158371206, | |
| "learning_rate": 2.1473784881841753e-06, | |
| "loss": 0.3877, | |
| "step": 1755 | |
| }, | |
| { | |
| "epoch": 0.7260726072607261, | |
| "grad_norm": 2.3045933376924728, | |
| "learning_rate": 2.117877654136584e-06, | |
| "loss": 0.3808, | |
| "step": 1760 | |
| }, | |
| { | |
| "epoch": 0.7281353135313532, | |
| "grad_norm": 2.326638441980535, | |
| "learning_rate": 2.088526319319827e-06, | |
| "loss": 0.3921, | |
| "step": 1765 | |
| }, | |
| { | |
| "epoch": 0.7301980198019802, | |
| "grad_norm": 2.4833712672454147, | |
| "learning_rate": 2.059326006223743e-06, | |
| "loss": 0.3787, | |
| "step": 1770 | |
| }, | |
| { | |
| "epoch": 0.7322607260726073, | |
| "grad_norm": 2.3365364725152933, | |
| "learning_rate": 2.030278229504484e-06, | |
| "loss": 0.3849, | |
| "step": 1775 | |
| }, | |
| { | |
| "epoch": 0.7343234323432343, | |
| "grad_norm": 2.4948287704930365, | |
| "learning_rate": 2.001384495905954e-06, | |
| "loss": 0.3774, | |
| "step": 1780 | |
| }, | |
| { | |
| "epoch": 0.7363861386138614, | |
| "grad_norm": 2.562327216184289, | |
| "learning_rate": 1.972646304181656e-06, | |
| "loss": 0.3958, | |
| "step": 1785 | |
| }, | |
| { | |
| "epoch": 0.7384488448844885, | |
| "grad_norm": 2.4385787815481432, | |
| "learning_rate": 1.944065145016935e-06, | |
| "loss": 0.3796, | |
| "step": 1790 | |
| }, | |
| { | |
| "epoch": 0.7405115511551155, | |
| "grad_norm": 2.2522811433040837, | |
| "learning_rate": 1.9156425009516736e-06, | |
| "loss": 0.3877, | |
| "step": 1795 | |
| }, | |
| { | |
| "epoch": 0.7425742574257426, | |
| "grad_norm": 2.5063965305246416, | |
| "learning_rate": 1.8873798463033742e-06, | |
| "loss": 0.3803, | |
| "step": 1800 | |
| }, | |
| { | |
| "epoch": 0.7446369636963697, | |
| "grad_norm": 2.6814884076074628, | |
| "learning_rate": 1.8592786470906932e-06, | |
| "loss": 0.3836, | |
| "step": 1805 | |
| }, | |
| { | |
| "epoch": 0.7466996699669967, | |
| "grad_norm": 2.2538094160236324, | |
| "learning_rate": 1.8313403609573976e-06, | |
| "loss": 0.3723, | |
| "step": 1810 | |
| }, | |
| { | |
| "epoch": 0.7487623762376238, | |
| "grad_norm": 2.4236325470496274, | |
| "learning_rate": 1.8035664370967493e-06, | |
| "loss": 0.3831, | |
| "step": 1815 | |
| }, | |
| { | |
| "epoch": 0.7508250825082509, | |
| "grad_norm": 2.4653728464781537, | |
| "learning_rate": 1.775958316176339e-06, | |
| "loss": 0.3809, | |
| "step": 1820 | |
| }, | |
| { | |
| "epoch": 0.7528877887788779, | |
| "grad_norm": 2.473806197305363, | |
| "learning_rate": 1.7485174302633557e-06, | |
| "loss": 0.3798, | |
| "step": 1825 | |
| }, | |
| { | |
| "epoch": 0.754950495049505, | |
| "grad_norm": 2.530281290392794, | |
| "learning_rate": 1.721245202750299e-06, | |
| "loss": 0.3837, | |
| "step": 1830 | |
| }, | |
| { | |
| "epoch": 0.7570132013201321, | |
| "grad_norm": 2.307304666583442, | |
| "learning_rate": 1.694143048281156e-06, | |
| "loss": 0.378, | |
| "step": 1835 | |
| }, | |
| { | |
| "epoch": 0.759075907590759, | |
| "grad_norm": 2.498849418663323, | |
| "learning_rate": 1.6672123726780083e-06, | |
| "loss": 0.376, | |
| "step": 1840 | |
| }, | |
| { | |
| "epoch": 0.7611386138613861, | |
| "grad_norm": 2.4025100016563186, | |
| "learning_rate": 1.6404545728681232e-06, | |
| "loss": 0.373, | |
| "step": 1845 | |
| }, | |
| { | |
| "epoch": 0.7632013201320133, | |
| "grad_norm": 2.2171879939222894, | |
| "learning_rate": 1.613871036811489e-06, | |
| "loss": 0.3648, | |
| "step": 1850 | |
| }, | |
| { | |
| "epoch": 0.7652640264026402, | |
| "grad_norm": 2.3047287367214837, | |
| "learning_rate": 1.5874631434288128e-06, | |
| "loss": 0.3655, | |
| "step": 1855 | |
| }, | |
| { | |
| "epoch": 0.7673267326732673, | |
| "grad_norm": 2.4662734075880848, | |
| "learning_rate": 1.5612322625300064e-06, | |
| "loss": 0.3714, | |
| "step": 1860 | |
| }, | |
| { | |
| "epoch": 0.7693894389438944, | |
| "grad_norm": 2.331832423235198, | |
| "learning_rate": 1.5351797547431212e-06, | |
| "loss": 0.3675, | |
| "step": 1865 | |
| }, | |
| { | |
| "epoch": 0.7714521452145214, | |
| "grad_norm": 2.4200832008390196, | |
| "learning_rate": 1.5093069714437803e-06, | |
| "loss": 0.3793, | |
| "step": 1870 | |
| }, | |
| { | |
| "epoch": 0.7735148514851485, | |
| "grad_norm": 2.4710921538024717, | |
| "learning_rate": 1.483615254685075e-06, | |
| "loss": 0.3767, | |
| "step": 1875 | |
| }, | |
| { | |
| "epoch": 0.7755775577557755, | |
| "grad_norm": 2.3547080899947543, | |
| "learning_rate": 1.4581059371279516e-06, | |
| "loss": 0.3487, | |
| "step": 1880 | |
| }, | |
| { | |
| "epoch": 0.7776402640264026, | |
| "grad_norm": 2.3257391997447927, | |
| "learning_rate": 1.4327803419720836e-06, | |
| "loss": 0.35, | |
| "step": 1885 | |
| }, | |
| { | |
| "epoch": 0.7797029702970297, | |
| "grad_norm": 2.313586644368121, | |
| "learning_rate": 1.4076397828872441e-06, | |
| "loss": 0.3606, | |
| "step": 1890 | |
| }, | |
| { | |
| "epoch": 0.7817656765676567, | |
| "grad_norm": 2.3428094787987574, | |
| "learning_rate": 1.3826855639451492e-06, | |
| "loss": 0.3653, | |
| "step": 1895 | |
| }, | |
| { | |
| "epoch": 0.7838283828382838, | |
| "grad_norm": 2.45005288836788, | |
| "learning_rate": 1.357918979551831e-06, | |
| "loss": 0.3754, | |
| "step": 1900 | |
| }, | |
| { | |
| "epoch": 0.7858910891089109, | |
| "grad_norm": 2.3976786255699754, | |
| "learning_rate": 1.333341314380479e-06, | |
| "loss": 0.3667, | |
| "step": 1905 | |
| }, | |
| { | |
| "epoch": 0.7879537953795379, | |
| "grad_norm": 2.5250135396691604, | |
| "learning_rate": 1.308953843304816e-06, | |
| "loss": 0.3614, | |
| "step": 1910 | |
| }, | |
| { | |
| "epoch": 0.790016501650165, | |
| "grad_norm": 2.348295501687973, | |
| "learning_rate": 1.2847578313329623e-06, | |
| "loss": 0.3608, | |
| "step": 1915 | |
| }, | |
| { | |
| "epoch": 0.7920792079207921, | |
| "grad_norm": 2.367799355908408, | |
| "learning_rate": 1.2607545335418154e-06, | |
| "loss": 0.3633, | |
| "step": 1920 | |
| }, | |
| { | |
| "epoch": 0.7941419141914191, | |
| "grad_norm": 2.45330068107534, | |
| "learning_rate": 1.2369451950119553e-06, | |
| "loss": 0.3692, | |
| "step": 1925 | |
| }, | |
| { | |
| "epoch": 0.7962046204620462, | |
| "grad_norm": 2.249645433984358, | |
| "learning_rate": 1.2133310507630535e-06, | |
| "loss": 0.3586, | |
| "step": 1930 | |
| }, | |
| { | |
| "epoch": 0.7982673267326733, | |
| "grad_norm": 2.4175222905557012, | |
| "learning_rate": 1.189913325689816e-06, | |
| "loss": 0.3622, | |
| "step": 1935 | |
| }, | |
| { | |
| "epoch": 0.8003300330033003, | |
| "grad_norm": 2.3871779225388616, | |
| "learning_rate": 1.166693234498446e-06, | |
| "loss": 0.3536, | |
| "step": 1940 | |
| }, | |
| { | |
| "epoch": 0.8023927392739274, | |
| "grad_norm": 2.4452723810844743, | |
| "learning_rate": 1.1436719816436293e-06, | |
| "loss": 0.3613, | |
| "step": 1945 | |
| }, | |
| { | |
| "epoch": 0.8044554455445545, | |
| "grad_norm": 2.48712631644359, | |
| "learning_rate": 1.120850761266068e-06, | |
| "loss": 0.3669, | |
| "step": 1950 | |
| }, | |
| { | |
| "epoch": 0.8065181518151815, | |
| "grad_norm": 2.5253662509329393, | |
| "learning_rate": 1.098230757130529e-06, | |
| "loss": 0.3727, | |
| "step": 1955 | |
| }, | |
| { | |
| "epoch": 0.8085808580858086, | |
| "grad_norm": 2.380725750843834, | |
| "learning_rate": 1.075813142564448e-06, | |
| "loss": 0.3513, | |
| "step": 1960 | |
| }, | |
| { | |
| "epoch": 0.8106435643564357, | |
| "grad_norm": 2.358274468399545, | |
| "learning_rate": 1.053599080397068e-06, | |
| "loss": 0.3702, | |
| "step": 1965 | |
| }, | |
| { | |
| "epoch": 0.8127062706270627, | |
| "grad_norm": 2.2547142529874806, | |
| "learning_rate": 1.031589722899109e-06, | |
| "loss": 0.3612, | |
| "step": 1970 | |
| }, | |
| { | |
| "epoch": 0.8147689768976898, | |
| "grad_norm": 2.3254385156527664, | |
| "learning_rate": 1.0097862117230162e-06, | |
| "loss": 0.3468, | |
| "step": 1975 | |
| }, | |
| { | |
| "epoch": 0.8168316831683168, | |
| "grad_norm": 2.3457610696785425, | |
| "learning_rate": 9.881896778437328e-07, | |
| "loss": 0.3565, | |
| "step": 1980 | |
| }, | |
| { | |
| "epoch": 0.8188943894389439, | |
| "grad_norm": 2.3354355561954314, | |
| "learning_rate": 9.6680124150003e-07, | |
| "loss": 0.3545, | |
| "step": 1985 | |
| }, | |
| { | |
| "epoch": 0.820957095709571, | |
| "grad_norm": 2.3649448904745904, | |
| "learning_rate": 9.456220121364091e-07, | |
| "loss": 0.3544, | |
| "step": 1990 | |
| }, | |
| { | |
| "epoch": 0.823019801980198, | |
| "grad_norm": 2.562831507635733, | |
| "learning_rate": 9.24653088345544e-07, | |
| "loss": 0.3487, | |
| "step": 1995 | |
| }, | |
| { | |
| "epoch": 0.8250825082508251, | |
| "grad_norm": 2.5452000028587145, | |
| "learning_rate": 9.038955578113018e-07, | |
| "loss": 0.3511, | |
| "step": 2000 | |
| }, | |
| { | |
| "epoch": 0.8271452145214522, | |
| "grad_norm": 2.279868733040485, | |
| "learning_rate": 8.833504972523238e-07, | |
| "loss": 0.3507, | |
| "step": 2005 | |
| }, | |
| { | |
| "epoch": 0.8292079207920792, | |
| "grad_norm": 2.447577859196897, | |
| "learning_rate": 8.630189723661663e-07, | |
| "loss": 0.3505, | |
| "step": 2010 | |
| }, | |
| { | |
| "epoch": 0.8312706270627063, | |
| "grad_norm": 2.3258834832843855, | |
| "learning_rate": 8.429020377740338e-07, | |
| "loss": 0.3436, | |
| "step": 2015 | |
| }, | |
| { | |
| "epoch": 0.8333333333333334, | |
| "grad_norm": 2.419682969212795, | |
| "learning_rate": 8.230007369660636e-07, | |
| "loss": 0.3488, | |
| "step": 2020 | |
| }, | |
| { | |
| "epoch": 0.8353960396039604, | |
| "grad_norm": 2.386888258570233, | |
| "learning_rate": 8.033161022472063e-07, | |
| "loss": 0.3535, | |
| "step": 2025 | |
| }, | |
| { | |
| "epoch": 0.8374587458745875, | |
| "grad_norm": 2.3745894252664916, | |
| "learning_rate": 7.838491546836763e-07, | |
| "loss": 0.3508, | |
| "step": 2030 | |
| }, | |
| { | |
| "epoch": 0.8395214521452146, | |
| "grad_norm": 2.384176050336693, | |
| "learning_rate": 7.646009040499846e-07, | |
| "loss": 0.3431, | |
| "step": 2035 | |
| }, | |
| { | |
| "epoch": 0.8415841584158416, | |
| "grad_norm": 2.4506033032717274, | |
| "learning_rate": 7.455723487765664e-07, | |
| "loss": 0.349, | |
| "step": 2040 | |
| }, | |
| { | |
| "epoch": 0.8436468646864687, | |
| "grad_norm": 2.4755241753598956, | |
| "learning_rate": 7.267644758979869e-07, | |
| "loss": 0.3465, | |
| "step": 2045 | |
| }, | |
| { | |
| "epoch": 0.8457095709570958, | |
| "grad_norm": 2.3912218867869472, | |
| "learning_rate": 7.08178261001743e-07, | |
| "loss": 0.3606, | |
| "step": 2050 | |
| }, | |
| { | |
| "epoch": 0.8477722772277227, | |
| "grad_norm": 2.4752358064551925, | |
| "learning_rate": 6.898146681776629e-07, | |
| "loss": 0.3451, | |
| "step": 2055 | |
| }, | |
| { | |
| "epoch": 0.8498349834983498, | |
| "grad_norm": 2.4469151560024365, | |
| "learning_rate": 6.7167464996789e-07, | |
| "loss": 0.3515, | |
| "step": 2060 | |
| }, | |
| { | |
| "epoch": 0.851897689768977, | |
| "grad_norm": 2.4398936114143024, | |
| "learning_rate": 6.537591473174814e-07, | |
| "loss": 0.3602, | |
| "step": 2065 | |
| }, | |
| { | |
| "epoch": 0.8539603960396039, | |
| "grad_norm": 2.499314654767742, | |
| "learning_rate": 6.360690895255916e-07, | |
| "loss": 0.3624, | |
| "step": 2070 | |
| }, | |
| { | |
| "epoch": 0.856023102310231, | |
| "grad_norm": 2.386870027944476, | |
| "learning_rate": 6.186053941972775e-07, | |
| "loss": 0.3449, | |
| "step": 2075 | |
| }, | |
| { | |
| "epoch": 0.858085808580858, | |
| "grad_norm": 2.4376398779640347, | |
| "learning_rate": 6.013689671958944e-07, | |
| "loss": 0.3612, | |
| "step": 2080 | |
| }, | |
| { | |
| "epoch": 0.8601485148514851, | |
| "grad_norm": 2.530488463039945, | |
| "learning_rate": 5.84360702596109e-07, | |
| "loss": 0.3635, | |
| "step": 2085 | |
| }, | |
| { | |
| "epoch": 0.8622112211221122, | |
| "grad_norm": 2.393526534346384, | |
| "learning_rate": 5.67581482637527e-07, | |
| "loss": 0.3484, | |
| "step": 2090 | |
| }, | |
| { | |
| "epoch": 0.8642739273927392, | |
| "grad_norm": 2.439985682818376, | |
| "learning_rate": 5.510321776789213e-07, | |
| "loss": 0.3562, | |
| "step": 2095 | |
| }, | |
| { | |
| "epoch": 0.8663366336633663, | |
| "grad_norm": 2.404561027314301, | |
| "learning_rate": 5.347136461530966e-07, | |
| "loss": 0.3515, | |
| "step": 2100 | |
| }, | |
| { | |
| "epoch": 0.8683993399339934, | |
| "grad_norm": 2.529134431042306, | |
| "learning_rate": 5.186267345223539e-07, | |
| "loss": 0.336, | |
| "step": 2105 | |
| }, | |
| { | |
| "epoch": 0.8704620462046204, | |
| "grad_norm": 2.4549178066211796, | |
| "learning_rate": 5.027722772345828e-07, | |
| "loss": 0.3483, | |
| "step": 2110 | |
| }, | |
| { | |
| "epoch": 0.8725247524752475, | |
| "grad_norm": 2.404643832863399, | |
| "learning_rate": 4.871510966799847e-07, | |
| "loss": 0.3406, | |
| "step": 2115 | |
| }, | |
| { | |
| "epoch": 0.8745874587458746, | |
| "grad_norm": 2.503938904115535, | |
| "learning_rate": 4.717640031484055e-07, | |
| "loss": 0.3629, | |
| "step": 2120 | |
| }, | |
| { | |
| "epoch": 0.8766501650165016, | |
| "grad_norm": 2.3230215699664183, | |
| "learning_rate": 4.566117947873139e-07, | |
| "loss": 0.3542, | |
| "step": 2125 | |
| }, | |
| { | |
| "epoch": 0.8787128712871287, | |
| "grad_norm": 2.510839585616837, | |
| "learning_rate": 4.4169525756039164e-07, | |
| "loss": 0.3504, | |
| "step": 2130 | |
| }, | |
| { | |
| "epoch": 0.8807755775577558, | |
| "grad_norm": 2.4483049384414035, | |
| "learning_rate": 4.2701516520677054e-07, | |
| "loss": 0.3426, | |
| "step": 2135 | |
| }, | |
| { | |
| "epoch": 0.8828382838283828, | |
| "grad_norm": 2.3212952681789445, | |
| "learning_rate": 4.1257227920089684e-07, | |
| "loss": 0.3467, | |
| "step": 2140 | |
| }, | |
| { | |
| "epoch": 0.8849009900990099, | |
| "grad_norm": 2.306034813512439, | |
| "learning_rate": 3.983673487130313e-07, | |
| "loss": 0.3496, | |
| "step": 2145 | |
| }, | |
| { | |
| "epoch": 0.886963696369637, | |
| "grad_norm": 2.351525266795836, | |
| "learning_rate": 3.8440111057038874e-07, | |
| "loss": 0.3446, | |
| "step": 2150 | |
| }, | |
| { | |
| "epoch": 0.889026402640264, | |
| "grad_norm": 2.33661307568645, | |
| "learning_rate": 3.706742892189197e-07, | |
| "loss": 0.3385, | |
| "step": 2155 | |
| }, | |
| { | |
| "epoch": 0.8910891089108911, | |
| "grad_norm": 2.479408467495357, | |
| "learning_rate": 3.5718759668572913e-07, | |
| "loss": 0.3471, | |
| "step": 2160 | |
| }, | |
| { | |
| "epoch": 0.8931518151815182, | |
| "grad_norm": 2.4454103391168687, | |
| "learning_rate": 3.439417325421468e-07, | |
| "loss": 0.347, | |
| "step": 2165 | |
| }, | |
| { | |
| "epoch": 0.8952145214521452, | |
| "grad_norm": 2.4210315871292174, | |
| "learning_rate": 3.3093738386743734e-07, | |
| "loss": 0.3387, | |
| "step": 2170 | |
| }, | |
| { | |
| "epoch": 0.8972772277227723, | |
| "grad_norm": 2.415104298972758, | |
| "learning_rate": 3.1817522521316034e-07, | |
| "loss": 0.3328, | |
| "step": 2175 | |
| }, | |
| { | |
| "epoch": 0.8993399339933993, | |
| "grad_norm": 2.6057415546092955, | |
| "learning_rate": 3.0565591856818236e-07, | |
| "loss": 0.3384, | |
| "step": 2180 | |
| }, | |
| { | |
| "epoch": 0.9014026402640264, | |
| "grad_norm": 2.5470334526474083, | |
| "learning_rate": 2.9338011332433525e-07, | |
| "loss": 0.3456, | |
| "step": 2185 | |
| }, | |
| { | |
| "epoch": 0.9034653465346535, | |
| "grad_norm": 2.3087270354481495, | |
| "learning_rate": 2.813484462427357e-07, | |
| "loss": 0.3416, | |
| "step": 2190 | |
| }, | |
| { | |
| "epoch": 0.9055280528052805, | |
| "grad_norm": 2.3652461290680615, | |
| "learning_rate": 2.695615414207542e-07, | |
| "loss": 0.3419, | |
| "step": 2195 | |
| }, | |
| { | |
| "epoch": 0.9075907590759076, | |
| "grad_norm": 2.3153894157922084, | |
| "learning_rate": 2.5802001025963917e-07, | |
| "loss": 0.3457, | |
| "step": 2200 | |
| }, | |
| { | |
| "epoch": 0.9096534653465347, | |
| "grad_norm": 2.437439490370531, | |
| "learning_rate": 2.467244514328082e-07, | |
| "loss": 0.3336, | |
| "step": 2205 | |
| }, | |
| { | |
| "epoch": 0.9117161716171617, | |
| "grad_norm": 2.48712819745237, | |
| "learning_rate": 2.3567545085478983e-07, | |
| "loss": 0.3473, | |
| "step": 2210 | |
| }, | |
| { | |
| "epoch": 0.9137788778877888, | |
| "grad_norm": 2.467836274329266, | |
| "learning_rate": 2.248735816508324e-07, | |
| "loss": 0.3342, | |
| "step": 2215 | |
| }, | |
| { | |
| "epoch": 0.9158415841584159, | |
| "grad_norm": 2.43610224609289, | |
| "learning_rate": 2.1431940412717843e-07, | |
| "loss": 0.349, | |
| "step": 2220 | |
| }, | |
| { | |
| "epoch": 0.9179042904290429, | |
| "grad_norm": 2.4583486850305265, | |
| "learning_rate": 2.040134657419951e-07, | |
| "loss": 0.3577, | |
| "step": 2225 | |
| }, | |
| { | |
| "epoch": 0.91996699669967, | |
| "grad_norm": 2.307477095808075, | |
| "learning_rate": 1.9395630107698293e-07, | |
| "loss": 0.3375, | |
| "step": 2230 | |
| }, | |
| { | |
| "epoch": 0.9220297029702971, | |
| "grad_norm": 2.6904928261878207, | |
| "learning_rate": 1.8414843180964316e-07, | |
| "loss": 0.3381, | |
| "step": 2235 | |
| }, | |
| { | |
| "epoch": 0.9240924092409241, | |
| "grad_norm": 2.395973262946888, | |
| "learning_rate": 1.7459036668621586e-07, | |
| "loss": 0.3487, | |
| "step": 2240 | |
| }, | |
| { | |
| "epoch": 0.9261551155115512, | |
| "grad_norm": 2.5974442906075654, | |
| "learning_rate": 1.6528260149529573e-07, | |
| "loss": 0.3462, | |
| "step": 2245 | |
| }, | |
| { | |
| "epoch": 0.9282178217821783, | |
| "grad_norm": 2.4163928757500734, | |
| "learning_rate": 1.562256190421102e-07, | |
| "loss": 0.3338, | |
| "step": 2250 | |
| }, | |
| { | |
| "epoch": 0.9302805280528053, | |
| "grad_norm": 2.3929529097227595, | |
| "learning_rate": 1.4741988912347848e-07, | |
| "loss": 0.3327, | |
| "step": 2255 | |
| }, | |
| { | |
| "epoch": 0.9323432343234324, | |
| "grad_norm": 2.333345645365385, | |
| "learning_rate": 1.3886586850344276e-07, | |
| "loss": 0.3601, | |
| "step": 2260 | |
| }, | |
| { | |
| "epoch": 0.9344059405940595, | |
| "grad_norm": 2.302538628872532, | |
| "learning_rate": 1.30564000889572e-07, | |
| "loss": 0.3449, | |
| "step": 2265 | |
| }, | |
| { | |
| "epoch": 0.9364686468646864, | |
| "grad_norm": 2.453853466702325, | |
| "learning_rate": 1.225147169099511e-07, | |
| "loss": 0.3318, | |
| "step": 2270 | |
| }, | |
| { | |
| "epoch": 0.9385313531353136, | |
| "grad_norm": 2.490214641545018, | |
| "learning_rate": 1.147184340908386e-07, | |
| "loss": 0.3558, | |
| "step": 2275 | |
| }, | |
| { | |
| "epoch": 0.9405940594059405, | |
| "grad_norm": 2.4840208163150432, | |
| "learning_rate": 1.0717555683501413e-07, | |
| "loss": 0.3486, | |
| "step": 2280 | |
| }, | |
| { | |
| "epoch": 0.9426567656765676, | |
| "grad_norm": 2.378818883269412, | |
| "learning_rate": 9.988647640079785e-08, | |
| "loss": 0.3305, | |
| "step": 2285 | |
| }, | |
| { | |
| "epoch": 0.9447194719471947, | |
| "grad_norm": 2.254918829640657, | |
| "learning_rate": 9.285157088175678e-08, | |
| "loss": 0.3363, | |
| "step": 2290 | |
| }, | |
| { | |
| "epoch": 0.9467821782178217, | |
| "grad_norm": 2.432890954515314, | |
| "learning_rate": 8.607120518709156e-08, | |
| "loss": 0.3417, | |
| "step": 2295 | |
| }, | |
| { | |
| "epoch": 0.9488448844884488, | |
| "grad_norm": 2.4920984458242503, | |
| "learning_rate": 7.954573102271157e-08, | |
| "loss": 0.3476, | |
| "step": 2300 | |
| }, | |
| { | |
| "epoch": 0.9509075907590759, | |
| "grad_norm": 2.47335546582837, | |
| "learning_rate": 7.327548687298625e-08, | |
| "loss": 0.3504, | |
| "step": 2305 | |
| }, | |
| { | |
| "epoch": 0.9529702970297029, | |
| "grad_norm": 2.4140844536841572, | |
| "learning_rate": 6.726079798319185e-08, | |
| "loss": 0.337, | |
| "step": 2310 | |
| }, | |
| { | |
| "epoch": 0.95503300330033, | |
| "grad_norm": 2.5771622061139046, | |
| "learning_rate": 6.150197634263888e-08, | |
| "loss": 0.3285, | |
| "step": 2315 | |
| }, | |
| { | |
| "epoch": 0.9570957095709571, | |
| "grad_norm": 2.3569938271313795, | |
| "learning_rate": 5.599932066848834e-08, | |
| "loss": 0.3539, | |
| "step": 2320 | |
| }, | |
| { | |
| "epoch": 0.9591584158415841, | |
| "grad_norm": 2.4499385246168344, | |
| "learning_rate": 5.0753116390258594e-08, | |
| "loss": 0.3459, | |
| "step": 2325 | |
| }, | |
| { | |
| "epoch": 0.9612211221122112, | |
| "grad_norm": 2.4764510130658386, | |
| "learning_rate": 4.576363563501718e-08, | |
| "loss": 0.346, | |
| "step": 2330 | |
| }, | |
| { | |
| "epoch": 0.9632838283828383, | |
| "grad_norm": 2.596731808326133, | |
| "learning_rate": 4.103113721326768e-08, | |
| "loss": 0.3362, | |
| "step": 2335 | |
| }, | |
| { | |
| "epoch": 0.9653465346534653, | |
| "grad_norm": 2.5900213567140176, | |
| "learning_rate": 3.655586660552324e-08, | |
| "loss": 0.3363, | |
| "step": 2340 | |
| }, | |
| { | |
| "epoch": 0.9674092409240924, | |
| "grad_norm": 2.313646904697264, | |
| "learning_rate": 3.233805594957506e-08, | |
| "loss": 0.3447, | |
| "step": 2345 | |
| }, | |
| { | |
| "epoch": 0.9694719471947195, | |
| "grad_norm": 2.321374397889606, | |
| "learning_rate": 2.8377924028449855e-08, | |
| "loss": 0.3389, | |
| "step": 2350 | |
| }, | |
| { | |
| "epoch": 0.9715346534653465, | |
| "grad_norm": 2.809082658776519, | |
| "learning_rate": 2.4675676259059976e-08, | |
| "loss": 0.3488, | |
| "step": 2355 | |
| }, | |
| { | |
| "epoch": 0.9735973597359736, | |
| "grad_norm": 2.373910272093463, | |
| "learning_rate": 2.123150468155144e-08, | |
| "loss": 0.3475, | |
| "step": 2360 | |
| }, | |
| { | |
| "epoch": 0.9756600660066007, | |
| "grad_norm": 2.5332366911693596, | |
| "learning_rate": 1.8045587949339637e-08, | |
| "loss": 0.3328, | |
| "step": 2365 | |
| }, | |
| { | |
| "epoch": 0.9777227722772277, | |
| "grad_norm": 2.313810796400828, | |
| "learning_rate": 1.5118091319843985e-08, | |
| "loss": 0.3407, | |
| "step": 2370 | |
| }, | |
| { | |
| "epoch": 0.9797854785478548, | |
| "grad_norm": 2.2683681366110777, | |
| "learning_rate": 1.2449166645915333e-08, | |
| "loss": 0.3341, | |
| "step": 2375 | |
| }, | |
| { | |
| "epoch": 0.9818481848184818, | |
| "grad_norm": 2.5404146692025638, | |
| "learning_rate": 1.0038952367958376e-08, | |
| "loss": 0.3287, | |
| "step": 2380 | |
| }, | |
| { | |
| "epoch": 0.9839108910891089, | |
| "grad_norm": 2.433802925714093, | |
| "learning_rate": 7.887573506752954e-09, | |
| "loss": 0.3484, | |
| "step": 2385 | |
| }, | |
| { | |
| "epoch": 0.985973597359736, | |
| "grad_norm": 2.3794895761915953, | |
| "learning_rate": 5.9951416569659085e-09, | |
| "loss": 0.3433, | |
| "step": 2390 | |
| }, | |
| { | |
| "epoch": 0.988036303630363, | |
| "grad_norm": 2.476927805960015, | |
| "learning_rate": 4.361754981365152e-09, | |
| "loss": 0.3393, | |
| "step": 2395 | |
| }, | |
| { | |
| "epoch": 0.9900990099009901, | |
| "grad_norm": 2.438512650786407, | |
| "learning_rate": 2.98749820572708e-09, | |
| "loss": 0.3389, | |
| "step": 2400 | |
| }, | |
| { | |
| "epoch": 0.9921617161716172, | |
| "grad_norm": 2.3365090706536735, | |
| "learning_rate": 1.8724426144395293e-09, | |
| "loss": 0.3354, | |
| "step": 2405 | |
| }, | |
| { | |
| "epoch": 0.9942244224422442, | |
| "grad_norm": 2.4286642972043313, | |
| "learning_rate": 1.0166460468080674e-09, | |
| "loss": 0.3527, | |
| "step": 2410 | |
| }, | |
| { | |
| "epoch": 0.9962871287128713, | |
| "grad_norm": 2.4446712259075682, | |
| "learning_rate": 4.2015289405339386e-10, | |
| "loss": 0.3393, | |
| "step": 2415 | |
| }, | |
| { | |
| "epoch": 0.9983498349834984, | |
| "grad_norm": 2.395544519671724, | |
| "learning_rate": 8.29940970092924e-11, | |
| "loss": 0.3433, | |
| "step": 2420 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_runtime": 3.4199, | |
| "eval_samples_per_second": 2.924, | |
| "eval_steps_per_second": 0.877, | |
| "step": 2424 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "step": 2424, | |
| "total_flos": 253768142684160.0, | |
| "train_loss": 0.0, | |
| "train_runtime": 0.0092, | |
| "train_samples_per_second": 4199719.219, | |
| "train_steps_per_second": 262529.834 | |
| } | |
| ], | |
| "logging_steps": 5, | |
| "max_steps": 2424, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 1, | |
| "save_steps": 100, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 253768142684160.0, | |
| "train_batch_size": 4, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
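
The JSON above is a Trainer state log covering a single epoch of 2424 optimizer steps, logged every 5 steps. As a minimal, illustrative sketch of how such a state could be inspected (assuming the JSON has been saved locally as trainer_state.json; the file name and the script below are not part of the original state), the periodic training entries are the ones that carry a "loss" key, while the trailing entries hold the evaluation and run summary:

```python
# Minimal, illustrative sketch for inspecting the trainer state shown above.
# Assumption: the JSON has been saved locally as "trainer_state.json";
# neither the file name nor this script is part of the original state.
import json

with open("trainer_state.json") as f:
    state = json.load(f)

# Periodic training logs carry a "loss" key; the eval and run-summary
# entries at the end of log_history do not.
train_logs = [entry for entry in state["log_history"] if "loss" in entry]

steps = [entry["step"] for entry in train_logs]
losses = [entry["loss"] for entry in train_logs]
lrs = [entry["learning_rate"] for entry in train_logs]

print(f"logged training points: {len(train_logs)}")
print(f"loss: {losses[0]:.4f} (step {steps[0]}) -> {losses[-1]:.4f} (step {steps[-1]})")
print(f"learning rate: {lrs[0]:.3e} -> {lrs[-1]:.3e}")
print(f"total optimizer steps: {state['max_steps']}, logging every {state['logging_steps']} steps")
```

Over the portion of the log shown in this section, the training loss drifts from roughly 0.42 around step 1500 down to roughly 0.34 near step 2420, while the learning rate decays from about 3.8e-06 toward zero by the end of the epoch.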