{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.0,
  "eval_steps": 500,
  "global_step": 3038,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.006584362139917695,
      "grad_norm": 16.94489117069711,
      "learning_rate": 5.921052631578947e-07,
      "loss": 1.7535,
      "step": 10
    },
    {
      "epoch": 0.01316872427983539,
      "grad_norm": 8.859991586767253,
      "learning_rate": 1.25e-06,
      "loss": 1.2539,
      "step": 20
    },
    {
      "epoch": 0.019753086419753086,
      "grad_norm": 2.075143960034565,
      "learning_rate": 1.9078947368421057e-06,
      "loss": 0.467,
      "step": 30
    },
    {
      "epoch": 0.02633744855967078,
      "grad_norm": 0.5442095529063627,
      "learning_rate": 2.565789473684211e-06,
      "loss": 0.1226,
      "step": 40
    },
    {
      "epoch": 0.03292181069958848,
      "grad_norm": 0.5355230081219307,
      "learning_rate": 3.223684210526316e-06,
      "loss": 0.0881,
      "step": 50
    },
    {
      "epoch": 0.03950617283950617,
      "grad_norm": 0.53778689592408,
      "learning_rate": 3.8815789473684214e-06,
      "loss": 0.0814,
      "step": 60
    },
    {
      "epoch": 0.04609053497942387,
      "grad_norm": 0.4850174158865047,
      "learning_rate": 4.539473684210527e-06,
      "loss": 0.0757,
      "step": 70
    },
    {
      "epoch": 0.05267489711934156,
      "grad_norm": 0.5101825080408668,
      "learning_rate": 5.197368421052632e-06,
      "loss": 0.0706,
      "step": 80
    },
    {
      "epoch": 0.05925925925925926,
      "grad_norm": 0.46233757532872244,
      "learning_rate": 5.855263157894738e-06,
      "loss": 0.0724,
      "step": 90
    },
    {
      "epoch": 0.06584362139917696,
      "grad_norm": 0.6266803776619211,
      "learning_rate": 6.513157894736842e-06,
      "loss": 0.071,
      "step": 100
    },
    {
      "epoch": 0.07242798353909465,
      "grad_norm": 0.5540120954026315,
      "learning_rate": 7.1710526315789475e-06,
      "loss": 0.0702,
      "step": 110
    },
    {
      "epoch": 0.07901234567901234,
      "grad_norm": 0.5523585586762507,
      "learning_rate": 7.828947368421054e-06,
      "loss": 0.0699,
      "step": 120
    },
    {
      "epoch": 0.08559670781893004,
      "grad_norm": 0.5531513796186659,
      "learning_rate": 8.486842105263159e-06,
      "loss": 0.074,
      "step": 130
    },
    {
      "epoch": 0.09218106995884774,
      "grad_norm": 0.5714508163654194,
      "learning_rate": 9.144736842105264e-06,
      "loss": 0.0723,
      "step": 140
    },
    {
      "epoch": 0.09876543209876543,
      "grad_norm": 0.6508444768762316,
      "learning_rate": 9.80263157894737e-06,
      "loss": 0.0696,
      "step": 150
    },
    {
      "epoch": 0.10534979423868313,
      "grad_norm": 0.6373731668098953,
      "learning_rate": 1.0460526315789474e-05,
      "loss": 0.0737,
      "step": 160
    },
    {
      "epoch": 0.11193415637860082,
      "grad_norm": 0.5215944852997055,
      "learning_rate": 1.111842105263158e-05,
      "loss": 0.0757,
      "step": 170
    },
    {
      "epoch": 0.11851851851851852,
      "grad_norm": 0.3748452120567205,
      "learning_rate": 1.1776315789473684e-05,
      "loss": 0.0725,
      "step": 180
    },
    {
      "epoch": 0.12510288065843622,
      "grad_norm": 0.46949418079968014,
      "learning_rate": 1.2434210526315791e-05,
      "loss": 0.0729,
      "step": 190
    },
    {
      "epoch": 0.13168724279835392,
      "grad_norm": 0.4669252672418189,
      "learning_rate": 1.3092105263157895e-05,
      "loss": 0.0701,
      "step": 200
    },
    {
      "epoch": 0.1382716049382716,
      "grad_norm": 0.40651078047311257,
      "learning_rate": 1.375e-05,
      "loss": 0.07,
      "step": 210
    },
    {
      "epoch": 0.1448559670781893,
      "grad_norm": 0.3734487172816472,
      "learning_rate": 1.4407894736842108e-05,
      "loss": 0.0717,
      "step": 220
    },
    {
      "epoch": 0.151440329218107,
      "grad_norm": 0.35984990509833603,
      "learning_rate": 1.5065789473684211e-05,
      "loss": 0.0742,
      "step": 230
    },
    {
      "epoch": 0.1580246913580247,
      "grad_norm": 0.5316375595082459,
      "learning_rate": 1.572368421052632e-05,
      "loss": 0.0792,
      "step": 240
    },
    {
      "epoch": 0.1646090534979424,
      "grad_norm": 0.2582761022122272,
      "learning_rate": 1.638157894736842e-05,
      "loss": 0.073,
      "step": 250
    },
    {
      "epoch": 0.17119341563786009,
      "grad_norm": 0.4363297164897905,
      "learning_rate": 1.703947368421053e-05,
      "loss": 0.0727,
      "step": 260
    },
    {
      "epoch": 0.17777777777777778,
      "grad_norm": 0.4197644322559592,
      "learning_rate": 1.769736842105263e-05,
      "loss": 0.0736,
      "step": 270
    },
    {
      "epoch": 0.18436213991769548,
      "grad_norm": 0.38889634627156217,
      "learning_rate": 1.835526315789474e-05,
      "loss": 0.0749,
      "step": 280
    },
    {
      "epoch": 0.19094650205761318,
      "grad_norm": 0.39586272636605613,
      "learning_rate": 1.9013157894736845e-05,
      "loss": 0.0754,
      "step": 290
    },
    {
      "epoch": 0.19753086419753085,
      "grad_norm": 0.3761946661545154,
      "learning_rate": 1.9671052631578947e-05,
      "loss": 0.0796,
      "step": 300
    },
    {
      "epoch": 0.20411522633744855,
      "grad_norm": 0.4250884695412882,
      "learning_rate": 1.999983495151461e-05,
      "loss": 0.0805,
      "step": 310
    },
    {
      "epoch": 0.21069958847736625,
      "grad_norm": 0.42967475726945525,
      "learning_rate": 1.999851459632052e-05,
      "loss": 0.0819,
      "step": 320
    },
    {
      "epoch": 0.21728395061728395,
      "grad_norm": 0.3743577309340014,
      "learning_rate": 1.999587406026899e-05,
      "loss": 0.0759,
      "step": 330
    },
    {
      "epoch": 0.22386831275720165,
      "grad_norm": 0.34570674035828175,
      "learning_rate": 1.9991913692010336e-05,
      "loss": 0.0772,
      "step": 340
    },
    {
      "epoch": 0.23045267489711935,
      "grad_norm": 0.33260134395908936,
      "learning_rate": 1.9986634014462463e-05,
      "loss": 0.075,
      "step": 350
    },
    {
      "epoch": 0.23703703703703705,
      "grad_norm": 0.2901700303682964,
      "learning_rate": 1.998003572474184e-05,
      "loss": 0.0732,
      "step": 360
    },
    {
      "epoch": 0.24362139917695474,
      "grad_norm": 0.3420514339891425,
      "learning_rate": 1.997211969407147e-05,
      "loss": 0.0757,
      "step": 370
    },
    {
      "epoch": 0.25020576131687244,
      "grad_norm": 0.3395419325716837,
      "learning_rate": 1.9962886967665807e-05,
      "loss": 0.0783,
      "step": 380
    },
    {
      "epoch": 0.25679012345679014,
      "grad_norm": 0.3880605655914281,
      "learning_rate": 1.9952338764592815e-05,
      "loss": 0.0759,
      "step": 390
    },
    {
      "epoch": 0.26337448559670784,
      "grad_norm": 0.2883781783386072,
      "learning_rate": 1.9940476477612945e-05,
      "loss": 0.0757,
      "step": 400
    },
    {
      "epoch": 0.26995884773662554,
      "grad_norm": 0.3338258849572171,
      "learning_rate": 1.992730167299527e-05,
      "loss": 0.0747,
      "step": 410
    },
    {
      "epoch": 0.2765432098765432,
      "grad_norm": 0.39547557944899164,
      "learning_rate": 1.9912816090310677e-05,
      "loss": 0.0744,
      "step": 420
    },
    {
      "epoch": 0.2831275720164609,
      "grad_norm": 0.35844083352453676,
      "learning_rate": 1.9897021642202163e-05,
      "loss": 0.0782,
      "step": 430
    },
    {
      "epoch": 0.2897119341563786,
      "grad_norm": 0.4272864277469304,
      "learning_rate": 1.9879920414132305e-05,
      "loss": 0.075,
      "step": 440
    },
    {
      "epoch": 0.2962962962962963,
      "grad_norm": 0.3767414865383142,
      "learning_rate": 1.986151466410791e-05,
      "loss": 0.0744,
      "step": 450
    },
    {
      "epoch": 0.302880658436214,
      "grad_norm": 0.294650325367507,
      "learning_rate": 1.984180682238185e-05,
      "loss": 0.0739,
      "step": 460
    },
    {
      "epoch": 0.3094650205761317,
      "grad_norm": 0.34413977912636246,
      "learning_rate": 1.9820799491132196e-05,
      "loss": 0.073,
      "step": 470
    },
    {
      "epoch": 0.3160493827160494,
      "grad_norm": 0.3027743204483976,
      "learning_rate": 1.9798495444118612e-05,
      "loss": 0.072,
      "step": 480
    },
    {
      "epoch": 0.3226337448559671,
      "grad_norm": 0.33099543278662436,
      "learning_rate": 1.9774897626316142e-05,
      "loss": 0.074,
      "step": 490
    },
    {
      "epoch": 0.3292181069958848,
      "grad_norm": 2.8286706623610964,
      "learning_rate": 1.9750009153526345e-05,
      "loss": 0.072,
      "step": 500
    },
    {
      "epoch": 0.3358024691358025,
      "grad_norm": 1.168919329951862,
      "learning_rate": 1.972383331196589e-05,
      "loss": 0.121,
      "step": 510
    },
    {
      "epoch": 0.34238683127572017,
      "grad_norm": 1.417074927074083,
      "learning_rate": 1.9696373557832656e-05,
      "loss": 0.1133,
      "step": 520
    },
    {
      "epoch": 0.34897119341563787,
      "grad_norm": 0.38096667028056896,
      "learning_rate": 1.9667633516849386e-05,
      "loss": 0.0737,
      "step": 530
    },
    {
      "epoch": 0.35555555555555557,
      "grad_norm": 0.29346575341126296,
      "learning_rate": 1.963761698378495e-05,
      "loss": 0.0732,
      "step": 540
    },
    {
      "epoch": 0.36213991769547327,
      "grad_norm": 0.3123097226662431,
      "learning_rate": 1.9606327921953296e-05,
      "loss": 0.0699,
      "step": 550
    },
    {
      "epoch": 0.36872427983539097,
      "grad_norm": 0.29306500953532727,
      "learning_rate": 1.957377046269014e-05,
      "loss": 0.0715,
      "step": 560
    },
    {
      "epoch": 0.37530864197530867,
      "grad_norm": 0.30032528958941135,
      "learning_rate": 1.9539948904807486e-05,
      "loss": 0.0658,
      "step": 570
    },
    {
      "epoch": 0.38189300411522636,
      "grad_norm": 0.3927708921682921,
      "learning_rate": 1.9504867714025993e-05,
      "loss": 0.0705,
      "step": 580
    },
    {
      "epoch": 0.388477366255144,
      "grad_norm": 0.4267148067486437,
      "learning_rate": 1.946853152238536e-05,
      "loss": 0.0728,
      "step": 590
    },
    {
      "epoch": 0.3950617283950617,
      "grad_norm": 0.28126136574538696,
      "learning_rate": 1.9430945127632714e-05,
      "loss": 0.0687,
      "step": 600
    },
    {
      "epoch": 0.4016460905349794,
      "grad_norm": 0.2602257658508144,
      "learning_rate": 1.939211349258912e-05,
      "loss": 0.0721,
      "step": 610
    },
    {
      "epoch": 0.4082304526748971,
      "grad_norm": 0.2815558626070043,
      "learning_rate": 1.93520417444943e-05,
      "loss": 0.0742,
      "step": 620
    },
    {
      "epoch": 0.4148148148148148,
      "grad_norm": 0.31858259712820597,
      "learning_rate": 1.9310735174329654e-05,
      "loss": 0.0692,
      "step": 630
    },
    {
      "epoch": 0.4213991769547325,
      "grad_norm": 0.3269949366706829,
      "learning_rate": 1.926819923611965e-05,
      "loss": 0.0729,
      "step": 640
    },
    {
      "epoch": 0.4279835390946502,
      "grad_norm": 0.3256202782623892,
      "learning_rate": 1.9224439546211678e-05,
      "loss": 0.0677,
      "step": 650
    },
    {
      "epoch": 0.4345679012345679,
      "grad_norm": 0.2850429632430984,
      "learning_rate": 1.9179461882534488e-05,
      "loss": 0.0683,
      "step": 660
    },
    {
      "epoch": 0.4411522633744856,
      "grad_norm": 0.28386859115576163,
      "learning_rate": 1.9133272183835286e-05,
      "loss": 0.07,
      "step": 670
    },
    {
      "epoch": 0.4477366255144033,
      "grad_norm": 0.3262557709872669,
      "learning_rate": 1.908587654889559e-05,
      "loss": 0.0703,
      "step": 680
    },
    {
      "epoch": 0.454320987654321,
      "grad_norm": 0.32998452713524645,
      "learning_rate": 1.9037281235725965e-05,
      "loss": 0.0694,
      "step": 690
    },
    {
      "epoch": 0.4609053497942387,
      "grad_norm": 0.2853898322751637,
      "learning_rate": 1.8987492660739725e-05,
      "loss": 0.0653,
      "step": 700
    },
    {
      "epoch": 0.4674897119341564,
      "grad_norm": 0.2964101006678424,
      "learning_rate": 1.8936517397905728e-05,
      "loss": 0.0704,
      "step": 710
    },
    {
      "epoch": 0.4740740740740741,
      "grad_norm": 0.37173379047398913,
      "learning_rate": 1.888436217788038e-05,
      "loss": 0.0703,
      "step": 720
    },
    {
      "epoch": 0.4806584362139918,
      "grad_norm": 0.34431108355883716,
      "learning_rate": 1.8831033887118893e-05,
      "loss": 0.0707,
      "step": 730
    },
    {
      "epoch": 0.4872427983539095,
      "grad_norm": 0.2772646446879623,
      "learning_rate": 1.8776539566966066e-05,
      "loss": 0.0723,
      "step": 740
    },
    {
      "epoch": 0.49382716049382713,
      "grad_norm": 0.2789128124993562,
      "learning_rate": 1.8720886412726515e-05,
      "loss": 0.0691,
      "step": 750
    },
    {
      "epoch": 0.5004115226337449,
      "grad_norm": 0.358972355330604,
      "learning_rate": 1.8664081772714647e-05,
      "loss": 0.0735,
      "step": 760
    },
    {
      "epoch": 0.5069958847736625,
      "grad_norm": 0.3390004226510688,
      "learning_rate": 1.860613314728441e-05,
      "loss": 0.0671,
      "step": 770
    },
    {
      "epoch": 0.5135802469135803,
      "grad_norm": 0.2803284917324781,
      "learning_rate": 1.8547048187838943e-05,
      "loss": 0.0682,
      "step": 780
    },
    {
      "epoch": 0.5201646090534979,
      "grad_norm": 0.39106099409850015,
      "learning_rate": 1.848683469582032e-05,
      "loss": 0.0718,
      "step": 790
    },
    {
      "epoch": 0.5267489711934157,
      "grad_norm": 0.3785492333343748,
      "learning_rate": 1.8425500621679454e-05,
      "loss": 0.0725,
      "step": 800
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 0.30859125161768913,
      "learning_rate": 1.8363054063826343e-05,
      "loss": 0.072,
      "step": 810
    },
    {
      "epoch": 0.5399176954732511,
      "grad_norm": 0.2849783715486278,
      "learning_rate": 1.8299503267560782e-05,
      "loss": 0.0718,
      "step": 820
    },
    {
      "epoch": 0.5465020576131687,
      "grad_norm": 0.280774494956223,
      "learning_rate": 1.8234856623983653e-05,
      "loss": 0.0688,
      "step": 830
    },
    {
      "epoch": 0.5530864197530864,
      "grad_norm": 0.36360987461834693,
      "learning_rate": 1.8169122668889e-05,
      "loss": 0.0698,
      "step": 840
    },
    {
      "epoch": 0.5596707818930041,
      "grad_norm": 0.28076100136621623,
      "learning_rate": 1.8102310081636983e-05,
      "loss": 0.0682,
      "step": 850
    },
    {
      "epoch": 0.5662551440329218,
      "grad_norm": 0.288883908927145,
      "learning_rate": 1.8034427684007865e-05,
      "loss": 0.0625,
      "step": 860
    },
    {
      "epoch": 0.5728395061728395,
      "grad_norm": 0.33603054257245285,
      "learning_rate": 1.7965484439037206e-05,
      "loss": 0.0684,
      "step": 870
    },
    {
      "epoch": 0.5794238683127572,
      "grad_norm": 0.3327862519871356,
      "learning_rate": 1.789548944983241e-05,
      "loss": 0.0725,
      "step": 880
    },
    {
      "epoch": 0.5860082304526749,
      "grad_norm": 0.3221835656786138,
      "learning_rate": 1.7824451958370774e-05,
      "loss": 0.0675,
      "step": 890
    },
    {
      "epoch": 0.5925925925925926,
      "grad_norm": 0.28195104087059997,
      "learning_rate": 1.775238134427919e-05,
      "loss": 0.0693,
      "step": 900
    },
    {
      "epoch": 0.5991769547325103,
      "grad_norm": 0.3327008369024166,
      "learning_rate": 1.767928712359568e-05,
      "loss": 0.0681,
      "step": 910
    },
    {
      "epoch": 0.605761316872428,
      "grad_norm": 0.35690676600607474,
      "learning_rate": 1.7605178947512927e-05,
      "loss": 0.0737,
      "step": 920
    },
    {
      "epoch": 0.6123456790123457,
      "grad_norm": 0.3412188552301251,
      "learning_rate": 1.753006660110397e-05,
      "loss": 0.0645,
      "step": 930
    },
    {
      "epoch": 0.6189300411522634,
      "grad_norm": 0.29018005763996296,
      "learning_rate": 1.745396000203015e-05,
      "loss": 0.0665,
      "step": 940
    },
    {
      "epoch": 0.6255144032921811,
      "grad_norm": 0.2565775043232814,
      "learning_rate": 1.7376869199231675e-05,
      "loss": 0.0656,
      "step": 950
    },
    {
      "epoch": 0.6320987654320988,
      "grad_norm": 0.2791175761665844,
      "learning_rate": 1.7298804371600732e-05,
      "loss": 0.0697,
      "step": 960
    },
    {
      "epoch": 0.6386831275720165,
      "grad_norm": 0.2961938790167889,
      "learning_rate": 1.7219775826637508e-05,
      "loss": 0.0682,
      "step": 970
    },
    {
      "epoch": 0.6452674897119342,
      "grad_norm": 0.310720015090511,
      "learning_rate": 1.713979399908921e-05,
      "loss": 0.0697,
      "step": 980
    },
    {
      "epoch": 0.6518518518518519,
      "grad_norm": 0.31741581911825506,
      "learning_rate": 1.7058869449572283e-05,
      "loss": 0.0704,
      "step": 990
    },
    {
      "epoch": 0.6584362139917695,
      "grad_norm": 0.25036646851841887,
      "learning_rate": 1.697701286317801e-05,
      "loss": 0.0674,
      "step": 1000
    },
    {
      "epoch": 0.6650205761316872,
      "grad_norm": 0.30750003251490393,
      "learning_rate": 1.6894235048061684e-05,
      "loss": 0.069,
      "step": 1010
    },
    {
      "epoch": 0.671604938271605,
      "grad_norm": 0.25736303550768286,
      "learning_rate": 1.6810546934015506e-05,
      "loss": 0.0712,
      "step": 1020
    },
    {
      "epoch": 0.6781893004115226,
      "grad_norm": 0.3155820303320945,
      "learning_rate": 1.672595957102547e-05,
      "loss": 0.0669,
      "step": 1030
    },
    {
      "epoch": 0.6847736625514403,
      "grad_norm": 0.3164269400573639,
      "learning_rate": 1.664048412781232e-05,
      "loss": 0.071,
      "step": 1040
    },
    {
      "epoch": 0.691358024691358,
      "grad_norm": 0.2763190531728626,
      "learning_rate": 1.655413189035686e-05,
      "loss": 0.066,
      "step": 1050
    },
    {
      "epoch": 0.6979423868312757,
      "grad_norm": 0.29371479626382635,
      "learning_rate": 1.646691426040981e-05,
      "loss": 0.0658,
      "step": 1060
    },
    {
      "epoch": 0.7045267489711934,
      "grad_norm": 0.28075444305992714,
      "learning_rate": 1.637884275398631e-05,
      "loss": 0.0617,
      "step": 1070
    },
    {
      "epoch": 0.7111111111111111,
      "grad_norm": 0.30136424993159056,
      "learning_rate": 1.6289928999845387e-05,
      "loss": 0.0671,
      "step": 1080
    },
    {
      "epoch": 0.7176954732510288,
      "grad_norm": 0.260175676828675,
      "learning_rate": 1.6200184737954515e-05,
      "loss": 0.0663,
      "step": 1090
    },
    {
      "epoch": 0.7242798353909465,
      "grad_norm": 0.25757537953764154,
      "learning_rate": 1.6109621817939503e-05,
      "loss": 0.0677,
      "step": 1100
    },
    {
      "epoch": 0.7308641975308642,
      "grad_norm": 0.3477032681102901,
      "learning_rate": 1.60182521975199e-05,
      "loss": 0.0706,
      "step": 1110
    },
    {
      "epoch": 0.7374485596707819,
      "grad_norm": 0.3026822514515039,
      "learning_rate": 1.5926087940930108e-05,
      "loss": 0.0663,
      "step": 1120
    },
    {
      "epoch": 0.7440329218106996,
      "grad_norm": 0.27135918905023065,
      "learning_rate": 1.5833141217326474e-05,
      "loss": 0.0636,
      "step": 1130
    },
    {
      "epoch": 0.7506172839506173,
      "grad_norm": 0.32628846186337646,
      "learning_rate": 1.5739424299180473e-05,
      "loss": 0.064,
      "step": 1140
    },
    {
      "epoch": 0.757201646090535,
      "grad_norm": 0.3078401324398714,
      "learning_rate": 1.564494956065831e-05,
      "loss": 0.0649,
      "step": 1150
    },
    {
      "epoch": 0.7637860082304527,
      "grad_norm": 0.25416703467170093,
      "learning_rate": 1.5549729475987045e-05,
      "loss": 0.0657,
      "step": 1160
    },
    {
      "epoch": 0.7703703703703704,
      "grad_norm": 0.2881081888932816,
      "learning_rate": 1.5453776617807514e-05,
      "loss": 0.0649,
      "step": 1170
    },
    {
      "epoch": 0.776954732510288,
      "grad_norm": 0.30022122448678223,
      "learning_rate": 1.53571036555143e-05,
      "loss": 0.0684,
      "step": 1180
    },
    {
      "epoch": 0.7835390946502058,
      "grad_norm": 0.33048446828934225,
      "learning_rate": 1.525972335358287e-05,
      "loss": 0.0671,
      "step": 1190
    },
    {
      "epoch": 0.7901234567901234,
      "grad_norm": 0.3613590972350096,
      "learning_rate": 1.516164856988418e-05,
      "loss": 0.0677,
      "step": 1200
    },
    {
      "epoch": 0.7967078189300412,
      "grad_norm": 0.27185769524307135,
      "learning_rate": 1.5062892253986974e-05,
      "loss": 0.0601,
      "step": 1210
    },
    {
      "epoch": 0.8032921810699588,
      "grad_norm": 0.3090560452899117,
      "learning_rate": 1.4963467445447925e-05,
      "loss": 0.0664,
      "step": 1220
    },
    {
      "epoch": 0.8098765432098766,
      "grad_norm": 0.2967875145368493,
      "learning_rate": 1.4863387272089951e-05,
      "loss": 0.0619,
      "step": 1230
    },
    {
      "epoch": 0.8164609053497942,
      "grad_norm": 0.2819524057533708,
      "learning_rate": 1.4762664948268819e-05,
      "loss": 0.0629,
      "step": 1240
    },
    {
      "epoch": 0.823045267489712,
      "grad_norm": 0.26960961863052646,
      "learning_rate": 1.4661313773128378e-05,
      "loss": 0.0671,
      "step": 1250
    },
    {
      "epoch": 0.8296296296296296,
      "grad_norm": 0.2603448502135837,
      "learning_rate": 1.4559347128844543e-05,
      "loss": 0.0616,
      "step": 1260
    },
    {
      "epoch": 0.8362139917695474,
      "grad_norm": 0.27525610937960354,
      "learning_rate": 1.445677847885837e-05,
      "loss": 0.0664,
      "step": 1270
    },
    {
      "epoch": 0.842798353909465,
      "grad_norm": 0.22605093521859634,
      "learning_rate": 1.435362136609836e-05,
      "loss": 0.0577,
      "step": 1280
    },
    {
      "epoch": 0.8493827160493828,
      "grad_norm": 0.24864901698873845,
      "learning_rate": 1.4249889411192285e-05,
      "loss": 0.0645,
      "step": 1290
    },
    {
      "epoch": 0.8559670781893004,
      "grad_norm": 0.34406433332005937,
      "learning_rate": 1.4145596310668745e-05,
      "loss": 0.0692,
      "step": 1300
    },
    {
      "epoch": 0.8625514403292182,
      "grad_norm": 0.28721624391973316,
      "learning_rate": 1.4040755835148726e-05,
      "loss": 0.0634,
      "step": 1310
    },
    {
      "epoch": 0.8691358024691358,
      "grad_norm": 0.23992833244716943,
      "learning_rate": 1.3935381827527337e-05,
      "loss": 0.0677,
      "step": 1320
    },
    {
      "epoch": 0.8757201646090536,
      "grad_norm": 0.2736178034496462,
      "learning_rate": 1.3829488201146048e-05,
      "loss": 0.0578,
      "step": 1330
    },
    {
      "epoch": 0.8823045267489712,
      "grad_norm": 0.23190529623665304,
      "learning_rate": 1.372308893795559e-05,
      "loss": 0.0637,
      "step": 1340
    },
    {
      "epoch": 0.8888888888888888,
      "grad_norm": 0.3074951163100435,
      "learning_rate": 1.3616198086669814e-05,
      "loss": 0.0605,
      "step": 1350
    },
    {
      "epoch": 0.8954732510288066,
      "grad_norm": 0.3372946062104279,
      "learning_rate": 1.3508829760910733e-05,
      "loss": 0.0634,
      "step": 1360
    },
    {
      "epoch": 0.9020576131687242,
      "grad_norm": 0.27411971079704744,
      "learning_rate": 1.3400998137344988e-05,
      "loss": 0.0649,
      "step": 1370
    },
    {
      "epoch": 0.908641975308642,
      "grad_norm": 0.28065392133225836,
      "learning_rate": 1.3292717453812007e-05,
      "loss": 0.0664,
      "step": 1380
    },
    {
      "epoch": 0.9152263374485596,
      "grad_norm": 0.24381176230064203,
      "learning_rate": 1.3184002007444047e-05,
      "loss": 0.0609,
      "step": 1390
    },
    {
      "epoch": 0.9218106995884774,
      "grad_norm": 0.2661053065569555,
      "learning_rate": 1.307486615277846e-05,
      "loss": 0.065,
      "step": 1400
    },
    {
      "epoch": 0.928395061728395,
      "grad_norm": 0.2566543696146929,
      "learning_rate": 1.2965324299862333e-05,
      "loss": 0.0605,
      "step": 1410
    },
    {
      "epoch": 0.9349794238683128,
      "grad_norm": 0.2743337005891253,
      "learning_rate": 1.2855390912349841e-05,
      "loss": 0.0615,
      "step": 1420
    },
    {
      "epoch": 0.9415637860082304,
      "grad_norm": 0.27142150674415816,
      "learning_rate": 1.2745080505592473e-05,
      "loss": 0.0638,
      "step": 1430
    },
    {
      "epoch": 0.9481481481481482,
      "grad_norm": 0.3376407664510331,
      "learning_rate": 1.2634407644722475e-05,
      "loss": 0.063,
      "step": 1440
    },
    {
      "epoch": 0.9547325102880658,
      "grad_norm": 0.2943710963213783,
      "learning_rate": 1.2523386942729711e-05,
      "loss": 0.0661,
      "step": 1450
    },
    {
      "epoch": 0.9613168724279836,
      "grad_norm": 0.27205493163410066,
      "learning_rate": 1.2412033058532166e-05,
      "loss": 0.0582,
      "step": 1460
    },
    {
      "epoch": 0.9679012345679012,
      "grad_norm": 0.2882900893916512,
      "learning_rate": 1.2300360695040455e-05,
      "loss": 0.0603,
      "step": 1470
    },
    {
      "epoch": 0.974485596707819,
      "grad_norm": 0.2699431138398926,
      "learning_rate": 1.2188384597216456e-05,
      "loss": 0.0631,
      "step": 1480
    },
    {
      "epoch": 0.9810699588477366,
      "grad_norm": 0.27437035738940724,
      "learning_rate": 1.2076119550126434e-05,
      "loss": 0.0629,
      "step": 1490
    },
    {
      "epoch": 0.9876543209876543,
      "grad_norm": 0.2679913859251194,
      "learning_rate": 1.196358037698884e-05,
      "loss": 0.0609,
      "step": 1500
    },
    {
      "epoch": 0.994238683127572,
      "grad_norm": 0.26054011722862475,
      "learning_rate": 1.1850781937217118e-05,
      "loss": 0.0603,
      "step": 1510
    },
    {
      "epoch": 1.0006584362139919,
      "grad_norm": 0.2802502838189846,
      "learning_rate": 1.1737739124457659e-05,
      "loss": 0.0579,
      "step": 1520
    },
    {
      "epoch": 1.0072427983539094,
      "grad_norm": 0.35510024410868773,
      "learning_rate": 1.1624466864623318e-05,
      "loss": 0.0507,
      "step": 1530
    },
    {
      "epoch": 1.0138271604938272,
      "grad_norm": 0.277681593699746,
      "learning_rate": 1.1510980113922603e-05,
      "loss": 0.0489,
      "step": 1540
    },
    {
      "epoch": 1.020411522633745,
      "grad_norm": 0.3288263931355778,
      "learning_rate": 1.1397293856884905e-05,
      "loss": 0.0491,
      "step": 1550
    },
    {
      "epoch": 1.0269958847736627,
      "grad_norm": 0.3194763227922266,
      "learning_rate": 1.1283423104381982e-05,
      "loss": 0.0483,
      "step": 1560
    },
    {
      "epoch": 1.0335802469135802,
      "grad_norm": 0.2040227272582597,
      "learning_rate": 1.1169382891645926e-05,
      "loss": 0.045,
      "step": 1570
    },
    {
      "epoch": 1.040164609053498,
      "grad_norm": 0.2365516786275801,
      "learning_rate": 1.1055188276283996e-05,
      "loss": 0.0501,
      "step": 1580
    },
    {
      "epoch": 1.0467489711934157,
      "grad_norm": 0.2860681326747978,
      "learning_rate": 1.0940854336290398e-05,
      "loss": 0.0482,
      "step": 1590
    },
    {
      "epoch": 1.0533333333333332,
      "grad_norm": 0.2782290326046087,
      "learning_rate": 1.0826396168055453e-05,
      "loss": 0.0501,
      "step": 1600
    },
    {
      "epoch": 1.059917695473251,
      "grad_norm": 0.24475848111652504,
      "learning_rate": 1.0711828884372287e-05,
      "loss": 0.0474,
      "step": 1610
    },
    {
      "epoch": 1.0665020576131687,
      "grad_norm": 0.2810653047963972,
      "learning_rate": 1.059716761244138e-05,
      "loss": 0.0497,
      "step": 1620
    },
    {
      "epoch": 1.0730864197530865,
      "grad_norm": 0.29025799663429724,
      "learning_rate": 1.0482427491873202e-05,
      "loss": 0.0495,
      "step": 1630
    },
    {
      "epoch": 1.079670781893004,
      "grad_norm": 0.2834170259334841,
      "learning_rate": 1.0367623672689225e-05,
      "loss": 0.0481,
      "step": 1640
    },
    {
      "epoch": 1.0862551440329218,
      "grad_norm": 0.23229206192072585,
      "learning_rate": 1.0252771313321528e-05,
      "loss": 0.0487,
      "step": 1650
    },
    {
      "epoch": 1.0928395061728395,
      "grad_norm": 0.2876592601523108,
      "learning_rate": 1.013788557861133e-05,
      "loss": 0.0501,
      "step": 1660
    },
    {
      "epoch": 1.0994238683127573,
      "grad_norm": 0.22076045055427748,
      "learning_rate": 1.002298163780665e-05,
      "loss": 0.0495,
      "step": 1670
    },
    {
      "epoch": 1.1060082304526748,
      "grad_norm": 0.26630456708114275,
      "learning_rate": 9.908074662559403e-06,
      "loss": 0.0504,
      "step": 1680
    },
    {
      "epoch": 1.1125925925925926,
      "grad_norm": 0.3139296644641171,
      "learning_rate": 9.793179824922162e-06,
      "loss": 0.0519,
      "step": 1690
    },
    {
      "epoch": 1.1191769547325103,
      "grad_norm": 0.26535265666452507,
      "learning_rate": 9.67831229534488e-06,
      "loss": 0.0462,
      "step": 1700
    },
    {
      "epoch": 1.125761316872428,
      "grad_norm": 0.2447275618207565,
      "learning_rate": 9.563487240671808e-06,
      "loss": 0.046,
      "step": 1710
    },
    {
      "epoch": 1.1323456790123456,
      "grad_norm": 0.29887021341625364,
      "learning_rate": 9.44871982213892e-06,
      "loss": 0.0464,
      "step": 1720
    },
    {
      "epoch": 1.1389300411522634,
      "grad_norm": 0.2548535806101749,
      "learning_rate": 9.33402519337204e-06,
      "loss": 0.0466,
      "step": 1730
    },
    {
      "epoch": 1.1455144032921811,
      "grad_norm": 0.2435720674026185,
      "learning_rate": 9.219418498386002e-06,
      "loss": 0.0483,
      "step": 1740
    },
    {
      "epoch": 1.1520987654320987,
      "grad_norm": 0.33161507675613966,
      "learning_rate": 9.104914869585057e-06,
      "loss": 0.0492,
      "step": 1750
    },
    {
      "epoch": 1.1586831275720164,
      "grad_norm": 0.2949494236743323,
      "learning_rate": 8.99052942576485e-06,
      "loss": 0.0484,
      "step": 1760
    },
    {
      "epoch": 1.1652674897119342,
      "grad_norm": 0.2896787745807131,
      "learning_rate": 8.876277270116146e-06,
      "loss": 0.0508,
      "step": 1770
    },
    {
      "epoch": 1.171851851851852,
      "grad_norm": 0.23254732847556894,
      "learning_rate": 8.762173488230636e-06,
      "loss": 0.0466,
      "step": 1780
    },
    {
      "epoch": 1.1784362139917695,
      "grad_norm": 0.24410269407834392,
      "learning_rate": 8.648233146109096e-06,
      "loss": 0.0453,
      "step": 1790
    },
    {
      "epoch": 1.1850205761316872,
      "grad_norm": 0.26054519963031003,
      "learning_rate": 8.534471288172088e-06,
      "loss": 0.0454,
      "step": 1800
    },
    {
      "epoch": 1.191604938271605,
      "grad_norm": 0.256497827424079,
      "learning_rate": 8.420902935273519e-06,
      "loss": 0.048,
      "step": 1810
    },
    {
      "epoch": 1.1981893004115227,
      "grad_norm": 0.2974875612681633,
      "learning_rate": 8.30754308271736e-06,
      "loss": 0.0484,
      "step": 1820
    },
    {
      "epoch": 1.2047736625514402,
      "grad_norm": 0.2771311630214479,
      "learning_rate": 8.194406698277662e-06,
      "loss": 0.0456,
      "step": 1830
    },
    {
      "epoch": 1.211358024691358,
      "grad_norm": 0.2578544317764551,
      "learning_rate": 8.081508720222258e-06,
      "loss": 0.0489,
      "step": 1840
    },
    {
      "epoch": 1.2179423868312758,
      "grad_norm": 0.2980262635958971,
      "learning_rate": 7.968864055340358e-06,
      "loss": 0.0465,
      "step": 1850
    },
    {
      "epoch": 1.2245267489711935,
      "grad_norm": 0.24044451552764326,
      "learning_rate": 7.85648757697428e-06,
      "loss": 0.0499,
      "step": 1860
    },
    {
      "epoch": 1.231111111111111,
      "grad_norm": 0.24934077402765678,
      "learning_rate": 7.744394123055612e-06,
      "loss": 0.0478,
      "step": 1870
    },
    {
      "epoch": 1.2376954732510288,
      "grad_norm": 0.30627124671375233,
      "learning_rate": 7.632598494146041e-06,
      "loss": 0.0474,
      "step": 1880
    },
    {
      "epoch": 1.2442798353909466,
      "grad_norm": 0.27856167370667784,
      "learning_rate": 7.521115451483143e-06,
      "loss": 0.0473,
      "step": 1890
    },
    {
      "epoch": 1.250864197530864,
      "grad_norm": 0.2934881906212686,
      "learning_rate": 7.4099597150312994e-06,
      "loss": 0.0483,
      "step": 1900
    },
    {
      "epoch": 1.2574485596707818,
      "grad_norm": 0.2812946240766539,
      "learning_rate": 7.2991459615381525e-06,
      "loss": 0.0446,
      "step": 1910
    },
    {
      "epoch": 1.2640329218106996,
      "grad_norm": 0.2873116667440875,
      "learning_rate": 7.18868882259669e-06,
      "loss": 0.0448,
      "step": 1920
    },
    {
      "epoch": 1.2706172839506173,
      "grad_norm": 0.21940218487234736,
      "learning_rate": 7.0786028827133436e-06,
      "loss": 0.0453,
      "step": 1930
    },
    {
      "epoch": 1.277201646090535,
      "grad_norm": 0.24051886922282145,
      "learning_rate": 6.968902677382267e-06,
      "loss": 0.0446,
      "step": 1940
    },
    {
      "epoch": 1.2837860082304526,
      "grad_norm": 0.24626322456669333,
      "learning_rate": 6.859602691166116e-06,
      "loss": 0.0451,
      "step": 1950
    },
    {
      "epoch": 1.2903703703703704,
      "grad_norm": 0.2678893327945353,
      "learning_rate": 6.750717355783542e-06,
      "loss": 0.0486,
      "step": 1960
    },
    {
      "epoch": 1.2969547325102881,
      "grad_norm": 0.2668368834905403,
      "learning_rate": 6.642261048203645e-06,
      "loss": 0.0464,
      "step": 1970
    },
    {
      "epoch": 1.3035390946502057,
      "grad_norm": 0.26490057296285885,
      "learning_rate": 6.534248088747681e-06,
      "loss": 0.0434,
      "step": 1980
    },
    {
      "epoch": 1.3101234567901234,
      "grad_norm": 0.29323274134112237,
      "learning_rate": 6.426692739198247e-06,
      "loss": 0.0446,
      "step": 1990
    },
    {
      "epoch": 1.3167078189300412,
      "grad_norm": 0.28908056291849554,
      "learning_rate": 6.3196092009161745e-06,
      "loss": 0.0452,
      "step": 2000
    },
    {
      "epoch": 1.323292181069959,
      "grad_norm": 0.25086566194095056,
      "learning_rate": 6.2130116129654226e-06,
      "loss": 0.043,
      "step": 2010
    },
    {
      "epoch": 1.3298765432098765,
      "grad_norm": 0.26535183878935253,
      "learning_rate": 6.106914050246195e-06,
      "loss": 0.0438,
      "step": 2020
    },
    {
      "epoch": 1.3364609053497942,
      "grad_norm": 0.22348939883537003,
      "learning_rate": 6.001330521636503e-06,
      "loss": 0.0449,
      "step": 2030
    },
    {
      "epoch": 1.343045267489712,
      "grad_norm": 0.2894385435857123,
      "learning_rate": 5.8962749681424816e-06,
      "loss": 0.042,
      "step": 2040
    },
    {
      "epoch": 1.3496296296296295,
      "grad_norm": 0.26457325904752105,
      "learning_rate": 5.791761261057647e-06,
      "loss": 0.0442,
      "step": 2050
    },
    {
      "epoch": 1.3562139917695473,
      "grad_norm": 0.3068389385519454,
      "learning_rate": 5.687803200131365e-06,
      "loss": 0.0445,
      "step": 2060
    },
    {
      "epoch": 1.362798353909465,
      "grad_norm": 0.27184806182261856,
      "learning_rate": 5.584414511746759e-06,
      "loss": 0.0429,
      "step": 2070
    },
    {
      "epoch": 1.3693827160493828,
      "grad_norm": 0.27850156271247684,
      "learning_rate": 5.481608847108304e-06,
      "loss": 0.0449,
      "step": 2080
    },
    {
      "epoch": 1.3759670781893005,
      "grad_norm": 0.24442973473869922,
      "learning_rate": 5.379399780439378e-06,
      "loss": 0.0429,
      "step": 2090
    },
    {
      "epoch": 1.382551440329218,
      "grad_norm": 0.3053775299401999,
      "learning_rate": 5.2778008071899215e-06,
      "loss": 0.0451,
      "step": 2100
    },
    {
      "epoch": 1.3891358024691358,
      "grad_norm": 0.2791009219054029,
      "learning_rate": 5.176825342254557e-06,
      "loss": 0.0432,
      "step": 2110
    },
    {
      "epoch": 1.3957201646090536,
      "grad_norm": 0.24621991407843596,
      "learning_rate": 5.076486718201292e-06,
      "loss": 0.0404,
      "step": 2120
    },
    {
      "epoch": 1.402304526748971,
      "grad_norm": 0.26338569735525286,
      "learning_rate": 4.976798183511131e-06,
      "loss": 0.0432,
      "step": 2130
    },
    {
      "epoch": 1.4088888888888889,
      "grad_norm": 0.20763210064318866,
      "learning_rate": 4.8777729008287824e-06,
      "loss": 0.044,
      "step": 2140
    },
    {
      "epoch": 1.4154732510288066,
      "grad_norm": 0.2707221214388065,
      "learning_rate": 4.779423945224685e-06,
      "loss": 0.0447,
      "step": 2150
    },
    {
      "epoch": 1.4220576131687244,
      "grad_norm": 0.24759594327493678,
      "learning_rate": 4.6817643024686e-06,
      "loss": 0.0409,
      "step": 2160
    },
    {
      "epoch": 1.428641975308642,
      "grad_norm": 0.26793363060402137,
      "learning_rate": 4.584806867315012e-06,
      "loss": 0.0434,
      "step": 2170
    },
    {
      "epoch": 1.4352263374485597,
      "grad_norm": 0.2619545136078983,
      "learning_rate": 4.4885644418005204e-06,
      "loss": 0.0414,
      "step": 2180
    },
    {
      "epoch": 1.4418106995884774,
      "grad_norm": 0.28117466372187855,
      "learning_rate": 4.393049733553514e-06,
      "loss": 0.0422,
      "step": 2190
    },
    {
      "epoch": 1.448395061728395,
      "grad_norm": 0.26612476494237675,
      "learning_rate": 4.2982753541162554e-06,
      "loss": 0.0429,
      "step": 2200
    },
    {
      "epoch": 1.4549794238683127,
      "grad_norm": 0.2499798501451582,
      "learning_rate": 4.204253817279696e-06,
      "loss": 0.0415,
      "step": 2210
    },
    {
      "epoch": 1.4615637860082304,
      "grad_norm": 0.2780004458942545,
      "learning_rate": 4.110997537431187e-06,
      "loss": 0.0425,
      "step": 2220
    },
    {
      "epoch": 1.4681481481481482,
      "grad_norm": 0.3192672826059967,
      "learning_rate": 4.018518827915312e-06,
      "loss": 0.0441,
      "step": 2230
    },
    {
      "epoch": 1.474732510288066,
      "grad_norm": 0.27703933400252145,
      "learning_rate": 3.926829899408044e-06,
      "loss": 0.0401,
      "step": 2240
    },
    {
      "epoch": 1.4813168724279835,
      "grad_norm": 0.2823126837130346,
      "learning_rate": 3.835942858304502e-06,
      "loss": 0.0403,
      "step": 2250
    },
    {
      "epoch": 1.4879012345679012,
      "grad_norm": 0.2731273650807394,
      "learning_rate": 3.7458697051204253e-06,
      "loss": 0.0418,
      "step": 2260
    },
    {
      "epoch": 1.494485596707819,
      "grad_norm": 0.24352619762339012,
      "learning_rate": 3.6566223329076767e-06,
      "loss": 0.0416,
      "step": 2270
    },
    {
      "epoch": 1.5010699588477365,
      "grad_norm": 0.28451768132403726,
      "learning_rate": 3.5682125256838995e-06,
      "loss": 0.0415,
      "step": 2280
    },
    {
      "epoch": 1.5076543209876543,
      "grad_norm": 0.28620812939315216,
      "learning_rate": 3.480651956876575e-06,
      "loss": 0.041,
      "step": 2290
    },
    {
      "epoch": 1.514238683127572,
      "grad_norm": 0.21963136320519067,
      "learning_rate": 3.3939521877817016e-06,
      "loss": 0.0411,
      "step": 2300
    },
    {
      "epoch": 1.5208230452674898,
      "grad_norm": 0.31945344360590944,
      "learning_rate": 3.308124666037271e-06,
      "loss": 0.0408,
      "step": 2310
    },
    {
      "epoch": 1.5274074074074075,
      "grad_norm": 0.3139422167179338,
      "learning_rate": 3.223180724111734e-06,
      "loss": 0.0433,
      "step": 2320
    },
    {
      "epoch": 1.533991769547325,
      "grad_norm": 0.2386939526426441,
      "learning_rate": 3.139131577807699e-06,
      "loss": 0.0411,
      "step": 2330
    },
    {
      "epoch": 1.5405761316872428,
      "grad_norm": 0.2703524104892378,
      "learning_rate": 3.0559883247810152e-06,
      "loss": 0.0417,
      "step": 2340
    },
    {
      "epoch": 1.5471604938271604,
      "grad_norm": 0.25361111617911514,
      "learning_rate": 2.9737619430754773e-06,
      "loss": 0.0408,
      "step": 2350
    },
    {
      "epoch": 1.5537448559670781,
      "grad_norm": 0.2400249023794216,
      "learning_rate": 2.8924632896732963e-06,
      "loss": 0.0425,
      "step": 2360
    },
    {
      "epoch": 1.5603292181069959,
      "grad_norm": 0.2650927496831694,
      "learning_rate": 2.8121030990615717e-06,
      "loss": 0.0413,
      "step": 2370
    },
    {
      "epoch": 1.5669135802469136,
      "grad_norm": 0.2819908762212163,
      "learning_rate": 2.7326919818149356e-06,
      "loss": 0.0421,
      "step": 2380
    },
    {
      "epoch": 1.5734979423868314,
      "grad_norm": 0.2913835799722889,
      "learning_rate": 2.654240423194555e-06,
      "loss": 0.0421,
      "step": 2390
    },
    {
      "epoch": 1.5800823045267491,
      "grad_norm": 0.2619137449154397,
      "learning_rate": 2.5767587817636908e-06,
      "loss": 0.0403,
      "step": 2400
    },
    {
      "epoch": 1.5866666666666667,
      "grad_norm": 0.3168699518532093,
      "learning_rate": 2.5002572880199706e-06,
      "loss": 0.042,
      "step": 2410
    },
    {
      "epoch": 1.5932510288065842,
      "grad_norm": 0.279028781919842,
      "learning_rate": 2.424746043044569e-06,
      "loss": 0.04,
      "step": 2420
    },
    {
      "epoch": 1.599835390946502,
      "grad_norm": 0.27749109750003076,
      "learning_rate": 2.350235017168493e-06,
      "loss": 0.0387,
      "step": 2430
    },
    {
      "epoch": 1.6064197530864197,
      "grad_norm": 0.2723149006591808,
      "learning_rate": 2.276734048656133e-06,
      "loss": 0.0398,
      "step": 2440
    },
    {
      "epoch": 1.6130041152263375,
      "grad_norm": 0.23989841202762976,
      "learning_rate": 2.204252842406216e-06,
      "loss": 0.04,
      "step": 2450
    },
    {
      "epoch": 1.6195884773662552,
      "grad_norm": 0.255318273830075,
      "learning_rate": 2.132800968670414e-06,
      "loss": 0.0393,
      "step": 2460
    },
    {
      "epoch": 1.626172839506173,
      "grad_norm": 0.25389109080238237,
      "learning_rate": 2.0623878617896954e-06,
      "loss": 0.0395,
      "step": 2470
    },
    {
      "epoch": 1.6327572016460905,
      "grad_norm": 0.2924114124112562,
      "learning_rate": 1.9930228189486576e-06,
      "loss": 0.0403,
      "step": 2480
    },
    {
      "epoch": 1.6393415637860083,
      "grad_norm": 0.24737384773403684,
      "learning_rate": 1.9247149989479243e-06,
      "loss": 0.0407,
      "step": 2490
    },
    {
      "epoch": 1.6459259259259258,
      "grad_norm": 0.26668053446933565,
      "learning_rate": 1.8574734209948452e-06,
      "loss": 0.0394,
      "step": 2500
    },
    {
      "epoch": 1.6525102880658435,
      "grad_norm": 0.25979254838989646,
      "learning_rate": 1.791306963512629e-06,
      "loss": 0.0392,
      "step": 2510
    },
    {
      "epoch": 1.6590946502057613,
      "grad_norm": 0.2808416728163827,
      "learning_rate": 1.7262243629680542e-06,
      "loss": 0.0406,
      "step": 2520
    },
    {
      "epoch": 1.665679012345679,
      "grad_norm": 0.2651943758260359,
      "learning_rate": 1.6622342127179159e-06,
      "loss": 0.0398,
      "step": 2530
    },
    {
      "epoch": 1.6722633744855968,
      "grad_norm": 0.2998953886189872,
      "learning_rate": 1.5993449618743962e-06,
      "loss": 0.0394,
      "step": 2540
    },
    {
      "epoch": 1.6788477366255146,
      "grad_norm": 0.3012596814004147,
      "learning_rate": 1.5375649141894445e-06,
      "loss": 0.0396,
      "step": 2550
    },
    {
      "epoch": 1.685432098765432,
      "grad_norm": 0.22926184017494686,
      "learning_rate": 1.4769022269583778e-06,
      "loss": 0.0392,
      "step": 2560
    },
    {
      "epoch": 1.6920164609053496,
      "grad_norm": 0.2915606153726947,
      "learning_rate": 1.41736490994282e-06,
      "loss": 0.0367,
      "step": 2570
    },
    {
      "epoch": 1.6986008230452674,
      "grad_norm": 0.25149262268239386,
      "learning_rate": 1.3589608243130913e-06,
      "loss": 0.0393,
      "step": 2580
    },
    {
      "epoch": 1.7051851851851851,
      "grad_norm": 0.23021205825482152,
      "learning_rate": 1.3016976816102488e-06,
      "loss": 0.039,
      "step": 2590
    },
    {
      "epoch": 1.711769547325103,
      "grad_norm": 0.23806858280834733,
      "learning_rate": 1.245583042727877e-06,
      "loss": 0.0373,
      "step": 2600
    },
    {
      "epoch": 1.7183539094650206,
      "grad_norm": 0.23371665490669846,
      "learning_rate": 1.1906243169137565e-06,
      "loss": 0.0373,
      "step": 2610
    },
    {
      "epoch": 1.7249382716049384,
      "grad_norm": 0.2358952389413928,
      "learning_rate": 1.1368287607915652e-06,
      "loss": 0.0368,
      "step": 2620
    },
    {
      "epoch": 1.731522633744856,
      "grad_norm": 0.26299443632402053,
      "learning_rate": 1.084203477402731e-06,
      "loss": 0.0383,
      "step": 2630
    },
    {
      "epoch": 1.7381069958847737,
      "grad_norm": 0.25194576045416844,
      "learning_rate": 1.0327554152685637e-06,
      "loss": 0.0385,
      "step": 2640
    },
    {
      "epoch": 1.7446913580246912,
      "grad_norm": 0.27055501403193677,
      "learning_rate": 9.82491367472791e-07,
      "loss": 0.0373,
      "step": 2650
    },
    {
      "epoch": 1.751275720164609,
      "grad_norm": 0.2516257996938441,
      "learning_rate": 9.334179707646063e-07,
      "loss": 0.0386,
      "step": 2660
    },
    {
      "epoch": 1.7578600823045267,
      "grad_norm": 0.2372611964667714,
      "learning_rate": 8.855417046823823e-07,
      "loss": 0.0388,
      "step": 2670
    },
    {
      "epoch": 1.7644444444444445,
      "grad_norm": 0.2612395915833542,
      "learning_rate": 8.388688906981068e-07,
      "loss": 0.0378,
      "step": 2680
    },
    {
      "epoch": 1.7710288065843622,
      "grad_norm": 0.2819966444512503,
      "learning_rate": 7.93405691382736e-07,
      "loss": 0.0374,
      "step": 2690
    },
    {
      "epoch": 1.77761316872428,
      "grad_norm": 0.2376341076802809,
      "learning_rate": 7.491581095924771e-07,
      "loss": 0.0388,
      "step": 2700
    },
    {
      "epoch": 1.7841975308641975,
      "grad_norm": 0.27938706666642293,
      "learning_rate": 7.061319876762029e-07,
      "loss": 0.0367,
      "step": 2710
    },
    {
      "epoch": 1.7907818930041153,
      "grad_norm": 0.23576084937693437,
      "learning_rate": 6.64333006704031e-07,
      "loss": 0.0358,
      "step": 2720
    },
    {
      "epoch": 1.7973662551440328,
      "grad_norm": 0.2634596099223397,
      "learning_rate": 6.237666857172198e-07,
      "loss": 0.0389,
      "step": 2730
    },
    {
      "epoch": 1.8039506172839506,
      "grad_norm": 0.2477361055678762,
      "learning_rate": 5.844383809994358e-07,
      "loss": 0.0377,
      "step": 2740
    },
    {
      "epoch": 1.8105349794238683,
      "grad_norm": 0.25385468933758476,
      "learning_rate": 5.463532853695252e-07,
      "loss": 0.0369,
      "step": 2750
    },
    {
      "epoch": 1.817119341563786,
      "grad_norm": 0.2882725454010458,
      "learning_rate": 5.095164274958675e-07,
      "loss": 0.0371,
      "step": 2760
    },
    {
      "epoch": 1.8237037037037038,
      "grad_norm": 0.2408825349510827,
      "learning_rate": 4.739326712324044e-07,
      "loss": 0.0371,
      "step": 2770
    },
    {
      "epoch": 1.8302880658436214,
      "grad_norm": 0.25414366178115644,
      "learning_rate": 4.3960671497642384e-07,
      "loss": 0.0356,
      "step": 2780
    },
    {
      "epoch": 1.8368724279835391,
      "grad_norm": 0.2616526174196436,
      "learning_rate": 4.0654309104819266e-07,
      "loss": 0.0389,
      "step": 2790
    },
    {
      "epoch": 1.8434567901234566,
      "grad_norm": 0.2552640639520394,
      "learning_rate": 3.7474616509252326e-07,
      "loss": 0.0369,
      "step": 2800
    },
    {
      "epoch": 1.8500411522633744,
      "grad_norm": 0.27534471767419216,
      "learning_rate": 3.4422013550234555e-07,
      "loss": 0.036,
      "step": 2810
    },
    {
      "epoch": 1.8566255144032922,
      "grad_norm": 0.2268489752823173,
      "learning_rate": 3.149690328643573e-07,
      "loss": 0.0378,
      "step": 2820
    },
    {
      "epoch": 1.86320987654321,
      "grad_norm": 0.24850575363659771,
      "learning_rate": 2.8699671942683194e-07,
      "loss": 0.0375,
      "step": 2830
    },
    {
      "epoch": 1.8697942386831277,
      "grad_norm": 0.2307919516443814,
      "learning_rate": 2.603068885896631e-07,
      "loss": 0.0368,
      "step": 2840
    },
    {
      "epoch": 1.8763786008230454,
      "grad_norm": 0.27775012465284954,
      "learning_rate": 2.3490306441669385e-07,
      "loss": 0.0379,
      "step": 2850
    },
    {
      "epoch": 1.882962962962963,
      "grad_norm": 0.26853576536071394,
      "learning_rate": 2.1078860117040188e-07,
      "loss": 0.0374,
      "step": 2860
    },
    {
      "epoch": 1.8895473251028807,
      "grad_norm": 0.24536780879948297,
      "learning_rate": 1.8796668286902408e-07,
      "loss": 0.0372,
      "step": 2870
    },
    {
      "epoch": 1.8961316872427982,
      "grad_norm": 0.24350491480793685,
      "learning_rate": 1.6644032286612822e-07,
      "loss": 0.0367,
      "step": 2880
    },
    {
      "epoch": 1.902716049382716,
      "grad_norm": 0.26592772651933727,
      "learning_rate": 1.4621236345275013e-07,
      "loss": 0.0384,
      "step": 2890
    },
    {
      "epoch": 1.9093004115226337,
      "grad_norm": 0.273830096695144,
      "learning_rate": 1.272854754821018e-07,
      "loss": 0.0376,
      "step": 2900
    },
    {
      "epoch": 1.9158847736625515,
      "grad_norm": 0.24358383615739146,
      "learning_rate": 1.0966215801691216e-07,
      "loss": 0.0378,
      "step": 2910
    },
    {
      "epoch": 1.9224691358024693,
      "grad_norm": 0.2350902988501883,
      "learning_rate": 9.334473799946231e-08,
      "loss": 0.036,
      "step": 2920
    },
    {
      "epoch": 1.9290534979423868,
      "grad_norm": 0.2898622674756883,
      "learning_rate": 7.833536994433899e-08,
      "loss": 0.0384,
      "step": 2930
    },
    {
      "epoch": 1.9356378600823045,
      "grad_norm": 0.2274996600072062,
      "learning_rate": 6.463603565396214e-08,
      "loss": 0.0357,
      "step": 2940
    },
    {
      "epoch": 1.942222222222222,
      "grad_norm": 0.26822688807400513,
      "learning_rate": 5.224854395690093e-08,
      "loss": 0.0393,
      "step": 2950
    },
    {
      "epoch": 1.9488065843621398,
      "grad_norm": 0.24259809452621967,
      "learning_rate": 4.117453046905362e-08,
      "loss": 0.0381,
      "step": 2960
    },
    {
      "epoch": 1.9553909465020576,
      "grad_norm": 0.30345266961207956,
      "learning_rate": 3.1415457377674816e-08,
      "loss": 0.0389,
      "step": 2970
    },
    {
      "epoch": 1.9619753086419753,
      "grad_norm": 0.24356362727209732,
      "learning_rate": 2.2972613248316565e-08,
      "loss": 0.0351,
      "step": 2980
    },
    {
      "epoch": 1.968559670781893,
      "grad_norm": 0.25046455301579434,
      "learning_rate": 1.5847112854689982e-08,
      "loss": 0.0377,
      "step": 2990
    },
    {
      "epoch": 1.9751440329218108,
      "grad_norm": 0.24111018893505698,
      "learning_rate": 1.003989703146635e-08,
      "loss": 0.0379,
      "step": 3000
    },
    {
      "epoch": 1.9817283950617284,
      "grad_norm": 0.25371728642896235,
      "learning_rate": 5.5517325500609175e-09,
      "loss": 0.0375,
      "step": 3010
    },
    {
      "epoch": 1.9883127572016461,
      "grad_norm": 0.24486464532889268,
      "learning_rate": 2.383212017381675e-09,
      "loss": 0.0358,
      "step": 3020
    },
    {
      "epoch": 1.9948971193415637,
      "grad_norm": 0.2608068274735689,
      "learning_rate": 5.34753797587495e-10,
      "loss": 0.0366,
      "step": 3030
    },
    {
      "epoch": 2.0,
      "step": 3038,
      "total_flos": 3771710697373696.0,
      "train_loss": 0.06704422599041909,
      "train_runtime": 34253.9666,
      "train_samples_per_second": 17.026,
      "train_steps_per_second": 0.089
    }
  ],
  "logging_steps": 10,
  "max_steps": 3038,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 3771710697373696.0,
  "train_batch_size": 3,
  "trial_name": null,
  "trial_params": null
}