{
  "best_global_step": 2000,
  "best_metric": 0.8704838022863507,
  "best_model_checkpoint": "./SALAMA_NEWMED9/checkpoint-2000",
  "epoch": 1.1983520599250936,
  "eval_steps": 2000,
  "global_step": 2000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.00599250936329588,
      "grad_norm": 3.4430904388427734,
      "learning_rate": 1.8e-07,
      "loss": 0.0305,
      "step": 10
    },
    {
      "epoch": 0.01198501872659176,
      "grad_norm": 1.3001453876495361,
      "learning_rate": 3.8e-07,
      "loss": 0.0217,
      "step": 20
    },
    {
      "epoch": 0.017977528089887642,
      "grad_norm": 2.323173999786377,
      "learning_rate": 5.800000000000001e-07,
      "loss": 0.0259,
      "step": 30
    },
    {
      "epoch": 0.02397003745318352,
      "grad_norm": 1.480966329574585,
      "learning_rate": 7.8e-07,
      "loss": 0.0274,
      "step": 40
    },
    {
      "epoch": 0.0299625468164794,
      "grad_norm": 0.8444674015045166,
      "learning_rate": 9.800000000000001e-07,
      "loss": 0.0204,
      "step": 50
    },
    {
      "epoch": 0.035955056179775284,
      "grad_norm": 4.492954254150391,
      "learning_rate": 1.1800000000000001e-06,
      "loss": 0.0362,
      "step": 60
    },
    {
      "epoch": 0.041947565543071164,
      "grad_norm": 0.8774360418319702,
      "learning_rate": 1.3800000000000001e-06,
      "loss": 0.0283,
      "step": 70
    },
    {
      "epoch": 0.04794007490636704,
      "grad_norm": 1.3773318529129028,
      "learning_rate": 1.5800000000000001e-06,
      "loss": 0.0255,
      "step": 80
    },
    {
      "epoch": 0.05393258426966292,
      "grad_norm": 1.632542610168457,
      "learning_rate": 1.7800000000000001e-06,
      "loss": 0.0186,
      "step": 90
    },
    {
      "epoch": 0.0599250936329588,
      "grad_norm": 2.4346158504486084,
      "learning_rate": 1.98e-06,
      "loss": 0.0311,
      "step": 100
    },
    {
      "epoch": 0.06591760299625468,
      "grad_norm": 1.4394844770431519,
      "learning_rate": 2.1800000000000003e-06,
      "loss": 0.0279,
      "step": 110
    },
    {
      "epoch": 0.07191011235955057,
      "grad_norm": 2.2031307220458984,
      "learning_rate": 2.38e-06,
      "loss": 0.0223,
      "step": 120
    },
    {
      "epoch": 0.07790262172284644,
      "grad_norm": 1.1016514301300049,
      "learning_rate": 2.5800000000000003e-06,
      "loss": 0.0159,
      "step": 130
    },
    {
      "epoch": 0.08389513108614233,
      "grad_norm": 2.271472454071045,
      "learning_rate": 2.7800000000000005e-06,
      "loss": 0.0187,
      "step": 140
    },
    {
      "epoch": 0.0898876404494382,
      "grad_norm": 1.3717912435531616,
      "learning_rate": 2.9800000000000003e-06,
      "loss": 0.0191,
      "step": 150
    },
    {
      "epoch": 0.09588014981273409,
      "grad_norm": 2.001311779022217,
      "learning_rate": 3.1800000000000005e-06,
      "loss": 0.0245,
      "step": 160
    },
    {
      "epoch": 0.10187265917602996,
      "grad_norm": 2.1671388149261475,
      "learning_rate": 3.3800000000000007e-06,
      "loss": 0.0207,
      "step": 170
    },
    {
      "epoch": 0.10786516853932585,
      "grad_norm": 2.7302298545837402,
      "learning_rate": 3.58e-06,
      "loss": 0.0251,
      "step": 180
    },
    {
      "epoch": 0.11385767790262172,
      "grad_norm": 2.360443115234375,
      "learning_rate": 3.7800000000000002e-06,
      "loss": 0.0279,
      "step": 190
    },
    {
      "epoch": 0.1198501872659176,
      "grad_norm": 2.1473894119262695,
      "learning_rate": 3.980000000000001e-06,
      "loss": 0.0166,
      "step": 200
    },
    {
      "epoch": 0.1258426966292135,
      "grad_norm": 2.1437275409698486,
      "learning_rate": 4.18e-06,
      "loss": 0.0296,
      "step": 210
    },
    {
      "epoch": 0.13183520599250936,
      "grad_norm": 1.155143141746521,
      "learning_rate": 4.38e-06,
      "loss": 0.0235,
      "step": 220
    },
    {
      "epoch": 0.13782771535580524,
      "grad_norm": 1.398034691810608,
      "learning_rate": 4.58e-06,
      "loss": 0.0256,
      "step": 230
    },
    {
      "epoch": 0.14382022471910114,
      "grad_norm": 1.704981803894043,
      "learning_rate": 4.78e-06,
      "loss": 0.0314,
      "step": 240
    },
    {
      "epoch": 0.149812734082397,
      "grad_norm": 2.0833070278167725,
      "learning_rate": 4.980000000000001e-06,
      "loss": 0.0292,
      "step": 250
    },
    {
      "epoch": 0.15580524344569288,
      "grad_norm": 1.6899584531784058,
      "learning_rate": 5.18e-06,
      "loss": 0.0226,
      "step": 260
    },
    {
      "epoch": 0.16179775280898875,
      "grad_norm": 1.5703374147415161,
      "learning_rate": 5.380000000000001e-06,
      "loss": 0.0251,
      "step": 270
    },
    {
      "epoch": 0.16779026217228465,
      "grad_norm": 0.9077961444854736,
      "learning_rate": 5.580000000000001e-06,
      "loss": 0.0193,
      "step": 280
    },
    {
      "epoch": 0.17378277153558053,
      "grad_norm": 2.0035812854766846,
      "learning_rate": 5.78e-06,
      "loss": 0.0231,
      "step": 290
    },
    {
      "epoch": 0.1797752808988764,
      "grad_norm": 2.8311734199523926,
      "learning_rate": 5.98e-06,
      "loss": 0.0326,
      "step": 300
    },
    {
      "epoch": 0.18576779026217227,
      "grad_norm": 1.6007928848266602,
      "learning_rate": 6.18e-06,
      "loss": 0.0231,
      "step": 310
    },
    {
      "epoch": 0.19176029962546817,
      "grad_norm": 1.3683286905288696,
      "learning_rate": 6.380000000000001e-06,
      "loss": 0.026,
      "step": 320
    },
    {
      "epoch": 0.19775280898876405,
      "grad_norm": 2.6587653160095215,
      "learning_rate": 6.5800000000000005e-06,
      "loss": 0.0288,
      "step": 330
    },
    {
      "epoch": 0.20374531835205992,
      "grad_norm": 2.617583751678467,
      "learning_rate": 6.780000000000001e-06,
      "loss": 0.0297,
      "step": 340
    },
    {
      "epoch": 0.20973782771535582,
      "grad_norm": 1.4291918277740479,
      "learning_rate": 6.98e-06,
      "loss": 0.0172,
      "step": 350
    },
    {
      "epoch": 0.2157303370786517,
      "grad_norm": 1.9586079120635986,
      "learning_rate": 7.180000000000001e-06,
      "loss": 0.0413,
      "step": 360
    },
    {
      "epoch": 0.22172284644194756,
      "grad_norm": 2.081557273864746,
      "learning_rate": 7.3800000000000005e-06,
      "loss": 0.0297,
      "step": 370
    },
    {
      "epoch": 0.22771535580524344,
      "grad_norm": 1.5032098293304443,
      "learning_rate": 7.58e-06,
      "loss": 0.027,
      "step": 380
    },
    {
      "epoch": 0.23370786516853934,
      "grad_norm": 2.316767454147339,
      "learning_rate": 7.78e-06,
      "loss": 0.0291,
      "step": 390
    },
    {
      "epoch": 0.2397003745318352,
      "grad_norm": 3.565624475479126,
      "learning_rate": 7.980000000000002e-06,
      "loss": 0.0335,
      "step": 400
    },
    {
      "epoch": 0.24569288389513108,
      "grad_norm": 1.4883167743682861,
      "learning_rate": 8.18e-06,
      "loss": 0.0254,
      "step": 410
    },
    {
      "epoch": 0.251685393258427,
      "grad_norm": 2.2969090938568115,
      "learning_rate": 8.380000000000001e-06,
      "loss": 0.0401,
      "step": 420
    },
    {
      "epoch": 0.25767790262172285,
      "grad_norm": 1.9346531629562378,
      "learning_rate": 8.580000000000001e-06,
      "loss": 0.0254,
      "step": 430
    },
    {
      "epoch": 0.2636704119850187,
      "grad_norm": 0.6965954899787903,
      "learning_rate": 8.78e-06,
      "loss": 0.0248,
      "step": 440
    },
    {
      "epoch": 0.2696629213483146,
      "grad_norm": 3.862004041671753,
      "learning_rate": 8.98e-06,
      "loss": 0.0392,
      "step": 450
    },
    {
      "epoch": 0.27565543071161047,
      "grad_norm": 1.418031930923462,
      "learning_rate": 9.180000000000002e-06,
      "loss": 0.0364,
      "step": 460
    },
    {
      "epoch": 0.28164794007490634,
      "grad_norm": 1.2769050598144531,
      "learning_rate": 9.38e-06,
      "loss": 0.0239,
      "step": 470
    },
    {
      "epoch": 0.2876404494382023,
      "grad_norm": 2.03157901763916,
      "learning_rate": 9.58e-06,
      "loss": 0.0302,
      "step": 480
    },
    {
      "epoch": 0.29363295880149815,
      "grad_norm": 2.0723016262054443,
      "learning_rate": 9.780000000000001e-06,
      "loss": 0.0329,
      "step": 490
    },
    {
      "epoch": 0.299625468164794,
      "grad_norm": 1.585113763809204,
      "learning_rate": 9.980000000000001e-06,
      "loss": 0.0268,
      "step": 500
    },
    {
      "epoch": 0.3056179775280899,
      "grad_norm": 2.0149030685424805,
      "learning_rate": 9.968287526427062e-06,
      "loss": 0.0321,
      "step": 510
    },
    {
      "epoch": 0.31161048689138576,
      "grad_norm": 1.5547727346420288,
      "learning_rate": 9.933051444679352e-06,
      "loss": 0.0298,
      "step": 520
    },
    {
      "epoch": 0.31760299625468164,
      "grad_norm": 2.3159093856811523,
      "learning_rate": 9.897815362931644e-06,
      "loss": 0.0328,
      "step": 530
    },
    {
      "epoch": 0.3235955056179775,
      "grad_norm": 2.6754517555236816,
      "learning_rate": 9.862579281183932e-06,
      "loss": 0.0437,
      "step": 540
    },
    {
      "epoch": 0.3295880149812734,
      "grad_norm": 1.470944881439209,
      "learning_rate": 9.827343199436224e-06,
      "loss": 0.0324,
      "step": 550
    },
    {
      "epoch": 0.3355805243445693,
      "grad_norm": 1.461867094039917,
      "learning_rate": 9.792107117688514e-06,
      "loss": 0.0391,
      "step": 560
    },
    {
      "epoch": 0.3415730337078652,
      "grad_norm": 2.258100986480713,
      "learning_rate": 9.756871035940804e-06,
      "loss": 0.0382,
      "step": 570
    },
    {
      "epoch": 0.34756554307116105,
      "grad_norm": 2.2667694091796875,
      "learning_rate": 9.721634954193094e-06,
      "loss": 0.0393,
      "step": 580
    },
    {
      "epoch": 0.3535580524344569,
      "grad_norm": 2.1521716117858887,
      "learning_rate": 9.686398872445385e-06,
      "loss": 0.0242,
      "step": 590
    },
    {
      "epoch": 0.3595505617977528,
      "grad_norm": 2.3627607822418213,
      "learning_rate": 9.651162790697676e-06,
      "loss": 0.0303,
      "step": 600
    },
    {
      "epoch": 0.36554307116104867,
      "grad_norm": 2.5504517555236816,
      "learning_rate": 9.615926708949965e-06,
      "loss": 0.0384,
      "step": 610
    },
    {
      "epoch": 0.37153558052434454,
      "grad_norm": 2.1967527866363525,
      "learning_rate": 9.580690627202257e-06,
      "loss": 0.033,
      "step": 620
    },
    {
      "epoch": 0.3775280898876405,
      "grad_norm": 1.6752692461013794,
      "learning_rate": 9.545454545454547e-06,
      "loss": 0.0246,
      "step": 630
    },
    {
      "epoch": 0.38352059925093634,
      "grad_norm": 1.6692609786987305,
      "learning_rate": 9.510218463706837e-06,
      "loss": 0.0339,
      "step": 640
    },
    {
      "epoch": 0.3895131086142322,
      "grad_norm": 2.7334742546081543,
      "learning_rate": 9.474982381959127e-06,
      "loss": 0.0276,
      "step": 650
    },
    {
      "epoch": 0.3955056179775281,
      "grad_norm": 1.5142534971237183,
      "learning_rate": 9.439746300211417e-06,
      "loss": 0.0347,
      "step": 660
    },
    {
      "epoch": 0.40149812734082396,
      "grad_norm": 2.5328900814056396,
      "learning_rate": 9.404510218463707e-06,
      "loss": 0.0295,
      "step": 670
    },
    {
      "epoch": 0.40749063670411984,
      "grad_norm": 3.101393938064575,
      "learning_rate": 9.369274136715997e-06,
      "loss": 0.0409,
      "step": 680
    },
    {
      "epoch": 0.4134831460674157,
      "grad_norm": 2.7148630619049072,
      "learning_rate": 9.33403805496829e-06,
      "loss": 0.0311,
      "step": 690
    },
    {
      "epoch": 0.41947565543071164,
      "grad_norm": 1.6952073574066162,
      "learning_rate": 9.298801973220578e-06,
      "loss": 0.0341,
      "step": 700
    },
    {
      "epoch": 0.4254681647940075,
      "grad_norm": 2.545931577682495,
      "learning_rate": 9.26356589147287e-06,
      "loss": 0.036,
      "step": 710
    },
    {
      "epoch": 0.4314606741573034,
      "grad_norm": 2.246628999710083,
      "learning_rate": 9.22832980972516e-06,
      "loss": 0.0424,
      "step": 720
    },
    {
      "epoch": 0.43745318352059925,
      "grad_norm": 1.6698744297027588,
      "learning_rate": 9.19309372797745e-06,
      "loss": 0.0416,
      "step": 730
    },
    {
      "epoch": 0.4434456928838951,
      "grad_norm": 1.2881724834442139,
      "learning_rate": 9.15785764622974e-06,
      "loss": 0.0366,
      "step": 740
    },
    {
      "epoch": 0.449438202247191,
      "grad_norm": 1.7771434783935547,
      "learning_rate": 9.12262156448203e-06,
      "loss": 0.0309,
      "step": 750
    },
    {
      "epoch": 0.45543071161048687,
      "grad_norm": 1.563678503036499,
      "learning_rate": 9.087385482734322e-06,
      "loss": 0.0376,
      "step": 760
    },
    {
      "epoch": 0.46142322097378274,
      "grad_norm": 2.24344801902771,
      "learning_rate": 9.05214940098661e-06,
      "loss": 0.0332,
      "step": 770
    },
    {
      "epoch": 0.46741573033707867,
      "grad_norm": 3.014191150665283,
      "learning_rate": 9.016913319238902e-06,
      "loss": 0.0288,
      "step": 780
    },
    {
      "epoch": 0.47340823970037454,
      "grad_norm": 2.138899564743042,
      "learning_rate": 8.981677237491192e-06,
      "loss": 0.0332,
      "step": 790
    },
    {
      "epoch": 0.4794007490636704,
      "grad_norm": 2.4274239540100098,
      "learning_rate": 8.946441155743482e-06,
      "loss": 0.0426,
      "step": 800
    },
    {
      "epoch": 0.4853932584269663,
      "grad_norm": 1.720563292503357,
      "learning_rate": 8.911205073995772e-06,
      "loss": 0.0322,
      "step": 810
    },
    {
      "epoch": 0.49138576779026216,
      "grad_norm": 2.580158233642578,
      "learning_rate": 8.875968992248062e-06,
      "loss": 0.0337,
      "step": 820
    },
    {
      "epoch": 0.49737827715355803,
      "grad_norm": 1.918359398841858,
      "learning_rate": 8.840732910500353e-06,
      "loss": 0.0276,
      "step": 830
    },
    {
      "epoch": 0.503370786516854,
      "grad_norm": 3.0238075256347656,
      "learning_rate": 8.805496828752643e-06,
      "loss": 0.0363,
      "step": 840
    },
    {
      "epoch": 0.5093632958801498,
      "grad_norm": 1.9104974269866943,
      "learning_rate": 8.770260747004935e-06,
      "loss": 0.0372,
      "step": 850
    },
    {
      "epoch": 0.5153558052434457,
      "grad_norm": 4.188656806945801,
      "learning_rate": 8.735024665257225e-06,
      "loss": 0.0374,
      "step": 860
    },
    {
      "epoch": 0.5213483146067416,
      "grad_norm": 2.6487040519714355,
      "learning_rate": 8.699788583509515e-06,
      "loss": 0.0305,
      "step": 870
    },
    {
      "epoch": 0.5273408239700375,
      "grad_norm": 2.9171149730682373,
      "learning_rate": 8.664552501761805e-06,
      "loss": 0.0321,
      "step": 880
    },
    {
      "epoch": 0.5333333333333333,
      "grad_norm": 2.196499824523926,
      "learning_rate": 8.629316420014095e-06,
      "loss": 0.0497,
      "step": 890
    },
    {
      "epoch": 0.5393258426966292,
      "grad_norm": 2.324110507965088,
      "learning_rate": 8.594080338266385e-06,
      "loss": 0.0411,
      "step": 900
    },
    {
      "epoch": 0.5453183520599251,
      "grad_norm": 2.23406720161438,
      "learning_rate": 8.558844256518675e-06,
      "loss": 0.0302,
      "step": 910
    },
    {
      "epoch": 0.5513108614232209,
      "grad_norm": 2.158843994140625,
      "learning_rate": 8.523608174770967e-06,
      "loss": 0.0349,
      "step": 920
    },
    {
      "epoch": 0.5573033707865168,
      "grad_norm": 2.5448203086853027,
      "learning_rate": 8.488372093023256e-06,
      "loss": 0.0338,
      "step": 930
    },
    {
      "epoch": 0.5632958801498127,
      "grad_norm": 2.366697072982788,
      "learning_rate": 8.453136011275547e-06,
      "loss": 0.0307,
      "step": 940
    },
    {
      "epoch": 0.5692883895131086,
      "grad_norm": 1.7669707536697388,
      "learning_rate": 8.417899929527837e-06,
      "loss": 0.0374,
      "step": 950
    },
    {
      "epoch": 0.5752808988764045,
      "grad_norm": 1.8498706817626953,
      "learning_rate": 8.382663847780128e-06,
      "loss": 0.0368,
      "step": 960
    },
    {
      "epoch": 0.5812734082397004,
      "grad_norm": 1.1811097860336304,
      "learning_rate": 8.347427766032418e-06,
      "loss": 0.0319,
      "step": 970
    },
    {
      "epoch": 0.5872659176029963,
      "grad_norm": 1.306626796722412,
      "learning_rate": 8.312191684284708e-06,
      "loss": 0.0409,
      "step": 980
    },
    {
      "epoch": 0.5932584269662922,
      "grad_norm": 1.9113121032714844,
      "learning_rate": 8.276955602537e-06,
      "loss": 0.0329,
      "step": 990
    },
    {
      "epoch": 0.599250936329588,
      "grad_norm": 1.8065577745437622,
      "learning_rate": 8.241719520789288e-06,
      "loss": 0.0261,
      "step": 1000
    },
    {
      "epoch": 0.6052434456928839,
      "grad_norm": 1.644216775894165,
      "learning_rate": 8.20648343904158e-06,
      "loss": 0.0455,
      "step": 1010
    },
    {
      "epoch": 0.6112359550561798,
      "grad_norm": 2.524791955947876,
      "learning_rate": 8.17124735729387e-06,
      "loss": 0.0378,
      "step": 1020
    },
    {
      "epoch": 0.6172284644194757,
      "grad_norm": 4.8924880027771,
      "learning_rate": 8.13601127554616e-06,
      "loss": 0.0458,
      "step": 1030
    },
    {
      "epoch": 0.6232209737827715,
      "grad_norm": 2.6112005710601807,
      "learning_rate": 8.10077519379845e-06,
      "loss": 0.0388,
      "step": 1040
    },
    {
      "epoch": 0.6292134831460674,
      "grad_norm": 2.2493460178375244,
      "learning_rate": 8.06553911205074e-06,
      "loss": 0.0451,
      "step": 1050
    },
    {
      "epoch": 0.6352059925093633,
      "grad_norm": 1.854641079902649,
      "learning_rate": 8.03030303030303e-06,
      "loss": 0.0434,
      "step": 1060
    },
    {
      "epoch": 0.6411985018726591,
      "grad_norm": 2.749845027923584,
      "learning_rate": 7.99506694855532e-06,
      "loss": 0.0291,
      "step": 1070
    },
    {
      "epoch": 0.647191011235955,
      "grad_norm": 3.08630108833313,
      "learning_rate": 7.959830866807612e-06,
      "loss": 0.0256,
      "step": 1080
    },
    {
      "epoch": 0.6531835205992509,
      "grad_norm": 1.4188586473464966,
      "learning_rate": 7.924594785059903e-06,
      "loss": 0.0363,
      "step": 1090
    },
    {
      "epoch": 0.6591760299625468,
      "grad_norm": 1.34322988986969,
      "learning_rate": 7.889358703312193e-06,
      "loss": 0.0313,
      "step": 1100
    },
    {
      "epoch": 0.6651685393258427,
      "grad_norm": 1.7974929809570312,
      "learning_rate": 7.854122621564483e-06,
      "loss": 0.0245,
      "step": 1110
    },
    {
      "epoch": 0.6711610486891386,
      "grad_norm": 2.5316739082336426,
      "learning_rate": 7.818886539816773e-06,
      "loss": 0.0355,
      "step": 1120
    },
    {
      "epoch": 0.6771535580524345,
      "grad_norm": 3.0023632049560547,
      "learning_rate": 7.783650458069063e-06,
      "loss": 0.0344,
      "step": 1130
    },
    {
      "epoch": 0.6831460674157304,
      "grad_norm": 2.484605550765991,
      "learning_rate": 7.748414376321353e-06,
      "loss": 0.0442,
      "step": 1140
    },
    {
      "epoch": 0.6891385767790262,
      "grad_norm": 2.7692389488220215,
      "learning_rate": 7.713178294573645e-06,
      "loss": 0.0389,
      "step": 1150
    },
    {
      "epoch": 0.6951310861423221,
      "grad_norm": 1.1416544914245605,
      "learning_rate": 7.677942212825933e-06,
      "loss": 0.0357,
      "step": 1160
    },
    {
      "epoch": 0.701123595505618,
      "grad_norm": 2.1747801303863525,
      "learning_rate": 7.642706131078225e-06,
      "loss": 0.0329,
      "step": 1170
    },
    {
      "epoch": 0.7071161048689139,
      "grad_norm": 2.4001238346099854,
      "learning_rate": 7.607470049330515e-06,
      "loss": 0.0327,
      "step": 1180
    },
    {
      "epoch": 0.7131086142322097,
      "grad_norm": 2.489826202392578,
      "learning_rate": 7.572233967582805e-06,
      "loss": 0.038,
      "step": 1190
    },
    {
      "epoch": 0.7191011235955056,
      "grad_norm": 2.055882453918457,
      "learning_rate": 7.5369978858350956e-06,
      "loss": 0.0268,
      "step": 1200
    },
    {
      "epoch": 0.7250936329588015,
      "grad_norm": 1.705216646194458,
      "learning_rate": 7.5017618040873865e-06,
      "loss": 0.0258,
      "step": 1210
    },
    {
      "epoch": 0.7310861423220973,
      "grad_norm": 1.2729088068008423,
      "learning_rate": 7.466525722339677e-06,
      "loss": 0.0352,
      "step": 1220
    },
    {
      "epoch": 0.7370786516853932,
      "grad_norm": 4.454958438873291,
      "learning_rate": 7.431289640591967e-06,
      "loss": 0.0343,
      "step": 1230
    },
    {
      "epoch": 0.7430711610486891,
      "grad_norm": 1.8376761674880981,
      "learning_rate": 7.396053558844257e-06,
      "loss": 0.0337,
      "step": 1240
    },
    {
      "epoch": 0.7490636704119851,
      "grad_norm": 2.3845252990722656,
      "learning_rate": 7.360817477096548e-06,
      "loss": 0.0406,
      "step": 1250
    },
    {
      "epoch": 0.755056179775281,
      "grad_norm": 6.822844982147217,
      "learning_rate": 7.325581395348837e-06,
      "loss": 0.0417,
      "step": 1260
    },
    {
      "epoch": 0.7610486891385768,
      "grad_norm": 7.371566295623779,
      "learning_rate": 7.290345313601128e-06,
      "loss": 0.0271,
      "step": 1270
    },
    {
      "epoch": 0.7670411985018727,
      "grad_norm": 1.3738433122634888,
      "learning_rate": 7.255109231853419e-06,
      "loss": 0.0296,
      "step": 1280
    },
    {
      "epoch": 0.7730337078651686,
      "grad_norm": 1.8998222351074219,
      "learning_rate": 7.219873150105708e-06,
      "loss": 0.0317,
      "step": 1290
    },
    {
      "epoch": 0.7790262172284644,
      "grad_norm": 2.569655418395996,
      "learning_rate": 7.184637068357999e-06,
      "loss": 0.0305,
      "step": 1300
    },
    {
      "epoch": 0.7850187265917603,
      "grad_norm": 1.7254374027252197,
      "learning_rate": 7.1494009866102894e-06,
      "loss": 0.0327,
      "step": 1310
    },
    {
      "epoch": 0.7910112359550562,
      "grad_norm": 2.161518096923828,
      "learning_rate": 7.1141649048625796e-06,
      "loss": 0.0329,
      "step": 1320
    },
    {
      "epoch": 0.797003745318352,
      "grad_norm": 1.4740070104599,
      "learning_rate": 7.07892882311487e-06,
      "loss": 0.0346,
      "step": 1330
    },
    {
      "epoch": 0.8029962546816479,
      "grad_norm": 2.4593379497528076,
      "learning_rate": 7.043692741367161e-06,
      "loss": 0.0267,
      "step": 1340
    },
    {
      "epoch": 0.8089887640449438,
      "grad_norm": 1.8585057258605957,
      "learning_rate": 7.008456659619452e-06,
      "loss": 0.0262,
      "step": 1350
    },
    {
      "epoch": 0.8149812734082397,
      "grad_norm": 2.7434608936309814,
      "learning_rate": 6.973220577871741e-06,
      "loss": 0.039,
      "step": 1360
    },
    {
      "epoch": 0.8209737827715355,
      "grad_norm": 2.6166908740997314,
      "learning_rate": 6.937984496124032e-06,
      "loss": 0.0346,
      "step": 1370
    },
    {
      "epoch": 0.8269662921348314,
      "grad_norm": 1.4991202354431152,
      "learning_rate": 6.902748414376322e-06,
      "loss": 0.0355,
      "step": 1380
    },
    {
      "epoch": 0.8329588014981273,
      "grad_norm": 1.4986398220062256,
      "learning_rate": 6.867512332628612e-06,
      "loss": 0.0332,
      "step": 1390
    },
    {
      "epoch": 0.8389513108614233,
      "grad_norm": 2.284785509109497,
      "learning_rate": 6.832276250880902e-06,
      "loss": 0.0301,
      "step": 1400
    },
    {
      "epoch": 0.8449438202247191,
      "grad_norm": 1.3910906314849854,
      "learning_rate": 6.797040169133193e-06,
      "loss": 0.024,
      "step": 1410
    },
    {
      "epoch": 0.850936329588015,
      "grad_norm": 2.0242066383361816,
      "learning_rate": 6.7618040873854825e-06,
      "loss": 0.0359,
      "step": 1420
    },
    {
      "epoch": 0.8569288389513109,
      "grad_norm": 2.430185317993164,
      "learning_rate": 6.7265680056377734e-06,
      "loss": 0.0226,
      "step": 1430
    },
    {
      "epoch": 0.8629213483146068,
      "grad_norm": 0.8888330459594727,
      "learning_rate": 6.691331923890064e-06,
      "loss": 0.0289,
      "step": 1440
    },
    {
      "epoch": 0.8689138576779026,
      "grad_norm": 1.8304884433746338,
      "learning_rate": 6.656095842142354e-06,
      "loss": 0.0238,
      "step": 1450
    },
    {
      "epoch": 0.8749063670411985,
      "grad_norm": 2.193533182144165,
      "learning_rate": 6.620859760394645e-06,
      "loss": 0.0385,
      "step": 1460
    },
    {
      "epoch": 0.8808988764044944,
      "grad_norm": 2.304433822631836,
      "learning_rate": 6.585623678646935e-06,
      "loss": 0.0341,
      "step": 1470
    },
    {
      "epoch": 0.8868913857677903,
      "grad_norm": 1.412208080291748,
      "learning_rate": 6.550387596899226e-06,
      "loss": 0.0262,
      "step": 1480
    },
    {
      "epoch": 0.8928838951310861,
      "grad_norm": 0.917147696018219,
      "learning_rate": 6.515151515151516e-06,
      "loss": 0.0219,
      "step": 1490
    },
    {
      "epoch": 0.898876404494382,
      "grad_norm": 1.979040265083313,
      "learning_rate": 6.479915433403806e-06,
      "loss": 0.0396,
      "step": 1500
    },
    {
      "epoch": 0.9048689138576779,
      "grad_norm": 1.5810662508010864,
      "learning_rate": 6.444679351656097e-06,
      "loss": 0.0274,
      "step": 1510
    },
    {
      "epoch": 0.9108614232209737,
      "grad_norm": 2.405066728591919,
      "learning_rate": 6.409443269908386e-06,
      "loss": 0.0388,
      "step": 1520
    },
    {
      "epoch": 0.9168539325842696,
      "grad_norm": 2.646991014480591,
      "learning_rate": 6.374207188160677e-06,
      "loss": 0.0392,
      "step": 1530
    },
    {
      "epoch": 0.9228464419475655,
      "grad_norm": 2.1389219760894775,
      "learning_rate": 6.338971106412968e-06,
      "loss": 0.0345,
      "step": 1540
    },
    {
      "epoch": 0.9288389513108615,
      "grad_norm": 1.683204174041748,
      "learning_rate": 6.3037350246652574e-06,
      "loss": 0.0354,
      "step": 1550
    },
    {
      "epoch": 0.9348314606741573,
      "grad_norm": 2.031403064727783,
      "learning_rate": 6.268498942917548e-06,
      "loss": 0.0263,
      "step": 1560
    },
    {
      "epoch": 0.9408239700374532,
      "grad_norm": 2.195925712585449,
      "learning_rate": 6.2332628611698385e-06,
      "loss": 0.028,
      "step": 1570
    },
    {
      "epoch": 0.9468164794007491,
      "grad_norm": 1.9090993404388428,
      "learning_rate": 6.198026779422129e-06,
      "loss": 0.0339,
      "step": 1580
    },
    {
      "epoch": 0.952808988764045,
      "grad_norm": 2.7698168754577637,
      "learning_rate": 6.162790697674419e-06,
      "loss": 0.0412,
      "step": 1590
    },
    {
      "epoch": 0.9588014981273408,
      "grad_norm": 2.477466106414795,
      "learning_rate": 6.12755461592671e-06,
      "loss": 0.0307,
      "step": 1600
    },
    {
      "epoch": 0.9647940074906367,
      "grad_norm": 1.7105556726455688,
      "learning_rate": 6.092318534179001e-06,
      "loss": 0.0378,
      "step": 1610
    },
    {
      "epoch": 0.9707865168539326,
      "grad_norm": 1.9348987340927124,
      "learning_rate": 6.05708245243129e-06,
      "loss": 0.0312,
      "step": 1620
    },
    {
      "epoch": 0.9767790262172285,
      "grad_norm": 2.8063669204711914,
      "learning_rate": 6.021846370683581e-06,
      "loss": 0.0321,
      "step": 1630
    },
    {
      "epoch": 0.9827715355805243,
      "grad_norm": 1.3787035942077637,
      "learning_rate": 5.986610288935871e-06,
      "loss": 0.0204,
      "step": 1640
    },
    {
      "epoch": 0.9887640449438202,
      "grad_norm": 1.0348024368286133,
      "learning_rate": 5.951374207188161e-06,
      "loss": 0.0269,
      "step": 1650
    },
    {
      "epoch": 0.9947565543071161,
      "grad_norm": 2.3629918098449707,
      "learning_rate": 5.916138125440451e-06,
      "loss": 0.0289,
      "step": 1660
    },
    {
      "epoch": 1.0005992509363295,
      "grad_norm": 1.635030746459961,
      "learning_rate": 5.880902043692742e-06,
      "loss": 0.0271,
      "step": 1670
    },
    {
      "epoch": 1.0065917602996255,
      "grad_norm": 1.2064359188079834,
      "learning_rate": 5.8456659619450316e-06,
      "loss": 0.0068,
      "step": 1680
    },
    {
      "epoch": 1.0125842696629213,
      "grad_norm": 1.9273971319198608,
      "learning_rate": 5.8104298801973225e-06,
      "loss": 0.0074,
      "step": 1690
    },
    {
      "epoch": 1.0185767790262172,
      "grad_norm": 1.0188418626785278,
      "learning_rate": 5.7751937984496135e-06,
      "loss": 0.0124,
      "step": 1700
    },
    {
      "epoch": 1.024569288389513,
      "grad_norm": 0.795347273349762,
      "learning_rate": 5.739957716701903e-06,
      "loss": 0.0102,
      "step": 1710
    },
    {
      "epoch": 1.030561797752809,
      "grad_norm": 0.6345170736312866,
      "learning_rate": 5.704721634954194e-06,
      "loss": 0.009,
      "step": 1720
    },
    {
      "epoch": 1.036554307116105,
      "grad_norm": 0.6679102778434753,
      "learning_rate": 5.669485553206484e-06,
      "loss": 0.0086,
      "step": 1730
    },
    {
      "epoch": 1.0425468164794007,
      "grad_norm": 2.5230817794799805,
      "learning_rate": 5.634249471458775e-06,
      "loss": 0.0076,
      "step": 1740
    },
    {
      "epoch": 1.0485393258426967,
      "grad_norm": 0.9390186667442322,
      "learning_rate": 5.599013389711064e-06,
      "loss": 0.0056,
      "step": 1750
    },
    {
      "epoch": 1.0545318352059925,
      "grad_norm": 1.2290983200073242,
      "learning_rate": 5.563777307963355e-06,
      "loss": 0.0067,
      "step": 1760
    },
    {
      "epoch": 1.0605243445692885,
      "grad_norm": 1.8171833753585815,
      "learning_rate": 5.528541226215646e-06,
      "loss": 0.009,
      "step": 1770
    },
    {
      "epoch": 1.0665168539325842,
      "grad_norm": 1.3471766710281372,
      "learning_rate": 5.493305144467935e-06,
      "loss": 0.0061,
      "step": 1780
    },
    {
      "epoch": 1.0725093632958802,
      "grad_norm": 0.8515785336494446,
      "learning_rate": 5.458069062720226e-06,
      "loss": 0.0059,
      "step": 1790
    },
    {
      "epoch": 1.078501872659176,
      "grad_norm": 1.4226453304290771,
      "learning_rate": 5.422832980972516e-06,
      "loss": 0.0098,
      "step": 1800
    },
    {
      "epoch": 1.084494382022472,
      "grad_norm": 2.040360927581787,
      "learning_rate": 5.3875968992248065e-06,
      "loss": 0.009,
      "step": 1810
    },
    {
      "epoch": 1.0904868913857677,
      "grad_norm": 1.0943787097930908,
      "learning_rate": 5.352360817477097e-06,
      "loss": 0.0107,
      "step": 1820
    },
    {
      "epoch": 1.0964794007490637,
      "grad_norm": 2.117577314376831,
      "learning_rate": 5.317124735729388e-06,
      "loss": 0.0082,
      "step": 1830
    },
    {
      "epoch": 1.1024719101123595,
      "grad_norm": 1.5584527254104614,
      "learning_rate": 5.281888653981677e-06,
      "loss": 0.0058,
      "step": 1840
    },
    {
      "epoch": 1.1084644194756554,
      "grad_norm": 1.2435089349746704,
      "learning_rate": 5.246652572233968e-06,
      "loss": 0.0073,
      "step": 1850
    },
    {
      "epoch": 1.1144569288389512,
      "grad_norm": 1.123214602470398,
      "learning_rate": 5.211416490486259e-06,
      "loss": 0.0081,
      "step": 1860
    },
    {
      "epoch": 1.1204494382022472,
      "grad_norm": 0.5520896315574646,
      "learning_rate": 5.176180408738549e-06,
      "loss": 0.009,
      "step": 1870
    },
    {
      "epoch": 1.1264419475655432,
      "grad_norm": 1.1572812795639038,
      "learning_rate": 5.140944326990839e-06,
      "loss": 0.0062,
      "step": 1880
    },
    {
      "epoch": 1.132434456928839,
      "grad_norm": 1.0478726625442505,
      "learning_rate": 5.105708245243129e-06,
      "loss": 0.0057,
      "step": 1890
    },
    {
      "epoch": 1.138426966292135,
      "grad_norm": 0.7916937470436096,
      "learning_rate": 5.07047216349542e-06,
      "loss": 0.0068,
      "step": 1900
    },
    {
      "epoch": 1.1444194756554307,
      "grad_norm": 0.804720401763916,
      "learning_rate": 5.0352360817477094e-06,
      "loss": 0.0052,
      "step": 1910
    },
    {
      "epoch": 1.1504119850187267,
      "grad_norm": 0.8383046984672546,
      "learning_rate": 5e-06,
      "loss": 0.0103,
      "step": 1920
    },
    {
      "epoch": 1.1564044943820224,
      "grad_norm": 0.44970327615737915,
      "learning_rate": 4.9647639182522905e-06,
      "loss": 0.0086,
      "step": 1930
    },
    {
      "epoch": 1.1623970037453184,
      "grad_norm": 1.1988434791564941,
      "learning_rate": 4.9295278365045815e-06,
      "loss": 0.0091,
      "step": 1940
    },
    {
      "epoch": 1.1683895131086142,
      "grad_norm": 0.7889288067817688,
      "learning_rate": 4.894291754756872e-06,
      "loss": 0.0057,
      "step": 1950
    },
    {
      "epoch": 1.1743820224719101,
      "grad_norm": 0.5470920205116272,
      "learning_rate": 4.859055673009162e-06,
      "loss": 0.0072,
      "step": 1960
    },
    {
      "epoch": 1.180374531835206,
      "grad_norm": 1.7936517000198364,
      "learning_rate": 4.823819591261452e-06,
      "loss": 0.0048,
      "step": 1970
    },
    {
      "epoch": 1.186367041198502,
      "grad_norm": 1.2022855281829834,
      "learning_rate": 4.788583509513742e-06,
      "loss": 0.0066,
      "step": 1980
    },
    {
      "epoch": 1.1923595505617977,
      "grad_norm": 0.5040098428726196,
      "learning_rate": 4.753347427766033e-06,
      "loss": 0.0067,
      "step": 1990
    },
    {
      "epoch": 1.1983520599250936,
      "grad_norm": 0.6506677269935608,
      "learning_rate": 4.718111346018323e-06,
      "loss": 0.0045,
      "step": 2000
    },
    {
      "epoch": 1.1983520599250936,
      "eval_loss": 0.011116987094283104,
      "eval_runtime": 9693.0647,
      "eval_samples_per_second": 1.377,
      "eval_steps_per_second": 0.172,
      "eval_wer": 0.8704838022863507,
      "step": 2000
    }
  ],
  "logging_steps": 10,
  "max_steps": 3338,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 2,
  "save_steps": 2000,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 6.531054924201984e+19,
  "train_batch_size": 4,
  "trial_name": null,
  "trial_params": null
}