{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 250.0,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.25,
      "grad_norm": 13.149889945983887,
      "learning_rate": 1.8e-06,
      "loss": 1.4142,
      "step": 10
    },
    {
      "epoch": 0.5,
      "grad_norm": 5.3455119132995605,
      "learning_rate": 3.8e-06,
      "loss": 1.2033,
      "step": 20
    },
    {
      "epoch": 0.75,
      "grad_norm": 3.1435389518737793,
      "learning_rate": 5.8e-06,
      "loss": 0.6036,
      "step": 30
    },
    {
      "epoch": 1.0,
      "grad_norm": 2.23374342918396,
      "learning_rate": 7.8e-06,
      "loss": 0.3176,
      "step": 40
    },
    {
      "epoch": 1.25,
      "grad_norm": 1.6511033773422241,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2071,
      "step": 50
    },
    {
      "epoch": 1.5,
      "grad_norm": 1.4293673038482666,
      "learning_rate": 1.18e-05,
      "loss": 0.1331,
      "step": 60
    },
    {
      "epoch": 1.75,
      "grad_norm": 1.4380099773406982,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.09,
      "step": 70
    },
    {
      "epoch": 2.0,
      "grad_norm": 0.8167829513549805,
      "learning_rate": 1.58e-05,
      "loss": 0.0758,
      "step": 80
    },
    {
      "epoch": 2.25,
      "grad_norm": 0.9906498789787292,
      "learning_rate": 1.78e-05,
      "loss": 0.0559,
      "step": 90
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.940280556678772,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.0599,
      "step": 100
    },
    {
      "epoch": 2.75,
      "grad_norm": 0.5989202260971069,
      "learning_rate": 2.18e-05,
      "loss": 0.0496,
      "step": 110
    },
    {
      "epoch": 3.0,
      "grad_norm": 1.060807228088379,
      "learning_rate": 2.38e-05,
      "loss": 0.0467,
      "step": 120
    },
    {
      "epoch": 3.25,
      "grad_norm": 0.7101704478263855,
      "learning_rate": 2.58e-05,
      "loss": 0.0433,
      "step": 130
    },
    {
      "epoch": 3.5,
      "grad_norm": 0.6841318011283875,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.0421,
      "step": 140
    },
    {
      "epoch": 3.75,
      "grad_norm": 0.9395173788070679,
      "learning_rate": 2.98e-05,
      "loss": 0.0414,
      "step": 150
    },
    {
      "epoch": 4.0,
      "grad_norm": 0.6530765295028687,
      "learning_rate": 3.18e-05,
      "loss": 0.0364,
      "step": 160
    },
    {
      "epoch": 4.25,
      "grad_norm": 0.5677581429481506,
      "learning_rate": 3.38e-05,
      "loss": 0.0316,
      "step": 170
    },
    {
      "epoch": 4.5,
      "grad_norm": 0.6642166376113892,
      "learning_rate": 3.58e-05,
      "loss": 0.0374,
      "step": 180
    },
    {
      "epoch": 4.75,
      "grad_norm": 0.5084357857704163,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.033,
      "step": 190
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.40896496176719666,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.036,
      "step": 200
    },
    {
      "epoch": 5.25,
      "grad_norm": 0.8260584473609924,
      "learning_rate": 4.18e-05,
      "loss": 0.0413,
      "step": 210
    },
    {
      "epoch": 5.5,
      "grad_norm": 1.0218405723571777,
      "learning_rate": 4.38e-05,
      "loss": 0.0395,
      "step": 220
    },
    {
      "epoch": 5.75,
      "grad_norm": 0.7389956712722778,
      "learning_rate": 4.58e-05,
      "loss": 0.0303,
      "step": 230
    },
    {
      "epoch": 6.0,
      "grad_norm": 0.8274843096733093,
      "learning_rate": 4.78e-05,
      "loss": 0.0384,
      "step": 240
    },
    {
      "epoch": 6.25,
      "grad_norm": 0.9301137328147888,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0361,
      "step": 250
    },
    {
      "epoch": 6.5,
      "grad_norm": 0.8189093470573425,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0362,
      "step": 260
    },
    {
      "epoch": 6.75,
      "grad_norm": 1.1223821640014648,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.037,
      "step": 270
    },
    {
      "epoch": 7.0,
      "grad_norm": 0.6035957336425781,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.0298,
      "step": 280
    },
    {
      "epoch": 7.25,
      "grad_norm": 0.35879412293434143,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.038,
      "step": 290
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.6832134127616882,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0342,
      "step": 300
    },
    {
      "epoch": 7.75,
      "grad_norm": 1.0018675327301025,
      "learning_rate": 6.18e-05,
      "loss": 0.0449,
      "step": 310
    },
    {
      "epoch": 8.0,
      "grad_norm": 0.648226261138916,
      "learning_rate": 6.38e-05,
      "loss": 0.033,
      "step": 320
    },
    {
      "epoch": 8.25,
      "grad_norm": 0.8478636145591736,
      "learning_rate": 6.58e-05,
      "loss": 0.0326,
      "step": 330
    },
    {
      "epoch": 8.5,
      "grad_norm": 0.6271067261695862,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0361,
      "step": 340
    },
    {
      "epoch": 8.75,
      "grad_norm": 0.8157103061676025,
      "learning_rate": 6.98e-05,
      "loss": 0.0332,
      "step": 350
    },
    {
      "epoch": 9.0,
      "grad_norm": 0.5046970248222351,
      "learning_rate": 7.18e-05,
      "loss": 0.0333,
      "step": 360
    },
    {
      "epoch": 9.25,
      "grad_norm": 0.4200487434864044,
      "learning_rate": 7.38e-05,
      "loss": 0.0378,
      "step": 370
    },
    {
      "epoch": 9.5,
      "grad_norm": 0.4756053686141968,
      "learning_rate": 7.58e-05,
      "loss": 0.0353,
      "step": 380
    },
    {
      "epoch": 9.75,
      "grad_norm": 0.5499545335769653,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0388,
      "step": 390
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.5970879197120667,
      "learning_rate": 7.98e-05,
      "loss": 0.0292,
      "step": 400
    },
    {
      "epoch": 10.25,
      "grad_norm": 0.4654370844364166,
      "learning_rate": 8.18e-05,
      "loss": 0.0346,
      "step": 410
    },
    {
      "epoch": 10.5,
      "grad_norm": 0.4801628589630127,
      "learning_rate": 8.38e-05,
      "loss": 0.034,
      "step": 420
    },
    {
      "epoch": 10.75,
      "grad_norm": 0.5524302124977112,
      "learning_rate": 8.58e-05,
      "loss": 0.034,
      "step": 430
    },
    {
      "epoch": 11.0,
      "grad_norm": 0.6977431774139404,
      "learning_rate": 8.78e-05,
      "loss": 0.0303,
      "step": 440
    },
    {
      "epoch": 11.25,
      "grad_norm": 0.9158258438110352,
      "learning_rate": 8.98e-05,
      "loss": 0.0313,
      "step": 450
    },
    {
      "epoch": 11.5,
      "grad_norm": 0.9180648326873779,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0361,
      "step": 460
    },
    {
      "epoch": 11.75,
      "grad_norm": 0.5505082011222839,
      "learning_rate": 9.38e-05,
      "loss": 0.0375,
      "step": 470
    },
    {
      "epoch": 12.0,
      "grad_norm": 0.5206027626991272,
      "learning_rate": 9.58e-05,
      "loss": 0.0361,
      "step": 480
    },
    {
      "epoch": 12.25,
      "grad_norm": 0.6917493939399719,
      "learning_rate": 9.78e-05,
      "loss": 0.0282,
      "step": 490
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.9478809237480164,
      "learning_rate": 9.98e-05,
      "loss": 0.0337,
      "step": 500
    },
    {
      "epoch": 12.75,
      "grad_norm": 0.5493677258491516,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0317,
      "step": 510
    },
    {
      "epoch": 13.0,
      "grad_norm": 0.561867892742157,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0346,
      "step": 520
    },
    {
      "epoch": 13.25,
      "grad_norm": 0.3665112853050232,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0376,
      "step": 530
    },
    {
      "epoch": 13.5,
      "grad_norm": 0.5569449663162231,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0327,
      "step": 540
    },
    {
      "epoch": 13.75,
      "grad_norm": 0.5099448561668396,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0303,
      "step": 550
    },
    {
      "epoch": 14.0,
      "grad_norm": 0.6323690414428711,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0262,
      "step": 560
    },
    {
      "epoch": 14.25,
      "grad_norm": 0.28097397089004517,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0264,
      "step": 570
    },
    {
      "epoch": 14.5,
      "grad_norm": 0.8999001383781433,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0264,
      "step": 580
    },
    {
      "epoch": 14.75,
      "grad_norm": 0.4309835731983185,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0285,
      "step": 590
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.4197908639907837,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.029,
      "step": 600
    },
    {
      "epoch": 15.25,
      "grad_norm": 0.4405575394630432,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.032,
      "step": 610
    },
    {
      "epoch": 15.5,
      "grad_norm": 0.3575686812400818,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0232,
      "step": 620
    },
    {
      "epoch": 15.75,
      "grad_norm": 0.615228533744812,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0233,
      "step": 630
    },
    {
      "epoch": 16.0,
      "grad_norm": 0.45813262462615967,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.025,
      "step": 640
    },
    {
      "epoch": 16.25,
      "grad_norm": 0.4395560026168823,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0264,
      "step": 650
    },
    {
      "epoch": 16.5,
      "grad_norm": 0.5942047238349915,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0268,
      "step": 660
    },
    {
      "epoch": 16.75,
      "grad_norm": 0.6569145917892456,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.033,
      "step": 670
    },
    {
      "epoch": 17.0,
      "grad_norm": 0.6226444840431213,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0266,
      "step": 680
    },
    {
      "epoch": 17.25,
      "grad_norm": 0.47699910402297974,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0297,
      "step": 690
    },
    {
      "epoch": 17.5,
      "grad_norm": 0.6073171496391296,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0296,
      "step": 700
    },
    {
      "epoch": 17.75,
      "grad_norm": 0.35824280977249146,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0327,
      "step": 710
    },
    {
      "epoch": 18.0,
      "grad_norm": 0.5538836121559143,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0292,
      "step": 720
    },
    {
      "epoch": 18.25,
      "grad_norm": 0.4948605000972748,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0292,
      "step": 730
    },
    {
      "epoch": 18.5,
      "grad_norm": 0.38511842489242554,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0221,
      "step": 740
    },
    {
      "epoch": 18.75,
      "grad_norm": 0.4333004653453827,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0226,
      "step": 750
    },
    {
      "epoch": 19.0,
      "grad_norm": 0.6658897399902344,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0205,
      "step": 760
    },
    {
      "epoch": 19.25,
      "grad_norm": 0.5551711916923523,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0218,
      "step": 770
    },
    {
      "epoch": 19.5,
      "grad_norm": 0.39259231090545654,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0258,
      "step": 780
    },
    {
      "epoch": 19.75,
      "grad_norm": 0.45398399233818054,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0196,
      "step": 790
    },
    {
      "epoch": 20.0,
      "grad_norm": 0.37839898467063904,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0243,
      "step": 800
    },
    {
      "epoch": 20.25,
      "grad_norm": 0.8339304327964783,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0263,
      "step": 810
    },
    {
      "epoch": 20.5,
      "grad_norm": 0.6552790403366089,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0296,
      "step": 820
    },
    {
      "epoch": 20.75,
      "grad_norm": 0.36044272780418396,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0216,
      "step": 830
    },
    {
      "epoch": 21.0,
      "grad_norm": 0.45362478494644165,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.024,
      "step": 840
    },
    {
      "epoch": 21.25,
      "grad_norm": 0.37655627727508545,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0265,
      "step": 850
    },
    {
      "epoch": 21.5,
      "grad_norm": 0.6144092082977295,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0231,
      "step": 860
    },
    {
      "epoch": 21.75,
      "grad_norm": 0.463652640581131,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0218,
      "step": 870
    },
    {
      "epoch": 22.0,
      "grad_norm": 0.27026480436325073,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0196,
      "step": 880
    },
    {
      "epoch": 22.25,
      "grad_norm": 0.41693419218063354,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0169,
      "step": 890
    },
    {
      "epoch": 22.5,
      "grad_norm": 0.4871846139431,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0226,
      "step": 900
    },
    {
      "epoch": 22.75,
      "grad_norm": 0.4026087522506714,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0161,
      "step": 910
    },
    {
      "epoch": 23.0,
      "grad_norm": 0.26234668493270874,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0189,
      "step": 920
    },
    {
      "epoch": 23.25,
      "grad_norm": 0.38857871294021606,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0199,
      "step": 930
    },
    {
      "epoch": 23.5,
      "grad_norm": 0.3500604033470154,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.016,
      "step": 940
    },
    {
      "epoch": 23.75,
      "grad_norm": 0.3929100036621094,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0181,
      "step": 950
    },
    {
      "epoch": 24.0,
      "grad_norm": 0.2801556885242462,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0187,
      "step": 960
    },
    {
      "epoch": 24.25,
      "grad_norm": 0.3869239091873169,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.017,
      "step": 970
    },
    {
      "epoch": 24.5,
      "grad_norm": 0.3851812481880188,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0192,
      "step": 980
    },
    {
      "epoch": 24.75,
      "grad_norm": 0.623075544834137,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0246,
      "step": 990
    },
    {
      "epoch": 25.0,
      "grad_norm": 0.35598379373550415,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0206,
      "step": 1000
    },
    {
      "epoch": 25.25,
      "grad_norm": 0.6533331871032715,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0209,
      "step": 1010
    },
    {
      "epoch": 25.5,
      "grad_norm": 0.32766997814178467,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0165,
      "step": 1020
    },
    {
      "epoch": 25.75,
      "grad_norm": 0.4293682277202606,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0196,
      "step": 1030
    },
    {
      "epoch": 26.0,
      "grad_norm": 0.4344434142112732,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0229,
      "step": 1040
    },
    {
      "epoch": 26.25,
      "grad_norm": 0.4949270188808441,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0186,
      "step": 1050
    },
    {
      "epoch": 26.5,
      "grad_norm": 0.6668685674667358,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0245,
      "step": 1060
    },
    {
      "epoch": 26.75,
      "grad_norm": 0.7803040742874146,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0243,
      "step": 1070
    },
    {
      "epoch": 27.0,
      "grad_norm": 0.3841216564178467,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0207,
      "step": 1080
    },
    {
      "epoch": 27.25,
      "grad_norm": 0.5297211408615112,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0246,
      "step": 1090
    },
    {
      "epoch": 27.5,
      "grad_norm": 0.23917119204998016,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0182,
      "step": 1100
    },
    {
      "epoch": 27.75,
      "grad_norm": 0.3145104646682739,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0171,
      "step": 1110
    },
    {
      "epoch": 28.0,
      "grad_norm": 0.4466751217842102,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0183,
      "step": 1120
    },
    {
      "epoch": 28.25,
      "grad_norm": 0.6861326098442078,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0221,
      "step": 1130
    },
    {
      "epoch": 28.5,
      "grad_norm": 0.4263695478439331,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0233,
      "step": 1140
    },
    {
      "epoch": 28.75,
      "grad_norm": 0.6137635111808777,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0224,
      "step": 1150
    },
    {
      "epoch": 29.0,
      "grad_norm": 0.527621865272522,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0177,
      "step": 1160
    },
    {
      "epoch": 29.25,
      "grad_norm": 0.39209073781967163,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0198,
      "step": 1170
    },
    {
      "epoch": 29.5,
      "grad_norm": 0.6302863359451294,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0226,
      "step": 1180
    },
    {
      "epoch": 29.75,
      "grad_norm": 0.34944644570350647,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0159,
      "step": 1190
    },
    {
      "epoch": 30.0,
      "grad_norm": 0.4721897840499878,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0206,
      "step": 1200
    },
    {
      "epoch": 30.25,
      "grad_norm": 0.3942127227783203,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0192,
      "step": 1210
    },
    {
      "epoch": 30.5,
      "grad_norm": 0.2768412232398987,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0138,
      "step": 1220
    },
    {
      "epoch": 30.75,
      "grad_norm": 0.33834919333457947,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.018,
      "step": 1230
    },
    {
      "epoch": 31.0,
      "grad_norm": 0.2718036472797394,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0176,
      "step": 1240
    },
    {
      "epoch": 31.25,
      "grad_norm": 0.3397132158279419,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0196,
      "step": 1250
    },
    {
      "epoch": 31.5,
      "grad_norm": 0.3387533128261566,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0207,
      "step": 1260
    },
    {
      "epoch": 31.75,
      "grad_norm": 0.4069482088088989,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0205,
      "step": 1270
    },
    {
      "epoch": 32.0,
      "grad_norm": 0.3819906413555145,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0244,
      "step": 1280
    },
    {
      "epoch": 32.25,
      "grad_norm": 0.5072693824768066,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0255,
      "step": 1290
    },
    {
      "epoch": 32.5,
      "grad_norm": 0.5479236841201782,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0255,
      "step": 1300
    },
    {
      "epoch": 32.75,
      "grad_norm": 0.6064186692237854,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0166,
      "step": 1310
    },
    {
      "epoch": 33.0,
      "grad_norm": 0.5842517614364624,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0209,
      "step": 1320
    },
    {
      "epoch": 33.25,
      "grad_norm": 0.26301082968711853,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0174,
      "step": 1330
    },
    {
      "epoch": 33.5,
      "grad_norm": 0.4220535159111023,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.0201,
      "step": 1340
    },
    {
      "epoch": 33.75,
      "grad_norm": 0.42425140738487244,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0146,
      "step": 1350
    },
    {
      "epoch": 34.0,
      "grad_norm": 0.27794766426086426,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0163,
      "step": 1360
    },
    {
      "epoch": 34.25,
      "grad_norm": 0.4631710350513458,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0175,
      "step": 1370
    },
    {
      "epoch": 34.5,
      "grad_norm": 0.4688563644886017,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0196,
      "step": 1380
    },
    {
      "epoch": 34.75,
      "grad_norm": 0.5662594437599182,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.017,
      "step": 1390
    },
    {
      "epoch": 35.0,
      "grad_norm": 0.35163137316703796,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0183,
      "step": 1400
    },
    {
      "epoch": 35.25,
      "grad_norm": 0.3633674681186676,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0159,
      "step": 1410
    },
    {
      "epoch": 35.5,
      "grad_norm": 0.31508517265319824,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0176,
      "step": 1420
    },
    {
      "epoch": 35.75,
      "grad_norm": 0.37547263503074646,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0175,
      "step": 1430
    },
    {
      "epoch": 36.0,
      "grad_norm": 0.3714698255062103,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0209,
      "step": 1440
    },
    {
      "epoch": 36.25,
      "grad_norm": 0.6465221047401428,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0204,
      "step": 1450
    },
    {
      "epoch": 36.5,
      "grad_norm": 0.6143114566802979,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0196,
      "step": 1460
    },
    {
      "epoch": 36.75,
      "grad_norm": 0.48664823174476624,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0173,
      "step": 1470
    },
    {
      "epoch": 37.0,
      "grad_norm": 0.47144779562950134,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0243,
      "step": 1480
    },
    {
      "epoch": 37.25,
      "grad_norm": 0.35993170738220215,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0152,
      "step": 1490
    },
    {
      "epoch": 37.5,
      "grad_norm": 0.3285447657108307,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0189,
      "step": 1500
    },
    {
      "epoch": 37.75,
      "grad_norm": 0.4205745458602905,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0222,
      "step": 1510
    },
    {
      "epoch": 38.0,
      "grad_norm": 0.2668244242668152,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0163,
      "step": 1520
    },
    {
      "epoch": 38.25,
      "grad_norm": 0.4396367371082306,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.0193,
      "step": 1530
    },
    {
      "epoch": 38.5,
      "grad_norm": 0.40211695432662964,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0164,
      "step": 1540
    },
    {
      "epoch": 38.75,
      "grad_norm": 0.4458588659763336,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.0208,
      "step": 1550
    },
    {
      "epoch": 39.0,
      "grad_norm": 0.22913917899131775,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0163,
      "step": 1560
    },
    {
      "epoch": 39.25,
      "grad_norm": 0.4234263002872467,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0158,
      "step": 1570
    },
    {
      "epoch": 39.5,
      "grad_norm": 0.41260766983032227,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0183,
      "step": 1580
    },
    {
      "epoch": 39.75,
      "grad_norm": 0.4093533754348755,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0215,
      "step": 1590
    },
    {
      "epoch": 40.0,
      "grad_norm": 0.5214777588844299,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0183,
      "step": 1600
    },
    {
      "epoch": 40.25,
      "grad_norm": 0.3588806986808777,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0181,
      "step": 1610
    },
    {
      "epoch": 40.5,
      "grad_norm": 0.3200751543045044,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0147,
      "step": 1620
    },
    {
      "epoch": 40.75,
      "grad_norm": 0.2634463608264923,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0139,
      "step": 1630
    },
    {
      "epoch": 41.0,
      "grad_norm": 0.362644225358963,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0142,
      "step": 1640
    },
    {
      "epoch": 41.25,
      "grad_norm": 0.32056477665901184,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0156,
      "step": 1650
    },
    {
      "epoch": 41.5,
      "grad_norm": 0.4088948369026184,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0188,
      "step": 1660
    },
    {
      "epoch": 41.75,
      "grad_norm": 0.37446433305740356,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.015,
      "step": 1670
    },
    {
      "epoch": 42.0,
      "grad_norm": 0.4627027213573456,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0163,
      "step": 1680
    },
    {
      "epoch": 42.25,
      "grad_norm": 0.22955283522605896,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.019,
      "step": 1690
    },
    {
      "epoch": 42.5,
      "grad_norm": 0.27593836188316345,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0137,
      "step": 1700
    },
    {
      "epoch": 42.75,
      "grad_norm": 0.3275546431541443,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0152,
      "step": 1710
    },
    {
      "epoch": 43.0,
      "grad_norm": 0.38114628195762634,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0181,
      "step": 1720
    },
    {
      "epoch": 43.25,
      "grad_norm": 0.3989734649658203,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.019,
      "step": 1730
    },
    {
      "epoch": 43.5,
      "grad_norm": 0.31094446778297424,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0179,
      "step": 1740
    },
    {
      "epoch": 43.75,
      "grad_norm": 0.17701998353004456,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0199,
      "step": 1750
    },
    {
      "epoch": 44.0,
      "grad_norm": 0.29390594363212585,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0163,
      "step": 1760
    },
    {
      "epoch": 44.25,
      "grad_norm": 0.3040190041065216,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0155,
      "step": 1770
    },
    {
      "epoch": 44.5,
      "grad_norm": 0.44003409147262573,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0185,
      "step": 1780
    },
    {
      "epoch": 44.75,
      "grad_norm": 0.3427186608314514,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0191,
      "step": 1790
    },
    {
      "epoch": 45.0,
      "grad_norm": 0.3714499771595001,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0175,
      "step": 1800
    },
    {
      "epoch": 45.25,
      "grad_norm": 0.30852749943733215,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0164,
      "step": 1810
    },
    {
      "epoch": 45.5,
      "grad_norm": 0.2884804904460907,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0161,
      "step": 1820
    },
    {
      "epoch": 45.75,
      "grad_norm": 0.3582738935947418,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0146,
      "step": 1830
    },
    {
      "epoch": 46.0,
      "grad_norm": 0.32050254940986633,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0136,
      "step": 1840
    },
    {
      "epoch": 46.25,
      "grad_norm": 0.40590667724609375,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0165,
      "step": 1850
    },
    {
      "epoch": 46.5,
      "grad_norm": 0.2890171408653259,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.016,
      "step": 1860
    },
    {
      "epoch": 46.75,
      "grad_norm": 0.29964378476142883,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.0152,
      "step": 1870
    },
    {
      "epoch": 47.0,
      "grad_norm": 0.27994900941848755,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0186,
      "step": 1880
    },
    {
      "epoch": 47.25,
      "grad_norm": 0.31143495440483093,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0133,
      "step": 1890
    },
    {
      "epoch": 47.5,
      "grad_norm": 0.5050742626190186,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.019,
      "step": 1900
    },
    {
      "epoch": 47.75,
      "grad_norm": 0.45632266998291016,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0203,
      "step": 1910
    },
    {
      "epoch": 48.0,
      "grad_norm": 0.362568199634552,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0152,
      "step": 1920
    },
    {
      "epoch": 48.25,
      "grad_norm": 0.2526082694530487,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0154,
      "step": 1930
    },
    {
      "epoch": 48.5,
      "grad_norm": 0.29366403818130493,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0184,
      "step": 1940
    },
    {
      "epoch": 48.75,
      "grad_norm": 0.311564177274704,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0155,
      "step": 1950
    },
    {
      "epoch": 49.0,
      "grad_norm": 0.42421355843544006,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0126,
      "step": 1960
    },
    {
      "epoch": 49.25,
      "grad_norm": 0.608485221862793,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0198,
      "step": 1970
    },
    {
      "epoch": 49.5,
      "grad_norm": 0.5886660814285278,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.0234,
      "step": 1980
    },
    {
      "epoch": 49.75,
      "grad_norm": 0.5250411033630371,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0218,
      "step": 1990
    },
    {
      "epoch": 50.0,
      "grad_norm": 0.2709248661994934,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0161,
      "step": 2000
    },
    {
      "epoch": 50.25,
      "grad_norm": 0.1994844675064087,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0136,
      "step": 2010
    },
    {
      "epoch": 50.5,
      "grad_norm": 0.17653261125087738,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0163,
      "step": 2020
    },
    {
      "epoch": 50.75,
      "grad_norm": 0.22169753909111023,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0199,
      "step": 2030
    },
    {
      "epoch": 51.0,
      "grad_norm": 0.28427475690841675,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.018,
      "step": 2040
    },
    {
      "epoch": 51.25,
      "grad_norm": 0.20987115800380707,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0135,
      "step": 2050
    },
    {
      "epoch": 51.5,
      "grad_norm": 0.18959645926952362,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0138,
      "step": 2060
    },
    {
      "epoch": 51.75,
      "grad_norm": 0.3204769790172577,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0135,
      "step": 2070
    },
    {
      "epoch": 52.0,
      "grad_norm": 0.4980137348175049,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0188,
      "step": 2080
    },
    {
      "epoch": 52.25,
      "grad_norm": 0.4129096567630768,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.012,
      "step": 2090
    },
    {
      "epoch": 52.5,
      "grad_norm": 0.4252679944038391,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0151,
      "step": 2100
    },
    {
      "epoch": 52.75,
      "grad_norm": 0.23144613206386566,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.014,
      "step": 2110
    },
    {
      "epoch": 53.0,
      "grad_norm": 0.3891558349132538,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0148,
      "step": 2120
    },
    {
      "epoch": 53.25,
      "grad_norm": 0.49045929312705994,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0152,
      "step": 2130
    },
    {
      "epoch": 53.5,
      "grad_norm": 0.40814849734306335,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0114,
      "step": 2140
    },
    {
      "epoch": 53.75,
      "grad_norm": 0.43267515301704407,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0242,
      "step": 2150
    },
    {
      "epoch": 54.0,
      "grad_norm": 0.2985181510448456,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0134,
      "step": 2160
    },
    {
      "epoch": 54.25,
      "grad_norm": 0.3468068540096283,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0142,
      "step": 2170
    },
    {
      "epoch": 54.5,
      "grad_norm": 0.22969213128089905,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0092,
      "step": 2180
    },
    {
      "epoch": 54.75,
      "grad_norm": 0.22361359000205994,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0137,
      "step": 2190
    },
    {
      "epoch": 55.0,
      "grad_norm": 0.24732734262943268,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0141,
      "step": 2200
    },
    {
      "epoch": 55.25,
      "grad_norm": 0.20617972314357758,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0093,
      "step": 2210
    },
    {
      "epoch": 55.5,
      "grad_norm": 0.2772747576236725,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0112,
      "step": 2220
    },
    {
      "epoch": 55.75,
      "grad_norm": 0.21540410816669464,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0134,
      "step": 2230
    },
    {
      "epoch": 56.0,
      "grad_norm": 0.20707572996616364,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0135,
      "step": 2240
    },
    {
      "epoch": 56.25,
      "grad_norm": 0.24764341115951538,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0103,
      "step": 2250
    },
    {
      "epoch": 56.5,
      "grad_norm": 0.31629952788352966,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.017,
      "step": 2260
    },
    {
      "epoch": 56.75,
      "grad_norm": 0.29424598813056946,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.019,
      "step": 2270
    },
    {
      "epoch": 57.0,
      "grad_norm": 0.24435587227344513,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0133,
      "step": 2280
    },
    {
      "epoch": 57.25,
      "grad_norm": 0.2817462086677551,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0153,
      "step": 2290
    },
    {
      "epoch": 57.5,
      "grad_norm": 0.17769835889339447,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0143,
      "step": 2300
    },
    {
      "epoch": 57.75,
      "grad_norm": 0.2751213014125824,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0154,
      "step": 2310
    },
    {
      "epoch": 58.0,
      "grad_norm": 0.20847024023532867,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0138,
      "step": 2320
    },
    {
      "epoch": 58.25,
      "grad_norm": 0.22063089907169342,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0158,
      "step": 2330
    },
    {
      "epoch": 58.5,
      "grad_norm": 0.39056122303009033,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0202,
      "step": 2340
    },
    {
      "epoch": 58.75,
      "grad_norm": 0.3934856355190277,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0159,
      "step": 2350
    },
    {
      "epoch": 59.0,
      "grad_norm": 0.41992074251174927,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0152,
      "step": 2360
    },
    {
      "epoch": 59.25,
      "grad_norm": 0.30681025981903076,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.012,
      "step": 2370
    },
    {
      "epoch": 59.5,
      "grad_norm": 0.21425344049930573,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0121,
      "step": 2380
    },
    {
      "epoch": 59.75,
      "grad_norm": 0.24510438740253448,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0144,
      "step": 2390
    },
    {
      "epoch": 60.0,
      "grad_norm": 0.2346717268228531,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0197,
      "step": 2400
    },
    {
      "epoch": 60.25,
      "grad_norm": 0.40343689918518066,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.012,
      "step": 2410
    },
    {
      "epoch": 60.5,
      "grad_norm": 0.447936087846756,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0119,
      "step": 2420
    },
    {
      "epoch": 60.75,
      "grad_norm": 0.43714478611946106,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0141,
      "step": 2430
    },
    {
      "epoch": 61.0,
      "grad_norm": 0.2527328133583069,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0134,
      "step": 2440
    },
    {
      "epoch": 61.25,
      "grad_norm": 0.26533153653144836,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0146,
      "step": 2450
    },
    {
      "epoch": 61.5,
      "grad_norm": 0.18538479506969452,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.014,
      "step": 2460
    },
    {
      "epoch": 61.75,
      "grad_norm": 0.4042245149612427,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0147,
      "step": 2470
    },
    {
      "epoch": 62.0,
      "grad_norm": 0.4519740045070648,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0126,
      "step": 2480
    },
    {
      "epoch": 62.25,
      "grad_norm": 0.5037399530410767,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0153,
      "step": 2490
    },
    {
      "epoch": 62.5,
      "grad_norm": 0.36741331219673157,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0154,
      "step": 2500
    },
    {
      "epoch": 62.75,
      "grad_norm": 0.4969617426395416,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.016,
      "step": 2510
    },
    {
      "epoch": 63.0,
      "grad_norm": 0.3934231698513031,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0155,
      "step": 2520
    },
    {
      "epoch": 63.25,
      "grad_norm": 0.3299417793750763,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0123,
      "step": 2530
    },
    {
      "epoch": 63.5,
      "grad_norm": 0.4059097170829773,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.015,
      "step": 2540
    },
    {
      "epoch": 63.75,
      "grad_norm": 0.2909546494483948,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0158,
      "step": 2550
    },
    {
      "epoch": 64.0,
      "grad_norm": 0.26669272780418396,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0184,
      "step": 2560
    },
    {
      "epoch": 64.25,
      "grad_norm": 0.3337773382663727,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0133,
      "step": 2570
    },
    {
      "epoch": 64.5,
      "grad_norm": 0.2659240961074829,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0174,
      "step": 2580
    },
    {
      "epoch": 64.75,
      "grad_norm": 0.20509116351604462,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0156,
      "step": 2590
    },
    {
      "epoch": 65.0,
      "grad_norm": 0.3653193414211273,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0151,
      "step": 2600
    },
    {
      "epoch": 65.25,
      "grad_norm": 0.29937827587127686,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0145,
      "step": 2610
    },
    {
      "epoch": 65.5,
      "grad_norm": 0.2896886467933655,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0128,
      "step": 2620
    },
    {
      "epoch": 65.75,
      "grad_norm": 0.3686462342739105,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0106,
      "step": 2630
    },
    {
      "epoch": 66.0,
      "grad_norm": 0.23774658143520355,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0116,
      "step": 2640
    },
    {
      "epoch": 66.25,
      "grad_norm": 0.2722029387950897,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0083,
      "step": 2650
    },
    {
      "epoch": 66.5,
      "grad_norm": 0.22109919786453247,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0179,
      "step": 2660
    },
    {
      "epoch": 66.75,
      "grad_norm": 0.3148277699947357,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0166,
      "step": 2670
    },
    {
      "epoch": 67.0,
      "grad_norm": 0.3373554050922394,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0117,
      "step": 2680
    },
    {
      "epoch": 67.25,
      "grad_norm": 0.29900553822517395,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0163,
      "step": 2690
    },
    {
      "epoch": 67.5,
      "grad_norm": 0.2480359971523285,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.017,
      "step": 2700
    },
    {
      "epoch": 67.75,
      "grad_norm": 0.3013180196285248,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0162,
      "step": 2710
    },
    {
      "epoch": 68.0,
      "grad_norm": 0.23710669577121735,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0149,
      "step": 2720
    },
    {
      "epoch": 68.25,
      "grad_norm": 0.3515462875366211,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0134,
      "step": 2730
    },
    {
      "epoch": 68.5,
      "grad_norm": 0.3761940896511078,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0159,
      "step": 2740
    },
    {
      "epoch": 68.75,
      "grad_norm": 0.3726300299167633,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0122,
      "step": 2750
    },
    {
      "epoch": 69.0,
      "grad_norm": 0.26920396089553833,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0129,
      "step": 2760
    },
    {
      "epoch": 69.25,
      "grad_norm": 0.378433495759964,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0114,
      "step": 2770
    },
    {
      "epoch": 69.5,
      "grad_norm": 0.3709133267402649,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0131,
      "step": 2780
    },
    {
      "epoch": 69.75,
      "grad_norm": 0.22590842843055725,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0144,
      "step": 2790
    },
    {
      "epoch": 70.0,
      "grad_norm": 0.3659012019634247,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0116,
      "step": 2800
    },
    {
      "epoch": 70.25,
      "grad_norm": 0.28762730956077576,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0133,
      "step": 2810
    },
    {
      "epoch": 70.5,
      "grad_norm": 0.44853711128234863,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0126,
      "step": 2820
    },
    {
      "epoch": 70.75,
      "grad_norm": 0.3202303349971771,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0135,
      "step": 2830
    },
    {
      "epoch": 71.0,
      "grad_norm": 0.27304911613464355,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0103,
      "step": 2840
    },
    {
      "epoch": 71.25,
      "grad_norm": 0.3241058588027954,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0144,
      "step": 2850
    },
    {
      "epoch": 71.5,
      "grad_norm": 0.3490132689476013,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0166,
      "step": 2860
    },
    {
      "epoch": 71.75,
      "grad_norm": 0.22821162641048431,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0118,
      "step": 2870
    },
    {
      "epoch": 72.0,
      "grad_norm": 0.21379388868808746,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0122,
      "step": 2880
    },
    {
      "epoch": 72.25,
      "grad_norm": 0.2390654981136322,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0117,
      "step": 2890
    },
    {
      "epoch": 72.5,
      "grad_norm": 0.2505422532558441,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.015,
      "step": 2900
    },
    {
      "epoch": 72.75,
      "grad_norm": 0.31641829013824463,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.015,
      "step": 2910
    },
    {
      "epoch": 73.0,
      "grad_norm": 0.5015877485275269,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.0152,
      "step": 2920
    },
    {
      "epoch": 73.25,
      "grad_norm": 0.373805433511734,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0098,
      "step": 2930
    },
    {
      "epoch": 73.5,
      "grad_norm": 0.31168583035469055,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0102,
      "step": 2940
    },
    {
      "epoch": 73.75,
      "grad_norm": 0.25392434000968933,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0121,
      "step": 2950
    },
    {
      "epoch": 74.0,
      "grad_norm": 0.2583267390727997,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.014,
      "step": 2960
    },
    {
      "epoch": 74.25,
      "grad_norm": 0.2887478470802307,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0143,
      "step": 2970
    },
    {
      "epoch": 74.5,
      "grad_norm": 0.20327523350715637,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.014,
      "step": 2980
    },
    {
      "epoch": 74.75,
      "grad_norm": 0.20265914499759674,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0102,
      "step": 2990
    },
    {
      "epoch": 75.0,
      "grad_norm": 0.32352325320243835,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0109,
      "step": 3000
    },
    {
      "epoch": 75.25,
      "grad_norm": 0.23712804913520813,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0145,
      "step": 3010
    },
    {
      "epoch": 75.5,
      "grad_norm": 0.2874774634838104,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0125,
      "step": 3020
    },
    {
      "epoch": 75.75,
      "grad_norm": 0.23495231568813324,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0153,
      "step": 3030
    },
    {
      "epoch": 76.0,
      "grad_norm": 0.25911620259284973,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0136,
      "step": 3040
    },
    {
      "epoch": 76.25,
      "grad_norm": 0.28189530968666077,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0113,
      "step": 3050
    },
    {
      "epoch": 76.5,
      "grad_norm": 0.3920529782772064,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0194,
      "step": 3060
    },
    {
      "epoch": 76.75,
      "grad_norm": 0.30341243743896484,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0174,
      "step": 3070
    },
    {
      "epoch": 77.0,
      "grad_norm": 0.417825847864151,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0172,
      "step": 3080
    },
    {
      "epoch": 77.25,
      "grad_norm": 0.3963508903980255,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.014,
      "step": 3090
    },
    {
      "epoch": 77.5,
      "grad_norm": 0.3563499450683594,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0161,
      "step": 3100
    },
    {
      "epoch": 77.75,
      "grad_norm": 0.3115830719470978,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0149,
      "step": 3110
    },
    {
      "epoch": 78.0,
      "grad_norm": 0.3986978530883789,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0153,
      "step": 3120
    },
    {
      "epoch": 78.25,
      "grad_norm": 0.27240195870399475,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0126,
      "step": 3130
    },
    {
      "epoch": 78.5,
      "grad_norm": 0.2304350584745407,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0142,
      "step": 3140
    },
    {
      "epoch": 78.75,
      "grad_norm": 0.1990521103143692,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0126,
      "step": 3150
    },
    {
      "epoch": 79.0,
      "grad_norm": 0.1642603874206543,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0121,
      "step": 3160
    },
    {
      "epoch": 79.25,
      "grad_norm": 0.23555266857147217,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0111,
      "step": 3170
    },
    {
      "epoch": 79.5,
      "grad_norm": 0.28729990124702454,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0152,
      "step": 3180
    },
    {
      "epoch": 79.75,
      "grad_norm": 0.33808666467666626,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0127,
      "step": 3190
    },
    {
      "epoch": 80.0,
      "grad_norm": 0.4436752200126648,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0149,
      "step": 3200
    },
    {
      "epoch": 80.25,
      "grad_norm": 0.4043782949447632,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0146,
      "step": 3210
    },
    {
      "epoch": 80.5,
      "grad_norm": 0.23915347456932068,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.014,
      "step": 3220
    },
    {
      "epoch": 80.75,
      "grad_norm": 0.175531268119812,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0098,
      "step": 3230
    },
    {
      "epoch": 81.0,
      "grad_norm": 0.3109751045703888,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0161,
      "step": 3240
    },
    {
      "epoch": 81.25,
      "grad_norm": 0.19472594559192657,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0113,
      "step": 3250
    },
    {
      "epoch": 81.5,
      "grad_norm": 0.2841004431247711,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0128,
      "step": 3260
    },
    {
      "epoch": 81.75,
      "grad_norm": 0.23794560134410858,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0137,
      "step": 3270
    },
    {
      "epoch": 82.0,
      "grad_norm": 0.3024290204048157,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0135,
      "step": 3280
    },
    {
      "epoch": 82.25,
      "grad_norm": 0.3859151303768158,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0108,
      "step": 3290
    },
    {
      "epoch": 82.5,
      "grad_norm": 0.30000364780426025,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0111,
      "step": 3300
    },
    {
      "epoch": 82.75,
      "grad_norm": 0.23926366865634918,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.01,
      "step": 3310
    },
    {
      "epoch": 83.0,
      "grad_norm": 0.36803534626960754,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0116,
      "step": 3320
    },
    {
      "epoch": 83.25,
      "grad_norm": 0.5533159971237183,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0115,
      "step": 3330
    },
    {
      "epoch": 83.5,
      "grad_norm": 0.2806551158428192,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0121,
      "step": 3340
    },
    {
      "epoch": 83.75,
      "grad_norm": 0.32250022888183594,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0111,
      "step": 3350
    },
    {
      "epoch": 84.0,
      "grad_norm": 0.21136438846588135,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0124,
      "step": 3360
    },
    {
      "epoch": 84.25,
      "grad_norm": 0.29519808292388916,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0119,
      "step": 3370
    },
    {
      "epoch": 84.5,
      "grad_norm": 0.20215508341789246,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0188,
      "step": 3380
    },
    {
      "epoch": 84.75,
      "grad_norm": 0.20980972051620483,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0141,
      "step": 3390
    },
    {
      "epoch": 85.0,
      "grad_norm": 0.15065793693065643,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0119,
      "step": 3400
    },
    {
      "epoch": 85.25,
      "grad_norm": 0.30695223808288574,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0151,
      "step": 3410
    },
    {
      "epoch": 85.5,
      "grad_norm": 0.17749333381652832,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0125,
      "step": 3420
    },
    {
      "epoch": 85.75,
      "grad_norm": 0.16342045366764069,
      "learning_rate": 7.832258291619043e-05,
      "loss": 0.0111,
      "step": 3430
    },
    {
      "epoch": 86.0,
      "grad_norm": 0.29444870352745056,
      "learning_rate": 7.81861666830322e-05,
      "loss": 0.0112,
      "step": 3440
    },
    {
      "epoch": 86.25,
      "grad_norm": 0.4270837903022766,
      "learning_rate": 7.804944221047097e-05,
      "loss": 0.0137,
      "step": 3450
    },
    {
      "epoch": 86.5,
      "grad_norm": 0.3473292887210846,
      "learning_rate": 7.791241099370364e-05,
      "loss": 0.0136,
      "step": 3460
    },
    {
      "epoch": 86.75,
      "grad_norm": 0.38873690366744995,
      "learning_rate": 7.777507453128163e-05,
      "loss": 0.0107,
      "step": 3470
    },
    {
      "epoch": 87.0,
      "grad_norm": 0.2898962199687958,
      "learning_rate": 7.763743432509451e-05,
      "loss": 0.0097,
      "step": 3480
    },
    {
      "epoch": 87.25,
      "grad_norm": 0.2728941738605499,
      "learning_rate": 7.749949188035353e-05,
      "loss": 0.013,
      "step": 3490
    },
    {
      "epoch": 87.5,
      "grad_norm": 0.203449085354805,
      "learning_rate": 7.736124870557516e-05,
      "loss": 0.016,
      "step": 3500
    },
    {
      "epoch": 87.75,
      "grad_norm": 0.28565430641174316,
      "learning_rate": 7.722270631256459e-05,
      "loss": 0.0123,
      "step": 3510
    },
    {
      "epoch": 88.0,
      "grad_norm": 0.32342642545700073,
      "learning_rate": 7.708386621639925e-05,
      "loss": 0.0134,
      "step": 3520
    },
    {
      "epoch": 88.25,
      "grad_norm": 0.26711976528167725,
      "learning_rate": 7.694472993541219e-05,
      "loss": 0.0113,
      "step": 3530
    },
    {
      "epoch": 88.5,
      "grad_norm": 0.26559197902679443,
      "learning_rate": 7.680529899117547e-05,
      "loss": 0.0137,
      "step": 3540
    },
    {
      "epoch": 88.75,
      "grad_norm": 0.32358163595199585,
      "learning_rate": 7.666557490848358e-05,
      "loss": 0.0159,
      "step": 3550
    },
    {
      "epoch": 89.0,
      "grad_norm": 0.26062920689582825,
      "learning_rate": 7.65255592153367e-05,
      "loss": 0.0126,
      "step": 3560
    },
    {
      "epoch": 89.25,
      "grad_norm": 0.22964859008789062,
      "learning_rate": 7.638525344292402e-05,
      "loss": 0.0123,
      "step": 3570
    },
    {
      "epoch": 89.5,
      "grad_norm": 0.17719513177871704,
      "learning_rate": 7.624465912560697e-05,
      "loss": 0.0113,
      "step": 3580
    },
    {
      "epoch": 89.75,
      "grad_norm": 0.23025670647621155,
      "learning_rate": 7.610377780090249e-05,
      "loss": 0.0156,
      "step": 3590
    },
    {
      "epoch": 90.0,
      "grad_norm": 0.23307792842388153,
      "learning_rate": 7.596261100946618e-05,
      "loss": 0.0123,
      "step": 3600
    },
    {
      "epoch": 90.25,
      "grad_norm": 0.17993809282779694,
      "learning_rate": 7.582116029507542e-05,
      "loss": 0.0107,
      "step": 3610
    },
    {
      "epoch": 90.5,
      "grad_norm": 0.17383725941181183,
      "learning_rate": 7.56794272046126e-05,
      "loss": 0.0099,
      "step": 3620
    },
    {
      "epoch": 90.75,
      "grad_norm": 0.24793113768100739,
      "learning_rate": 7.55374132880481e-05,
      "loss": 0.0117,
      "step": 3630
    },
    {
      "epoch": 91.0,
      "grad_norm": 0.3454414904117584,
      "learning_rate": 7.539512009842333e-05,
      "loss": 0.0143,
      "step": 3640
    },
    {
      "epoch": 91.25,
      "grad_norm": 0.22444981336593628,
      "learning_rate": 7.525254919183382e-05,
      "loss": 0.0112,
      "step": 3650
    },
    {
      "epoch": 91.5,
      "grad_norm": 0.3071042001247406,
      "learning_rate": 7.510970212741215e-05,
      "loss": 0.0141,
      "step": 3660
    },
    {
      "epoch": 91.75,
      "grad_norm": 0.13223372399806976,
      "learning_rate": 7.496658046731096e-05,
      "loss": 0.0094,
      "step": 3670
    },
    {
      "epoch": 92.0,
      "grad_norm": 0.1920013278722763,
      "learning_rate": 7.482318577668578e-05,
      "loss": 0.011,
      "step": 3680
    },
    {
      "epoch": 92.25,
      "grad_norm": 0.1618950366973877,
      "learning_rate": 7.467951962367796e-05,
      "loss": 0.0119,
      "step": 3690
    },
| { | |
| "epoch": 92.5, | |
| "grad_norm": 0.2321500927209854, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0133, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 92.75, | |
| "grad_norm": 0.310839980840683, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0112, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 93.0, | |
| "grad_norm": 0.1530730277299881, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0149, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 93.25, | |
| "grad_norm": 0.3117826282978058, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0147, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 93.5, | |
| "grad_norm": 0.20995257794857025, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0105, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 93.75, | |
| "grad_norm": 0.20923268795013428, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0141, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 94.0, | |
| "grad_norm": 0.16217997670173645, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0109, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 94.25, | |
| "grad_norm": 0.1856212615966797, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.011, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 94.5, | |
| "grad_norm": 0.274571031332016, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0148, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 94.75, | |
| "grad_norm": 0.2003510743379593, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0103, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 95.0, | |
| "grad_norm": 0.25044745206832886, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0116, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 95.25, | |
| "grad_norm": 0.26451465487480164, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0136, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 95.5, | |
| "grad_norm": 0.21916158497333527, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0094, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 95.75, | |
| "grad_norm": 0.19029930233955383, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0143, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 96.0, | |
| "grad_norm": 0.20158341526985168, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0126, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 96.25, | |
| "grad_norm": 0.2865701913833618, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0108, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 96.5, | |
| "grad_norm": 0.3106520175933838, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0094, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 96.75, | |
| "grad_norm": 0.2534642815589905, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0142, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 97.0, | |
| "grad_norm": 0.267780065536499, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0104, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 97.25, | |
| "grad_norm": 0.23589850962162018, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0105, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 97.5, | |
| "grad_norm": 0.274258017539978, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0114, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 97.75, | |
| "grad_norm": 0.19494503736495972, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0106, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 98.0, | |
| "grad_norm": 0.1633615493774414, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0093, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 98.25, | |
| "grad_norm": 0.3438349664211273, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0097, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 98.5, | |
| "grad_norm": 0.2983504831790924, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0096, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 98.75, | |
| "grad_norm": 0.2533060610294342, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0109, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 99.0, | |
| "grad_norm": 0.2611391842365265, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0145, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 99.25, | |
| "grad_norm": 0.12219741195440292, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0101, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 99.5, | |
| "grad_norm": 0.23046687245368958, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0114, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 99.75, | |
| "grad_norm": 0.28500500321388245, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0102, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 100.0, | |
| "grad_norm": 0.28569942712783813, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0075, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 100.25, | |
| "grad_norm": 0.2506348788738251, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0094, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 100.5, | |
| "grad_norm": 0.19107772409915924, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0073, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 100.75, | |
| "grad_norm": 0.24592654407024384, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0087, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 101.0, | |
| "grad_norm": 0.13424347341060638, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0112, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 101.25, | |
| "grad_norm": 0.18724428117275238, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0114, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 101.5, | |
| "grad_norm": 0.3430480360984802, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0112, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 101.75, | |
| "grad_norm": 0.41409286856651306, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0121, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 102.0, | |
| "grad_norm": 0.209912970662117, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0119, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 102.25, | |
| "grad_norm": 0.438534140586853, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0119, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 102.5, | |
| "grad_norm": 0.3194650113582611, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0099, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 102.75, | |
| "grad_norm": 0.2407376617193222, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0122, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 103.0, | |
| "grad_norm": 0.16524438560009003, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0107, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 103.25, | |
| "grad_norm": 0.2268092781305313, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0122, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 103.5, | |
| "grad_norm": 0.2470845878124237, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0117, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 103.75, | |
| "grad_norm": 0.4060851037502289, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.014, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 104.0, | |
| "grad_norm": 0.25651276111602783, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0099, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 104.25, | |
| "grad_norm": 0.3012460768222809, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0118, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 104.5, | |
| "grad_norm": 0.3353002965450287, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.014, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 104.75, | |
| "grad_norm": 0.41446223855018616, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0131, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 105.0, | |
| "grad_norm": 0.24888281524181366, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0164, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 105.25, | |
| "grad_norm": 0.3254045844078064, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0115, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 105.5, | |
| "grad_norm": 0.3065283000469208, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0098, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 105.75, | |
| "grad_norm": 0.26779666543006897, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.009, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 106.0, | |
| "grad_norm": 0.2584543526172638, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0089, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 106.25, | |
| "grad_norm": 0.39003482460975647, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0107, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 106.5, | |
| "grad_norm": 0.19243167340755463, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0111, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 106.75, | |
| "grad_norm": 0.2182435542345047, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0124, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 107.0, | |
| "grad_norm": 0.1765671819448471, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0112, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 107.25, | |
| "grad_norm": 0.20157065987586975, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0092, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 107.5, | |
| "grad_norm": 0.28021109104156494, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0096, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 107.75, | |
| "grad_norm": 0.27157220244407654, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0112, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 108.0, | |
| "grad_norm": 0.265423446893692, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0094, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 108.25, | |
| "grad_norm": 0.23950523138046265, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0109, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 108.5, | |
| "grad_norm": 0.17336910963058472, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0093, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 108.75, | |
| "grad_norm": 0.19538640975952148, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0088, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 109.0, | |
| "grad_norm": 0.26300865411758423, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.008, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 109.25, | |
| "grad_norm": 0.4020210802555084, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0086, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 109.5, | |
| "grad_norm": 0.15748588740825653, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0075, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 109.75, | |
| "grad_norm": 0.2636542320251465, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0091, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 110.0, | |
| "grad_norm": 0.2198638617992401, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0123, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 110.25, | |
| "grad_norm": 0.2724881172180176, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0147, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 110.5, | |
| "grad_norm": 0.3189849853515625, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0118, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 110.75, | |
| "grad_norm": 0.24298754334449768, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0131, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 111.0, | |
| "grad_norm": 0.3537801206111908, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.011, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 111.25, | |
| "grad_norm": 0.430812269449234, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0162, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 111.5, | |
| "grad_norm": 0.18427328765392303, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0104, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 111.75, | |
| "grad_norm": 0.3262835144996643, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.012, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 112.0, | |
| "grad_norm": 0.3320060968399048, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0102, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 112.25, | |
| "grad_norm": 0.3201114535331726, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0114, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 112.5, | |
| "grad_norm": 0.19090622663497925, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0119, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 112.75, | |
| "grad_norm": 0.3037988841533661, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0113, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 113.0, | |
| "grad_norm": 0.27645960450172424, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0133, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 113.25, | |
| "grad_norm": 0.22976121306419373, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.011, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 113.5, | |
| "grad_norm": 0.2242169827222824, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0108, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 113.75, | |
| "grad_norm": 0.26727139949798584, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0093, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 114.0, | |
| "grad_norm": 0.30186429619789124, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0096, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 114.25, | |
| "grad_norm": 0.2465476542711258, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0079, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 114.5, | |
| "grad_norm": 0.22743864357471466, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0101, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 114.75, | |
| "grad_norm": 0.3261238932609558, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0088, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 115.0, | |
| "grad_norm": 0.17115327715873718, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0088, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 115.25, | |
| "grad_norm": 0.2672770023345947, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0088, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 115.5, | |
| "grad_norm": 0.2757582664489746, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.012, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 115.75, | |
| "grad_norm": 0.2405472844839096, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0136, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 116.0, | |
| "grad_norm": 0.2878851890563965, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0104, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 116.25, | |
| "grad_norm": 0.3704386353492737, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.012, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 116.5, | |
| "grad_norm": 0.19459301233291626, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0108, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 116.75, | |
| "grad_norm": 0.2355605810880661, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0184, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 117.0, | |
| "grad_norm": 0.3160833716392517, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0103, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 117.25, | |
| "grad_norm": 0.2000928819179535, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0086, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 117.5, | |
| "grad_norm": 0.14318124949932098, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0201, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 117.75, | |
| "grad_norm": 0.21847835183143616, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0167, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 118.0, | |
| "grad_norm": 0.3395015597343445, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0103, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 118.25, | |
| "grad_norm": 0.21214354038238525, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0107, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 118.5, | |
| "grad_norm": 0.2229463905096054, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.011, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 118.75, | |
| "grad_norm": 0.22180570662021637, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0091, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 119.0, | |
| "grad_norm": 0.2962054908275604, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0124, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 119.25, | |
| "grad_norm": 0.23632937669754028, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0116, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 119.5, | |
| "grad_norm": 0.2798871695995331, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0117, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 119.75, | |
| "grad_norm": 0.2946552634239197, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0126, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 120.0, | |
| "grad_norm": 0.31767404079437256, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0095, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 120.25, | |
| "grad_norm": 0.20685528218746185, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0117, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 120.5, | |
| "grad_norm": 0.23895318806171417, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0104, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 120.75, | |
| "grad_norm": 0.28225383162498474, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.008, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 121.0, | |
| "grad_norm": 0.2577228248119354, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0118, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 121.25, | |
| "grad_norm": 0.22346603870391846, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0182, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 121.5, | |
| "grad_norm": 0.17444445192813873, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0113, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 121.75, | |
| "grad_norm": 0.1593342125415802, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.011, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 122.0, | |
| "grad_norm": 0.22958680987358093, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0097, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 122.25, | |
| "grad_norm": 0.26986101269721985, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0117, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 122.5, | |
| "grad_norm": 0.2303800880908966, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0096, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 122.75, | |
| "grad_norm": 0.28559139370918274, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0105, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 123.0, | |
| "grad_norm": 0.20101706683635712, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0083, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 123.25, | |
| "grad_norm": 0.2006942480802536, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0082, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 123.5, | |
| "grad_norm": 0.2533215582370758, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0086, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 123.75, | |
| "grad_norm": 0.2552017867565155, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0107, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 124.0, | |
| "grad_norm": 0.4189196825027466, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0129, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 124.25, | |
| "grad_norm": 0.23606009781360626, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0074, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 124.5, | |
| "grad_norm": 0.25102007389068604, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0115, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 124.75, | |
| "grad_norm": 0.19480489194393158, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0123, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 125.0, | |
| "grad_norm": 0.28561678528785706, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0111, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 125.25, | |
| "grad_norm": 0.20620761811733246, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0102, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 125.5, | |
| "grad_norm": 0.33209967613220215, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0131, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 125.75, | |
| "grad_norm": 0.23769836127758026, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0093, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 126.0, | |
| "grad_norm": 0.23213163018226624, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.009, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 126.25, | |
| "grad_norm": 0.1278630793094635, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0086, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 126.5, | |
| "grad_norm": 0.2442750185728073, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0117, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 126.75, | |
| "grad_norm": 0.19907784461975098, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0085, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 127.0, | |
| "grad_norm": 0.21254177391529083, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.007, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 127.25, | |
| "grad_norm": 0.12308558821678162, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0079, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 127.5, | |
| "grad_norm": 0.12892907857894897, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0091, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 127.75, | |
| "grad_norm": 0.177867129445076, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0129, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 128.0, | |
| "grad_norm": 0.19430725276470184, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0088, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 128.25, | |
| "grad_norm": 0.21545594930648804, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0146, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 128.5, | |
| "grad_norm": 0.148879274725914, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0075, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 128.75, | |
| "grad_norm": 0.2902442514896393, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0097, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 129.0, | |
| "grad_norm": 0.23031367361545563, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0075, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 129.25, | |
| "grad_norm": 0.13191789388656616, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0091, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 129.5, | |
| "grad_norm": 0.15518267452716827, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0064, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 129.75, | |
| "grad_norm": 0.21780794858932495, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0081, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 130.0, | |
| "grad_norm": 0.17131520807743073, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0118, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 130.25, | |
| "grad_norm": 0.27276504039764404, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.012, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 130.5, | |
| "grad_norm": 0.22213535010814667, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0095, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 130.75, | |
| "grad_norm": 0.3615487217903137, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0115, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 131.0, | |
| "grad_norm": 0.1875467300415039, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0096, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 131.25, | |
| "grad_norm": 0.3611842393875122, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0071, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 131.5, | |
| "grad_norm": 0.1838546097278595, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0079, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 131.75, | |
| "grad_norm": 0.3148566782474518, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0099, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 132.0, | |
| "grad_norm": 0.14675936102867126, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0089, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 132.25, | |
| "grad_norm": 0.4316335916519165, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0089, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 132.5, | |
| "grad_norm": 0.19409996271133423, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0087, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 132.75, | |
| "grad_norm": 0.1797177940607071, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.0078, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 133.0, | |
| "grad_norm": 0.16304469108581543, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0098, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 133.25, | |
| "grad_norm": 0.18975849449634552, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0086, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 133.5, | |
| "grad_norm": 0.1938469260931015, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0073, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 133.75, | |
| "grad_norm": 0.18156132102012634, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0087, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 134.0, | |
| "grad_norm": 0.19061091542243958, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0087, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 134.25, | |
| "grad_norm": 0.18980200588703156, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0105, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 134.5, | |
| "grad_norm": 0.18439440429210663, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0105, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 134.75, | |
| "grad_norm": 0.22925539314746857, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0099, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 135.0, | |
| "grad_norm": 0.23364628851413727, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0073, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 135.25, | |
| "grad_norm": 0.3074020743370056, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0078, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 135.5, | |
| "grad_norm": 0.24079379439353943, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0084, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 135.75, | |
| "grad_norm": 0.28063663840293884, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0064, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 136.0, | |
| "grad_norm": 0.40149784088134766, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0073, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 136.25, | |
| "grad_norm": 0.17972993850708008, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0073, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 136.5, | |
| "grad_norm": 0.32252639532089233, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0127, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 136.75, | |
| "grad_norm": 0.17494434118270874, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.009, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 137.0, | |
| "grad_norm": 0.144280344247818, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0103, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 137.25, | |
| "grad_norm": 0.20007732510566711, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0056, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 137.5, | |
| "grad_norm": 0.21483971178531647, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.008, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 137.75, | |
| "grad_norm": 0.18367771804332733, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.008, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 138.0, | |
| "grad_norm": 0.16514046490192413, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0081, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 138.25, | |
| "grad_norm": 0.14982137084007263, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0058, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 138.5, | |
| "grad_norm": 0.10444972664117813, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.007, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 138.75, | |
| "grad_norm": 0.1786758452653885, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0138, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 139.0, | |
| "grad_norm": 0.1584421694278717, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0065, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 139.25, | |
| "grad_norm": 0.20712852478027344, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0083, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 139.5, | |
| "grad_norm": 0.1956453025341034, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0073, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 139.75, | |
| "grad_norm": 0.15756206214427948, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.0073, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 140.0, | |
| "grad_norm": 0.2822488844394684, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0101, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 140.25, | |
| "grad_norm": 0.2170170098543167, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0085, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 140.5, | |
| "grad_norm": 0.14802826941013336, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0094, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 140.75, | |
| "grad_norm": 0.19422343373298645, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0084, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 141.0, | |
| "grad_norm": 0.1759997010231018, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.008, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 141.25, | |
| "grad_norm": 0.20682178437709808, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0101, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 141.5, | |
| "grad_norm": 0.14558851718902588, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0106, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 141.75, | |
| "grad_norm": 0.11343575268983841, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0075, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 142.0, | |
| "grad_norm": 0.21630960702896118, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0088, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 142.25, | |
| "grad_norm": 0.1380923092365265, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0064, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 142.5, | |
| "grad_norm": 0.2421133816242218, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0097, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 142.75, | |
| "grad_norm": 0.21211817860603333, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0093, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 143.0, | |
| "grad_norm": 0.1438850462436676, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0092, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 143.25, | |
| "grad_norm": 0.10566523671150208, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0095, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 143.5, | |
| "grad_norm": 0.13774773478507996, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0103, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 143.75, | |
| "grad_norm": 0.2047201693058014, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0085, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 144.0, | |
| "grad_norm": 0.09229153394699097, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0069, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 144.25, | |
| "grad_norm": 0.2979641556739807, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0075, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 144.5, | |
| "grad_norm": 0.20068320631980896, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0082, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 144.75, | |
| "grad_norm": 0.1853872686624527, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0085, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 145.0, | |
| "grad_norm": 0.30404505133628845, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.0102, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 145.25, | |
| "grad_norm": 0.20279182493686676, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0075, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 145.5, | |
| "grad_norm": 0.2873016595840454, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0055, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 145.75, | |
| "grad_norm": 0.2151174396276474, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0089, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 146.0, | |
| "grad_norm": 0.13774517178535461, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0104, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 146.25, | |
| "grad_norm": 0.31033048033714294, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0081, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 146.5, | |
| "grad_norm": 0.18268409371376038, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0102, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 146.75, | |
| "grad_norm": 0.1759636551141739, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0131, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 147.0, | |
| "grad_norm": 0.12363249808549881, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0073, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 147.25, | |
| "grad_norm": 0.13208454847335815, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0116, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 147.5, | |
| "grad_norm": 0.25533878803253174, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0109, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 147.75, | |
| "grad_norm": 0.1883186548948288, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.007, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 148.0, | |
| "grad_norm": 0.23646943271160126, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.008, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 148.25, | |
| "grad_norm": 0.19803385436534882, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0076, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 148.5, | |
| "grad_norm": 0.09101427346467972, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0073, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 148.75, | |
| "grad_norm": 0.19984644651412964, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0068, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 149.0, | |
| "grad_norm": 0.1640167087316513, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.006, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 149.25, | |
| "grad_norm": 0.18579047918319702, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0053, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 149.5, | |
| "grad_norm": 0.152107372879982, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0064, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 149.75, | |
| "grad_norm": 0.16540779173374176, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.007, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 150.0, | |
| "grad_norm": 0.15848080813884735, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0062, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 150.25, | |
| "grad_norm": 0.21890725195407867, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.009, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 150.5, | |
| "grad_norm": 0.14560624957084656, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0059, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 150.75, | |
| "grad_norm": 0.16762469708919525, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0082, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 151.0, | |
| "grad_norm": 0.17747336626052856, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0076, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 151.25, | |
| "grad_norm": 0.11070247739553452, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.008, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 151.5, | |
| "grad_norm": 0.17882029712200165, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0068, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 151.75, | |
| "grad_norm": 0.235889732837677, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0064, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 152.0, | |
| "grad_norm": 0.31911328434944153, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0078, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 152.25, | |
| "grad_norm": 0.1889229267835617, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0089, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 152.5, | |
| "grad_norm": 0.27575376629829407, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0087, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 152.75, | |
| "grad_norm": 0.1767558604478836, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0063, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 153.0, | |
| "grad_norm": 0.15844060480594635, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0069, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 153.25, | |
| "grad_norm": 0.13357259333133698, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0069, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 153.5, | |
| "grad_norm": 0.1877880096435547, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0073, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 153.75, | |
| "grad_norm": 0.2021484524011612, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0071, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 154.0, | |
| "grad_norm": 0.1393587738275528, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0067, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 154.25, | |
| "grad_norm": 0.19375017285346985, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0063, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 154.5, | |
| "grad_norm": 0.12254150956869125, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0066, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 154.75, | |
| "grad_norm": 0.1326659768819809, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0077, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 155.0, | |
| "grad_norm": 0.1555151343345642, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0066, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 155.25, | |
| "grad_norm": 0.29415765404701233, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0087, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 155.5, | |
| "grad_norm": 0.20861037075519562, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0065, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 155.75, | |
| "grad_norm": 0.22378014028072357, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0061, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 156.0, | |
| "grad_norm": 0.1030755341053009, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0066, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 156.25, | |
| "grad_norm": 0.15028028190135956, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0058, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 156.5, | |
| "grad_norm": 0.21049033105373383, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0074, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 156.75, | |
| "grad_norm": 0.22261491417884827, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0087, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 157.0, | |
| "grad_norm": 0.15786141157150269, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0084, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 157.25, | |
| "grad_norm": 0.17248444259166718, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0068, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 157.5, | |
| "grad_norm": 0.22754375636577606, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0088, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 157.75, | |
| "grad_norm": 0.22142088413238525, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0062, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 158.0, | |
| "grad_norm": 0.17295658588409424, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0067, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 158.25, | |
| "grad_norm": 0.17775364220142365, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0053, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 158.5, | |
| "grad_norm": 0.1789032220840454, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0062, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 158.75, | |
| "grad_norm": 0.21051982045173645, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.008, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 159.0, | |
| "grad_norm": 0.2447560429573059, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0069, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 159.25, | |
| "grad_norm": 0.19626660645008087, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0061, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 159.5, | |
| "grad_norm": 0.1637764275074005, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0083, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 159.75, | |
| "grad_norm": 0.15946759283542633, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0065, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 160.0, | |
| "grad_norm": 0.17754492163658142, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0071, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 160.25, | |
| "grad_norm": 0.1859520822763443, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0055, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 160.5, | |
| "grad_norm": 0.15614630281925201, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0064, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 160.75, | |
| "grad_norm": 0.17567554116249084, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0051, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 161.0, | |
| "grad_norm": 0.15353727340698242, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.007, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 161.25, | |
| "grad_norm": 0.16327092051506042, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0055, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 161.5, | |
| "grad_norm": 0.13596777617931366, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0044, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 161.75, | |
| "grad_norm": 0.12858083844184875, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.005, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 162.0, | |
| "grad_norm": 0.15615972876548767, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.0062, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 162.25, | |
| "grad_norm": 0.3536645472049713, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0068, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 162.5, | |
| "grad_norm": 0.12195973843336105, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.0049, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 162.75, | |
| "grad_norm": 0.20225834846496582, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0066, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 163.0, | |
| "grad_norm": 0.17703953385353088, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0057, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 163.25, | |
| "grad_norm": 0.13128487765789032, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0051, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 163.5, | |
| "grad_norm": 0.23100434243679047, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0084, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 163.75, | |
| "grad_norm": 0.1476689726114273, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0062, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 164.0, | |
| "grad_norm": 0.16052044928073883, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0057, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 164.25, | |
| "grad_norm": 0.1788512021303177, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0073, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 164.5, | |
| "grad_norm": 0.1804950088262558, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0066, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 164.75, | |
| "grad_norm": 0.22115811705589294, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0105, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 165.0, | |
| "grad_norm": 0.19970916211605072, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0064, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 165.25, | |
| "grad_norm": 0.16419823467731476, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0059, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 165.5, | |
| "grad_norm": 0.2086798995733261, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0058, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 165.75, | |
| "grad_norm": 0.18059085309505463, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0064, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 166.0, | |
| "grad_norm": 0.2674952745437622, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0062, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 166.25, | |
| "grad_norm": 0.1242055743932724, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0071, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 166.5, | |
| "grad_norm": 0.10322605073451996, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.005, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 166.75, | |
| "grad_norm": 0.13184568285942078, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.006, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 167.0, | |
| "grad_norm": 0.08305729925632477, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0043, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 167.25, | |
| "grad_norm": 0.193691223859787, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.007, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 167.5, | |
| "grad_norm": 0.11765813082456589, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0049, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 167.75, | |
| "grad_norm": 0.12011758238077164, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0061, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 168.0, | |
| "grad_norm": 0.14065556228160858, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0049, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 168.25, | |
| "grad_norm": 0.15679895877838135, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0053, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 168.5, | |
| "grad_norm": 0.1384173333644867, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0051, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 168.75, | |
| "grad_norm": 0.12798947095870972, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0064, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 169.0, | |
| "grad_norm": 0.1402834951877594, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0074, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 169.25, | |
| "grad_norm": 0.1125163584947586, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0045, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 169.5, | |
| "grad_norm": 0.12739096581935883, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0056, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 169.75, | |
| "grad_norm": 0.11030346900224686, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0039, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 170.0, | |
| "grad_norm": 0.1563323736190796, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0056, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 170.25, | |
| "grad_norm": 0.1213504895567894, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0046, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 170.5, | |
| "grad_norm": 0.17844437062740326, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0078, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 170.75, | |
| "grad_norm": 0.16773608326911926, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.007, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 171.0, | |
| "grad_norm": 0.09557987004518509, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0047, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 171.25, | |
| "grad_norm": 0.19044405221939087, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0042, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 171.5, | |
| "grad_norm": 0.16859029233455658, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0069, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 171.75, | |
| "grad_norm": 0.20419123768806458, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0063, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 172.0, | |
| "grad_norm": 0.1309284120798111, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0065, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 172.25, | |
| "grad_norm": 0.20550362765789032, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0079, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 172.5, | |
| "grad_norm": 0.2013462632894516, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0075, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 172.75, | |
| "grad_norm": 0.14772872626781464, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0055, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 173.0, | |
| "grad_norm": 0.1719931662082672, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0061, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 173.25, | |
| "grad_norm": 0.11645688116550446, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0056, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 173.5, | |
| "grad_norm": 0.1909249871969223, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0085, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 173.75, | |
| "grad_norm": 0.23779629170894623, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0052, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 174.0, | |
| "grad_norm": 0.161439910531044, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0055, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 174.25, | |
| "grad_norm": 0.15141646564006805, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0061, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 174.5, | |
| "grad_norm": 0.09013067185878754, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0046, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 174.75, | |
| "grad_norm": 0.10369148850440979, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0077, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 175.0, | |
| "grad_norm": 0.13797912001609802, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0064, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 175.25, | |
| "grad_norm": 0.09309276193380356, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.007, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 175.5, | |
| "grad_norm": 0.15403063595294952, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0048, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 175.75, | |
| "grad_norm": 0.1723039150238037, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0049, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 176.0, | |
| "grad_norm": 0.22523076832294464, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0039, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 176.25, | |
| "grad_norm": 0.1994743049144745, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0051, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 176.5, | |
| "grad_norm": 0.21922245621681213, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0095, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 176.75, | |
| "grad_norm": 0.14716701209545135, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0076, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 177.0, | |
| "grad_norm": 0.1428631842136383, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0048, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 177.25, | |
| "grad_norm": 0.12290722876787186, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0044, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 177.5, | |
| "grad_norm": 0.10502045601606369, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0045, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 177.75, | |
| "grad_norm": 0.14660410583019257, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0054, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 178.0, | |
| "grad_norm": 0.1619156002998352, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0065, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 178.25, | |
| "grad_norm": 0.22219659388065338, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0045, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 178.5, | |
| "grad_norm": 0.13960318267345428, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0054, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 178.75, | |
| "grad_norm": 0.17951151728630066, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0056, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 179.0, | |
| "grad_norm": 0.18173860013484955, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0074, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 179.25, | |
| "grad_norm": 0.1216687336564064, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0047, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 179.5, | |
| "grad_norm": 0.1189367026090622, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0047, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 179.75, | |
| "grad_norm": 0.14440025389194489, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0047, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 180.0, | |
| "grad_norm": 0.11506537348031998, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0077, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 180.25, | |
| "grad_norm": 0.19950439035892487, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0051, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 180.5, | |
| "grad_norm": 0.10524751245975494, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0041, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 180.75, | |
| "grad_norm": 0.12314356863498688, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0046, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 181.0, | |
| "grad_norm": 0.0873304083943367, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.006, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 181.25, | |
| "grad_norm": 0.09135473519563675, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0041, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 181.5, | |
| "grad_norm": 0.13335098326206207, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0054, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 181.75, | |
| "grad_norm": 0.15308566391468048, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0042, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 182.0, | |
| "grad_norm": 0.12902922928333282, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0058, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 182.25, | |
| "grad_norm": 0.13669724762439728, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0044, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 182.5, | |
| "grad_norm": 0.13429778814315796, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0066, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 182.75, | |
| "grad_norm": 0.15294934809207916, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0066, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 183.0, | |
| "grad_norm": 0.336831659078598, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0075, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 183.25, | |
| "grad_norm": 0.14708802103996277, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0074, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 183.5, | |
| "grad_norm": 0.1348143070936203, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0048, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 183.75, | |
| "grad_norm": 0.1689818948507309, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0069, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 184.0, | |
| "grad_norm": 0.09998207539319992, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0054, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 184.25, | |
| "grad_norm": 0.12446059286594391, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0043, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 184.5, | |
| "grad_norm": 0.07528968900442123, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0055, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 184.75, | |
| "grad_norm": 0.12255606800317764, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0044, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 185.0, | |
| "grad_norm": 0.2018868625164032, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0054, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 185.25, | |
| "grad_norm": 0.11658778786659241, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0063, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 185.5, | |
| "grad_norm": 0.11764887720346451, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0044, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 185.75, | |
| "grad_norm": 0.14045648276805878, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.005, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 186.0, | |
| "grad_norm": 0.20185740292072296, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0053, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 186.25, | |
| "grad_norm": 0.19346244633197784, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0081, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 186.5, | |
| "grad_norm": 0.13126075267791748, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0049, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 186.75, | |
| "grad_norm": 0.17565523087978363, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.0037, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 187.0, | |
| "grad_norm": 0.17475895583629608, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0071, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 187.25, | |
| "grad_norm": 0.14766661822795868, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0043, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 187.5, | |
| "grad_norm": 0.20571599900722504, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0086, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 187.75, | |
| "grad_norm": 0.1388307809829712, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0049, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 188.0, | |
| "grad_norm": 0.21784968674182892, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0049, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 188.25, | |
| "grad_norm": 0.07575284689664841, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0045, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 188.5, | |
| "grad_norm": 0.15673498809337616, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0047, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 188.75, | |
| "grad_norm": 0.09218557924032211, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0057, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 189.0, | |
| "grad_norm": 0.15179941058158875, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0041, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 189.25, | |
| "grad_norm": 0.12532265484333038, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0035, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 189.5, | |
| "grad_norm": 0.14931413531303406, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0041, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 189.75, | |
| "grad_norm": 0.10902183502912521, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0077, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 190.0, | |
| "grad_norm": 0.19096030294895172, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0059, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 190.25, | |
| "grad_norm": 0.155643031001091, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0043, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 190.5, | |
| "grad_norm": 0.15102514624595642, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0043, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 190.75, | |
| "grad_norm": 0.07781857997179031, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0038, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 191.0, | |
| "grad_norm": 0.12486126273870468, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0041, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 191.25, | |
| "grad_norm": 0.09264986962080002, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0053, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 191.5, | |
| "grad_norm": 0.18692605197429657, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0043, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 191.75, | |
| "grad_norm": 0.06757988780736923, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0054, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 192.0, | |
| "grad_norm": 0.14858606457710266, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0053, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 192.25, | |
| "grad_norm": 0.10279243439435959, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0039, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 192.5, | |
| "grad_norm": 0.19160929322242737, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0052, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 192.75, | |
| "grad_norm": 0.16689233481884003, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0039, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 193.0, | |
| "grad_norm": 0.07786817848682404, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0042, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 193.25, | |
| "grad_norm": 0.07822061330080032, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.0039, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 193.5, | |
| "grad_norm": 0.11227503418922424, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0036, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 193.75, | |
| "grad_norm": 0.09792827814817429, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.005, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 194.0, | |
| "grad_norm": 0.10192038118839264, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0047, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 194.25, | |
| "grad_norm": 0.18177363276481628, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0043, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 194.5, | |
| "grad_norm": 0.08618608862161636, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0036, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 194.75, | |
| "grad_norm": 0.1522602140903473, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0047, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 195.0, | |
| "grad_norm": 0.06630995124578476, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0045, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 195.25, | |
| "grad_norm": 0.11913304775953293, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0037, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 195.5, | |
| "grad_norm": 0.07550306618213654, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0044, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 195.75, | |
| "grad_norm": 0.12006478756666183, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0053, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 196.0, | |
| "grad_norm": 0.10951386392116547, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.006, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 196.25, | |
| "grad_norm": 0.087790846824646, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.007, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 196.5, | |
| "grad_norm": 0.23221564292907715, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0051, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 196.75, | |
| "grad_norm": 0.1510801911354065, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0032, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 197.0, | |
| "grad_norm": 0.1127174124121666, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0041, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 197.25, | |
| "grad_norm": 0.1897778958082199, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0048, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 197.5, | |
| "grad_norm": 0.1312101036310196, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0056, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 197.75, | |
| "grad_norm": 0.16427397727966309, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0039, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 198.0, | |
| "grad_norm": 0.14600983262062073, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0036, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 198.25, | |
| "grad_norm": 0.13961434364318848, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0045, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 198.5, | |
| "grad_norm": 0.13402146100997925, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.004, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 198.75, | |
| "grad_norm": 0.07271775603294373, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0047, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 199.0, | |
| "grad_norm": 0.09896207600831985, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0058, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 199.25, | |
| "grad_norm": 0.21567489206790924, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0101, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 199.5, | |
| "grad_norm": 0.08785581588745117, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0061, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 199.75, | |
| "grad_norm": 0.1338099092245102, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0053, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 200.0, | |
| "grad_norm": 0.07626625150442123, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0099, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 200.25, | |
| "grad_norm": 0.10850115120410919, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0041, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 200.5, | |
| "grad_norm": 0.09877875447273254, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0045, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 200.75, | |
| "grad_norm": 0.1204601377248764, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0042, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 201.0, | |
| "grad_norm": 0.1761455088853836, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0046, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 201.25, | |
| "grad_norm": 0.06259100139141083, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.004, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 201.5, | |
| "grad_norm": 0.1302749216556549, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0042, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 201.75, | |
| "grad_norm": 0.1296391487121582, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0036, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 202.0, | |
| "grad_norm": 0.0929638147354126, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0036, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 202.25, | |
| "grad_norm": 0.09810598194599152, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.003, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 202.5, | |
| "grad_norm": 0.1328025460243225, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0049, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 202.75, | |
| "grad_norm": 0.19226421415805817, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0039, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 203.0, | |
| "grad_norm": 0.07218576222658157, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0037, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 203.25, | |
| "grad_norm": 0.10829392820596695, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0049, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 203.5, | |
| "grad_norm": 0.11217450350522995, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0041, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 203.75, | |
| "grad_norm": 0.10603217035531998, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0027, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 204.0, | |
| "grad_norm": 0.13703373074531555, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.005, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 204.25, | |
| "grad_norm": 0.1856386959552765, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0043, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 204.5, | |
| "grad_norm": 0.12523987889289856, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0037, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 204.75, | |
| "grad_norm": 0.17454007267951965, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0034, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 205.0, | |
| "grad_norm": 0.22606174647808075, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0053, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 205.25, | |
| "grad_norm": 0.12180226296186447, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0031, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 205.5, | |
| "grad_norm": 0.08695121854543686, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0038, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 205.75, | |
| "grad_norm": 0.13644380867481232, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0058, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 206.0, | |
| "grad_norm": 0.1191595196723938, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0041, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 206.25, | |
| "grad_norm": 0.08874445408582687, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0048, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 206.5, | |
| "grad_norm": 0.07907148450613022, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0032, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 206.75, | |
| "grad_norm": 0.10713402926921844, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0035, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 207.0, | |
| "grad_norm": 0.11685764789581299, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0037, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 207.25, | |
| "grad_norm": 0.13961216807365417, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0075, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 207.5, | |
| "grad_norm": 0.1829424947500229, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0045, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 207.75, | |
| "grad_norm": 0.1581076681613922, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0032, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 208.0, | |
| "grad_norm": 0.08495005965232849, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0033, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 208.25, | |
| "grad_norm": 0.1170332059264183, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0062, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 208.5, | |
| "grad_norm": 0.05432112142443657, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0038, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 208.75, | |
| "grad_norm": 0.07181187719106674, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0032, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 209.0, | |
| "grad_norm": 0.08231719583272934, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0044, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 209.25, | |
| "grad_norm": 0.0662977546453476, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0037, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 209.5, | |
| "grad_norm": 0.09705691784620285, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0038, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 209.75, | |
| "grad_norm": 0.08059106767177582, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0025, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 210.0, | |
| "grad_norm": 0.07411599904298782, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.004, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 210.25, | |
| "grad_norm": 0.12973953783512115, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0042, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 210.5, | |
| "grad_norm": 0.07746973633766174, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0034, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 210.75, | |
| "grad_norm": 0.08138252794742584, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0035, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 211.0, | |
| "grad_norm": 0.10311781615018845, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0041, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 211.25, | |
| "grad_norm": 0.10894886404275894, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0047, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 211.5, | |
| "grad_norm": 0.22221334278583527, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0033, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 211.75, | |
| "grad_norm": 0.11968138813972473, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.0032, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 212.0, | |
| "grad_norm": 0.14150996506214142, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.004, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 212.25, | |
| "grad_norm": 0.15094904601573944, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0038, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 212.5, | |
| "grad_norm": 0.14192979037761688, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0075, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 212.75, | |
| "grad_norm": 0.0771539956331253, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0046, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 213.0, | |
| "grad_norm": 0.14663337171077728, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0034, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 213.25, | |
| "grad_norm": 0.1873416304588318, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0045, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 213.5, | |
| "grad_norm": 0.09949974715709686, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0033, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 213.75, | |
| "grad_norm": 0.10987547785043716, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0033, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 214.0, | |
| "grad_norm": 0.10888543725013733, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0028, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 214.25, | |
| "grad_norm": 0.07901471853256226, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0033, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 214.5, | |
| "grad_norm": 0.16858859360218048, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0025, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 214.75, | |
| "grad_norm": 0.06866457313299179, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0041, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 215.0, | |
| "grad_norm": 0.07472264021635056, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0027, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 215.25, | |
| "grad_norm": 0.08445484936237335, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0026, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 215.5, | |
| "grad_norm": 0.09009315073490143, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.003, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 215.75, | |
| "grad_norm": 0.18223707377910614, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0041, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 216.0, | |
| "grad_norm": 0.11874091625213623, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0043, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 216.25, | |
| "grad_norm": 0.05684478208422661, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0025, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 216.5, | |
| "grad_norm": 0.10025288909673691, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.003, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 216.75, | |
| "grad_norm": 0.06758899241685867, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0063, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 217.0, | |
| "grad_norm": 0.10817938297986984, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0036, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 217.25, | |
| "grad_norm": 0.10704730451107025, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0042, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 217.5, | |
| "grad_norm": 0.07309766113758087, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0043, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 217.75, | |
| "grad_norm": 0.11254431307315826, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0032, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 218.0, | |
| "grad_norm": 0.08993148058652878, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0047, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 218.25, | |
| "grad_norm": 0.06215588003396988, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0025, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 218.5, | |
| "grad_norm": 0.05950621888041496, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0039, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 218.75, | |
| "grad_norm": 0.09767914563417435, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.004, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 219.0, | |
| "grad_norm": 0.17002426087856293, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0032, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 219.25, | |
| "grad_norm": 0.09146512299776077, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0047, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 219.5, | |
| "grad_norm": 0.0718497633934021, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0027, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 219.75, | |
| "grad_norm": 0.11396536231040955, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.003, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 220.0, | |
| "grad_norm": 0.1519760638475418, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0051, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 220.25, | |
| "grad_norm": 0.07514660060405731, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0039, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 220.5, | |
| "grad_norm": 0.20433557033538818, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.004, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 220.75, | |
| "grad_norm": 0.07488595694303513, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0055, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 221.0, | |
| "grad_norm": 0.15067219734191895, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0038, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 221.25, | |
| "grad_norm": 0.10581692308187485, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0033, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 221.5, | |
| "grad_norm": 0.13871265947818756, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0027, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 221.75, | |
| "grad_norm": 0.11503514647483826, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0031, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 222.0, | |
| "grad_norm": 0.14610545337200165, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0032, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 222.25, | |
| "grad_norm": 0.05525064840912819, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0022, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 222.5, | |
| "grad_norm": 0.1545409858226776, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.004, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 222.75, | |
| "grad_norm": 0.06882253289222717, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0034, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 223.0, | |
| "grad_norm": 0.07723452895879745, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0033, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 223.25, | |
| "grad_norm": 0.05147286877036095, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0033, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 223.5, | |
| "grad_norm": 0.08452828973531723, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0027, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 223.75, | |
| "grad_norm": 0.07312655448913574, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0028, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 224.0, | |
| "grad_norm": 0.19344282150268555, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0028, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 224.25, | |
| "grad_norm": 0.049786657094955444, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0035, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 224.5, | |
| "grad_norm": 0.09912026673555374, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.004, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 224.75, | |
| "grad_norm": 0.15381453931331635, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0028, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 225.0, | |
| "grad_norm": 0.09527960419654846, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0081, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 225.25, | |
| "grad_norm": 0.046797893941402435, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.0028, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 225.5, | |
| "grad_norm": 0.12533637881278992, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0047, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 225.75, | |
| "grad_norm": 0.14190489053726196, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0026, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 226.0, | |
| "grad_norm": 0.08050890266895294, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0023, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 226.25, | |
| "grad_norm": 0.10872689634561539, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0038, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 226.5, | |
| "grad_norm": 0.07601405680179596, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0022, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 226.75, | |
| "grad_norm": 0.22459723055362701, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0041, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 227.0, | |
| "grad_norm": 0.11310923099517822, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0033, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 227.25, | |
| "grad_norm": 0.08301689475774765, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0029, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 227.5, | |
| "grad_norm": 0.06724400073289871, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0044, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 227.75, | |
| "grad_norm": 0.13066349923610687, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0032, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 228.0, | |
| "grad_norm": 0.18874166905879974, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0038, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 228.25, | |
| "grad_norm": 0.087242990732193, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0034, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 228.5, | |
| "grad_norm": 0.1420496702194214, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0034, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 228.75, | |
| "grad_norm": 0.04035526141524315, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0022, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 229.0, | |
| "grad_norm": 0.08264325559139252, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0024, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 229.25, | |
| "grad_norm": 0.06740362197160721, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0045, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 229.5, | |
| "grad_norm": 0.041461482644081116, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0036, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 229.75, | |
| "grad_norm": 0.06524401158094406, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0031, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 230.0, | |
| "grad_norm": 0.09731091558933258, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0053, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 230.25, | |
| "grad_norm": 0.15023453533649445, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0022, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 230.5, | |
| "grad_norm": 0.08554910868406296, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0055, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 230.75, | |
| "grad_norm": 0.060999348759651184, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0026, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 231.0, | |
| "grad_norm": 0.10573070496320724, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.005, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 231.25, | |
| "grad_norm": 0.07315921783447266, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0045, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 231.5, | |
| "grad_norm": 0.08707199990749359, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.003, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 231.75, | |
| "grad_norm": 0.03300639986991882, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0034, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 232.0, | |
| "grad_norm": 0.06528275460004807, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0027, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 232.25, | |
| "grad_norm": 0.10502535104751587, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0037, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 232.5, | |
| "grad_norm": 0.1114937961101532, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0033, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 232.75, | |
| "grad_norm": 0.1334535777568817, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0032, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 233.0, | |
| "grad_norm": 0.15189684927463531, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0045, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 233.25, | |
| "grad_norm": 0.03467800095677376, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0026, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 233.5, | |
| "grad_norm": 0.16183927655220032, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0037, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 233.75, | |
| "grad_norm": 0.06361699104309082, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0055, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 234.0, | |
| "grad_norm": 0.08980465680360794, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0034, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 234.25, | |
| "grad_norm": 0.05992332473397255, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0025, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 234.5, | |
| "grad_norm": 0.17438055574893951, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0051, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 234.75, | |
| "grad_norm": 0.058787088841199875, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0049, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 235.0, | |
| "grad_norm": 0.15100450813770294, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0041, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 235.25, | |
| "grad_norm": 0.08583474904298782, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0025, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 235.5, | |
| "grad_norm": 0.06979444622993469, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0033, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 235.75, | |
| "grad_norm": 0.11030140519142151, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0034, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 236.0, | |
| "grad_norm": 0.06448012590408325, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0027, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 236.25, | |
| "grad_norm": 0.167857825756073, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0026, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 236.5, | |
| "grad_norm": 0.09123705327510834, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0036, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 236.75, | |
| "grad_norm": 0.06160133704543114, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0039, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 237.0, | |
| "grad_norm": 0.08287883549928665, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0029, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 237.25, | |
| "grad_norm": 0.05484398826956749, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0023, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 237.5, | |
| "grad_norm": 0.04693023860454559, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0041, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 237.75, | |
| "grad_norm": 0.14454308152198792, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0035, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 238.0, | |
| "grad_norm": 0.06262636929750443, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.002, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 238.25, | |
| "grad_norm": 0.03196113556623459, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.006, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 238.5, | |
| "grad_norm": 0.08593147993087769, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0049, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 238.75, | |
| "grad_norm": 0.0887538343667984, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0031, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 239.0, | |
| "grad_norm": 0.13892526924610138, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0038, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 239.25, | |
| "grad_norm": 0.1304452270269394, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0038, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 239.5, | |
| "grad_norm": 0.1446593701839447, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0035, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 239.75, | |
| "grad_norm": 0.1164969801902771, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0032, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 240.0, | |
| "grad_norm": 0.15182846784591675, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0036, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 240.25, | |
| "grad_norm": 0.02840401977300644, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0042, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 240.5, | |
| "grad_norm": 0.09419921785593033, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.002, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 240.75, | |
| "grad_norm": 0.16660663485527039, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0043, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 241.0, | |
| "grad_norm": 0.13299746811389923, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0029, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 241.25, | |
| "grad_norm": 0.12092949450016022, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0061, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 241.5, | |
| "grad_norm": 0.060206662863492966, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0025, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 241.75, | |
| "grad_norm": 0.08426082134246826, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0028, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 242.0, | |
| "grad_norm": 0.0851016417145729, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0042, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 242.25, | |
| "grad_norm": 0.07830564677715302, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0023, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 242.5, | |
| "grad_norm": 0.11447057127952576, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0043, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 242.75, | |
| "grad_norm": 0.05448366329073906, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0025, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 243.0, | |
| "grad_norm": 0.03906814008951187, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0027, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 243.25, | |
| "grad_norm": 0.039081498980522156, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0023, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 243.5, | |
| "grad_norm": 0.027017461135983467, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0034, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 243.75, | |
| "grad_norm": 0.09606248885393143, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0048, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 244.0, | |
| "grad_norm": 0.05149327963590622, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0033, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 244.25, | |
| "grad_norm": 0.14646421372890472, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.004, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 244.5, | |
| "grad_norm": 0.06484408676624298, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0025, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 244.75, | |
| "grad_norm": 0.037307512015104294, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0022, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 245.0, | |
| "grad_norm": 0.1416601985692978, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0028, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 245.25, | |
| "grad_norm": 0.18991032242774963, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0035, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 245.5, | |
| "grad_norm": 0.07187339663505554, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0023, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 245.75, | |
| "grad_norm": 0.07545599341392517, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0028, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 246.0, | |
| "grad_norm": 0.10132917016744614, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0037, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 246.25, | |
| "grad_norm": 0.06773098558187485, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0049, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 246.5, | |
| "grad_norm": 0.19954784214496613, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0029, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 246.75, | |
| "grad_norm": 0.1477789282798767, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0027, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 247.0, | |
| "grad_norm": 0.063265360891819, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0066, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 247.25, | |
| "grad_norm": 0.054880768060684204, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0033, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 247.5, | |
| "grad_norm": 0.02149050496518612, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0031, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 247.75, | |
| "grad_norm": 0.18347109854221344, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0036, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 248.0, | |
| "grad_norm": 0.0956336110830307, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0044, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 248.25, | |
| "grad_norm": 0.18302513659000397, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0031, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 248.5, | |
| "grad_norm": 0.08520723134279251, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0071, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 248.75, | |
| "grad_norm": 0.039946626871824265, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0026, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 249.0, | |
| "grad_norm": 0.17577043175697327, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0047, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 249.25, | |
| "grad_norm": 0.1010713055729866, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0034, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 249.5, | |
| "grad_norm": 0.04403097182512283, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0041, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 249.75, | |
| "grad_norm": 0.10400509834289551, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0024, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 250.0, | |
| "grad_norm": 0.0933779925107956, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0028, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 250.0, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.015269231259077788, | |
| "train_runtime": 7033.4938, | |
| "train_samples_per_second": 45.497, | |
| "train_steps_per_second": 1.422 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 250, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |