{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 33.222591362126245,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.03322259136212625,
      "grad_norm": 16.49517250061035,
      "learning_rate": 1.8e-06,
      "loss": 1.1556,
      "step": 10
    },
    {
      "epoch": 0.0664451827242525,
      "grad_norm": 5.904155731201172,
      "learning_rate": 3.8e-06,
      "loss": 0.9225,
      "step": 20
    },
    {
      "epoch": 0.09966777408637874,
      "grad_norm": 3.602107048034668,
      "learning_rate": 5.8e-06,
      "loss": 0.4007,
      "step": 30
    },
    {
      "epoch": 0.132890365448505,
      "grad_norm": 1.9652377367019653,
      "learning_rate": 7.8e-06,
      "loss": 0.2561,
      "step": 40
    },
    {
      "epoch": 0.16611295681063123,
      "grad_norm": 2.4883804321289062,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.2118,
      "step": 50
    },
    {
      "epoch": 0.19933554817275748,
      "grad_norm": 1.733266830444336,
      "learning_rate": 1.18e-05,
      "loss": 0.1852,
      "step": 60
    },
    {
      "epoch": 0.23255813953488372,
      "grad_norm": 1.3866091966629028,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.1448,
      "step": 70
    },
    {
      "epoch": 0.26578073089701,
      "grad_norm": 1.1435869932174683,
      "learning_rate": 1.58e-05,
      "loss": 0.1502,
      "step": 80
    },
    {
      "epoch": 0.29900332225913623,
      "grad_norm": 1.0303682088851929,
      "learning_rate": 1.78e-05,
      "loss": 0.1247,
      "step": 90
    },
    {
      "epoch": 0.33222591362126247,
      "grad_norm": 1.3942605257034302,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.1097,
      "step": 100
    },
    {
      "epoch": 0.3654485049833887,
      "grad_norm": 0.9301076531410217,
      "learning_rate": 2.18e-05,
      "loss": 0.1061,
      "step": 110
    },
    {
      "epoch": 0.39867109634551495,
      "grad_norm": 1.4038463830947876,
      "learning_rate": 2.38e-05,
      "loss": 0.09,
      "step": 120
    },
    {
      "epoch": 0.4318936877076412,
      "grad_norm": 0.9942566752433777,
      "learning_rate": 2.58e-05,
      "loss": 0.0845,
      "step": 130
    },
    {
      "epoch": 0.46511627906976744,
      "grad_norm": 0.8723886609077454,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.0823,
      "step": 140
    },
    {
      "epoch": 0.4983388704318937,
      "grad_norm": 0.7243990302085876,
      "learning_rate": 2.98e-05,
      "loss": 0.0692,
      "step": 150
    },
    {
      "epoch": 0.53156146179402,
      "grad_norm": 1.035720944404602,
      "learning_rate": 3.18e-05,
      "loss": 0.0721,
      "step": 160
    },
    {
      "epoch": 0.5647840531561462,
      "grad_norm": 1.8913172483444214,
      "learning_rate": 3.38e-05,
      "loss": 0.0761,
      "step": 170
    },
    {
      "epoch": 0.5980066445182725,
      "grad_norm": 1.1191561222076416,
      "learning_rate": 3.58e-05,
      "loss": 0.081,
      "step": 180
    },
    {
      "epoch": 0.6312292358803987,
      "grad_norm": 1.3315305709838867,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.0708,
      "step": 190
    },
    {
      "epoch": 0.6644518272425249,
      "grad_norm": 1.152005910873413,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.0671,
      "step": 200
    },
    {
      "epoch": 0.6976744186046512,
      "grad_norm": 0.9747628569602966,
      "learning_rate": 4.18e-05,
      "loss": 0.0678,
      "step": 210
    },
    {
      "epoch": 0.7308970099667774,
      "grad_norm": 0.9837548732757568,
      "learning_rate": 4.38e-05,
      "loss": 0.0648,
      "step": 220
    },
    {
      "epoch": 0.7641196013289037,
      "grad_norm": 1.23843514919281,
      "learning_rate": 4.58e-05,
      "loss": 0.0598,
      "step": 230
    },
    {
      "epoch": 0.7973421926910299,
      "grad_norm": 1.3136545419692993,
      "learning_rate": 4.78e-05,
      "loss": 0.0646,
      "step": 240
    },
    {
      "epoch": 0.8305647840531561,
      "grad_norm": 0.700904130935669,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0597,
      "step": 250
    },
    {
      "epoch": 0.8637873754152824,
      "grad_norm": 0.6108095645904541,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0631,
      "step": 260
    },
    {
      "epoch": 0.8970099667774086,
      "grad_norm": 0.9979882836341858,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0654,
      "step": 270
    },
    {
      "epoch": 0.9302325581395349,
      "grad_norm": 0.8695108890533447,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.0621,
      "step": 280
    },
    {
      "epoch": 0.9634551495016611,
      "grad_norm": 0.5228680968284607,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.0615,
      "step": 290
    },
    {
      "epoch": 0.9966777408637874,
      "grad_norm": 0.7989741563796997,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0572,
      "step": 300
    },
    {
      "epoch": 1.0299003322259137,
      "grad_norm": 1.0117868185043335,
      "learning_rate": 6.18e-05,
      "loss": 0.0832,
      "step": 310
    },
    {
      "epoch": 1.06312292358804,
      "grad_norm": 0.6062241792678833,
      "learning_rate": 6.38e-05,
      "loss": 0.0601,
      "step": 320
    },
    {
      "epoch": 1.0963455149501662,
      "grad_norm": 0.8119208216667175,
      "learning_rate": 6.58e-05,
      "loss": 0.061,
      "step": 330
    },
    {
      "epoch": 1.1295681063122924,
      "grad_norm": 1.240936517715454,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0579,
      "step": 340
    },
    {
      "epoch": 1.1627906976744187,
      "grad_norm": 0.6764455437660217,
      "learning_rate": 6.98e-05,
      "loss": 0.0593,
      "step": 350
    },
    {
      "epoch": 1.196013289036545,
      "grad_norm": 0.9562680721282959,
      "learning_rate": 7.18e-05,
      "loss": 0.0566,
      "step": 360
    },
    {
      "epoch": 1.2292358803986712,
      "grad_norm": 0.909827709197998,
      "learning_rate": 7.38e-05,
      "loss": 0.0601,
      "step": 370
    },
    {
      "epoch": 1.2624584717607974,
      "grad_norm": 0.9948165416717529,
      "learning_rate": 7.58e-05,
      "loss": 0.0564,
      "step": 380
    },
    {
      "epoch": 1.2956810631229236,
      "grad_norm": 0.8981932997703552,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0579,
      "step": 390
    },
    {
      "epoch": 1.3289036544850499,
      "grad_norm": 1.1012845039367676,
      "learning_rate": 7.98e-05,
      "loss": 0.0563,
      "step": 400
    },
    {
      "epoch": 1.3621262458471761,
      "grad_norm": 0.6229888200759888,
      "learning_rate": 8.18e-05,
      "loss": 0.0544,
      "step": 410
    },
    {
      "epoch": 1.3953488372093024,
      "grad_norm": 0.6055900454521179,
      "learning_rate": 8.38e-05,
      "loss": 0.0491,
      "step": 420
    },
    {
      "epoch": 1.4285714285714286,
      "grad_norm": 0.662727952003479,
      "learning_rate": 8.58e-05,
      "loss": 0.0526,
      "step": 430
    },
    {
      "epoch": 1.4617940199335548,
      "grad_norm": 0.5412904024124146,
      "learning_rate": 8.78e-05,
      "loss": 0.0524,
      "step": 440
    },
    {
      "epoch": 1.495016611295681,
      "grad_norm": 0.552821934223175,
      "learning_rate": 8.98e-05,
      "loss": 0.0518,
      "step": 450
    },
    {
      "epoch": 1.5282392026578073,
      "grad_norm": 0.7804833650588989,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0509,
      "step": 460
    },
    {
      "epoch": 1.5614617940199336,
      "grad_norm": 0.6241787672042847,
      "learning_rate": 9.38e-05,
      "loss": 0.0506,
      "step": 470
    },
    {
      "epoch": 1.5946843853820598,
      "grad_norm": 0.6631738543510437,
      "learning_rate": 9.58e-05,
      "loss": 0.0489,
      "step": 480
    },
    {
      "epoch": 1.627906976744186,
      "grad_norm": 0.5631969571113586,
      "learning_rate": 9.78e-05,
      "loss": 0.0399,
      "step": 490
    },
    {
      "epoch": 1.6611295681063123,
      "grad_norm": 0.6725241541862488,
      "learning_rate": 9.98e-05,
      "loss": 0.0493,
      "step": 500
    },
    {
      "epoch": 1.6943521594684385,
      "grad_norm": 0.7589104175567627,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.047,
      "step": 510
    },
    {
      "epoch": 1.7275747508305648,
      "grad_norm": 0.7991833090782166,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0473,
      "step": 520
    },
    {
      "epoch": 1.760797342192691,
      "grad_norm": 0.7787967920303345,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0494,
      "step": 530
    },
    {
      "epoch": 1.7940199335548173,
      "grad_norm": 0.6019904613494873,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.048,
      "step": 540
    },
    {
      "epoch": 1.8272425249169435,
      "grad_norm": 0.5640949606895447,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0454,
      "step": 550
    },
    {
      "epoch": 1.8604651162790697,
      "grad_norm": 0.44856587052345276,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0455,
      "step": 560
    },
    {
      "epoch": 1.893687707641196,
      "grad_norm": 0.6371676921844482,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0429,
      "step": 570
    },
    {
      "epoch": 1.9269102990033222,
      "grad_norm": 0.8966260552406311,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.048,
      "step": 580
    },
    {
      "epoch": 1.9601328903654485,
      "grad_norm": 0.5906923413276672,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.046,
      "step": 590
    },
    {
      "epoch": 1.9933554817275747,
      "grad_norm": 0.5524625778198242,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0476,
      "step": 600
    },
    {
      "epoch": 2.026578073089701,
      "grad_norm": 0.5328683853149414,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0485,
      "step": 610
    },
    {
      "epoch": 2.0598006644518274,
      "grad_norm": 0.7532830834388733,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.0429,
      "step": 620
    },
    {
      "epoch": 2.0930232558139537,
      "grad_norm": 0.5240247845649719,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0367,
      "step": 630
    },
    {
      "epoch": 2.12624584717608,
      "grad_norm": 0.5653980374336243,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0396,
      "step": 640
    },
    {
      "epoch": 2.159468438538206,
      "grad_norm": 0.7227243185043335,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0396,
      "step": 650
    },
    {
      "epoch": 2.1926910299003324,
      "grad_norm": 0.5951940417289734,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0412,
      "step": 660
    },
    {
      "epoch": 2.2259136212624586,
      "grad_norm": 0.5065795183181763,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0414,
      "step": 670
    },
    {
      "epoch": 2.259136212624585,
      "grad_norm": 0.5497207045555115,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0426,
      "step": 680
    },
    {
      "epoch": 2.292358803986711,
      "grad_norm": 0.462469220161438,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0421,
      "step": 690
    },
    {
      "epoch": 2.3255813953488373,
      "grad_norm": 0.6984947919845581,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0446,
      "step": 700
    },
    {
      "epoch": 2.3588039867109636,
      "grad_norm": 0.443020224571228,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0462,
      "step": 710
    },
    {
      "epoch": 2.39202657807309,
      "grad_norm": 0.5449220538139343,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0446,
      "step": 720
    },
    {
      "epoch": 2.425249169435216,
      "grad_norm": 0.5548967123031616,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0391,
      "step": 730
    },
    {
      "epoch": 2.4584717607973423,
      "grad_norm": 0.8857158422470093,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0382,
      "step": 740
    },
    {
      "epoch": 2.4916943521594686,
      "grad_norm": 0.8469329476356506,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0362,
      "step": 750
    },
    {
      "epoch": 2.524916943521595,
      "grad_norm": 0.4711757004261017,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.034,
      "step": 760
    },
    {
      "epoch": 2.558139534883721,
      "grad_norm": 0.4842841923236847,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0298,
      "step": 770
    },
    {
      "epoch": 2.5913621262458473,
      "grad_norm": 0.8352331519126892,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0368,
      "step": 780
    },
    {
      "epoch": 2.6245847176079735,
      "grad_norm": 0.7118296027183533,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.033,
      "step": 790
    },
    {
      "epoch": 2.6578073089700998,
      "grad_norm": 0.36444178223609924,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0402,
      "step": 800
    },
    {
      "epoch": 2.691029900332226,
      "grad_norm": 0.4943622052669525,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0397,
      "step": 810
    },
    {
      "epoch": 2.7242524916943522,
      "grad_norm": 0.5186375975608826,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0391,
      "step": 820
    },
    {
      "epoch": 2.7574750830564785,
      "grad_norm": 0.4055125117301941,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.0373,
      "step": 830
    },
    {
      "epoch": 2.7906976744186047,
      "grad_norm": 0.5435572862625122,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0321,
      "step": 840
    },
    {
      "epoch": 2.823920265780731,
      "grad_norm": 0.5882667303085327,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0372,
      "step": 850
    },
    {
      "epoch": 2.857142857142857,
      "grad_norm": 0.6232360601425171,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0317,
      "step": 860
    },
    {
      "epoch": 2.8903654485049834,
      "grad_norm": 0.47762417793273926,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0345,
      "step": 870
    },
    {
      "epoch": 2.9235880398671097,
      "grad_norm": 0.645271360874176,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.0338,
      "step": 880
    },
    {
      "epoch": 2.956810631229236,
      "grad_norm": 0.6059306263923645,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0326,
      "step": 890
    },
    {
      "epoch": 2.990033222591362,
      "grad_norm": 0.4821760356426239,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0374,
      "step": 900
    },
    {
      "epoch": 3.0232558139534884,
      "grad_norm": 0.6130004525184631,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.0283,
      "step": 910
    },
    {
      "epoch": 3.0564784053156147,
      "grad_norm": 0.5384393930435181,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.0286,
      "step": 920
    },
    {
      "epoch": 3.089700996677741,
      "grad_norm": 0.6059784293174744,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0261,
      "step": 930
    },
    {
      "epoch": 3.122923588039867,
      "grad_norm": 0.6603447794914246,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0301,
      "step": 940
    },
    {
      "epoch": 3.1561461794019934,
      "grad_norm": 0.5607938766479492,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.033,
      "step": 950
    },
    {
      "epoch": 3.1893687707641196,
      "grad_norm": 0.3325658142566681,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0352,
      "step": 960
    },
    {
      "epoch": 3.222591362126246,
      "grad_norm": 0.5348853468894958,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0312,
      "step": 970
    },
    {
      "epoch": 3.255813953488372,
      "grad_norm": 0.5491706728935242,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0279,
      "step": 980
    },
    {
      "epoch": 3.2890365448504983,
      "grad_norm": 0.3640613257884979,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0292,
      "step": 990
    },
    {
      "epoch": 3.3222591362126246,
      "grad_norm": 0.40242475271224976,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.0269,
      "step": 1000
    },
    {
      "epoch": 3.355481727574751,
      "grad_norm": 0.3187004625797272,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0278,
      "step": 1010
    },
    {
      "epoch": 3.388704318936877,
      "grad_norm": 0.5363311767578125,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.0266,
      "step": 1020
    },
    {
      "epoch": 3.4219269102990033,
      "grad_norm": 0.5733344554901123,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0305,
      "step": 1030
    },
    {
      "epoch": 3.4551495016611296,
      "grad_norm": 0.4922335147857666,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0307,
      "step": 1040
    },
    {
      "epoch": 3.488372093023256,
      "grad_norm": 0.6312623620033264,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.0281,
      "step": 1050
    },
    {
      "epoch": 3.521594684385382,
      "grad_norm": 0.4072914719581604,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0347,
      "step": 1060
    },
    {
      "epoch": 3.5548172757475083,
      "grad_norm": 0.386290580034256,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0397,
      "step": 1070
    },
    {
      "epoch": 3.5880398671096345,
      "grad_norm": 0.4319952130317688,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.028,
      "step": 1080
    },
    {
      "epoch": 3.6212624584717608,
      "grad_norm": 0.3090722858905792,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0294,
      "step": 1090
    },
    {
      "epoch": 3.654485049833887,
      "grad_norm": 0.41966256499290466,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0283,
      "step": 1100
    },
    {
      "epoch": 3.6877076411960132,
      "grad_norm": 0.34183767437934875,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0311,
      "step": 1110
    },
    {
      "epoch": 3.7209302325581395,
      "grad_norm": 0.29612553119659424,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0263,
      "step": 1120
    },
    {
      "epoch": 3.7541528239202657,
      "grad_norm": 0.29920074343681335,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0315,
      "step": 1130
    },
    {
      "epoch": 3.787375415282392,
      "grad_norm": 0.4629840552806854,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0297,
      "step": 1140
    },
    {
      "epoch": 3.820598006644518,
      "grad_norm": 0.6953020691871643,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0279,
      "step": 1150
    },
    {
      "epoch": 3.8538205980066444,
      "grad_norm": 0.4963432848453522,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0276,
      "step": 1160
    },
    {
      "epoch": 3.8870431893687707,
      "grad_norm": 0.1942342221736908,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0275,
      "step": 1170
    },
    {
      "epoch": 3.920265780730897,
      "grad_norm": 0.46550989151000977,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0278,
      "step": 1180
    },
    {
      "epoch": 3.953488372093023,
      "grad_norm": 0.5046531558036804,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.0292,
      "step": 1190
    },
    {
      "epoch": 3.9867109634551494,
      "grad_norm": 0.7471413016319275,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0302,
      "step": 1200
    },
    {
      "epoch": 4.019933554817276,
      "grad_norm": 0.4875882863998413,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0321,
      "step": 1210
    },
    {
      "epoch": 4.053156146179402,
      "grad_norm": 0.4417405426502228,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0303,
      "step": 1220
    },
    {
      "epoch": 4.086378737541528,
      "grad_norm": 0.3285420835018158,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0308,
      "step": 1230
    },
    {
      "epoch": 4.119601328903655,
      "grad_norm": 0.383506178855896,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0308,
      "step": 1240
    },
    {
      "epoch": 4.152823920265781,
      "grad_norm": 0.4398779571056366,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0292,
      "step": 1250
    },
    {
      "epoch": 4.186046511627907,
      "grad_norm": 0.5198376178741455,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0325,
      "step": 1260
    },
    {
      "epoch": 4.219269102990033,
      "grad_norm": 0.4738619923591614,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.0251,
      "step": 1270
    },
    {
      "epoch": 4.25249169435216,
      "grad_norm": 0.33822280168533325,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0312,
      "step": 1280
    },
    {
      "epoch": 4.285714285714286,
      "grad_norm": 0.500541090965271,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0348,
      "step": 1290
    },
    {
      "epoch": 4.318936877076412,
      "grad_norm": 0.3859885632991791,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0303,
      "step": 1300
    },
    {
      "epoch": 4.352159468438538,
      "grad_norm": 0.43077021837234497,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0276,
      "step": 1310
    },
    {
      "epoch": 4.385382059800665,
      "grad_norm": 0.4338962435722351,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0272,
      "step": 1320
    },
    {
      "epoch": 4.4186046511627906,
      "grad_norm": 0.5305380821228027,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0224,
      "step": 1330
    },
    {
      "epoch": 4.451827242524917,
      "grad_norm": 0.181693434715271,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.024,
      "step": 1340
    },
    {
      "epoch": 4.485049833887043,
      "grad_norm": 0.35233795642852783,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0244,
      "step": 1350
    },
    {
      "epoch": 4.51827242524917,
      "grad_norm": 0.5381154417991638,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0261,
      "step": 1360
    },
    {
      "epoch": 4.5514950166112955,
      "grad_norm": 0.43545234203338623,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0238,
      "step": 1370
    },
    {
      "epoch": 4.584717607973422,
      "grad_norm": 0.2619587481021881,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.0265,
      "step": 1380
    },
    {
      "epoch": 4.617940199335548,
      "grad_norm": 0.3800243139266968,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.0227,
      "step": 1390
    },
    {
      "epoch": 4.651162790697675,
      "grad_norm": 0.38735443353652954,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0259,
      "step": 1400
    },
    {
      "epoch": 4.6843853820598005,
      "grad_norm": 0.4869486689567566,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0269,
      "step": 1410
    },
    {
      "epoch": 4.717607973421927,
      "grad_norm": 0.23203584551811218,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0303,
      "step": 1420
    },
    {
      "epoch": 4.750830564784053,
      "grad_norm": 0.37429264187812805,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0257,
      "step": 1430
    },
    {
      "epoch": 4.78405315614618,
      "grad_norm": 0.5015078186988831,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.032,
      "step": 1440
    },
    {
      "epoch": 4.8172757475083055,
      "grad_norm": 0.3568253815174103,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0277,
      "step": 1450
    },
    {
      "epoch": 4.850498338870432,
      "grad_norm": 0.3456842303276062,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0249,
      "step": 1460
    },
    {
      "epoch": 4.883720930232558,
      "grad_norm": 0.3531418442726135,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0245,
      "step": 1470
    },
    {
      "epoch": 4.916943521594685,
      "grad_norm": 0.2724597752094269,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0305,
      "step": 1480
    },
    {
      "epoch": 4.95016611295681,
      "grad_norm": 0.2732029855251312,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.0233,
      "step": 1490
    },
    {
      "epoch": 4.983388704318937,
      "grad_norm": 0.3414079546928406,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0248,
      "step": 1500
    },
    {
      "epoch": 5.016611295681063,
      "grad_norm": 0.5630635619163513,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0329,
      "step": 1510
    },
    {
      "epoch": 5.04983388704319,
      "grad_norm": 0.4476262629032135,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.0254,
      "step": 1520
    },
    {
      "epoch": 5.083056478405315,
      "grad_norm": 0.3482656478881836,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.03,
      "step": 1530
    },
    {
      "epoch": 5.116279069767442,
      "grad_norm": 0.42722585797309875,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0226,
      "step": 1540
    },
    {
      "epoch": 5.149501661129568,
      "grad_norm": 0.44865331053733826,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.035,
      "step": 1550
    },
    {
      "epoch": 5.1827242524916945,
      "grad_norm": 0.3667987287044525,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.025,
      "step": 1560
    },
    {
      "epoch": 5.21594684385382,
      "grad_norm": 0.2647327780723572,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.0268,
      "step": 1570
    },
    {
      "epoch": 5.249169435215947,
      "grad_norm": 0.32716986536979675,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0242,
      "step": 1580
    },
    {
      "epoch": 5.282392026578073,
      "grad_norm": 0.32449787855148315,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0233,
      "step": 1590
    },
    {
      "epoch": 5.3156146179401995,
      "grad_norm": 0.417737752199173,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0251,
      "step": 1600
    },
    {
      "epoch": 5.348837209302325,
      "grad_norm": 0.33530154824256897,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0238,
      "step": 1610
    },
    {
      "epoch": 5.382059800664452,
      "grad_norm": 0.39416244626045227,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0251,
      "step": 1620
    },
    {
      "epoch": 5.415282392026578,
      "grad_norm": 0.3443300724029541,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0218,
      "step": 1630
    },
    {
      "epoch": 5.4485049833887045,
      "grad_norm": 0.4587835371494293,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0258,
      "step": 1640
    },
    {
      "epoch": 5.48172757475083,
      "grad_norm": 0.41582098603248596,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0261,
      "step": 1650
    },
    {
      "epoch": 5.514950166112957,
      "grad_norm": 0.28211021423339844,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0267,
      "step": 1660
    },
    {
      "epoch": 5.548172757475083,
      "grad_norm": 0.4471738934516907,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0235,
      "step": 1670
    },
    {
      "epoch": 5.5813953488372094,
      "grad_norm": 0.28704577684402466,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0233,
      "step": 1680
    },
    {
      "epoch": 5.614617940199335,
      "grad_norm": 0.41606009006500244,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0274,
      "step": 1690
    },
    {
      "epoch": 5.647840531561462,
      "grad_norm": 0.47395747900009155,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0224,
      "step": 1700
    },
    {
      "epoch": 5.681063122923588,
      "grad_norm": 0.347327321767807,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0218,
      "step": 1710
    },
    {
      "epoch": 5.714285714285714,
      "grad_norm": 0.2327083796262741,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0234,
      "step": 1720
    },
    {
      "epoch": 5.74750830564784,
      "grad_norm": 0.3055405020713806,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0242,
      "step": 1730
    },
    {
      "epoch": 5.780730897009967,
      "grad_norm": 0.46758589148521423,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0236,
      "step": 1740
    },
    {
      "epoch": 5.813953488372093,
      "grad_norm": 0.42915889620780945,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0236,
      "step": 1750
    },
    {
      "epoch": 5.847176079734219,
      "grad_norm": 0.3505151867866516,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0226,
      "step": 1760
    },
    {
      "epoch": 5.880398671096345,
      "grad_norm": 0.3558598756790161,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0238,
      "step": 1770
    },
    {
      "epoch": 5.913621262458472,
      "grad_norm": 0.32505741715431213,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0274,
      "step": 1780
    },
    {
      "epoch": 5.946843853820598,
      "grad_norm": 0.3497382402420044,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0288,
      "step": 1790
    },
    {
      "epoch": 5.980066445182724,
      "grad_norm": 0.336085706949234,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0285,
      "step": 1800
    },
    {
      "epoch": 6.01328903654485,
      "grad_norm": 0.4602932929992676,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0232,
      "step": 1810
    },
    {
      "epoch": 6.046511627906977,
      "grad_norm": 0.37610024213790894,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0304,
      "step": 1820
    },
    {
      "epoch": 6.079734219269103,
      "grad_norm": 0.4268096685409546,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0243,
      "step": 1830
    },
    {
      "epoch": 6.112956810631229,
      "grad_norm": 0.705152153968811,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0231,
      "step": 1840
    },
    {
      "epoch": 6.146179401993355,
      "grad_norm": 0.3031271696090698,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0249,
      "step": 1850
    },
    {
      "epoch": 6.179401993355482,
      "grad_norm": 0.46579620242118835,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.025,
      "step": 1860
    },
    {
      "epoch": 6.212624584717608,
      "grad_norm": 0.4709901511669159,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.03,
      "step": 1870
    },
    {
      "epoch": 6.245847176079734,
      "grad_norm": 0.20217536389827728,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.027,
      "step": 1880
    },
    {
      "epoch": 6.27906976744186,
      "grad_norm": 0.35509705543518066,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0183,
      "step": 1890
    },
    {
      "epoch": 6.312292358803987,
      "grad_norm": 0.299204558134079,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.0241,
      "step": 1900
    },
    {
      "epoch": 6.3455149501661126,
      "grad_norm": 0.3697379529476166,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0269,
      "step": 1910
    },
    {
      "epoch": 6.378737541528239,
      "grad_norm": 0.5883201956748962,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0218,
      "step": 1920
    },
    {
      "epoch": 6.411960132890365,
      "grad_norm": 0.39769867062568665,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0245,
      "step": 1930
    },
    {
      "epoch": 6.445182724252492,
      "grad_norm": 0.5797551274299622,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.0265,
      "step": 1940
    },
    {
      "epoch": 6.4784053156146175,
      "grad_norm": 0.462607204914093,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0255,
      "step": 1950
    },
    {
      "epoch": 6.511627906976744,
      "grad_norm": 0.41783976554870605,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0228,
      "step": 1960
    },
    {
      "epoch": 6.544850498338871,
      "grad_norm": 0.31457412242889404,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0233,
      "step": 1970
    },
    {
      "epoch": 6.578073089700997,
      "grad_norm": 0.28764960169792175,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.023,
      "step": 1980
    },
    {
      "epoch": 6.6112956810631225,
      "grad_norm": 0.2633940875530243,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0264,
      "step": 1990
    },
    {
      "epoch": 6.644518272425249,
      "grad_norm": 0.33487606048583984,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.023,
      "step": 2000
    },
    {
      "epoch": 6.677740863787376,
      "grad_norm": 0.3526790142059326,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0212,
      "step": 2010
    },
    {
      "epoch": 6.710963455149502,
      "grad_norm": 0.4186701774597168,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0218,
      "step": 2020
    },
    {
      "epoch": 6.7441860465116275,
      "grad_norm": 0.36500057578086853,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0267,
      "step": 2030
    },
    {
      "epoch": 6.777408637873754,
      "grad_norm": 0.4534339904785156,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0271,
      "step": 2040
    },
    {
      "epoch": 6.810631229235881,
      "grad_norm": 0.32824012637138367,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0232,
      "step": 2050
    },
    {
      "epoch": 6.843853820598007,
      "grad_norm": 0.37586942315101624,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0202,
      "step": 2060
    },
    {
      "epoch": 6.877076411960132,
      "grad_norm": 0.21727822721004486,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0197,
      "step": 2070
    },
    {
      "epoch": 6.910299003322259,
      "grad_norm": 0.3569616377353668,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0223,
      "step": 2080
    },
    {
      "epoch": 6.943521594684386,
      "grad_norm": 0.3130171000957489,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.0198,
      "step": 2090
    },
    {
      "epoch": 6.976744186046512,
      "grad_norm": 0.3402811288833618,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0224,
      "step": 2100
    },
    {
      "epoch": 7.009966777408638,
      "grad_norm": 0.3928144872188568,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0184,
      "step": 2110
    },
    {
      "epoch": 7.043189368770764,
      "grad_norm": 0.39649125933647156,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0206,
      "step": 2120
    },
    {
      "epoch": 7.076411960132891,
      "grad_norm": 0.5337740778923035,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.023,
      "step": 2130
    },
    {
      "epoch": 7.1096345514950166,
      "grad_norm": 0.4840176999568939,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0217,
      "step": 2140
    },
    {
      "epoch": 7.142857142857143,
      "grad_norm": 0.3124825656414032,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0252,
      "step": 2150
    },
    {
      "epoch": 7.176079734219269,
      "grad_norm": 0.38995927572250366,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0193,
      "step": 2160
    },
    {
      "epoch": 7.209302325581396,
      "grad_norm": 0.4084652066230774,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0235,
      "step": 2170
    },
    {
      "epoch": 7.2425249169435215,
      "grad_norm": 0.189564511179924,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0176,
      "step": 2180
    },
    {
      "epoch": 7.275747508305648,
      "grad_norm": 0.38772428035736084,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0215,
      "step": 2190
    },
    {
      "epoch": 7.308970099667774,
      "grad_norm": 0.4078693985939026,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0215,
      "step": 2200
    },
    {
      "epoch": 7.342192691029901,
      "grad_norm": 0.2535257339477539,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0183,
      "step": 2210
    },
    {
      "epoch": 7.3754152823920265,
      "grad_norm": 0.3889135718345642,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0177,
      "step": 2220
    },
    {
      "epoch": 7.408637873754153,
      "grad_norm": 0.3870062232017517,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0226,
      "step": 2230
    },
    {
      "epoch": 7.441860465116279,
      "grad_norm": 0.4052248001098633,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0234,
      "step": 2240
    },
    {
      "epoch": 7.475083056478406,
      "grad_norm": 0.28612571954727173,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0196,
      "step": 2250
    },
    {
      "epoch": 7.5083056478405314,
      "grad_norm": 0.35402119159698486,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0242,
      "step": 2260
    },
    {
      "epoch": 7.541528239202658,
      "grad_norm": 0.3981899619102478,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0236,
      "step": 2270
    },
    {
      "epoch": 7.574750830564784,
      "grad_norm": 0.20559972524642944,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0228,
      "step": 2280
    },
    {
      "epoch": 7.607973421926911,
      "grad_norm": 0.25476157665252686,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0224,
      "step": 2290
    },
    {
      "epoch": 7.641196013289036,
      "grad_norm": 0.22289226949214935,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0213,
      "step": 2300
    },
    {
      "epoch": 7.674418604651163,
      "grad_norm": 0.266042023897171,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0221,
      "step": 2310
    },
    {
      "epoch": 7.707641196013289,
      "grad_norm": 0.24307210743427277,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.021,
      "step": 2320
    },
    {
      "epoch": 7.740863787375416,
      "grad_norm": 0.36476147174835205,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0215,
      "step": 2330
    },
    {
      "epoch": 7.774086378737541,
      "grad_norm": 0.2570967674255371,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0264,
      "step": 2340
    },
    {
      "epoch": 7.807308970099668,
      "grad_norm": 0.30510249733924866,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.023,
      "step": 2350
    },
    {
      "epoch": 7.840531561461794,
      "grad_norm": 0.3401687741279602,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0243,
      "step": 2360
    },
    {
      "epoch": 7.8737541528239205,
      "grad_norm": 0.4495038390159607,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0186,
      "step": 2370
    },
    {
      "epoch": 7.906976744186046,
      "grad_norm": 0.22529259324073792,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.0197,
      "step": 2380
    },
    {
      "epoch": 7.940199335548173,
      "grad_norm": 0.30005428194999695,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.0184,
      "step": 2390
    },
    {
      "epoch": 7.973421926910299,
      "grad_norm": 0.3295758068561554,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0231,
      "step": 2400
    },
    {
      "epoch": 8.006644518272426,
      "grad_norm": 0.40123850107192993,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.02,
      "step": 2410
    },
    {
      "epoch": 8.039867109634551,
      "grad_norm": 0.22583815455436707,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0208,
      "step": 2420
    },
    {
      "epoch": 8.073089700996677,
      "grad_norm": 0.3022814393043518,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0193,
      "step": 2430
    },
    {
      "epoch": 8.106312292358805,
      "grad_norm": 0.433930903673172,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0233,
      "step": 2440
    },
    {
      "epoch": 8.13953488372093,
      "grad_norm": 0.4505160450935364,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.0196,
      "step": 2450
    },
    {
      "epoch": 8.172757475083056,
      "grad_norm": 0.4341692328453064,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0216,
      "step": 2460
    },
    {
      "epoch": 8.205980066445182,
      "grad_norm": 0.39989882707595825,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0231,
      "step": 2470
    },
    {
      "epoch": 8.23920265780731,
      "grad_norm": 0.31618350744247437,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0159,
      "step": 2480
    },
    {
      "epoch": 8.272425249169435,
      "grad_norm": 0.3098665475845337,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.0177,
      "step": 2490
    },
    {
      "epoch": 8.305647840531561,
      "grad_norm": 0.24335923790931702,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0191,
      "step": 2500
    },
    {
      "epoch": 8.338870431893687,
      "grad_norm": 0.31290650367736816,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0199,
      "step": 2510
    },
    {
      "epoch": 8.372093023255815,
      "grad_norm": 0.4052436947822571,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.021,
      "step": 2520
    },
    {
      "epoch": 8.40531561461794,
      "grad_norm": 0.432539701461792,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0197,
      "step": 2530
    },
    {
      "epoch": 8.438538205980066,
      "grad_norm": 0.30588874220848083,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0155,
      "step": 2540
    },
    {
      "epoch": 8.471760797342192,
      "grad_norm": 0.2926918566226959,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0213,
      "step": 2550
    },
    {
      "epoch": 8.50498338870432,
      "grad_norm": 0.2769851088523865,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0247,
      "step": 2560
    },
    {
      "epoch": 8.538205980066445,
      "grad_norm": 0.48992279171943665,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0213,
      "step": 2570
    },
    {
      "epoch": 8.571428571428571,
      "grad_norm": 0.3409804105758667,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0215,
      "step": 2580
    },
    {
      "epoch": 8.604651162790697,
      "grad_norm": 0.2671858072280884,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0189,
      "step": 2590
    },
    {
      "epoch": 8.637873754152825,
      "grad_norm": 0.29365599155426025,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0203,
      "step": 2600
    },
    {
      "epoch": 8.67109634551495,
      "grad_norm": 0.35508570075035095,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0198,
      "step": 2610
    },
    {
      "epoch": 8.704318936877076,
      "grad_norm": 0.31511878967285156,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0191,
      "step": 2620
    },
    {
      "epoch": 8.737541528239202,
      "grad_norm": 0.2841794490814209,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0184,
      "step": 2630
    },
    {
      "epoch": 8.77076411960133,
      "grad_norm": 0.32950523495674133,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0159,
      "step": 2640
    },
    {
      "epoch": 8.803986710963455,
      "grad_norm": 0.23667795956134796,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0148,
      "step": 2650
    },
    {
      "epoch": 8.837209302325581,
      "grad_norm": 0.3478001058101654,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0273,
      "step": 2660
    },
    {
      "epoch": 8.870431893687707,
      "grad_norm": 0.4005625247955322,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0207,
      "step": 2670
    },
    {
      "epoch": 8.903654485049834,
      "grad_norm": 0.20847535133361816,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0183,
      "step": 2680
    },
    {
      "epoch": 8.93687707641196,
      "grad_norm": 0.1629849672317505,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0234,
      "step": 2690
    },
    {
      "epoch": 8.970099667774086,
      "grad_norm": 0.24203428626060486,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0184,
      "step": 2700
    },
    {
      "epoch": 9.003322259136212,
      "grad_norm": 0.2886161208152771,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0193,
      "step": 2710
    },
    {
      "epoch": 9.03654485049834,
      "grad_norm": 0.3591567575931549,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0208,
      "step": 2720
    },
    {
      "epoch": 9.069767441860465,
      "grad_norm": 0.3133392333984375,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0195,
      "step": 2730
    },
    {
      "epoch": 9.102990033222591,
      "grad_norm": 0.37037843465805054,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.019,
      "step": 2740
    },
    {
      "epoch": 9.136212624584717,
      "grad_norm": 0.2838710844516754,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0176,
      "step": 2750
    },
    {
      "epoch": 9.169435215946844,
      "grad_norm": 0.41625136137008667,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0176,
      "step": 2760
    },
    {
      "epoch": 9.20265780730897,
      "grad_norm": 0.38990315794944763,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0179,
      "step": 2770
    },
    {
      "epoch": 9.235880398671096,
      "grad_norm": 0.2293078452348709,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0173,
      "step": 2780
    },
    {
      "epoch": 9.269102990033222,
      "grad_norm": 0.27936089038848877,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0214,
      "step": 2790
    },
    {
      "epoch": 9.30232558139535,
      "grad_norm": 0.3115111291408539,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.016,
      "step": 2800
    },
    {
      "epoch": 9.335548172757475,
      "grad_norm": 0.4285588264465332,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0213,
      "step": 2810
    },
    {
      "epoch": 9.368770764119601,
      "grad_norm": 0.29907381534576416,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0209,
      "step": 2820
    },
    {
      "epoch": 9.401993355481727,
      "grad_norm": 0.25026509165763855,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.0197,
      "step": 2830
    },
    {
      "epoch": 9.435215946843854,
      "grad_norm": 0.3209597170352936,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0172,
      "step": 2840
    },
    {
      "epoch": 9.46843853820598,
      "grad_norm": 0.19637836515903473,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0192,
      "step": 2850
    },
    {
      "epoch": 9.501661129568106,
      "grad_norm": 0.2835908532142639,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0195,
      "step": 2860
    },
    {
      "epoch": 9.534883720930232,
      "grad_norm": 0.3389662802219391,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0173,
      "step": 2870
    },
    {
      "epoch": 9.56810631229236,
      "grad_norm": 0.3397357165813446,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.019,
      "step": 2880
    },
    {
      "epoch": 9.601328903654485,
      "grad_norm": 0.28326690196990967,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0156,
      "step": 2890
    },
    {
      "epoch": 9.634551495016611,
      "grad_norm": 0.43105173110961914,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0169,
      "step": 2900
    },
    {
      "epoch": 9.667774086378738,
      "grad_norm": 0.2692944407463074,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0218,
      "step": 2910
    },
    {
      "epoch": 9.700996677740864,
      "grad_norm": 0.21992726624011993,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.0259,
      "step": 2920
    },
    {
      "epoch": 9.73421926910299,
      "grad_norm": 0.18732020258903503,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0168,
      "step": 2930
    },
    {
      "epoch": 9.767441860465116,
      "grad_norm": 0.38717469573020935,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0183,
      "step": 2940
    },
    {
      "epoch": 9.800664451827242,
      "grad_norm": 0.24515467882156372,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0169,
      "step": 2950
    },
    {
      "epoch": 9.83388704318937,
      "grad_norm": 0.25871196389198303,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.015,
      "step": 2960
    },
    {
      "epoch": 9.867109634551495,
      "grad_norm": 0.19653911888599396,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.0195,
      "step": 2970
    },
    {
      "epoch": 9.90033222591362,
      "grad_norm": 0.36540013551712036,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0216,
      "step": 2980
    },
    {
      "epoch": 9.933554817275748,
      "grad_norm": 0.21347872912883759,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0168,
      "step": 2990
    },
    {
      "epoch": 9.966777408637874,
      "grad_norm": 0.2640397548675537,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0146,
      "step": 3000
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.9582907557487488,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0201,
      "step": 3010
    },
    {
      "epoch": 10.033222591362126,
      "grad_norm": 0.546707808971405,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0231,
      "step": 3020
    },
    {
      "epoch": 10.066445182724253,
      "grad_norm": 0.36035218834877014,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0244,
      "step": 3030
    },
    {
      "epoch": 10.09966777408638,
      "grad_norm": 0.40394413471221924,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.024,
      "step": 3040
    },
    {
      "epoch": 10.132890365448505,
      "grad_norm": 0.3483884334564209,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0199,
      "step": 3050
    },
    {
      "epoch": 10.16611295681063,
      "grad_norm": 0.30480167269706726,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.023,
      "step": 3060
    },
    {
      "epoch": 10.199335548172758,
      "grad_norm": 0.29263198375701904,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0194,
      "step": 3070
    },
    {
      "epoch": 10.232558139534884,
      "grad_norm": 0.37630799412727356,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0227,
      "step": 3080
    },
    {
      "epoch": 10.26578073089701,
      "grad_norm": 0.316376268863678,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0176,
      "step": 3090
    },
    {
      "epoch": 10.299003322259136,
      "grad_norm": 0.28122377395629883,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0219,
      "step": 3100
    },
    {
      "epoch": 10.332225913621263,
      "grad_norm": 0.36189690232276917,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0196,
      "step": 3110
    },
    {
      "epoch": 10.365448504983389,
      "grad_norm": 0.31648537516593933,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0227,
      "step": 3120
    },
    {
      "epoch": 10.398671096345515,
      "grad_norm": 0.36495092511177063,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0156,
      "step": 3130
    },
    {
      "epoch": 10.43189368770764,
      "grad_norm": 0.3638916313648224,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0228,
      "step": 3140
    },
    {
      "epoch": 10.465116279069768,
      "grad_norm": 0.2252400666475296,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0195,
      "step": 3150
    },
    {
      "epoch": 10.498338870431894,
      "grad_norm": 0.5001240372657776,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0216,
      "step": 3160
    },
    {
      "epoch": 10.53156146179402,
      "grad_norm": 0.3034009635448456,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0192,
      "step": 3170
    },
    {
      "epoch": 10.564784053156146,
      "grad_norm": 0.32421091198921204,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.0193,
      "step": 3180
    },
    {
      "epoch": 10.598006644518273,
      "grad_norm": 0.2746524214744568,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0176,
      "step": 3190
    },
    {
      "epoch": 10.631229235880399,
      "grad_norm": 0.5117692351341248,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0217,
      "step": 3200
    },
    {
      "epoch": 10.664451827242525,
      "grad_norm": 0.322994589805603,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0187,
      "step": 3210
    },
    {
      "epoch": 10.69767441860465,
      "grad_norm": 0.18638882040977478,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0173,
      "step": 3220
    },
    {
      "epoch": 10.730897009966778,
      "grad_norm": 0.2154085487127304,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0151,
      "step": 3230
    },
    {
      "epoch": 10.764119601328904,
      "grad_norm": 0.14285944402217865,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.018,
      "step": 3240
    },
    {
      "epoch": 10.79734219269103,
      "grad_norm": 0.34248557686805725,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0146,
      "step": 3250
    },
    {
      "epoch": 10.830564784053156,
      "grad_norm": 0.2130262702703476,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0124,
      "step": 3260
    },
    {
      "epoch": 10.863787375415283,
      "grad_norm": 0.40157315135002136,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0257,
      "step": 3270
    },
    {
      "epoch": 10.897009966777409,
      "grad_norm": 0.38071388006210327,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.0186,
      "step": 3280
    },
    {
      "epoch": 10.930232558139535,
      "grad_norm": 0.24200329184532166,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0161,
      "step": 3290
    },
    {
      "epoch": 10.96345514950166,
      "grad_norm": 0.23784863948822021,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0169,
      "step": 3300
    },
    {
      "epoch": 10.996677740863788,
      "grad_norm": 0.2810799777507782,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0175,
      "step": 3310
    },
    {
      "epoch": 11.029900332225914,
      "grad_norm": 0.3434373140335083,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0238,
      "step": 3320
    },
    {
      "epoch": 11.06312292358804,
      "grad_norm": 0.4956740438938141,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0219,
      "step": 3330
    },
    {
      "epoch": 11.096345514950166,
      "grad_norm": 0.22429168224334717,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.0221,
      "step": 3340
    },
    {
      "epoch": 11.129568106312293,
      "grad_norm": 0.34100890159606934,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0217,
      "step": 3350
    },
    {
      "epoch": 11.162790697674419,
      "grad_norm": 0.3702620565891266,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.028,
      "step": 3360
    },
    {
      "epoch": 11.196013289036545,
      "grad_norm": 0.3129262626171112,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.018,
      "step": 3370
    },
    {
      "epoch": 11.22923588039867,
      "grad_norm": 0.3758520483970642,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0253,
      "step": 3380
    },
    {
      "epoch": 11.262458471760798,
      "grad_norm": 0.36128005385398865,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0231,
      "step": 3390
    },
    {
      "epoch": 11.295681063122924,
      "grad_norm": 0.3874170482158661,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0193,
      "step": 3400
    },
    {
      "epoch": 11.32890365448505,
      "grad_norm": 0.4195541739463806,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0202,
      "step": 3410
    },
    {
      "epoch": 11.362126245847175,
      "grad_norm": 0.36057302355766296,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0216,
      "step": 3420
    },
| { | |
| "epoch": 11.395348837209303, | |
| "grad_norm": 0.3191004991531372, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0207, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 11.428571428571429, | |
| "grad_norm": 0.3054732084274292, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0175, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 11.461794019933555, | |
| "grad_norm": 0.2970241606235504, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0213, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 11.49501661129568, | |
| "grad_norm": 0.3414061367511749, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0177, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 11.528239202657808, | |
| "grad_norm": 0.4683157205581665, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0242, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 11.561461794019934, | |
| "grad_norm": 0.21441063284873962, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0169, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 11.59468438538206, | |
| "grad_norm": 0.29838085174560547, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.0188, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 11.627906976744185, | |
| "grad_norm": 0.3362698554992676, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.019, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 11.661129568106313, | |
| "grad_norm": 0.27162593603134155, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0257, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 11.694352159468439, | |
| "grad_norm": 0.2674727737903595, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0201, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 11.727574750830565, | |
| "grad_norm": 0.25655072927474976, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0179, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 11.76079734219269, | |
| "grad_norm": 0.24716055393218994, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0181, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 11.794019933554818, | |
| "grad_norm": 0.4514801800251007, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0247, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 11.827242524916944, | |
| "grad_norm": 0.5603443384170532, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0217, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 11.86046511627907, | |
| "grad_norm": 0.32442739605903625, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0214, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 11.893687707641195, | |
| "grad_norm": 0.27662280201911926, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.0221, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 11.926910299003323, | |
| "grad_norm": 0.22196006774902344, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0214, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 11.960132890365449, | |
| "grad_norm": 0.36716607213020325, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0182, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 11.993355481727574, | |
| "grad_norm": 0.23657159507274628, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0228, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 12.0265780730897, | |
| "grad_norm": 0.3037751317024231, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0158, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 12.059800664451828, | |
| "grad_norm": 0.21974584460258484, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0181, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 12.093023255813954, | |
| "grad_norm": 0.20289649069309235, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0179, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 12.12624584717608, | |
| "grad_norm": 0.2716876268386841, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0213, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 12.159468438538205, | |
| "grad_norm": 0.2261294424533844, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.0198, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 12.192691029900333, | |
| "grad_norm": 0.25505638122558594, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0167, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 12.225913621262459, | |
| "grad_norm": 0.23943322896957397, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0176, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 12.259136212624584, | |
| "grad_norm": 0.4076962172985077, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0186, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 12.29235880398671, | |
| "grad_norm": 0.34784767031669617, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0212, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 12.325581395348838, | |
| "grad_norm": 0.342916876077652, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.0163, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 12.358803986710964, | |
| "grad_norm": 0.3107483386993408, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0206, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 12.39202657807309, | |
| "grad_norm": 0.3539960980415344, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0174, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 12.425249169435215, | |
| "grad_norm": 0.3659731149673462, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0167, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 12.458471760797343, | |
| "grad_norm": 0.2727093994617462, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.019, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 12.491694352159469, | |
| "grad_norm": 0.41176939010620117, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0176, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 12.524916943521594, | |
| "grad_norm": 0.4040158987045288, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.02, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 12.55813953488372, | |
| "grad_norm": 0.3903579115867615, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0183, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 12.591362126245848, | |
| "grad_norm": 0.3343384265899658, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0227, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 12.624584717607974, | |
| "grad_norm": 0.35453101992607117, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0146, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 12.6578073089701, | |
| "grad_norm": 0.35144782066345215, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0196, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 12.691029900332225, | |
| "grad_norm": 0.35027822852134705, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0149, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 12.724252491694353, | |
| "grad_norm": 0.34613221883773804, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0207, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 12.757475083056478, | |
| "grad_norm": 0.34608450531959534, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.0173, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 12.790697674418604, | |
| "grad_norm": 0.31654372811317444, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0146, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 12.82392026578073, | |
| "grad_norm": 0.2791317403316498, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.015, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 12.857142857142858, | |
| "grad_norm": 0.44848188757896423, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0175, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 12.890365448504983, | |
| "grad_norm": 0.3050365149974823, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0174, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 12.92358803986711, | |
| "grad_norm": 0.290131539106369, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0194, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 12.956810631229235, | |
| "grad_norm": 0.3899692893028259, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.0141, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 12.990033222591363, | |
| "grad_norm": 0.25462275743484497, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.021, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 13.023255813953488, | |
| "grad_norm": 0.25204241275787354, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.0159, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 13.056478405315614, | |
| "grad_norm": 0.35123640298843384, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0189, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 13.08970099667774, | |
| "grad_norm": 0.3995964229106903, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0175, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 13.122923588039868, | |
| "grad_norm": 0.3554200530052185, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0191, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 13.156146179401993, | |
| "grad_norm": 0.3538668751716614, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0186, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 13.18936877076412, | |
| "grad_norm": 0.1743728220462799, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.016, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 13.222591362126245, | |
| "grad_norm": 0.287780225276947, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0187, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 13.255813953488373, | |
| "grad_norm": 0.24803023040294647, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0179, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 13.289036544850498, | |
| "grad_norm": 0.2810683846473694, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0181, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 13.322259136212624, | |
| "grad_norm": 0.31502121686935425, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0205, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 13.355481727574752, | |
| "grad_norm": 0.23561535775661469, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0174, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 13.388704318936878, | |
| "grad_norm": 0.36243340373039246, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.014, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 13.421926910299003, | |
| "grad_norm": 0.24262340366840363, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0171, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 13.455149501661129, | |
| "grad_norm": 0.23523877561092377, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0173, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 13.488372093023255, | |
| "grad_norm": 0.2318788468837738, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.0156, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 13.521594684385382, | |
| "grad_norm": 0.265948623418808, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0169, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 13.554817275747508, | |
| "grad_norm": 0.2921631932258606, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0173, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 13.588039867109634, | |
| "grad_norm": 0.2878166735172272, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.014, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 13.621262458471762, | |
| "grad_norm": 0.36211255192756653, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0124, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 13.654485049833887, | |
| "grad_norm": 0.4308359920978546, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0184, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 13.687707641196013, | |
| "grad_norm": 0.3886549174785614, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0175, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 13.720930232558139, | |
| "grad_norm": 0.29117047786712646, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0182, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 13.754152823920267, | |
| "grad_norm": 0.26800787448883057, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0146, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 13.787375415282392, | |
| "grad_norm": 0.30751490592956543, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0187, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 13.820598006644518, | |
| "grad_norm": 0.36831483244895935, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0161, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 13.853820598006644, | |
| "grad_norm": 0.32650047540664673, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0114, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 13.887043189368772, | |
| "grad_norm": 0.22459985315799713, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.0195, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 13.920265780730897, | |
| "grad_norm": 0.2692617177963257, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0178, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 13.953488372093023, | |
| "grad_norm": 0.28635933995246887, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0234, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 13.986710963455149, | |
| "grad_norm": 0.1947304904460907, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.0137, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 14.019933554817277, | |
| "grad_norm": 0.454317182302475, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0205, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 14.053156146179402, | |
| "grad_norm": 0.2946520447731018, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0196, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 14.086378737541528, | |
| "grad_norm": 0.3139476478099823, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0153, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 14.119601328903654, | |
| "grad_norm": 0.44135165214538574, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0174, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 14.152823920265782, | |
| "grad_norm": 0.3966526687145233, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0181, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 14.186046511627907, | |
| "grad_norm": 0.48178598284721375, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0229, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 14.219269102990033, | |
| "grad_norm": 0.4271031320095062, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.0173, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 14.252491694352159, | |
| "grad_norm": 0.1896420121192932, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0141, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 14.285714285714286, | |
| "grad_norm": 0.4319266974925995, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.0164, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 14.318936877076412, | |
| "grad_norm": 0.24711720645427704, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0161, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 14.352159468438538, | |
| "grad_norm": 0.2857001721858978, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.014, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 14.385382059800664, | |
| "grad_norm": 0.3837076723575592, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0162, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 14.418604651162791, | |
| "grad_norm": 0.23993246257305145, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0138, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 14.451827242524917, | |
| "grad_norm": 0.30589741468429565, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.0134, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 14.485049833887043, | |
| "grad_norm": 0.26103895902633667, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0147, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 14.518272425249169, | |
| "grad_norm": 0.1985149085521698, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0143, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 14.551495016611296, | |
| "grad_norm": 0.2402988076210022, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0127, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 14.584717607973422, | |
| "grad_norm": 0.2162981629371643, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0123, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 14.617940199335548, | |
| "grad_norm": 0.21887542307376862, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0144, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 14.651162790697674, | |
| "grad_norm": 0.26267585158348083, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0212, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 14.684385382059801, | |
| "grad_norm": 0.3892922103404999, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0181, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 14.717607973421927, | |
| "grad_norm": 0.30290210247039795, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0183, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 14.750830564784053, | |
| "grad_norm": 0.2289399802684784, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0177, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 14.784053156146179, | |
| "grad_norm": 0.22953011095523834, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.0159, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 14.817275747508306, | |
| "grad_norm": 0.2985853850841522, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0206, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 14.850498338870432, | |
| "grad_norm": 0.19090281426906586, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0148, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 14.883720930232558, | |
| "grad_norm": 0.17965582013130188, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0149, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 14.916943521594684, | |
| "grad_norm": 0.22538508474826813, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0159, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 14.950166112956811, | |
| "grad_norm": 0.2691894769668579, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0172, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 14.983388704318937, | |
| "grad_norm": 0.2683984637260437, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0153, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 15.016611295681063, | |
| "grad_norm": 0.3912666440010071, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.019, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 15.049833887043189, | |
| "grad_norm": 0.22746188938617706, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0222, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 15.083056478405316, | |
| "grad_norm": 0.20105905830860138, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0154, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 15.116279069767442, | |
| "grad_norm": 0.24199578166007996, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0166, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 15.149501661129568, | |
| "grad_norm": 0.3318256139755249, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0164, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 15.182724252491694, | |
| "grad_norm": 0.41996484994888306, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.014, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 15.215946843853821, | |
| "grad_norm": 0.22289037704467773, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.0158, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 15.249169435215947, | |
| "grad_norm": 0.3248358368873596, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0131, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 15.282392026578073, | |
| "grad_norm": 0.3744429051876068, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0161, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 15.315614617940199, | |
| "grad_norm": 0.21573661267757416, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0145, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 15.348837209302326, | |
| "grad_norm": 0.22471874952316284, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0151, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 15.382059800664452, | |
| "grad_norm": 0.2581987679004669, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0141, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 15.415282392026578, | |
| "grad_norm": 0.4127011001110077, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0165, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 15.448504983388704, | |
| "grad_norm": 0.21382664144039154, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0129, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 15.481727574750831, | |
| "grad_norm": 0.33771613240242004, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0168, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 15.514950166112957, | |
| "grad_norm": 0.22272400557994843, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0183, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 15.548172757475083, | |
| "grad_norm": 0.2086458057165146, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0148, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 15.581395348837209, | |
| "grad_norm": 0.3473312556743622, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0145, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 15.614617940199336, | |
| "grad_norm": 0.24149948358535767, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.017, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 15.647840531561462, | |
| "grad_norm": 0.27326861023902893, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0166, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 15.681063122923588, | |
| "grad_norm": 0.2840353846549988, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0164, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 15.714285714285714, | |
| "grad_norm": 0.20497111976146698, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.015, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 15.747508305647841, | |
| "grad_norm": 0.35892006754875183, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0178, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 15.780730897009967, | |
| "grad_norm": 0.23509638011455536, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0156, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 15.813953488372093, | |
| "grad_norm": 0.36275961995124817, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0147, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 15.847176079734218, | |
| "grad_norm": 0.18457593023777008, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0179, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 15.880398671096346, | |
| "grad_norm": 0.2808282971382141, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0152, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 15.913621262458472, | |
| "grad_norm": 0.3134106695652008, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0176, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 15.946843853820598, | |
| "grad_norm": 0.1334390789270401, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0136, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 15.980066445182723, | |
| "grad_norm": 0.1954159289598465, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0127, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 16.01328903654485, | |
| "grad_norm": 0.3972402513027191, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0165, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 16.046511627906977, | |
| "grad_norm": 0.2406802922487259, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0191, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 16.079734219269103, | |
| "grad_norm": 0.2822772264480591, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0151, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 16.11295681063123, | |
| "grad_norm": 0.3374737799167633, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0251, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 16.146179401993354, | |
| "grad_norm": 0.23243625462055206, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0172, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 16.17940199335548, | |
| "grad_norm": 0.17415964603424072, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.0129, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 16.21262458471761, | |
| "grad_norm": 0.24170176684856415, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0167, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 16.245847176079735, | |
| "grad_norm": 0.18468914926052094, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0176, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 16.27906976744186, | |
| "grad_norm": 0.17150849103927612, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0126, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 16.312292358803987, | |
| "grad_norm": 0.30511826276779175, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0172, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 16.345514950166113, | |
| "grad_norm": 0.27298447489738464, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0154, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 16.37873754152824, | |
| "grad_norm": 0.1913800686597824, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0133, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 16.411960132890364, | |
| "grad_norm": 0.22306601703166962, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0128, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 16.44518272425249, | |
| "grad_norm": 0.29052093625068665, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.0148, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 16.47840531561462, | |
| "grad_norm": 0.22746320068836212, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0171, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 16.511627906976745, | |
| "grad_norm": 0.35950127243995667, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0164, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 16.54485049833887, | |
| "grad_norm": 0.3040778934955597, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.013, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 16.578073089700997, | |
| "grad_norm": 0.3150460124015808, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0182, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 16.611295681063122, | |
| "grad_norm": 0.25338777899742126, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0159, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 16.64451827242525, | |
| "grad_norm": 0.25719591975212097, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0142, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 16.677740863787374, | |
| "grad_norm": 0.30168062448501587, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0138, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 16.710963455149503, | |
| "grad_norm": 0.17436672747135162, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0154, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 16.74418604651163, | |
| "grad_norm": 0.23474833369255066, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0153, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 16.777408637873755, | |
| "grad_norm": 0.16410623490810394, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0117, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 16.81063122923588, | |
| "grad_norm": 0.2162051796913147, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0146, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 16.843853820598007, | |
| "grad_norm": 0.25784435868263245, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0136, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 16.877076411960132, | |
| "grad_norm": 0.22715456783771515, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0129, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 16.910299003322258, | |
| "grad_norm": 0.2097283899784088, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0142, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 16.943521594684384, | |
| "grad_norm": 0.2484341710805893, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0142, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 16.97674418604651, | |
| "grad_norm": 0.3111594021320343, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0171, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 17.00996677740864, | |
| "grad_norm": 0.31417858600616455, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0208, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 17.043189368770765, | |
| "grad_norm": 0.2889890968799591, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0178, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 17.07641196013289, | |
| "grad_norm": 0.2410278171300888, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0168, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 17.109634551495017, | |
| "grad_norm": 0.22877810895442963, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.0143, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 17.142857142857142, | |
| "grad_norm": 0.3436548709869385, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0137, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 17.176079734219268, | |
| "grad_norm": 0.1852232813835144, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0144, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 17.209302325581394, | |
| "grad_norm": 0.2806830108165741, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0125, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 17.242524916943523, | |
| "grad_norm": 0.17646856606006622, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.012, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 17.27574750830565, | |
| "grad_norm": 0.25370362401008606, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.015, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 17.308970099667775, | |
| "grad_norm": 0.42946335673332214, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0181, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 17.3421926910299, | |
| "grad_norm": 0.2286258190870285, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0151, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 17.375415282392026, | |
| "grad_norm": 0.25382083654403687, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0142, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 17.408637873754152, | |
| "grad_norm": 0.30657899379730225, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0167, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 17.441860465116278, | |
| "grad_norm": 0.27695420384407043, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0117, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 17.475083056478404, | |
| "grad_norm": 0.18801657855510712, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0122, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 17.508305647840533, | |
| "grad_norm": 0.23032335937023163, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0156, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 17.54152823920266, | |
| "grad_norm": 0.26843783259391785, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0157, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 17.574750830564785, | |
| "grad_norm": 0.3416936993598938, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0155, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 17.60797342192691, | |
| "grad_norm": 0.20719528198242188, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.013, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 17.641196013289036, | |
| "grad_norm": 0.2207994908094406, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.014, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 17.674418604651162, | |
| "grad_norm": 0.25513842701911926, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0133, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 17.707641196013288, | |
| "grad_norm": 0.2589441239833832, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0125, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 17.740863787375414, | |
| "grad_norm": 0.28627756237983704, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0137, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 17.774086378737543, | |
| "grad_norm": 0.2636752128601074, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0131, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 17.80730897009967, | |
| "grad_norm": 0.19189167022705078, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0133, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 17.840531561461795, | |
| "grad_norm": 0.21339979767799377, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0161, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 17.87375415282392, | |
| "grad_norm": 0.27499207854270935, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0144, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 17.906976744186046, | |
| "grad_norm": 0.18827593326568604, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0129, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 17.940199335548172, | |
| "grad_norm": 0.15863770246505737, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0129, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 17.973421926910298, | |
| "grad_norm": 0.29626619815826416, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0137, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 18.006644518272424, | |
| "grad_norm": 0.19254808127880096, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0169, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 18.039867109634553, | |
| "grad_norm": 0.1836787462234497, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0134, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 18.07308970099668, | |
| "grad_norm": 0.20814216136932373, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0135, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 18.106312292358805, | |
| "grad_norm": 0.17879468202590942, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0138, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 18.13953488372093, | |
| "grad_norm": 0.38961148262023926, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0138, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 18.172757475083056, | |
| "grad_norm": 0.23467040061950684, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.0145, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 18.205980066445182, | |
| "grad_norm": 0.23017248511314392, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0125, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 18.239202657807308, | |
| "grad_norm": 0.25644469261169434, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0132, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 18.272425249169434, | |
| "grad_norm": 0.27478766441345215, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0117, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 18.305647840531563, | |
| "grad_norm": 0.25997769832611084, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0126, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 18.33887043189369, | |
| "grad_norm": 0.2996649742126465, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0105, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 18.372093023255815, | |
| "grad_norm": 0.13013173639774323, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0136, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 18.40531561461794, | |
| "grad_norm": 0.22051449120044708, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0144, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 18.438538205980066, | |
| "grad_norm": 0.24437175691127777, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0199, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 18.471760797342192, | |
| "grad_norm": 0.1537032574415207, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0119, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 18.504983388704318, | |
| "grad_norm": 0.1860104352235794, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.011, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 18.538205980066444, | |
| "grad_norm": 0.2010582834482193, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0112, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 18.571428571428573, | |
| "grad_norm": 0.2242826372385025, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.012, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 18.6046511627907, | |
| "grad_norm": 0.23328198492527008, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0111, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 18.637873754152825, | |
| "grad_norm": 0.19966065883636475, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0145, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 18.67109634551495, | |
| "grad_norm": 0.2838165760040283, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0135, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 18.704318936877076, | |
| "grad_norm": 0.2065529078245163, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0114, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 18.737541528239202, | |
| "grad_norm": 0.282686710357666, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0129, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 18.770764119601328, | |
| "grad_norm": 0.23530226945877075, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0115, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 18.803986710963454, | |
| "grad_norm": 0.19305096566677094, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.015, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 18.837209302325583, | |
| "grad_norm": 0.2113550901412964, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.0096, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 18.87043189368771, | |
| "grad_norm": 0.23037958145141602, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0146, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 18.903654485049834, | |
| "grad_norm": 0.18224431574344635, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.0121, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 18.93687707641196, | |
| "grad_norm": 0.2601468563079834, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.013, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 18.970099667774086, | |
| "grad_norm": 0.3363877832889557, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.015, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 19.003322259136212, | |
| "grad_norm": 0.19996199011802673, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0092, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 19.036544850498338, | |
| "grad_norm": 0.20175497233867645, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0158, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 19.069767441860463, | |
| "grad_norm": 0.167605921626091, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0124, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 19.102990033222593, | |
| "grad_norm": 0.18364398181438446, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0138, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 19.13621262458472, | |
| "grad_norm": 0.29462730884552, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0125, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 19.169435215946844, | |
| "grad_norm": 0.2075863629579544, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0116, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 19.20265780730897, | |
| "grad_norm": 0.2390693575143814, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0137, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 19.235880398671096, | |
| "grad_norm": 0.2695210576057434, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0122, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 19.269102990033222, | |
| "grad_norm": 0.23042955994606018, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.014, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 19.302325581395348, | |
| "grad_norm": 0.24274908006191254, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0119, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 19.335548172757473, | |
| "grad_norm": 0.256891131401062, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0096, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 19.368770764119603, | |
| "grad_norm": 0.23432119190692902, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0121, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 19.40199335548173, | |
| "grad_norm": 0.23672959208488464, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0132, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 19.435215946843854, | |
| "grad_norm": 0.18530480563640594, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0126, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 19.46843853820598, | |
| "grad_norm": 0.23715436458587646, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.013, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 19.501661129568106, | |
| "grad_norm": 0.17024758458137512, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.0182, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 19.53488372093023, | |
| "grad_norm": 0.23801341652870178, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.011, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 19.568106312292358, | |
| "grad_norm": 0.20012985169887543, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0117, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 19.601328903654483, | |
| "grad_norm": 0.18276239931583405, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0169, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 19.634551495016613, | |
| "grad_norm": 0.32923269271850586, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.011, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 19.66777408637874, | |
| "grad_norm": 0.12837934494018555, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0136, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 19.700996677740864, | |
| "grad_norm": 0.14650031924247742, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0099, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 19.73421926910299, | |
| "grad_norm": 0.21855078637599945, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0107, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 19.767441860465116, | |
| "grad_norm": 0.30323830246925354, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0111, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 19.80066445182724, | |
| "grad_norm": 0.23002244532108307, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.01, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 19.833887043189367, | |
| "grad_norm": 0.1509399712085724, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0094, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 19.867109634551497, | |
| "grad_norm": 0.22663408517837524, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0103, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 19.900332225913623, | |
| "grad_norm": 0.16443394124507904, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.0091, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 19.93355481727575, | |
| "grad_norm": 0.226366326212883, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0119, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 19.966777408637874, | |
| "grad_norm": 0.15423819422721863, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0117, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.5820074677467346, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0103, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 20.033222591362126, | |
| "grad_norm": 0.2385380119085312, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0102, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 20.06644518272425, | |
| "grad_norm": 0.25446420907974243, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0117, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 20.099667774086377, | |
| "grad_norm": 0.3364415168762207, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0099, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 20.132890365448507, | |
| "grad_norm": 0.23453129827976227, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.012, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 20.166112956810633, | |
| "grad_norm": 0.19058457016944885, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0083, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 20.19933554817276, | |
| "grad_norm": 0.21197690069675446, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.0114, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 20.232558139534884, | |
| "grad_norm": 0.2696029543876648, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0095, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 20.26578073089701, | |
| "grad_norm": 0.2509767413139343, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.0131, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 20.299003322259136, | |
| "grad_norm": 0.14039862155914307, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.0125, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 20.33222591362126, | |
| "grad_norm": 0.11676931381225586, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.008, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 20.365448504983387, | |
| "grad_norm": 0.2353682965040207, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0106, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 20.398671096345517, | |
| "grad_norm": 0.22952032089233398, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0115, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 20.431893687707642, | |
| "grad_norm": 0.25359752774238586, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0118, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 20.46511627906977, | |
| "grad_norm": 0.23889994621276855, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0085, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 20.498338870431894, | |
| "grad_norm": 0.11435896158218384, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0079, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 20.53156146179402, | |
| "grad_norm": 0.2550327777862549, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0141, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 20.564784053156146, | |
| "grad_norm": 0.15230843424797058, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0119, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 20.59800664451827, | |
| "grad_norm": 0.1876257359981537, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.012, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 20.631229235880397, | |
| "grad_norm": 0.2064155787229538, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0121, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 20.664451827242527, | |
| "grad_norm": 0.12937350571155548, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0101, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 20.697674418604652, | |
| "grad_norm": 0.2537197172641754, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0125, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 20.730897009966778, | |
| "grad_norm": 0.17130358517169952, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0118, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 20.764119601328904, | |
| "grad_norm": 0.32221508026123047, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0107, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 20.79734219269103, | |
| "grad_norm": 0.13424603641033173, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0112, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 20.830564784053156, | |
| "grad_norm": 0.2683691382408142, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0142, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 20.86378737541528, | |
| "grad_norm": 0.28922441601753235, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0125, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 20.897009966777407, | |
| "grad_norm": 0.3019663095474243, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.013, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 20.930232558139537, | |
| "grad_norm": 0.2417183667421341, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0115, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 20.963455149501662, | |
| "grad_norm": 0.12260009348392487, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0135, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 20.996677740863788, | |
| "grad_norm": 0.1857173889875412, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0105, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 21.029900332225914, | |
| "grad_norm": 0.1845966875553131, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.012, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 21.06312292358804, | |
| "grad_norm": 0.24921785295009613, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0086, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 21.096345514950166, | |
| "grad_norm": 0.27889764308929443, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0122, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 21.12956810631229, | |
| "grad_norm": 0.22039970755577087, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0088, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 21.162790697674417, | |
| "grad_norm": 0.21786560118198395, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0101, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 21.196013289036546, | |
| "grad_norm": 0.2396402209997177, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0117, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 21.229235880398672, | |
| "grad_norm": 0.21523918211460114, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0118, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 21.262458471760798, | |
| "grad_norm": 0.2706628143787384, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0099, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 21.295681063122924, | |
| "grad_norm": 0.25570225715637207, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0112, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 21.32890365448505, | |
| "grad_norm": 0.2580873370170593, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.0132, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 21.362126245847175, | |
| "grad_norm": 0.23050130903720856, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0116, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 21.3953488372093, | |
| "grad_norm": 0.2522919178009033, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0101, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 21.428571428571427, | |
| "grad_norm": 0.12361680716276169, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0084, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 21.461794019933556, | |
| "grad_norm": 0.14684048295021057, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0091, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 21.495016611295682, | |
| "grad_norm": 0.2606411576271057, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.0079, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 21.528239202657808, | |
| "grad_norm": 0.20318882167339325, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.008, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 21.561461794019934, | |
| "grad_norm": 0.20854884386062622, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.0112, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 21.59468438538206, | |
| "grad_norm": 0.19162316620349884, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.008, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 21.627906976744185, | |
| "grad_norm": 0.20191577076911926, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0088, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 21.66112956810631, | |
| "grad_norm": 0.289853036403656, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0087, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 21.694352159468437, | |
| "grad_norm": 0.16470327973365784, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.011, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 21.727574750830566, | |
| "grad_norm": 0.2899600565433502, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0128, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 21.760797342192692, | |
| "grad_norm": 0.2802336812019348, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0138, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 21.794019933554818, | |
| "grad_norm": 0.22840915620326996, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0117, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 21.827242524916944, | |
| "grad_norm": 0.289288729429245, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0101, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 21.86046511627907, | |
| "grad_norm": 0.19778776168823242, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0102, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 21.893687707641195, | |
| "grad_norm": 0.1731463372707367, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0109, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 21.92691029900332, | |
| "grad_norm": 0.14055639505386353, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0158, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 21.960132890365447, | |
| "grad_norm": 0.22689878940582275, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0097, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 21.993355481727576, | |
| "grad_norm": 0.17787189781665802, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0111, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 22.026578073089702, | |
| "grad_norm": 0.2983497977256775, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0128, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 22.059800664451828, | |
| "grad_norm": 0.28037431836128235, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0129, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 22.093023255813954, | |
| "grad_norm": 0.17736779153347015, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0143, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 22.12624584717608, | |
| "grad_norm": 0.18235179781913757, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0107, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 22.159468438538205, | |
| "grad_norm": 0.21416562795639038, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0091, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 22.19269102990033, | |
| "grad_norm": 0.22016173601150513, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0111, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 22.225913621262457, | |
| "grad_norm": 0.20562654733657837, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.0093, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 22.259136212624586, | |
| "grad_norm": 0.21838662028312683, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0127, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 22.292358803986712, | |
| "grad_norm": 0.20718765258789062, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0092, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 22.325581395348838, | |
| "grad_norm": 0.20617736876010895, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.01, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 22.358803986710964, | |
| "grad_norm": 0.24590416252613068, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0097, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 22.39202657807309, | |
| "grad_norm": 0.21153171360492706, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0112, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 22.425249169435215, | |
| "grad_norm": 0.1685451716184616, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.0116, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 22.45847176079734, | |
| "grad_norm": 0.14820748567581177, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.0119, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 22.491694352159467, | |
| "grad_norm": 0.15515606105327606, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.0101, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 22.524916943521596, | |
| "grad_norm": 0.1173735037446022, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0061, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 22.558139534883722, | |
| "grad_norm": 0.19914469122886658, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0097, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 22.591362126245848, | |
| "grad_norm": 0.21056263148784637, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0082, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 22.624584717607974, | |
| "grad_norm": 0.1811390370130539, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0088, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 22.6578073089701, | |
| "grad_norm": 0.24768255650997162, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0107, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 22.691029900332225, | |
| "grad_norm": 0.17765392363071442, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0094, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 22.72425249169435, | |
| "grad_norm": 0.23039370775222778, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0081, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 22.757475083056477, | |
| "grad_norm": 0.1583041399717331, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.008, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 22.790697674418606, | |
| "grad_norm": 0.140150249004364, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0081, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 22.823920265780732, | |
| "grad_norm": 0.12436725944280624, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.0078, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 22.857142857142858, | |
| "grad_norm": 0.21081511676311493, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0086, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 22.890365448504983, | |
| "grad_norm": 0.12631870806217194, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0121, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 22.92358803986711, | |
| "grad_norm": 0.11978428810834885, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0109, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 22.956810631229235, | |
| "grad_norm": 0.14292840659618378, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0108, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 22.99003322259136, | |
| "grad_norm": 0.17778891324996948, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0079, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 23.023255813953487, | |
| "grad_norm": 0.18700818717479706, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0157, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 23.056478405315616, | |
| "grad_norm": 0.22154302895069122, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.016, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 23.089700996677742, | |
| "grad_norm": 0.14165480434894562, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0084, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 23.122923588039868, | |
| "grad_norm": 0.1183454692363739, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0089, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 23.156146179401993, | |
| "grad_norm": 0.3031390905380249, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.0123, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 23.18936877076412, | |
| "grad_norm": 0.1480850726366043, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0091, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 23.222591362126245, | |
| "grad_norm": 0.19382554292678833, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0074, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 23.25581395348837, | |
| "grad_norm": 0.15653474628925323, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0117, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 23.289036544850497, | |
| "grad_norm": 0.18898485600948334, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.009, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 23.322259136212626, | |
| "grad_norm": 0.14634114503860474, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0081, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 23.35548172757475, | |
| "grad_norm": 0.1799151599407196, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0091, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 23.388704318936878, | |
| "grad_norm": 0.13711026310920715, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0074, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 23.421926910299003, | |
| "grad_norm": 0.25118833780288696, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0085, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 23.45514950166113, | |
| "grad_norm": 0.24253642559051514, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0097, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 23.488372093023255, | |
| "grad_norm": 0.2377505898475647, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.014, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 23.52159468438538, | |
| "grad_norm": 0.15439948439598083, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.009, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 23.55481727574751, | |
| "grad_norm": 0.16242630779743195, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0092, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 23.588039867109636, | |
| "grad_norm": 0.20561891794204712, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0081, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 23.62126245847176, | |
| "grad_norm": 0.15706400573253632, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0077, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 23.654485049833887, | |
| "grad_norm": 0.22200746834278107, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0075, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 23.687707641196013, | |
| "grad_norm": 0.17513321340084076, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0086, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 23.72093023255814, | |
| "grad_norm": 0.15748685598373413, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0079, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 23.754152823920265, | |
| "grad_norm": 0.2524207532405853, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0085, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 23.78737541528239, | |
| "grad_norm": 0.1676102578639984, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.009, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 23.82059800664452, | |
| "grad_norm": 0.18977515399456024, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0101, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 23.853820598006646, | |
| "grad_norm": 0.16387055814266205, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0097, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 23.88704318936877, | |
| "grad_norm": 0.18103492259979248, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0085, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 23.920265780730897, | |
| "grad_norm": 0.17272917926311493, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0087, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 23.953488372093023, | |
| "grad_norm": 0.19008910655975342, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0116, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 23.98671096345515, | |
| "grad_norm": 0.17901214957237244, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0084, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 24.019933554817275, | |
| "grad_norm": 0.17212511599063873, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0064, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 24.0531561461794, | |
| "grad_norm": 0.3507051467895508, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.0068, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 24.08637873754153, | |
| "grad_norm": 0.18519094586372375, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0101, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 24.119601328903656, | |
| "grad_norm": 0.24379052221775055, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0082, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 24.15282392026578, | |
| "grad_norm": 0.12175963073968887, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0059, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 24.186046511627907, | |
| "grad_norm": 0.21823808550834656, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0076, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 24.219269102990033, | |
| "grad_norm": 0.16986721754074097, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.007, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 24.25249169435216, | |
| "grad_norm": 0.19669653475284576, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0081, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 24.285714285714285, | |
| "grad_norm": 0.14709433913230896, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0068, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 24.31893687707641, | |
| "grad_norm": 0.1506931632757187, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.0108, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 24.35215946843854, | |
| "grad_norm": 0.14726154506206512, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0089, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 24.385382059800666, | |
| "grad_norm": 0.21428826451301575, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0088, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 24.41860465116279, | |
| "grad_norm": 0.13095128536224365, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0099, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 24.451827242524917, | |
| "grad_norm": 0.17902006208896637, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0103, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 24.485049833887043, | |
| "grad_norm": 0.1946352869272232, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0072, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 24.51827242524917, | |
| "grad_norm": 0.1819358617067337, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0072, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 24.551495016611295, | |
| "grad_norm": 0.156645268201828, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0074, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 24.58471760797342, | |
| "grad_norm": 0.16451464593410492, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.0081, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 24.61794019933555, | |
| "grad_norm": 0.12670625746250153, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0097, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 24.651162790697676, | |
| "grad_norm": 0.14196054637432098, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0079, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 24.6843853820598, | |
| "grad_norm": 0.11734014004468918, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0081, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 24.717607973421927, | |
| "grad_norm": 0.11686446517705917, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0098, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 24.750830564784053, | |
| "grad_norm": 0.21845193207263947, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0068, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 24.78405315614618, | |
| "grad_norm": 0.19278839230537415, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.009, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 24.817275747508305, | |
| "grad_norm": 0.13726863265037537, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.009, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 24.85049833887043, | |
| "grad_norm": 0.14601445198059082, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0085, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 24.88372093023256, | |
| "grad_norm": 0.13516046106815338, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0106, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 24.916943521594686, | |
| "grad_norm": 0.12706805765628815, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0094, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 24.95016611295681, | |
| "grad_norm": 0.15375488996505737, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0078, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 24.983388704318937, | |
| "grad_norm": 0.10782153159379959, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0067, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 25.016611295681063, | |
| "grad_norm": 0.19712236523628235, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0081, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 25.04983388704319, | |
| "grad_norm": 0.1177951842546463, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0089, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 25.083056478405314, | |
| "grad_norm": 0.18782255053520203, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0087, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 25.11627906976744, | |
| "grad_norm": 0.1974753737449646, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0085, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 25.14950166112957, | |
| "grad_norm": 0.27396559715270996, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0066, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 25.182724252491695, | |
| "grad_norm": 0.15109151601791382, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0075, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 25.21594684385382, | |
| "grad_norm": 0.1766371876001358, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0059, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 25.249169435215947, | |
| "grad_norm": 0.16913636028766632, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0121, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 25.282392026578073, | |
| "grad_norm": 0.15169569849967957, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0091, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 25.3156146179402, | |
| "grad_norm": 0.1753765344619751, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.0081, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 25.348837209302324, | |
| "grad_norm": 0.16382759809494019, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0072, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 25.38205980066445, | |
| "grad_norm": 0.23668570816516876, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0078, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 25.41528239202658, | |
| "grad_norm": 0.153398334980011, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0071, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 25.448504983388705, | |
| "grad_norm": 0.3028714656829834, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0073, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 25.48172757475083, | |
| "grad_norm": 0.22805887460708618, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0077, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 25.514950166112957, | |
| "grad_norm": 0.19053567945957184, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0087, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 25.548172757475083, | |
| "grad_norm": 0.17864179611206055, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.0072, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 25.58139534883721, | |
| "grad_norm": 0.13033527135849, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.007, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 25.614617940199334, | |
| "grad_norm": 0.12433844804763794, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0081, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 25.64784053156146, | |
| "grad_norm": 0.18164899945259094, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0065, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 25.68106312292359, | |
| "grad_norm": 0.0895913615822792, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.007, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 25.714285714285715, | |
| "grad_norm": 0.21247518062591553, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0069, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 25.74750830564784, | |
| "grad_norm": 0.20556800067424774, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0083, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 25.780730897009967, | |
| "grad_norm": 0.15787535905838013, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0089, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 25.813953488372093, | |
| "grad_norm": 0.13835358619689941, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0073, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 25.84717607973422, | |
| "grad_norm": 0.12107749283313751, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0073, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 25.880398671096344, | |
| "grad_norm": 0.10240599513053894, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.006, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 25.91362126245847, | |
| "grad_norm": 0.14549055695533752, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0067, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 25.9468438538206, | |
| "grad_norm": 0.2025129497051239, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0063, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 25.980066445182725, | |
| "grad_norm": 0.07502119243144989, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.006, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 26.01328903654485, | |
| "grad_norm": 0.14232605695724487, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0054, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 26.046511627906977, | |
| "grad_norm": 0.19360148906707764, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0103, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 26.079734219269103, | |
| "grad_norm": 0.13156625628471375, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.0081, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 26.11295681063123, | |
| "grad_norm": 0.1898665726184845, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0111, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 26.146179401993354, | |
| "grad_norm": 0.15508055686950684, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0071, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 26.17940199335548, | |
| "grad_norm": 0.19671839475631714, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0054, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 26.21262458471761, | |
| "grad_norm": 0.2435973882675171, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0081, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 26.245847176079735, | |
| "grad_norm": 0.13469842076301575, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0084, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 26.27906976744186, | |
| "grad_norm": 0.1078217402100563, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.008, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 26.312292358803987, | |
| "grad_norm": 0.09761466830968857, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.008, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 26.345514950166113, | |
| "grad_norm": 0.22728003561496735, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0076, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 26.37873754152824, | |
| "grad_norm": 0.1786966621875763, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.0084, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 26.411960132890364, | |
| "grad_norm": 0.13671472668647766, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0064, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 26.44518272425249, | |
| "grad_norm": 0.1379915177822113, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0099, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 26.47840531561462, | |
| "grad_norm": 0.2053348422050476, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0085, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 26.511627906976745, | |
| "grad_norm": 0.15667298436164856, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0108, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 26.54485049833887, | |
| "grad_norm": 0.13062669336795807, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0102, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 26.578073089700997, | |
| "grad_norm": 0.17740964889526367, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0089, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 26.611295681063122, | |
| "grad_norm": 0.15447011590003967, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.0087, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 26.64451827242525, | |
| "grad_norm": 0.14210566878318787, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0077, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 26.677740863787374, | |
| "grad_norm": 0.1365930140018463, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0078, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 26.710963455149503, | |
| "grad_norm": 0.1764291524887085, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0069, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 26.74418604651163, | |
| "grad_norm": 0.1317356824874878, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.0074, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 26.777408637873755, | |
| "grad_norm": 0.1677209883928299, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0072, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 26.81063122923588, | |
| "grad_norm": 0.0969085842370987, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0056, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 26.843853820598007, | |
| "grad_norm": 0.11716752499341965, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0054, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 26.877076411960132, | |
| "grad_norm": 0.11322319507598877, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0076, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 26.910299003322258, | |
| "grad_norm": 0.08541762083768845, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0059, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 26.943521594684384, | |
| "grad_norm": 0.17191821336746216, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0088, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 26.97674418604651, | |
| "grad_norm": 0.1234305128455162, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0046, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 27.00996677740864, | |
| "grad_norm": 0.16608087718486786, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0055, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 27.043189368770765, | |
| "grad_norm": 0.22004766762256622, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0077, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 27.07641196013289, | |
| "grad_norm": 0.095779649913311, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0066, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 27.109634551495017, | |
| "grad_norm": 0.09602490812540054, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0046, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 27.142857142857142, | |
| "grad_norm": 0.11624102294445038, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0084, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 27.176079734219268, | |
| "grad_norm": 0.1401866227388382, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0075, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 27.209302325581394, | |
| "grad_norm": 0.10816841572523117, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0054, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 27.242524916943523, | |
| "grad_norm": 0.15892091393470764, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.007, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 27.27574750830565, | |
| "grad_norm": 0.21621884405612946, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0079, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 27.308970099667775, | |
| "grad_norm": 0.14722590148448944, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0067, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 27.3421926910299, | |
| "grad_norm": 0.13478265702724457, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0055, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 27.375415282392026, | |
| "grad_norm": 0.22776460647583008, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0086, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 27.408637873754152, | |
| "grad_norm": 0.19308777153491974, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0057, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 27.441860465116278, | |
| "grad_norm": 0.14634957909584045, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.0072, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 27.475083056478404, | |
| "grad_norm": 0.12301809340715408, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0058, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 27.508305647840533, | |
| "grad_norm": 0.09747922420501709, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0068, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 27.54152823920266, | |
| "grad_norm": 0.1105487197637558, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0066, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 27.574750830564785, | |
| "grad_norm": 0.2476223260164261, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0104, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 27.60797342192691, | |
| "grad_norm": 0.30715879797935486, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.0063, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 27.641196013289036, | |
| "grad_norm": 0.1410503387451172, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.0071, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 27.674418604651162, | |
| "grad_norm": 0.1309058517217636, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0059, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 27.707641196013288, | |
| "grad_norm": 0.16256766021251678, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0089, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 27.740863787375414, | |
| "grad_norm": 0.10909809172153473, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0052, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 27.774086378737543, | |
| "grad_norm": 0.23795467615127563, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0081, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 27.80730897009967, | |
| "grad_norm": 0.1650657057762146, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0067, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 27.840531561461795, | |
| "grad_norm": 0.14811821281909943, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.006, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 27.87375415282392, | |
| "grad_norm": 0.13227351009845734, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0059, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 27.906976744186046, | |
| "grad_norm": 0.1540694683790207, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.0058, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 27.940199335548172, | |
| "grad_norm": 0.1506929099559784, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0064, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 27.973421926910298, | |
| "grad_norm": 0.07793653756380081, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0066, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 28.006644518272424, | |
| "grad_norm": 0.2653924524784088, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0035, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 28.039867109634553, | |
| "grad_norm": 0.18384550511837006, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.0054, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 28.07308970099668, | |
| "grad_norm": 0.11345284432172775, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0048, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 28.106312292358805, | |
| "grad_norm": 0.09172622859477997, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.005, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 28.13953488372093, | |
| "grad_norm": 0.14451231062412262, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.005, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 28.172757475083056, | |
| "grad_norm": 0.08029838651418686, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0052, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 28.205980066445182, | |
| "grad_norm": 0.12096463143825531, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0063, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 28.239202657807308, | |
| "grad_norm": 0.19323348999023438, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0064, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 28.272425249169434, | |
| "grad_norm": 0.07583433389663696, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0116, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 28.305647840531563, | |
| "grad_norm": 0.18973293900489807, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0082, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 28.33887043189369, | |
| "grad_norm": 0.1218564510345459, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.0062, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 28.372093023255815, | |
| "grad_norm": 0.17067843675613403, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0077, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 28.40531561461794, | |
| "grad_norm": 0.18358756601810455, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0061, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 28.438538205980066, | |
| "grad_norm": 0.1261061728000641, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0047, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 28.471760797342192, | |
| "grad_norm": 0.08276877552270889, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0055, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 28.504983388704318, | |
| "grad_norm": 0.11832363903522491, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0056, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 28.538205980066444, | |
| "grad_norm": 0.17504636943340302, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0054, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 28.571428571428573, | |
| "grad_norm": 0.11633206903934479, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0073, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 28.6046511627907, | |
| "grad_norm": 0.16951923072338104, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.0044, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 28.637873754152825, | |
| "grad_norm": 0.06973368674516678, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0068, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 28.67109634551495, | |
| "grad_norm": 0.13720272481441498, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0075, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 28.704318936877076, | |
| "grad_norm": 0.12353954464197159, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0073, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 28.737541528239202, | |
| "grad_norm": 0.1034538745880127, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0076, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 28.770764119601328, | |
| "grad_norm": 0.1328953206539154, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.006, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 28.803986710963454, | |
| "grad_norm": 0.13733744621276855, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0056, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 28.837209302325583, | |
| "grad_norm": 0.1435350924730301, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0074, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 28.87043189368771, | |
| "grad_norm": 0.10954142361879349, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0056, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 28.903654485049834, | |
| "grad_norm": 0.2844279408454895, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.006, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 28.93687707641196, | |
| "grad_norm": 0.09259454160928726, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0064, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 28.970099667774086, | |
| "grad_norm": 0.11572569608688354, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0049, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 29.003322259136212, | |
| "grad_norm": 0.13336044549942017, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.0082, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 29.036544850498338, | |
| "grad_norm": 0.09690898656845093, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0039, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 29.069767441860463, | |
| "grad_norm": 0.17928744852542877, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0065, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 29.102990033222593, | |
| "grad_norm": 0.13231436908245087, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0049, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 29.13621262458472, | |
| "grad_norm": 0.19340196251869202, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.0079, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 29.169435215946844, | |
| "grad_norm": 0.1559029370546341, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0047, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 29.20265780730897, | |
| "grad_norm": 0.12119326740503311, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.007, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 29.235880398671096, | |
| "grad_norm": 0.14684447646141052, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0055, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 29.269102990033222, | |
| "grad_norm": 0.12230011075735092, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0067, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 29.302325581395348, | |
| "grad_norm": 0.1537322849035263, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.0073, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 29.335548172757473, | |
| "grad_norm": 0.14978474378585815, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0097, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 29.368770764119603, | |
| "grad_norm": 0.18187600374221802, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0044, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 29.40199335548173, | |
| "grad_norm": 0.1751221865415573, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.0069, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 29.435215946843854, | |
| "grad_norm": 0.12987150251865387, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0057, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 29.46843853820598, | |
| "grad_norm": 0.07622663676738739, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0046, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 29.501661129568106, | |
| "grad_norm": 0.2524184286594391, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.0088, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 29.53488372093023, | |
| "grad_norm": 0.11506592482328415, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0066, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 29.568106312292358, | |
| "grad_norm": 0.19349972903728485, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.005, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 29.601328903654483, | |
| "grad_norm": 0.16394150257110596, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.006, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 29.634551495016613, | |
| "grad_norm": 0.13305546343326569, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0051, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 29.66777408637874, | |
| "grad_norm": 0.06419292092323303, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0082, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 29.700996677740864, | |
| "grad_norm": 0.0762164518237114, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0051, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 29.73421926910299, | |
| "grad_norm": 0.1778092384338379, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.005, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 29.767441860465116, | |
| "grad_norm": 0.21796007454395294, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0053, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 29.80066445182724, | |
| "grad_norm": 0.1496661901473999, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.006, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 29.833887043189367, | |
| "grad_norm": 0.1072162613272667, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0069, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 29.867109634551497, | |
| "grad_norm": 0.18262557685375214, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.007, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 29.900332225913623, | |
| "grad_norm": 0.16827644407749176, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.0045, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 29.93355481727575, | |
| "grad_norm": 0.1170128583908081, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.009, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 29.966777408637874, | |
| "grad_norm": 0.18168750405311584, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0079, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.13957791030406952, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.0063, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 30.033222591362126, | |
| "grad_norm": 0.1216946691274643, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0046, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 30.06644518272425, | |
| "grad_norm": 0.10699830204248428, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0047, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 30.099667774086377, | |
| "grad_norm": 0.07205400615930557, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0041, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 30.132890365448507, | |
| "grad_norm": 0.09559330344200134, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0042, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 30.166112956810633, | |
| "grad_norm": 0.10662047564983368, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0064, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 30.19933554817276, | |
| "grad_norm": 0.18922418355941772, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0062, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 30.232558139534884, | |
| "grad_norm": 0.09248873591423035, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0056, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 30.26578073089701, | |
| "grad_norm": 0.07492813467979431, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0062, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 30.299003322259136, | |
| "grad_norm": 0.04594607278704643, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.0051, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 30.33222591362126, | |
| "grad_norm": 0.1435636729001999, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0065, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 30.365448504983387, | |
| "grad_norm": 0.09786313772201538, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.006, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 30.398671096345517, | |
| "grad_norm": 0.05220019444823265, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0063, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 30.431893687707642, | |
| "grad_norm": 0.09716635942459106, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0037, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 30.46511627906977, | |
| "grad_norm": 0.07489084452390671, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0055, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 30.498338870431894, | |
| "grad_norm": 0.1921928972005844, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0084, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 30.53156146179402, | |
| "grad_norm": 0.14873501658439636, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.0053, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 30.564784053156146, | |
| "grad_norm": 0.18568743765354156, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0074, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 30.59800664451827, | |
| "grad_norm": 0.22179032862186432, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0058, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 30.631229235880397, | |
| "grad_norm": 0.10541599243879318, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0046, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 30.664451827242527, | |
| "grad_norm": 0.09513723850250244, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.0067, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 30.697674418604652, | |
| "grad_norm": 0.10265201330184937, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0047, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 30.730897009966778, | |
| "grad_norm": 0.11902108788490295, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.007, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 30.764119601328904, | |
| "grad_norm": 0.12097522616386414, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0054, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 30.79734219269103, | |
| "grad_norm": 0.09330645948648453, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0057, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 30.830564784053156, | |
| "grad_norm": 0.1806746870279312, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0086, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 30.86378737541528, | |
| "grad_norm": 0.1468958705663681, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.006, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 30.897009966777407, | |
| "grad_norm": 0.032069865614175797, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0058, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 30.930232558139537, | |
| "grad_norm": 0.1010846197605133, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.0041, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 30.963455149501662, | |
| "grad_norm": 0.06703367084264755, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0058, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 30.996677740863788, | |
| "grad_norm": 0.18721553683280945, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0074, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 31.029900332225914, | |
| "grad_norm": 0.19383810460567474, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0068, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 31.06312292358804, | |
| "grad_norm": 0.1317012906074524, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0059, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 31.096345514950166, | |
| "grad_norm": 0.1485646516084671, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0074, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 31.12956810631229, | |
| "grad_norm": 0.19343367218971252, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0065, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 31.162790697674417, | |
| "grad_norm": 0.10918621718883514, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.0048, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 31.196013289036546, | |
| "grad_norm": 0.1376069337129593, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0075, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 31.229235880398672, | |
| "grad_norm": 0.07469493895769119, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0071, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 31.262458471760798, | |
| "grad_norm": 0.0960230752825737, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0065, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 31.295681063122924, | |
| "grad_norm": 0.08492382615804672, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0056, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 31.32890365448505, | |
| "grad_norm": 0.11253948509693146, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0053, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 31.362126245847175, | |
| "grad_norm": 0.16154888272285461, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0057, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 31.3953488372093, | |
| "grad_norm": 0.14829444885253906, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0059, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 31.428571428571427, | |
| "grad_norm": 0.1551903337240219, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0053, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 31.461794019933556, | |
| "grad_norm": 0.1427416205406189, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0081, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 31.495016611295682, | |
| "grad_norm": 0.06173241510987282, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0052, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 31.528239202657808, | |
| "grad_norm": 0.20448872447013855, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0065, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 31.561461794019934, | |
| "grad_norm": 0.17006798088550568, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0056, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 31.59468438538206, | |
| "grad_norm": 0.1203543022274971, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0055, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 31.627906976744185, | |
| "grad_norm": 0.12413649260997772, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0065, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 31.66112956810631, | |
| "grad_norm": 0.2612221837043762, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0093, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 31.694352159468437, | |
| "grad_norm": 0.17194348573684692, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0054, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 31.727574750830566, | |
| "grad_norm": 0.11993417143821716, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0068, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 31.760797342192692, | |
| "grad_norm": 0.1255716234445572, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0049, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 31.794019933554818, | |
| "grad_norm": 0.1203114315867424, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0086, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 31.827242524916944, | |
| "grad_norm": 0.17764365673065186, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0071, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 31.86046511627907, | |
| "grad_norm": 0.1459934264421463, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0067, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 31.893687707641195, | |
| "grad_norm": 0.24273799359798431, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.0065, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 31.92691029900332, | |
| "grad_norm": 0.1205635517835617, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0062, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 31.960132890365447, | |
| "grad_norm": 0.08896120637655258, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0048, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 31.993355481727576, | |
| "grad_norm": 0.13939517736434937, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0043, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 32.0265780730897, | |
| "grad_norm": 0.28227749466896057, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0051, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 32.05980066445183, | |
| "grad_norm": 0.11711577326059341, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0083, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 32.093023255813954, | |
| "grad_norm": 0.188113734126091, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.0085, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 32.12624584717608, | |
| "grad_norm": 0.15848992764949799, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0046, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 32.159468438538205, | |
| "grad_norm": 0.09046372771263123, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0055, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 32.19269102990033, | |
| "grad_norm": 0.08672670274972916, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0068, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 32.22591362126246, | |
| "grad_norm": 0.22394372522830963, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0063, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 32.25913621262458, | |
| "grad_norm": 0.10601115971803665, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0065, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 32.29235880398671, | |
| "grad_norm": 0.15347513556480408, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0049, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 32.325581395348834, | |
| "grad_norm": 0.2134745717048645, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.005, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 32.35880398671096, | |
| "grad_norm": 0.15517526865005493, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0081, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 32.39202657807309, | |
| "grad_norm": 0.07906592637300491, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0053, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 32.42524916943522, | |
| "grad_norm": 0.17843244969844818, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.007, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 32.458471760797345, | |
| "grad_norm": 0.20996271073818207, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0057, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 32.49169435215947, | |
| "grad_norm": 0.07877013832330704, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0049, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 32.524916943521596, | |
| "grad_norm": 0.18172283470630646, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0046, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 32.55813953488372, | |
| "grad_norm": 0.082504041492939, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0049, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 32.59136212624585, | |
| "grad_norm": 0.05255156010389328, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0051, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 32.62458471760797, | |
| "grad_norm": 0.07134050875902176, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.008, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 32.6578073089701, | |
| "grad_norm": 0.09449620544910431, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.0044, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 32.691029900332225, | |
| "grad_norm": 0.10606354475021362, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0064, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 32.72425249169435, | |
| "grad_norm": 0.10147126019001007, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.0037, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 32.75747508305648, | |
| "grad_norm": 0.12870968878269196, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0094, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 32.7906976744186, | |
| "grad_norm": 0.09744128584861755, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0063, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 32.82392026578073, | |
| "grad_norm": 0.13845191895961761, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0071, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 32.857142857142854, | |
| "grad_norm": 0.08200283348560333, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0078, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 32.89036544850498, | |
| "grad_norm": 0.08790437132120132, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.004, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 32.92358803986711, | |
| "grad_norm": 0.13174009323120117, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0061, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 32.95681063122924, | |
| "grad_norm": 0.20475421845912933, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.008, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 32.990033222591364, | |
| "grad_norm": 0.12375841289758682, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0069, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 33.02325581395349, | |
| "grad_norm": 0.11901507526636124, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0056, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 33.056478405315616, | |
| "grad_norm": 0.049505483359098434, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0094, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 33.08970099667774, | |
| "grad_norm": 0.08091825991868973, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0049, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 33.12292358803987, | |
| "grad_norm": 0.12687945365905762, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0081, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 33.15614617940199, | |
| "grad_norm": 0.09400459378957748, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.0069, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 33.18936877076412, | |
| "grad_norm": 0.06770706921815872, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.0068, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 33.222591362126245, | |
| "grad_norm": 0.18788298964500427, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0052, | |
| "step": 10000 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 34, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 32, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |