{
  "best_global_step": null,
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 53.191489361702125,
  "eval_steps": 500,
  "global_step": 10000,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.05319148936170213,
      "grad_norm": 7.919111251831055,
      "learning_rate": 1.8e-06,
      "loss": 1.0579,
      "step": 10
    },
    {
      "epoch": 0.10638297872340426,
      "grad_norm": 3.648937463760376,
      "learning_rate": 3.8e-06,
      "loss": 0.8716,
      "step": 20
    },
    {
      "epoch": 0.1595744680851064,
      "grad_norm": 1.0641181468963623,
      "learning_rate": 5.8e-06,
      "loss": 0.388,
      "step": 30
    },
    {
      "epoch": 0.2127659574468085,
      "grad_norm": 0.7329219579696655,
      "learning_rate": 7.8e-06,
      "loss": 0.1894,
      "step": 40
    },
    {
      "epoch": 0.26595744680851063,
      "grad_norm": 0.6118923425674438,
      "learning_rate": 9.800000000000001e-06,
      "loss": 0.1196,
      "step": 50
    },
    {
      "epoch": 0.3191489361702128,
      "grad_norm": 0.5522341728210449,
      "learning_rate": 1.18e-05,
      "loss": 0.0976,
      "step": 60
    },
    {
      "epoch": 0.3723404255319149,
      "grad_norm": 0.3278864324092865,
      "learning_rate": 1.3800000000000002e-05,
      "loss": 0.0925,
      "step": 70
    },
    {
      "epoch": 0.425531914893617,
      "grad_norm": 0.2791081666946411,
      "learning_rate": 1.58e-05,
      "loss": 0.0806,
      "step": 80
    },
    {
      "epoch": 0.4787234042553192,
      "grad_norm": 0.342252254486084,
      "learning_rate": 1.78e-05,
      "loss": 0.0737,
      "step": 90
    },
    {
      "epoch": 0.5319148936170213,
      "grad_norm": 0.3785165250301361,
      "learning_rate": 1.9800000000000004e-05,
      "loss": 0.0663,
      "step": 100
    },
    {
      "epoch": 0.5851063829787234,
      "grad_norm": 0.3752205967903137,
      "learning_rate": 2.18e-05,
      "loss": 0.0608,
      "step": 110
    },
    {
      "epoch": 0.6382978723404256,
      "grad_norm": 0.5072270035743713,
      "learning_rate": 2.38e-05,
      "loss": 0.0555,
      "step": 120
    },
    {
      "epoch": 0.6914893617021277,
      "grad_norm": 0.3033965229988098,
      "learning_rate": 2.58e-05,
      "loss": 0.0526,
      "step": 130
    },
    {
      "epoch": 0.7446808510638298,
      "grad_norm": 0.35947826504707336,
      "learning_rate": 2.7800000000000005e-05,
      "loss": 0.053,
      "step": 140
    },
    {
      "epoch": 0.7978723404255319,
      "grad_norm": 0.44656550884246826,
      "learning_rate": 2.98e-05,
      "loss": 0.0483,
      "step": 150
    },
    {
      "epoch": 0.851063829787234,
      "grad_norm": 0.4438610076904297,
      "learning_rate": 3.18e-05,
      "loss": 0.0429,
      "step": 160
    },
    {
      "epoch": 0.9042553191489362,
      "grad_norm": 0.3336922824382782,
      "learning_rate": 3.38e-05,
      "loss": 0.0409,
      "step": 170
    },
    {
      "epoch": 0.9574468085106383,
      "grad_norm": 0.3612188696861267,
      "learning_rate": 3.58e-05,
      "loss": 0.0388,
      "step": 180
    },
    {
      "epoch": 1.0106382978723405,
      "grad_norm": 0.384271502494812,
      "learning_rate": 3.7800000000000004e-05,
      "loss": 0.039,
      "step": 190
    },
    {
      "epoch": 1.0638297872340425,
      "grad_norm": 0.2974545359611511,
      "learning_rate": 3.9800000000000005e-05,
      "loss": 0.0365,
      "step": 200
    },
    {
      "epoch": 1.1170212765957448,
      "grad_norm": 0.33678877353668213,
      "learning_rate": 4.18e-05,
      "loss": 0.0317,
      "step": 210
    },
    {
      "epoch": 1.1702127659574468,
      "grad_norm": 0.3305199444293976,
      "learning_rate": 4.38e-05,
      "loss": 0.0288,
      "step": 220
    },
    {
      "epoch": 1.2234042553191489,
      "grad_norm": 0.23536141216754913,
      "learning_rate": 4.58e-05,
      "loss": 0.0272,
      "step": 230
    },
    {
      "epoch": 1.2765957446808511,
      "grad_norm": 0.2794311046600342,
      "learning_rate": 4.78e-05,
      "loss": 0.0284,
      "step": 240
    },
    {
      "epoch": 1.3297872340425532,
      "grad_norm": 0.31590068340301514,
      "learning_rate": 4.9800000000000004e-05,
      "loss": 0.0255,
      "step": 250
    },
    {
      "epoch": 1.3829787234042552,
      "grad_norm": 0.2788073420524597,
      "learning_rate": 5.1800000000000005e-05,
      "loss": 0.0229,
      "step": 260
    },
    {
      "epoch": 1.4361702127659575,
      "grad_norm": 0.25668105483055115,
      "learning_rate": 5.380000000000001e-05,
      "loss": 0.0262,
      "step": 270
    },
    {
      "epoch": 1.4893617021276595,
      "grad_norm": 0.2523292601108551,
      "learning_rate": 5.580000000000001e-05,
      "loss": 0.024,
      "step": 280
    },
    {
      "epoch": 1.5425531914893615,
      "grad_norm": 0.3773905038833618,
      "learning_rate": 5.7799999999999995e-05,
      "loss": 0.023,
      "step": 290
    },
    {
      "epoch": 1.5957446808510638,
      "grad_norm": 0.29697832465171814,
      "learning_rate": 5.9800000000000003e-05,
      "loss": 0.0266,
      "step": 300
    },
    {
      "epoch": 1.648936170212766,
      "grad_norm": 0.2920249402523041,
      "learning_rate": 6.18e-05,
      "loss": 0.0221,
      "step": 310
    },
    {
      "epoch": 1.702127659574468,
      "grad_norm": 0.33827799558639526,
      "learning_rate": 6.38e-05,
      "loss": 0.0205,
      "step": 320
    },
    {
      "epoch": 1.7553191489361701,
      "grad_norm": 0.29844555258750916,
      "learning_rate": 6.58e-05,
      "loss": 0.0223,
      "step": 330
    },
    {
      "epoch": 1.8085106382978724,
      "grad_norm": 0.29775071144104004,
      "learning_rate": 6.780000000000001e-05,
      "loss": 0.0219,
      "step": 340
    },
    {
      "epoch": 1.8617021276595744,
      "grad_norm": 0.2987380623817444,
      "learning_rate": 6.98e-05,
      "loss": 0.0192,
      "step": 350
    },
    {
      "epoch": 1.9148936170212765,
      "grad_norm": 0.24299071729183197,
      "learning_rate": 7.18e-05,
      "loss": 0.0182,
      "step": 360
    },
    {
      "epoch": 1.9680851063829787,
      "grad_norm": 0.38501736521720886,
      "learning_rate": 7.38e-05,
      "loss": 0.0189,
      "step": 370
    },
    {
      "epoch": 2.021276595744681,
      "grad_norm": 0.24245889484882355,
      "learning_rate": 7.58e-05,
      "loss": 0.0196,
      "step": 380
    },
    {
      "epoch": 2.074468085106383,
      "grad_norm": 0.3855898380279541,
      "learning_rate": 7.780000000000001e-05,
      "loss": 0.0215,
      "step": 390
    },
    {
      "epoch": 2.127659574468085,
      "grad_norm": 0.2730031907558441,
      "learning_rate": 7.98e-05,
      "loss": 0.0184,
      "step": 400
    },
    {
      "epoch": 2.1808510638297873,
      "grad_norm": 0.2059756964445114,
      "learning_rate": 8.18e-05,
      "loss": 0.017,
      "step": 410
    },
    {
      "epoch": 2.2340425531914896,
      "grad_norm": 0.3113689422607422,
      "learning_rate": 8.38e-05,
      "loss": 0.0166,
      "step": 420
    },
    {
      "epoch": 2.2872340425531914,
      "grad_norm": 0.283891886472702,
      "learning_rate": 8.58e-05,
      "loss": 0.0213,
      "step": 430
    },
    {
      "epoch": 2.3404255319148937,
      "grad_norm": 0.29447758197784424,
      "learning_rate": 8.78e-05,
      "loss": 0.0179,
      "step": 440
    },
    {
      "epoch": 2.393617021276596,
      "grad_norm": 0.29670682549476624,
      "learning_rate": 8.98e-05,
      "loss": 0.0203,
      "step": 450
    },
    {
      "epoch": 2.4468085106382977,
      "grad_norm": 0.310756117105484,
      "learning_rate": 9.180000000000001e-05,
      "loss": 0.0184,
      "step": 460
    },
    {
      "epoch": 2.5,
      "grad_norm": 0.2237200140953064,
      "learning_rate": 9.38e-05,
      "loss": 0.0169,
      "step": 470
    },
    {
      "epoch": 2.5531914893617023,
      "grad_norm": 0.3102606534957886,
      "learning_rate": 9.58e-05,
      "loss": 0.0167,
      "step": 480
    },
    {
      "epoch": 2.6063829787234045,
      "grad_norm": 0.18940214812755585,
      "learning_rate": 9.78e-05,
      "loss": 0.0146,
      "step": 490
    },
    {
      "epoch": 2.6595744680851063,
      "grad_norm": 0.20104342699050903,
      "learning_rate": 9.98e-05,
      "loss": 0.0147,
      "step": 500
    },
    {
      "epoch": 2.7127659574468086,
      "grad_norm": 0.27057933807373047,
      "learning_rate": 9.9999778549206e-05,
      "loss": 0.0168,
      "step": 510
    },
    {
      "epoch": 2.7659574468085104,
      "grad_norm": 0.3414050340652466,
      "learning_rate": 9.999901304280685e-05,
      "loss": 0.0191,
      "step": 520
    },
    {
      "epoch": 2.8191489361702127,
      "grad_norm": 0.3564453721046448,
      "learning_rate": 9.999770075521164e-05,
      "loss": 0.0158,
      "step": 530
    },
    {
      "epoch": 2.872340425531915,
      "grad_norm": 0.278254896402359,
      "learning_rate": 9.99958417007713e-05,
      "loss": 0.0169,
      "step": 540
    },
    {
      "epoch": 2.925531914893617,
      "grad_norm": 0.35206979513168335,
      "learning_rate": 9.999343589981615e-05,
      "loss": 0.0181,
      "step": 550
    },
    {
      "epoch": 2.978723404255319,
      "grad_norm": 0.29966768622398376,
      "learning_rate": 9.999048337865568e-05,
      "loss": 0.0152,
      "step": 560
    },
    {
      "epoch": 3.0319148936170213,
      "grad_norm": 0.27077358961105347,
      "learning_rate": 9.998698416957815e-05,
      "loss": 0.0136,
      "step": 570
    },
    {
      "epoch": 3.0851063829787235,
      "grad_norm": 0.2030802071094513,
      "learning_rate": 9.998293831085037e-05,
      "loss": 0.0162,
      "step": 580
    },
    {
      "epoch": 3.1382978723404253,
      "grad_norm": 0.28943827748298645,
      "learning_rate": 9.997834584671719e-05,
      "loss": 0.0138,
      "step": 590
    },
    {
      "epoch": 3.1914893617021276,
      "grad_norm": 0.2366258203983307,
      "learning_rate": 9.997320682740107e-05,
      "loss": 0.0135,
      "step": 600
    },
    {
      "epoch": 3.24468085106383,
      "grad_norm": 0.21549157798290253,
      "learning_rate": 9.996752130910149e-05,
      "loss": 0.0146,
      "step": 610
    },
    {
      "epoch": 3.297872340425532,
      "grad_norm": 0.3054635524749756,
      "learning_rate": 9.99612893539944e-05,
      "loss": 0.012,
      "step": 620
    },
    {
      "epoch": 3.351063829787234,
      "grad_norm": 0.2836059629917145,
      "learning_rate": 9.995451103023144e-05,
      "loss": 0.0141,
      "step": 630
    },
    {
      "epoch": 3.404255319148936,
      "grad_norm": 0.23018118739128113,
      "learning_rate": 9.994718641193928e-05,
      "loss": 0.0147,
      "step": 640
    },
    {
      "epoch": 3.4574468085106385,
      "grad_norm": 0.22073271870613098,
      "learning_rate": 9.993931557921874e-05,
      "loss": 0.0121,
      "step": 650
    },
    {
      "epoch": 3.5106382978723403,
      "grad_norm": 0.31048282980918884,
      "learning_rate": 9.993089861814402e-05,
      "loss": 0.0136,
      "step": 660
    },
    {
      "epoch": 3.5638297872340425,
      "grad_norm": 0.22893300652503967,
      "learning_rate": 9.992193562076166e-05,
      "loss": 0.0127,
      "step": 670
    },
    {
      "epoch": 3.617021276595745,
      "grad_norm": 0.39467865228652954,
      "learning_rate": 9.991242668508954e-05,
      "loss": 0.0159,
      "step": 680
    },
    {
      "epoch": 3.670212765957447,
      "grad_norm": 0.40455904603004456,
      "learning_rate": 9.990237191511587e-05,
      "loss": 0.0144,
      "step": 690
    },
    {
      "epoch": 3.723404255319149,
      "grad_norm": 0.30655208230018616,
      "learning_rate": 9.989177142079802e-05,
      "loss": 0.0119,
      "step": 700
    },
    {
      "epoch": 3.776595744680851,
      "grad_norm": 0.3529535233974457,
      "learning_rate": 9.988062531806126e-05,
      "loss": 0.0146,
      "step": 710
    },
    {
      "epoch": 3.829787234042553,
      "grad_norm": 0.28031685948371887,
      "learning_rate": 9.986893372879762e-05,
      "loss": 0.0134,
      "step": 720
    },
    {
      "epoch": 3.882978723404255,
      "grad_norm": 0.38010212779045105,
      "learning_rate": 9.985669678086443e-05,
      "loss": 0.0168,
      "step": 730
    },
    {
      "epoch": 3.9361702127659575,
      "grad_norm": 0.27353808283805847,
      "learning_rate": 9.984391460808298e-05,
      "loss": 0.0129,
      "step": 740
    },
    {
      "epoch": 3.9893617021276597,
      "grad_norm": 0.17019018530845642,
      "learning_rate": 9.983058735023709e-05,
      "loss": 0.0138,
      "step": 750
    },
    {
      "epoch": 4.042553191489362,
      "grad_norm": 0.21186164021492004,
      "learning_rate": 9.98167151530715e-05,
      "loss": 0.0128,
      "step": 760
    },
    {
      "epoch": 4.095744680851064,
      "grad_norm": 0.27775657176971436,
      "learning_rate": 9.980229816829034e-05,
      "loss": 0.0138,
      "step": 770
    },
    {
      "epoch": 4.148936170212766,
      "grad_norm": 0.27738213539123535,
      "learning_rate": 9.978733655355544e-05,
      "loss": 0.0122,
      "step": 780
    },
    {
      "epoch": 4.202127659574468,
      "grad_norm": 0.18977205455303192,
      "learning_rate": 9.977183047248464e-05,
      "loss": 0.0135,
      "step": 790
    },
    {
      "epoch": 4.25531914893617,
      "grad_norm": 0.2916596233844757,
      "learning_rate": 9.975578009464992e-05,
      "loss": 0.0141,
      "step": 800
    },
    {
      "epoch": 4.308510638297872,
      "grad_norm": 0.2108543962240219,
      "learning_rate": 9.97391855955757e-05,
      "loss": 0.0115,
      "step": 810
    },
    {
      "epoch": 4.361702127659575,
      "grad_norm": 0.2652151882648468,
      "learning_rate": 9.972204715673669e-05,
      "loss": 0.0132,
      "step": 820
    },
    {
      "epoch": 4.414893617021277,
      "grad_norm": 0.3371218144893646,
      "learning_rate": 9.970436496555617e-05,
      "loss": 0.014,
      "step": 830
    },
    {
      "epoch": 4.468085106382979,
      "grad_norm": 0.2222210019826889,
      "learning_rate": 9.968613921540373e-05,
      "loss": 0.0122,
      "step": 840
    },
    {
      "epoch": 4.5212765957446805,
      "grad_norm": 0.29762572050094604,
      "learning_rate": 9.966737010559326e-05,
      "loss": 0.0124,
      "step": 850
    },
    {
      "epoch": 4.574468085106383,
      "grad_norm": 0.17217318713665009,
      "learning_rate": 9.964805784138072e-05,
      "loss": 0.0116,
      "step": 860
    },
    {
      "epoch": 4.627659574468085,
      "grad_norm": 0.3846725821495056,
      "learning_rate": 9.962820263396195e-05,
      "loss": 0.0118,
      "step": 870
    },
    {
      "epoch": 4.680851063829787,
      "grad_norm": 0.2057282030582428,
      "learning_rate": 9.960780470047033e-05,
      "loss": 0.01,
      "step": 880
    },
    {
      "epoch": 4.73404255319149,
      "grad_norm": 0.1839686930179596,
      "learning_rate": 9.958686426397437e-05,
      "loss": 0.0098,
      "step": 890
    },
    {
      "epoch": 4.787234042553192,
      "grad_norm": 0.17685005068778992,
      "learning_rate": 9.956538155347534e-05,
      "loss": 0.0109,
      "step": 900
    },
    {
      "epoch": 4.840425531914894,
      "grad_norm": 0.23603062331676483,
      "learning_rate": 9.95433568039047e-05,
      "loss": 0.012,
      "step": 910
    },
    {
      "epoch": 4.8936170212765955,
      "grad_norm": 0.15905340015888214,
      "learning_rate": 9.952079025612162e-05,
      "loss": 0.011,
      "step": 920
    },
    {
      "epoch": 4.946808510638298,
      "grad_norm": 0.17970947921276093,
      "learning_rate": 9.949768215691022e-05,
      "loss": 0.0115,
      "step": 930
    },
    {
      "epoch": 5.0,
      "grad_norm": 0.21087981760501862,
      "learning_rate": 9.9474032758977e-05,
      "loss": 0.0109,
      "step": 940
    },
    {
      "epoch": 5.053191489361702,
      "grad_norm": 0.2115887701511383,
      "learning_rate": 9.944984232094794e-05,
      "loss": 0.0127,
      "step": 950
    },
    {
      "epoch": 5.1063829787234045,
      "grad_norm": 0.20834548771381378,
      "learning_rate": 9.942511110736584e-05,
      "loss": 0.0136,
      "step": 960
    },
    {
      "epoch": 5.159574468085107,
      "grad_norm": 0.2452813684940338,
      "learning_rate": 9.939983938868726e-05,
      "loss": 0.0112,
      "step": 970
    },
    {
      "epoch": 5.212765957446808,
      "grad_norm": 0.2894115149974823,
      "learning_rate": 9.93740274412797e-05,
      "loss": 0.0114,
      "step": 980
    },
    {
      "epoch": 5.26595744680851,
      "grad_norm": 0.28714385628700256,
      "learning_rate": 9.934767554741846e-05,
      "loss": 0.0145,
      "step": 990
    },
    {
      "epoch": 5.319148936170213,
      "grad_norm": 0.20027801394462585,
      "learning_rate": 9.932078399528361e-05,
      "loss": 0.011,
      "step": 1000
    },
    {
      "epoch": 5.372340425531915,
      "grad_norm": 0.2205878496170044,
      "learning_rate": 9.929335307895689e-05,
      "loss": 0.0108,
      "step": 1010
    },
    {
      "epoch": 5.425531914893617,
      "grad_norm": 0.26671263575553894,
      "learning_rate": 9.926538309841839e-05,
      "loss": 0.011,
      "step": 1020
    },
    {
      "epoch": 5.4787234042553195,
      "grad_norm": 0.1921129673719406,
      "learning_rate": 9.923687435954334e-05,
      "loss": 0.0108,
      "step": 1030
    },
    {
      "epoch": 5.531914893617021,
      "grad_norm": 0.22930213809013367,
      "learning_rate": 9.920782717409873e-05,
      "loss": 0.0097,
      "step": 1040
    },
    {
      "epoch": 5.585106382978723,
      "grad_norm": 0.18687856197357178,
      "learning_rate": 9.917824185973994e-05,
      "loss": 0.01,
      "step": 1050
    },
    {
      "epoch": 5.638297872340425,
      "grad_norm": 0.23251059651374817,
      "learning_rate": 9.914811874000723e-05,
      "loss": 0.0121,
      "step": 1060
    },
    {
      "epoch": 5.691489361702128,
      "grad_norm": 0.19350872933864594,
      "learning_rate": 9.911745814432218e-05,
      "loss": 0.0102,
      "step": 1070
    },
    {
      "epoch": 5.74468085106383,
      "grad_norm": 0.2315436154603958,
      "learning_rate": 9.90862604079842e-05,
      "loss": 0.0126,
      "step": 1080
    },
    {
      "epoch": 5.797872340425532,
      "grad_norm": 0.17269481718540192,
      "learning_rate": 9.90545258721667e-05,
      "loss": 0.0098,
      "step": 1090
    },
    {
      "epoch": 5.851063829787234,
      "grad_norm": 0.1511460244655609,
      "learning_rate": 9.90222548839135e-05,
      "loss": 0.0113,
      "step": 1100
    },
    {
      "epoch": 5.904255319148936,
      "grad_norm": 0.23465615510940552,
      "learning_rate": 9.898944779613495e-05,
      "loss": 0.0114,
      "step": 1110
    },
    {
      "epoch": 5.957446808510638,
      "grad_norm": 0.1964588165283203,
      "learning_rate": 9.89561049676041e-05,
      "loss": 0.0115,
      "step": 1120
    },
    {
      "epoch": 6.01063829787234,
      "grad_norm": 0.30035772919654846,
      "learning_rate": 9.89222267629528e-05,
      "loss": 0.0109,
      "step": 1130
    },
    {
      "epoch": 6.0638297872340425,
      "grad_norm": 0.1801254004240036,
      "learning_rate": 9.888781355266763e-05,
      "loss": 0.0107,
      "step": 1140
    },
    {
      "epoch": 6.117021276595745,
      "grad_norm": 0.2040286362171173,
      "learning_rate": 9.885286571308598e-05,
      "loss": 0.0135,
      "step": 1150
    },
    {
      "epoch": 6.170212765957447,
      "grad_norm": 0.24270781874656677,
      "learning_rate": 9.881738362639182e-05,
      "loss": 0.0101,
      "step": 1160
    },
    {
      "epoch": 6.223404255319149,
      "grad_norm": 0.17117181420326233,
      "learning_rate": 9.878136768061154e-05,
      "loss": 0.0094,
      "step": 1170
    },
    {
      "epoch": 6.276595744680851,
      "grad_norm": 0.21084389090538025,
      "learning_rate": 9.874481826960979e-05,
      "loss": 0.0091,
      "step": 1180
    },
    {
      "epoch": 6.329787234042553,
      "grad_norm": 0.27777960896492004,
      "learning_rate": 9.870773579308503e-05,
      "loss": 0.011,
      "step": 1190
    },
    {
      "epoch": 6.382978723404255,
      "grad_norm": 0.2961879372596741,
      "learning_rate": 9.867012065656533e-05,
      "loss": 0.0108,
      "step": 1200
    },
    {
      "epoch": 6.4361702127659575,
      "grad_norm": 0.1783706396818161,
      "learning_rate": 9.863197327140376e-05,
      "loss": 0.0088,
      "step": 1210
    },
    {
      "epoch": 6.48936170212766,
      "grad_norm": 0.2931004762649536,
      "learning_rate": 9.859329405477403e-05,
      "loss": 0.0127,
      "step": 1220
    },
    {
      "epoch": 6.542553191489362,
      "grad_norm": 0.219028502702713,
      "learning_rate": 9.855408342966585e-05,
      "loss": 0.0097,
      "step": 1230
    },
    {
      "epoch": 6.595744680851064,
      "grad_norm": 0.2385806441307068,
      "learning_rate": 9.851434182488033e-05,
      "loss": 0.0105,
      "step": 1240
    },
    {
      "epoch": 6.648936170212766,
      "grad_norm": 0.16756264865398407,
      "learning_rate": 9.84740696750253e-05,
      "loss": 0.0098,
      "step": 1250
    },
    {
      "epoch": 6.702127659574468,
      "grad_norm": 0.21152247488498688,
      "learning_rate": 9.843326742051055e-05,
      "loss": 0.0098,
      "step": 1260
    },
    {
      "epoch": 6.75531914893617,
      "grad_norm": 0.22754335403442383,
      "learning_rate": 9.839193550754297e-05,
      "loss": 0.012,
      "step": 1270
    },
    {
      "epoch": 6.808510638297872,
      "grad_norm": 0.3244580626487732,
      "learning_rate": 9.835007438812177e-05,
      "loss": 0.0091,
      "step": 1280
    },
    {
      "epoch": 6.861702127659575,
      "grad_norm": 0.3235946297645569,
      "learning_rate": 9.830768452003341e-05,
      "loss": 0.0088,
      "step": 1290
    },
    {
      "epoch": 6.914893617021277,
      "grad_norm": 0.2910916805267334,
      "learning_rate": 9.826476636684671e-05,
      "loss": 0.0125,
      "step": 1300
    },
    {
      "epoch": 6.968085106382979,
      "grad_norm": 0.18724599480628967,
      "learning_rate": 9.822132039790773e-05,
      "loss": 0.0115,
      "step": 1310
    },
    {
      "epoch": 7.0212765957446805,
      "grad_norm": 0.2930501699447632,
      "learning_rate": 9.817734708833461e-05,
      "loss": 0.0093,
      "step": 1320
    },
    {
      "epoch": 7.074468085106383,
      "grad_norm": 0.23762063682079315,
      "learning_rate": 9.813284691901243e-05,
      "loss": 0.0094,
      "step": 1330
    },
    {
      "epoch": 7.127659574468085,
      "grad_norm": 0.2083939164876938,
      "learning_rate": 9.808782037658792e-05,
      "loss": 0.008,
      "step": 1340
    },
    {
      "epoch": 7.180851063829787,
      "grad_norm": 0.18419736623764038,
      "learning_rate": 9.804226795346411e-05,
      "loss": 0.0081,
      "step": 1350
    },
    {
      "epoch": 7.23404255319149,
      "grad_norm": 0.21024487912654877,
      "learning_rate": 9.799619014779503e-05,
      "loss": 0.0082,
      "step": 1360
    },
    {
      "epoch": 7.287234042553192,
      "grad_norm": 0.20090395212173462,
      "learning_rate": 9.794958746348013e-05,
      "loss": 0.0095,
      "step": 1370
    },
    {
      "epoch": 7.340425531914893,
      "grad_norm": 0.23332229256629944,
      "learning_rate": 9.790246041015896e-05,
      "loss": 0.009,
      "step": 1380
    },
    {
      "epoch": 7.3936170212765955,
      "grad_norm": 0.20141930878162384,
      "learning_rate": 9.785480950320538e-05,
      "loss": 0.008,
      "step": 1390
    },
    {
      "epoch": 7.446808510638298,
      "grad_norm": 0.23216331005096436,
      "learning_rate": 9.78066352637221e-05,
      "loss": 0.0097,
      "step": 1400
    },
    {
      "epoch": 7.5,
      "grad_norm": 0.20977161824703217,
      "learning_rate": 9.775793821853488e-05,
      "loss": 0.0087,
      "step": 1410
    },
    {
      "epoch": 7.553191489361702,
      "grad_norm": 0.18909209966659546,
      "learning_rate": 9.77087189001868e-05,
      "loss": 0.0081,
      "step": 1420
    },
    {
      "epoch": 7.6063829787234045,
      "grad_norm": 0.16039927303791046,
      "learning_rate": 9.765897784693243e-05,
      "loss": 0.0084,
      "step": 1430
    },
    {
      "epoch": 7.659574468085106,
      "grad_norm": 0.16434164345264435,
      "learning_rate": 9.760871560273197e-05,
      "loss": 0.0093,
      "step": 1440
    },
    {
      "epoch": 7.712765957446808,
      "grad_norm": 0.21537816524505615,
      "learning_rate": 9.755793271724526e-05,
      "loss": 0.0097,
      "step": 1450
    },
    {
      "epoch": 7.76595744680851,
      "grad_norm": 0.21732476353645325,
      "learning_rate": 9.750662974582584e-05,
      "loss": 0.0099,
      "step": 1460
    },
    {
      "epoch": 7.819148936170213,
      "grad_norm": 0.18821750581264496,
      "learning_rate": 9.745480724951473e-05,
      "loss": 0.0075,
      "step": 1470
    },
    {
      "epoch": 7.872340425531915,
      "grad_norm": 0.2072674185037613,
      "learning_rate": 9.740246579503447e-05,
      "loss": 0.0109,
      "step": 1480
    },
    {
      "epoch": 7.925531914893617,
      "grad_norm": 0.23589925467967987,
      "learning_rate": 9.734960595478284e-05,
      "loss": 0.01,
      "step": 1490
    },
    {
      "epoch": 7.9787234042553195,
      "grad_norm": 0.16421300172805786,
      "learning_rate": 9.729622830682657e-05,
      "loss": 0.0109,
      "step": 1500
    },
    {
      "epoch": 8.03191489361702,
      "grad_norm": 0.18344688415527344,
      "learning_rate": 9.724233343489504e-05,
      "loss": 0.0087,
      "step": 1510
    },
    {
      "epoch": 8.085106382978724,
      "grad_norm": 0.22817589342594147,
      "learning_rate": 9.718792192837396e-05,
      "loss": 0.008,
      "step": 1520
    },
    {
      "epoch": 8.138297872340425,
      "grad_norm": 0.23019234836101532,
      "learning_rate": 9.713299438229886e-05,
      "loss": 0.009,
      "step": 1530
    },
    {
      "epoch": 8.191489361702128,
      "grad_norm": 0.16504380106925964,
      "learning_rate": 9.707755139734855e-05,
      "loss": 0.0091,
      "step": 1540
    },
    {
      "epoch": 8.24468085106383,
      "grad_norm": 0.16765393316745758,
      "learning_rate": 9.702159357983866e-05,
      "loss": 0.008,
      "step": 1550
    },
    {
      "epoch": 8.297872340425531,
      "grad_norm": 0.21124182641506195,
      "learning_rate": 9.696512154171492e-05,
      "loss": 0.0081,
      "step": 1560
    },
    {
      "epoch": 8.351063829787234,
      "grad_norm": 0.18493524193763733,
      "learning_rate": 9.690813590054645e-05,
      "loss": 0.009,
      "step": 1570
    },
    {
      "epoch": 8.404255319148936,
      "grad_norm": 0.1426675021648407,
      "learning_rate": 9.685063727951914e-05,
      "loss": 0.0085,
      "step": 1580
    },
    {
      "epoch": 8.457446808510639,
      "grad_norm": 0.19608302414417267,
      "learning_rate": 9.679262630742865e-05,
      "loss": 0.0081,
      "step": 1590
    },
    {
      "epoch": 8.51063829787234,
      "grad_norm": 0.16939395666122437,
      "learning_rate": 9.673410361867373e-05,
      "loss": 0.0081,
      "step": 1600
    },
    {
      "epoch": 8.563829787234042,
      "grad_norm": 0.21645651757717133,
      "learning_rate": 9.667506985324909e-05,
      "loss": 0.0087,
      "step": 1610
    },
    {
      "epoch": 8.617021276595745,
      "grad_norm": 0.15645526349544525,
      "learning_rate": 9.661552565673855e-05,
      "loss": 0.0072,
      "step": 1620
    },
    {
      "epoch": 8.670212765957446,
      "grad_norm": 0.17213892936706543,
      "learning_rate": 9.655547168030789e-05,
      "loss": 0.0107,
      "step": 1630
    },
    {
      "epoch": 8.72340425531915,
      "grad_norm": 0.25185009837150574,
      "learning_rate": 9.649490858069777e-05,
      "loss": 0.0094,
      "step": 1640
    },
    {
      "epoch": 8.77659574468085,
      "grad_norm": 0.14914864301681519,
      "learning_rate": 9.643383702021658e-05,
      "loss": 0.0086,
      "step": 1650
    },
    {
      "epoch": 8.829787234042554,
      "grad_norm": 0.17778900265693665,
      "learning_rate": 9.637225766673307e-05,
      "loss": 0.0088,
      "step": 1660
    },
    {
      "epoch": 8.882978723404255,
      "grad_norm": 0.14826127886772156,
      "learning_rate": 9.631017119366922e-05,
      "loss": 0.0077,
      "step": 1670
    },
    {
      "epoch": 8.936170212765958,
      "grad_norm": 0.18762026727199554,
      "learning_rate": 9.624757827999273e-05,
      "loss": 0.0075,
      "step": 1680
    },
    {
      "epoch": 8.98936170212766,
      "grad_norm": 0.17554470896720886,
      "learning_rate": 9.618447961020971e-05,
      "loss": 0.0073,
      "step": 1690
    },
    {
      "epoch": 9.042553191489361,
      "grad_norm": 0.21771858632564545,
      "learning_rate": 9.612087587435707e-05,
      "loss": 0.0113,
      "step": 1700
    },
    {
      "epoch": 9.095744680851064,
      "grad_norm": 0.1472388207912445,
      "learning_rate": 9.605676776799508e-05,
      "loss": 0.0085,
      "step": 1710
    },
    {
      "epoch": 9.148936170212766,
      "grad_norm": 0.18204329907894135,
      "learning_rate": 9.599215599219973e-05,
      "loss": 0.0092,
      "step": 1720
    },
    {
      "epoch": 9.202127659574469,
      "grad_norm": 0.22582733631134033,
      "learning_rate": 9.592704125355505e-05,
      "loss": 0.0093,
      "step": 1730
    },
    {
      "epoch": 9.25531914893617,
      "grad_norm": 0.199408620595932,
      "learning_rate": 9.586142426414538e-05,
      "loss": 0.0088,
      "step": 1740
    },
    {
      "epoch": 9.308510638297872,
      "grad_norm": 0.27061811089515686,
      "learning_rate": 9.57953057415476e-05,
      "loss": 0.0087,
      "step": 1750
    },
    {
      "epoch": 9.361702127659575,
      "grad_norm": 0.20301592350006104,
      "learning_rate": 9.572868640882328e-05,
      "loss": 0.0073,
      "step": 1760
    },
    {
      "epoch": 9.414893617021276,
      "grad_norm": 0.17418834567070007,
      "learning_rate": 9.56615669945108e-05,
      "loss": 0.0073,
      "step": 1770
    },
    {
      "epoch": 9.46808510638298,
      "grad_norm": 0.1484975814819336,
      "learning_rate": 9.55939482326173e-05,
      "loss": 0.0077,
      "step": 1780
    },
    {
      "epoch": 9.52127659574468,
      "grad_norm": 0.2033277153968811,
      "learning_rate": 9.552583086261069e-05,
      "loss": 0.0067,
      "step": 1790
    },
    {
      "epoch": 9.574468085106384,
      "grad_norm": 0.14535771310329437,
      "learning_rate": 9.545721562941168e-05,
      "loss": 0.0075,
      "step": 1800
    },
    {
      "epoch": 9.627659574468085,
      "grad_norm": 0.18686817586421967,
      "learning_rate": 9.538810328338543e-05,
      "loss": 0.0073,
      "step": 1810
    },
    {
      "epoch": 9.680851063829786,
      "grad_norm": 0.1654263436794281,
      "learning_rate": 9.531849458033349e-05,
      "loss": 0.0077,
      "step": 1820
    },
    {
      "epoch": 9.73404255319149,
      "grad_norm": 0.14076223969459534,
      "learning_rate": 9.524839028148547e-05,
      "loss": 0.0082,
      "step": 1830
    },
    {
      "epoch": 9.787234042553191,
      "grad_norm": 0.23140083253383636,
      "learning_rate": 9.517779115349077e-05,
      "loss": 0.0074,
      "step": 1840
    },
    {
      "epoch": 9.840425531914894,
      "grad_norm": 0.21250662207603455,
      "learning_rate": 9.510669796841014e-05,
      "loss": 0.0066,
      "step": 1850
    },
    {
      "epoch": 9.893617021276595,
      "grad_norm": 0.23771288990974426,
      "learning_rate": 9.503511150370727e-05,
      "loss": 0.0104,
      "step": 1860
    },
    {
      "epoch": 9.946808510638299,
      "grad_norm": 0.19595006108283997,
      "learning_rate": 9.496303254224024e-05,
      "loss": 0.008,
      "step": 1870
    },
    {
      "epoch": 10.0,
      "grad_norm": 0.2794885039329529,
      "learning_rate": 9.489046187225306e-05,
      "loss": 0.0086,
      "step": 1880
    },
    {
      "epoch": 10.053191489361701,
      "grad_norm": 0.1546381413936615,
      "learning_rate": 9.481740028736692e-05,
      "loss": 0.0085,
      "step": 1890
    },
    {
      "epoch": 10.106382978723405,
      "grad_norm": 0.29406481981277466,
      "learning_rate": 9.474384858657164e-05,
      "loss": 0.01,
      "step": 1900
    },
    {
      "epoch": 10.159574468085106,
      "grad_norm": 0.2059696614742279,
      "learning_rate": 9.466980757421679e-05,
      "loss": 0.0076,
      "step": 1910
    },
    {
      "epoch": 10.212765957446809,
      "grad_norm": 0.20823757350444794,
      "learning_rate": 9.459527806000305e-05,
      "loss": 0.0077,
      "step": 1920
    },
    {
      "epoch": 10.26595744680851,
      "grad_norm": 0.17863772809505463,
      "learning_rate": 9.452026085897325e-05,
      "loss": 0.0084,
      "step": 1930
    },
    {
      "epoch": 10.319148936170214,
      "grad_norm": 0.24163362383842468,
      "learning_rate": 9.444475679150348e-05,
      "loss": 0.009,
      "step": 1940
    },
    {
      "epoch": 10.372340425531915,
      "grad_norm": 0.1531575620174408,
      "learning_rate": 9.436876668329411e-05,
      "loss": 0.0079,
      "step": 1950
    },
    {
      "epoch": 10.425531914893616,
      "grad_norm": 0.20025062561035156,
      "learning_rate": 9.429229136536079e-05,
      "loss": 0.0105,
      "step": 1960
    },
    {
      "epoch": 10.47872340425532,
      "grad_norm": 0.18752774596214294,
      "learning_rate": 9.421533167402534e-05,
      "loss": 0.0082,
      "step": 1970
    },
    {
      "epoch": 10.53191489361702,
      "grad_norm": 0.13088159263134003,
      "learning_rate": 9.413788845090666e-05,
      "loss": 0.0077,
      "step": 1980
    },
    {
      "epoch": 10.585106382978724,
      "grad_norm": 0.16975660622119904,
      "learning_rate": 9.405996254291136e-05,
      "loss": 0.0096,
      "step": 1990
    },
    {
      "epoch": 10.638297872340425,
      "grad_norm": 0.12248742580413818,
      "learning_rate": 9.398155480222474e-05,
      "loss": 0.0071,
      "step": 2000
    },
    {
      "epoch": 10.691489361702128,
      "grad_norm": 0.23046736419200897,
      "learning_rate": 9.390266608630128e-05,
      "loss": 0.0089,
      "step": 2010
    },
    {
      "epoch": 10.74468085106383,
      "grad_norm": 0.1831723153591156,
      "learning_rate": 9.38232972578553e-05,
      "loss": 0.0066,
      "step": 2020
    },
    {
      "epoch": 10.797872340425531,
      "grad_norm": 0.20898064970970154,
      "learning_rate": 9.374344918485164e-05,
      "loss": 0.0087,
      "step": 2030
    },
    {
      "epoch": 10.851063829787234,
      "grad_norm": 0.13936717808246613,
      "learning_rate": 9.366312274049602e-05,
      "loss": 0.0066,
      "step": 2040
    },
    {
      "epoch": 10.904255319148936,
      "grad_norm": 0.15688860416412354,
      "learning_rate": 9.358231880322554e-05,
      "loss": 0.0093,
      "step": 2050
    },
    {
      "epoch": 10.957446808510639,
      "grad_norm": 0.17876432836055756,
      "learning_rate": 9.350103825669916e-05,
      "loss": 0.0072,
      "step": 2060
    },
    {
      "epoch": 11.01063829787234,
      "grad_norm": 0.14713621139526367,
      "learning_rate": 9.341928198978787e-05,
      "loss": 0.0073,
      "step": 2070
    },
    {
      "epoch": 11.063829787234043,
      "grad_norm": 0.1754753589630127,
      "learning_rate": 9.333705089656512e-05,
      "loss": 0.0058,
      "step": 2080
    },
    {
      "epoch": 11.117021276595745,
      "grad_norm": 0.1781453937292099,
      "learning_rate": 9.325434587629698e-05,
      "loss": 0.009,
      "step": 2090
    },
    {
      "epoch": 11.170212765957446,
      "grad_norm": 0.23320874571800232,
      "learning_rate": 9.31711678334323e-05,
      "loss": 0.0082,
      "step": 2100
    },
    {
      "epoch": 11.22340425531915,
      "grad_norm": 0.14676867425441742,
      "learning_rate": 9.308751767759282e-05,
      "loss": 0.0065,
      "step": 2110
    },
    {
      "epoch": 11.27659574468085,
      "grad_norm": 0.18670448660850525,
      "learning_rate": 9.300339632356325e-05,
      "loss": 0.0069,
      "step": 2120
    },
    {
      "epoch": 11.329787234042554,
      "grad_norm": 0.19785605370998383,
      "learning_rate": 9.291880469128124e-05,
      "loss": 0.0064,
      "step": 2130
    },
    {
      "epoch": 11.382978723404255,
      "grad_norm": 0.1706223338842392,
      "learning_rate": 9.283374370582732e-05,
      "loss": 0.0075,
      "step": 2140
    },
    {
      "epoch": 11.436170212765958,
      "grad_norm": 0.24619443714618683,
      "learning_rate": 9.274821429741482e-05,
      "loss": 0.0083,
      "step": 2150
    },
    {
      "epoch": 11.48936170212766,
      "grad_norm": 0.1221090778708458,
      "learning_rate": 9.266221740137961e-05,
      "loss": 0.0099,
      "step": 2160
    },
    {
      "epoch": 11.542553191489361,
      "grad_norm": 0.2016286700963974,
      "learning_rate": 9.257575395817001e-05,
      "loss": 0.0086,
      "step": 2170
    },
    {
      "epoch": 11.595744680851064,
      "grad_norm": 0.15547704696655273,
      "learning_rate": 9.248882491333637e-05,
      "loss": 0.0091,
      "step": 2180
    },
    {
      "epoch": 11.648936170212766,
      "grad_norm": 0.1360115110874176,
      "learning_rate": 9.240143121752076e-05,
      "loss": 0.0074,
      "step": 2190
    },
    {
      "epoch": 11.702127659574469,
      "grad_norm": 0.1512525975704193,
      "learning_rate": 9.23135738264467e-05,
      "loss": 0.0065,
      "step": 2200
    },
    {
      "epoch": 11.75531914893617,
      "grad_norm": 0.1892113834619522,
      "learning_rate": 9.222525370090849e-05,
      "loss": 0.0094,
      "step": 2210
    },
    {
      "epoch": 11.808510638297872,
      "grad_norm": 0.1855061948299408,
      "learning_rate": 9.213647180676088e-05,
      "loss": 0.0079,
      "step": 2220
    },
    {
      "epoch": 11.861702127659575,
      "grad_norm": 0.1603952795267105,
      "learning_rate": 9.204722911490846e-05,
      "loss": 0.0075,
      "step": 2230
    },
    {
      "epoch": 11.914893617021276,
      "grad_norm": 0.169124573469162,
      "learning_rate": 9.1957526601295e-05,
      "loss": 0.0097,
      "step": 2240
    },
    {
      "epoch": 11.96808510638298,
      "grad_norm": 0.1613597273826599,
      "learning_rate": 9.186736524689281e-05,
      "loss": 0.0076,
      "step": 2250
    },
    {
      "epoch": 12.02127659574468,
      "grad_norm": 0.15133880078792572,
      "learning_rate": 9.177674603769204e-05,
      "loss": 0.0073,
      "step": 2260
    },
    {
      "epoch": 12.074468085106384,
      "grad_norm": 0.20068326592445374,
      "learning_rate": 9.168566996468983e-05,
      "loss": 0.0089,
      "step": 2270
    },
    {
      "epoch": 12.127659574468085,
      "grad_norm": 0.20986078679561615,
      "learning_rate": 9.159413802387951e-05,
      "loss": 0.0085,
      "step": 2280
    },
    {
      "epoch": 12.180851063829786,
      "grad_norm": 0.20550352334976196,
      "learning_rate": 9.150215121623974e-05,
      "loss": 0.0075,
      "step": 2290
    },
    {
      "epoch": 12.23404255319149,
      "grad_norm": 0.1844809204339981,
      "learning_rate": 9.140971054772349e-05,
      "loss": 0.0088,
      "step": 2300
    },
    {
      "epoch": 12.287234042553191,
      "grad_norm": 0.19343963265419006,
      "learning_rate": 9.131681702924713e-05,
      "loss": 0.0074,
      "step": 2310
    },
    {
      "epoch": 12.340425531914894,
      "grad_norm": 0.14274075627326965,
      "learning_rate": 9.122347167667926e-05,
      "loss": 0.0075,
      "step": 2320
    },
    {
      "epoch": 12.393617021276595,
      "grad_norm": 0.18554295599460602,
      "learning_rate": 9.112967551082973e-05,
      "loss": 0.0075,
      "step": 2330
    },
    {
      "epoch": 12.446808510638299,
      "grad_norm": 0.1767973154783249,
      "learning_rate": 9.103542955743835e-05,
      "loss": 0.0075,
      "step": 2340
    },
    {
      "epoch": 12.5,
      "grad_norm": 0.20641733705997467,
      "learning_rate": 9.094073484716381e-05,
      "loss": 0.0071,
      "step": 2350
    },
    {
      "epoch": 12.553191489361701,
      "grad_norm": 0.1635461002588272,
      "learning_rate": 9.084559241557226e-05,
      "loss": 0.0089,
      "step": 2360
    },
    {
      "epoch": 12.606382978723405,
      "grad_norm": 0.184914693236351,
      "learning_rate": 9.075000330312608e-05,
      "loss": 0.0072,
      "step": 2370
    },
    {
      "epoch": 12.659574468085106,
      "grad_norm": 0.16729353368282318,
      "learning_rate": 9.065396855517253e-05,
      "loss": 0.01,
      "step": 2380
    },
    {
      "epoch": 12.712765957446809,
      "grad_norm": 0.19467006623744965,
      "learning_rate": 9.055748922193219e-05,
      "loss": 0.007,
      "step": 2390
    },
    {
      "epoch": 12.76595744680851,
      "grad_norm": 0.17373105883598328,
      "learning_rate": 9.046056635848761e-05,
      "loss": 0.0096,
      "step": 2400
    },
    {
      "epoch": 12.819148936170214,
      "grad_norm": 0.19389963150024414,
      "learning_rate": 9.036320102477169e-05,
      "loss": 0.0092,
      "step": 2410
    },
    {
      "epoch": 12.872340425531915,
      "grad_norm": 0.16150271892547607,
      "learning_rate": 9.02653942855561e-05,
      "loss": 0.0075,
      "step": 2420
    },
    {
      "epoch": 12.925531914893616,
      "grad_norm": 0.1727665364742279,
      "learning_rate": 9.016714721043971e-05,
      "loss": 0.0072,
      "step": 2430
    },
    {
      "epoch": 12.97872340425532,
      "grad_norm": 0.21858614683151245,
      "learning_rate": 9.006846087383675e-05,
      "loss": 0.0089,
      "step": 2440
    },
    {
      "epoch": 13.03191489361702,
      "grad_norm": 0.214237242937088,
      "learning_rate": 8.996933635496523e-05,
      "loss": 0.008,
      "step": 2450
    },
    {
      "epoch": 13.085106382978724,
      "grad_norm": 0.16075631976127625,
      "learning_rate": 8.986977473783498e-05,
      "loss": 0.0069,
      "step": 2460
    },
    {
      "epoch": 13.138297872340425,
      "grad_norm": 0.19414788484573364,
      "learning_rate": 8.97697771112359e-05,
      "loss": 0.0076,
      "step": 2470
    },
    {
      "epoch": 13.191489361702128,
      "grad_norm": 0.20983470976352692,
      "learning_rate": 8.966934456872602e-05,
      "loss": 0.0079,
      "step": 2480
    },
    {
      "epoch": 13.24468085106383,
      "grad_norm": 0.22086811065673828,
      "learning_rate": 8.95684782086195e-05,
      "loss": 0.007,
      "step": 2490
    },
    {
      "epoch": 13.297872340425531,
      "grad_norm": 0.16258001327514648,
      "learning_rate": 8.946717913397476e-05,
      "loss": 0.0076,
      "step": 2500
    },
    {
      "epoch": 13.351063829787234,
      "grad_norm": 0.1627521961927414,
      "learning_rate": 8.93654484525822e-05,
      "loss": 0.0067,
      "step": 2510
    },
    {
      "epoch": 13.404255319148936,
      "grad_norm": 0.18251019716262817,
      "learning_rate": 8.926328727695226e-05,
      "loss": 0.0075,
      "step": 2520
    },
    {
      "epoch": 13.457446808510639,
      "grad_norm": 0.20372505486011505,
      "learning_rate": 8.916069672430319e-05,
      "loss": 0.0081,
      "step": 2530
    },
    {
      "epoch": 13.51063829787234,
      "grad_norm": 0.202079638838768,
      "learning_rate": 8.905767791654884e-05,
      "loss": 0.0077,
      "step": 2540
    },
    {
      "epoch": 13.563829787234042,
      "grad_norm": 0.18187732994556427,
      "learning_rate": 8.895423198028638e-05,
      "loss": 0.0069,
      "step": 2550
    },
    {
      "epoch": 13.617021276595745,
      "grad_norm": 0.18868182599544525,
      "learning_rate": 8.885036004678402e-05,
      "loss": 0.0066,
      "step": 2560
    },
    {
      "epoch": 13.670212765957446,
      "grad_norm": 0.13480782508850098,
      "learning_rate": 8.874606325196857e-05,
      "loss": 0.0064,
      "step": 2570
    },
    {
      "epoch": 13.72340425531915,
      "grad_norm": 0.1289985477924347,
      "learning_rate": 8.864134273641304e-05,
      "loss": 0.0071,
      "step": 2580
    },
    {
      "epoch": 13.77659574468085,
      "grad_norm": 0.13561029732227325,
      "learning_rate": 8.853619964532427e-05,
      "loss": 0.0064,
      "step": 2590
    },
    {
      "epoch": 13.829787234042554,
      "grad_norm": 0.14998756349086761,
      "learning_rate": 8.843063512853019e-05,
      "loss": 0.0078,
      "step": 2600
    },
    {
      "epoch": 13.882978723404255,
      "grad_norm": 0.13307224214076996,
      "learning_rate": 8.832465034046749e-05,
      "loss": 0.0067,
      "step": 2610
    },
    {
      "epoch": 13.936170212765958,
      "grad_norm": 0.17667639255523682,
      "learning_rate": 8.821824644016882e-05,
      "loss": 0.0071,
      "step": 2620
    },
    {
      "epoch": 13.98936170212766,
      "grad_norm": 0.1743881106376648,
      "learning_rate": 8.811142459125019e-05,
      "loss": 0.0073,
      "step": 2630
    },
    {
      "epoch": 14.042553191489361,
      "grad_norm": 0.20977191627025604,
      "learning_rate": 8.800418596189822e-05,
      "loss": 0.0073,
      "step": 2640
    },
    {
      "epoch": 14.095744680851064,
      "grad_norm": 0.20122361183166504,
      "learning_rate": 8.789653172485737e-05,
      "loss": 0.0075,
      "step": 2650
    },
    {
      "epoch": 14.148936170212766,
      "grad_norm": 0.17474465072155,
      "learning_rate": 8.778846305741715e-05,
      "loss": 0.0056,
      "step": 2660
    },
    {
      "epoch": 14.202127659574469,
      "grad_norm": 0.18404212594032288,
      "learning_rate": 8.767998114139918e-05,
      "loss": 0.0086,
      "step": 2670
    },
    {
      "epoch": 14.25531914893617,
      "grad_norm": 0.19821803271770477,
      "learning_rate": 8.757108716314429e-05,
      "loss": 0.0093,
      "step": 2680
    },
    {
      "epoch": 14.308510638297872,
      "grad_norm": 0.15421666204929352,
      "learning_rate": 8.746178231349962e-05,
      "loss": 0.0059,
      "step": 2690
    },
    {
      "epoch": 14.361702127659575,
      "grad_norm": 0.1361829936504364,
      "learning_rate": 8.735206778780549e-05,
      "loss": 0.0057,
      "step": 2700
    },
    {
      "epoch": 14.414893617021276,
      "grad_norm": 0.197443887591362,
      "learning_rate": 8.724194478588234e-05,
      "loss": 0.0061,
      "step": 2710
    },
    {
      "epoch": 14.46808510638298,
      "grad_norm": 0.1521860659122467,
      "learning_rate": 8.713141451201772e-05,
      "loss": 0.0074,
      "step": 2720
    },
    {
      "epoch": 14.52127659574468,
      "grad_norm": 0.1834147423505783,
      "learning_rate": 8.702047817495295e-05,
      "loss": 0.0089,
      "step": 2730
    },
    {
      "epoch": 14.574468085106384,
      "grad_norm": 0.16649259626865387,
      "learning_rate": 8.69091369878701e-05,
      "loss": 0.0057,
      "step": 2740
    },
    {
      "epoch": 14.627659574468085,
      "grad_norm": 0.13391681015491486,
      "learning_rate": 8.679739216837849e-05,
      "loss": 0.0062,
      "step": 2750
    },
    {
      "epoch": 14.680851063829786,
      "grad_norm": 0.11012919247150421,
      "learning_rate": 8.66852449385016e-05,
      "loss": 0.0073,
      "step": 2760
    },
    {
      "epoch": 14.73404255319149,
      "grad_norm": 0.09199478477239609,
      "learning_rate": 8.657269652466356e-05,
      "loss": 0.0056,
      "step": 2770
    },
    {
      "epoch": 14.787234042553191,
      "grad_norm": 0.18235042691230774,
      "learning_rate": 8.645974815767577e-05,
      "loss": 0.0062,
      "step": 2780
    },
    {
      "epoch": 14.840425531914894,
      "grad_norm": 0.16901960968971252,
      "learning_rate": 8.634640107272351e-05,
      "loss": 0.0056,
      "step": 2790
    },
    {
      "epoch": 14.893617021276595,
      "grad_norm": 0.17238442599773407,
      "learning_rate": 8.623265650935234e-05,
      "loss": 0.0054,
      "step": 2800
    },
    {
      "epoch": 14.946808510638299,
      "grad_norm": 0.25071072578430176,
      "learning_rate": 8.611851571145456e-05,
      "loss": 0.0083,
      "step": 2810
    },
    {
      "epoch": 15.0,
      "grad_norm": 0.2194031924009323,
      "learning_rate": 8.600397992725566e-05,
      "loss": 0.0085,
      "step": 2820
    },
    {
      "epoch": 15.053191489361701,
      "grad_norm": 0.18971814215183258,
      "learning_rate": 8.588905040930061e-05,
      "loss": 0.008,
      "step": 2830
    },
    {
      "epoch": 15.106382978723405,
      "grad_norm": 0.17306965589523315,
      "learning_rate": 8.577372841444022e-05,
      "loss": 0.0092,
      "step": 2840
    },
    {
      "epoch": 15.159574468085106,
      "grad_norm": 0.1638938933610916,
      "learning_rate": 8.565801520381736e-05,
      "loss": 0.0066,
      "step": 2850
    },
    {
      "epoch": 15.212765957446809,
      "grad_norm": 0.15540580451488495,
      "learning_rate": 8.554191204285313e-05,
      "loss": 0.0065,
      "step": 2860
    },
    {
      "epoch": 15.26595744680851,
      "grad_norm": 0.25182926654815674,
      "learning_rate": 8.542542020123315e-05,
      "loss": 0.0068,
      "step": 2870
    },
    {
      "epoch": 15.319148936170214,
      "grad_norm": 0.20930540561676025,
      "learning_rate": 8.530854095289347e-05,
      "loss": 0.0078,
      "step": 2880
    },
    {
      "epoch": 15.372340425531915,
      "grad_norm": 0.2499101758003235,
      "learning_rate": 8.519127557600688e-05,
      "loss": 0.0084,
      "step": 2890
    },
    {
      "epoch": 15.425531914893616,
      "grad_norm": 0.22977644205093384,
      "learning_rate": 8.507362535296871e-05,
      "loss": 0.0072,
      "step": 2900
    },
    {
      "epoch": 15.47872340425532,
      "grad_norm": 0.2463008016347885,
      "learning_rate": 8.495559157038299e-05,
      "loss": 0.0072,
      "step": 2910
    },
    {
      "epoch": 15.53191489361702,
      "grad_norm": 0.19558028876781464,
      "learning_rate": 8.483717551904823e-05,
      "loss": 0.006,
      "step": 2920
    },
    {
      "epoch": 15.585106382978724,
      "grad_norm": 0.1859913170337677,
      "learning_rate": 8.47183784939434e-05,
      "loss": 0.0063,
      "step": 2930
    },
    {
      "epoch": 15.638297872340425,
      "grad_norm": 0.2142346054315567,
      "learning_rate": 8.459920179421374e-05,
      "loss": 0.0065,
      "step": 2940
    },
    {
      "epoch": 15.691489361702128,
      "grad_norm": 0.14805962145328522,
      "learning_rate": 8.447964672315656e-05,
      "loss": 0.0071,
      "step": 2950
    },
    {
      "epoch": 15.74468085106383,
      "grad_norm": 0.12391805648803711,
      "learning_rate": 8.435971458820692e-05,
      "loss": 0.0074,
      "step": 2960
    },
    {
      "epoch": 15.797872340425531,
      "grad_norm": 0.1614798903465271,
      "learning_rate": 8.423940670092345e-05,
      "loss": 0.007,
      "step": 2970
    },
    {
      "epoch": 15.851063829787234,
      "grad_norm": 0.1519756019115448,
      "learning_rate": 8.411872437697394e-05,
      "loss": 0.0097,
      "step": 2980
    },
    {
      "epoch": 15.904255319148936,
      "grad_norm": 0.1478251814842224,
      "learning_rate": 8.399766893612096e-05,
      "loss": 0.0076,
      "step": 2990
    },
    {
      "epoch": 15.957446808510639,
      "grad_norm": 0.15515626966953278,
      "learning_rate": 8.38762417022074e-05,
      "loss": 0.0068,
      "step": 3000
    },
    {
      "epoch": 16.01063829787234,
      "grad_norm": 0.13135063648223877,
      "learning_rate": 8.375444400314204e-05,
      "loss": 0.0087,
      "step": 3010
    },
    {
      "epoch": 16.06382978723404,
      "grad_norm": 0.1597781777381897,
      "learning_rate": 8.3632277170885e-05,
      "loss": 0.0065,
      "step": 3020
    },
    {
      "epoch": 16.117021276595743,
      "grad_norm": 0.09916092455387115,
      "learning_rate": 8.350974254143318e-05,
      "loss": 0.0067,
      "step": 3030
    },
    {
      "epoch": 16.170212765957448,
      "grad_norm": 0.16787171363830566,
      "learning_rate": 8.338684145480566e-05,
      "loss": 0.0062,
      "step": 3040
    },
    {
      "epoch": 16.22340425531915,
      "grad_norm": 0.11977628618478775,
      "learning_rate": 8.326357525502904e-05,
      "loss": 0.0065,
      "step": 3050
    },
    {
      "epoch": 16.27659574468085,
      "grad_norm": 0.1516636610031128,
      "learning_rate": 8.313994529012273e-05,
      "loss": 0.0067,
      "step": 3060
    },
    {
      "epoch": 16.329787234042552,
      "grad_norm": 0.1618969738483429,
      "learning_rate": 8.301595291208422e-05,
      "loss": 0.0074,
      "step": 3070
    },
    {
      "epoch": 16.382978723404257,
      "grad_norm": 0.16748309135437012,
      "learning_rate": 8.289159947687427e-05,
      "loss": 0.0055,
      "step": 3080
    },
    {
      "epoch": 16.43617021276596,
      "grad_norm": 0.15574246644973755,
      "learning_rate": 8.276688634440216e-05,
      "loss": 0.0056,
      "step": 3090
    },
    {
      "epoch": 16.48936170212766,
      "grad_norm": 0.10888471454381943,
      "learning_rate": 8.26418148785107e-05,
      "loss": 0.0108,
      "step": 3100
    },
    {
      "epoch": 16.54255319148936,
      "grad_norm": 0.15701264142990112,
      "learning_rate": 8.251638644696141e-05,
      "loss": 0.0089,
      "step": 3110
    },
    {
      "epoch": 16.595744680851062,
      "grad_norm": 0.1421189159154892,
      "learning_rate": 8.23906024214195e-05,
      "loss": 0.0073,
      "step": 3120
    },
    {
      "epoch": 16.648936170212767,
      "grad_norm": 0.1660069227218628,
      "learning_rate": 8.226446417743897e-05,
      "loss": 0.0068,
      "step": 3130
    },
    {
      "epoch": 16.70212765957447,
      "grad_norm": 0.289872407913208,
      "learning_rate": 8.213797309444742e-05,
      "loss": 0.0073,
      "step": 3140
    },
    {
      "epoch": 16.75531914893617,
      "grad_norm": 0.29917657375335693,
      "learning_rate": 8.201113055573105e-05,
      "loss": 0.0062,
      "step": 3150
    },
    {
      "epoch": 16.80851063829787,
      "grad_norm": 0.20638254284858704,
      "learning_rate": 8.188393794841958e-05,
      "loss": 0.0063,
      "step": 3160
    },
    {
      "epoch": 16.861702127659573,
      "grad_norm": 0.1633564531803131,
      "learning_rate": 8.175639666347094e-05,
      "loss": 0.0074,
      "step": 3170
    },
    {
      "epoch": 16.914893617021278,
      "grad_norm": 0.15572208166122437,
      "learning_rate": 8.162850809565623e-05,
      "loss": 0.008,
      "step": 3180
    },
    {
      "epoch": 16.96808510638298,
      "grad_norm": 0.13141052424907684,
      "learning_rate": 8.150027364354431e-05,
      "loss": 0.0082,
      "step": 3190
    },
    {
      "epoch": 17.02127659574468,
      "grad_norm": 0.2099199891090393,
      "learning_rate": 8.137169470948662e-05,
      "loss": 0.0067,
      "step": 3200
    },
    {
      "epoch": 17.074468085106382,
      "grad_norm": 0.14079205691814423,
      "learning_rate": 8.124277269960179e-05,
      "loss": 0.0077,
      "step": 3210
    },
    {
      "epoch": 17.127659574468087,
      "grad_norm": 0.2687934637069702,
      "learning_rate": 8.111350902376023e-05,
      "loss": 0.0072,
      "step": 3220
    },
    {
      "epoch": 17.180851063829788,
      "grad_norm": 0.20266003906726837,
      "learning_rate": 8.098390509556883e-05,
      "loss": 0.0072,
      "step": 3230
    },
    {
      "epoch": 17.23404255319149,
      "grad_norm": 0.16316485404968262,
      "learning_rate": 8.085396233235536e-05,
      "loss": 0.0064,
      "step": 3240
    },
    {
      "epoch": 17.28723404255319,
      "grad_norm": 0.15714746713638306,
      "learning_rate": 8.072368215515306e-05,
      "loss": 0.0062,
      "step": 3250
    },
    {
      "epoch": 17.340425531914892,
      "grad_norm": 0.12047432363033295,
      "learning_rate": 8.059306598868506e-05,
      "loss": 0.0069,
      "step": 3260
    },
    {
      "epoch": 17.393617021276597,
      "grad_norm": 0.14536887407302856,
      "learning_rate": 8.046211526134888e-05,
      "loss": 0.0074,
      "step": 3270
    },
    {
      "epoch": 17.4468085106383,
      "grad_norm": 0.1603090912103653,
      "learning_rate": 8.033083140520065e-05,
      "loss": 0.008,
      "step": 3280
    },
    {
      "epoch": 17.5,
      "grad_norm": 0.18343211710453033,
      "learning_rate": 8.019921585593962e-05,
      "loss": 0.0063,
      "step": 3290
    },
    {
      "epoch": 17.5531914893617,
      "grad_norm": 0.14314745366573334,
      "learning_rate": 8.006727005289232e-05,
      "loss": 0.0064,
      "step": 3300
    },
    {
      "epoch": 17.606382978723403,
      "grad_norm": 0.15809331834316254,
      "learning_rate": 7.993499543899692e-05,
      "loss": 0.0046,
      "step": 3310
    },
    {
      "epoch": 17.659574468085108,
      "grad_norm": 0.14878803491592407,
      "learning_rate": 7.980239346078742e-05,
      "loss": 0.0083,
      "step": 3320
    },
    {
      "epoch": 17.71276595744681,
      "grad_norm": 0.0949820801615715,
      "learning_rate": 7.966946556837778e-05,
      "loss": 0.0069,
      "step": 3330
    },
    {
      "epoch": 17.76595744680851,
      "grad_norm": 0.18976077437400818,
      "learning_rate": 7.953621321544616e-05,
      "loss": 0.007,
      "step": 3340
    },
    {
      "epoch": 17.819148936170212,
      "grad_norm": 0.22473865747451782,
      "learning_rate": 7.940263785921896e-05,
      "loss": 0.0065,
      "step": 3350
    },
    {
      "epoch": 17.872340425531917,
      "grad_norm": 0.1575072556734085,
      "learning_rate": 7.926874096045482e-05,
      "loss": 0.0056,
      "step": 3360
    },
    {
      "epoch": 17.925531914893618,
      "grad_norm": 0.17159603536128998,
      "learning_rate": 7.913452398342881e-05,
      "loss": 0.0075,
      "step": 3370
    },
    {
      "epoch": 17.97872340425532,
      "grad_norm": 0.12392999231815338,
      "learning_rate": 7.89999883959163e-05,
      "loss": 0.0062,
      "step": 3380
    },
    {
      "epoch": 18.03191489361702,
      "grad_norm": 0.1577315479516983,
      "learning_rate": 7.886513566917687e-05,
      "loss": 0.0061,
      "step": 3390
    },
    {
      "epoch": 18.085106382978722,
      "grad_norm": 0.11595878005027771,
      "learning_rate": 7.872996727793838e-05,
      "loss": 0.0053,
      "step": 3400
    },
    {
      "epoch": 18.138297872340427,
      "grad_norm": 0.13440750539302826,
      "learning_rate": 7.859448470038069e-05,
      "loss": 0.0057,
      "step": 3410
    },
    {
      "epoch": 18.19148936170213,
      "grad_norm": 0.12178987264633179,
      "learning_rate": 7.845868941811956e-05,
      "loss": 0.0062,
      "step": 3420
| }, | |
| { | |
| "epoch": 18.24468085106383, | |
| "grad_norm": 0.11918295174837112, | |
| "learning_rate": 7.832258291619043e-05, | |
| "loss": 0.0064, | |
| "step": 3430 | |
| }, | |
| { | |
| "epoch": 18.29787234042553, | |
| "grad_norm": 0.1700301468372345, | |
| "learning_rate": 7.81861666830322e-05, | |
| "loss": 0.0063, | |
| "step": 3440 | |
| }, | |
| { | |
| "epoch": 18.351063829787233, | |
| "grad_norm": 0.1254538893699646, | |
| "learning_rate": 7.804944221047097e-05, | |
| "loss": 0.0055, | |
| "step": 3450 | |
| }, | |
| { | |
| "epoch": 18.404255319148938, | |
| "grad_norm": 0.20456427335739136, | |
| "learning_rate": 7.791241099370364e-05, | |
| "loss": 0.0068, | |
| "step": 3460 | |
| }, | |
| { | |
| "epoch": 18.45744680851064, | |
| "grad_norm": 0.16722074151039124, | |
| "learning_rate": 7.777507453128163e-05, | |
| "loss": 0.0049, | |
| "step": 3470 | |
| }, | |
| { | |
| "epoch": 18.51063829787234, | |
| "grad_norm": 0.1924172341823578, | |
| "learning_rate": 7.763743432509451e-05, | |
| "loss": 0.0058, | |
| "step": 3480 | |
| }, | |
| { | |
| "epoch": 18.56382978723404, | |
| "grad_norm": 0.15702924132347107, | |
| "learning_rate": 7.749949188035353e-05, | |
| "loss": 0.008, | |
| "step": 3490 | |
| }, | |
| { | |
| "epoch": 18.617021276595743, | |
| "grad_norm": 0.1602710485458374, | |
| "learning_rate": 7.736124870557516e-05, | |
| "loss": 0.0064, | |
| "step": 3500 | |
| }, | |
| { | |
| "epoch": 18.670212765957448, | |
| "grad_norm": 0.15722621977329254, | |
| "learning_rate": 7.722270631256459e-05, | |
| "loss": 0.0054, | |
| "step": 3510 | |
| }, | |
| { | |
| "epoch": 18.72340425531915, | |
| "grad_norm": 0.14395569264888763, | |
| "learning_rate": 7.708386621639925e-05, | |
| "loss": 0.0052, | |
| "step": 3520 | |
| }, | |
| { | |
| "epoch": 18.77659574468085, | |
| "grad_norm": 0.19118018448352814, | |
| "learning_rate": 7.694472993541219e-05, | |
| "loss": 0.0059, | |
| "step": 3530 | |
| }, | |
| { | |
| "epoch": 18.829787234042552, | |
| "grad_norm": 0.20880447328090668, | |
| "learning_rate": 7.680529899117547e-05, | |
| "loss": 0.0095, | |
| "step": 3540 | |
| }, | |
| { | |
| "epoch": 18.882978723404257, | |
| "grad_norm": 0.11841943860054016, | |
| "learning_rate": 7.666557490848358e-05, | |
| "loss": 0.0049, | |
| "step": 3550 | |
| }, | |
| { | |
| "epoch": 18.93617021276596, | |
| "grad_norm": 0.08501628786325455, | |
| "learning_rate": 7.65255592153367e-05, | |
| "loss": 0.0053, | |
| "step": 3560 | |
| }, | |
| { | |
| "epoch": 18.98936170212766, | |
| "grad_norm": 0.12406400591135025, | |
| "learning_rate": 7.638525344292402e-05, | |
| "loss": 0.0058, | |
| "step": 3570 | |
| }, | |
| { | |
| "epoch": 19.04255319148936, | |
| "grad_norm": 0.1231900006532669, | |
| "learning_rate": 7.624465912560697e-05, | |
| "loss": 0.007, | |
| "step": 3580 | |
| }, | |
| { | |
| "epoch": 19.095744680851062, | |
| "grad_norm": 0.128575399518013, | |
| "learning_rate": 7.610377780090249e-05, | |
| "loss": 0.0065, | |
| "step": 3590 | |
| }, | |
| { | |
| "epoch": 19.148936170212767, | |
| "grad_norm": 0.13342691957950592, | |
| "learning_rate": 7.596261100946618e-05, | |
| "loss": 0.0058, | |
| "step": 3600 | |
| }, | |
| { | |
| "epoch": 19.20212765957447, | |
| "grad_norm": 0.1609726995229721, | |
| "learning_rate": 7.582116029507542e-05, | |
| "loss": 0.0074, | |
| "step": 3610 | |
| }, | |
| { | |
| "epoch": 19.25531914893617, | |
| "grad_norm": 0.13631616532802582, | |
| "learning_rate": 7.56794272046126e-05, | |
| "loss": 0.0044, | |
| "step": 3620 | |
| }, | |
| { | |
| "epoch": 19.30851063829787, | |
| "grad_norm": 0.1915193498134613, | |
| "learning_rate": 7.55374132880481e-05, | |
| "loss": 0.0063, | |
| "step": 3630 | |
| }, | |
| { | |
| "epoch": 19.361702127659573, | |
| "grad_norm": 0.23263674974441528, | |
| "learning_rate": 7.539512009842333e-05, | |
| "loss": 0.0064, | |
| "step": 3640 | |
| }, | |
| { | |
| "epoch": 19.414893617021278, | |
| "grad_norm": 0.1964254528284073, | |
| "learning_rate": 7.525254919183382e-05, | |
| "loss": 0.0056, | |
| "step": 3650 | |
| }, | |
| { | |
| "epoch": 19.46808510638298, | |
| "grad_norm": 0.151455819606781, | |
| "learning_rate": 7.510970212741215e-05, | |
| "loss": 0.008, | |
| "step": 3660 | |
| }, | |
| { | |
| "epoch": 19.52127659574468, | |
| "grad_norm": 0.11037199199199677, | |
| "learning_rate": 7.496658046731096e-05, | |
| "loss": 0.0073, | |
| "step": 3670 | |
| }, | |
| { | |
| "epoch": 19.574468085106382, | |
| "grad_norm": 0.14608484506607056, | |
| "learning_rate": 7.482318577668578e-05, | |
| "loss": 0.0063, | |
| "step": 3680 | |
| }, | |
| { | |
| "epoch": 19.627659574468083, | |
| "grad_norm": 0.12101870030164719, | |
| "learning_rate": 7.467951962367796e-05, | |
| "loss": 0.0065, | |
| "step": 3690 | |
| }, | |
| { | |
| "epoch": 19.680851063829788, | |
| "grad_norm": 0.172244131565094, | |
| "learning_rate": 7.453558357939755e-05, | |
| "loss": 0.0063, | |
| "step": 3700 | |
| }, | |
| { | |
| "epoch": 19.73404255319149, | |
| "grad_norm": 0.1500503122806549, | |
| "learning_rate": 7.439137921790606e-05, | |
| "loss": 0.007, | |
| "step": 3710 | |
| }, | |
| { | |
| "epoch": 19.78723404255319, | |
| "grad_norm": 0.09187772125005722, | |
| "learning_rate": 7.42469081161993e-05, | |
| "loss": 0.0055, | |
| "step": 3720 | |
| }, | |
| { | |
| "epoch": 19.840425531914892, | |
| "grad_norm": 0.16654223203659058, | |
| "learning_rate": 7.410217185419006e-05, | |
| "loss": 0.0067, | |
| "step": 3730 | |
| }, | |
| { | |
| "epoch": 19.893617021276597, | |
| "grad_norm": 0.1414085030555725, | |
| "learning_rate": 7.395717201469095e-05, | |
| "loss": 0.0065, | |
| "step": 3740 | |
| }, | |
| { | |
| "epoch": 19.9468085106383, | |
| "grad_norm": 0.14494206011295319, | |
| "learning_rate": 7.381191018339696e-05, | |
| "loss": 0.0085, | |
| "step": 3750 | |
| }, | |
| { | |
| "epoch": 20.0, | |
| "grad_norm": 0.17019270360469818, | |
| "learning_rate": 7.36663879488682e-05, | |
| "loss": 0.0067, | |
| "step": 3760 | |
| }, | |
| { | |
| "epoch": 20.0531914893617, | |
| "grad_norm": 0.19633671641349792, | |
| "learning_rate": 7.352060690251254e-05, | |
| "loss": 0.007, | |
| "step": 3770 | |
| }, | |
| { | |
| "epoch": 20.106382978723403, | |
| "grad_norm": 0.1447569876909256, | |
| "learning_rate": 7.337456863856811e-05, | |
| "loss": 0.0066, | |
| "step": 3780 | |
| }, | |
| { | |
| "epoch": 20.159574468085108, | |
| "grad_norm": 0.1572294980287552, | |
| "learning_rate": 7.3228274754086e-05, | |
| "loss": 0.0064, | |
| "step": 3790 | |
| }, | |
| { | |
| "epoch": 20.21276595744681, | |
| "grad_norm": 0.13722430169582367, | |
| "learning_rate": 7.308172684891267e-05, | |
| "loss": 0.0055, | |
| "step": 3800 | |
| }, | |
| { | |
| "epoch": 20.26595744680851, | |
| "grad_norm": 0.14371681213378906, | |
| "learning_rate": 7.293492652567255e-05, | |
| "loss": 0.0051, | |
| "step": 3810 | |
| }, | |
| { | |
| "epoch": 20.319148936170212, | |
| "grad_norm": 0.15320344269275665, | |
| "learning_rate": 7.278787538975043e-05, | |
| "loss": 0.0048, | |
| "step": 3820 | |
| }, | |
| { | |
| "epoch": 20.372340425531913, | |
| "grad_norm": 0.2013072371482849, | |
| "learning_rate": 7.2640575049274e-05, | |
| "loss": 0.0053, | |
| "step": 3830 | |
| }, | |
| { | |
| "epoch": 20.425531914893618, | |
| "grad_norm": 0.2290789932012558, | |
| "learning_rate": 7.249302711509616e-05, | |
| "loss": 0.006, | |
| "step": 3840 | |
| }, | |
| { | |
| "epoch": 20.47872340425532, | |
| "grad_norm": 0.20443342626094818, | |
| "learning_rate": 7.23452332007775e-05, | |
| "loss": 0.0054, | |
| "step": 3850 | |
| }, | |
| { | |
| "epoch": 20.53191489361702, | |
| "grad_norm": 0.1452774554491043, | |
| "learning_rate": 7.219719492256858e-05, | |
| "loss": 0.0057, | |
| "step": 3860 | |
| }, | |
| { | |
| "epoch": 20.585106382978722, | |
| "grad_norm": 0.16736865043640137, | |
| "learning_rate": 7.20489138993923e-05, | |
| "loss": 0.0055, | |
| "step": 3870 | |
| }, | |
| { | |
| "epoch": 20.638297872340427, | |
| "grad_norm": 0.18624937534332275, | |
| "learning_rate": 7.190039175282614e-05, | |
| "loss": 0.0056, | |
| "step": 3880 | |
| }, | |
| { | |
| "epoch": 20.69148936170213, | |
| "grad_norm": 0.1478068232536316, | |
| "learning_rate": 7.175163010708455e-05, | |
| "loss": 0.0064, | |
| "step": 3890 | |
| }, | |
| { | |
| "epoch": 20.74468085106383, | |
| "grad_norm": 0.11505433917045593, | |
| "learning_rate": 7.1602630589001e-05, | |
| "loss": 0.005, | |
| "step": 3900 | |
| }, | |
| { | |
| "epoch": 20.79787234042553, | |
| "grad_norm": 0.13151772320270538, | |
| "learning_rate": 7.14533948280104e-05, | |
| "loss": 0.0044, | |
| "step": 3910 | |
| }, | |
| { | |
| "epoch": 20.851063829787233, | |
| "grad_norm": 0.23022718727588654, | |
| "learning_rate": 7.130392445613109e-05, | |
| "loss": 0.006, | |
| "step": 3920 | |
| }, | |
| { | |
| "epoch": 20.904255319148938, | |
| "grad_norm": 0.16949890553951263, | |
| "learning_rate": 7.115422110794711e-05, | |
| "loss": 0.0051, | |
| "step": 3930 | |
| }, | |
| { | |
| "epoch": 20.95744680851064, | |
| "grad_norm": 0.11132027953863144, | |
| "learning_rate": 7.100428642059033e-05, | |
| "loss": 0.0057, | |
| "step": 3940 | |
| }, | |
| { | |
| "epoch": 21.01063829787234, | |
| "grad_norm": 0.12514273822307587, | |
| "learning_rate": 7.08541220337224e-05, | |
| "loss": 0.0065, | |
| "step": 3950 | |
| }, | |
| { | |
| "epoch": 21.06382978723404, | |
| "grad_norm": 0.15788310766220093, | |
| "learning_rate": 7.070372958951706e-05, | |
| "loss": 0.0069, | |
| "step": 3960 | |
| }, | |
| { | |
| "epoch": 21.117021276595743, | |
| "grad_norm": 0.16550275683403015, | |
| "learning_rate": 7.055311073264194e-05, | |
| "loss": 0.0064, | |
| "step": 3970 | |
| }, | |
| { | |
| "epoch": 21.170212765957448, | |
| "grad_norm": 0.17971380054950714, | |
| "learning_rate": 7.040226711024077e-05, | |
| "loss": 0.0061, | |
| "step": 3980 | |
| }, | |
| { | |
| "epoch": 21.22340425531915, | |
| "grad_norm": 0.12029004842042923, | |
| "learning_rate": 7.02512003719152e-05, | |
| "loss": 0.0051, | |
| "step": 3990 | |
| }, | |
| { | |
| "epoch": 21.27659574468085, | |
| "grad_norm": 0.10426761955022812, | |
| "learning_rate": 7.00999121697069e-05, | |
| "loss": 0.0073, | |
| "step": 4000 | |
| }, | |
| { | |
| "epoch": 21.329787234042552, | |
| "grad_norm": 0.10406999289989471, | |
| "learning_rate": 6.99484041580794e-05, | |
| "loss": 0.0071, | |
| "step": 4010 | |
| }, | |
| { | |
| "epoch": 21.382978723404257, | |
| "grad_norm": 0.14519692957401276, | |
| "learning_rate": 6.979667799390004e-05, | |
| "loss": 0.0075, | |
| "step": 4020 | |
| }, | |
| { | |
| "epoch": 21.43617021276596, | |
| "grad_norm": 0.14059394598007202, | |
| "learning_rate": 6.964473533642185e-05, | |
| "loss": 0.0053, | |
| "step": 4030 | |
| }, | |
| { | |
| "epoch": 21.48936170212766, | |
| "grad_norm": 0.15268488228321075, | |
| "learning_rate": 6.949257784726539e-05, | |
| "loss": 0.0052, | |
| "step": 4040 | |
| }, | |
| { | |
| "epoch": 21.54255319148936, | |
| "grad_norm": 0.2996109426021576, | |
| "learning_rate": 6.934020719040056e-05, | |
| "loss": 0.0056, | |
| "step": 4050 | |
| }, | |
| { | |
| "epoch": 21.595744680851062, | |
| "grad_norm": 0.21426759660243988, | |
| "learning_rate": 6.918762503212848e-05, | |
| "loss": 0.006, | |
| "step": 4060 | |
| }, | |
| { | |
| "epoch": 21.648936170212767, | |
| "grad_norm": 0.18181400001049042, | |
| "learning_rate": 6.903483304106319e-05, | |
| "loss": 0.0054, | |
| "step": 4070 | |
| }, | |
| { | |
| "epoch": 21.70212765957447, | |
| "grad_norm": 0.19103117287158966, | |
| "learning_rate": 6.888183288811341e-05, | |
| "loss": 0.0063, | |
| "step": 4080 | |
| }, | |
| { | |
| "epoch": 21.75531914893617, | |
| "grad_norm": 0.24462604522705078, | |
| "learning_rate": 6.87286262464643e-05, | |
| "loss": 0.0064, | |
| "step": 4090 | |
| }, | |
| { | |
| "epoch": 21.80851063829787, | |
| "grad_norm": 0.22812288999557495, | |
| "learning_rate": 6.857521479155915e-05, | |
| "loss": 0.0058, | |
| "step": 4100 | |
| }, | |
| { | |
| "epoch": 21.861702127659573, | |
| "grad_norm": 0.17214304208755493, | |
| "learning_rate": 6.842160020108104e-05, | |
| "loss": 0.0056, | |
| "step": 4110 | |
| }, | |
| { | |
| "epoch": 21.914893617021278, | |
| "grad_norm": 0.183730810880661, | |
| "learning_rate": 6.826778415493455e-05, | |
| "loss": 0.0049, | |
| "step": 4120 | |
| }, | |
| { | |
| "epoch": 21.96808510638298, | |
| "grad_norm": 0.13164620101451874, | |
| "learning_rate": 6.811376833522729e-05, | |
| "loss": 0.0044, | |
| "step": 4130 | |
| }, | |
| { | |
| "epoch": 22.02127659574468, | |
| "grad_norm": 0.15655295550823212, | |
| "learning_rate": 6.795955442625159e-05, | |
| "loss": 0.0058, | |
| "step": 4140 | |
| }, | |
| { | |
| "epoch": 22.074468085106382, | |
| "grad_norm": 0.12938356399536133, | |
| "learning_rate": 6.780514411446608e-05, | |
| "loss": 0.0046, | |
| "step": 4150 | |
| }, | |
| { | |
| "epoch": 22.127659574468087, | |
| "grad_norm": 0.10411049425601959, | |
| "learning_rate": 6.765053908847716e-05, | |
| "loss": 0.0048, | |
| "step": 4160 | |
| }, | |
| { | |
| "epoch": 22.180851063829788, | |
| "grad_norm": 0.11426330357789993, | |
| "learning_rate": 6.749574103902064e-05, | |
| "loss": 0.0052, | |
| "step": 4170 | |
| }, | |
| { | |
| "epoch": 22.23404255319149, | |
| "grad_norm": 0.12123023718595505, | |
| "learning_rate": 6.734075165894317e-05, | |
| "loss": 0.007, | |
| "step": 4180 | |
| }, | |
| { | |
| "epoch": 22.28723404255319, | |
| "grad_norm": 0.1345352679491043, | |
| "learning_rate": 6.71855726431838e-05, | |
| "loss": 0.0052, | |
| "step": 4190 | |
| }, | |
| { | |
| "epoch": 22.340425531914892, | |
| "grad_norm": 0.12121392041444778, | |
| "learning_rate": 6.703020568875538e-05, | |
| "loss": 0.0058, | |
| "step": 4200 | |
| }, | |
| { | |
| "epoch": 22.393617021276597, | |
| "grad_norm": 0.16804833710193634, | |
| "learning_rate": 6.687465249472603e-05, | |
| "loss": 0.009, | |
| "step": 4210 | |
| }, | |
| { | |
| "epoch": 22.4468085106383, | |
| "grad_norm": 0.10779353231191635, | |
| "learning_rate": 6.671891476220055e-05, | |
| "loss": 0.0051, | |
| "step": 4220 | |
| }, | |
| { | |
| "epoch": 22.5, | |
| "grad_norm": 0.14378249645233154, | |
| "learning_rate": 6.656299419430183e-05, | |
| "loss": 0.0052, | |
| "step": 4230 | |
| }, | |
| { | |
| "epoch": 22.5531914893617, | |
| "grad_norm": 0.10970911383628845, | |
| "learning_rate": 6.640689249615223e-05, | |
| "loss": 0.0054, | |
| "step": 4240 | |
| }, | |
| { | |
| "epoch": 22.606382978723403, | |
| "grad_norm": 0.14757134020328522, | |
| "learning_rate": 6.625061137485491e-05, | |
| "loss": 0.0061, | |
| "step": 4250 | |
| }, | |
| { | |
| "epoch": 22.659574468085108, | |
| "grad_norm": 0.12002810835838318, | |
| "learning_rate": 6.609415253947517e-05, | |
| "loss": 0.0053, | |
| "step": 4260 | |
| }, | |
| { | |
| "epoch": 22.71276595744681, | |
| "grad_norm": 0.0790410116314888, | |
| "learning_rate": 6.593751770102178e-05, | |
| "loss": 0.0047, | |
| "step": 4270 | |
| }, | |
| { | |
| "epoch": 22.76595744680851, | |
| "grad_norm": 0.13231422007083893, | |
| "learning_rate": 6.578070857242823e-05, | |
| "loss": 0.006, | |
| "step": 4280 | |
| }, | |
| { | |
| "epoch": 22.819148936170212, | |
| "grad_norm": 0.13650839030742645, | |
| "learning_rate": 6.562372686853402e-05, | |
| "loss": 0.0048, | |
| "step": 4290 | |
| }, | |
| { | |
| "epoch": 22.872340425531917, | |
| "grad_norm": 0.15151211619377136, | |
| "learning_rate": 6.546657430606593e-05, | |
| "loss": 0.005, | |
| "step": 4300 | |
| }, | |
| { | |
| "epoch": 22.925531914893618, | |
| "grad_norm": 0.1874573677778244, | |
| "learning_rate": 6.530925260361918e-05, | |
| "loss": 0.0063, | |
| "step": 4310 | |
| }, | |
| { | |
| "epoch": 22.97872340425532, | |
| "grad_norm": 0.1430789679288864, | |
| "learning_rate": 6.515176348163871e-05, | |
| "loss": 0.0057, | |
| "step": 4320 | |
| }, | |
| { | |
| "epoch": 23.03191489361702, | |
| "grad_norm": 0.14078618586063385, | |
| "learning_rate": 6.499410866240032e-05, | |
| "loss": 0.0044, | |
| "step": 4330 | |
| }, | |
| { | |
| "epoch": 23.085106382978722, | |
| "grad_norm": 0.10171610116958618, | |
| "learning_rate": 6.48362898699919e-05, | |
| "loss": 0.0045, | |
| "step": 4340 | |
| }, | |
| { | |
| "epoch": 23.138297872340427, | |
| "grad_norm": 0.13780160248279572, | |
| "learning_rate": 6.467830883029443e-05, | |
| "loss": 0.006, | |
| "step": 4350 | |
| }, | |
| { | |
| "epoch": 23.19148936170213, | |
| "grad_norm": 0.12256841361522675, | |
| "learning_rate": 6.452016727096326e-05, | |
| "loss": 0.0057, | |
| "step": 4360 | |
| }, | |
| { | |
| "epoch": 23.24468085106383, | |
| "grad_norm": 0.1547333151102066, | |
| "learning_rate": 6.436186692140916e-05, | |
| "loss": 0.0038, | |
| "step": 4370 | |
| }, | |
| { | |
| "epoch": 23.29787234042553, | |
| "grad_norm": 0.11319739371538162, | |
| "learning_rate": 6.420340951277938e-05, | |
| "loss": 0.0046, | |
| "step": 4380 | |
| }, | |
| { | |
| "epoch": 23.351063829787233, | |
| "grad_norm": 0.14978808164596558, | |
| "learning_rate": 6.404479677793874e-05, | |
| "loss": 0.0049, | |
| "step": 4390 | |
| }, | |
| { | |
| "epoch": 23.404255319148938, | |
| "grad_norm": 0.15863363444805145, | |
| "learning_rate": 6.388603045145075e-05, | |
| "loss": 0.0071, | |
| "step": 4400 | |
| }, | |
| { | |
| "epoch": 23.45744680851064, | |
| "grad_norm": 0.13348370790481567, | |
| "learning_rate": 6.372711226955843e-05, | |
| "loss": 0.0063, | |
| "step": 4410 | |
| }, | |
| { | |
| "epoch": 23.51063829787234, | |
| "grad_norm": 0.13283595442771912, | |
| "learning_rate": 6.356804397016564e-05, | |
| "loss": 0.0055, | |
| "step": 4420 | |
| }, | |
| { | |
| "epoch": 23.56382978723404, | |
| "grad_norm": 0.19219309091567993, | |
| "learning_rate": 6.340882729281779e-05, | |
| "loss": 0.0076, | |
| "step": 4430 | |
| }, | |
| { | |
| "epoch": 23.617021276595743, | |
| "grad_norm": 0.14636899530887604, | |
| "learning_rate": 6.324946397868294e-05, | |
| "loss": 0.0043, | |
| "step": 4440 | |
| }, | |
| { | |
| "epoch": 23.670212765957448, | |
| "grad_norm": 0.2261485457420349, | |
| "learning_rate": 6.308995577053276e-05, | |
| "loss": 0.006, | |
| "step": 4450 | |
| }, | |
| { | |
| "epoch": 23.72340425531915, | |
| "grad_norm": 0.175037682056427, | |
| "learning_rate": 6.293030441272347e-05, | |
| "loss": 0.0044, | |
| "step": 4460 | |
| }, | |
| { | |
| "epoch": 23.77659574468085, | |
| "grad_norm": 0.18601231276988983, | |
| "learning_rate": 6.277051165117677e-05, | |
| "loss": 0.0068, | |
| "step": 4470 | |
| }, | |
| { | |
| "epoch": 23.829787234042552, | |
| "grad_norm": 0.1710241436958313, | |
| "learning_rate": 6.261057923336064e-05, | |
| "loss": 0.0058, | |
| "step": 4480 | |
| }, | |
| { | |
| "epoch": 23.882978723404257, | |
| "grad_norm": 0.158255934715271, | |
| "learning_rate": 6.245050890827042e-05, | |
| "loss": 0.0047, | |
| "step": 4490 | |
| }, | |
| { | |
| "epoch": 23.93617021276596, | |
| "grad_norm": 0.16506224870681763, | |
| "learning_rate": 6.229030242640952e-05, | |
| "loss": 0.0047, | |
| "step": 4500 | |
| }, | |
| { | |
| "epoch": 23.98936170212766, | |
| "grad_norm": 0.10506505519151688, | |
| "learning_rate": 6.212996153977037e-05, | |
| "loss": 0.0081, | |
| "step": 4510 | |
| }, | |
| { | |
| "epoch": 24.04255319148936, | |
| "grad_norm": 0.13968034088611603, | |
| "learning_rate": 6.196948800181523e-05, | |
| "loss": 0.0056, | |
| "step": 4520 | |
| }, | |
| { | |
| "epoch": 24.095744680851062, | |
| "grad_norm": 0.12571664154529572, | |
| "learning_rate": 6.180888356745695e-05, | |
| "loss": 0.0044, | |
| "step": 4530 | |
| }, | |
| { | |
| "epoch": 24.148936170212767, | |
| "grad_norm": 0.08922986686229706, | |
| "learning_rate": 6.164814999303995e-05, | |
| "loss": 0.0043, | |
| "step": 4540 | |
| }, | |
| { | |
| "epoch": 24.20212765957447, | |
| "grad_norm": 0.0964173898100853, | |
| "learning_rate": 6.148728903632081e-05, | |
| "loss": 0.0051, | |
| "step": 4550 | |
| }, | |
| { | |
| "epoch": 24.25531914893617, | |
| "grad_norm": 0.131297305226326, | |
| "learning_rate": 6.132630245644921e-05, | |
| "loss": 0.0046, | |
| "step": 4560 | |
| }, | |
| { | |
| "epoch": 24.30851063829787, | |
| "grad_norm": 0.17526111006736755, | |
| "learning_rate": 6.116519201394857e-05, | |
| "loss": 0.0062, | |
| "step": 4570 | |
| }, | |
| { | |
| "epoch": 24.361702127659573, | |
| "grad_norm": 0.22532807290554047, | |
| "learning_rate": 6.10039594706969e-05, | |
| "loss": 0.007, | |
| "step": 4580 | |
| }, | |
| { | |
| "epoch": 24.414893617021278, | |
| "grad_norm": 0.15460269153118134, | |
| "learning_rate": 6.084260658990744e-05, | |
| "loss": 0.0049, | |
| "step": 4590 | |
| }, | |
| { | |
| "epoch": 24.46808510638298, | |
| "grad_norm": 0.1336296796798706, | |
| "learning_rate": 6.068113513610943e-05, | |
| "loss": 0.0059, | |
| "step": 4600 | |
| }, | |
| { | |
| "epoch": 24.52127659574468, | |
| "grad_norm": 0.17546795308589935, | |
| "learning_rate": 6.0519546875128876e-05, | |
| "loss": 0.0056, | |
| "step": 4610 | |
| }, | |
| { | |
| "epoch": 24.574468085106382, | |
| "grad_norm": 0.13485364615917206, | |
| "learning_rate": 6.035784357406906e-05, | |
| "loss": 0.0056, | |
| "step": 4620 | |
| }, | |
| { | |
| "epoch": 24.627659574468083, | |
| "grad_norm": 0.1507759690284729, | |
| "learning_rate": 6.01960270012914e-05, | |
| "loss": 0.0045, | |
| "step": 4630 | |
| }, | |
| { | |
| "epoch": 24.680851063829788, | |
| "grad_norm": 0.1594783067703247, | |
| "learning_rate": 6.003409892639599e-05, | |
| "loss": 0.0048, | |
| "step": 4640 | |
| }, | |
| { | |
| "epoch": 24.73404255319149, | |
| "grad_norm": 0.1882818192243576, | |
| "learning_rate": 5.9872061120202336e-05, | |
| "loss": 0.0053, | |
| "step": 4650 | |
| }, | |
| { | |
| "epoch": 24.78723404255319, | |
| "grad_norm": 0.12019731849431992, | |
| "learning_rate": 5.9709915354729914e-05, | |
| "loss": 0.0045, | |
| "step": 4660 | |
| }, | |
| { | |
| "epoch": 24.840425531914892, | |
| "grad_norm": 0.13788677752017975, | |
| "learning_rate": 5.9547663403178824e-05, | |
| "loss": 0.0058, | |
| "step": 4670 | |
| }, | |
| { | |
| "epoch": 24.893617021276597, | |
| "grad_norm": 0.1534523367881775, | |
| "learning_rate": 5.9385307039910445e-05, | |
| "loss": 0.0065, | |
| "step": 4680 | |
| }, | |
| { | |
| "epoch": 24.9468085106383, | |
| "grad_norm": 0.1684124618768692, | |
| "learning_rate": 5.922284804042792e-05, | |
| "loss": 0.0054, | |
| "step": 4690 | |
| }, | |
| { | |
| "epoch": 25.0, | |
| "grad_norm": 0.12294324487447739, | |
| "learning_rate": 5.906028818135687e-05, | |
| "loss": 0.0046, | |
| "step": 4700 | |
| }, | |
| { | |
| "epoch": 25.0531914893617, | |
| "grad_norm": 0.11209085583686829, | |
| "learning_rate": 5.889762924042585e-05, | |
| "loss": 0.0043, | |
| "step": 4710 | |
| }, | |
| { | |
| "epoch": 25.106382978723403, | |
| "grad_norm": 0.09571782499551773, | |
| "learning_rate": 5.873487299644699e-05, | |
| "loss": 0.0048, | |
| "step": 4720 | |
| }, | |
| { | |
| "epoch": 25.159574468085108, | |
| "grad_norm": 0.0861656591296196, | |
| "learning_rate": 5.857202122929649e-05, | |
| "loss": 0.0056, | |
| "step": 4730 | |
| }, | |
| { | |
| "epoch": 25.21276595744681, | |
| "grad_norm": 0.12018528580665588, | |
| "learning_rate": 5.840907571989518e-05, | |
| "loss": 0.0045, | |
| "step": 4740 | |
| }, | |
| { | |
| "epoch": 25.26595744680851, | |
| "grad_norm": 0.16271594166755676, | |
| "learning_rate": 5.824603825018904e-05, | |
| "loss": 0.0047, | |
| "step": 4750 | |
| }, | |
| { | |
| "epoch": 25.319148936170212, | |
| "grad_norm": 0.10884833335876465, | |
| "learning_rate": 5.808291060312975e-05, | |
| "loss": 0.0057, | |
| "step": 4760 | |
| }, | |
| { | |
| "epoch": 25.372340425531913, | |
| "grad_norm": 0.13237303495407104, | |
| "learning_rate": 5.7919694562655083e-05, | |
| "loss": 0.0038, | |
| "step": 4770 | |
| }, | |
| { | |
| "epoch": 25.425531914893618, | |
| "grad_norm": 0.14684104919433594, | |
| "learning_rate": 5.775639191366954e-05, | |
| "loss": 0.0059, | |
| "step": 4780 | |
| }, | |
| { | |
| "epoch": 25.47872340425532, | |
| "grad_norm": 0.13502024114131927, | |
| "learning_rate": 5.75930044420247e-05, | |
| "loss": 0.0066, | |
| "step": 4790 | |
| }, | |
| { | |
| "epoch": 25.53191489361702, | |
| "grad_norm": 0.128277987241745, | |
| "learning_rate": 5.74295339344998e-05, | |
| "loss": 0.0048, | |
| "step": 4800 | |
| }, | |
| { | |
| "epoch": 25.585106382978722, | |
| "grad_norm": 0.09760946780443192, | |
| "learning_rate": 5.726598217878211e-05, | |
| "loss": 0.0049, | |
| "step": 4810 | |
| }, | |
| { | |
| "epoch": 25.638297872340427, | |
| "grad_norm": 0.1846017986536026, | |
| "learning_rate": 5.71023509634474e-05, | |
| "loss": 0.0059, | |
| "step": 4820 | |
| }, | |
| { | |
| "epoch": 25.69148936170213, | |
| "grad_norm": 0.11778675019741058, | |
| "learning_rate": 5.693864207794049e-05, | |
| "loss": 0.0043, | |
| "step": 4830 | |
| }, | |
| { | |
| "epoch": 25.74468085106383, | |
| "grad_norm": 0.1281134933233261, | |
| "learning_rate": 5.677485731255545e-05, | |
| "loss": 0.0043, | |
| "step": 4840 | |
| }, | |
| { | |
| "epoch": 25.79787234042553, | |
| "grad_norm": 0.15498732030391693, | |
| "learning_rate": 5.6610998458416296e-05, | |
| "loss": 0.0066, | |
| "step": 4850 | |
| }, | |
| { | |
| "epoch": 25.851063829787233, | |
| "grad_norm": 0.1598815768957138, | |
| "learning_rate": 5.644706730745716e-05, | |
| "loss": 0.0055, | |
| "step": 4860 | |
| }, | |
| { | |
| "epoch": 25.904255319148938, | |
| "grad_norm": 0.12758943438529968, | |
| "learning_rate": 5.628306565240287e-05, | |
| "loss": 0.005, | |
| "step": 4870 | |
| }, | |
| { | |
| "epoch": 25.95744680851064, | |
| "grad_norm": 0.1843452900648117, | |
| "learning_rate": 5.611899528674923e-05, | |
| "loss": 0.0052, | |
| "step": 4880 | |
| }, | |
| { | |
| "epoch": 26.01063829787234, | |
| "grad_norm": 0.13359080255031586, | |
| "learning_rate": 5.595485800474349e-05, | |
| "loss": 0.0053, | |
| "step": 4890 | |
| }, | |
| { | |
| "epoch": 26.06382978723404, | |
| "grad_norm": 0.15048083662986755, | |
| "learning_rate": 5.579065560136467e-05, | |
| "loss": 0.0053, | |
| "step": 4900 | |
| }, | |
| { | |
| "epoch": 26.117021276595743, | |
| "grad_norm": 0.1319904625415802, | |
| "learning_rate": 5.562638987230392e-05, | |
| "loss": 0.0045, | |
| "step": 4910 | |
| }, | |
| { | |
| "epoch": 26.170212765957448, | |
| "grad_norm": 0.1363140046596527, | |
| "learning_rate": 5.546206261394498e-05, | |
| "loss": 0.0045, | |
| "step": 4920 | |
| }, | |
| { | |
| "epoch": 26.22340425531915, | |
| "grad_norm": 0.10928544402122498, | |
| "learning_rate": 5.529767562334437e-05, | |
| "loss": 0.0042, | |
| "step": 4930 | |
| }, | |
| { | |
| "epoch": 26.27659574468085, | |
| "grad_norm": 0.11054402589797974, | |
| "learning_rate": 5.5133230698211926e-05, | |
| "loss": 0.0044, | |
| "step": 4940 | |
| }, | |
| { | |
| "epoch": 26.329787234042552, | |
| "grad_norm": 0.12386688590049744, | |
| "learning_rate": 5.496872963689096e-05, | |
| "loss": 0.005, | |
| "step": 4950 | |
| }, | |
| { | |
| "epoch": 26.382978723404257, | |
| "grad_norm": 0.09309570491313934, | |
| "learning_rate": 5.4804174238338756e-05, | |
| "loss": 0.0045, | |
| "step": 4960 | |
| }, | |
| { | |
| "epoch": 26.43617021276596, | |
| "grad_norm": 0.08379004895687103, | |
| "learning_rate": 5.463956630210678e-05, | |
| "loss": 0.0039, | |
| "step": 4970 | |
| }, | |
| { | |
| "epoch": 26.48936170212766, | |
| "grad_norm": 0.10490185767412186, | |
| "learning_rate": 5.4474907628321046e-05, | |
| "loss": 0.0044, | |
| "step": 4980 | |
| }, | |
| { | |
| "epoch": 26.54255319148936, | |
| "grad_norm": 0.10295175760984421, | |
| "learning_rate": 5.431020001766244e-05, | |
| "loss": 0.0048, | |
| "step": 4990 | |
| }, | |
| { | |
| "epoch": 26.595744680851062, | |
| "grad_norm": 0.1423989087343216, | |
| "learning_rate": 5.4145445271346986e-05, | |
| "loss": 0.0045, | |
| "step": 5000 | |
| }, | |
| { | |
| "epoch": 26.648936170212767, | |
| "grad_norm": 0.148647278547287, | |
| "learning_rate": 5.398064519110622e-05, | |
| "loss": 0.0078, | |
| "step": 5010 | |
| }, | |
| { | |
| "epoch": 26.70212765957447, | |
| "grad_norm": 0.13346539437770844, | |
| "learning_rate": 5.3815801579167394e-05, | |
| "loss": 0.0049, | |
| "step": 5020 | |
| }, | |
| { | |
| "epoch": 26.75531914893617, | |
| "grad_norm": 0.10231109708547592, | |
| "learning_rate": 5.365091623823382e-05, | |
| "loss": 0.0036, | |
| "step": 5030 | |
| }, | |
| { | |
| "epoch": 26.80851063829787, | |
| "grad_norm": 0.08533221483230591, | |
| "learning_rate": 5.348599097146521e-05, | |
| "loss": 0.0049, | |
| "step": 5040 | |
| }, | |
| { | |
| "epoch": 26.861702127659573, | |
| "grad_norm": 0.11859337240457535, | |
| "learning_rate": 5.3321027582457836e-05, | |
| "loss": 0.0043, | |
| "step": 5050 | |
| }, | |
| { | |
| "epoch": 26.914893617021278, | |
| "grad_norm": 0.1568300873041153, | |
| "learning_rate": 5.315602787522491e-05, | |
| "loss": 0.0049, | |
| "step": 5060 | |
| }, | |
| { | |
| "epoch": 26.96808510638298, | |
| "grad_norm": 0.19247455894947052, | |
| "learning_rate": 5.299099365417678e-05, | |
| "loss": 0.0048, | |
| "step": 5070 | |
| }, | |
| { | |
| "epoch": 27.02127659574468, | |
| "grad_norm": 0.16885174810886383, | |
| "learning_rate": 5.2825926724101236e-05, | |
| "loss": 0.0053, | |
| "step": 5080 | |
| }, | |
| { | |
| "epoch": 27.074468085106382, | |
| "grad_norm": 0.16551966965198517, | |
| "learning_rate": 5.26608288901438e-05, | |
| "loss": 0.0095, | |
| "step": 5090 | |
| }, | |
| { | |
| "epoch": 27.127659574468087, | |
| "grad_norm": 0.16125935316085815, | |
| "learning_rate": 5.24957019577879e-05, | |
| "loss": 0.0061, | |
| "step": 5100 | |
| }, | |
| { | |
| "epoch": 27.180851063829788, | |
| "grad_norm": 0.09818519651889801, | |
| "learning_rate": 5.2330547732835266e-05, | |
| "loss": 0.0082, | |
| "step": 5110 | |
| }, | |
| { | |
| "epoch": 27.23404255319149, | |
| "grad_norm": 0.10520809143781662, | |
| "learning_rate": 5.2165368021385996e-05, | |
| "loss": 0.0056, | |
| "step": 5120 | |
| }, | |
| { | |
| "epoch": 27.28723404255319, | |
| "grad_norm": 0.20799444615840912, | |
| "learning_rate": 5.200016462981897e-05, | |
| "loss": 0.0057, | |
| "step": 5130 | |
| }, | |
| { | |
| "epoch": 27.340425531914892, | |
| "grad_norm": 0.13227200508117676, | |
| "learning_rate": 5.1834939364772015e-05, | |
| "loss": 0.0049, | |
| "step": 5140 | |
| }, | |
| { | |
| "epoch": 27.393617021276597, | |
| "grad_norm": 0.13228751718997955, | |
| "learning_rate": 5.166969403312214e-05, | |
| "loss": 0.004, | |
| "step": 5150 | |
| }, | |
| { | |
| "epoch": 27.4468085106383, | |
| "grad_norm": 0.1374226212501526, | |
| "learning_rate": 5.1504430441965844e-05, | |
| "loss": 0.0052, | |
| "step": 5160 | |
| }, | |
| { | |
| "epoch": 27.5, | |
| "grad_norm": 0.1806238740682602, | |
| "learning_rate": 5.133915039859923e-05, | |
| "loss": 0.0041, | |
| "step": 5170 | |
| }, | |
| { | |
| "epoch": 27.5531914893617, | |
| "grad_norm": 0.1809604912996292, | |
| "learning_rate": 5.1173855710498444e-05, | |
| "loss": 0.0038, | |
| "step": 5180 | |
| }, | |
| { | |
| "epoch": 27.606382978723403, | |
| "grad_norm": 0.1646224707365036, | |
| "learning_rate": 5.100854818529967e-05, | |
| "loss": 0.0045, | |
| "step": 5190 | |
| }, | |
| { | |
| "epoch": 27.659574468085108, | |
| "grad_norm": 0.09681407362222672, | |
| "learning_rate": 5.084322963077951e-05, | |
| "loss": 0.0045, | |
| "step": 5200 | |
| }, | |
| { | |
| "epoch": 27.71276595744681, | |
| "grad_norm": 0.1483067274093628, | |
| "learning_rate": 5.067790185483522e-05, | |
| "loss": 0.0045, | |
| "step": 5210 | |
| }, | |
| { | |
| "epoch": 27.76595744680851, | |
| "grad_norm": 0.13828535377979279, | |
| "learning_rate": 5.0512566665464844e-05, | |
| "loss": 0.0046, | |
| "step": 5220 | |
| }, | |
| { | |
| "epoch": 27.819148936170212, | |
| "grad_norm": 0.158182293176651, | |
| "learning_rate": 5.034722587074755e-05, | |
| "loss": 0.0041, | |
| "step": 5230 | |
| }, | |
| { | |
| "epoch": 27.872340425531917, | |
| "grad_norm": 0.13419833779335022, | |
| "learning_rate": 5.018188127882375e-05, | |
| "loss": 0.0052, | |
| "step": 5240 | |
| }, | |
| { | |
| "epoch": 27.925531914893618, | |
| "grad_norm": 0.07399006187915802, | |
| "learning_rate": 5.0016534697875417e-05, | |
| "loss": 0.0041, | |
| "step": 5250 | |
| }, | |
| { | |
| "epoch": 27.97872340425532, | |
| "grad_norm": 0.16203218698501587, | |
| "learning_rate": 4.9851187936106294e-05, | |
| "loss": 0.0055, | |
| "step": 5260 | |
| }, | |
| { | |
| "epoch": 28.03191489361702, | |
| "grad_norm": 0.1287853717803955, | |
| "learning_rate": 4.968584280172206e-05, | |
| "loss": 0.0045, | |
| "step": 5270 | |
| }, | |
| { | |
| "epoch": 28.085106382978722, | |
| "grad_norm": 0.10558535903692245, | |
| "learning_rate": 4.95205011029106e-05, | |
| "loss": 0.0038, | |
| "step": 5280 | |
| }, | |
| { | |
| "epoch": 28.138297872340427, | |
| "grad_norm": 0.1431814730167389, | |
| "learning_rate": 4.935516464782227e-05, | |
| "loss": 0.0049, | |
| "step": 5290 | |
| }, | |
| { | |
| "epoch": 28.19148936170213, | |
| "grad_norm": 0.10517885535955429, | |
| "learning_rate": 4.918983524455003e-05, | |
| "loss": 0.0058, | |
| "step": 5300 | |
| }, | |
| { | |
| "epoch": 28.24468085106383, | |
| "grad_norm": 0.12191879004240036, | |
| "learning_rate": 4.9024514701109766e-05, | |
| "loss": 0.004, | |
| "step": 5310 | |
| }, | |
| { | |
| "epoch": 28.29787234042553, | |
| "grad_norm": 0.11636163294315338, | |
| "learning_rate": 4.885920482542043e-05, | |
| "loss": 0.0059, | |
| "step": 5320 | |
| }, | |
| { | |
| "epoch": 28.351063829787233, | |
| "grad_norm": 0.06456343084573746, | |
| "learning_rate": 4.869390742528438e-05, | |
| "loss": 0.0042, | |
| "step": 5330 | |
| }, | |
| { | |
| "epoch": 28.404255319148938, | |
| "grad_norm": 0.11878684908151627, | |
| "learning_rate": 4.852862430836744e-05, | |
| "loss": 0.0045, | |
| "step": 5340 | |
| }, | |
| { | |
| "epoch": 28.45744680851064, | |
| "grad_norm": 0.11376497894525528, | |
| "learning_rate": 4.836335728217933e-05, | |
| "loss": 0.0039, | |
| "step": 5350 | |
| }, | |
| { | |
| "epoch": 28.51063829787234, | |
| "grad_norm": 0.10782907158136368, | |
| "learning_rate": 4.819810815405379e-05, | |
| "loss": 0.0034, | |
| "step": 5360 | |
| }, | |
| { | |
| "epoch": 28.56382978723404, | |
| "grad_norm": 0.10200174152851105, | |
| "learning_rate": 4.803287873112877e-05, | |
| "loss": 0.0049, | |
| "step": 5370 | |
| }, | |
| { | |
| "epoch": 28.617021276595743, | |
| "grad_norm": 0.1601831167936325, | |
| "learning_rate": 4.786767082032681e-05, | |
| "loss": 0.0035, | |
| "step": 5380 | |
| }, | |
| { | |
| "epoch": 28.670212765957448, | |
| "grad_norm": 0.09151140600442886, | |
| "learning_rate": 4.77024862283351e-05, | |
| "loss": 0.0037, | |
| "step": 5390 | |
| }, | |
| { | |
| "epoch": 28.72340425531915, | |
| "grad_norm": 0.11615727841854095, | |
| "learning_rate": 4.753732676158593e-05, | |
| "loss": 0.0038, | |
| "step": 5400 | |
| }, | |
| { | |
| "epoch": 28.77659574468085, | |
| "grad_norm": 0.09714711457490921, | |
| "learning_rate": 4.737219422623672e-05, | |
| "loss": 0.0038, | |
| "step": 5410 | |
| }, | |
| { | |
| "epoch": 28.829787234042552, | |
| "grad_norm": 0.14821487665176392, | |
| "learning_rate": 4.720709042815044e-05, | |
| "loss": 0.0036, | |
| "step": 5420 | |
| }, | |
| { | |
| "epoch": 28.882978723404257, | |
| "grad_norm": 0.07862147688865662, | |
| "learning_rate": 4.704201717287578e-05, | |
| "loss": 0.0064, | |
| "step": 5430 | |
| }, | |
| { | |
| "epoch": 28.93617021276596, | |
| "grad_norm": 0.14432406425476074, | |
| "learning_rate": 4.6876976265627404e-05, | |
| "loss": 0.0048, | |
| "step": 5440 | |
| }, | |
| { | |
| "epoch": 28.98936170212766, | |
| "grad_norm": 0.15641769766807556, | |
| "learning_rate": 4.671196951126626e-05, | |
| "loss": 0.0042, | |
| "step": 5450 | |
| }, | |
| { | |
| "epoch": 29.04255319148936, | |
| "grad_norm": 0.16375018656253815, | |
| "learning_rate": 4.654699871427971e-05, | |
| "loss": 0.0034, | |
| "step": 5460 | |
| }, | |
| { | |
| "epoch": 29.095744680851062, | |
| "grad_norm": 0.10680592805147171, | |
| "learning_rate": 4.6382065678762034e-05, | |
| "loss": 0.004, | |
| "step": 5470 | |
| }, | |
| { | |
| "epoch": 29.148936170212767, | |
| "grad_norm": 0.13168980181217194, | |
| "learning_rate": 4.6217172208394424e-05, | |
| "loss": 0.0036, | |
| "step": 5480 | |
| }, | |
| { | |
| "epoch": 29.20212765957447, | |
| "grad_norm": 0.13390226662158966, | |
| "learning_rate": 4.605232010642549e-05, | |
| "loss": 0.0043, | |
| "step": 5490 | |
| }, | |
| { | |
| "epoch": 29.25531914893617, | |
| "grad_norm": 0.08686613291501999, | |
| "learning_rate": 4.588751117565142e-05, | |
| "loss": 0.0037, | |
| "step": 5500 | |
| }, | |
| { | |
| "epoch": 29.30851063829787, | |
| "grad_norm": 0.13847863674163818, | |
| "learning_rate": 4.5722747218396214e-05, | |
| "loss": 0.0048, | |
| "step": 5510 | |
| }, | |
| { | |
| "epoch": 29.361702127659573, | |
| "grad_norm": 0.08126416802406311, | |
| "learning_rate": 4.5558030036492194e-05, | |
| "loss": 0.0048, | |
| "step": 5520 | |
| }, | |
| { | |
| "epoch": 29.414893617021278, | |
| "grad_norm": 0.09339109808206558, | |
| "learning_rate": 4.539336143125999e-05, | |
| "loss": 0.0038, | |
| "step": 5530 | |
| }, | |
| { | |
| "epoch": 29.46808510638298, | |
| "grad_norm": 0.1315300613641739, | |
| "learning_rate": 4.522874320348916e-05, | |
| "loss": 0.0051, | |
| "step": 5540 | |
| }, | |
| { | |
| "epoch": 29.52127659574468, | |
| "grad_norm": 0.10935825109481812, | |
| "learning_rate": 4.506417715341821e-05, | |
| "loss": 0.0042, | |
| "step": 5550 | |
| }, | |
| { | |
| "epoch": 29.574468085106382, | |
| "grad_norm": 0.1070227101445198, | |
| "learning_rate": 4.489966508071511e-05, | |
| "loss": 0.0046, | |
| "step": 5560 | |
| }, | |
| { | |
| "epoch": 29.627659574468083, | |
| "grad_norm": 0.11543095111846924, | |
| "learning_rate": 4.4735208784457575e-05, | |
| "loss": 0.0043, | |
| "step": 5570 | |
| }, | |
| { | |
| "epoch": 29.680851063829788, | |
| "grad_norm": 0.09868212044239044, | |
| "learning_rate": 4.457081006311325e-05, | |
| "loss": 0.0034, | |
| "step": 5580 | |
| }, | |
| { | |
| "epoch": 29.73404255319149, | |
| "grad_norm": 0.10761543363332748, | |
| "learning_rate": 4.440647071452027e-05, | |
| "loss": 0.004, | |
| "step": 5590 | |
| }, | |
| { | |
| "epoch": 29.78723404255319, | |
| "grad_norm": 0.07485445588827133, | |
| "learning_rate": 4.424219253586737e-05, | |
| "loss": 0.0047, | |
| "step": 5600 | |
| }, | |
| { | |
| "epoch": 29.840425531914892, | |
| "grad_norm": 0.1419522911310196, | |
| "learning_rate": 4.407797732367443e-05, | |
| "loss": 0.0041, | |
| "step": 5610 | |
| }, | |
| { | |
| "epoch": 29.893617021276597, | |
| "grad_norm": 0.12195772677659988, | |
| "learning_rate": 4.391382687377268e-05, | |
| "loss": 0.0047, | |
| "step": 5620 | |
| }, | |
| { | |
| "epoch": 29.9468085106383, | |
| "grad_norm": 0.12314418703317642, | |
| "learning_rate": 4.374974298128512e-05, | |
| "loss": 0.0048, | |
| "step": 5630 | |
| }, | |
| { | |
| "epoch": 30.0, | |
| "grad_norm": 0.09774700552225113, | |
| "learning_rate": 4.358572744060699e-05, | |
| "loss": 0.0048, | |
| "step": 5640 | |
| }, | |
| { | |
| "epoch": 30.0531914893617, | |
| "grad_norm": 0.11019723862409592, | |
| "learning_rate": 4.342178204538588e-05, | |
| "loss": 0.0044, | |
| "step": 5650 | |
| }, | |
| { | |
| "epoch": 30.106382978723403, | |
| "grad_norm": 0.09534955024719238, | |
| "learning_rate": 4.325790858850241e-05, | |
| "loss": 0.0031, | |
| "step": 5660 | |
| }, | |
| { | |
| "epoch": 30.159574468085108, | |
| "grad_norm": 0.16368848085403442, | |
| "learning_rate": 4.309410886205043e-05, | |
| "loss": 0.005, | |
| "step": 5670 | |
| }, | |
| { | |
| "epoch": 30.21276595744681, | |
| "grad_norm": 0.08627234399318695, | |
| "learning_rate": 4.293038465731752e-05, | |
| "loss": 0.0035, | |
| "step": 5680 | |
| }, | |
| { | |
| "epoch": 30.26595744680851, | |
| "grad_norm": 0.09576146304607391, | |
| "learning_rate": 4.276673776476533e-05, | |
| "loss": 0.004, | |
| "step": 5690 | |
| }, | |
| { | |
| "epoch": 30.319148936170212, | |
| "grad_norm": 0.11205677688121796, | |
| "learning_rate": 4.260316997401007e-05, | |
| "loss": 0.0045, | |
| "step": 5700 | |
| }, | |
| { | |
| "epoch": 30.372340425531913, | |
| "grad_norm": 0.10938065499067307, | |
| "learning_rate": 4.243968307380293e-05, | |
| "loss": 0.0034, | |
| "step": 5710 | |
| }, | |
| { | |
| "epoch": 30.425531914893618, | |
| "grad_norm": 0.10687950998544693, | |
| "learning_rate": 4.22762788520104e-05, | |
| "loss": 0.0033, | |
| "step": 5720 | |
| }, | |
| { | |
| "epoch": 30.47872340425532, | |
| "grad_norm": 0.09232348948717117, | |
| "learning_rate": 4.211295909559491e-05, | |
| "loss": 0.0045, | |
| "step": 5730 | |
| }, | |
| { | |
| "epoch": 30.53191489361702, | |
| "grad_norm": 0.11671149730682373, | |
| "learning_rate": 4.194972559059511e-05, | |
| "loss": 0.0036, | |
| "step": 5740 | |
| }, | |
| { | |
| "epoch": 30.585106382978722, | |
| "grad_norm": 0.16930541396141052, | |
| "learning_rate": 4.178658012210651e-05, | |
| "loss": 0.0057, | |
| "step": 5750 | |
| }, | |
| { | |
| "epoch": 30.638297872340427, | |
| "grad_norm": 0.14280372858047485, | |
| "learning_rate": 4.162352447426177e-05, | |
| "loss": 0.0048, | |
| "step": 5760 | |
| }, | |
| { | |
| "epoch": 30.69148936170213, | |
| "grad_norm": 0.08777961879968643, | |
| "learning_rate": 4.146056043021135e-05, | |
| "loss": 0.0034, | |
| "step": 5770 | |
| }, | |
| { | |
| "epoch": 30.74468085106383, | |
| "grad_norm": 0.10008836537599564, | |
| "learning_rate": 4.1297689772103944e-05, | |
| "loss": 0.0036, | |
| "step": 5780 | |
| }, | |
| { | |
| "epoch": 30.79787234042553, | |
| "grad_norm": 0.10526333004236221, | |
| "learning_rate": 4.113491428106694e-05, | |
| "loss": 0.0037, | |
| "step": 5790 | |
| }, | |
| { | |
| "epoch": 30.851063829787233, | |
| "grad_norm": 0.10240600258111954, | |
| "learning_rate": 4.0972235737187055e-05, | |
| "loss": 0.004, | |
| "step": 5800 | |
| }, | |
| { | |
| "epoch": 30.904255319148938, | |
| "grad_norm": 0.11126586049795151, | |
| "learning_rate": 4.080965591949076e-05, | |
| "loss": 0.0051, | |
| "step": 5810 | |
| }, | |
| { | |
| "epoch": 30.95744680851064, | |
| "grad_norm": 0.17938531935214996, | |
| "learning_rate": 4.0647176605924924e-05, | |
| "loss": 0.0035, | |
| "step": 5820 | |
| }, | |
| { | |
| "epoch": 31.01063829787234, | |
| "grad_norm": 0.17085476219654083, | |
| "learning_rate": 4.0484799573337255e-05, | |
| "loss": 0.0051, | |
| "step": 5830 | |
| }, | |
| { | |
| "epoch": 31.06382978723404, | |
| "grad_norm": 0.06396578997373581, | |
| "learning_rate": 4.032252659745699e-05, | |
| "loss": 0.0034, | |
| "step": 5840 | |
| }, | |
| { | |
| "epoch": 31.117021276595743, | |
| "grad_norm": 0.10177069902420044, | |
| "learning_rate": 4.016035945287539e-05, | |
| "loss": 0.0034, | |
| "step": 5850 | |
| }, | |
| { | |
| "epoch": 31.170212765957448, | |
| "grad_norm": 0.14542987942695618, | |
| "learning_rate": 3.999829991302635e-05, | |
| "loss": 0.0046, | |
| "step": 5860 | |
| }, | |
| { | |
| "epoch": 31.22340425531915, | |
| "grad_norm": 0.1978503167629242, | |
| "learning_rate": 3.983634975016707e-05, | |
| "loss": 0.005, | |
| "step": 5870 | |
| }, | |
| { | |
| "epoch": 31.27659574468085, | |
| "grad_norm": 0.1334468573331833, | |
| "learning_rate": 3.967451073535854e-05, | |
| "loss": 0.0038, | |
| "step": 5880 | |
| }, | |
| { | |
| "epoch": 31.329787234042552, | |
| "grad_norm": 0.10558764636516571, | |
| "learning_rate": 3.951278463844633e-05, | |
| "loss": 0.0043, | |
| "step": 5890 | |
| }, | |
| { | |
| "epoch": 31.382978723404257, | |
| "grad_norm": 0.10375098139047623, | |
| "learning_rate": 3.935117322804111e-05, | |
| "loss": 0.0044, | |
| "step": 5900 | |
| }, | |
| { | |
| "epoch": 31.43617021276596, | |
| "grad_norm": 0.12965364754199982, | |
| "learning_rate": 3.918967827149938e-05, | |
| "loss": 0.0048, | |
| "step": 5910 | |
| }, | |
| { | |
| "epoch": 31.48936170212766, | |
| "grad_norm": 0.21120120584964752, | |
| "learning_rate": 3.9028301534904094e-05, | |
| "loss": 0.0048, | |
| "step": 5920 | |
| }, | |
| { | |
| "epoch": 31.54255319148936, | |
| "grad_norm": 0.14576758444309235, | |
| "learning_rate": 3.88670447830454e-05, | |
| "loss": 0.0041, | |
| "step": 5930 | |
| }, | |
| { | |
| "epoch": 31.595744680851062, | |
| "grad_norm": 0.10575301945209503, | |
| "learning_rate": 3.870590977940132e-05, | |
| "loss": 0.0038, | |
| "step": 5940 | |
| }, | |
| { | |
| "epoch": 31.648936170212767, | |
| "grad_norm": 0.11868831515312195, | |
| "learning_rate": 3.8544898286118404e-05, | |
| "loss": 0.0056, | |
| "step": 5950 | |
| }, | |
| { | |
| "epoch": 31.70212765957447, | |
| "grad_norm": 0.12907589972019196, | |
| "learning_rate": 3.838401206399257e-05, | |
| "loss": 0.0042, | |
| "step": 5960 | |
| }, | |
| { | |
| "epoch": 31.75531914893617, | |
| "grad_norm": 0.1045163944363594, | |
| "learning_rate": 3.822325287244975e-05, | |
| "loss": 0.0045, | |
| "step": 5970 | |
| }, | |
| { | |
| "epoch": 31.80851063829787, | |
| "grad_norm": 0.13737359642982483, | |
| "learning_rate": 3.8062622469526725e-05, | |
| "loss": 0.0042, | |
| "step": 5980 | |
| }, | |
| { | |
| "epoch": 31.861702127659573, | |
| "grad_norm": 0.15456537902355194, | |
| "learning_rate": 3.790212261185183e-05, | |
| "loss": 0.005, | |
| "step": 5990 | |
| }, | |
| { | |
| "epoch": 31.914893617021278, | |
| "grad_norm": 0.10629570484161377, | |
| "learning_rate": 3.7741755054625794e-05, | |
| "loss": 0.0054, | |
| "step": 6000 | |
| }, | |
| { | |
| "epoch": 31.96808510638298, | |
| "grad_norm": 0.14226874709129333, | |
| "learning_rate": 3.758152155160255e-05, | |
| "loss": 0.0049, | |
| "step": 6010 | |
| }, | |
| { | |
| "epoch": 32.02127659574468, | |
| "grad_norm": 0.08999091386795044, | |
| "learning_rate": 3.742142385506999e-05, | |
| "loss": 0.0032, | |
| "step": 6020 | |
| }, | |
| { | |
| "epoch": 32.07446808510638, | |
| "grad_norm": 0.12861484289169312, | |
| "learning_rate": 3.72614637158309e-05, | |
| "loss": 0.0042, | |
| "step": 6030 | |
| }, | |
| { | |
| "epoch": 32.12765957446808, | |
| "grad_norm": 0.1234334409236908, | |
| "learning_rate": 3.710164288318371e-05, | |
| "loss": 0.0035, | |
| "step": 6040 | |
| }, | |
| { | |
| "epoch": 32.180851063829785, | |
| "grad_norm": 0.19052104651927948, | |
| "learning_rate": 3.694196310490345e-05, | |
| "loss": 0.0046, | |
| "step": 6050 | |
| }, | |
| { | |
| "epoch": 32.234042553191486, | |
| "grad_norm": 0.13919176161289215, | |
| "learning_rate": 3.678242612722259e-05, | |
| "loss": 0.0043, | |
| "step": 6060 | |
| }, | |
| { | |
| "epoch": 32.287234042553195, | |
| "grad_norm": 0.19726930558681488, | |
| "learning_rate": 3.6623033694811953e-05, | |
| "loss": 0.0036, | |
| "step": 6070 | |
| }, | |
| { | |
| "epoch": 32.340425531914896, | |
| "grad_norm": 0.1662585437297821, | |
| "learning_rate": 3.6463787550761665e-05, | |
| "loss": 0.006, | |
| "step": 6080 | |
| }, | |
| { | |
| "epoch": 32.3936170212766, | |
| "grad_norm": 0.11743825674057007, | |
| "learning_rate": 3.630468943656202e-05, | |
| "loss": 0.0049, | |
| "step": 6090 | |
| }, | |
| { | |
| "epoch": 32.4468085106383, | |
| "grad_norm": 0.16987460851669312, | |
| "learning_rate": 3.6145741092084523e-05, | |
| "loss": 0.004, | |
| "step": 6100 | |
| }, | |
| { | |
| "epoch": 32.5, | |
| "grad_norm": 0.16572213172912598, | |
| "learning_rate": 3.598694425556278e-05, | |
| "loss": 0.006, | |
| "step": 6110 | |
| }, | |
| { | |
| "epoch": 32.5531914893617, | |
| "grad_norm": 0.1315222978591919, | |
| "learning_rate": 3.58283006635736e-05, | |
| "loss": 0.0044, | |
| "step": 6120 | |
| }, | |
| { | |
| "epoch": 32.6063829787234, | |
| "grad_norm": 0.15399786829948425, | |
| "learning_rate": 3.566981205101781e-05, | |
| "loss": 0.0047, | |
| "step": 6130 | |
| }, | |
| { | |
| "epoch": 32.659574468085104, | |
| "grad_norm": 0.20791779458522797, | |
| "learning_rate": 3.5511480151101556e-05, | |
| "loss": 0.0042, | |
| "step": 6140 | |
| }, | |
| { | |
| "epoch": 32.712765957446805, | |
| "grad_norm": 0.12782886624336243, | |
| "learning_rate": 3.5353306695317104e-05, | |
| "loss": 0.0045, | |
| "step": 6150 | |
| }, | |
| { | |
| "epoch": 32.765957446808514, | |
| "grad_norm": 0.15812277793884277, | |
| "learning_rate": 3.519529341342402e-05, | |
| "loss": 0.0057, | |
| "step": 6160 | |
| }, | |
| { | |
| "epoch": 32.819148936170215, | |
| "grad_norm": 0.131177619099617, | |
| "learning_rate": 3.503744203343026e-05, | |
| "loss": 0.0037, | |
| "step": 6170 | |
| }, | |
| { | |
| "epoch": 32.87234042553192, | |
| "grad_norm": 0.12614566087722778, | |
| "learning_rate": 3.487975428157318e-05, | |
| "loss": 0.0045, | |
| "step": 6180 | |
| }, | |
| { | |
| "epoch": 32.92553191489362, | |
| "grad_norm": 0.10733070969581604, | |
| "learning_rate": 3.472223188230083e-05, | |
| "loss": 0.0036, | |
| "step": 6190 | |
| }, | |
| { | |
| "epoch": 32.97872340425532, | |
| "grad_norm": 0.12159145623445511, | |
| "learning_rate": 3.4564876558252866e-05, | |
| "loss": 0.0039, | |
| "step": 6200 | |
| }, | |
| { | |
| "epoch": 33.03191489361702, | |
| "grad_norm": 0.09802938997745514, | |
| "learning_rate": 3.440769003024195e-05, | |
| "loss": 0.0033, | |
| "step": 6210 | |
| }, | |
| { | |
| "epoch": 33.08510638297872, | |
| "grad_norm": 0.16482697427272797, | |
| "learning_rate": 3.425067401723477e-05, | |
| "loss": 0.0043, | |
| "step": 6220 | |
| }, | |
| { | |
| "epoch": 33.138297872340424, | |
| "grad_norm": 0.1470826417207718, | |
| "learning_rate": 3.409383023633325e-05, | |
| "loss": 0.0045, | |
| "step": 6230 | |
| }, | |
| { | |
| "epoch": 33.191489361702125, | |
| "grad_norm": 0.10905544459819794, | |
| "learning_rate": 3.3937160402755894e-05, | |
| "loss": 0.0033, | |
| "step": 6240 | |
| }, | |
| { | |
| "epoch": 33.244680851063826, | |
| "grad_norm": 0.10362398624420166, | |
| "learning_rate": 3.378066622981885e-05, | |
| "loss": 0.0035, | |
| "step": 6250 | |
| }, | |
| { | |
| "epoch": 33.297872340425535, | |
| "grad_norm": 0.08409572392702103, | |
| "learning_rate": 3.362434942891738e-05, | |
| "loss": 0.0034, | |
| "step": 6260 | |
| }, | |
| { | |
| "epoch": 33.351063829787236, | |
| "grad_norm": 0.05727813020348549, | |
| "learning_rate": 3.346821170950693e-05, | |
| "loss": 0.0038, | |
| "step": 6270 | |
| }, | |
| { | |
| "epoch": 33.40425531914894, | |
| "grad_norm": 0.07205785065889359, | |
| "learning_rate": 3.3312254779084585e-05, | |
| "loss": 0.0036, | |
| "step": 6280 | |
| }, | |
| { | |
| "epoch": 33.45744680851064, | |
| "grad_norm": 0.13627049326896667, | |
| "learning_rate": 3.315648034317039e-05, | |
| "loss": 0.0047, | |
| "step": 6290 | |
| }, | |
| { | |
| "epoch": 33.51063829787234, | |
| "grad_norm": 0.09786457568407059, | |
| "learning_rate": 3.3000890105288564e-05, | |
| "loss": 0.0043, | |
| "step": 6300 | |
| }, | |
| { | |
| "epoch": 33.56382978723404, | |
| "grad_norm": 0.10560285300016403, | |
| "learning_rate": 3.284548576694908e-05, | |
| "loss": 0.0054, | |
| "step": 6310 | |
| }, | |
| { | |
| "epoch": 33.61702127659574, | |
| "grad_norm": 0.13821476697921753, | |
| "learning_rate": 3.2690269027628815e-05, | |
| "loss": 0.0045, | |
| "step": 6320 | |
| }, | |
| { | |
| "epoch": 33.670212765957444, | |
| "grad_norm": 0.16312015056610107, | |
| "learning_rate": 3.253524158475324e-05, | |
| "loss": 0.0048, | |
| "step": 6330 | |
| }, | |
| { | |
| "epoch": 33.723404255319146, | |
| "grad_norm": 0.15426048636436462, | |
| "learning_rate": 3.238040513367757e-05, | |
| "loss": 0.0045, | |
| "step": 6340 | |
| }, | |
| { | |
| "epoch": 33.776595744680854, | |
| "grad_norm": 0.14695295691490173, | |
| "learning_rate": 3.222576136766843e-05, | |
| "loss": 0.0054, | |
| "step": 6350 | |
| }, | |
| { | |
| "epoch": 33.829787234042556, | |
| "grad_norm": 0.10114528983831406, | |
| "learning_rate": 3.2071311977885324e-05, | |
| "loss": 0.0039, | |
| "step": 6360 | |
| }, | |
| { | |
| "epoch": 33.88297872340426, | |
| "grad_norm": 0.09737411141395569, | |
| "learning_rate": 3.191705865336197e-05, | |
| "loss": 0.0043, | |
| "step": 6370 | |
| }, | |
| { | |
| "epoch": 33.93617021276596, | |
| "grad_norm": 0.10860741138458252, | |
| "learning_rate": 3.1763003080988075e-05, | |
| "loss": 0.0034, | |
| "step": 6380 | |
| }, | |
| { | |
| "epoch": 33.98936170212766, | |
| "grad_norm": 0.12892873585224152, | |
| "learning_rate": 3.160914694549063e-05, | |
| "loss": 0.0044, | |
| "step": 6390 | |
| }, | |
| { | |
| "epoch": 34.04255319148936, | |
| "grad_norm": 0.11092966049909592, | |
| "learning_rate": 3.145549192941573e-05, | |
| "loss": 0.0045, | |
| "step": 6400 | |
| }, | |
| { | |
| "epoch": 34.09574468085106, | |
| "grad_norm": 0.141037255525589, | |
| "learning_rate": 3.130203971310999e-05, | |
| "loss": 0.0042, | |
| "step": 6410 | |
| }, | |
| { | |
| "epoch": 34.148936170212764, | |
| "grad_norm": 0.09191036224365234, | |
| "learning_rate": 3.114879197470225e-05, | |
| "loss": 0.004, | |
| "step": 6420 | |
| }, | |
| { | |
| "epoch": 34.202127659574465, | |
| "grad_norm": 0.11062730848789215, | |
| "learning_rate": 3.0995750390085285e-05, | |
| "loss": 0.0049, | |
| "step": 6430 | |
| }, | |
| { | |
| "epoch": 34.255319148936174, | |
| "grad_norm": 0.07029895484447479, | |
| "learning_rate": 3.084291663289728e-05, | |
| "loss": 0.0035, | |
| "step": 6440 | |
| }, | |
| { | |
| "epoch": 34.308510638297875, | |
| "grad_norm": 0.1491190791130066, | |
| "learning_rate": 3.069029237450375e-05, | |
| "loss": 0.0046, | |
| "step": 6450 | |
| }, | |
| { | |
| "epoch": 34.361702127659576, | |
| "grad_norm": 0.08219806104898453, | |
| "learning_rate": 3.053787928397911e-05, | |
| "loss": 0.0029, | |
| "step": 6460 | |
| }, | |
| { | |
| "epoch": 34.41489361702128, | |
| "grad_norm": 0.06165226176381111, | |
| "learning_rate": 3.0385679028088526e-05, | |
| "loss": 0.004, | |
| "step": 6470 | |
| }, | |
| { | |
| "epoch": 34.46808510638298, | |
| "grad_norm": 0.0756673663854599, | |
| "learning_rate": 3.023369327126959e-05, | |
| "loss": 0.003, | |
| "step": 6480 | |
| }, | |
| { | |
| "epoch": 34.52127659574468, | |
| "grad_norm": 0.10556336492300034, | |
| "learning_rate": 3.0081923675614198e-05, | |
| "loss": 0.003, | |
| "step": 6490 | |
| }, | |
| { | |
| "epoch": 34.57446808510638, | |
| "grad_norm": 0.0876508355140686, | |
| "learning_rate": 2.993037190085034e-05, | |
| "loss": 0.003, | |
| "step": 6500 | |
| }, | |
| { | |
| "epoch": 34.62765957446808, | |
| "grad_norm": 0.14084947109222412, | |
| "learning_rate": 2.977903960432392e-05, | |
| "loss": 0.0038, | |
| "step": 6510 | |
| }, | |
| { | |
| "epoch": 34.680851063829785, | |
| "grad_norm": 0.12581570446491241, | |
| "learning_rate": 2.9627928440980722e-05, | |
| "loss": 0.0036, | |
| "step": 6520 | |
| }, | |
| { | |
| "epoch": 34.734042553191486, | |
| "grad_norm": 0.0699087530374527, | |
| "learning_rate": 2.9477040063348183e-05, | |
| "loss": 0.0042, | |
| "step": 6530 | |
| }, | |
| { | |
| "epoch": 34.787234042553195, | |
| "grad_norm": 0.10906992107629776, | |
| "learning_rate": 2.9326376121517456e-05, | |
| "loss": 0.0033, | |
| "step": 6540 | |
| }, | |
| { | |
| "epoch": 34.840425531914896, | |
| "grad_norm": 0.09810652583837509, | |
| "learning_rate": 2.9175938263125236e-05, | |
| "loss": 0.0041, | |
| "step": 6550 | |
| }, | |
| { | |
| "epoch": 34.8936170212766, | |
| "grad_norm": 0.15860515832901, | |
| "learning_rate": 2.9025728133335873e-05, | |
| "loss": 0.0031, | |
| "step": 6560 | |
| }, | |
| { | |
| "epoch": 34.9468085106383, | |
| "grad_norm": 0.11491245776414871, | |
| "learning_rate": 2.8875747374823288e-05, | |
| "loss": 0.0038, | |
| "step": 6570 | |
| }, | |
| { | |
| "epoch": 35.0, | |
| "grad_norm": 0.13872069120407104, | |
| "learning_rate": 2.872599762775298e-05, | |
| "loss": 0.0048, | |
| "step": 6580 | |
| }, | |
| { | |
| "epoch": 35.0531914893617, | |
| "grad_norm": 0.1380651593208313, | |
| "learning_rate": 2.857648052976425e-05, | |
| "loss": 0.0039, | |
| "step": 6590 | |
| }, | |
| { | |
| "epoch": 35.1063829787234, | |
| "grad_norm": 0.11134478449821472, | |
| "learning_rate": 2.8427197715952047e-05, | |
| "loss": 0.0032, | |
| "step": 6600 | |
| }, | |
| { | |
| "epoch": 35.159574468085104, | |
| "grad_norm": 0.08269906044006348, | |
| "learning_rate": 2.8278150818849393e-05, | |
| "loss": 0.0042, | |
| "step": 6610 | |
| }, | |
| { | |
| "epoch": 35.212765957446805, | |
| "grad_norm": 0.11090809851884842, | |
| "learning_rate": 2.812934146840922e-05, | |
| "loss": 0.0028, | |
| "step": 6620 | |
| }, | |
| { | |
| "epoch": 35.265957446808514, | |
| "grad_norm": 0.057636458426713943, | |
| "learning_rate": 2.7980771291986764e-05, | |
| "loss": 0.0022, | |
| "step": 6630 | |
| }, | |
| { | |
| "epoch": 35.319148936170215, | |
| "grad_norm": 0.0825556293129921, | |
| "learning_rate": 2.783244191432167e-05, | |
| "loss": 0.0034, | |
| "step": 6640 | |
| }, | |
| { | |
| "epoch": 35.37234042553192, | |
| "grad_norm": 0.07268603891134262, | |
| "learning_rate": 2.768435495752022e-05, | |
| "loss": 0.0029, | |
| "step": 6650 | |
| }, | |
| { | |
| "epoch": 35.42553191489362, | |
| "grad_norm": 0.11303126066923141, | |
| "learning_rate": 2.753651204103771e-05, | |
| "loss": 0.0048, | |
| "step": 6660 | |
| }, | |
| { | |
| "epoch": 35.47872340425532, | |
| "grad_norm": 0.08885964006185532, | |
| "learning_rate": 2.7388914781660523e-05, | |
| "loss": 0.0033, | |
| "step": 6670 | |
| }, | |
| { | |
| "epoch": 35.53191489361702, | |
| "grad_norm": 0.10438922792673111, | |
| "learning_rate": 2.7241564793488693e-05, | |
| "loss": 0.0036, | |
| "step": 6680 | |
| }, | |
| { | |
| "epoch": 35.58510638297872, | |
| "grad_norm": 0.09745248407125473, | |
| "learning_rate": 2.7094463687918037e-05, | |
| "loss": 0.003, | |
| "step": 6690 | |
| }, | |
| { | |
| "epoch": 35.638297872340424, | |
| "grad_norm": 0.07535896450281143, | |
| "learning_rate": 2.694761307362268e-05, | |
| "loss": 0.0037, | |
| "step": 6700 | |
| }, | |
| { | |
| "epoch": 35.691489361702125, | |
| "grad_norm": 0.09335043281316757, | |
| "learning_rate": 2.6801014556537467e-05, | |
| "loss": 0.0032, | |
| "step": 6710 | |
| }, | |
| { | |
| "epoch": 35.744680851063826, | |
| "grad_norm": 0.08827803283929825, | |
| "learning_rate": 2.6654669739840243e-05, | |
| "loss": 0.0036, | |
| "step": 6720 | |
| }, | |
| { | |
| "epoch": 35.797872340425535, | |
| "grad_norm": 0.10325001180171967, | |
| "learning_rate": 2.650858022393451e-05, | |
| "loss": 0.0053, | |
| "step": 6730 | |
| }, | |
| { | |
| "epoch": 35.851063829787236, | |
| "grad_norm": 0.11005754768848419, | |
| "learning_rate": 2.6362747606431747e-05, | |
| "loss": 0.0043, | |
| "step": 6740 | |
| }, | |
| { | |
| "epoch": 35.90425531914894, | |
| "grad_norm": 0.09084655344486237, | |
| "learning_rate": 2.6217173482134172e-05, | |
| "loss": 0.003, | |
| "step": 6750 | |
| }, | |
| { | |
| "epoch": 35.95744680851064, | |
| "grad_norm": 0.07052236795425415, | |
| "learning_rate": 2.6071859443017044e-05, | |
| "loss": 0.003, | |
| "step": 6760 | |
| }, | |
| { | |
| "epoch": 36.01063829787234, | |
| "grad_norm": 0.07960718870162964, | |
| "learning_rate": 2.5926807078211414e-05, | |
| "loss": 0.003, | |
| "step": 6770 | |
| }, | |
| { | |
| "epoch": 36.06382978723404, | |
| "grad_norm": 0.08602727949619293, | |
| "learning_rate": 2.5782017973986728e-05, | |
| "loss": 0.0033, | |
| "step": 6780 | |
| }, | |
| { | |
| "epoch": 36.11702127659574, | |
| "grad_norm": 0.12412593513727188, | |
| "learning_rate": 2.5637493713733374e-05, | |
| "loss": 0.0031, | |
| "step": 6790 | |
| }, | |
| { | |
| "epoch": 36.170212765957444, | |
| "grad_norm": 0.09140954166650772, | |
| "learning_rate": 2.549323587794559e-05, | |
| "loss": 0.0032, | |
| "step": 6800 | |
| }, | |
| { | |
| "epoch": 36.223404255319146, | |
| "grad_norm": 0.057922929525375366, | |
| "learning_rate": 2.5349246044203895e-05, | |
| "loss": 0.0025, | |
| "step": 6810 | |
| }, | |
| { | |
| "epoch": 36.276595744680854, | |
| "grad_norm": 0.0833427757024765, | |
| "learning_rate": 2.520552578715808e-05, | |
| "loss": 0.0033, | |
| "step": 6820 | |
| }, | |
| { | |
| "epoch": 36.329787234042556, | |
| "grad_norm": 0.08583550155162811, | |
| "learning_rate": 2.506207667850981e-05, | |
| "loss": 0.0034, | |
| "step": 6830 | |
| }, | |
| { | |
| "epoch": 36.38297872340426, | |
| "grad_norm": 0.051419563591480255, | |
| "learning_rate": 2.4918900286995555e-05, | |
| "loss": 0.0038, | |
| "step": 6840 | |
| }, | |
| { | |
| "epoch": 36.43617021276596, | |
| "grad_norm": 0.092071533203125, | |
| "learning_rate": 2.4775998178369458e-05, | |
| "loss": 0.0027, | |
| "step": 6850 | |
| }, | |
| { | |
| "epoch": 36.48936170212766, | |
| "grad_norm": 0.08134209364652634, | |
| "learning_rate": 2.4633371915386017e-05, | |
| "loss": 0.0037, | |
| "step": 6860 | |
| }, | |
| { | |
| "epoch": 36.54255319148936, | |
| "grad_norm": 0.1107717752456665, | |
| "learning_rate": 2.4491023057783235e-05, | |
| "loss": 0.003, | |
| "step": 6870 | |
| }, | |
| { | |
| "epoch": 36.59574468085106, | |
| "grad_norm": 0.07810019701719284, | |
| "learning_rate": 2.4348953162265375e-05, | |
| "loss": 0.0029, | |
| "step": 6880 | |
| }, | |
| { | |
| "epoch": 36.648936170212764, | |
| "grad_norm": 0.06717360764741898, | |
| "learning_rate": 2.420716378248607e-05, | |
| "loss": 0.0032, | |
| "step": 6890 | |
| }, | |
| { | |
| "epoch": 36.702127659574465, | |
| "grad_norm": 0.06955134868621826, | |
| "learning_rate": 2.4065656469031266e-05, | |
| "loss": 0.0029, | |
| "step": 6900 | |
| }, | |
| { | |
| "epoch": 36.755319148936174, | |
| "grad_norm": 0.09230419248342514, | |
| "learning_rate": 2.3924432769402268e-05, | |
| "loss": 0.0042, | |
| "step": 6910 | |
| }, | |
| { | |
| "epoch": 36.808510638297875, | |
| "grad_norm": 0.13188576698303223, | |
| "learning_rate": 2.3783494227998844e-05, | |
| "loss": 0.0032, | |
| "step": 6920 | |
| }, | |
| { | |
| "epoch": 36.861702127659576, | |
| "grad_norm": 0.13067126274108887, | |
| "learning_rate": 2.3642842386102264e-05, | |
| "loss": 0.0035, | |
| "step": 6930 | |
| }, | |
| { | |
| "epoch": 36.91489361702128, | |
| "grad_norm": 0.10605791956186295, | |
| "learning_rate": 2.3502478781858567e-05, | |
| "loss": 0.0035, | |
| "step": 6940 | |
| }, | |
| { | |
| "epoch": 36.96808510638298, | |
| "grad_norm": 0.09728527069091797, | |
| "learning_rate": 2.3362404950261628e-05, | |
| "loss": 0.0027, | |
| "step": 6950 | |
| }, | |
| { | |
| "epoch": 37.02127659574468, | |
| "grad_norm": 0.10021881759166718, | |
| "learning_rate": 2.3222622423136458e-05, | |
| "loss": 0.0026, | |
| "step": 6960 | |
| }, | |
| { | |
| "epoch": 37.07446808510638, | |
| "grad_norm": 0.10751120001077652, | |
| "learning_rate": 2.3083132729122332e-05, | |
| "loss": 0.003, | |
| "step": 6970 | |
| }, | |
| { | |
| "epoch": 37.12765957446808, | |
| "grad_norm": 0.08491197973489761, | |
| "learning_rate": 2.294393739365621e-05, | |
| "loss": 0.0029, | |
| "step": 6980 | |
| }, | |
| { | |
| "epoch": 37.180851063829785, | |
| "grad_norm": 0.10174734890460968, | |
| "learning_rate": 2.2805037938956e-05, | |
| "loss": 0.0026, | |
| "step": 6990 | |
| }, | |
| { | |
| "epoch": 37.234042553191486, | |
| "grad_norm": 0.08755861222743988, | |
| "learning_rate": 2.266643588400386e-05, | |
| "loss": 0.0027, | |
| "step": 7000 | |
| }, | |
| { | |
| "epoch": 37.287234042553195, | |
| "grad_norm": 0.07355935871601105, | |
| "learning_rate": 2.252813274452969e-05, | |
| "loss": 0.0026, | |
| "step": 7010 | |
| }, | |
| { | |
| "epoch": 37.340425531914896, | |
| "grad_norm": 0.06787097454071045, | |
| "learning_rate": 2.2390130032994427e-05, | |
| "loss": 0.0026, | |
| "step": 7020 | |
| }, | |
| { | |
| "epoch": 37.3936170212766, | |
| "grad_norm": 0.10680534690618515, | |
| "learning_rate": 2.2252429258573633e-05, | |
| "loss": 0.0031, | |
| "step": 7030 | |
| }, | |
| { | |
| "epoch": 37.4468085106383, | |
| "grad_norm": 0.12556082010269165, | |
| "learning_rate": 2.2115031927140904e-05, | |
| "loss": 0.0046, | |
| "step": 7040 | |
| }, | |
| { | |
| "epoch": 37.5, | |
| "grad_norm": 0.08649590611457825, | |
| "learning_rate": 2.1977939541251463e-05, | |
| "loss": 0.0026, | |
| "step": 7050 | |
| }, | |
| { | |
| "epoch": 37.5531914893617, | |
| "grad_norm": 0.13685281574726105, | |
| "learning_rate": 2.1841153600125684e-05, | |
| "loss": 0.0038, | |
| "step": 7060 | |
| }, | |
| { | |
| "epoch": 37.6063829787234, | |
| "grad_norm": 0.14401614665985107, | |
| "learning_rate": 2.170467559963267e-05, | |
| "loss": 0.0031, | |
| "step": 7070 | |
| }, | |
| { | |
| "epoch": 37.659574468085104, | |
| "grad_norm": 0.11651716381311417, | |
| "learning_rate": 2.1568507032273982e-05, | |
| "loss": 0.0027, | |
| "step": 7080 | |
| }, | |
| { | |
| "epoch": 37.712765957446805, | |
| "grad_norm": 0.0741334781050682, | |
| "learning_rate": 2.1432649387167264e-05, | |
| "loss": 0.0034, | |
| "step": 7090 | |
| }, | |
| { | |
| "epoch": 37.765957446808514, | |
| "grad_norm": 0.11076577007770538, | |
| "learning_rate": 2.1297104150029973e-05, | |
| "loss": 0.0029, | |
| "step": 7100 | |
| }, | |
| { | |
| "epoch": 37.819148936170215, | |
| "grad_norm": 0.09969626367092133, | |
| "learning_rate": 2.116187280316307e-05, | |
| "loss": 0.0039, | |
| "step": 7110 | |
| }, | |
| { | |
| "epoch": 37.87234042553192, | |
| "grad_norm": 0.08539766073226929, | |
| "learning_rate": 2.1026956825434908e-05, | |
| "loss": 0.0028, | |
| "step": 7120 | |
| }, | |
| { | |
| "epoch": 37.92553191489362, | |
| "grad_norm": 0.10213065147399902, | |
| "learning_rate": 2.0892357692265017e-05, | |
| "loss": 0.0047, | |
| "step": 7130 | |
| }, | |
| { | |
| "epoch": 37.97872340425532, | |
| "grad_norm": 0.08959858119487762, | |
| "learning_rate": 2.0758076875607947e-05, | |
| "loss": 0.0048, | |
| "step": 7140 | |
| }, | |
| { | |
| "epoch": 38.03191489361702, | |
| "grad_norm": 0.12130507081747055, | |
| "learning_rate": 2.0624115843937207e-05, | |
| "loss": 0.0038, | |
| "step": 7150 | |
| }, | |
| { | |
| "epoch": 38.08510638297872, | |
| "grad_norm": 0.10663936287164688, | |
| "learning_rate": 2.0490476062229157e-05, | |
| "loss": 0.0042, | |
| "step": 7160 | |
| }, | |
| { | |
| "epoch": 38.138297872340424, | |
| "grad_norm": 0.11199136823415756, | |
| "learning_rate": 2.035715899194704e-05, | |
| "loss": 0.0048, | |
| "step": 7170 | |
| }, | |
| { | |
| "epoch": 38.191489361702125, | |
| "grad_norm": 0.08271807432174683, | |
| "learning_rate": 2.022416609102499e-05, | |
| "loss": 0.0038, | |
| "step": 7180 | |
| }, | |
| { | |
| "epoch": 38.244680851063826, | |
| "grad_norm": 0.0905468761920929, | |
| "learning_rate": 2.009149881385205e-05, | |
| "loss": 0.0034, | |
| "step": 7190 | |
| }, | |
| { | |
| "epoch": 38.297872340425535, | |
| "grad_norm": 0.08787393569946289, | |
| "learning_rate": 1.995915861125634e-05, | |
| "loss": 0.0038, | |
| "step": 7200 | |
| }, | |
| { | |
| "epoch": 38.351063829787236, | |
| "grad_norm": 0.08496265113353729, | |
| "learning_rate": 1.9827146930489065e-05, | |
| "loss": 0.0027, | |
| "step": 7210 | |
| }, | |
| { | |
| "epoch": 38.40425531914894, | |
| "grad_norm": 0.08889112621545792, | |
| "learning_rate": 1.9695465215208848e-05, | |
| "loss": 0.0024, | |
| "step": 7220 | |
| }, | |
| { | |
| "epoch": 38.45744680851064, | |
| "grad_norm": 0.08953122794628143, | |
| "learning_rate": 1.9564114905465813e-05, | |
| "loss": 0.0034, | |
| "step": 7230 | |
| }, | |
| { | |
| "epoch": 38.51063829787234, | |
| "grad_norm": 0.07801223546266556, | |
| "learning_rate": 1.9433097437685936e-05, | |
| "loss": 0.003, | |
| "step": 7240 | |
| }, | |
| { | |
| "epoch": 38.56382978723404, | |
| "grad_norm": 0.09836780279874802, | |
| "learning_rate": 1.930241424465521e-05, | |
| "loss": 0.0035, | |
| "step": 7250 | |
| }, | |
| { | |
| "epoch": 38.61702127659574, | |
| "grad_norm": 0.08662564307451248, | |
| "learning_rate": 1.9172066755504115e-05, | |
| "loss": 0.0033, | |
| "step": 7260 | |
| }, | |
| { | |
| "epoch": 38.670212765957444, | |
| "grad_norm": 0.10797368735074997, | |
| "learning_rate": 1.9042056395691914e-05, | |
| "loss": 0.0026, | |
| "step": 7270 | |
| }, | |
| { | |
| "epoch": 38.723404255319146, | |
| "grad_norm": 0.1386696994304657, | |
| "learning_rate": 1.8912384586991066e-05, | |
| "loss": 0.0039, | |
| "step": 7280 | |
| }, | |
| { | |
| "epoch": 38.776595744680854, | |
| "grad_norm": 0.07777447253465652, | |
| "learning_rate": 1.8783052747471717e-05, | |
| "loss": 0.0027, | |
| "step": 7290 | |
| }, | |
| { | |
| "epoch": 38.829787234042556, | |
| "grad_norm": 0.07893183827400208, | |
| "learning_rate": 1.865406229148611e-05, | |
| "loss": 0.0029, | |
| "step": 7300 | |
| }, | |
| { | |
| "epoch": 38.88297872340426, | |
| "grad_norm": 0.0996665209531784, | |
| "learning_rate": 1.8525414629653233e-05, | |
| "loss": 0.0032, | |
| "step": 7310 | |
| }, | |
| { | |
| "epoch": 38.93617021276596, | |
| "grad_norm": 0.08222153782844543, | |
| "learning_rate": 1.8397111168843255e-05, | |
| "loss": 0.003, | |
| "step": 7320 | |
| }, | |
| { | |
| "epoch": 38.98936170212766, | |
| "grad_norm": 0.1121872141957283, | |
| "learning_rate": 1.8269153312162323e-05, | |
| "loss": 0.0036, | |
| "step": 7330 | |
| }, | |
| { | |
| "epoch": 39.04255319148936, | |
| "grad_norm": 0.12059096246957779, | |
| "learning_rate": 1.8141542458937054e-05, | |
| "loss": 0.0047, | |
| "step": 7340 | |
| }, | |
| { | |
| "epoch": 39.09574468085106, | |
| "grad_norm": 0.07197275012731552, | |
| "learning_rate": 1.8014280004699268e-05, | |
| "loss": 0.0027, | |
| "step": 7350 | |
| }, | |
| { | |
| "epoch": 39.148936170212764, | |
| "grad_norm": 0.11810290813446045, | |
| "learning_rate": 1.788736734117078e-05, | |
| "loss": 0.0033, | |
| "step": 7360 | |
| }, | |
| { | |
| "epoch": 39.202127659574465, | |
| "grad_norm": 0.08631691336631775, | |
| "learning_rate": 1.7760805856248152e-05, | |
| "loss": 0.0027, | |
| "step": 7370 | |
| }, | |
| { | |
| "epoch": 39.255319148936174, | |
| "grad_norm": 0.07374894618988037, | |
| "learning_rate": 1.7634596933987518e-05, | |
| "loss": 0.0043, | |
| "step": 7380 | |
| }, | |
| { | |
| "epoch": 39.308510638297875, | |
| "grad_norm": 0.08490117639303207, | |
| "learning_rate": 1.7508741954589404e-05, | |
| "loss": 0.0028, | |
| "step": 7390 | |
| }, | |
| { | |
| "epoch": 39.361702127659576, | |
| "grad_norm": 0.08613576740026474, | |
| "learning_rate": 1.7383242294383717e-05, | |
| "loss": 0.003, | |
| "step": 7400 | |
| }, | |
| { | |
| "epoch": 39.41489361702128, | |
| "grad_norm": 0.05537436902523041, | |
| "learning_rate": 1.7258099325814632e-05, | |
| "loss": 0.0025, | |
| "step": 7410 | |
| }, | |
| { | |
| "epoch": 39.46808510638298, | |
| "grad_norm": 0.06878998875617981, | |
| "learning_rate": 1.7133314417425594e-05, | |
| "loss": 0.0033, | |
| "step": 7420 | |
| }, | |
| { | |
| "epoch": 39.52127659574468, | |
| "grad_norm": 0.05284791439771652, | |
| "learning_rate": 1.7008888933844408e-05, | |
| "loss": 0.0028, | |
| "step": 7430 | |
| }, | |
| { | |
| "epoch": 39.57446808510638, | |
| "grad_norm": 0.07566659897565842, | |
| "learning_rate": 1.6884824235768172e-05, | |
| "loss": 0.0028, | |
| "step": 7440 | |
| }, | |
| { | |
| "epoch": 39.62765957446808, | |
| "grad_norm": 0.05655248463153839, | |
| "learning_rate": 1.6761121679948592e-05, | |
| "loss": 0.0036, | |
| "step": 7450 | |
| }, | |
| { | |
| "epoch": 39.680851063829785, | |
| "grad_norm": 0.0711337998509407, | |
| "learning_rate": 1.663778261917695e-05, | |
| "loss": 0.0023, | |
| "step": 7460 | |
| }, | |
| { | |
| "epoch": 39.734042553191486, | |
| "grad_norm": 0.06308384984731674, | |
| "learning_rate": 1.651480840226952e-05, | |
| "loss": 0.003, | |
| "step": 7470 | |
| }, | |
| { | |
| "epoch": 39.787234042553195, | |
| "grad_norm": 0.07132750004529953, | |
| "learning_rate": 1.639220037405258e-05, | |
| "loss": 0.0023, | |
| "step": 7480 | |
| }, | |
| { | |
| "epoch": 39.840425531914896, | |
| "grad_norm": 0.11412476003170013, | |
| "learning_rate": 1.6269959875347906e-05, | |
| "loss": 0.0047, | |
| "step": 7490 | |
| }, | |
| { | |
| "epoch": 39.8936170212766, | |
| "grad_norm": 0.061417169868946075, | |
| "learning_rate": 1.614808824295802e-05, | |
| "loss": 0.0024, | |
| "step": 7500 | |
| }, | |
| { | |
| "epoch": 39.9468085106383, | |
| "grad_norm": 0.09479163587093353, | |
| "learning_rate": 1.602658680965152e-05, | |
| "loss": 0.0026, | |
| "step": 7510 | |
| }, | |
| { | |
| "epoch": 40.0, | |
| "grad_norm": 0.11803433299064636, | |
| "learning_rate": 1.5905456904148686e-05, | |
| "loss": 0.0025, | |
| "step": 7520 | |
| }, | |
| { | |
| "epoch": 40.0531914893617, | |
| "grad_norm": 0.08452431857585907, | |
| "learning_rate": 1.57846998511067e-05, | |
| "loss": 0.0026, | |
| "step": 7530 | |
| }, | |
| { | |
| "epoch": 40.1063829787234, | |
| "grad_norm": 0.07583118230104446, | |
| "learning_rate": 1.566431697110538e-05, | |
| "loss": 0.0021, | |
| "step": 7540 | |
| }, | |
| { | |
| "epoch": 40.159574468085104, | |
| "grad_norm": 0.07186367362737656, | |
| "learning_rate": 1.554430958063259e-05, | |
| "loss": 0.0032, | |
| "step": 7550 | |
| }, | |
| { | |
| "epoch": 40.212765957446805, | |
| "grad_norm": 0.10631489753723145, | |
| "learning_rate": 1.5424678992069912e-05, | |
| "loss": 0.0027, | |
| "step": 7560 | |
| }, | |
| { | |
| "epoch": 40.265957446808514, | |
| "grad_norm": 0.10350894182920456, | |
| "learning_rate": 1.5305426513678362e-05, | |
| "loss": 0.0034, | |
| "step": 7570 | |
| }, | |
| { | |
| "epoch": 40.319148936170215, | |
| "grad_norm": 0.09066586196422577, | |
| "learning_rate": 1.518655344958388e-05, | |
| "loss": 0.0037, | |
| "step": 7580 | |
| }, | |
| { | |
| "epoch": 40.37234042553192, | |
| "grad_norm": 0.10660271346569061, | |
| "learning_rate": 1.5068061099763275e-05, | |
| "loss": 0.0035, | |
| "step": 7590 | |
| }, | |
| { | |
| "epoch": 40.42553191489362, | |
| "grad_norm": 0.14096103608608246, | |
| "learning_rate": 1.494995076002988e-05, | |
| "loss": 0.0038, | |
| "step": 7600 | |
| }, | |
| { | |
| "epoch": 40.47872340425532, | |
| "grad_norm": 0.08838651329278946, | |
| "learning_rate": 1.4832223722019456e-05, | |
| "loss": 0.0029, | |
| "step": 7610 | |
| }, | |
| { | |
| "epoch": 40.53191489361702, | |
| "grad_norm": 0.10196083039045334, | |
| "learning_rate": 1.4714881273176035e-05, | |
| "loss": 0.005, | |
| "step": 7620 | |
| }, | |
| { | |
| "epoch": 40.58510638297872, | |
| "grad_norm": 0.08884672820568085, | |
| "learning_rate": 1.4597924696737835e-05, | |
| "loss": 0.0024, | |
| "step": 7630 | |
| }, | |
| { | |
| "epoch": 40.638297872340424, | |
| "grad_norm": 0.11271373927593231, | |
| "learning_rate": 1.4481355271723252e-05, | |
| "loss": 0.0027, | |
| "step": 7640 | |
| }, | |
| { | |
| "epoch": 40.691489361702125, | |
| "grad_norm": 0.043638743460178375, | |
| "learning_rate": 1.4365174272916809e-05, | |
| "loss": 0.0025, | |
| "step": 7650 | |
| }, | |
| { | |
| "epoch": 40.744680851063826, | |
| "grad_norm": 0.0915859192609787, | |
| "learning_rate": 1.4249382970855319e-05, | |
| "loss": 0.0044, | |
| "step": 7660 | |
| }, | |
| { | |
| "epoch": 40.797872340425535, | |
| "grad_norm": 0.06853626668453217, | |
| "learning_rate": 1.4133982631813903e-05, | |
| "loss": 0.0023, | |
| "step": 7670 | |
| }, | |
| { | |
| "epoch": 40.851063829787236, | |
| "grad_norm": 0.058651868253946304, | |
| "learning_rate": 1.4018974517792194e-05, | |
| "loss": 0.0024, | |
| "step": 7680 | |
| }, | |
| { | |
| "epoch": 40.90425531914894, | |
| "grad_norm": 0.08537851274013519, | |
| "learning_rate": 1.390435988650048e-05, | |
| "loss": 0.003, | |
| "step": 7690 | |
| }, | |
| { | |
| "epoch": 40.95744680851064, | |
| "grad_norm": 0.059476338326931, | |
| "learning_rate": 1.3790139991346006e-05, | |
| "loss": 0.0031, | |
| "step": 7700 | |
| }, | |
| { | |
| "epoch": 41.01063829787234, | |
| "grad_norm": 0.06727033108472824, | |
| "learning_rate": 1.367631608141926e-05, | |
| "loss": 0.0024, | |
| "step": 7710 | |
| }, | |
| { | |
| "epoch": 41.06382978723404, | |
| "grad_norm": 0.10193111002445221, | |
| "learning_rate": 1.3562889401480278e-05, | |
| "loss": 0.0035, | |
| "step": 7720 | |
| }, | |
| { | |
| "epoch": 41.11702127659574, | |
| "grad_norm": 0.07144525647163391, | |
| "learning_rate": 1.3449861191945074e-05, | |
| "loss": 0.002, | |
| "step": 7730 | |
| }, | |
| { | |
| "epoch": 41.170212765957444, | |
| "grad_norm": 0.06083846837282181, | |
| "learning_rate": 1.3337232688872009e-05, | |
| "loss": 0.0024, | |
| "step": 7740 | |
| }, | |
| { | |
| "epoch": 41.223404255319146, | |
| "grad_norm": 0.04304542765021324, | |
| "learning_rate": 1.3225005123948364e-05, | |
| "loss": 0.0029, | |
| "step": 7750 | |
| }, | |
| { | |
| "epoch": 41.276595744680854, | |
| "grad_norm": 0.06175091490149498, | |
| "learning_rate": 1.311317972447681e-05, | |
| "loss": 0.0025, | |
| "step": 7760 | |
| }, | |
| { | |
| "epoch": 41.329787234042556, | |
| "grad_norm": 0.07543773204088211, | |
| "learning_rate": 1.3001757713361996e-05, | |
| "loss": 0.0026, | |
| "step": 7770 | |
| }, | |
| { | |
| "epoch": 41.38297872340426, | |
| "grad_norm": 0.09967610985040665, | |
| "learning_rate": 1.2890740309097204e-05, | |
| "loss": 0.0058, | |
| "step": 7780 | |
| }, | |
| { | |
| "epoch": 41.43617021276596, | |
| "grad_norm": 0.10137089341878891, | |
| "learning_rate": 1.2780128725750944e-05, | |
| "loss": 0.0026, | |
| "step": 7790 | |
| }, | |
| { | |
| "epoch": 41.48936170212766, | |
| "grad_norm": 0.07548239082098007, | |
| "learning_rate": 1.266992417295379e-05, | |
| "loss": 0.0033, | |
| "step": 7800 | |
| }, | |
| { | |
| "epoch": 41.54255319148936, | |
| "grad_norm": 0.08309435844421387, | |
| "learning_rate": 1.2560127855885073e-05, | |
| "loss": 0.0024, | |
| "step": 7810 | |
| }, | |
| { | |
| "epoch": 41.59574468085106, | |
| "grad_norm": 0.1144685372710228, | |
| "learning_rate": 1.2450740975259745e-05, | |
| "loss": 0.0033, | |
| "step": 7820 | |
| }, | |
| { | |
| "epoch": 41.648936170212764, | |
| "grad_norm": 0.08674435317516327, | |
| "learning_rate": 1.234176472731517e-05, | |
| "loss": 0.0033, | |
| "step": 7830 | |
| }, | |
| { | |
| "epoch": 41.702127659574465, | |
| "grad_norm": 0.10484985262155533, | |
| "learning_rate": 1.2233200303798158e-05, | |
| "loss": 0.0029, | |
| "step": 7840 | |
| }, | |
| { | |
| "epoch": 41.755319148936174, | |
| "grad_norm": 0.12625475227832794, | |
| "learning_rate": 1.2125048891951846e-05, | |
| "loss": 0.003, | |
| "step": 7850 | |
| }, | |
| { | |
| "epoch": 41.808510638297875, | |
| "grad_norm": 0.15388774871826172, | |
| "learning_rate": 1.2017311674502745e-05, | |
| "loss": 0.0032, | |
| "step": 7860 | |
| }, | |
| { | |
| "epoch": 41.861702127659576, | |
| "grad_norm": 0.0795295313000679, | |
| "learning_rate": 1.1909989829647822e-05, | |
| "loss": 0.0023, | |
| "step": 7870 | |
| }, | |
| { | |
| "epoch": 41.91489361702128, | |
| "grad_norm": 0.05043603107333183, | |
| "learning_rate": 1.1803084531041553e-05, | |
| "loss": 0.0025, | |
| "step": 7880 | |
| }, | |
| { | |
| "epoch": 41.96808510638298, | |
| "grad_norm": 0.057318609207868576, | |
| "learning_rate": 1.1696596947783162e-05, | |
| "loss": 0.0019, | |
| "step": 7890 | |
| }, | |
| { | |
| "epoch": 42.02127659574468, | |
| "grad_norm": 0.06033405289053917, | |
| "learning_rate": 1.1590528244403803e-05, | |
| "loss": 0.0022, | |
| "step": 7900 | |
| }, | |
| { | |
| "epoch": 42.07446808510638, | |
| "grad_norm": 0.06762022525072098, | |
| "learning_rate": 1.148487958085382e-05, | |
| "loss": 0.0023, | |
| "step": 7910 | |
| }, | |
| { | |
| "epoch": 42.12765957446808, | |
| "grad_norm": 0.11204301565885544, | |
| "learning_rate": 1.1379652112490086e-05, | |
| "loss": 0.0023, | |
| "step": 7920 | |
| }, | |
| { | |
| "epoch": 42.180851063829785, | |
| "grad_norm": 0.04214387387037277, | |
| "learning_rate": 1.1274846990063315e-05, | |
| "loss": 0.0028, | |
| "step": 7930 | |
| }, | |
| { | |
| "epoch": 42.234042553191486, | |
| "grad_norm": 0.0785481259226799, | |
| "learning_rate": 1.117046535970554e-05, | |
| "loss": 0.003, | |
| "step": 7940 | |
| }, | |
| { | |
| "epoch": 42.287234042553195, | |
| "grad_norm": 0.06954558193683624, | |
| "learning_rate": 1.106650836291755e-05, | |
| "loss": 0.0027, | |
| "step": 7950 | |
| }, | |
| { | |
| "epoch": 42.340425531914896, | |
| "grad_norm": 0.06954865157604218, | |
| "learning_rate": 1.0962977136556418e-05, | |
| "loss": 0.0028, | |
| "step": 7960 | |
| }, | |
| { | |
| "epoch": 42.3936170212766, | |
| "grad_norm": 0.04941001534461975, | |
| "learning_rate": 1.0859872812823024e-05, | |
| "loss": 0.0021, | |
| "step": 7970 | |
| }, | |
| { | |
| "epoch": 42.4468085106383, | |
| "grad_norm": 0.05050547793507576, | |
| "learning_rate": 1.0757196519249747e-05, | |
| "loss": 0.0025, | |
| "step": 7980 | |
| }, | |
| { | |
| "epoch": 42.5, | |
| "grad_norm": 0.06856332719326019, | |
| "learning_rate": 1.0654949378688077e-05, | |
| "loss": 0.0028, | |
| "step": 7990 | |
| }, | |
| { | |
| "epoch": 42.5531914893617, | |
| "grad_norm": 0.06205028295516968, | |
| "learning_rate": 1.0553132509296376e-05, | |
| "loss": 0.0026, | |
| "step": 8000 | |
| }, | |
| { | |
| "epoch": 42.6063829787234, | |
| "grad_norm": 0.04532815143465996, | |
| "learning_rate": 1.0451747024527613e-05, | |
| "loss": 0.002, | |
| "step": 8010 | |
| }, | |
| { | |
| "epoch": 42.659574468085104, | |
| "grad_norm": 0.07619915157556534, | |
| "learning_rate": 1.0350794033117189e-05, | |
| "loss": 0.0023, | |
| "step": 8020 | |
| }, | |
| { | |
| "epoch": 42.712765957446805, | |
| "grad_norm": 0.06983453780412674, | |
| "learning_rate": 1.0250274639070856e-05, | |
| "loss": 0.0019, | |
| "step": 8030 | |
| }, | |
| { | |
| "epoch": 42.765957446808514, | |
| "grad_norm": 0.04398789629340172, | |
| "learning_rate": 1.0150189941652599e-05, | |
| "loss": 0.0024, | |
| "step": 8040 | |
| }, | |
| { | |
| "epoch": 42.819148936170215, | |
| "grad_norm": 0.058723773807287216, | |
| "learning_rate": 1.0050541035372635e-05, | |
| "loss": 0.003, | |
| "step": 8050 | |
| }, | |
| { | |
| "epoch": 42.87234042553192, | |
| "grad_norm": 0.05902779474854469, | |
| "learning_rate": 9.951329009975458e-06, | |
| "loss": 0.0021, | |
| "step": 8060 | |
| }, | |
| { | |
| "epoch": 42.92553191489362, | |
| "grad_norm": 0.07050108164548874, | |
| "learning_rate": 9.852554950427845e-06, | |
| "loss": 0.0024, | |
| "step": 8070 | |
| }, | |
| { | |
| "epoch": 42.97872340425532, | |
| "grad_norm": 0.06568225473165512, | |
| "learning_rate": 9.754219936907105e-06, | |
| "loss": 0.0025, | |
| "step": 8080 | |
| }, | |
| { | |
| "epoch": 43.03191489361702, | |
| "grad_norm": 0.06125905364751816, | |
| "learning_rate": 9.656325044789194e-06, | |
| "loss": 0.0023, | |
| "step": 8090 | |
| }, | |
| { | |
| "epoch": 43.08510638297872, | |
| "grad_norm": 0.045195359736680984, | |
| "learning_rate": 9.55887134463697e-06, | |
| "loss": 0.0021, | |
| "step": 8100 | |
| }, | |
| { | |
| "epoch": 43.138297872340424, | |
| "grad_norm": 0.05011230334639549, | |
| "learning_rate": 9.461859902188475e-06, | |
| "loss": 0.0035, | |
| "step": 8110 | |
| }, | |
| { | |
| "epoch": 43.191489361702125, | |
| "grad_norm": 0.07293812930583954, | |
| "learning_rate": 9.365291778345303e-06, | |
| "loss": 0.0026, | |
| "step": 8120 | |
| }, | |
| { | |
| "epoch": 43.244680851063826, | |
| "grad_norm": 0.07763762772083282, | |
| "learning_rate": 9.269168029160991e-06, | |
| "loss": 0.0033, | |
| "step": 8130 | |
| }, | |
| { | |
| "epoch": 43.297872340425535, | |
| "grad_norm": 0.10731100291013718, | |
| "learning_rate": 9.173489705829447e-06, | |
| "loss": 0.0024, | |
| "step": 8140 | |
| }, | |
| { | |
| "epoch": 43.351063829787236, | |
| "grad_norm": 0.06918442249298096, | |
| "learning_rate": 9.078257854673516e-06, | |
| "loss": 0.0021, | |
| "step": 8150 | |
| }, | |
| { | |
| "epoch": 43.40425531914894, | |
| "grad_norm": 0.08106331527233124, | |
| "learning_rate": 8.983473517133429e-06, | |
| "loss": 0.0028, | |
| "step": 8160 | |
| }, | |
| { | |
| "epoch": 43.45744680851064, | |
| "grad_norm": 0.0482093021273613, | |
| "learning_rate": 8.889137729755537e-06, | |
| "loss": 0.0029, | |
| "step": 8170 | |
| }, | |
| { | |
| "epoch": 43.51063829787234, | |
| "grad_norm": 0.07667967677116394, | |
| "learning_rate": 8.79525152418087e-06, | |
| "loss": 0.0036, | |
| "step": 8180 | |
| }, | |
| { | |
| "epoch": 43.56382978723404, | |
| "grad_norm": 0.0798843502998352, | |
| "learning_rate": 8.701815927133961e-06, | |
| "loss": 0.0038, | |
| "step": 8190 | |
| }, | |
| { | |
| "epoch": 43.61702127659574, | |
| "grad_norm": 0.10639572143554688, | |
| "learning_rate": 8.608831960411534e-06, | |
| "loss": 0.0034, | |
| "step": 8200 | |
| }, | |
| { | |
| "epoch": 43.670212765957444, | |
| "grad_norm": 0.058767713606357574, | |
| "learning_rate": 8.516300640871321e-06, | |
| "loss": 0.0023, | |
| "step": 8210 | |
| }, | |
| { | |
| "epoch": 43.723404255319146, | |
| "grad_norm": 0.048779603093862534, | |
| "learning_rate": 8.424222980421038e-06, | |
| "loss": 0.0026, | |
| "step": 8220 | |
| }, | |
| { | |
| "epoch": 43.776595744680854, | |
| "grad_norm": 0.09605692327022552, | |
| "learning_rate": 8.332599986007184e-06, | |
| "loss": 0.0022, | |
| "step": 8230 | |
| }, | |
| { | |
| "epoch": 43.829787234042556, | |
| "grad_norm": 0.0503205768764019, | |
| "learning_rate": 8.241432659604203e-06, | |
| "loss": 0.0025, | |
| "step": 8240 | |
| }, | |
| { | |
| "epoch": 43.88297872340426, | |
| "grad_norm": 0.07670602202415466, | |
| "learning_rate": 8.150721998203331e-06, | |
| "loss": 0.0026, | |
| "step": 8250 | |
| }, | |
| { | |
| "epoch": 43.93617021276596, | |
| "grad_norm": 0.05223650112748146, | |
| "learning_rate": 8.06046899380184e-06, | |
| "loss": 0.002, | |
| "step": 8260 | |
| }, | |
| { | |
| "epoch": 43.98936170212766, | |
| "grad_norm": 0.05128388851881027, | |
| "learning_rate": 7.970674633392133e-06, | |
| "loss": 0.0025, | |
| "step": 8270 | |
| }, | |
| { | |
| "epoch": 44.04255319148936, | |
| "grad_norm": 0.0417180098593235, | |
| "learning_rate": 7.881339898950924e-06, | |
| "loss": 0.0018, | |
| "step": 8280 | |
| }, | |
| { | |
| "epoch": 44.09574468085106, | |
| "grad_norm": 0.08264292776584625, | |
| "learning_rate": 7.792465767428597e-06, | |
| "loss": 0.0021, | |
| "step": 8290 | |
| }, | |
| { | |
| "epoch": 44.148936170212764, | |
| "grad_norm": 0.0884149819612503, | |
| "learning_rate": 7.704053210738376e-06, | |
| "loss": 0.0029, | |
| "step": 8300 | |
| }, | |
| { | |
| "epoch": 44.202127659574465, | |
| "grad_norm": 0.04234097898006439, | |
| "learning_rate": 7.6161031957458494e-06, | |
| "loss": 0.003, | |
| "step": 8310 | |
| }, | |
| { | |
| "epoch": 44.255319148936174, | |
| "grad_norm": 0.08253144472837448, | |
| "learning_rate": 7.5286166842582605e-06, | |
| "loss": 0.003, | |
| "step": 8320 | |
| }, | |
| { | |
| "epoch": 44.308510638297875, | |
| "grad_norm": 0.0639602392911911, | |
| "learning_rate": 7.4415946330140814e-06, | |
| "loss": 0.0031, | |
| "step": 8330 | |
| }, | |
| { | |
| "epoch": 44.361702127659576, | |
| "grad_norm": 0.0564262792468071, | |
| "learning_rate": 7.3550379936725644e-06, | |
| "loss": 0.0025, | |
| "step": 8340 | |
| }, | |
| { | |
| "epoch": 44.41489361702128, | |
| "grad_norm": 0.06634574383497238, | |
| "learning_rate": 7.2689477128032035e-06, | |
| "loss": 0.0024, | |
| "step": 8350 | |
| }, | |
| { | |
| "epoch": 44.46808510638298, | |
| "grad_norm": 0.041368499398231506, | |
| "learning_rate": 7.183324731875551e-06, | |
| "loss": 0.0027, | |
| "step": 8360 | |
| }, | |
| { | |
| "epoch": 44.52127659574468, | |
| "grad_norm": 0.05097515508532524, | |
| "learning_rate": 7.098169987248782e-06, | |
| "loss": 0.0016, | |
| "step": 8370 | |
| }, | |
| { | |
| "epoch": 44.57446808510638, | |
| "grad_norm": 0.07151399552822113, | |
| "learning_rate": 7.013484410161553e-06, | |
| "loss": 0.0026, | |
| "step": 8380 | |
| }, | |
| { | |
| "epoch": 44.62765957446808, | |
| "grad_norm": 0.08623804897069931, | |
| "learning_rate": 6.92926892672176e-06, | |
| "loss": 0.0024, | |
| "step": 8390 | |
| }, | |
| { | |
| "epoch": 44.680851063829785, | |
| "grad_norm": 0.07321808487176895, | |
| "learning_rate": 6.845524457896446e-06, | |
| "loss": 0.005, | |
| "step": 8400 | |
| }, | |
| { | |
| "epoch": 44.734042553191486, | |
| "grad_norm": 0.07570762187242508, | |
| "learning_rate": 6.7622519195017165e-06, | |
| "loss": 0.0021, | |
| "step": 8410 | |
| }, | |
| { | |
| "epoch": 44.787234042553195, | |
| "grad_norm": 0.04331159219145775, | |
| "learning_rate": 6.679452222192684e-06, | |
| "loss": 0.0018, | |
| "step": 8420 | |
| }, | |
| { | |
| "epoch": 44.840425531914896, | |
| "grad_norm": 0.05139905959367752, | |
| "learning_rate": 6.597126271453579e-06, | |
| "loss": 0.0024, | |
| "step": 8430 | |
| }, | |
| { | |
| "epoch": 44.8936170212766, | |
| "grad_norm": 0.05808750167489052, | |
| "learning_rate": 6.51527496758782e-06, | |
| "loss": 0.002, | |
| "step": 8440 | |
| }, | |
| { | |
| "epoch": 44.9468085106383, | |
| "grad_norm": 0.06475529074668884, | |
| "learning_rate": 6.433899205708155e-06, | |
| "loss": 0.0035, | |
| "step": 8450 | |
| }, | |
| { | |
| "epoch": 45.0, | |
| "grad_norm": 0.11789180338382721, | |
| "learning_rate": 6.352999875726856e-06, | |
| "loss": 0.0023, | |
| "step": 8460 | |
| }, | |
| { | |
| "epoch": 45.0531914893617, | |
| "grad_norm": 0.11120691895484924, | |
| "learning_rate": 6.272577862346052e-06, | |
| "loss": 0.003, | |
| "step": 8470 | |
| }, | |
| { | |
| "epoch": 45.1063829787234, | |
| "grad_norm": 0.04893869534134865, | |
| "learning_rate": 6.192634045047996e-06, | |
| "loss": 0.0033, | |
| "step": 8480 | |
| }, | |
| { | |
| "epoch": 45.159574468085104, | |
| "grad_norm": 0.0998014286160469, | |
| "learning_rate": 6.113169298085458e-06, | |
| "loss": 0.0033, | |
| "step": 8490 | |
| }, | |
| { | |
| "epoch": 45.212765957446805, | |
| "grad_norm": 0.06577343493700027, | |
| "learning_rate": 6.034184490472195e-06, | |
| "loss": 0.0029, | |
| "step": 8500 | |
| }, | |
| { | |
| "epoch": 45.265957446808514, | |
| "grad_norm": 0.058120228350162506, | |
| "learning_rate": 5.955680485973386e-06, | |
| "loss": 0.0039, | |
| "step": 8510 | |
| }, | |
| { | |
| "epoch": 45.319148936170215, | |
| "grad_norm": 0.061770133674144745, | |
| "learning_rate": 5.877658143096265e-06, | |
| "loss": 0.0021, | |
| "step": 8520 | |
| }, | |
| { | |
| "epoch": 45.37234042553192, | |
| "grad_norm": 0.1257171332836151, | |
| "learning_rate": 5.800118315080661e-06, | |
| "loss": 0.003, | |
| "step": 8530 | |
| }, | |
| { | |
| "epoch": 45.42553191489362, | |
| "grad_norm": 0.051821257919073105, | |
| "learning_rate": 5.723061849889716e-06, | |
| "loss": 0.0019, | |
| "step": 8540 | |
| }, | |
| { | |
| "epoch": 45.47872340425532, | |
| "grad_norm": 0.052124377340078354, | |
| "learning_rate": 5.646489590200604e-06, | |
| "loss": 0.0022, | |
| "step": 8550 | |
| }, | |
| { | |
| "epoch": 45.53191489361702, | |
| "grad_norm": 0.03840922564268112, | |
| "learning_rate": 5.570402373395256e-06, | |
| "loss": 0.0024, | |
| "step": 8560 | |
| }, | |
| { | |
| "epoch": 45.58510638297872, | |
| "grad_norm": 0.03678499162197113, | |
| "learning_rate": 5.494801031551305e-06, | |
| "loss": 0.0028, | |
| "step": 8570 | |
| }, | |
| { | |
| "epoch": 45.638297872340424, | |
| "grad_norm": 0.07335398346185684, | |
| "learning_rate": 5.41968639143291e-06, | |
| "loss": 0.0019, | |
| "step": 8580 | |
| }, | |
| { | |
| "epoch": 45.691489361702125, | |
| "grad_norm": 0.10466412454843521, | |
| "learning_rate": 5.345059274481751e-06, | |
| "loss": 0.0026, | |
| "step": 8590 | |
| }, | |
| { | |
| "epoch": 45.744680851063826, | |
| "grad_norm": 0.08951956778764725, | |
| "learning_rate": 5.270920496808002e-06, | |
| "loss": 0.0026, | |
| "step": 8600 | |
| }, | |
| { | |
| "epoch": 45.797872340425535, | |
| "grad_norm": 0.07427313923835754, | |
| "learning_rate": 5.1972708691814695e-06, | |
| "loss": 0.003, | |
| "step": 8610 | |
| }, | |
| { | |
| "epoch": 45.851063829787236, | |
| "grad_norm": 0.04322253167629242, | |
| "learning_rate": 5.124111197022674e-06, | |
| "loss": 0.0033, | |
| "step": 8620 | |
| }, | |
| { | |
| "epoch": 45.90425531914894, | |
| "grad_norm": 0.10772758722305298, | |
| "learning_rate": 5.051442280394081e-06, | |
| "loss": 0.0028, | |
| "step": 8630 | |
| }, | |
| { | |
| "epoch": 45.95744680851064, | |
| "grad_norm": 0.046713996678590775, | |
| "learning_rate": 4.979264913991322e-06, | |
| "loss": 0.0042, | |
| "step": 8640 | |
| }, | |
| { | |
| "epoch": 46.01063829787234, | |
| "grad_norm": 0.10422065854072571, | |
| "learning_rate": 4.907579887134489e-06, | |
| "loss": 0.0031, | |
| "step": 8650 | |
| }, | |
| { | |
| "epoch": 46.06382978723404, | |
| "grad_norm": 0.09733374416828156, | |
| "learning_rate": 4.836387983759572e-06, | |
| "loss": 0.0023, | |
| "step": 8660 | |
| }, | |
| { | |
| "epoch": 46.11702127659574, | |
| "grad_norm": 0.0919572040438652, | |
| "learning_rate": 4.765689982409816e-06, | |
| "loss": 0.0031, | |
| "step": 8670 | |
| }, | |
| { | |
| "epoch": 46.170212765957444, | |
| "grad_norm": 0.049500588327646255, | |
| "learning_rate": 4.695486656227233e-06, | |
| "loss": 0.0031, | |
| "step": 8680 | |
| }, | |
| { | |
| "epoch": 46.223404255319146, | |
| "grad_norm": 0.07581859081983566, | |
| "learning_rate": 4.625778772944156e-06, | |
| "loss": 0.0034, | |
| "step": 8690 | |
| }, | |
| { | |
| "epoch": 46.276595744680854, | |
| "grad_norm": 0.08395475894212723, | |
| "learning_rate": 4.556567094874825e-06, | |
| "loss": 0.0027, | |
| "step": 8700 | |
| }, | |
| { | |
| "epoch": 46.329787234042556, | |
| "grad_norm": 0.044549595564603806, | |
| "learning_rate": 4.487852378907059e-06, | |
| "loss": 0.0026, | |
| "step": 8710 | |
| }, | |
| { | |
| "epoch": 46.38297872340426, | |
| "grad_norm": 0.08976976573467255, | |
| "learning_rate": 4.419635376493986e-06, | |
| "loss": 0.0024, | |
| "step": 8720 | |
| }, | |
| { | |
| "epoch": 46.43617021276596, | |
| "grad_norm": 0.05146001651883125, | |
| "learning_rate": 4.351916833645825e-06, | |
| "loss": 0.003, | |
| "step": 8730 | |
| }, | |
| { | |
| "epoch": 46.48936170212766, | |
| "grad_norm": 0.058233533054590225, | |
| "learning_rate": 4.284697490921691e-06, | |
| "loss": 0.0025, | |
| "step": 8740 | |
| }, | |
| { | |
| "epoch": 46.54255319148936, | |
| "grad_norm": 0.11170299351215363, | |
| "learning_rate": 4.2179780834215585e-06, | |
| "loss": 0.0021, | |
| "step": 8750 | |
| }, | |
| { | |
| "epoch": 46.59574468085106, | |
| "grad_norm": 0.05459124222397804, | |
| "learning_rate": 4.151759340778178e-06, | |
| "loss": 0.0026, | |
| "step": 8760 | |
| }, | |
| { | |
| "epoch": 46.648936170212764, | |
| "grad_norm": 0.0541393905878067, | |
| "learning_rate": 4.086041987149109e-06, | |
| "loss": 0.002, | |
| "step": 8770 | |
| }, | |
| { | |
| "epoch": 46.702127659574465, | |
| "grad_norm": 0.047844503074884415, | |
| "learning_rate": 4.020826741208811e-06, | |
| "loss": 0.0021, | |
| "step": 8780 | |
| }, | |
| { | |
| "epoch": 46.755319148936174, | |
| "grad_norm": 0.033653054386377335, | |
| "learning_rate": 3.956114316140746e-06, | |
| "loss": 0.002, | |
| "step": 8790 | |
| }, | |
| { | |
| "epoch": 46.808510638297875, | |
| "grad_norm": 0.0943630039691925, | |
| "learning_rate": 3.891905419629643e-06, | |
| "loss": 0.0035, | |
| "step": 8800 | |
| }, | |
| { | |
| "epoch": 46.861702127659576, | |
| "grad_norm": 0.08439528942108154, | |
| "learning_rate": 3.8282007538536946e-06, | |
| "loss": 0.0021, | |
| "step": 8810 | |
| }, | |
| { | |
| "epoch": 46.91489361702128, | |
| "grad_norm": 0.06539375334978104, | |
| "learning_rate": 3.7650010154769265e-06, | |
| "loss": 0.002, | |
| "step": 8820 | |
| }, | |
| { | |
| "epoch": 46.96808510638298, | |
| "grad_norm": 0.0709037035703659, | |
| "learning_rate": 3.7023068956415608e-06, | |
| "loss": 0.0024, | |
| "step": 8830 | |
| }, | |
| { | |
| "epoch": 47.02127659574468, | |
| "grad_norm": 0.059936098754405975, | |
| "learning_rate": 3.6401190799604303e-06, | |
| "loss": 0.0038, | |
| "step": 8840 | |
| }, | |
| { | |
| "epoch": 47.07446808510638, | |
| "grad_norm": 0.0661567896604538, | |
| "learning_rate": 3.578438248509536e-06, | |
| "loss": 0.003, | |
| "step": 8850 | |
| }, | |
| { | |
| "epoch": 47.12765957446808, | |
| "grad_norm": 0.07179677486419678, | |
| "learning_rate": 3.5172650758205583e-06, | |
| "loss": 0.0022, | |
| "step": 8860 | |
| }, | |
| { | |
| "epoch": 47.180851063829785, | |
| "grad_norm": 0.08600521087646484, | |
| "learning_rate": 3.45660023087353e-06, | |
| "loss": 0.0029, | |
| "step": 8870 | |
| }, | |
| { | |
| "epoch": 47.234042553191486, | |
| "grad_norm": 0.06164412945508957, | |
| "learning_rate": 3.3964443770894528e-06, | |
| "loss": 0.002, | |
| "step": 8880 | |
| }, | |
| { | |
| "epoch": 47.287234042553195, | |
| "grad_norm": 0.03534013777971268, | |
| "learning_rate": 3.3367981723231245e-06, | |
| "loss": 0.0019, | |
| "step": 8890 | |
| }, | |
| { | |
| "epoch": 47.340425531914896, | |
| "grad_norm": 0.08636914938688278, | |
| "learning_rate": 3.2776622688558746e-06, | |
| "loss": 0.0026, | |
| "step": 8900 | |
| }, | |
| { | |
| "epoch": 47.3936170212766, | |
| "grad_norm": 0.07446467876434326, | |
| "learning_rate": 3.2190373133884677e-06, | |
| "loss": 0.0024, | |
| "step": 8910 | |
| }, | |
| { | |
| "epoch": 47.4468085106383, | |
| "grad_norm": 0.07032009214162827, | |
| "learning_rate": 3.1609239470340446e-06, | |
| "loss": 0.0028, | |
| "step": 8920 | |
| }, | |
| { | |
| "epoch": 47.5, | |
| "grad_norm": 0.05509141832590103, | |
| "learning_rate": 3.1033228053110373e-06, | |
| "loss": 0.0025, | |
| "step": 8930 | |
| }, | |
| { | |
| "epoch": 47.5531914893617, | |
| "grad_norm": 0.04528696462512016, | |
| "learning_rate": 3.0462345181363314e-06, | |
| "loss": 0.0018, | |
| "step": 8940 | |
| }, | |
| { | |
| "epoch": 47.6063829787234, | |
| "grad_norm": 0.08887184411287308, | |
| "learning_rate": 2.9896597098182654e-06, | |
| "loss": 0.0021, | |
| "step": 8950 | |
| }, | |
| { | |
| "epoch": 47.659574468085104, | |
| "grad_norm": 0.0275883711874485, | |
| "learning_rate": 2.933598999049891e-06, | |
| "loss": 0.0017, | |
| "step": 8960 | |
| }, | |
| { | |
| "epoch": 47.712765957446805, | |
| "grad_norm": 0.05030861869454384, | |
| "learning_rate": 2.8780529989021697e-06, | |
| "loss": 0.0022, | |
| "step": 8970 | |
| }, | |
| { | |
| "epoch": 47.765957446808514, | |
| "grad_norm": 0.03914127126336098, | |
| "learning_rate": 2.823022316817242e-06, | |
| "loss": 0.0022, | |
| "step": 8980 | |
| }, | |
| { | |
| "epoch": 47.819148936170215, | |
| "grad_norm": 0.04058268293738365, | |
| "learning_rate": 2.7685075546018456e-06, | |
| "loss": 0.0018, | |
| "step": 8990 | |
| }, | |
| { | |
| "epoch": 47.87234042553192, | |
| "grad_norm": 0.027987739071249962, | |
| "learning_rate": 2.7145093084206598e-06, | |
| "loss": 0.002, | |
| "step": 9000 | |
| }, | |
| { | |
| "epoch": 47.92553191489362, | |
| "grad_norm": 0.027753984555602074, | |
| "learning_rate": 2.661028168789892e-06, | |
| "loss": 0.002, | |
| "step": 9010 | |
| }, | |
| { | |
| "epoch": 47.97872340425532, | |
| "grad_norm": 0.08038720488548279, | |
| "learning_rate": 2.6080647205706855e-06, | |
| "loss": 0.0018, | |
| "step": 9020 | |
| }, | |
| { | |
| "epoch": 48.03191489361702, | |
| "grad_norm": 0.07296367734670639, | |
| "learning_rate": 2.555619542962834e-06, | |
| "loss": 0.002, | |
| "step": 9030 | |
| }, | |
| { | |
| "epoch": 48.08510638297872, | |
| "grad_norm": 0.081815704703331, | |
| "learning_rate": 2.503693209498409e-06, | |
| "loss": 0.0021, | |
| "step": 9040 | |
| }, | |
| { | |
| "epoch": 48.138297872340424, | |
| "grad_norm": 0.030825141817331314, | |
| "learning_rate": 2.452286288035449e-06, | |
| "loss": 0.0028, | |
| "step": 9050 | |
| }, | |
| { | |
| "epoch": 48.191489361702125, | |
| "grad_norm": 0.025555279105901718, | |
| "learning_rate": 2.4013993407518363e-06, | |
| "loss": 0.0023, | |
| "step": 9060 | |
| }, | |
| { | |
| "epoch": 48.244680851063826, | |
| "grad_norm": 0.03425487503409386, | |
| "learning_rate": 2.351032924139063e-06, | |
| "loss": 0.0026, | |
| "step": 9070 | |
| }, | |
| { | |
| "epoch": 48.297872340425535, | |
| "grad_norm": 0.07835087180137634, | |
| "learning_rate": 2.30118758899619e-06, | |
| "loss": 0.0022, | |
| "step": 9080 | |
| }, | |
| { | |
| "epoch": 48.351063829787236, | |
| "grad_norm": 0.05660055950284004, | |
| "learning_rate": 2.2518638804238157e-06, | |
| "loss": 0.0021, | |
| "step": 9090 | |
| }, | |
| { | |
| "epoch": 48.40425531914894, | |
| "grad_norm": 0.026358595117926598, | |
| "learning_rate": 2.203062337818118e-06, | |
| "loss": 0.0026, | |
| "step": 9100 | |
| }, | |
| { | |
| "epoch": 48.45744680851064, | |
| "grad_norm": 0.044379107654094696, | |
| "learning_rate": 2.1547834948649483e-06, | |
| "loss": 0.0021, | |
| "step": 9110 | |
| }, | |
| { | |
| "epoch": 48.51063829787234, | |
| "grad_norm": 0.04037246108055115, | |
| "learning_rate": 2.1070278795340017e-06, | |
| "loss": 0.002, | |
| "step": 9120 | |
| }, | |
| { | |
| "epoch": 48.56382978723404, | |
| "grad_norm": 0.02138288877904415, | |
| "learning_rate": 2.059796014073029e-06, | |
| "loss": 0.0025, | |
| "step": 9130 | |
| }, | |
| { | |
| "epoch": 48.61702127659574, | |
| "grad_norm": 0.04572650417685509, | |
| "learning_rate": 2.01308841500214e-06, | |
| "loss": 0.0017, | |
| "step": 9140 | |
| }, | |
| { | |
| "epoch": 48.670212765957444, | |
| "grad_norm": 0.023222651332616806, | |
| "learning_rate": 1.9669055931081704e-06, | |
| "loss": 0.0017, | |
| "step": 9150 | |
| }, | |
| { | |
| "epoch": 48.723404255319146, | |
| "grad_norm": 0.032427698373794556, | |
| "learning_rate": 1.9212480534390507e-06, | |
| "loss": 0.0021, | |
| "step": 9160 | |
| }, | |
| { | |
| "epoch": 48.776595744680854, | |
| "grad_norm": 0.0687011331319809, | |
| "learning_rate": 1.8761162952983246e-06, | |
| "loss": 0.0031, | |
| "step": 9170 | |
| }, | |
| { | |
| "epoch": 48.829787234042556, | |
| "grad_norm": 0.024210453033447266, | |
| "learning_rate": 1.8315108122396618e-06, | |
| "loss": 0.0024, | |
| "step": 9180 | |
| }, | |
| { | |
| "epoch": 48.88297872340426, | |
| "grad_norm": 0.07589123398065567, | |
| "learning_rate": 1.787432092061475e-06, | |
| "loss": 0.003, | |
| "step": 9190 | |
| }, | |
| { | |
| "epoch": 48.93617021276596, | |
| "grad_norm": 0.04133829101920128, | |
| "learning_rate": 1.743880616801602e-06, | |
| "loss": 0.0027, | |
| "step": 9200 | |
| }, | |
| { | |
| "epoch": 48.98936170212766, | |
| "grad_norm": 0.03747500479221344, | |
| "learning_rate": 1.7008568627319865e-06, | |
| "loss": 0.0028, | |
| "step": 9210 | |
| }, | |
| { | |
| "epoch": 49.04255319148936, | |
| "grad_norm": 0.12031756341457367, | |
| "learning_rate": 1.6583613003535226e-06, | |
| "loss": 0.0025, | |
| "step": 9220 | |
| }, | |
| { | |
| "epoch": 49.09574468085106, | |
| "grad_norm": 0.0421098992228508, | |
| "learning_rate": 1.6163943943908522e-06, | |
| "loss": 0.002, | |
| "step": 9230 | |
| }, | |
| { | |
| "epoch": 49.148936170212764, | |
| "grad_norm": 0.044460028409957886, | |
| "learning_rate": 1.5749566037873476e-06, | |
| "loss": 0.0015, | |
| "step": 9240 | |
| }, | |
| { | |
| "epoch": 49.202127659574465, | |
| "grad_norm": 0.052290093153715134, | |
| "learning_rate": 1.5340483817000428e-06, | |
| "loss": 0.0035, | |
| "step": 9250 | |
| }, | |
| { | |
| "epoch": 49.255319148936174, | |
| "grad_norm": 0.032837122678756714, | |
| "learning_rate": 1.4936701754947101e-06, | |
| "loss": 0.0024, | |
| "step": 9260 | |
| }, | |
| { | |
| "epoch": 49.308510638297875, | |
| "grad_norm": 0.04038367420434952, | |
| "learning_rate": 1.4538224267409361e-06, | |
| "loss": 0.0043, | |
| "step": 9270 | |
| }, | |
| { | |
| "epoch": 49.361702127659576, | |
| "grad_norm": 0.038758691400289536, | |
| "learning_rate": 1.414505571207314e-06, | |
| "loss": 0.0016, | |
| "step": 9280 | |
| }, | |
| { | |
| "epoch": 49.41489361702128, | |
| "grad_norm": 0.030594009906053543, | |
| "learning_rate": 1.3757200388566816e-06, | |
| "loss": 0.0016, | |
| "step": 9290 | |
| }, | |
| { | |
| "epoch": 49.46808510638298, | |
| "grad_norm": 0.08106256276369095, | |
| "learning_rate": 1.3374662538414074e-06, | |
| "loss": 0.0023, | |
| "step": 9300 | |
| }, | |
| { | |
| "epoch": 49.52127659574468, | |
| "grad_norm": 0.04515732452273369, | |
| "learning_rate": 1.2997446344987617e-06, | |
| "loss": 0.002, | |
| "step": 9310 | |
| }, | |
| { | |
| "epoch": 49.57446808510638, | |
| "grad_norm": 0.03496381640434265, | |
| "learning_rate": 1.262555593346315e-06, | |
| "loss": 0.0029, | |
| "step": 9320 | |
| }, | |
| { | |
| "epoch": 49.62765957446808, | |
| "grad_norm": 0.060685280710458755, | |
| "learning_rate": 1.2258995370774685e-06, | |
| "loss": 0.0025, | |
| "step": 9330 | |
| }, | |
| { | |
| "epoch": 49.680851063829785, | |
| "grad_norm": 0.06380672007799149, | |
| "learning_rate": 1.1897768665569798e-06, | |
| "loss": 0.0018, | |
| "step": 9340 | |
| }, | |
| { | |
| "epoch": 49.734042553191486, | |
| "grad_norm": 0.0492476150393486, | |
| "learning_rate": 1.1541879768165954e-06, | |
| "loss": 0.0022, | |
| "step": 9350 | |
| }, | |
| { | |
| "epoch": 49.787234042553195, | |
| "grad_norm": 0.09407757967710495, | |
| "learning_rate": 1.1191332570507085e-06, | |
| "loss": 0.0029, | |
| "step": 9360 | |
| }, | |
| { | |
| "epoch": 49.840425531914896, | |
| "grad_norm": 0.04183247312903404, | |
| "learning_rate": 1.0846130906121132e-06, | |
| "loss": 0.0022, | |
| "step": 9370 | |
| }, | |
| { | |
| "epoch": 49.8936170212766, | |
| "grad_norm": 0.06712931394577026, | |
| "learning_rate": 1.0506278550078131e-06, | |
| "loss": 0.002, | |
| "step": 9380 | |
| }, | |
| { | |
| "epoch": 49.9468085106383, | |
| "grad_norm": 0.03611158952116966, | |
| "learning_rate": 1.0171779218949185e-06, | |
| "loss": 0.0015, | |
| "step": 9390 | |
| }, | |
| { | |
| "epoch": 50.0, | |
| "grad_norm": 0.05905800685286522, | |
| "learning_rate": 9.842636570765174e-07, | |
| "loss": 0.0019, | |
| "step": 9400 | |
| }, | |
| { | |
| "epoch": 50.0531914893617, | |
| "grad_norm": 0.09038300812244415, | |
| "learning_rate": 9.518854204977612e-07, | |
| "loss": 0.0044, | |
| "step": 9410 | |
| }, | |
| { | |
| "epoch": 50.1063829787234, | |
| "grad_norm": 0.05526236444711685, | |
| "learning_rate": 9.200435662418349e-07, | |
| "loss": 0.0034, | |
| "step": 9420 | |
| }, | |
| { | |
| "epoch": 50.159574468085104, | |
| "grad_norm": 0.05418349429965019, | |
| "learning_rate": 8.887384425261658e-07, | |
| "loss": 0.0027, | |
| "step": 9430 | |
| }, | |
| { | |
| "epoch": 50.212765957446805, | |
| "grad_norm": 0.06250227242708206, | |
| "learning_rate": 8.579703916985648e-07, | |
| "loss": 0.0031, | |
| "step": 9440 | |
| }, | |
| { | |
| "epoch": 50.265957446808514, | |
| "grad_norm": 0.06487517803907394, | |
| "learning_rate": 8.277397502335194e-07, | |
| "loss": 0.0031, | |
| "step": 9450 | |
| }, | |
| { | |
| "epoch": 50.319148936170215, | |
| "grad_norm": 0.09735055267810822, | |
| "learning_rate": 7.980468487284675e-07, | |
| "loss": 0.0024, | |
| "step": 9460 | |
| }, | |
| { | |
| "epoch": 50.37234042553192, | |
| "grad_norm": 0.030906975269317627, | |
| "learning_rate": 7.688920119002297e-07, | |
| "loss": 0.0021, | |
| "step": 9470 | |
| }, | |
| { | |
| "epoch": 50.42553191489362, | |
| "grad_norm": 0.031883593648672104, | |
| "learning_rate": 7.402755585814269e-07, | |
| "loss": 0.0018, | |
| "step": 9480 | |
| }, | |
| { | |
| "epoch": 50.47872340425532, | |
| "grad_norm": 0.08295193314552307, | |
| "learning_rate": 7.121978017170073e-07, | |
| "loss": 0.0021, | |
| "step": 9490 | |
| }, | |
| { | |
| "epoch": 50.53191489361702, | |
| "grad_norm": 0.03284180909395218, | |
| "learning_rate": 6.846590483608306e-07, | |
| "loss": 0.0018, | |
| "step": 9500 | |
| }, | |
| { | |
| "epoch": 50.58510638297872, | |
| "grad_norm": 0.049817781895399094, | |
| "learning_rate": 6.576595996722834e-07, | |
| "loss": 0.0021, | |
| "step": 9510 | |
| }, | |
| { | |
| "epoch": 50.638297872340424, | |
| "grad_norm": 0.0772111639380455, | |
| "learning_rate": 6.311997509130141e-07, | |
| "loss": 0.0019, | |
| "step": 9520 | |
| }, | |
| { | |
| "epoch": 50.691489361702125, | |
| "grad_norm": 0.025426926091313362, | |
| "learning_rate": 6.052797914436803e-07, | |
| "loss": 0.0019, | |
| "step": 9530 | |
| }, | |
| { | |
| "epoch": 50.744680851063826, | |
| "grad_norm": 0.023016205057501793, | |
| "learning_rate": 5.799000047208181e-07, | |
| "loss": 0.0017, | |
| "step": 9540 | |
| }, | |
| { | |
| "epoch": 50.797872340425535, | |
| "grad_norm": 0.02694050595164299, | |
| "learning_rate": 5.550606682937054e-07, | |
| "loss": 0.0027, | |
| "step": 9550 | |
| }, | |
| { | |
| "epoch": 50.851063829787236, | |
| "grad_norm": 0.027509916573762894, | |
| "learning_rate": 5.307620538013481e-07, | |
| "loss": 0.0022, | |
| "step": 9560 | |
| }, | |
| { | |
| "epoch": 50.90425531914894, | |
| "grad_norm": 0.04123242571949959, | |
| "learning_rate": 5.070044269694874e-07, | |
| "loss": 0.0019, | |
| "step": 9570 | |
| }, | |
| { | |
| "epoch": 50.95744680851064, | |
| "grad_norm": 0.04065615311264992, | |
| "learning_rate": 4.837880476077417e-07, | |
| "loss": 0.0016, | |
| "step": 9580 | |
| }, | |
| { | |
| "epoch": 51.01063829787234, | |
| "grad_norm": 0.029824523255228996, | |
| "learning_rate": 4.6111316960670835e-07, | |
| "loss": 0.0022, | |
| "step": 9590 | |
| }, | |
| { | |
| "epoch": 51.06382978723404, | |
| "grad_norm": 0.04467928409576416, | |
| "learning_rate": 4.389800409352218e-07, | |
| "loss": 0.002, | |
| "step": 9600 | |
| }, | |
| { | |
| "epoch": 51.11702127659574, | |
| "grad_norm": 0.06823906302452087, | |
| "learning_rate": 4.173889036376277e-07, | |
| "loss": 0.0017, | |
| "step": 9610 | |
| }, | |
| { | |
| "epoch": 51.170212765957444, | |
| "grad_norm": 0.02919851616024971, | |
| "learning_rate": 3.963399938311463e-07, | |
| "loss": 0.0025, | |
| "step": 9620 | |
| }, | |
| { | |
| "epoch": 51.223404255319146, | |
| "grad_norm": 0.046854037791490555, | |
| "learning_rate": 3.7583354170328545e-07, | |
| "loss": 0.0029, | |
| "step": 9630 | |
| }, | |
| { | |
| "epoch": 51.276595744680854, | |
| "grad_norm": 0.04605266824364662, | |
| "learning_rate": 3.558697715093207e-07, | |
| "loss": 0.0018, | |
| "step": 9640 | |
| }, | |
| { | |
| "epoch": 51.329787234042556, | |
| "grad_norm": 0.04546935483813286, | |
| "learning_rate": 3.3644890156983576e-07, | |
| "loss": 0.0019, | |
| "step": 9650 | |
| }, | |
| { | |
| "epoch": 51.38297872340426, | |
| "grad_norm": 0.05104997009038925, | |
| "learning_rate": 3.175711442683638e-07, | |
| "loss": 0.002, | |
| "step": 9660 | |
| }, | |
| { | |
| "epoch": 51.43617021276596, | |
| "grad_norm": 0.055750805884599686, | |
| "learning_rate": 2.9923670604902197e-07, | |
| "loss": 0.0026, | |
| "step": 9670 | |
| }, | |
| { | |
| "epoch": 51.48936170212766, | |
| "grad_norm": 0.03699976205825806, | |
| "learning_rate": 2.814457874143028e-07, | |
| "loss": 0.0032, | |
| "step": 9680 | |
| }, | |
| { | |
| "epoch": 51.54255319148936, | |
| "grad_norm": 0.017326273024082184, | |
| "learning_rate": 2.641985829228366e-07, | |
| "loss": 0.0016, | |
| "step": 9690 | |
| }, | |
| { | |
| "epoch": 51.59574468085106, | |
| "grad_norm": 0.025185925886034966, | |
| "learning_rate": 2.474952811872877e-07, | |
| "loss": 0.0018, | |
| "step": 9700 | |
| }, | |
| { | |
| "epoch": 51.648936170212764, | |
| "grad_norm": 0.02028697542846203, | |
| "learning_rate": 2.3133606487228397e-07, | |
| "loss": 0.0019, | |
| "step": 9710 | |
| }, | |
| { | |
| "epoch": 51.702127659574465, | |
| "grad_norm": 0.06325230747461319, | |
| "learning_rate": 2.157211106924295e-07, | |
| "loss": 0.0021, | |
| "step": 9720 | |
| }, | |
| { | |
| "epoch": 51.755319148936174, | |
| "grad_norm": 0.04482292756438255, | |
| "learning_rate": 2.006505894103672e-07, | |
| "loss": 0.0024, | |
| "step": 9730 | |
| }, | |
| { | |
| "epoch": 51.808510638297875, | |
| "grad_norm": 0.029405387118458748, | |
| "learning_rate": 1.8612466583489696e-07, | |
| "loss": 0.0016, | |
| "step": 9740 | |
| }, | |
| { | |
| "epoch": 51.861702127659576, | |
| "grad_norm": 0.0340152233839035, | |
| "learning_rate": 1.7214349881918834e-07, | |
| "loss": 0.0023, | |
| "step": 9750 | |
| }, | |
| { | |
| "epoch": 51.91489361702128, | |
| "grad_norm": 0.06583211570978165, | |
| "learning_rate": 1.5870724125904845e-07, | |
| "loss": 0.0027, | |
| "step": 9760 | |
| }, | |
| { | |
| "epoch": 51.96808510638298, | |
| "grad_norm": 0.08173692226409912, | |
| "learning_rate": 1.4581604009124006e-07, | |
| "loss": 0.0025, | |
| "step": 9770 | |
| }, | |
| { | |
| "epoch": 52.02127659574468, | |
| "grad_norm": 0.03838649019598961, | |
| "learning_rate": 1.334700362918717e-07, | |
| "loss": 0.0016, | |
| "step": 9780 | |
| }, | |
| { | |
| "epoch": 52.07446808510638, | |
| "grad_norm": 0.037551455199718475, | |
| "learning_rate": 1.2166936487486015e-07, | |
| "loss": 0.0028, | |
| "step": 9790 | |
| }, | |
| { | |
| "epoch": 52.12765957446808, | |
| "grad_norm": 0.031509045511484146, | |
| "learning_rate": 1.1041415489045914e-07, | |
| "loss": 0.0022, | |
| "step": 9800 | |
| }, | |
| { | |
| "epoch": 52.180851063829785, | |
| "grad_norm": 0.048134464770555496, | |
| "learning_rate": 9.970452942384412e-08, | |
| "loss": 0.0029, | |
| "step": 9810 | |
| }, | |
| { | |
| "epoch": 52.234042553191486, | |
| "grad_norm": 0.09030331671237946, | |
| "learning_rate": 8.954060559375754e-08, | |
| "loss": 0.0019, | |
| "step": 9820 | |
| }, | |
| { | |
| "epoch": 52.287234042553195, | |
| "grad_norm": 0.07785779237747192, | |
| "learning_rate": 7.99224945512489e-08, | |
| "loss": 0.002, | |
| "step": 9830 | |
| }, | |
| { | |
| "epoch": 52.340425531914896, | |
| "grad_norm": 0.05769821256399155, | |
| "learning_rate": 7.085030147843675e-08, | |
| "loss": 0.0015, | |
| "step": 9840 | |
| }, | |
| { | |
| "epoch": 52.3936170212766, | |
| "grad_norm": 0.06649912148714066, | |
| "learning_rate": 6.232412558736523e-08, | |
| "loss": 0.003, | |
| "step": 9850 | |
| }, | |
| { | |
| "epoch": 52.4468085106383, | |
| "grad_norm": 0.021498311311006546, | |
| "learning_rate": 5.434406011893822e-08, | |
| "loss": 0.0024, | |
| "step": 9860 | |
| }, | |
| { | |
| "epoch": 52.5, | |
| "grad_norm": 0.026517577469348907, | |
| "learning_rate": 4.6910192341864664e-08, | |
| "loss": 0.0028, | |
| "step": 9870 | |
| }, | |
| { | |
| "epoch": 52.5531914893617, | |
| "grad_norm": 0.04207788407802582, | |
| "learning_rate": 4.0022603551737035e-08, | |
| "loss": 0.0017, | |
| "step": 9880 | |
| }, | |
| { | |
| "epoch": 52.6063829787234, | |
| "grad_norm": 0.0679144635796547, | |
| "learning_rate": 3.3681369070120985e-08, | |
| "loss": 0.0025, | |
| "step": 9890 | |
| }, | |
| { | |
| "epoch": 52.659574468085104, | |
| "grad_norm": 0.018932249397039413, | |
| "learning_rate": 2.7886558243744866e-08, | |
| "loss": 0.0017, | |
| "step": 9900 | |
| }, | |
| { | |
| "epoch": 52.712765957446805, | |
| "grad_norm": 0.036958422511816025, | |
| "learning_rate": 2.2638234443722596e-08, | |
| "loss": 0.0017, | |
| "step": 9910 | |
| }, | |
| { | |
| "epoch": 52.765957446808514, | |
| "grad_norm": 0.05482904240489006, | |
| "learning_rate": 1.7936455064887504e-08, | |
| "loss": 0.0026, | |
| "step": 9920 | |
| }, | |
| { | |
| "epoch": 52.819148936170215, | |
| "grad_norm": 0.02221103385090828, | |
| "learning_rate": 1.378127152514841e-08, | |
| "loss": 0.0017, | |
| "step": 9930 | |
| }, | |
| { | |
| "epoch": 52.87234042553192, | |
| "grad_norm": 0.06416762620210648, | |
| "learning_rate": 1.0172729264917857e-08, | |
| "loss": 0.0045, | |
| "step": 9940 | |
| }, | |
| { | |
| "epoch": 52.92553191489362, | |
| "grad_norm": 0.018295176327228546, | |
| "learning_rate": 7.1108677466458215e-09, | |
| "loss": 0.0025, | |
| "step": 9950 | |
| }, | |
| { | |
| "epoch": 52.97872340425532, | |
| "grad_norm": 0.0375579297542572, | |
| "learning_rate": 4.595720454353414e-09, | |
| "loss": 0.0023, | |
| "step": 9960 | |
| }, | |
| { | |
| "epoch": 53.03191489361702, | |
| "grad_norm": 0.026658225804567337, | |
| "learning_rate": 2.627314893294264e-09, | |
| "loss": 0.0017, | |
| "step": 9970 | |
| }, | |
| { | |
| "epoch": 53.08510638297872, | |
| "grad_norm": 0.023903975263237953, | |
| "learning_rate": 1.2056725896270048e-09, | |
| "loss": 0.002, | |
| "step": 9980 | |
| }, | |
| { | |
| "epoch": 53.138297872340424, | |
| "grad_norm": 0.054526735097169876, | |
| "learning_rate": 3.308090902098826e-10, | |
| "loss": 0.002, | |
| "step": 9990 | |
| }, | |
| { | |
| "epoch": 53.191489361702125, | |
| "grad_norm": 0.028628919273614883, | |
| "learning_rate": 2.7339624120159555e-12, | |
| "loss": 0.0018, | |
| "step": 10000 | |
| }, | |
| { | |
| "epoch": 53.191489361702125, | |
| "step": 10000, | |
| "total_flos": 0.0, | |
| "train_loss": 0.009301510003954173, | |
| "train_runtime": 20791.9435, | |
| "train_samples_per_second": 192.382, | |
| "train_steps_per_second": 0.481 | |
| } | |
| ], | |
| "logging_steps": 10, | |
| "max_steps": 10000, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 54, | |
| "save_steps": 1000, | |
| "stateful_callbacks": { | |
| "TrainerControl": { | |
| "args": { | |
| "should_epoch_stop": false, | |
| "should_evaluate": false, | |
| "should_log": false, | |
| "should_save": true, | |
| "should_training_stop": true | |
| }, | |
| "attributes": {} | |
| } | |
| }, | |
| "total_flos": 0.0, | |
| "train_batch_size": 50, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |
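
The state dump above matches the `trainer_state.json` layout that the Hugging Face `transformers` Trainer writes next to each checkpoint: per-step entries in `log_history` (one every `logging_steps` = 10 optimizer steps), followed by a final summary entry carrying `train_loss`, `train_runtime`, and throughput. Below is a minimal sketch of reading the file back with only the Python standard library, assuming it is saved as `trainer_state.json` (the filename is an assumption; the key names are the ones visible above):

```python
import json

# Assumption: the JSON dump above is stored as "trainer_state.json".
with open("trainer_state.json") as f:
    state = json.load(f)

# The last log_history entry is the end-of-training summary.
summary = state["log_history"][-1]
print(f"epochs trained:   {state['epoch']:.2f}")
print(f"global steps:     {state['global_step']}")
print(f"mean train loss:  {summary['train_loss']:.4f}")
print(f"runtime (s):      {summary['train_runtime']:.1f}")
print(f"samples / sec:    {summary['train_samples_per_second']:.1f}")

# Every other entry has a per-interval "loss"; collect them for a
# quick convergence check (first vs. last logged value).
logged = [e for e in state["log_history"] if "loss" in e]
print(f"loss at step {logged[0]['step']}:  {logged[0]['loss']:.4f}")
print(f"loss at step {logged[-1]['step']}: {logged[-1]['loss']:.4f}")
```

This is only an illustration of the file's structure; the values it prints are the ones already recorded in the dump (10000 steps over roughly 53.2 epochs, with a final mean training loss of about 0.0093).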