{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 29.767441860465116,
  "global_step": 480,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    { "epoch": 0.06, "learning_rate": 1.3333333333333334e-06, "loss": 2.1574, "step": 1 },
    { "epoch": 0.12, "learning_rate": 2.666666666666667e-06, "loss": 2.1101, "step": 2 },
    { "epoch": 0.19, "learning_rate": 4.000000000000001e-06, "loss": 2.074, "step": 3 },
    { "epoch": 0.25, "learning_rate": 5.333333333333334e-06, "loss": 1.9878, "step": 4 },
    { "epoch": 0.31, "learning_rate": 6.666666666666667e-06, "loss": 1.5101, "step": 5 },
    { "epoch": 0.37, "learning_rate": 8.000000000000001e-06, "loss": 1.3858, "step": 6 },
    { "epoch": 0.43, "learning_rate": 9.333333333333334e-06, "loss": 1.1866, "step": 7 },
    { "epoch": 0.5, "learning_rate": 1.0666666666666667e-05, "loss": 1.0788, "step": 8 },
    { "epoch": 0.56, "learning_rate": 1.2e-05, "loss": 0.8755, "step": 9 },
    { "epoch": 0.62, "learning_rate": 1.3333333333333333e-05, "loss": 0.8617, "step": 10 },
    { "epoch": 0.68, "learning_rate": 1.4666666666666666e-05, "loss": 0.8315, "step": 11 },
    { "epoch": 0.74, "learning_rate": 1.6000000000000003e-05, "loss": 0.8335, "step": 12 },
    { "epoch": 0.81, "learning_rate": 1.7333333333333336e-05, "loss": 0.7985, "step": 13 },
    { "epoch": 0.87, "learning_rate": 1.866666666666667e-05, "loss": 0.799, "step": 14 },
    { "epoch": 0.93, "learning_rate": 2e-05, "loss": 0.722, "step": 15 },
    { "epoch": 0.99, "learning_rate": 1.999977177553799e-05, "loss": 0.7152, "step": 16 },
    { "epoch": 1.05, "learning_rate": 1.9999087112569246e-05, "loss": 0.6966, "step": 17 },
    { "epoch": 1.12, "learning_rate": 1.9997946042345128e-05, "loss": 0.5921, "step": 18 },
    { "epoch": 1.18, "learning_rate": 1.9996348616949673e-05, "loss": 0.5473, "step": 19 },
    { "epoch": 1.24, "learning_rate": 1.999429490929718e-05, "loss": 0.5623, "step": 20 },
    { "epoch": 1.3, "learning_rate": 1.9991785013128922e-05, "loss": 0.5566, "step": 21 },
    { "epoch": 1.36, "learning_rate": 1.998881904300884e-05, "loss": 0.5882, "step": 22 },
    { "epoch": 1.43, "learning_rate": 1.998539713431832e-05, "loss": 0.5511, "step": 23 },
    { "epoch": 1.49, "learning_rate": 1.998151944325001e-05, "loss": 0.5052, "step": 24 },
    { "epoch": 1.55, "learning_rate": 1.9977186146800707e-05, "loss": 0.4874, "step": 25 },
    { "epoch": 1.61, "learning_rate": 1.997239744276326e-05, "loss": 0.5485, "step": 26 },
    { "epoch": 1.67, "learning_rate": 1.996715354971755e-05, "loss": 0.5021, "step": 27 },
    { "epoch": 1.74, "learning_rate": 1.9961454707020515e-05, "loss": 0.5273, "step": 28 },
    { "epoch": 1.8, "learning_rate": 1.995530117479521e-05, "loss": 0.4885, "step": 29 },
    { "epoch": 1.86, "learning_rate": 1.994869323391895e-05, "loss": 0.4381, "step": 30 },
    { "epoch": 1.92, "learning_rate": 1.9941631186010496e-05, "loss": 0.5024, "step": 31 },
    { "epoch": 1.98, "learning_rate": 1.993411535341625e-05, "loss": 0.4849, "step": 32 },
    { "epoch": 2.05, "learning_rate": 1.9926146079195597e-05, "loss": 0.3431, "step": 33 },
    { "epoch": 2.11, "learning_rate": 1.991772372710519e-05, "loss": 0.2722, "step": 34 },
    { "epoch": 2.17, "learning_rate": 1.990884868158239e-05, "loss": 0.2383, "step": 35 },
    { "epoch": 2.23, "learning_rate": 1.989952134772769e-05, "loss": 0.2314, "step": 36 },
    { "epoch": 2.29, "learning_rate": 1.988974215128625e-05, "loss": 0.2271, "step": 37 },
    { "epoch": 2.36, "learning_rate": 1.987951153862843e-05, "loss": 0.2152, "step": 38 },
    { "epoch": 2.42, "learning_rate": 1.9868829976729444e-05, "loss": 0.2366, "step": 39 },
    { "epoch": 2.48, "learning_rate": 1.985769795314804e-05, "loss": 0.1924, "step": 40 },
    { "epoch": 2.54, "learning_rate": 1.9846115976004234e-05, "loss": 0.2124, "step": 41 },
    { "epoch": 2.6, "learning_rate": 1.983408457395613e-05, "loss": 0.2282, "step": 42 },
    { "epoch": 2.67, "learning_rate": 1.9821604296175774e-05, "loss": 0.2015, "step": 43 },
    { "epoch": 2.73, "learning_rate": 1.9808675712324108e-05, "loss": 0.2275, "step": 44 },
    { "epoch": 2.79, "learning_rate": 1.9795299412524948e-05, "loss": 0.2298, "step": 45 },
    { "epoch": 2.85, "learning_rate": 1.9781476007338058e-05, "loss": 0.2037, "step": 46 },
    { "epoch": 2.91, "learning_rate": 1.9767206127731283e-05, "loss": 0.2217, "step": 47 },
    { "epoch": 2.98, "learning_rate": 1.975249042505174e-05, "loss": 0.2081, "step": 48 },
    { "epoch": 3.04, "learning_rate": 1.9737329570996098e-05, "loss": 0.1353, "step": 49 },
    { "epoch": 3.1, "learning_rate": 1.9721724257579907e-05, "loss": 0.0982, "step": 50 },
    { "epoch": 3.16, "learning_rate": 1.970567519710602e-05, "loss": 0.0934, "step": 51 },
    { "epoch": 3.22, "learning_rate": 1.968918312213207e-05, "loss": 0.0848, "step": 52 },
    { "epoch": 3.29, "learning_rate": 1.9672248785437053e-05, "loss": 0.0722, "step": 53 },
    { "epoch": 3.35, "learning_rate": 1.9654872959986936e-05, "loss": 0.0873, "step": 54 },
    { "epoch": 3.41, "learning_rate": 1.963705643889941e-05, "loss": 0.0786, "step": 55 },
    { "epoch": 3.47, "learning_rate": 1.961880003540766e-05, "loss": 0.0693, "step": 56 },
    { "epoch": 3.53, "learning_rate": 1.960010458282326e-05, "loss": 0.0728, "step": 57 },
    { "epoch": 3.6, "learning_rate": 1.958097093449813e-05, "loss": 0.0767, "step": 58 },
    { "epoch": 3.66, "learning_rate": 1.9561399963785586e-05, "loss": 0.0873, "step": 59 },
    { "epoch": 3.72, "learning_rate": 1.954139256400049e-05, "loss": 0.0961, "step": 60 },
    { "epoch": 3.78, "learning_rate": 1.9520949648378444e-05, "loss": 0.0868, "step": 61 },
    { "epoch": 3.84, "learning_rate": 1.9500072150034136e-05, "loss": 0.0814, "step": 62 },
    { "epoch": 3.91, "learning_rate": 1.947876102191873e-05, "loss": 0.0829, "step": 63 },
    { "epoch": 3.97, "learning_rate": 1.945701723677637e-05, "loss": 0.0857, "step": 64 },
    { "epoch": 4.03, "learning_rate": 1.9434841787099804e-05, "loss": 0.06, "step": 65 },
    { "epoch": 4.09, "learning_rate": 1.9412235685085034e-05, "loss": 0.0401, "step": 66 },
    { "epoch": 4.16, "learning_rate": 1.9389199962585156e-05, "loss": 0.03, "step": 67 },
    { "epoch": 4.22, "learning_rate": 1.9365735671063247e-05, "loss": 0.0312, "step": 68 },
    { "epoch": 4.28, "learning_rate": 1.9341843881544372e-05, "loss": 0.0344, "step": 69 },
    { "epoch": 4.34, "learning_rate": 1.9317525684566686e-05, "loss": 0.0255, "step": 70 },
    { "epoch": 4.4, "learning_rate": 1.9292782190131677e-05, "loss": 0.031, "step": 71 },
    { "epoch": 4.47, "learning_rate": 1.926761452765349e-05, "loss": 0.033, "step": 72 },
    { "epoch": 4.53, "learning_rate": 1.9242023845907362e-05, "loss": 0.0361, "step": 73 },
    { "epoch": 4.59, "learning_rate": 1.921601131297721e-05, "loss": 0.0337, "step": 74 },
    { "epoch": 4.65, "learning_rate": 1.918957811620231e-05, "loss": 0.0301, "step": 75 },
    { "epoch": 4.71, "learning_rate": 1.9162725462123074e-05, "loss": 0.0346, "step": 76 },
    { "epoch": 4.78, "learning_rate": 1.913545457642601e-05, "loss": 0.0289, "step": 77 },
    { "epoch": 4.84, "learning_rate": 1.9107766703887764e-05, "loss": 0.0358, "step": 78 },
    { "epoch": 4.9, "learning_rate": 1.9079663108318304e-05, "loss": 0.0399, "step": 79 },
    { "epoch": 4.96, "learning_rate": 1.9051145072503216e-05, "loss": 0.0366, "step": 80 },
    { "epoch": 5.02, "learning_rate": 1.9022213898145176e-05, "loss": 0.0236, "step": 81 },
    { "epoch": 5.09, "learning_rate": 1.8992870905804535e-05, "loss": 0.0133, "step": 82 },
    { "epoch": 5.15, "learning_rate": 1.896311743483901e-05, "loss": 0.0128, "step": 83 },
    { "epoch": 5.21, "learning_rate": 1.893295484334259e-05, "loss": 0.0137, "step": 84 },
    { "epoch": 5.27, "learning_rate": 1.8902384508083518e-05, "loss": 0.0113, "step": 85 },
    { "epoch": 5.33, "learning_rate": 1.8871407824441453e-05, "loss": 0.0156, "step": 86 },
    { "epoch": 5.4, "learning_rate": 1.8840026206343786e-05, "loss": 0.0202, "step": 87 },
    { "epoch": 5.46, "learning_rate": 1.8808241086201106e-05, "loss": 0.0152, "step": 88 },
    { "epoch": 5.52, "learning_rate": 1.877605391484179e-05, "loss": 0.0162, "step": 89 },
    { "epoch": 5.58, "learning_rate": 1.8743466161445823e-05, "loss": 0.0161, "step": 90 },
    { "epoch": 5.64, "learning_rate": 1.8710479313477697e-05, "loss": 0.0133, "step": 91 },
    { "epoch": 5.71, "learning_rate": 1.867709487661854e-05, "loss": 0.0154, "step": 92 },
    { "epoch": 5.77, "learning_rate": 1.8643314374697377e-05, "loss": 0.0203, "step": 93 },
    { "epoch": 5.83, "learning_rate": 1.8609139349621588e-05, "loss": 0.0151, "step": 94 },
    { "epoch": 5.89, "learning_rate": 1.857457136130651e-05, "loss": 0.0146, "step": 95 },
    { "epoch": 5.95, "learning_rate": 1.853961198760426e-05, "loss": 0.0199, "step": 96 },
    { "epoch": 6.02, "learning_rate": 1.8504262824231675e-05, "loss": 0.0148, "step": 97 },
    { "epoch": 6.08, "learning_rate": 1.8468525484697527e-05, "loss": 0.0069, "step": 98 },
    { "epoch": 6.14, "learning_rate": 1.8432401600228823e-05, "loss": 0.0073, "step": 99 },
    { "epoch": 6.2, "learning_rate": 1.839589281969639e-05, "loss": 0.0076, "step": 100 },
    { "epoch": 6.26, "learning_rate": 1.8359000809539584e-05, "loss": 0.008, "step": 101 },
    { "epoch": 6.33, "learning_rate": 1.832172725369024e-05, "loss": 0.0074, "step": 102 },
    { "epoch": 6.39, "learning_rate": 1.8284073853495808e-05, "loss": 0.0078, "step": 103 },
    { "epoch": 6.45, "learning_rate": 1.8246042327641678e-05, "loss": 0.0079, "step": 104 },
    { "epoch": 6.51, "learning_rate": 1.8207634412072765e-05, "loss": 0.0092, "step": 105 },
    { "epoch": 6.57, "learning_rate": 1.816885185991424e-05, "loss": 0.0093, "step": 106 },
    { "epoch": 6.64, "learning_rate": 1.812969644139152e-05, "loss": 0.0076, "step": 107 },
    { "epoch": 6.7, "learning_rate": 1.8090169943749477e-05, "loss": 0.0075, "step": 108 },
    { "epoch": 6.76, "learning_rate": 1.8050274171170835e-05, "loss": 0.0122, "step": 109 },
    { "epoch": 6.82, "learning_rate": 1.8010010944693846e-05, "loss": 0.0102, "step": 110 },
    { "epoch": 6.88, "learning_rate": 1.7969382102129153e-05, "loss": 0.0109, "step": 111 },
    { "epoch": 6.95, "learning_rate": 1.7928389497975897e-05, "loss": 0.0072, "step": 112 },
    { "epoch": 7.01, "learning_rate": 1.7887035003337082e-05, "loss": 0.0115, "step": 113 },
    { "epoch": 7.07, "learning_rate": 1.7845320505834176e-05, "loss": 0.0077, "step": 114 },
    { "epoch": 7.13, "learning_rate": 1.780324790952092e-05, "loss": 0.0047, "step": 115 },
    { "epoch": 7.19, "learning_rate": 1.776081913479645e-05, "loss": 0.005, "step": 116 },
    { "epoch": 7.26, "learning_rate": 1.771803611831762e-05, "loss": 0.0055, "step": 117 },
    { "epoch": 7.32, "learning_rate": 1.767490081291062e-05, "loss": 0.0043, "step": 118 },
    { "epoch": 7.38, "learning_rate": 1.7631415187481818e-05, "loss": 0.0056, "step": 119 },
    { "epoch": 7.44, "learning_rate": 1.758758122692791e-05, "loss": 0.005, "step": 120 },
    { "epoch": 7.5, "learning_rate": 1.754340093204531e-05, "loss": 0.0075, "step": 121 },
    { "epoch": 7.57, "learning_rate": 1.749887631943882e-05, "loss": 0.0068, "step": 122 },
    { "epoch": 7.63, "learning_rate": 1.74540094214296e-05, "loss": 0.006, "step": 123 },
    { "epoch": 7.69, "learning_rate": 1.740880228596237e-05, "loss": 0.0067, "step": 124 },
    { "epoch": 7.75, "learning_rate": 1.7363256976511972e-05, "loss": 0.0056, "step": 125 },
    { "epoch": 7.81, "learning_rate": 1.7317375571989158e-05, "loss": 0.0068, "step": 126 },
    { "epoch": 7.88, "learning_rate": 1.7271160166645695e-05, "loss": 0.0061, "step": 127 },
    { "epoch": 7.94, "learning_rate": 1.722461286997879e-05, "loss": 0.0061, "step": 128 },
    { "epoch": 8.0, "learning_rate": 1.717773580663479e-05, "loss": 0.0063, "step": 129 },
    { "epoch": 8.06, "learning_rate": 1.7130531116312202e-05, "loss": 0.0032, "step": 130 },
    { "epoch": 8.12, "learning_rate": 1.708300095366405e-05, "loss": 0.003, "step": 131 },
    { "epoch": 8.19, "learning_rate": 1.703514748819948e-05, "loss": 0.0046, "step": 132 },
    { "epoch": 8.25, "learning_rate": 1.6986972904184783e-05, "loss": 0.0034, "step": 133 },
    { "epoch": 8.31, "learning_rate": 1.693847940054366e-05, "loss": 0.0033, "step": 134 },
    { "epoch": 8.37, "learning_rate": 1.688966919075687e-05, "loss": 0.0028, "step": 135 },
    { "epoch": 8.43, "learning_rate": 1.684054450276118e-05, "loss": 0.0035, "step": 136 },
    { "epoch": 8.5, "learning_rate": 1.6791107578847688e-05, "loss": 0.0029, "step": 137 },
    { "epoch": 8.56, "learning_rate": 1.6741360675559475e-05, "loss": 0.0056, "step": 138 },
    { "epoch": 8.62, "learning_rate": 1.6691306063588583e-05, "loss": 0.0047, "step": 139 },
    { "epoch": 8.68, "learning_rate": 1.6640946027672395e-05, "loss": 0.0052, "step": 140 },
    { "epoch": 8.74, "learning_rate": 1.659028286648932e-05, "loss": 0.0045, "step": 141 },
    { "epoch": 8.81, "learning_rate": 1.653931889255391e-05, "loss": 0.0045, "step": 142 },
    { "epoch": 8.87, "learning_rate": 1.648805643211127e-05, "loss": 0.0054, "step": 143 },
    { "epoch": 8.93, "learning_rate": 1.6436497825030886e-05, "loss": 0.0048, "step": 144 },
    { "epoch": 8.99, "learning_rate": 1.6384645424699835e-05, "loss": 0.0056, "step": 145 },
    { "epoch": 9.05, "learning_rate": 1.6332501597915353e-05, "loss": 0.0031, "step": 146 },
    { "epoch": 9.12, "learning_rate": 1.6280068724776795e-05, "loss": 0.0026, "step": 147 },
    { "epoch": 9.18, "learning_rate": 1.622734919857702e-05, "loss": 0.0022, "step": 148 },
    { "epoch": 9.24, "learning_rate": 1.617434542569313e-05, "loss": 0.0023, "step": 149 },
    { "epoch": 9.3, "learning_rate": 1.612105982547663e-05, "loss": 0.003, "step": 150 },
    { "epoch": 9.36, "learning_rate": 1.6067494830143014e-05, "loss": 0.0028, "step": 151 },
    { "epoch": 9.43, "learning_rate": 1.6013652884660723e-05, "loss": 0.0028, "step": 152 },
    { "epoch": 9.49, "learning_rate": 1.5959536446639572e-05, "loss": 0.0025, "step": 153 },
    { "epoch": 9.55, "learning_rate": 1.5905147986218546e-05, "loss": 0.0049, "step": 154 },
    { "epoch": 9.61, "learning_rate": 1.5850489985953076e-05, "loss": 0.0041, "step": 155 },
    { "epoch": 9.67, "learning_rate": 1.57955649407017e-05, "loss": 0.0039, "step": 156 },
    { "epoch": 9.74, "learning_rate": 1.5740375357512198e-05, "loss": 0.0041, "step": 157 },
    { "epoch": 9.8, "learning_rate": 1.568492375550715e-05, "loss": 0.0039, "step": 158 },
    { "epoch": 9.86, "learning_rate": 1.562921266576898e-05, "loss": 0.004, "step": 159 },
    { "epoch": 9.92, "learning_rate": 1.5573244631224364e-05, "loss": 0.0049, "step": 160 },
    { "epoch": 9.98, "learning_rate": 1.5517022206528233e-05, "loss": 0.0039, "step": 161 },
    { "epoch": 10.05, "learning_rate": 1.5460547957947105e-05, "loss": 0.0029, "step": 162 },
    { "epoch": 10.11, "learning_rate": 1.540382446324198e-05, "loss": 0.0021, "step": 163 },
    { "epoch": 10.17, "learning_rate": 1.5346854311550673e-05, "loss": 0.0018, "step": 164 },
    { "epoch": 10.23, "learning_rate": 1.5289640103269626e-05, "loss": 0.0025, "step": 165 },
    { "epoch": 10.29, "learning_rate": 1.523218444993522e-05, "loss": 0.0026, "step": 166 },
    { "epoch": 10.36, "learning_rate": 1.5174489974104574e-05, "loss": 0.0035, "step": 167 },
    { "epoch": 10.42, "learning_rate": 1.5116559309235825e-05, "loss": 0.0024, "step": 168 },
    { "epoch": 10.48, "learning_rate": 1.5058395099567935e-05, "loss": 0.0042, "step": 169 },
    { "epoch": 10.54, "learning_rate": 1.5000000000000002e-05, "loss": 0.0022, "step": 170 },
    { "epoch": 10.6, "learning_rate": 1.4941376675970058e-05, "loss": 0.0035, "step": 171 },
    { "epoch": 10.67, "learning_rate": 1.4882527803333422e-05, "loss": 0.0032, "step": 172 },
    { "epoch": 10.73, "learning_rate": 1.4823456068240558e-05, "loss": 0.0043, "step": 173 },
    { "epoch": 10.79, "learning_rate": 1.4764164167014451e-05, "loss": 0.0029, "step": 174 },
    { "epoch": 10.85, "learning_rate": 1.4704654806027558e-05, "loss": 0.005, "step": 175 },
    { "epoch": 10.91, "learning_rate": 1.4644930701578252e-05, "loss": 0.0046, "step": 176 },
    { "epoch": 10.98, "learning_rate": 1.4584994579766865e-05, "loss": 0.0044, "step": 177 },
    { "epoch": 11.04, "learning_rate": 1.4524849176371219e-05, "loss": 0.0038, "step": 178 },
    { "epoch": 11.1, "learning_rate": 1.4464497236721779e-05, "loss": 0.0028, "step": 179 },
    { "epoch": 11.16, "learning_rate": 1.4403941515576344e-05, "loss": 0.0022, "step": 180 },
    { "epoch": 11.22, "learning_rate": 1.4343184776994288e-05, "loss": 0.0017, "step": 181 },
    { "epoch": 11.29, "learning_rate": 1.4282229794210404e-05, "loss": 0.0049, "step": 182 },
    { "epoch": 11.35, "learning_rate": 1.422107934950832e-05, "loss": 0.0025, "step": 183 },
    { "epoch": 11.41, "learning_rate": 1.415973623409351e-05, "loss": 0.0034, "step": 184 },
    { "epoch": 11.47, "learning_rate": 1.4098203247965876e-05, "loss": 0.0029, "step": 185 },
    { "epoch": 11.53, "learning_rate": 1.4036483199791949e-05, "loss": 0.0024, "step": 186 },
    { "epoch": 11.6, "learning_rate": 1.3974578906776683e-05, "loss": 0.0033, "step": 187 },
    { "epoch": 11.66, "learning_rate": 1.3912493194534876e-05, "loss": 0.0033, "step": 188 },
    { "epoch": 11.72, "learning_rate": 1.3850228896962178e-05, "loss": 0.003, "step": 189 },
    { "epoch": 11.78, "learning_rate": 1.3787788856105762e-05, "loss": 0.0026, "step": 190 },
    { "epoch": 11.84, "learning_rate": 1.3725175922034566e-05, "loss": 0.0037, "step": 191 },
    { "epoch": 11.91, "learning_rate": 1.366239295270923e-05, "loss": 0.004, "step": 192 },
    { "epoch": 11.97, "learning_rate": 1.3599442813851633e-05, "loss": 0.0036, "step": 193 },
    { "epoch": 12.03, "learning_rate": 1.3536328378814094e-05, "loss": 0.0029, "step": 194 },
    { "epoch": 12.09, "learning_rate": 1.3473052528448203e-05, "loss": 0.0022, "step": 195 },
    { "epoch": 12.16, "learning_rate": 1.3409618150973349e-05, "loss": 0.0033, "step": 196 },
    { "epoch": 12.22, "learning_rate": 1.334602814184486e-05, "loss": 0.0022, "step": 197 },
    { "epoch": 12.28, "learning_rate": 1.3282285403621864e-05, "loss": 0.0019, "step": 198 },
    { "epoch": 12.34, "learning_rate": 1.3218392845834789e-05, "loss": 0.0016, "step": 199 },
    { "epoch": 12.4, "learning_rate": 1.3154353384852559e-05, "loss": 0.0021, "step": 200 },
    { "epoch": 12.47, "learning_rate": 1.3090169943749475e-05, "loss": 0.0026, "step": 201 },
    { "epoch": 12.53, "learning_rate": 1.3025845452171808e-05, "loss": 0.0022, "step": 202 },
    { "epoch": 12.59, "learning_rate": 1.2961382846204056e-05, "loss": 0.0027, "step": 203 },
    { "epoch": 12.65, "learning_rate": 1.2896785068234925e-05, "loss": 0.0023, "step": 204 },
    { "epoch": 12.71, "learning_rate": 1.283205506682304e-05, "loss": 0.0027, "step": 205 },
    { "epoch": 12.78, "learning_rate": 1.2767195796562359e-05, "loss": 0.0031, "step": 206 },
    { "epoch": 12.84, "learning_rate": 1.2702210217947289e-05, "loss": 0.0044, "step": 207 },
    { "epoch": 12.9, "learning_rate": 1.263710129723757e-05, "loss": 0.0027, "step": 208 },
    { "epoch": 12.96, "learning_rate": 1.257187200632289e-05, "loss": 0.0027, "step": 209 },
    { "epoch": 13.02, "learning_rate": 1.2506525322587207e-05, "loss": 0.0019, "step": 210 },
    { "epoch": 13.09, "learning_rate": 1.2441064228772874e-05, "loss": 0.0019, "step": 211 },
    { "epoch": 13.15, "learning_rate": 1.2375491712844472e-05, "loss": 0.002, "step": 212 },
    { "epoch": 13.21, "learning_rate": 1.2309810767852435e-05, "loss": 0.0021, "step": 213 },
    { "epoch": 13.27, "learning_rate": 1.2244024391796432e-05, "loss": 0.0021, "step": 214 },
    { "epoch": 13.33, "learning_rate": 1.2178135587488515e-05, "loss": 0.0016, "step": 215 },
    { "epoch": 13.4, "learning_rate": 1.2112147362416076e-05, "loss": 0.0022, "step": 216 },
    { "epoch": 13.46, "learning_rate": 1.204606272860454e-05, "loss": 0.0027, "step": 217 },
    { "epoch": 13.52, "learning_rate": 1.1979884702479909e-05, "loss": 0.0023, "step": 218 },
    { "epoch": 13.58, "learning_rate": 1.1913616304731064e-05, "loss": 0.0028, "step": 219 },
    { "epoch": 13.64, "learning_rate": 1.1847260560171895e-05, "loss": 0.0022, "step": 220 },
    { "epoch": 13.71, "learning_rate": 1.1780820497603215e-05, "loss": 0.0021, "step": 221 },
    { "epoch": 13.77, "learning_rate": 1.1714299149674538e-05, "loss": 0.002, "step": 222 },
    { "epoch": 13.83, "learning_rate": 1.1647699552745628e-05, "loss": 0.0022, "step": 223 },
    { "epoch": 13.89, "learning_rate": 1.1581024746747925e-05, "loss": 0.0022, "step": 224 },
    { "epoch": 13.95, "learning_rate": 1.1514277775045768e-05, "loss": 0.0026, "step": 225 },
    { "epoch": 14.02, "learning_rate": 1.1447461684297505e-05, "loss": 0.0021, "step": 226 },
    { "epoch": 14.08, "learning_rate": 1.1380579524316406e-05, "loss": 0.0016, "step": 227 },
    { "epoch": 14.14, "learning_rate": 1.1313634347931466e-05, "loss": 0.0025, "step": 228 },
    { "epoch": 14.2, "learning_rate": 1.1246629210848062e-05, "loss": 0.0016, "step": 229 },
    { "epoch": 14.26, "learning_rate": 1.1179567171508463e-05, "loss": 0.0016, "step": 230 },
    { "epoch": 14.33, "learning_rate": 1.1112451290952238e-05, "loss": 0.0017, "step": 231 },
    { "epoch": 14.39, "learning_rate": 1.1045284632676535e-05, "loss": 0.0019, "step": 232 },
    { "epoch": 14.45, "learning_rate": 1.0978070262496248e-05, "loss": 0.0018, "step": 233 },
    { "epoch": 14.51, "learning_rate": 1.0910811248404064e-05, "loss": 0.0018, "step": 234 },
    { "epoch": 14.57, "learning_rate": 1.0843510660430447e-05, "loss": 0.0022, "step": 235 },
    { "epoch": 14.64, "learning_rate": 1.07761715705035e-05, "loss": 0.0023, "step": 236 },
    { "epoch": 14.7, "learning_rate": 1.070879705230873e-05, "loss": 0.0022, "step": 237 },
    { "epoch": 14.76, "learning_rate": 1.0641390181148772e-05, "loss": 0.0031, "step": 238 },
    { "epoch": 14.82, "learning_rate": 1.0573954033803006e-05, "loss": 0.0021, "step": 239 },
    { "epoch": 14.88, "learning_rate": 1.0506491688387128e-05, "loss": 0.0024, "step": 240 },
    { "epoch": 14.95, "learning_rate": 1.0439006224212629e-05, "loss": 0.0028, "step": 241 },
    { "epoch": 15.01, "learning_rate": 1.037150072164626e-05, "loss": 0.0023, "step": 242 },
    { "epoch": 15.07, "learning_rate": 1.030397826196943e-05, "loss": 0.0017, "step": 243 },
    { "epoch": 15.13, "learning_rate": 1.0236441927237534e-05, "loss": 0.0014, "step": 244 },
    { "epoch": 15.19, "learning_rate": 1.0168894800139311e-05, "loss": 0.0015, "step": 245 },
    { "epoch": 15.26, "learning_rate": 1.0101339963856112e-05, "loss": 0.0013, "step": 246 },
    { "epoch": 15.32, "learning_rate": 1.0033780501921164e-05, "loss": 0.0022, "step": 247 },
    { "epoch": 15.38, "learning_rate": 9.966219498078839e-06, "loss": 0.002, "step": 248 },
    { "epoch": 15.44, "learning_rate": 9.898660036143893e-06, "loss": 0.0015, "step": 249 },
    { "epoch": 15.5, "learning_rate": 9.83110519986069e-06, "loss": 0.0014, "step": 250 },
    { "epoch": 15.57, "learning_rate": 9.763558072762467e-06, "loss": 0.0014, "step": 251 },
    { "epoch": 15.63, "learning_rate": 9.696021738030575e-06, "loss": 0.0019, "step": 252 },
    { "epoch": 15.69, "learning_rate": 9.62849927835374e-06, "loss": 0.0019, "step": 253 },
    { "epoch": 15.75, "learning_rate": 9.560993775787373e-06, "loss": 0.0017, "step": 254 },
    { "epoch": 15.81, "learning_rate": 9.493508311612874e-06, "loss": 0.0017, "step": 255 },
    { "epoch": 15.88, "learning_rate": 9.426045966196992e-06, "loss": 0.0023, "step": 256 },
    { "epoch": 15.94, "learning_rate": 9.35860981885123e-06, "loss": 0.0026, "step": 257 },
    { "epoch": 16.0, "learning_rate": 9.291202947691272e-06, "loss": 0.0025, "step": 258 },
    { "epoch": 16.06, "learning_rate": 9.2238284294965e-06, "loss": 0.0018, "step": 259 },
    { "epoch": 16.12, "learning_rate": 9.156489339569555e-06, "loss": 0.0011, "step": 260 },
    { "epoch": 16.19, "learning_rate": 9.089188751595937e-06, "loss": 0.0017, "step": 261 },
    { "epoch": 16.25, "learning_rate": 9.021929737503757e-06, "loss": 0.0014, "step": 262 },
    { "epoch": 16.31, "learning_rate": 8.954715367323468e-06, "loss": 0.0016, "step": 263 },
    { "epoch": 16.37, "learning_rate": 8.887548709047765e-06, "loss": 0.0019, "step": 264 },
    { "epoch": 16.43, "learning_rate": 8.820432828491542e-06, "loss": 0.0015, "step": 265 },
    { "epoch": 16.5, "learning_rate": 8.753370789151941e-06, "loss": 0.0016, "step": 266 },
    { "epoch": 16.56, "learning_rate": 8.686365652068536e-06, "loss": 0.0014, "step": 267 },
    { "epoch": 16.62, "learning_rate": 8.619420475683597e-06, "loss": 0.0013, "step": 268 },
    { "epoch": 16.68, "learning_rate": 8.552538315702497e-06, "loss": 0.0016, "step": 269 },
    { "epoch": 16.74, "learning_rate": 8.485722224954237e-06, "loss": 0.0024, "step": 270 },
    { "epoch": 16.81, "learning_rate": 8.418975253252079e-06, "loss": 0.0016, "step": 271 },
    { "epoch": 16.87, "learning_rate": 8.352300447254372e-06, "loss": 0.0024, "step": 272 },
    { "epoch": 16.93, "learning_rate": 8.285700850325467e-06, "loss": 0.0021, "step": 273 },
    { "epoch": 16.99, "learning_rate": 8.219179502396786e-06, "loss": 0.0021, "step": 274 },
    { "epoch": 17.05, "learning_rate": 8.15273943982811e-06, "loss": 0.0016, "step": 275 },
    { "epoch": 17.12, "learning_rate": 8.086383695268937e-06, "loss": 0.0017, "step": 276 },
    { "epoch": 17.18, "learning_rate": 8.020115297520093e-06, "loss": 0.0011, "step": 277 },
    { "epoch": 17.24, "learning_rate": 7.953937271395465e-06, "loss": 0.0016, "step": 278 },
    { "epoch": 17.3, "learning_rate": 7.887852637583927e-06, "loss": 0.0014, "step": 279 },
    { "epoch": 17.36, "learning_rate": 7.821864412511485e-06, "loss": 0.0015, "step": 280 },
    { "epoch": 17.43, "learning_rate": 7.755975608203571e-06, "loss": 0.0018, "step": 281 },
    { "epoch": 17.49, "learning_rate": 7.690189232147566e-06, "loss": 0.0021, "step": 282 },
    { "epoch": 17.55, "learning_rate": 7.624508287155534e-06, "loss": 0.0018, "step": 283 },
    { "epoch": 17.61, "learning_rate": 7.558935771227129e-06, "loss": 0.0013, "step": 284 },
    { "epoch": 17.67, "learning_rate": 7.493474677412795e-06, "loss": 0.0015, "step": 285 },
    { "epoch": 17.74, "learning_rate": 7.428127993677116e-06, "loss": 0.0016, "step": 286 },
    { "epoch": 17.8, "learning_rate": 7.362898702762433e-06, "loss": 0.0017, "step": 287 },
    { "epoch": 17.86, "learning_rate": 7.297789782052716e-06, "loss": 0.0017, "step": 288 },
    { "epoch": 17.92, "learning_rate": 7.232804203437645e-06, "loss": 0.0023, "step": 289 },
    { "epoch": 17.98, "learning_rate": 7.16794493317696e-06, "loss": 0.0023, "step": 290 },
    { "epoch": 18.05, "learning_rate": 7.10321493176508e-06, "loss": 0.0018, "step": 291 },
    { "epoch": 18.11, "learning_rate": 7.038617153795948e-06, "loss": 0.0016, "step": 292 },
    { "epoch": 18.17, "learning_rate": 6.974154547828191e-06, "loss": 0.0014, "step": 293 },
    { "epoch": 18.23, "learning_rate": 6.909830056250527e-06, "loss": 0.0018, "step": 294 },
    { "epoch": 18.29, "learning_rate": 6.845646615147445e-06, "loss": 0.0017, "step": 295 },
    { "epoch": 18.36, "learning_rate": 6.781607154165217e-06, "loss": 0.0014, "step": 296 },
    { "epoch": 18.42, "learning_rate": 6.717714596378138e-06, "loss": 0.0013, "step": 297 },
    { "epoch": 18.48, "learning_rate": 6.65397185815514e-06, "loss": 0.0009, "step": 298 },
    { "epoch": 18.54, "learning_rate": 6.5903818490266554e-06, "loss": 0.0012, "step": 299 },
    { "epoch": 18.6, "learning_rate": 6.526947471551799e-06, "loss": 0.0018, "step": 300 },
    { "epoch": 18.67, "learning_rate": 6.4636716211859076e-06, "loss": 0.0017, "step": 301 },
    { "epoch": 18.73, "learning_rate": 6.400557186148371e-06, "loss": 0.0019, "step": 302 },
    { "epoch": 18.79, "learning_rate": 6.337607047290774e-06, "loss": 0.002, "step": 303 },
    { "epoch": 18.85, "learning_rate": 6.274824077965438e-06, "loss": 0.0013, "step": 304 },
    { "epoch": 18.91, "learning_rate": 6.21221114389424e-06, "loss": 0.0019, "step": 305 },
    { "epoch": 18.98, "learning_rate": 6.149771103037821e-06, "loss": 0.0015, "step": 306 },
    { "epoch": 19.04, "learning_rate": 6.087506805465127e-06, "loss": 0.0023, "step": 307 },
    { "epoch": 19.1, "learning_rate": 6.025421093223318e-06, "loss": 0.0015, "step": 308 },
    { "epoch": 19.16, "learning_rate": 5.963516800208056e-06, "loss": 0.0014, "step": 309 },
    { "epoch": 19.22, "learning_rate": 5.901796752034128e-06, "loss": 0.0012, "step": 310 },
    { "epoch": 19.29, "learning_rate": 5.84026376590649e-06, "loss": 0.0012, "step": 311 },
    { "epoch": 19.35, "learning_rate": 5.7789206504916815e-06, "loss": 0.0019, "step": 312 },
    { "epoch": 19.41, "learning_rate": 5.7177702057896015e-06, "loss": 0.0012, "step": 313 },
    { "epoch": 19.47, "learning_rate": 5.656815223005714e-06, "loss": 0.0019, "step": 314 },
    { "epoch": 19.53, "learning_rate": 5.5960584844236565e-06, "loss": 0.0013, "step": 315 },
    { "epoch": 19.6, "learning_rate": 5.535502763278222e-06, "loss": 0.002, "step": 316 },
    { "epoch": 19.66, "learning_rate": 5.475150823628786e-06, "loss": 0.0017, "step": 317 },
    { "epoch": 19.72, "learning_rate": 5.415005420233141e-06, "loss": 0.0012, "step": 318 },
    { "epoch": 19.78, "learning_rate": 5.355069298421747e-06, "loss": 0.0016, "step": 319 },
    { "epoch": 19.84, "learning_rate": 5.295345193972445e-06, "loss": 0.002, "step": 320 },
    { "epoch": 19.91, "learning_rate": 5.235835832985552e-06, "loss": 0.0013, "step": 321 },
    { "epoch": 19.97, "learning_rate": 5.176543931759447e-06, "loss": 0.0018, "step": 322 },
    { "epoch": 20.03, "learning_rate": 5.117472196666578e-06, "loss": 0.0017, "step": 323 },
    { "epoch": 20.09, "learning_rate": 5.058623324029944e-06, "loss": 0.0013, "step": 324 },
    { "epoch": 20.16, "learning_rate": 5.000000000000003e-06, "loss": 0.0011, "step": 325 },
    { "epoch": 20.22, "learning_rate": 4.941604900432065e-06, "loss": 0.0015, "step": 326 },
    { "epoch": 20.28, "learning_rate": 4.8834406907641784e-06, "loss": 0.0013, "step": 327 },
    { "epoch": 20.34, "learning_rate": 4.825510025895429e-06, "loss": 0.0012, "step": 328 },
    { "epoch": 20.4, "learning_rate": 4.767815550064778e-06, "loss": 0.0014, "step": 329 },
    { "epoch": 20.47, "learning_rate": 4.710359896730379e-06, "loss": 0.0014, "step": 330 },
    { "epoch": 20.53, "learning_rate": 4.65314568844933e-06, "loss": 0.0014, "step": 331 },
    { "epoch": 20.59, "learning_rate": 4.596175536758024e-06, "loss": 0.0018, "step": 332 },
    { "epoch": 20.65, "learning_rate": 4.539452042052901e-06, "loss": 0.0018, "step": 333 },
    { "epoch": 20.71, "learning_rate": 4.482977793471769e-06, "loss": 0.0013, "step": 334 },
    { "epoch": 20.78, "learning_rate": 4.426755368775637e-06, "loss": 0.002, "step": 335 },
    { "epoch": 20.84, "learning_rate": 4.370787334231026e-06, "loss": 0.0016, "step": 336 },
    { "epoch": 20.9, "learning_rate": 4.315076244492847e-06, "loss": 0.0017, "step": 337 },
    { "epoch": 20.96, "learning_rate": 4.2596246424878055e-06, "loss": 0.0014, "step": 338 },
    { "epoch": 21.02, "learning_rate": 4.204435059298303e-06, "loss": 0.0019, "step": 339 },
    { "epoch": 21.09, "learning_rate": 4.149510014046922e-06, "loss": 0.0013, "step": 340 },
    { "epoch": 21.15, "learning_rate": 4.094852013781456e-06, "loss": 0.0015, "step": 341 },
    { "epoch": 21.21, "learning_rate": 4.040463553360431e-06, "loss": 0.0017, "step": 342 },
    { "epoch": 21.27, "learning_rate": 3.986347115339281e-06, "loss": 0.001, "step": 343 },
    { "epoch": 21.33, "learning_rate": 3.932505169856993e-06, "loss": 0.0014, "step": 344 },
    { "epoch": 21.4, "learning_rate": 3.878940174523371e-06, "loss": 0.0016, "step": 345 },
    { "epoch": 21.46, "learning_rate": 3.825654574306873e-06, "loss": 0.0012, "step": 346 },
    { "epoch": 21.52, "learning_rate": 3.7726508014229825e-06, "loss": 0.0015, "step": 347 },
    { "epoch": 21.58, "learning_rate": 3.7199312752232053e-06, "loss": 0.0012, "step": 348 },
    { "epoch": 21.64, "learning_rate": 3.6674984020846503e-06, "loss": 0.0016, "step": 349 },
    { "epoch": 21.71, "learning_rate": 3.6153545753001663e-06, "loss": 0.0017, "step": 350 },
    { "epoch": 21.77, "learning_rate": 3.563502174969117e-06, "loss": 0.0012, "step": 351 },
    { "epoch": 21.83, "learning_rate": 3.5119435678887328e-06, "loss": 0.0016, "step": 352 },
    { "epoch": 21.89, "learning_rate": 3.460681107446091e-06, "loss": 0.0018, "step": 353 },
    { "epoch": 21.95, "learning_rate": 3.409717133510683e-06, "loss": 0.0017, "step": 354 },
    { "epoch": 22.02, "learning_rate": 3.3590539723276083e-06, "loss": 0.0015, "step": 355 },
    { "epoch": 22.08, "learning_rate": 3.308693936411421e-06, "loss": 0.0012, "step": 356 },
    { "epoch": 22.14, "learning_rate": 3.258639324440527e-06, "loss": 0.0018, "step": 357 },
    { "epoch": 22.2, "learning_rate": 3.208892421152314e-06, "loss": 0.0014, "step": 358 },
    { "epoch": 22.26, "learning_rate": 3.159455497238827e-06, "loss": 0.0013, "step": 359 },
    { "epoch": 22.33, "learning_rate": 3.110330809243134e-06, "loss": 0.0012, "step": 360 },
    { "epoch": 22.39, "learning_rate": 3.0615205994563412e-06, "loss": 0.0013, "step": 361 },
    { "epoch": 22.45, "learning_rate": 3.0130270958152196e-06, "loss": 0.0013, "step": 362 },
    { "epoch": 22.51, "learning_rate": 2.964852511800519e-06, "loss": 0.0013, "step": 363 },
    { "epoch": 22.57, "learning_rate": 2.9169990463359556e-06, "loss": 0.0016, "step": 364 },
    { "epoch": 22.64, "learning_rate": 2.869468883687798e-06, "loss": 0.0018, "step": 365 },
    { "epoch": 22.7, "learning_rate": 2.822264193365212e-06, "loss": 0.0014, "step": 366 },
    { "epoch": 22.76, "learning_rate": 2.775387130021214e-06, "loss": 0.0015, "step": 367 },
    { "epoch": 22.82, "learning_rate": 2.7288398333543063e-06, "loss": 0.0016, "step": 368 },
    { "epoch": 22.88, "learning_rate": 2.6826244280108438e-06, "loss": 0.0011, "step": 369 },
    { "epoch": 22.95, "learning_rate": 2.6367430234880286e-06, "loss": 0.0017, "step": 370 },
    { "epoch": 23.01, "learning_rate": 2.591197714037631e-06, "loss": 0.0014, "step": 371 },
    { "epoch": 23.07, "learning_rate": 2.545990578570404e-06, "loss": 0.0012, "step": 372 },
    { "epoch": 23.13, "learning_rate": 2.5011236805611818e-06, "loss": 0.0014, "step": 373 },
    { "epoch": 23.19, "learning_rate": 2.4565990679546913e-06, "loss": 0.0012, "step": 374 },
    { "epoch": 23.26, "learning_rate": 2.4124187730720916e-06, "loss": 0.001, "step": 375 },
    { "epoch": 23.32, "learning_rate": 2.3685848125181843e-06, "loss": 0.0012, "step": 376 },
    { "epoch": 23.38, "learning_rate": 2.3250991870893837e-06, "loss": 0.0015, "step": 377 },
    { "epoch": 23.44, "learning_rate": 2.2819638816823796e-06, "loss": 0.0012, "step": 378 },
    { "epoch": 23.5, "learning_rate": 2.239180865203552e-06, "loss": 0.0012, "step": 379 },
    { "epoch": 23.57, "learning_rate": 2.196752090479083e-06, "loss": 0.002, "step": 380 },
    { "epoch": 23.63, "learning_rate": 2.154679494165829e-06, "loss": 0.0012, "step": 381 },
    { "epoch": 23.69, "learning_rate": 2.1129649966629185e-06, "loss": 0.0012, "step": 382 },
    { "epoch": 23.75, "learning_rate": 2.0716105020241074e-06, "loss": 0.0011, "step": 383 },
    { "epoch": 23.81, "learning_rate": 2.030617897870851e-06, "loss": 0.0016, "step": 384 },
    { "epoch": 23.88, "learning_rate": 1.9899890553061565e-06, "loss": 0.0018, "step": 385 },
    { "epoch": 23.94, "learning_rate": 1.9497258288291655e-06, "loss": 0.0018, "step": 386 },
    { "epoch": 24.0, "learning_rate": 1.9098300562505266e-06, "loss": 0.0019, "step": 387 },
    { "epoch": 24.06, "learning_rate": 1.8703035586084817e-06, "loss": 0.0012, "step": 388 },
    { "epoch": 24.12, "learning_rate": 1.8311481400857622e-06, "loss": 0.0012, "step": 389 },
    { "epoch": 24.19, "learning_rate": 1.7923655879272395e-06, "loss": 0.0014, "step": 390 },
    { "epoch": 24.25, "learning_rate": 1.753957672358324e-06, "loss": 0.0016, "step": 391 },
    { "epoch": 24.31, "learning_rate": 1.7159261465041954e-06, "loss": 0.0014, "step": 392 },
    { "epoch": 24.37, "learning_rate": 1.6782727463097626e-06, "loss": 0.0015, "step": 393 },
    { "epoch": 24.43, "learning_rate": 1.6409991904604173e-06, "loss": 0.0018, "step": 394 },
    { "epoch": 24.5, "learning_rate": 1.60410718030361e-06, "loss": 0.0011, "step": 395 },
    { "epoch": 24.56, "learning_rate": 1.5675983997711797e-06, "loss": 0.0013, "step": 396 },
    { "epoch": 24.62, "learning_rate": 1.5314745153024768e-06, "loss": 0.0014, "step": 397 },
    { "epoch": 24.68, "learning_rate": 1.4957371757683258e-06, "loss": 0.0014, "step": 398 },
    { "epoch": 24.74, "learning_rate": 1.4603880123957448e-06, "loss": 0.0014, "step": 399 },
    { "epoch": 24.81, "learning_rate": 1.425428638693489e-06, "loss": 0.0019, "step": 400 },
    { "epoch": 24.87, "learning_rate": 1.390860650378414e-06, "loss": 0.0014, "step": 401 },
    { "epoch": 24.93, "learning_rate": 1.356685625302625e-06, "loss": 0.0013, "step": 402 },
    { "epoch": 24.99, "learning_rate": 1.3229051233814637e-06, "loss": 0.0013, "step": 403 },
    { "epoch": 25.05, "learning_rate": 1.2895206865223065e-06, "loss": 0.0012, "step": 404 },
    { "epoch": 25.12, "learning_rate": 1.2565338385541792e-06, "loss": 0.0013, "step": 405 },
    { "epoch": 25.18, "learning_rate": 1.2239460851582118e-06, "loss": 0.0013, "step": 406 },
    { "epoch": 25.24, "learning_rate": 1.1917589137989006e-06, "loss": 0.0013, "step": 407 },
    { "epoch": 25.3, "learning_rate": 1.159973793656215e-06, "loss": 0.0014, "step": 408 },
    { "epoch": 25.36, "learning_rate": 1.1285921755585504e-06, "loss": 0.0013, "step": 409 },
    { "epoch": 25.43, "learning_rate": 1.097615491916485e-06, "loss": 0.0016, "step": 410 },
    { "epoch": 25.49, "learning_rate": 1.0670451566574102e-06, "loss": 0.0014, "step": 411 },
    { "epoch": 25.55, "learning_rate": 1.0368825651609893e-06, "loss": 0.0014, "step": 412 },
    { "epoch": 25.61, "learning_rate": 1.007129094195468e-06, "loss": 0.0013, "step": 413 },
    { "epoch": 25.67, "learning_rate": 9.77786101854825e-07, "loss": 0.0015, "step": 414 },
    { "epoch": 25.74, "learning_rate": 9.488549274967873e-07, "loss": 0.0012, "step": 415 },
    { "epoch": 25.8, "learning_rate": 9.203368916817012e-07, "loss": 0.0014, "step": 416 },
    { "epoch": 25.86, "learning_rate": 8.92233296112236e-07, "loss": 0.0012, "step": 417 },
    { "epoch": 25.92, "learning_rate": 8.645454235739903e-07, "loss": 0.0013, "step": 418 },
    { "epoch": 25.98, "learning_rate": 8.37274537876931e-07, "loss": 0.0014, "step": 419 },
    { "epoch": 26.05, "learning_rate": 8.10421883797694e-07, "loss": 0.0012, "step": 420 },
    { "epoch": 26.11, "learning_rate": 7.839886870227909e-07, "loss": 0.0013, "step": 421 },
    { "epoch": 26.17, "learning_rate": 7.579761540926434e-07, "loss": 0.0017, "step": 422 },
    { "epoch": 26.23, "learning_rate": 7.32385472346514e-07, "loss": 0.001, "step": 423 },
    { "epoch": 26.29, "learning_rate": 7.072178098683247e-07, "loss": 0.0015, "step": 424 },
    { "epoch": 26.36, "learning_rate": 6.824743154333157e-07, "loss": 0.0015, "step": 425 },
    { "epoch": 26.42, "learning_rate": 6.581561184556296e-07, "loss": 0.0013, "step": 426 },
    { "epoch": 26.48, "learning_rate": 6.342643289367523e-07, "loss": 0.0016, "step": 427 },
    { "epoch": 26.54, "learning_rate": 6.108000374148448e-07, "loss": 0.0013, "step": 428 },
    { "epoch": 26.6, "learning_rate": 5.877643149149669e-07, "loss": 0.0018, "step": 429 },
    { "epoch": 26.67, "learning_rate": 5.651582129001987e-07, "loss": 0.0011, "step": 430 },
    { "epoch": 26.73, "learning_rate": 5.429827632236284e-07, "loss": 0.0016, "step": 431 },
    { "epoch": 26.79, "learning_rate": 5.212389780812733e-07, "loss": 0.0012, "step": 432 },
    { "epoch": 26.85, "learning_rate": 4.999278499658667e-07, "loss": 0.0012, "step": 433 },
    { "epoch": 26.91, "learning_rate": 4.790503516215572e-07, "loss": 0.0013, "step": 434 },
    { "epoch": 26.98, "learning_rate": 4.5860743599951186e-07, "loss": 0.0013, "step": 435 },
    { "epoch": 27.04, "learning_rate": 4.3860003621441384e-07, "loss": 0.0011, "step": 436 },
    { "epoch": 27.1, "learning_rate": 4.190290655018736e-07, "loss": 0.0015, "step": 437 },
    { "epoch": 27.16, "learning_rate": 3.998954171767422e-07, "loss": 0.0011, "step": 438 },
    { "epoch": 27.22, "learning_rate": 3.8119996459234144e-07, "loss": 0.0011, "step": 439 },
    { "epoch": 27.29, "learning_rate": 3.629435611005916e-07, "loss": 0.0015, "step": 440 },
    { "epoch": 27.35, "learning_rate": 3.451270400130646e-07, "loss": 0.0015, "step": 441 },
    { "epoch": 27.41, "learning_rate": 3.2775121456295024e-07, "loss": 0.0012, "step": 442 },
    { "epoch": 27.47, "learning_rate": 3.10816877867931e-07, "loss": 0.0013, "step": 443 },
    { "epoch": 27.53, "learning_rate": 2.943248028939838e-07, "loss": 0.0016, "step": 444 },
    { "epoch": 27.6, "learning_rate": 2.7827574242009434e-07, "loss": 0.0015, "step": 445 },
    { "epoch": 27.66, "learning_rate": 2.6267042900390173e-07, "loss": 0.001, "step": 446 },
    { "epoch": 27.72, "learning_rate": 2.4750957494826033e-07, "loss": 0.0011, "step": 447 },
    { "epoch": 27.78, "learning_rate": 2.3279387226871842e-07, "loss": 0.0012, "step": 448 },
    { "epoch": 27.84, "learning_rate": 2.1852399266194312e-07, "loss": 0.0013, "step": 449 },
    { "epoch": 27.91, "learning_rate": 2.0470058747505516e-07, "loss": 0.0015, "step": 450 },
    { "epoch": 27.97, "learning_rate": 1.9132428767589471e-07, "loss": 0.0016, "step": 451 },
    { "epoch": 28.03, "learning_rate": 1.783957038242279e-07, "loss": 0.0014, "step": 452 },
    { "epoch": 28.09, "learning_rate": 1.6591542604387445e-07, "loss": 0.0012, "step": 453 },
    { "epoch": 28.16, "learning_rate": 1.538840239957684e-07, "loss": 0.0013, "step": 454 },
    { "epoch": 28.22, "learning_rate": 1.4230204685196202e-07, "loss": 0.0012, "step": 455 },
    { "epoch": 28.28, "learning_rate": 1.3117002327055927e-07, "loss": 0.0012, "step": 456 },
    { "epoch": 28.34, "learning_rate": 1.20488461371574e-07, "loss": 0.0013, "step": 457 },
    { "epoch": 28.4, "learning_rate": 1.102578487137529e-07, "loss": 0.0012, "step": 458 },
    { "epoch": 28.47, "learning_rate": 1.0047865227230891e-07, "loss": 0.001, "step": 459 },
    { "epoch": 28.53, "learning_rate": 9.11513184176116e-08, "loss": 0.0016, "step": 460 },
    { "epoch": 28.59, "learning_rate": 8.227627289481121e-08, "loss": 0.0014, "step": 461 },
    { "epoch": 28.65, "learning_rate": 7.385392080440535e-08, "loss": 0.0016, "step": 462 },
    { "epoch": 28.71, "learning_rate": 6.588464658374816e-08, "loss": 0.0011, "step": 463 },
    { "epoch": 28.78, "learning_rate": 5.836881398950667e-08, "loss": 0.0014, "step": 464 },
    { "epoch": 28.84, "learning_rate": 5.1306766081048456e-08, "loss": 0.0014, "step": 465 },
    { "epoch": 28.9, "learning_rate": 4.469882520479196e-08, "loss": 0.0015, "step": 466 },
    { "epoch": 28.96, "learning_rate": 3.854529297948606e-08, "loss": 0.0016, "step": 467 },
    { "epoch": 29.02, "learning_rate": 3.284645028244771e-08, "loss": 0.0011, "step": 468 },
    { "epoch": 29.09, "learning_rate": 2.760255723673888e-08, "loss": 0.0015, "step": 469 },
    { "epoch": 29.15, "learning_rate": 2.2813853199292745e-08, "loss": 0.0012, "step": 470 },
    { "epoch": 29.21, "learning_rate": 1.8480556749991274e-08, "loss": 0.0018, "step": 471 },
    { "epoch": 29.27, "learning_rate": 1.4602865681682122e-08, "loss": 0.0014, "step": 472 },
    { "epoch": 29.33, "learning_rate": 1.1180956991160285e-08, "loss": 0.0016, "step": 473 },
    { "epoch": 29.4, "learning_rate": 8.214986871076803e-09, "loss": 0.0011, "step": 474 },
    { "epoch": 29.46, "learning_rate": 5.705090702819993e-09, "loss": 0.0011, "step": 475 },
    { "epoch": 29.52, "learning_rate": 3.6513830503293047e-09, "loss": 0.0013, "step": 476 },
    { "epoch": 29.58, "learning_rate": 2.053957654871708e-09, "loss": 0.0014, "step": 477 },
    { "epoch": 29.64, "learning_rate": 9.128874307551273e-10, "loss": 0.0013, "step": 478 },
    { "epoch": 29.71, "learning_rate": 2.282244620088747e-10, "loss": 0.0013, "step": 479 },
    { "epoch": 29.77, "learning_rate": 0.0, "loss": 0.0016, "step": 480 },
    { "epoch": 29.77, "step": 480, "total_flos": 2.2343126815342592e+17, "train_loss": 0.07362916635223277, "train_runtime": 3434.0129, "train_samples_per_second": 18.023, "train_steps_per_second": 0.14 }
  ],
  "max_steps": 480,
  "num_train_epochs": 30,
  "total_flos": 2.2343126815342592e+17,
  "trial_name": null,
  "trial_params": null
}