{
  "best_metric": null,
  "best_model_checkpoint": null,
  "epoch": 2.9878213802435725,
  "eval_steps": 500,
  "global_step": 276,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.010825439783491205,
      "grad_norm": 5.8079091029435865,
      "learning_rate": 2.8571428571428573e-06,
      "loss": 0.8567,
      "step": 1
    },
    {
      "epoch": 0.02165087956698241,
      "grad_norm": 5.811672220394839,
      "learning_rate": 5.7142857142857145e-06,
      "loss": 0.8604,
      "step": 2
    },
    {
      "epoch": 0.03247631935047361,
      "grad_norm": 5.449507701601476,
      "learning_rate": 8.571428571428571e-06,
      "loss": 0.8528,
      "step": 3
    },
    {
      "epoch": 0.04330175913396482,
      "grad_norm": 2.3063006110984086,
      "learning_rate": 1.1428571428571429e-05,
      "loss": 0.776,
      "step": 4
    },
    {
      "epoch": 0.05412719891745602,
      "grad_norm": 3.9302521280440543,
      "learning_rate": 1.4285714285714287e-05,
      "loss": 0.7546,
      "step": 5
    },
    {
      "epoch": 0.06495263870094722,
      "grad_norm": 4.207181909155043,
      "learning_rate": 1.7142857142857142e-05,
      "loss": 0.751,
      "step": 6
    },
    {
      "epoch": 0.07577807848443843,
      "grad_norm": 4.360918963292297,
      "learning_rate": 2e-05,
      "loss": 0.7344,
      "step": 7
    },
    {
      "epoch": 0.08660351826792964,
      "grad_norm": 2.911250822571266,
      "learning_rate": 2.2857142857142858e-05,
      "loss": 0.729,
      "step": 8
    },
    {
      "epoch": 0.09742895805142084,
      "grad_norm": 2.7455493318657966,
      "learning_rate": 2.5714285714285718e-05,
      "loss": 0.672,
      "step": 9
    },
    {
      "epoch": 0.10825439783491204,
      "grad_norm": 2.1241241512314795,
      "learning_rate": 2.8571428571428574e-05,
      "loss": 0.6659,
      "step": 10
    },
    {
      "epoch": 0.11907983761840325,
      "grad_norm": 1.5668463996929993,
      "learning_rate": 3.142857142857143e-05,
      "loss": 0.6379,
      "step": 11
    },
    {
      "epoch": 0.12990527740189445,
      "grad_norm": 1.3619379997378058,
      "learning_rate": 3.4285714285714284e-05,
      "loss": 0.6305,
      "step": 12
    },
    {
      "epoch": 0.14073071718538566,
      "grad_norm": 1.566918593012661,
      "learning_rate": 3.714285714285715e-05,
      "loss": 0.6142,
      "step": 13
    },
    {
      "epoch": 0.15155615696887687,
      "grad_norm": 1.2044004334843796,
      "learning_rate": 4e-05,
      "loss": 0.6246,
      "step": 14
    },
    {
      "epoch": 0.16238159675236807,
      "grad_norm": 1.4087070173540086,
      "learning_rate": 4.2857142857142856e-05,
      "loss": 0.6075,
      "step": 15
    },
    {
      "epoch": 0.17320703653585928,
      "grad_norm": 1.203436631182031,
      "learning_rate": 4.5714285714285716e-05,
      "loss": 0.5986,
      "step": 16
    },
    {
      "epoch": 0.18403247631935046,
      "grad_norm": 1.722190196328099,
      "learning_rate": 4.857142857142857e-05,
      "loss": 0.5943,
      "step": 17
    },
    {
      "epoch": 0.19485791610284167,
      "grad_norm": 1.1305378018180656,
      "learning_rate": 5.1428571428571436e-05,
      "loss": 0.577,
      "step": 18
    },
    {
      "epoch": 0.20568335588633288,
      "grad_norm": 1.6031033848905385,
      "learning_rate": 5.4285714285714295e-05,
      "loss": 0.5697,
      "step": 19
    },
    {
      "epoch": 0.2165087956698241,
      "grad_norm": 1.6591800967410824,
      "learning_rate": 5.714285714285715e-05,
      "loss": 0.5746,
      "step": 20
    },
    {
      "epoch": 0.2273342354533153,
      "grad_norm": 1.5025149200657286,
      "learning_rate": 6.000000000000001e-05,
      "loss": 0.5683,
      "step": 21
    },
    {
      "epoch": 0.2381596752368065,
      "grad_norm": 1.2019447493505488,
      "learning_rate": 6.285714285714286e-05,
      "loss": 0.5676,
      "step": 22
    },
    {
      "epoch": 0.2489851150202977,
      "grad_norm": 1.9839452102168298,
      "learning_rate": 6.571428571428571e-05,
      "loss": 0.5639,
      "step": 23
    },
    {
      "epoch": 0.2598105548037889,
      "grad_norm": 1.34504393296771,
      "learning_rate": 6.857142857142857e-05,
      "loss": 0.5667,
      "step": 24
    },
    {
      "epoch": 0.2706359945872801,
      "grad_norm": 2.227974826889621,
      "learning_rate": 7.142857142857143e-05,
      "loss": 0.5639,
      "step": 25
    },
    {
      "epoch": 0.2814614343707713,
      "grad_norm": 1.6397454252709596,
      "learning_rate": 7.42857142857143e-05,
      "loss": 0.5434,
      "step": 26
    },
    {
      "epoch": 0.2922868741542625,
      "grad_norm": 1.7236920014146049,
      "learning_rate": 7.714285714285715e-05,
      "loss": 0.5453,
      "step": 27
    },
    {
      "epoch": 0.30311231393775373,
      "grad_norm": 1.3679886100442236,
      "learning_rate": 8e-05,
      "loss": 0.5518,
      "step": 28
    },
    {
      "epoch": 0.31393775372124494,
      "grad_norm": 1.7636609095058786,
      "learning_rate": 7.999679062421315e-05,
      "loss": 0.5488,
      "step": 29
    },
    {
      "epoch": 0.32476319350473615,
      "grad_norm": 1.8909077915573775,
      "learning_rate": 7.998716301185722e-05,
      "loss": 0.5434,
      "step": 30
    },
    {
      "epoch": 0.33558863328822736,
      "grad_norm": 1.4831299671419245,
      "learning_rate": 7.997111870786354e-05,
      "loss": 0.543,
      "step": 31
    },
    {
      "epoch": 0.34641407307171856,
      "grad_norm": 1.5685011400813527,
      "learning_rate": 7.994866028684212e-05,
      "loss": 0.5386,
      "step": 32
    },
    {
      "epoch": 0.3572395128552097,
      "grad_norm": 1.8163117507870878,
      "learning_rate": 7.991979135266861e-05,
      "loss": 0.5426,
      "step": 33
    },
    {
      "epoch": 0.3680649526387009,
      "grad_norm": 1.3553007909847135,
      "learning_rate": 7.98845165379059e-05,
      "loss": 0.5419,
      "step": 34
    },
    {
      "epoch": 0.37889039242219213,
      "grad_norm": 1.5002520273898525,
      "learning_rate": 7.984284150306085e-05,
      "loss": 0.525,
      "step": 35
    },
    {
      "epoch": 0.38971583220568334,
      "grad_norm": 1.5788765506619407,
      "learning_rate": 7.97947729356758e-05,
      "loss": 0.5161,
      "step": 36
    },
    {
      "epoch": 0.40054127198917455,
      "grad_norm": 1.0932865412546702,
      "learning_rate": 7.974031854925562e-05,
      "loss": 0.5241,
      "step": 37
    },
    {
      "epoch": 0.41136671177266576,
      "grad_norm": 1.1169446884746776,
      "learning_rate": 7.967948708202972e-05,
      "loss": 0.5135,
      "step": 38
    },
    {
      "epoch": 0.42219215155615697,
      "grad_norm": 1.7368905888186483,
      "learning_rate": 7.961228829555003e-05,
      "loss": 0.5232,
      "step": 39
    },
    {
      "epoch": 0.4330175913396482,
      "grad_norm": 1.14825749468684,
      "learning_rate": 7.953873297312447e-05,
      "loss": 0.5114,
      "step": 40
    },
    {
      "epoch": 0.4438430311231394,
      "grad_norm": 1.681948559743569,
      "learning_rate": 7.945883291808655e-05,
      "loss": 0.5151,
      "step": 41
    },
    {
      "epoch": 0.4546684709066306,
      "grad_norm": 1.1384144753190577,
      "learning_rate": 7.937260095190137e-05,
      "loss": 0.5079,
      "step": 42
    },
    {
      "epoch": 0.4654939106901218,
      "grad_norm": 1.6677218265436802,
      "learning_rate": 7.928005091210817e-05,
      "loss": 0.5128,
      "step": 43
    },
    {
      "epoch": 0.476319350473613,
      "grad_norm": 1.4237449938878564,
      "learning_rate": 7.918119765009979e-05,
      "loss": 0.511,
      "step": 44
    },
    {
      "epoch": 0.4871447902571042,
      "grad_norm": 1.3134667589020321,
      "learning_rate": 7.907605702873948e-05,
      "loss": 0.5101,
      "step": 45
    },
    {
      "epoch": 0.4979702300405954,
      "grad_norm": 1.0853939092704676,
      "learning_rate": 7.896464591981549e-05,
      "loss": 0.5086,
      "step": 46
    },
    {
      "epoch": 0.5087956698240866,
      "grad_norm": 1.6016710689451,
      "learning_rate": 7.884698220133357e-05,
      "loss": 0.5163,
      "step": 47
    },
    {
      "epoch": 0.5196211096075778,
      "grad_norm": 1.203978171530331,
      "learning_rate": 7.872308475464818e-05,
      "loss": 0.5086,
      "step": 48
    },
    {
      "epoch": 0.530446549391069,
      "grad_norm": 1.3897099566312143,
      "learning_rate": 7.859297346143258e-05,
      "loss": 0.5051,
      "step": 49
    },
    {
      "epoch": 0.5412719891745602,
      "grad_norm": 1.2903904022789845,
      "learning_rate": 7.84566692004885e-05,
      "loss": 0.5036,
      "step": 50
    },
    {
      "epoch": 0.5520974289580515,
      "grad_norm": 0.9821064573534952,
      "learning_rate": 7.831419384439565e-05,
      "loss": 0.4979,
      "step": 51
    },
    {
      "epoch": 0.5629228687415426,
      "grad_norm": 1.1967263169199975,
      "learning_rate": 7.816557025600196e-05,
      "loss": 0.4954,
      "step": 52
    },
    {
      "epoch": 0.5737483085250338,
      "grad_norm": 1.5118785206523027,
      "learning_rate": 7.80108222847547e-05,
      "loss": 0.4935,
      "step": 53
    },
    {
      "epoch": 0.584573748308525,
      "grad_norm": 1.328515593795635,
      "learning_rate": 7.784997476287349e-05,
      "loss": 0.5029,
      "step": 54
    },
    {
      "epoch": 0.5953991880920162,
      "grad_norm": 1.085170937371965,
      "learning_rate": 7.76830535013654e-05,
      "loss": 0.4922,
      "step": 55
    },
    {
      "epoch": 0.6062246278755075,
      "grad_norm": 1.1543845766613074,
      "learning_rate": 7.751008528588322e-05,
      "loss": 0.4954,
      "step": 56
    },
    {
      "epoch": 0.6170500676589986,
      "grad_norm": 1.2151109406542857,
      "learning_rate": 7.733109787242708e-05,
      "loss": 0.4981,
      "step": 57
    },
    {
      "epoch": 0.6278755074424899,
      "grad_norm": 1.6809116509174178,
      "learning_rate": 7.71461199828905e-05,
      "loss": 0.4992,
      "step": 58
    },
    {
      "epoch": 0.638700947225981,
      "grad_norm": 1.1929403575426991,
      "learning_rate": 7.695518130045147e-05,
      "loss": 0.4804,
      "step": 59
    },
    {
      "epoch": 0.6495263870094723,
      "grad_norm": 1.3373806033082491,
      "learning_rate": 7.675831246480923e-05,
      "loss": 0.4918,
      "step": 60
    },
    {
      "epoch": 0.6603518267929634,
      "grad_norm": 1.1370461825495959,
      "learning_rate": 7.655554506726747e-05,
      "loss": 0.5003,
      "step": 61
    },
    {
      "epoch": 0.6711772665764547,
      "grad_norm": 1.5541020268182568,
      "learning_rate": 7.6346911645665e-05,
      "loss": 0.4872,
      "step": 62
    },
    {
      "epoch": 0.6820027063599459,
      "grad_norm": 1.0196032384903368,
      "learning_rate": 7.61324456791544e-05,
      "loss": 0.4971,
      "step": 63
    },
    {
      "epoch": 0.6928281461434371,
      "grad_norm": 1.355258802840393,
      "learning_rate": 7.591218158282968e-05,
      "loss": 0.4961,
      "step": 64
    },
    {
      "epoch": 0.7036535859269283,
      "grad_norm": 1.5317006352800375,
      "learning_rate": 7.568615470220369e-05,
      "loss": 0.4836,
      "step": 65
    },
    {
      "epoch": 0.7144790257104194,
      "grad_norm": 0.9602544899213347,
      "learning_rate": 7.545440130753634e-05,
      "loss": 0.4934,
      "step": 66
    },
    {
      "epoch": 0.7253044654939107,
      "grad_norm": 1.3937893813198252,
      "learning_rate": 7.52169585880143e-05,
      "loss": 0.492,
      "step": 67
    },
    {
      "epoch": 0.7361299052774019,
      "grad_norm": 1.2170568243121482,
      "learning_rate": 7.497386464578329e-05,
      "loss": 0.5004,
      "step": 68
    },
    {
      "epoch": 0.7469553450608931,
      "grad_norm": 1.215088332835355,
      "learning_rate": 7.472515848983394e-05,
      "loss": 0.4927,
      "step": 69
    },
    {
      "epoch": 0.7577807848443843,
      "grad_norm": 1.068568427145697,
      "learning_rate": 7.447088002974199e-05,
      "loss": 0.4735,
      "step": 70
    },
    {
      "epoch": 0.7686062246278755,
      "grad_norm": 1.2666434730845704,
      "learning_rate": 7.421107006926408e-05,
      "loss": 0.4776,
      "step": 71
    },
    {
      "epoch": 0.7794316644113667,
      "grad_norm": 1.0693035758098317,
      "learning_rate": 7.394577029979004e-05,
      "loss": 0.4878,
      "step": 72
    },
    {
      "epoch": 0.790257104194858,
      "grad_norm": 1.5502809637898465,
      "learning_rate": 7.367502329365268e-05,
      "loss": 0.4949,
      "step": 73
    },
    {
      "epoch": 0.8010825439783491,
      "grad_norm": 0.8873154529201425,
      "learning_rate": 7.33988724972963e-05,
      "loss": 0.4889,
      "step": 74
    },
    {
      "epoch": 0.8119079837618404,
      "grad_norm": 1.27058950939853,
      "learning_rate": 7.311736222430487e-05,
      "loss": 0.4861,
      "step": 75
    },
    {
      "epoch": 0.8227334235453315,
      "grad_norm": 0.7866888980990746,
      "learning_rate": 7.283053764829106e-05,
      "loss": 0.4783,
      "step": 76
    },
    {
      "epoch": 0.8335588633288228,
      "grad_norm": 1.0938731157860102,
      "learning_rate": 7.253844479564737e-05,
      "loss": 0.4764,
      "step": 77
    },
    {
      "epoch": 0.8443843031123139,
      "grad_norm": 1.1807281415252031,
      "learning_rate": 7.224113053816021e-05,
      "loss": 0.4817,
      "step": 78
    },
    {
      "epoch": 0.8552097428958051,
      "grad_norm": 1.322179202967281,
      "learning_rate": 7.193864258548855e-05,
      "loss": 0.4853,
      "step": 79
    },
    {
      "epoch": 0.8660351826792964,
      "grad_norm": 1.2155369734676524,
      "learning_rate": 7.163102947750794e-05,
      "loss": 0.4796,
      "step": 80
    },
    {
      "epoch": 0.8768606224627875,
      "grad_norm": 0.588289504862737,
      "learning_rate": 7.131834057652142e-05,
      "loss": 0.4863,
      "step": 81
    },
    {
      "epoch": 0.8876860622462788,
      "grad_norm": 1.0985408806535708,
      "learning_rate": 7.100062605933835e-05,
      "loss": 0.4762,
      "step": 82
    },
    {
      "epoch": 0.8985115020297699,
      "grad_norm": 1.74082517425262,
      "learning_rate": 7.067793690922268e-05,
      "loss": 0.4853,
      "step": 83
    },
    {
      "epoch": 0.9093369418132612,
      "grad_norm": 0.7638072853814706,
      "learning_rate": 7.035032490771165e-05,
      "loss": 0.4794,
      "step": 84
    },
    {
      "epoch": 0.9201623815967523,
      "grad_norm": 1.2164285440187212,
      "learning_rate": 7.001784262630652e-05,
      "loss": 0.48,
      "step": 85
    },
    {
      "epoch": 0.9309878213802436,
      "grad_norm": 1.2499859479078839,
      "learning_rate": 6.968054341803644e-05,
      "loss": 0.4853,
      "step": 86
    },
    {
      "epoch": 0.9418132611637348,
      "grad_norm": 1.1495652891036365,
      "learning_rate": 6.933848140889705e-05,
      "loss": 0.476,
      "step": 87
    },
    {
      "epoch": 0.952638700947226,
      "grad_norm": 1.1590588610177872,
      "learning_rate": 6.89917114891648e-05,
      "loss": 0.4653,
      "step": 88
    },
    {
      "epoch": 0.9634641407307172,
      "grad_norm": 0.8043870196059654,
      "learning_rate": 6.864028930458892e-05,
      "loss": 0.4788,
      "step": 89
    },
    {
      "epoch": 0.9742895805142084,
      "grad_norm": 0.8931105851691834,
      "learning_rate": 6.828427124746191e-05,
      "loss": 0.4776,
      "step": 90
    },
    {
      "epoch": 0.9851150202976996,
      "grad_norm": 1.2474558559687694,
      "learning_rate": 6.792371444757037e-05,
      "loss": 0.4803,
      "step": 91
    },
    {
      "epoch": 0.9959404600811907,
      "grad_norm": 0.8560601402041761,
      "learning_rate": 6.755867676302747e-05,
      "loss": 0.4798,
      "step": 92
    },
    {
      "epoch": 1.006765899864682,
      "grad_norm": 2.0148609986627783,
      "learning_rate": 6.718921677098853e-05,
      "loss": 0.7491,
      "step": 93
    },
    {
      "epoch": 1.0175913396481733,
      "grad_norm": 0.6667855075813555,
      "learning_rate": 6.681539375825115e-05,
      "loss": 0.4535,
      "step": 94
    },
    {
      "epoch": 1.0284167794316643,
      "grad_norm": 0.9162841549268478,
      "learning_rate": 6.643726771174164e-05,
      "loss": 0.455,
      "step": 95
    },
    {
      "epoch": 1.0392422192151556,
      "grad_norm": 1.0155165692532704,
      "learning_rate": 6.60548993088889e-05,
      "loss": 0.454,
      "step": 96
    },
    {
      "epoch": 1.0500676589986468,
      "grad_norm": 1.078599089633044,
      "learning_rate": 6.56683499078876e-05,
      "loss": 0.4455,
      "step": 97
    },
    {
      "epoch": 1.060893098782138,
      "grad_norm": 1.1351986372223781,
      "learning_rate": 6.527768153785216e-05,
      "loss": 0.4541,
      "step": 98
    },
    {
      "epoch": 1.0717185385656292,
      "grad_norm": 0.7315670031697065,
      "learning_rate": 6.488295688886295e-05,
      "loss": 0.4479,
      "step": 99
    },
    {
      "epoch": 1.0825439783491204,
      "grad_norm": 1.3582463875830837,
      "learning_rate": 6.448423930190653e-05,
      "loss": 0.4486,
      "step": 100
    },
    {
      "epoch": 1.0933694181326117,
      "grad_norm": 0.8080345177396737,
      "learning_rate": 6.408159275871132e-05,
      "loss": 0.446,
      "step": 101
    },
    {
      "epoch": 1.104194857916103,
      "grad_norm": 1.160068769869394,
      "learning_rate": 6.36750818714807e-05,
      "loss": 0.4471,
      "step": 102
    },
    {
      "epoch": 1.115020297699594,
      "grad_norm": 0.8897562278529675,
      "learning_rate": 6.326477187252455e-05,
      "loss": 0.449,
      "step": 103
    },
    {
      "epoch": 1.1258457374830853,
      "grad_norm": 1.0199996165037868,
      "learning_rate": 6.28507286037917e-05,
      "loss": 0.4427,
      "step": 104
    },
    {
      "epoch": 1.1366711772665765,
      "grad_norm": 0.7249873315321067,
      "learning_rate": 6.243301850630419e-05,
      "loss": 0.4453,
      "step": 105
    },
    {
      "epoch": 1.1474966170500678,
      "grad_norm": 1.189445348682187,
      "learning_rate": 6.201170860949565e-05,
      "loss": 0.4515,
      "step": 106
    },
    {
      "epoch": 1.1583220568335588,
      "grad_norm": 1.1424188944548885,
      "learning_rate": 6.15868665204552e-05,
      "loss": 0.4452,
      "step": 107
    },
    {
      "epoch": 1.16914749661705,
      "grad_norm": 0.8857107534333528,
      "learning_rate": 6.11585604130785e-05,
      "loss": 0.4393,
      "step": 108
    },
    {
      "epoch": 1.1799729364005414,
      "grad_norm": 0.7495223764013124,
      "learning_rate": 6.072685901712808e-05,
      "loss": 0.4446,
      "step": 109
    },
    {
      "epoch": 1.1907983761840324,
      "grad_norm": 0.6658761551232241,
      "learning_rate": 6.02918316072043e-05,
      "loss": 0.4499,
      "step": 110
    },
    {
      "epoch": 1.2016238159675237,
      "grad_norm": 0.6905937205369403,
      "learning_rate": 5.9853547991628967e-05,
      "loss": 0.4477,
      "step": 111
    },
    {
      "epoch": 1.212449255751015,
      "grad_norm": 0.6381399917783538,
      "learning_rate": 5.941207850124325e-05,
      "loss": 0.4421,
      "step": 112
    },
    {
      "epoch": 1.2232746955345062,
      "grad_norm": 0.7541941618755881,
      "learning_rate": 5.896749397812181e-05,
      "loss": 0.4382,
      "step": 113
    },
    {
      "epoch": 1.2341001353179972,
      "grad_norm": 0.5923805845803851,
      "learning_rate": 5.8519865764204834e-05,
      "loss": 0.4459,
      "step": 114
    },
    {
      "epoch": 1.2449255751014885,
      "grad_norm": 0.6109388260249382,
      "learning_rate": 5.8069265689849884e-05,
      "loss": 0.4513,
      "step": 115
    },
    {
      "epoch": 1.2557510148849798,
      "grad_norm": 0.533980708840664,
      "learning_rate": 5.761576606230538e-05,
      "loss": 0.443,
      "step": 116
    },
    {
      "epoch": 1.266576454668471,
      "grad_norm": 0.7052629150640184,
      "learning_rate": 5.7159439654107506e-05,
      "loss": 0.4404,
      "step": 117
    },
    {
      "epoch": 1.277401894451962,
      "grad_norm": 0.6111064530168573,
      "learning_rate": 5.6700359691402533e-05,
      "loss": 0.4402,
      "step": 118
    },
    {
      "epoch": 1.2882273342354533,
      "grad_norm": 0.5508566705964758,
      "learning_rate": 5.6238599842196285e-05,
      "loss": 0.4449,
      "step": 119
    },
    {
      "epoch": 1.2990527740189446,
      "grad_norm": 0.6028745516198664,
      "learning_rate": 5.5774234204532746e-05,
      "loss": 0.4365,
      "step": 120
    },
    {
      "epoch": 1.3098782138024356,
      "grad_norm": 0.721324745203013,
      "learning_rate": 5.5307337294603595e-05,
      "loss": 0.4442,
      "step": 121
    },
    {
      "epoch": 1.320703653585927,
      "grad_norm": 0.9341313496474372,
      "learning_rate": 5.483798403479072e-05,
      "loss": 0.4403,
      "step": 122
    },
    {
      "epoch": 1.3315290933694182,
      "grad_norm": 1.0476784062313165,
      "learning_rate": 5.436624974164349e-05,
      "loss": 0.4434,
      "step": 123
    },
    {
      "epoch": 1.3423545331529092,
      "grad_norm": 0.917250294000449,
      "learning_rate": 5.389221011379281e-05,
      "loss": 0.4409,
      "step": 124
    },
    {
      "epoch": 1.3531799729364005,
      "grad_norm": 0.7818649176171457,
      "learning_rate": 5.3415941219803895e-05,
      "loss": 0.4405,
      "step": 125
    },
    {
      "epoch": 1.3640054127198917,
      "grad_norm": 0.5772276458708397,
      "learning_rate": 5.2937519485969525e-05,
      "loss": 0.4397,
      "step": 126
    },
    {
      "epoch": 1.374830852503383,
      "grad_norm": 0.4347968698038521,
      "learning_rate": 5.245702168404616e-05,
      "loss": 0.4325,
      "step": 127
    },
    {
      "epoch": 1.3856562922868743,
      "grad_norm": 0.49507391797694716,
      "learning_rate": 5.1974524918934336e-05,
      "loss": 0.4353,
      "step": 128
    },
    {
      "epoch": 1.3964817320703653,
      "grad_norm": 0.49182716841159074,
      "learning_rate": 5.14901066163058e-05,
      "loss": 0.4341,
      "step": 129
    },
    {
      "epoch": 1.4073071718538566,
      "grad_norm": 0.5209355239170891,
      "learning_rate": 5.1003844510179126e-05,
      "loss": 0.4391,
      "step": 130
    },
    {
      "epoch": 1.4181326116373478,
      "grad_norm": 0.555660282513782,
      "learning_rate": 5.0515816630445795e-05,
      "loss": 0.4408,
      "step": 131
    },
    {
      "epoch": 1.4289580514208389,
      "grad_norm": 0.4906371694319118,
      "learning_rate": 5.002610129034883e-05,
      "loss": 0.442,
      "step": 132
    },
    {
      "epoch": 1.4397834912043301,
      "grad_norm": 0.39564350538031545,
      "learning_rate": 4.953477707391597e-05,
      "loss": 0.4359,
      "step": 133
    },
    {
      "epoch": 1.4506089309878214,
      "grad_norm": 0.36420248532962185,
      "learning_rate": 4.90419228233494e-05,
      "loss": 0.4371,
      "step": 134
    },
    {
      "epoch": 1.4614343707713127,
      "grad_norm": 0.3693636826961334,
      "learning_rate": 4.854761762637403e-05,
      "loss": 0.4375,
      "step": 135
    },
    {
      "epoch": 1.472259810554804,
      "grad_norm": 0.37103016749538226,
      "learning_rate": 4.805194080354641e-05,
      "loss": 0.4367,
      "step": 136
    },
    {
      "epoch": 1.483085250338295,
      "grad_norm": 0.4634070182199332,
      "learning_rate": 4.7554971895526175e-05,
      "loss": 0.434,
      "step": 137
    },
    {
      "epoch": 1.4939106901217862,
      "grad_norm": 0.39637839232896416,
      "learning_rate": 4.705679065031235e-05,
      "loss": 0.4405,
      "step": 138
    },
    {
      "epoch": 1.5047361299052775,
      "grad_norm": 0.4167931396685767,
      "learning_rate": 4.6557477010446206e-05,
      "loss": 0.439,
      "step": 139
    },
    {
      "epoch": 1.5155615696887685,
      "grad_norm": 0.34078264652362333,
      "learning_rate": 4.605711110018307e-05,
      "loss": 0.4383,
      "step": 140
    },
    {
      "epoch": 1.5263870094722598,
      "grad_norm": 0.326633599627453,
      "learning_rate": 4.555577321263477e-05,
      "loss": 0.4344,
      "step": 141
    },
    {
      "epoch": 1.537212449255751,
      "grad_norm": 0.3976277159519867,
      "learning_rate": 4.505354379688518e-05,
      "loss": 0.4354,
      "step": 142
    },
    {
      "epoch": 1.548037889039242,
      "grad_norm": 0.4357572785065164,
      "learning_rate": 4.4550503445080606e-05,
      "loss": 0.4417,
      "step": 143
    },
    {
      "epoch": 1.5588633288227334,
      "grad_norm": 0.3916962827302459,
      "learning_rate": 4.4046732879497295e-05,
      "loss": 0.4341,
      "step": 144
    },
    {
      "epoch": 1.5696887686062246,
      "grad_norm": 0.30012283877463697,
      "learning_rate": 4.354231293958801e-05,
      "loss": 0.4322,
      "step": 145
    },
    {
      "epoch": 1.5805142083897157,
      "grad_norm": 0.2885107465168404,
      "learning_rate": 4.3037324569009854e-05,
      "loss": 0.4323,
      "step": 146
    },
    {
      "epoch": 1.5913396481732072,
      "grad_norm": 0.26624048862887445,
      "learning_rate": 4.2531848802635264e-05,
      "loss": 0.4278,
      "step": 147
    },
    {
      "epoch": 1.6021650879566982,
      "grad_norm": 0.3506386895842403,
      "learning_rate": 4.202596675354851e-05,
      "loss": 0.4479,
      "step": 148
    },
    {
      "epoch": 1.6129905277401895,
      "grad_norm": 0.40487113874803043,
      "learning_rate": 4.151975960002958e-05,
      "loss": 0.4302,
      "step": 149
    },
    {
      "epoch": 1.6238159675236807,
      "grad_norm": 0.23297174487170752,
      "learning_rate": 4.101330857252752e-05,
      "loss": 0.4367,
      "step": 150
    },
    {
      "epoch": 1.6346414073071718,
      "grad_norm": 0.30369826324347926,
      "learning_rate": 4.050669494062561e-05,
      "loss": 0.4372,
      "step": 151
    },
    {
      "epoch": 1.645466847090663,
      "grad_norm": 0.29979293086538217,
      "learning_rate": 4e-05,
      "loss": 0.4308,
      "step": 152
    },
    {
      "epoch": 1.6562922868741543,
      "grad_norm": 0.21998478305579725,
      "learning_rate": 3.9493305059374405e-05,
      "loss": 0.444,
      "step": 153
    },
    {
      "epoch": 1.6671177266576453,
      "grad_norm": 0.2822111397888722,
      "learning_rate": 3.8986691427472496e-05,
      "loss": 0.4362,
      "step": 154
    },
    {
      "epoch": 1.6779431664411368,
      "grad_norm": 0.19298090698642797,
      "learning_rate": 3.8480240399970436e-05,
      "loss": 0.4291,
      "step": 155
    },
    {
      "epoch": 1.6887686062246279,
      "grad_norm": 0.24201705060461937,
      "learning_rate": 3.7974033246451496e-05,
      "loss": 0.4374,
      "step": 156
    },
    {
      "epoch": 1.699594046008119,
      "grad_norm": 0.2926866609659229,
      "learning_rate": 3.746815119736475e-05,
      "loss": 0.4237,
      "step": 157
    },
    {
      "epoch": 1.7104194857916104,
      "grad_norm": 0.22986191257503155,
      "learning_rate": 3.696267543099016e-05,
      "loss": 0.4349,
      "step": 158
    },
    {
      "epoch": 1.7212449255751014,
      "grad_norm": 0.21762455575624456,
      "learning_rate": 3.6457687060412e-05,
      "loss": 0.4411,
      "step": 159
    },
    {
      "epoch": 1.7320703653585927,
      "grad_norm": 0.23006031819487235,
      "learning_rate": 3.595326712050272e-05,
      "loss": 0.4352,
      "step": 160
    },
    {
      "epoch": 1.742895805142084,
      "grad_norm": 0.23480749792102354,
      "learning_rate": 3.5449496554919414e-05,
      "loss": 0.432,
      "step": 161
    },
    {
      "epoch": 1.753721244925575,
      "grad_norm": 0.20514925914229476,
      "learning_rate": 3.494645620311484e-05,
      "loss": 0.4374,
      "step": 162
    },
    {
      "epoch": 1.7645466847090663,
      "grad_norm": 0.21723286316559756,
      "learning_rate": 3.444422678736525e-05,
      "loss": 0.4309,
      "step": 163
    },
    {
      "epoch": 1.7753721244925575,
      "grad_norm": 0.19908292246838455,
      "learning_rate": 3.394288889981695e-05,
      "loss": 0.4285,
      "step": 164
    },
    {
      "epoch": 1.7861975642760486,
      "grad_norm": 0.21848824155386468,
      "learning_rate": 3.34425229895538e-05,
      "loss": 0.4358,
      "step": 165
    },
    {
      "epoch": 1.79702300405954,
      "grad_norm": 0.19073388047330828,
      "learning_rate": 3.294320934968768e-05,
      "loss": 0.4285,
      "step": 166
    },
    {
      "epoch": 1.8078484438430311,
      "grad_norm": 0.20593306151831323,
      "learning_rate": 3.2445028104473845e-05,
      "loss": 0.4347,
      "step": 167
    },
    {
      "epoch": 1.8186738836265224,
      "grad_norm": 0.21441680855669412,
      "learning_rate": 3.194805919645359e-05,
      "loss": 0.4308,
      "step": 168
    },
    {
      "epoch": 1.8294993234100136,
      "grad_norm": 0.18002633127557865,
      "learning_rate": 3.145238237362596e-05,
      "loss": 0.4392,
      "step": 169
    },
    {
      "epoch": 1.8403247631935047,
      "grad_norm": 0.18961769159187056,
      "learning_rate": 3.0958077176650606e-05,
      "loss": 0.43,
      "step": 170
    },
    {
      "epoch": 1.851150202976996,
      "grad_norm": 0.16827003419417288,
      "learning_rate": 3.0465222926084036e-05,
      "loss": 0.426,
      "step": 171
    },
    {
      "epoch": 1.8619756427604872,
      "grad_norm": 0.1766724359697219,
      "learning_rate": 2.997389870965118e-05,
      "loss": 0.4273,
      "step": 172
    },
    {
      "epoch": 1.8728010825439783,
      "grad_norm": 0.1937265928321859,
      "learning_rate": 2.948418336955421e-05,
      "loss": 0.4216,
      "step": 173
    },
    {
      "epoch": 1.8836265223274695,
      "grad_norm": 0.1733839063792691,
      "learning_rate": 2.899615548982088e-05,
      "loss": 0.4342,
      "step": 174
    },
    {
      "epoch": 1.8944519621109608,
      "grad_norm": 0.1691834042941349,
      "learning_rate": 2.8509893383694213e-05,
      "loss": 0.428,
      "step": 175
    },
    {
      "epoch": 1.9052774018944518,
      "grad_norm": 0.17556950089441328,
      "learning_rate": 2.8025475081065684e-05,
      "loss": 0.4235,
      "step": 176
    },
    {
      "epoch": 1.9161028416779433,
      "grad_norm": 0.17292630455576483,
      "learning_rate": 2.754297831595385e-05,
      "loss": 0.4242,
      "step": 177
    },
    {
      "epoch": 1.9269282814614344,
      "grad_norm": 0.203395158544133,
      "learning_rate": 2.7062480514030478e-05,
      "loss": 0.4346,
      "step": 178
    },
    {
      "epoch": 1.9377537212449256,
      "grad_norm": 0.17761313930923656,
      "learning_rate": 2.658405878019612e-05,
      "loss": 0.4284,
      "step": 179
    },
    {
      "epoch": 1.9485791610284169,
      "grad_norm": 0.1857053642993675,
      "learning_rate": 2.6107789886207195e-05,
      "loss": 0.4311,
      "step": 180
    },
    {
      "epoch": 1.959404600811908,
      "grad_norm": 0.168623231091507,
      "learning_rate": 2.563375025835652e-05,
      "loss": 0.4296,
      "step": 181
    },
    {
      "epoch": 1.9702300405953992,
      "grad_norm": 0.16395731003065203,
      "learning_rate": 2.5162015965209295e-05,
      "loss": 0.4263,
      "step": 182
    },
    {
      "epoch": 1.9810554803788905,
      "grad_norm": 0.17813784128811913,
      "learning_rate": 2.4692662705396412e-05,
      "loss": 0.4337,
      "step": 183
    },
    {
      "epoch": 1.9918809201623815,
      "grad_norm": 0.1692020847630486,
      "learning_rate": 2.4225765795467267e-05,
      "loss": 0.4345,
      "step": 184
    },
    {
      "epoch": 2.002706359945873,
      "grad_norm": 0.29642981794747025,
      "learning_rate": 2.376140015780372e-05,
      "loss": 0.6828,
      "step": 185
    },
    {
      "epoch": 2.013531799729364,
      "grad_norm": 0.22282508513365898,
      "learning_rate": 2.3299640308597487e-05,
      "loss": 0.4033,
      "step": 186
    },
    {
      "epoch": 2.024357239512855,
      "grad_norm": 0.19906807218157607,
      "learning_rate": 2.2840560345892518e-05,
      "loss": 0.4028,
      "step": 187
    },
    {
      "epoch": 2.0351826792963466,
      "grad_norm": 0.21438465882623423,
      "learning_rate": 2.2384233937694626e-05,
      "loss": 0.4036,
      "step": 188
    },
    {
      "epoch": 2.0460081190798376,
      "grad_norm": 0.20912970610685283,
      "learning_rate": 2.1930734310150116e-05,
      "loss": 0.3975,
      "step": 189
    },
    {
      "epoch": 2.0568335588633286,
      "grad_norm": 0.2005177294111497,
      "learning_rate": 2.1480134235795173e-05,
      "loss": 0.4022,
      "step": 190
    },
    {
      "epoch": 2.06765899864682,
      "grad_norm": 0.19782562683985525,
      "learning_rate": 2.10325060218782e-05,
      "loss": 0.4073,
      "step": 191
    },
    {
      "epoch": 2.078484438430311,
      "grad_norm": 0.23611436091693633,
      "learning_rate": 2.0587921498756768e-05,
      "loss": 0.4109,
      "step": 192
    },
    {
      "epoch": 2.089309878213802,
      "grad_norm": 0.20974660868469683,
      "learning_rate": 2.014645200837105e-05,
      "loss": 0.4034,
      "step": 193
    },
    {
      "epoch": 2.1001353179972937,
      "grad_norm": 0.19267649854044877,
      "learning_rate": 1.9708168392795718e-05,
      "loss": 0.3953,
      "step": 194
    },
    {
      "epoch": 2.1109607577807847,
      "grad_norm": 0.22011389396639292,
      "learning_rate": 1.9273140982871936e-05,
      "loss": 0.4047,
      "step": 195
    },
    {
      "epoch": 2.121786197564276,
      "grad_norm": 0.18180878911713266,
      "learning_rate": 1.8841439586921515e-05,
      "loss": 0.4039,
      "step": 196
    },
    {
      "epoch": 2.1326116373477673,
      "grad_norm": 0.21012423766700125,
      "learning_rate": 1.841313347954482e-05,
      "loss": 0.3986,
      "step": 197
    },
    {
      "epoch": 2.1434370771312583,
      "grad_norm": 0.15463640007730903,
      "learning_rate": 1.7988291390504348e-05,
      "loss": 0.3975,
      "step": 198
    },
    {
      "epoch": 2.15426251691475,
      "grad_norm": 0.193505600575409,
      "learning_rate": 1.7566981493695828e-05,
      "loss": 0.4043,
      "step": 199
    },
    {
      "epoch": 2.165087956698241,
      "grad_norm": 0.16774935171850072,
      "learning_rate": 1.71492713962083e-05,
      "loss": 0.4068,
      "step": 200
    },
    {
      "epoch": 2.175913396481732,
      "grad_norm": 0.15543728413541416,
      "learning_rate": 1.673522812747544e-05,
      "loss": 0.4025,
      "step": 201
    },
    {
      "epoch": 2.1867388362652234,
      "grad_norm": 0.1581064576242491,
      "learning_rate": 1.6324918128519306e-05,
      "loss": 0.3995,
      "step": 202
    },
    {
      "epoch": 2.1975642760487144,
      "grad_norm": 0.1433403956609895,
      "learning_rate": 1.5918407241288678e-05,
      "loss": 0.406,
      "step": 203
    },
    {
      "epoch": 2.208389715832206,
      "grad_norm": 0.1444921358019384,
      "learning_rate": 1.5515760698093485e-05,
      "loss": 0.4043,
      "step": 204
    },
    {
      "epoch": 2.219215155615697,
      "grad_norm": 0.14781491117947443,
      "learning_rate": 1.511704311113705e-05,
      "loss": 0.4024,
      "step": 205
    },
    {
      "epoch": 2.230040595399188,
      "grad_norm": 0.1394579893993049,
      "learning_rate": 1.4722318462147844e-05,
      "loss": 0.4029,
      "step": 206
    },
    {
      "epoch": 2.2408660351826795,
      "grad_norm": 0.16023643196582224,
      "learning_rate": 1.4331650092112406e-05,
      "loss": 0.4066,
      "step": 207
    },
    {
      "epoch": 2.2516914749661705,
      "grad_norm": 0.1348210060414343,
      "learning_rate": 1.394510069111112e-05,
      "loss": 0.3973,
      "step": 208
    },
    {
      "epoch": 2.2625169147496615,
      "grad_norm": 0.13391825126021842,
      "learning_rate": 1.3562732288258377e-05,
      "loss": 0.4025,
      "step": 209
    },
    {
      "epoch": 2.273342354533153,
      "grad_norm": 0.12844012269731564,
      "learning_rate": 1.3184606241748857e-05,
      "loss": 0.3978,
      "step": 210
    },
    {
      "epoch": 2.284167794316644,
      "grad_norm": 0.13584285882845534,
      "learning_rate": 1.2810783229011486e-05,
      "loss": 0.3991,
      "step": 211
    },
    {
      "epoch": 2.2949932341001356,
      "grad_norm": 0.13168511677095765,
      "learning_rate": 1.2441323236972536e-05,
      "loss": 0.3999,
      "step": 212
    },
    {
      "epoch": 2.3058186738836266,
      "grad_norm": 0.12398647003241701,
      "learning_rate": 1.2076285552429642e-05,
      "loss": 0.4077,
      "step": 213
    },
    {
      "epoch": 2.3166441136671176,
      "grad_norm": 0.13229640626265612,
      "learning_rate": 1.1715728752538103e-05,
      "loss": 0.4019,
      "step": 214
    },
    {
      "epoch": 2.3274695534506087,
      "grad_norm": 0.12385054713595073,
      "learning_rate": 1.1359710695411086e-05,
      "loss": 0.4029,
      "step": 215
    },
    {
      "epoch": 2.3382949932341,
      "grad_norm": 0.12591732770348577,
      "learning_rate": 1.100828851083521e-05,
      "loss": 0.4084,
      "step": 216
    },
    {
      "epoch": 2.349120433017591,
      "grad_norm": 0.13251886738270005,
      "learning_rate": 1.0661518591102973e-05,
      "loss": 0.4019,
      "step": 217
    },
    {
      "epoch": 2.3599458728010827,
      "grad_norm": 0.1231996291813589,
      "learning_rate": 1.0319456581963578e-05,
      "loss": 0.4097,
      "step": 218
    },
    {
      "epoch": 2.3707713125845737,
      "grad_norm": 0.12007287837624477,
      "learning_rate": 9.982157373693502e-06,
      "loss": 0.407,
      "step": 219
    },
    {
      "epoch": 2.381596752368065,
      "grad_norm": 0.11828968531594988,
      "learning_rate": 9.649675092288366e-06,
      "loss": 0.3942,
      "step": 220
    },
    {
      "epoch": 2.3924221921515563,
      "grad_norm": 0.1266852929005505,
      "learning_rate": 9.322063090777331e-06,
      "loss": 0.402,
      "step": 221
    },
    {
      "epoch": 2.4032476319350473,
      "grad_norm": 0.13131525920519638,
      "learning_rate": 8.99937394066165e-06,
      "loss": 0.4079,
      "step": 222
    },
    {
      "epoch": 2.414073071718539,
      "grad_norm": 0.1276300518950864,
      "learning_rate": 8.681659423478587e-06,
      "loss": 0.4075,
      "step": 223
    },
    {
      "epoch": 2.42489851150203,
      "grad_norm": 0.1284596882094962,
      "learning_rate": 8.368970522492064e-06,
      "loss": 0.4041,
      "step": 224
    },
    {
      "epoch": 2.435723951285521,
      "grad_norm": 0.11600707488242912,
      "learning_rate": 8.06135741451146e-06,
      "loss": 0.4024,
      "step": 225
    },
    {
      "epoch": 2.4465493910690124,
      "grad_norm": 0.1369413509292135,
      "learning_rate": 7.758869461839808e-06,
      "loss": 0.4054,
      "step": 226
    },
    {
      "epoch": 2.4573748308525034,
      "grad_norm": 0.11693298201832159,
      "learning_rate": 7.461555204352655e-06,
      "loss": 0.3988,
      "step": 227
    },
    {
      "epoch": 2.4682002706359945,
      "grad_norm": 0.11962758908233623,
      "learning_rate": 7.169462351708958e-06,
      "loss": 0.4007,
      "step": 228
    },
    {
      "epoch": 2.479025710419486,
      "grad_norm": 0.12081712635114038,
      "learning_rate": 6.882637775695147e-06,
      "loss": 0.3973,
      "step": 229
    },
    {
      "epoch": 2.489851150202977,
      "grad_norm": 0.12426870098020382,
      "learning_rate": 6.60112750270371e-06,
      "loss": 0.3985,
      "step": 230
    },
    {
      "epoch": 2.500676589986468,
      "grad_norm": 0.11586754894597827,
      "learning_rate": 6.324976706347317e-06,
      "loss": 0.4078,
      "step": 231
    },
    {
      "epoch": 2.5115020297699595,
      "grad_norm": 0.10751212068030966,
      "learning_rate": 6.054229700209959e-06,
      "loss": 0.4054,
      "step": 232
    },
    {
      "epoch": 2.5223274695534506,
      "grad_norm": 0.12026857943267745,
      "learning_rate": 5.788929930735916e-06,
      "loss": 0.4,
      "step": 233
    },
    {
      "epoch": 2.533152909336942,
      "grad_norm": 0.10350592956190559,
      "learning_rate": 5.529119970258014e-06,
      "loss": 0.4032,
      "step": 234
    },
    {
      "epoch": 2.543978349120433,
      "grad_norm": 0.09850020851661254,
      "learning_rate": 5.274841510166062e-06,
      "loss": 0.3961,
      "step": 235
    },
    {
      "epoch": 2.554803788903924,
      "grad_norm": 0.10692798146184619,
      "learning_rate": 5.026135354216717e-06,
      "loss": 0.3953,
      "step": 236
    },
    {
      "epoch": 2.565629228687415,
      "grad_norm": 0.1026156051139519,
      "learning_rate": 4.783041411985716e-06,
      "loss": 0.4026,
      "step": 237
    },
    {
      "epoch": 2.5764546684709067,
      "grad_norm": 0.0984956835805336,
      "learning_rate": 4.545598692463675e-06,
      "loss": 0.4012,
      "step": 238
    },
    {
      "epoch": 2.5872801082543977,
      "grad_norm": 0.09517297299245513,
      "learning_rate": 4.3138452977963266e-06,
      "loss": 0.3986,
      "step": 239
    },
    {
      "epoch": 2.598105548037889,
      "grad_norm": 0.1017472403907421,
      "learning_rate": 4.087818417170337e-06,
      "loss": 0.4054,
      "step": 240
    },
    {
      "epoch": 2.60893098782138,
      "grad_norm": 0.10517463959416935,
      "learning_rate": 3.867554320845601e-06,
      "loss": 0.4081,
      "step": 241
    },
    {
      "epoch": 2.6197564276048713,
      "grad_norm": 0.10283676657115687,
      "learning_rate": 3.6530883543350038e-06,
      "loss": 0.4078,
      "step": 242
    },
    {
      "epoch": 2.6305818673883627,
      "grad_norm": 0.09668185967839843,
      "learning_rate": 3.4444549327325325e-06,
      "loss": 0.3986,
      "step": 243
    },
    {
      "epoch": 2.641407307171854,
      "grad_norm": 0.10655772797882479,
      "learning_rate": 3.241687535190776e-06,
      "loss": 0.3971,
      "step": 244
    },
    {
      "epoch": 2.6522327469553453,
      "grad_norm": 0.10412740990228317,
      "learning_rate": 3.0448186995485307e-06,
      "loss": 0.398,
      "step": 245
    },
    {
      "epoch": 2.6630581867388363,
      "grad_norm": 0.10294803068291954,
      "learning_rate": 2.853880017109516e-06,
      "loss": 0.3984,
      "step": 246
    },
    {
      "epoch": 2.6738836265223274,
      "grad_norm": 0.09605814302194737,
      "learning_rate": 2.6689021275729366e-06,
      "loss": 0.4045,
      "step": 247
    },
    {
      "epoch": 2.6847090663058184,
      "grad_norm": 0.10355709761757277,
      "learning_rate": 2.489914714116788e-06,
      "loss": 0.3993,
      "step": 248
    },
    {
      "epoch": 2.69553450608931,
      "grad_norm": 0.10534682127661901,
      "learning_rate": 2.316946498634605e-06,
      "loss": 0.396,
      "step": 249
    },
    {
      "epoch": 2.706359945872801,
      "grad_norm": 0.09850383904197658,
      "learning_rate": 2.1500252371265253e-06,
      "loss": 0.4071,
      "step": 250
    },
    {
      "epoch": 2.7171853856562924,
      "grad_norm": 0.09796274629692603,
      "learning_rate": 1.989177715245307e-06,
      "loss": 0.4029,
      "step": 251
    },
    {
      "epoch": 2.7280108254397835,
      "grad_norm": 0.09270281667503455,
      "learning_rate": 1.8344297439980475e-06,
      "loss": 0.4067,
      "step": 252
    },
    {
      "epoch": 2.7388362652232745,
      "grad_norm": 0.09318564757274785,
      "learning_rate": 1.685806155604346e-06,
      "loss": 0.4055,
      "step": 253
    },
    {
      "epoch": 2.749661705006766,
      "grad_norm": 0.09361381405899726,
      "learning_rate": 1.5433307995115043e-06,
      "loss": 0.3995,
      "step": 254
    },
    {
      "epoch": 2.760487144790257,
      "grad_norm": 0.08807467149477492,
      "learning_rate": 1.4070265385674176e-06,
      "loss": 0.3949,
      "step": 255
    },
    {
      "epoch": 2.7713125845737485,
      "grad_norm": 0.09403613096147612,
      "learning_rate": 1.276915245351833e-06,
      "loss": 0.3936,
      "step": 256
    },
    {
      "epoch": 2.7821380243572396,
      "grad_norm": 0.0872925419115015,
      "learning_rate": 1.1530177986664425e-06,
      "loss": 0.4077,
      "step": 257
    },
    {
      "epoch": 2.7929634641407306,
      "grad_norm": 0.0850031850974251,
      "learning_rate": 1.0353540801845229e-06,
      "loss": 0.4003,
      "step": 258
    },
    {
      "epoch": 2.803788903924222,
      "grad_norm": 0.08692284082323248,
      "learning_rate": 9.239429712605274e-07,
      "loss": 0.3991,
      "step": 259
    },
    {
      "epoch": 2.814614343707713,
      "grad_norm": 0.08257477054022927,
      "learning_rate": 8.188023499002206e-07,
      "loss": 0.4046,
      "step": 260
    },
    {
      "epoch": 2.825439783491204,
      "grad_norm": 0.08278787367986447,
      "learning_rate": 7.199490878918314e-07,
      "loss": 0.3939,
      "step": 261
    },
    {
      "epoch": 2.8362652232746957,
      "grad_norm": 0.08297388622794061,
      "learning_rate": 6.273990480986314e-07,
      "loss": 0.3993,
      "step": 262
    },
    {
      "epoch": 2.8470906630581867,
      "grad_norm": 0.08598446782763781,
      "learning_rate": 5.411670819134651e-07,
      "loss": 0.4034,
      "step": 263
    },
    {
      "epoch": 2.8579161028416777,
      "grad_norm": 0.08779257361395829,
      "learning_rate": 4.6126702687554483e-07,
      "loss": 0.399,
      "step": 264
    },
    {
      "epoch": 2.8687415426251692,
      "grad_norm": 0.08649244863382963,
      "learning_rate": 3.8771170444996895e-07,
      "loss": 0.4036,
      "step": 265
    },
    {
      "epoch": 2.8795669824086603,
      "grad_norm": 0.08480882436189274,
      "learning_rate": 3.2051291797027925e-07,
      "loss": 0.402,
      "step": 266
    },
    {
      "epoch": 2.8903924221921518,
      "grad_norm": 0.08449641777806831,
      "learning_rate": 2.59681450744389e-07,
      "loss": 0.3975,
      "step": 267
    },
    {
      "epoch": 2.901217861975643,
      "grad_norm": 0.08224385199164784,
      "learning_rate": 2.0522706432419382e-07,
      "loss": 0.3989,
      "step": 268
    },
    {
      "epoch": 2.912043301759134,
      "grad_norm": 0.08340221321505677,
      "learning_rate": 1.5715849693916264e-07,
      "loss": 0.4009,
      "step": 269
    },
    {
      "epoch": 2.9228687415426253,
      "grad_norm": 0.08271264876685368,
      "learning_rate": 1.1548346209410366e-07,
      "loss": 0.4023,
      "step": 270
    },
    {
      "epoch": 2.9336941813261164,
      "grad_norm": 0.08260005569786469,
      "learning_rate": 8.020864733140343e-08,
      "loss": 0.3993,
      "step": 271
    },
    {
      "epoch": 2.944519621109608,
      "grad_norm": 0.08278469542802738,
      "learning_rate": 5.133971315788966e-08,
      "loss": 0.4053,
      "step": 272
    },
    {
      "epoch": 2.955345060893099,
      "grad_norm": 0.08624728365001678,
      "learning_rate": 2.8881292136468952e-08,
      "loss": 0.4024,
      "step": 273
    },
    {
      "epoch": 2.96617050067659,
      "grad_norm": 0.08795666384161806,
      "learning_rate": 1.2836988142779228e-08,
      "loss": 0.4015,
      "step": 274
    },
    {
      "epoch": 2.976995940460081,
      "grad_norm": 0.08192972934685559,
      "learning_rate": 3.209375786856761e-09,
      "loss": 0.4094,
      "step": 275
    },
    {
      "epoch": 2.9878213802435725,
      "grad_norm": 0.07989052292080247,
      "learning_rate": 0.0,
      "loss": 0.3967,
      "step": 276
    },
    {
      "epoch": 2.9878213802435725,
      "step": 276,
      "total_flos": 6.629121904995205e+18,
      "train_loss": 0.463554704102917,
      "train_runtime": 27205.7226,
      "train_samples_per_second": 5.214,
      "train_steps_per_second": 0.01
    }
  ],
  "logging_steps": 1.0,
  "max_steps": 276,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 3,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 6.629121904995205e+18,
  "train_batch_size": 1,
  "trial_name": null,
  "trial_params": null
}