| { |
| "best_metric": 0.8962368802798873, |
| "best_model_checkpoint": "finetune/models/plant-dnamamba-BPE-NoduleExpDup/checkpoint-129855", |
| "epoch": 4.0, |
| "eval_steps": 500, |
| "global_step": 173140, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.011551345731777752, |
| "grad_norm": 29.45172119140625, |
| "learning_rate": 4.620218074293107e-07, |
| "loss": 1.0687, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.023102691463555505, |
| "grad_norm": 31.642974853515625, |
| "learning_rate": 9.240436148586214e-07, |
| "loss": 0.9807, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.034654037195333255, |
| "grad_norm": 12.195789337158203, |
| "learning_rate": 1.3860654222879322e-06, |
| "loss": 0.8373, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.04620538292711101, |
| "grad_norm": 13.536879539489746, |
| "learning_rate": 1.8480872297172428e-06, |
| "loss": 0.7818, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.05775672865888876, |
| "grad_norm": 13.85306453704834, |
| "learning_rate": 2.3101090371465534e-06, |
| "loss": 0.7234, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.06930807439066651, |
| "grad_norm": 11.797886848449707, |
| "learning_rate": 2.7721308445758644e-06, |
| "loss": 0.6925, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.08085942012244426, |
| "grad_norm": 6.770650386810303, |
| "learning_rate": 3.2341526520051746e-06, |
| "loss": 0.6751, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.09241076585422202, |
| "grad_norm": 10.289820671081543, |
| "learning_rate": 3.6961744594344856e-06, |
| "loss": 0.6636, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.10396211158599977, |
| "grad_norm": 15.198243141174316, |
| "learning_rate": 4.158196266863797e-06, |
| "loss": 0.6563, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.11551345731777753, |
| "grad_norm": 13.971009254455566, |
| "learning_rate": 4.620218074293107e-06, |
| "loss": 0.6481, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.12706480304955528, |
| "grad_norm": 8.441276550292969, |
| "learning_rate": 5.082239881722417e-06, |
| "loss": 0.6549, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.13861614878133302, |
| "grad_norm": 14.8948335647583, |
| "learning_rate": 5.544261689151729e-06, |
| "loss": 0.6507, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.1501674945131108, |
| "grad_norm": 7.725317478179932, |
| "learning_rate": 6.006283496581039e-06, |
| "loss": 0.6544, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.16171884024488853, |
| "grad_norm": 17.12333106994629, |
| "learning_rate": 6.468305304010349e-06, |
| "loss": 0.6476, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.17327018597666627, |
| "grad_norm": 7.033656120300293, |
| "learning_rate": 6.930327111439661e-06, |
| "loss": 0.6496, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.18482153170844404, |
| "grad_norm": 7.053399085998535, |
| "learning_rate": 7.392348918868971e-06, |
| "loss": 0.6523, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.19637287744022178, |
| "grad_norm": 6.655728340148926, |
| "learning_rate": 7.854370726298282e-06, |
| "loss": 0.6455, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.20792422317199954, |
| "grad_norm": 8.08369255065918, |
| "learning_rate": 8.316392533727593e-06, |
| "loss": 0.6415, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.21947556890377729, |
| "grad_norm": 3.947148084640503, |
| "learning_rate": 8.778414341156903e-06, |
| "loss": 0.6514, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.23102691463555505, |
| "grad_norm": 8.264023780822754, |
| "learning_rate": 9.240436148586214e-06, |
| "loss": 0.6431, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.2425782603673328, |
| "grad_norm": 4.74052095413208, |
| "learning_rate": 9.702457956015525e-06, |
| "loss": 0.6331, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.25412960609911056, |
| "grad_norm": 12.625064849853516, |
| "learning_rate": 9.991342538776186e-06, |
| "loss": 0.6449, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.2656809518308883, |
| "grad_norm": 17.814157485961914, |
| "learning_rate": 9.967023827473336e-06, |
| "loss": 0.6414, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.27723229756266604, |
| "grad_norm": 15.34868049621582, |
| "learning_rate": 9.942705116170485e-06, |
| "loss": 0.6287, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.2887836432944438, |
| "grad_norm": 5.785658836364746, |
| "learning_rate": 9.918386404867634e-06, |
| "loss": 0.6417, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.3003349890262216, |
| "grad_norm": 3.7156598567962646, |
| "learning_rate": 9.894067693564784e-06, |
| "loss": 0.6436, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.3118863347579993, |
| "grad_norm": 12.272802352905273, |
| "learning_rate": 9.869748982261933e-06, |
| "loss": 0.6505, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.32343768048977706, |
| "grad_norm": 15.592353820800781, |
| "learning_rate": 9.845430270959082e-06, |
| "loss": 0.6313, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.3349890262215548, |
| "grad_norm": 4.684946537017822, |
| "learning_rate": 9.821111559656232e-06, |
| "loss": 0.6457, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.34654037195333254, |
| "grad_norm": 6.0165300369262695, |
| "learning_rate": 9.79679284835338e-06, |
| "loss": 0.641, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.35809171768511033, |
| "grad_norm": 10.924590110778809, |
| "learning_rate": 9.772474137050531e-06, |
| "loss": 0.6293, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.3696430634168881, |
| "grad_norm": 7.296951770782471, |
| "learning_rate": 9.74815542574768e-06, |
| "loss": 0.627, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.3811944091486658, |
| "grad_norm": 14.750964164733887, |
| "learning_rate": 9.723836714444829e-06, |
| "loss": 0.6254, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.39274575488044355, |
| "grad_norm": 7.377384662628174, |
| "learning_rate": 9.699518003141977e-06, |
| "loss": 0.6287, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.40429710061222135, |
| "grad_norm": 6.540673732757568, |
| "learning_rate": 9.675199291839128e-06, |
| "loss": 0.6309, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.4158484463439991, |
| "grad_norm": 6.4445295333862305, |
| "learning_rate": 9.650880580536277e-06, |
| "loss": 0.6302, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.42739979207577683, |
| "grad_norm": 7.85426664352417, |
| "learning_rate": 9.626561869233427e-06, |
| "loss": 0.6201, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.43895113780755457, |
| "grad_norm": 5.960766792297363, |
| "learning_rate": 9.602243157930576e-06, |
| "loss": 0.626, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.4505024835393323, |
| "grad_norm": 9.376029968261719, |
| "learning_rate": 9.577924446627726e-06, |
| "loss": 0.6164, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.4620538292711101, |
| "grad_norm": 17.05916976928711, |
| "learning_rate": 9.553605735324875e-06, |
| "loss": 0.6232, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.47360517500288785, |
| "grad_norm": 7.523278713226318, |
| "learning_rate": 9.529287024022024e-06, |
| "loss": 0.601, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.4851565207346656, |
| "grad_norm": 6.75208044052124, |
| "learning_rate": 9.504968312719172e-06, |
| "loss": 0.6157, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.4967078664664433, |
| "grad_norm": 12.919069290161133, |
| "learning_rate": 9.480649601416323e-06, |
| "loss": 0.6098, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.5082592121982211, |
| "grad_norm": 19.89571189880371, |
| "learning_rate": 9.456330890113472e-06, |
| "loss": 0.6131, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.5198105579299989, |
| "grad_norm": 7.436350345611572, |
| "learning_rate": 9.432012178810622e-06, |
| "loss": 0.6121, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.5313619036617766, |
| "grad_norm": 10.576221466064453, |
| "learning_rate": 9.40769346750777e-06, |
| "loss": 0.6005, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.5429132493935543, |
| "grad_norm": 12.731452941894531, |
| "learning_rate": 9.383374756204921e-06, |
| "loss": 0.6217, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.5544645951253321, |
| "grad_norm": 9.219985961914062, |
| "learning_rate": 9.35905604490207e-06, |
| "loss": 0.5985, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.5660159408571098, |
| "grad_norm": 12.659988403320312, |
| "learning_rate": 9.334737333599219e-06, |
| "loss": 0.5951, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.5775672865888876, |
| "grad_norm": 32.879234313964844, |
| "learning_rate": 9.310418622296367e-06, |
| "loss": 0.5961, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.5891186323206653, |
| "grad_norm": 9.763121604919434, |
| "learning_rate": 9.286099910993518e-06, |
| "loss": 0.6081, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.6006699780524432, |
| "grad_norm": 9.057079315185547, |
| "learning_rate": 9.261781199690667e-06, |
| "loss": 0.5863, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.6122213237842209, |
| "grad_norm": 17.381582260131836, |
| "learning_rate": 9.237462488387817e-06, |
| "loss": 0.5857, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.6237726695159986, |
| "grad_norm": 18.3979549407959, |
| "learning_rate": 9.213143777084966e-06, |
| "loss": 0.5853, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.6353240152477764, |
| "grad_norm": 11.13846206665039, |
| "learning_rate": 9.188825065782114e-06, |
| "loss": 0.582, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.6468753609795541, |
| "grad_norm": 8.506060600280762, |
| "learning_rate": 9.164506354479265e-06, |
| "loss": 0.5773, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.6584267067113319, |
| "grad_norm": 9.814123153686523, |
| "learning_rate": 9.140187643176414e-06, |
| "loss": 0.5797, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.6699780524431096, |
| "grad_norm": 11.70206069946289, |
| "learning_rate": 9.115868931873562e-06, |
| "loss": 0.5587, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.6815293981748873, |
| "grad_norm": 11.949383735656738, |
| "learning_rate": 9.091550220570711e-06, |
| "loss": 0.5769, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.6930807439066651, |
| "grad_norm": 11.51278018951416, |
| "learning_rate": 9.067231509267862e-06, |
| "loss": 0.5798, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.7046320896384429, |
| "grad_norm": 13.705997467041016, |
| "learning_rate": 9.042912797965012e-06, |
| "loss": 0.5585, |
| "step": 30500 |
| }, |
| { |
| "epoch": 0.7161834353702207, |
| "grad_norm": 18.50054168701172, |
| "learning_rate": 9.01859408666216e-06, |
| "loss": 0.5656, |
| "step": 31000 |
| }, |
| { |
| "epoch": 0.7277347811019984, |
| "grad_norm": 23.920692443847656, |
| "learning_rate": 8.99427537535931e-06, |
| "loss": 0.5493, |
| "step": 31500 |
| }, |
| { |
| "epoch": 0.7392861268337761, |
| "grad_norm": 15.425186157226562, |
| "learning_rate": 8.96995666405646e-06, |
| "loss": 0.5538, |
| "step": 32000 |
| }, |
| { |
| "epoch": 0.7508374725655539, |
| "grad_norm": 11.077018737792969, |
| "learning_rate": 8.945637952753609e-06, |
| "loss": 0.5553, |
| "step": 32500 |
| }, |
| { |
| "epoch": 0.7623888182973316, |
| "grad_norm": 17.128128051757812, |
| "learning_rate": 8.921319241450757e-06, |
| "loss": 0.5657, |
| "step": 33000 |
| }, |
| { |
| "epoch": 0.7739401640291094, |
| "grad_norm": 37.22261428833008, |
| "learning_rate": 8.897000530147906e-06, |
| "loss": 0.5573, |
| "step": 33500 |
| }, |
| { |
| "epoch": 0.7854915097608871, |
| "grad_norm": 18.649255752563477, |
| "learning_rate": 8.872681818845056e-06, |
| "loss": 0.5337, |
| "step": 34000 |
| }, |
| { |
| "epoch": 0.7970428554926648, |
| "grad_norm": 11.49511432647705, |
| "learning_rate": 8.848363107542205e-06, |
| "loss": 0.5442, |
| "step": 34500 |
| }, |
| { |
| "epoch": 0.8085942012244427, |
| "grad_norm": 20.64067840576172, |
| "learning_rate": 8.824044396239356e-06, |
| "loss": 0.5632, |
| "step": 35000 |
| }, |
| { |
| "epoch": 0.8201455469562204, |
| "grad_norm": 18.51673126220703, |
| "learning_rate": 8.799725684936504e-06, |
| "loss": 0.5454, |
| "step": 35500 |
| }, |
| { |
| "epoch": 0.8316968926879982, |
| "grad_norm": 18.139997482299805, |
| "learning_rate": 8.775406973633655e-06, |
| "loss": 0.5371, |
| "step": 36000 |
| }, |
| { |
| "epoch": 0.8432482384197759, |
| "grad_norm": 21.852693557739258, |
| "learning_rate": 8.751088262330804e-06, |
| "loss": 0.539, |
| "step": 36500 |
| }, |
| { |
| "epoch": 0.8547995841515537, |
| "grad_norm": 31.914161682128906, |
| "learning_rate": 8.726769551027952e-06, |
| "loss": 0.5383, |
| "step": 37000 |
| }, |
| { |
| "epoch": 0.8663509298833314, |
| "grad_norm": 9.24451732635498, |
| "learning_rate": 8.702450839725101e-06, |
| "loss": 0.548, |
| "step": 37500 |
| }, |
| { |
| "epoch": 0.8779022756151091, |
| "grad_norm": 27.3262882232666, |
| "learning_rate": 8.678132128422251e-06, |
| "loss": 0.524, |
| "step": 38000 |
| }, |
| { |
| "epoch": 0.8894536213468869, |
| "grad_norm": 23.231952667236328, |
| "learning_rate": 8.6538134171194e-06, |
| "loss": 0.5121, |
| "step": 38500 |
| }, |
| { |
| "epoch": 0.9010049670786646, |
| "grad_norm": 28.35572052001953, |
| "learning_rate": 8.62949470581655e-06, |
| "loss": 0.5333, |
| "step": 39000 |
| }, |
| { |
| "epoch": 0.9125563128104425, |
| "grad_norm": 24.61405372619629, |
| "learning_rate": 8.6051759945137e-06, |
| "loss": 0.5015, |
| "step": 39500 |
| }, |
| { |
| "epoch": 0.9241076585422202, |
| "grad_norm": 30.62819480895996, |
| "learning_rate": 8.580857283210848e-06, |
| "loss": 0.5092, |
| "step": 40000 |
| }, |
| { |
| "epoch": 0.935659004273998, |
| "grad_norm": 19.295331954956055, |
| "learning_rate": 8.556538571907999e-06, |
| "loss": 0.5125, |
| "step": 40500 |
| }, |
| { |
| "epoch": 0.9472103500057757, |
| "grad_norm": 10.785584449768066, |
| "learning_rate": 8.532219860605147e-06, |
| "loss": 0.5161, |
| "step": 41000 |
| }, |
| { |
| "epoch": 0.9587616957375534, |
| "grad_norm": 24.81365394592285, |
| "learning_rate": 8.507901149302296e-06, |
| "loss": 0.498, |
| "step": 41500 |
| }, |
| { |
| "epoch": 0.9703130414693312, |
| "grad_norm": 48.026973724365234, |
| "learning_rate": 8.483582437999446e-06, |
| "loss": 0.4916, |
| "step": 42000 |
| }, |
| { |
| "epoch": 0.9818643872011089, |
| "grad_norm": 6.653892517089844, |
| "learning_rate": 8.459263726696595e-06, |
| "loss": 0.4944, |
| "step": 42500 |
| }, |
| { |
| "epoch": 0.9934157329328867, |
| "grad_norm": 39.814945220947266, |
| "learning_rate": 8.434945015393746e-06, |
| "loss": 0.4954, |
| "step": 43000 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_accuracy": 0.7796003234376805, |
| "eval_f1": 0.7890500619140279, |
| "eval_loss": 0.4737609326839447, |
| "eval_matthews_correlation": 0.5599099791389662, |
| "eval_precision": 0.7656524910955671, |
| "eval_recall": 0.8139227225035354, |
| "eval_runtime": 550.4755, |
| "eval_samples_per_second": 78.632, |
| "eval_steps_per_second": 4.916, |
| "step": 43285 |
| }, |
| { |
| "epoch": 1.0049670786646645, |
| "grad_norm": 22.5259952545166, |
| "learning_rate": 8.410626304090894e-06, |
| "loss": 0.4339, |
| "step": 43500 |
| }, |
| { |
| "epoch": 1.0165184243964422, |
| "grad_norm": 25.5583438873291, |
| "learning_rate": 8.386307592788043e-06, |
| "loss": 0.3274, |
| "step": 44000 |
| }, |
| { |
| "epoch": 1.02806977012822, |
| "grad_norm": 9.524903297424316, |
| "learning_rate": 8.361988881485194e-06, |
| "loss": 0.3372, |
| "step": 44500 |
| }, |
| { |
| "epoch": 1.0396211158599977, |
| "grad_norm": 37.77333068847656, |
| "learning_rate": 8.337670170182342e-06, |
| "loss": 0.3471, |
| "step": 45000 |
| }, |
| { |
| "epoch": 1.0511724615917755, |
| "grad_norm": 47.2765007019043, |
| "learning_rate": 8.313351458879491e-06, |
| "loss": 0.3436, |
| "step": 45500 |
| }, |
| { |
| "epoch": 1.0627238073235532, |
| "grad_norm": 58.79914474487305, |
| "learning_rate": 8.289032747576641e-06, |
| "loss": 0.3524, |
| "step": 46000 |
| }, |
| { |
| "epoch": 1.074275153055331, |
| "grad_norm": 23.910385131835938, |
| "learning_rate": 8.26471403627379e-06, |
| "loss": 0.3283, |
| "step": 46500 |
| }, |
| { |
| "epoch": 1.0858264987871087, |
| "grad_norm": 65.18335723876953, |
| "learning_rate": 8.24039532497094e-06, |
| "loss": 0.3529, |
| "step": 47000 |
| }, |
| { |
| "epoch": 1.0973778445188864, |
| "grad_norm": 29.333236694335938, |
| "learning_rate": 8.21607661366809e-06, |
| "loss": 0.3471, |
| "step": 47500 |
| }, |
| { |
| "epoch": 1.1089291902506642, |
| "grad_norm": 41.33262252807617, |
| "learning_rate": 8.191757902365238e-06, |
| "loss": 0.3507, |
| "step": 48000 |
| }, |
| { |
| "epoch": 1.120480535982442, |
| "grad_norm": 91.7591781616211, |
| "learning_rate": 8.167439191062387e-06, |
| "loss": 0.3506, |
| "step": 48500 |
| }, |
| { |
| "epoch": 1.1320318817142196, |
| "grad_norm": 7.366957187652588, |
| "learning_rate": 8.143120479759537e-06, |
| "loss": 0.3526, |
| "step": 49000 |
| }, |
| { |
| "epoch": 1.1435832274459974, |
| "grad_norm": 31.159626007080078, |
| "learning_rate": 8.118801768456686e-06, |
| "loss": 0.3423, |
| "step": 49500 |
| }, |
| { |
| "epoch": 1.1551345731777751, |
| "grad_norm": 15.73024845123291, |
| "learning_rate": 8.094483057153836e-06, |
| "loss": 0.3516, |
| "step": 50000 |
| }, |
| { |
| "epoch": 1.1666859189095529, |
| "grad_norm": 66.58766174316406, |
| "learning_rate": 8.070164345850985e-06, |
| "loss": 0.3435, |
| "step": 50500 |
| }, |
| { |
| "epoch": 1.1782372646413308, |
| "grad_norm": 48.182308197021484, |
| "learning_rate": 8.045845634548136e-06, |
| "loss": 0.344, |
| "step": 51000 |
| }, |
| { |
| "epoch": 1.1897886103731086, |
| "grad_norm": 42.61834716796875, |
| "learning_rate": 8.021526923245284e-06, |
| "loss": 0.3739, |
| "step": 51500 |
| }, |
| { |
| "epoch": 1.2013399561048863, |
| "grad_norm": 59.38914489746094, |
| "learning_rate": 7.997208211942433e-06, |
| "loss": 0.3399, |
| "step": 52000 |
| }, |
| { |
| "epoch": 1.212891301836664, |
| "grad_norm": 5.7647857666015625, |
| "learning_rate": 7.972889500639582e-06, |
| "loss": 0.3518, |
| "step": 52500 |
| }, |
| { |
| "epoch": 1.2244426475684418, |
| "grad_norm": 73.71373748779297, |
| "learning_rate": 7.948570789336732e-06, |
| "loss": 0.3523, |
| "step": 53000 |
| }, |
| { |
| "epoch": 1.2359939933002195, |
| "grad_norm": 14.03942584991455, |
| "learning_rate": 7.924252078033881e-06, |
| "loss": 0.3512, |
| "step": 53500 |
| }, |
| { |
| "epoch": 1.2475453390319973, |
| "grad_norm": 53.254791259765625, |
| "learning_rate": 7.899933366731031e-06, |
| "loss": 0.3454, |
| "step": 54000 |
| }, |
| { |
| "epoch": 1.259096684763775, |
| "grad_norm": 4.009112358093262, |
| "learning_rate": 7.87561465542818e-06, |
| "loss": 0.3406, |
| "step": 54500 |
| }, |
| { |
| "epoch": 1.2706480304955527, |
| "grad_norm": 37.73891067504883, |
| "learning_rate": 7.85129594412533e-06, |
| "loss": 0.3474, |
| "step": 55000 |
| }, |
| { |
| "epoch": 1.2821993762273305, |
| "grad_norm": 84.36405181884766, |
| "learning_rate": 7.82697723282248e-06, |
| "loss": 0.3551, |
| "step": 55500 |
| }, |
| { |
| "epoch": 1.2937507219591082, |
| "grad_norm": 43.2830810546875, |
| "learning_rate": 7.802658521519628e-06, |
| "loss": 0.3521, |
| "step": 56000 |
| }, |
| { |
| "epoch": 1.305302067690886, |
| "grad_norm": 143.8572998046875, |
| "learning_rate": 7.778339810216777e-06, |
| "loss": 0.3544, |
| "step": 56500 |
| }, |
| { |
| "epoch": 1.3168534134226637, |
| "grad_norm": 19.368745803833008, |
| "learning_rate": 7.754021098913927e-06, |
| "loss": 0.332, |
| "step": 57000 |
| }, |
| { |
| "epoch": 1.3284047591544415, |
| "grad_norm": 37.57652282714844, |
| "learning_rate": 7.729702387611076e-06, |
| "loss": 0.3614, |
| "step": 57500 |
| }, |
| { |
| "epoch": 1.3399561048862192, |
| "grad_norm": 12.710687637329102, |
| "learning_rate": 7.705383676308226e-06, |
| "loss": 0.3397, |
| "step": 58000 |
| }, |
| { |
| "epoch": 1.351507450617997, |
| "grad_norm": 9.734477043151855, |
| "learning_rate": 7.681064965005375e-06, |
| "loss": 0.3337, |
| "step": 58500 |
| }, |
| { |
| "epoch": 1.3630587963497747, |
| "grad_norm": 50.98896408081055, |
| "learning_rate": 7.656746253702524e-06, |
| "loss": 0.3466, |
| "step": 59000 |
| }, |
| { |
| "epoch": 1.3746101420815524, |
| "grad_norm": 36.90542221069336, |
| "learning_rate": 7.632427542399674e-06, |
| "loss": 0.3345, |
| "step": 59500 |
| }, |
| { |
| "epoch": 1.3861614878133302, |
| "grad_norm": 87.90083312988281, |
| "learning_rate": 7.608108831096823e-06, |
| "loss": 0.3268, |
| "step": 60000 |
| }, |
| { |
| "epoch": 1.397712833545108, |
| "grad_norm": 33.43013000488281, |
| "learning_rate": 7.583790119793973e-06, |
| "loss": 0.3572, |
| "step": 60500 |
| }, |
| { |
| "epoch": 1.4092641792768856, |
| "grad_norm": 10.33069896697998, |
| "learning_rate": 7.559471408491121e-06, |
| "loss": 0.3565, |
| "step": 61000 |
| }, |
| { |
| "epoch": 1.4208155250086634, |
| "grad_norm": 1.686011791229248, |
| "learning_rate": 7.535152697188272e-06, |
| "loss": 0.3647, |
| "step": 61500 |
| }, |
| { |
| "epoch": 1.4323668707404413, |
| "grad_norm": 20.76148223876953, |
| "learning_rate": 7.5108339858854206e-06, |
| "loss": 0.3284, |
| "step": 62000 |
| }, |
| { |
| "epoch": 1.443918216472219, |
| "grad_norm": 9.261433601379395, |
| "learning_rate": 7.48651527458257e-06, |
| "loss": 0.3474, |
| "step": 62500 |
| }, |
| { |
| "epoch": 1.4554695622039968, |
| "grad_norm": 38.97019577026367, |
| "learning_rate": 7.462196563279719e-06, |
| "loss": 0.3433, |
| "step": 63000 |
| }, |
| { |
| "epoch": 1.4670209079357746, |
| "grad_norm": 43.138450622558594, |
| "learning_rate": 7.437877851976869e-06, |
| "loss": 0.3406, |
| "step": 63500 |
| }, |
| { |
| "epoch": 1.4785722536675523, |
| "grad_norm": 10.66066837310791, |
| "learning_rate": 7.413559140674018e-06, |
| "loss": 0.3533, |
| "step": 64000 |
| }, |
| { |
| "epoch": 1.49012359939933, |
| "grad_norm": 32.289588928222656, |
| "learning_rate": 7.389240429371168e-06, |
| "loss": 0.3377, |
| "step": 64500 |
| }, |
| { |
| "epoch": 1.5016749451311078, |
| "grad_norm": 85.28985595703125, |
| "learning_rate": 7.364921718068316e-06, |
| "loss": 0.3598, |
| "step": 65000 |
| }, |
| { |
| "epoch": 1.5132262908628855, |
| "grad_norm": 56.37942123413086, |
| "learning_rate": 7.340603006765467e-06, |
| "loss": 0.3354, |
| "step": 65500 |
| }, |
| { |
| "epoch": 1.5247776365946633, |
| "grad_norm": 91.6646499633789, |
| "learning_rate": 7.3162842954626156e-06, |
| "loss": 0.3557, |
| "step": 66000 |
| }, |
| { |
| "epoch": 1.536328982326441, |
| "grad_norm": 22.282794952392578, |
| "learning_rate": 7.291965584159765e-06, |
| "loss": 0.3375, |
| "step": 66500 |
| }, |
| { |
| "epoch": 1.5478803280582187, |
| "grad_norm": 23.59638214111328, |
| "learning_rate": 7.267646872856914e-06, |
| "loss": 0.3613, |
| "step": 67000 |
| }, |
| { |
| "epoch": 1.5594316737899967, |
| "grad_norm": 101.47071838378906, |
| "learning_rate": 7.243328161554064e-06, |
| "loss": 0.3625, |
| "step": 67500 |
| }, |
| { |
| "epoch": 1.5709830195217744, |
| "grad_norm": 10.680140495300293, |
| "learning_rate": 7.219009450251213e-06, |
| "loss": 0.3447, |
| "step": 68000 |
| }, |
| { |
| "epoch": 1.5825343652535522, |
| "grad_norm": 3.526810646057129, |
| "learning_rate": 7.194690738948363e-06, |
| "loss": 0.3752, |
| "step": 68500 |
| }, |
| { |
| "epoch": 1.59408571098533, |
| "grad_norm": 63.79883575439453, |
| "learning_rate": 7.170372027645511e-06, |
| "loss": 0.3556, |
| "step": 69000 |
| }, |
| { |
| "epoch": 1.6056370567171077, |
| "grad_norm": 57.77899169921875, |
| "learning_rate": 7.14605331634266e-06, |
| "loss": 0.3734, |
| "step": 69500 |
| }, |
| { |
| "epoch": 1.6171884024488854, |
| "grad_norm": 12.811071395874023, |
| "learning_rate": 7.1217346050398105e-06, |
| "loss": 0.3314, |
| "step": 70000 |
| }, |
| { |
| "epoch": 1.6287397481806631, |
| "grad_norm": 45.64273452758789, |
| "learning_rate": 7.09741589373696e-06, |
| "loss": 0.3603, |
| "step": 70500 |
| }, |
| { |
| "epoch": 1.6402910939124409, |
| "grad_norm": 0.7724096775054932, |
| "learning_rate": 7.073097182434109e-06, |
| "loss": 0.3531, |
| "step": 71000 |
| }, |
| { |
| "epoch": 1.6518424396442186, |
| "grad_norm": 111.21202850341797, |
| "learning_rate": 7.048778471131258e-06, |
| "loss": 0.3687, |
| "step": 71500 |
| }, |
| { |
| "epoch": 1.6633937853759964, |
| "grad_norm": 2.9768288135528564, |
| "learning_rate": 7.024459759828408e-06, |
| "loss": 0.362, |
| "step": 72000 |
| }, |
| { |
| "epoch": 1.674945131107774, |
| "grad_norm": 27.48809242248535, |
| "learning_rate": 7.000141048525558e-06, |
| "loss": 0.3616, |
| "step": 72500 |
| }, |
| { |
| "epoch": 1.6864964768395518, |
| "grad_norm": 48.34687805175781, |
| "learning_rate": 6.975822337222706e-06, |
| "loss": 0.3367, |
| "step": 73000 |
| }, |
| { |
| "epoch": 1.6980478225713296, |
| "grad_norm": 39.69007110595703, |
| "learning_rate": 6.951503625919855e-06, |
| "loss": 0.3621, |
| "step": 73500 |
| }, |
| { |
| "epoch": 1.7095991683031073, |
| "grad_norm": 110.33565521240234, |
| "learning_rate": 6.9271849146170055e-06, |
| "loss": 0.3539, |
| "step": 74000 |
| }, |
| { |
| "epoch": 1.721150514034885, |
| "grad_norm": 41.324222564697266, |
| "learning_rate": 6.902866203314154e-06, |
| "loss": 0.3433, |
| "step": 74500 |
| }, |
| { |
| "epoch": 1.7327018597666628, |
| "grad_norm": 73.07483673095703, |
| "learning_rate": 6.878547492011304e-06, |
| "loss": 0.3581, |
| "step": 75000 |
| }, |
| { |
| "epoch": 1.7442532054984405, |
| "grad_norm": 2.784154176712036, |
| "learning_rate": 6.854228780708453e-06, |
| "loss": 0.3506, |
| "step": 75500 |
| }, |
| { |
| "epoch": 1.7558045512302183, |
| "grad_norm": 46.016876220703125, |
| "learning_rate": 6.829910069405603e-06, |
| "loss": 0.3687, |
| "step": 76000 |
| }, |
| { |
| "epoch": 1.767355896961996, |
| "grad_norm": 10.431674003601074, |
| "learning_rate": 6.805591358102752e-06, |
| "loss": 0.3448, |
| "step": 76500 |
| }, |
| { |
| "epoch": 1.7789072426937738, |
| "grad_norm": 77.33363342285156, |
| "learning_rate": 6.781272646799901e-06, |
| "loss": 0.3688, |
| "step": 77000 |
| }, |
| { |
| "epoch": 1.7904585884255515, |
| "grad_norm": 36.162261962890625, |
| "learning_rate": 6.75695393549705e-06, |
| "loss": 0.3481, |
| "step": 77500 |
| }, |
| { |
| "epoch": 1.8020099341573292, |
| "grad_norm": 34.184844970703125, |
| "learning_rate": 6.7326352241942005e-06, |
| "loss": 0.3538, |
| "step": 78000 |
| }, |
| { |
| "epoch": 1.813561279889107, |
| "grad_norm": 89.07086944580078, |
| "learning_rate": 6.708316512891349e-06, |
| "loss": 0.3735, |
| "step": 78500 |
| }, |
| { |
| "epoch": 1.8251126256208847, |
| "grad_norm": 3.9982004165649414, |
| "learning_rate": 6.683997801588499e-06, |
| "loss": 0.3394, |
| "step": 79000 |
| }, |
| { |
| "epoch": 1.8366639713526625, |
| "grad_norm": 81.374267578125, |
| "learning_rate": 6.659679090285648e-06, |
| "loss": 0.3174, |
| "step": 79500 |
| }, |
| { |
| "epoch": 1.8482153170844402, |
| "grad_norm": 0.20297826826572418, |
| "learning_rate": 6.635360378982797e-06, |
| "loss": 0.3153, |
| "step": 80000 |
| }, |
| { |
| "epoch": 1.859766662816218, |
| "grad_norm": 59.6692008972168, |
| "learning_rate": 6.611041667679947e-06, |
| "loss": 0.3545, |
| "step": 80500 |
| }, |
| { |
| "epoch": 1.8713180085479957, |
| "grad_norm": 1.2928632497787476, |
| "learning_rate": 6.586722956377096e-06, |
| "loss": 0.3373, |
| "step": 81000 |
| }, |
| { |
| "epoch": 1.8828693542797736, |
| "grad_norm": 94.39313507080078, |
| "learning_rate": 6.562404245074245e-06, |
| "loss": 0.3222, |
| "step": 81500 |
| }, |
| { |
| "epoch": 1.8944207000115514, |
| "grad_norm": 29.352657318115234, |
| "learning_rate": 6.538085533771395e-06, |
| "loss": 0.3418, |
| "step": 82000 |
| }, |
| { |
| "epoch": 1.9059720457433291, |
| "grad_norm": 11.170536041259766, |
| "learning_rate": 6.513766822468544e-06, |
| "loss": 0.3423, |
| "step": 82500 |
| }, |
| { |
| "epoch": 1.9175233914751069, |
| "grad_norm": 90.75904846191406, |
| "learning_rate": 6.489448111165694e-06, |
| "loss": 0.3253, |
| "step": 83000 |
| }, |
| { |
| "epoch": 1.9290747372068846, |
| "grad_norm": 79.5523681640625, |
| "learning_rate": 6.4651293998628426e-06, |
| "loss": 0.3423, |
| "step": 83500 |
| }, |
| { |
| "epoch": 1.9406260829386623, |
| "grad_norm": 2.362600564956665, |
| "learning_rate": 6.440810688559992e-06, |
| "loss": 0.3233, |
| "step": 84000 |
| }, |
| { |
| "epoch": 1.95217742867044, |
| "grad_norm": 106.5050277709961, |
| "learning_rate": 6.416491977257142e-06, |
| "loss": 0.3291, |
| "step": 84500 |
| }, |
| { |
| "epoch": 1.9637287744022178, |
| "grad_norm": 44.49625015258789, |
| "learning_rate": 6.392173265954291e-06, |
| "loss": 0.3462, |
| "step": 85000 |
| }, |
| { |
| "epoch": 1.9752801201339956, |
| "grad_norm": 65.74842071533203, |
| "learning_rate": 6.36785455465144e-06, |
| "loss": 0.3356, |
| "step": 85500 |
| }, |
| { |
| "epoch": 1.9868314658657735, |
| "grad_norm": 25.529869079589844, |
| "learning_rate": 6.34353584334859e-06, |
| "loss": 0.3418, |
| "step": 86000 |
| }, |
| { |
| "epoch": 1.9983828115975513, |
| "grad_norm": 0.7819985151290894, |
| "learning_rate": 6.319217132045739e-06, |
| "loss": 0.3299, |
| "step": 86500 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_accuracy": 0.8750144391821647, |
| "eval_f1": 0.883420247381804, |
| "eval_loss": 0.5118392705917358, |
| "eval_matthews_correlation": 0.7549403916374646, |
| "eval_precision": 0.8371656115989381, |
| "eval_recall": 0.9350850782354819, |
| "eval_runtime": 543.2127, |
| "eval_samples_per_second": 79.683, |
| "eval_steps_per_second": 4.981, |
| "step": 86570 |
| }, |
| { |
| "epoch": 2.009934157329329, |
| "grad_norm": 0.09307216852903366, |
| "learning_rate": 6.294898420742889e-06, |
| "loss": 0.2056, |
| "step": 87000 |
| }, |
| { |
| "epoch": 2.0214855030611067, |
| "grad_norm": 0.1265561431646347, |
| "learning_rate": 6.2705797094400376e-06, |
| "loss": 0.201, |
| "step": 87500 |
| }, |
| { |
| "epoch": 2.0330368487928845, |
| "grad_norm": 0.5366824865341187, |
| "learning_rate": 6.246260998137187e-06, |
| "loss": 0.1897, |
| "step": 88000 |
| }, |
| { |
| "epoch": 2.0445881945246622, |
| "grad_norm": 37.82719421386719, |
| "learning_rate": 6.221942286834337e-06, |
| "loss": 0.2095, |
| "step": 88500 |
| }, |
| { |
| "epoch": 2.05613954025644, |
| "grad_norm": 0.5017096996307373, |
| "learning_rate": 6.197623575531486e-06, |
| "loss": 0.1988, |
| "step": 89000 |
| }, |
| { |
| "epoch": 2.0676908859882177, |
| "grad_norm": 0.10803969949483871, |
| "learning_rate": 6.173304864228635e-06, |
| "loss": 0.1991, |
| "step": 89500 |
| }, |
| { |
| "epoch": 2.0792422317199954, |
| "grad_norm": 0.3513072431087494, |
| "learning_rate": 6.148986152925785e-06, |
| "loss": 0.2018, |
| "step": 90000 |
| }, |
| { |
| "epoch": 2.090793577451773, |
| "grad_norm": 1.3080644607543945, |
| "learning_rate": 6.124667441622933e-06, |
| "loss": 0.2095, |
| "step": 90500 |
| }, |
| { |
| "epoch": 2.102344923183551, |
| "grad_norm": 5.2385053634643555, |
| "learning_rate": 6.100348730320084e-06, |
| "loss": 0.1974, |
| "step": 91000 |
| }, |
| { |
| "epoch": 2.1138962689153287, |
| "grad_norm": 10.532730102539062, |
| "learning_rate": 6.0760300190172326e-06, |
| "loss": 0.2019, |
| "step": 91500 |
| }, |
| { |
| "epoch": 2.1254476146471064, |
| "grad_norm": 0.104009710252285, |
| "learning_rate": 6.051711307714382e-06, |
| "loss": 0.2091, |
| "step": 92000 |
| }, |
| { |
| "epoch": 2.136998960378884, |
| "grad_norm": 0.038262490183115005, |
| "learning_rate": 6.027392596411531e-06, |
| "loss": 0.2422, |
| "step": 92500 |
| }, |
| { |
| "epoch": 2.148550306110662, |
| "grad_norm": 0.30325543880462646, |
| "learning_rate": 6.003073885108681e-06, |
| "loss": 0.1971, |
| "step": 93000 |
| }, |
| { |
| "epoch": 2.1601016518424396, |
| "grad_norm": 0.2501794099807739, |
| "learning_rate": 5.97875517380583e-06, |
| "loss": 0.2123, |
| "step": 93500 |
| }, |
| { |
| "epoch": 2.1716529975742174, |
| "grad_norm": 0.9724676609039307, |
| "learning_rate": 5.95443646250298e-06, |
| "loss": 0.2222, |
| "step": 94000 |
| }, |
| { |
| "epoch": 2.183204343305995, |
| "grad_norm": 23.507680892944336, |
| "learning_rate": 5.930117751200128e-06, |
| "loss": 0.199, |
| "step": 94500 |
| }, |
| { |
| "epoch": 2.194755689037773, |
| "grad_norm": 23.95125389099121, |
| "learning_rate": 5.905799039897279e-06, |
| "loss": 0.2367, |
| "step": 95000 |
| }, |
| { |
| "epoch": 2.2063070347695506, |
| "grad_norm": 0.026822537183761597, |
| "learning_rate": 5.8814803285944275e-06, |
| "loss": 0.1918, |
| "step": 95500 |
| }, |
| { |
| "epoch": 2.2178583805013283, |
| "grad_norm": 0.11111404001712799, |
| "learning_rate": 5.857161617291577e-06, |
| "loss": 0.2047, |
| "step": 96000 |
| }, |
| { |
| "epoch": 2.229409726233106, |
| "grad_norm": 0.1919553279876709, |
| "learning_rate": 5.832842905988726e-06, |
| "loss": 0.225, |
| "step": 96500 |
| }, |
| { |
| "epoch": 2.240961071964884, |
| "grad_norm": 0.44445013999938965, |
| "learning_rate": 5.808524194685876e-06, |
| "loss": 0.2373, |
| "step": 97000 |
| }, |
| { |
| "epoch": 2.2525124176966616, |
| "grad_norm": 0.0314050130546093, |
| "learning_rate": 5.784205483383025e-06, |
| "loss": 0.2313, |
| "step": 97500 |
| }, |
| { |
| "epoch": 2.2640637634284393, |
| "grad_norm": 0.3491435945034027, |
| "learning_rate": 5.759886772080175e-06, |
| "loss": 0.2149, |
| "step": 98000 |
| }, |
| { |
| "epoch": 2.275615109160217, |
| "grad_norm": 0.41699114441871643, |
| "learning_rate": 5.735568060777323e-06, |
| "loss": 0.2012, |
| "step": 98500 |
| }, |
| { |
| "epoch": 2.2871664548919948, |
| "grad_norm": 0.05766846239566803, |
| "learning_rate": 5.711249349474474e-06, |
| "loss": 0.2247, |
| "step": 99000 |
| }, |
| { |
| "epoch": 2.2987178006237725, |
| "grad_norm": 10.02811336517334, |
| "learning_rate": 5.6869306381716225e-06, |
| "loss": 0.2081, |
| "step": 99500 |
| }, |
| { |
| "epoch": 2.3102691463555503, |
| "grad_norm": 0.1262376755475998, |
| "learning_rate": 5.662611926868772e-06, |
| "loss": 0.2471, |
| "step": 100000 |
| }, |
| { |
| "epoch": 2.321820492087328, |
| "grad_norm": 7.784575939178467, |
| "learning_rate": 5.638293215565921e-06, |
| "loss": 0.2188, |
| "step": 100500 |
| }, |
| { |
| "epoch": 2.3333718378191057, |
| "grad_norm": 0.4924183189868927, |
| "learning_rate": 5.61397450426307e-06, |
| "loss": 0.182, |
| "step": 101000 |
| }, |
| { |
| "epoch": 2.3449231835508835, |
| "grad_norm": 0.8090869188308716, |
| "learning_rate": 5.58965579296022e-06, |
| "loss": 0.2307, |
| "step": 101500 |
| }, |
| { |
| "epoch": 2.3564745292826617, |
| "grad_norm": 0.5244800448417664, |
| "learning_rate": 5.56533708165737e-06, |
| "loss": 0.2162, |
| "step": 102000 |
| }, |
| { |
| "epoch": 2.368025875014439, |
| "grad_norm": 0.12117698788642883, |
| "learning_rate": 5.541018370354518e-06, |
| "loss": 0.2339, |
| "step": 102500 |
| }, |
| { |
| "epoch": 2.379577220746217, |
| "grad_norm": 139.68356323242188, |
| "learning_rate": 5.516699659051667e-06, |
| "loss": 0.2306, |
| "step": 103000 |
| }, |
| { |
| "epoch": 2.391128566477995, |
| "grad_norm": 0.34436774253845215, |
| "learning_rate": 5.4923809477488175e-06, |
| "loss": 0.2289, |
| "step": 103500 |
| }, |
| { |
| "epoch": 2.4026799122097726, |
| "grad_norm": 0.06684353947639465, |
| "learning_rate": 5.468062236445967e-06, |
| "loss": 0.2388, |
| "step": 104000 |
| }, |
| { |
| "epoch": 2.4142312579415504, |
| "grad_norm": 38.31502914428711, |
| "learning_rate": 5.443743525143116e-06, |
| "loss": 0.2185, |
| "step": 104500 |
| }, |
| { |
| "epoch": 2.425782603673328, |
| "grad_norm": 0.47605860233306885, |
| "learning_rate": 5.419424813840265e-06, |
| "loss": 0.2536, |
| "step": 105000 |
| }, |
| { |
| "epoch": 2.437333949405106, |
| "grad_norm": 2.486711025238037, |
| "learning_rate": 5.395106102537415e-06, |
| "loss": 0.224, |
| "step": 105500 |
| }, |
| { |
| "epoch": 2.4488852951368836, |
| "grad_norm": 0.19100871682167053, |
| "learning_rate": 5.370787391234565e-06, |
| "loss": 0.2429, |
| "step": 106000 |
| }, |
| { |
| "epoch": 2.4604366408686613, |
| "grad_norm": 0.6943984627723694, |
| "learning_rate": 5.346468679931713e-06, |
| "loss": 0.2051, |
| "step": 106500 |
| }, |
| { |
| "epoch": 2.471987986600439, |
| "grad_norm": 100.54878234863281, |
| "learning_rate": 5.322149968628862e-06, |
| "loss": 0.2177, |
| "step": 107000 |
| }, |
| { |
| "epoch": 2.483539332332217, |
| "grad_norm": 1.6368072032928467, |
| "learning_rate": 5.2978312573260125e-06, |
| "loss": 0.2182, |
| "step": 107500 |
| }, |
| { |
| "epoch": 2.4950906780639945, |
| "grad_norm": 0.33522534370422363, |
| "learning_rate": 5.273512546023162e-06, |
| "loss": 0.2011, |
| "step": 108000 |
| }, |
| { |
| "epoch": 2.5066420237957723, |
| "grad_norm": 1.7450274229049683, |
| "learning_rate": 5.249193834720311e-06, |
| "loss": 0.2316, |
| "step": 108500 |
| }, |
| { |
| "epoch": 2.51819336952755, |
| "grad_norm": 0.29246997833251953, |
| "learning_rate": 5.2248751234174596e-06, |
| "loss": 0.2311, |
| "step": 109000 |
| }, |
| { |
| "epoch": 2.5297447152593278, |
| "grad_norm": 0.0980878472328186, |
| "learning_rate": 5.20055641211461e-06, |
| "loss": 0.2224, |
| "step": 109500 |
| }, |
| { |
| "epoch": 2.5412960609911055, |
| "grad_norm": 19.347719192504883, |
| "learning_rate": 5.17623770081176e-06, |
| "loss": 0.2156, |
| "step": 110000 |
| }, |
| { |
| "epoch": 2.5528474067228832, |
| "grad_norm": 0.4555191099643707, |
| "learning_rate": 5.151918989508908e-06, |
| "loss": 0.2192, |
| "step": 110500 |
| }, |
| { |
| "epoch": 2.564398752454661, |
| "grad_norm": 8.327524185180664, |
| "learning_rate": 5.127600278206057e-06, |
| "loss": 0.2331, |
| "step": 111000 |
| }, |
| { |
| "epoch": 2.5759500981864387, |
| "grad_norm": 0.23247091472148895, |
| "learning_rate": 5.103281566903207e-06, |
| "loss": 0.2068, |
| "step": 111500 |
| }, |
| { |
| "epoch": 2.5875014439182165, |
| "grad_norm": 0.5253803730010986, |
| "learning_rate": 5.078962855600357e-06, |
| "loss": 0.2199, |
| "step": 112000 |
| }, |
| { |
| "epoch": 2.599052789649994, |
| "grad_norm": 0.16480697691440582, |
| "learning_rate": 5.054644144297506e-06, |
| "loss": 0.2303, |
| "step": 112500 |
| }, |
| { |
| "epoch": 2.610604135381772, |
| "grad_norm": 13.176816940307617, |
| "learning_rate": 5.0303254329946546e-06, |
| "loss": 0.2463, |
| "step": 113000 |
| }, |
| { |
| "epoch": 2.6221554811135497, |
| "grad_norm": 0.30696120858192444, |
| "learning_rate": 5.006006721691804e-06, |
| "loss": 0.229, |
| "step": 113500 |
| }, |
| { |
| "epoch": 2.6337068268453274, |
| "grad_norm": 10.747370719909668, |
| "learning_rate": 4.981688010388954e-06, |
| "loss": 0.239, |
| "step": 114000 |
| }, |
| { |
| "epoch": 2.645258172577105, |
| "grad_norm": 10.800712585449219, |
| "learning_rate": 4.957369299086103e-06, |
| "loss": 0.2464, |
| "step": 114500 |
| }, |
| { |
| "epoch": 2.656809518308883, |
| "grad_norm": 137.77926635742188, |
| "learning_rate": 4.933050587783252e-06, |
| "loss": 0.2082, |
| "step": 115000 |
| }, |
| { |
| "epoch": 2.6683608640406606, |
| "grad_norm": 47.672916412353516, |
| "learning_rate": 4.908731876480402e-06, |
| "loss": 0.2169, |
| "step": 115500 |
| }, |
| { |
| "epoch": 2.6799122097724384, |
| "grad_norm": 13.38288402557373, |
| "learning_rate": 4.884413165177551e-06, |
| "loss": 0.2316, |
| "step": 116000 |
| }, |
| { |
| "epoch": 2.691463555504216, |
| "grad_norm": 1.015547275543213, |
| "learning_rate": 4.860094453874701e-06, |
| "loss": 0.2129, |
| "step": 116500 |
| }, |
| { |
| "epoch": 2.703014901235994, |
| "grad_norm": 0.6633150577545166, |
| "learning_rate": 4.8357757425718495e-06, |
| "loss": 0.2346, |
| "step": 117000 |
| }, |
| { |
| "epoch": 2.7145662469677716, |
| "grad_norm": 0.4231832027435303, |
| "learning_rate": 4.811457031268999e-06, |
| "loss": 0.2465, |
| "step": 117500 |
| }, |
| { |
| "epoch": 2.7261175926995493, |
| "grad_norm": 0.1299211084842682, |
| "learning_rate": 4.787138319966149e-06, |
| "loss": 0.2395, |
| "step": 118000 |
| }, |
| { |
| "epoch": 2.7376689384313275, |
| "grad_norm": 0.05142102763056755, |
| "learning_rate": 4.762819608663298e-06, |
| "loss": 0.2327, |
| "step": 118500 |
| }, |
| { |
| "epoch": 2.749220284163105, |
| "grad_norm": 0.11573930829763412, |
| "learning_rate": 4.738500897360447e-06, |
| "loss": 0.2035, |
| "step": 119000 |
| }, |
| { |
| "epoch": 2.760771629894883, |
| "grad_norm": 0.5103738307952881, |
| "learning_rate": 4.714182186057597e-06, |
| "loss": 0.2195, |
| "step": 119500 |
| }, |
| { |
| "epoch": 2.7723229756266603, |
| "grad_norm": 0.45950281620025635, |
| "learning_rate": 4.689863474754746e-06, |
| "loss": 0.2523, |
| "step": 120000 |
| }, |
| { |
| "epoch": 2.7838743213584385, |
| "grad_norm": 0.3899378180503845, |
| "learning_rate": 4.665544763451896e-06, |
| "loss": 0.2071, |
| "step": 120500 |
| }, |
| { |
| "epoch": 2.795425667090216, |
| "grad_norm": 6.975862503051758, |
| "learning_rate": 4.6412260521490445e-06, |
| "loss": 0.2541, |
| "step": 121000 |
| }, |
| { |
| "epoch": 2.806977012821994, |
| "grad_norm": 34.19971466064453, |
| "learning_rate": 4.616907340846194e-06, |
| "loss": 0.2136, |
| "step": 121500 |
| }, |
| { |
| "epoch": 2.8185283585537713, |
| "grad_norm": 0.10601510852575302, |
| "learning_rate": 4.592588629543344e-06, |
| "loss": 0.2143, |
| "step": 122000 |
| }, |
| { |
| "epoch": 2.8300797042855494, |
| "grad_norm": 7.919482231140137, |
| "learning_rate": 4.5682699182404924e-06, |
| "loss": 0.2346, |
| "step": 122500 |
| }, |
| { |
| "epoch": 2.8416310500173267, |
| "grad_norm": 0.47603341937065125, |
| "learning_rate": 4.543951206937642e-06, |
| "loss": 0.2185, |
| "step": 123000 |
| }, |
| { |
| "epoch": 2.853182395749105, |
| "grad_norm": 26.099021911621094, |
| "learning_rate": 4.519632495634792e-06, |
| "loss": 0.2307, |
| "step": 123500 |
| }, |
| { |
| "epoch": 2.8647337414808827, |
| "grad_norm": 0.5455193519592285, |
| "learning_rate": 4.495313784331941e-06, |
| "loss": 0.2057, |
| "step": 124000 |
| }, |
| { |
| "epoch": 2.8762850872126604, |
| "grad_norm": 2.6065433025360107, |
| "learning_rate": 4.47099507302909e-06, |
| "loss": 0.2273, |
| "step": 124500 |
| }, |
| { |
| "epoch": 2.887836432944438, |
| "grad_norm": 0.44194814562797546, |
| "learning_rate": 4.4466763617262395e-06, |
| "loss": 0.2237, |
| "step": 125000 |
| }, |
| { |
| "epoch": 2.899387778676216, |
| "grad_norm": 2.659928321838379, |
| "learning_rate": 4.422357650423389e-06, |
| "loss": 0.2162, |
| "step": 125500 |
| }, |
| { |
| "epoch": 2.9109391244079936, |
| "grad_norm": 1.6970124244689941, |
| "learning_rate": 4.398038939120539e-06, |
| "loss": 0.2316, |
| "step": 126000 |
| }, |
| { |
| "epoch": 2.9224904701397714, |
| "grad_norm": 103.99176025390625, |
| "learning_rate": 4.3737202278176874e-06, |
| "loss": 0.2209, |
| "step": 126500 |
| }, |
| { |
| "epoch": 2.934041815871549, |
| "grad_norm": 0.775031328201294, |
| "learning_rate": 4.349401516514837e-06, |
| "loss": 0.2073, |
| "step": 127000 |
| }, |
| { |
| "epoch": 2.945593161603327, |
| "grad_norm": 0.6462440490722656, |
| "learning_rate": 4.325082805211987e-06, |
| "loss": 0.2185, |
| "step": 127500 |
| }, |
| { |
| "epoch": 2.9571445073351046, |
| "grad_norm": 2.127567768096924, |
| "learning_rate": 4.300764093909136e-06, |
| "loss": 0.2188, |
| "step": 128000 |
| }, |
| { |
| "epoch": 2.9686958530668823, |
| "grad_norm": 0.3134165406227112, |
| "learning_rate": 4.276445382606285e-06, |
| "loss": 0.2495, |
| "step": 128500 |
| }, |
| { |
| "epoch": 2.98024719879866, |
| "grad_norm": 0.4288942515850067, |
| "learning_rate": 4.2521266713034345e-06, |
| "loss": 0.2171, |
| "step": 129000 |
| }, |
| { |
| "epoch": 2.991798544530438, |
| "grad_norm": 51.89253234863281, |
| "learning_rate": 4.227807960000584e-06, |
| "loss": 0.1999, |
| "step": 129500 |
| }, |
| { |
| "epoch": 3.0, |
| "eval_accuracy": 0.887628508721266, |
| "eval_f1": 0.8962368802798873, |
| "eval_loss": 0.683422863483429, |
| "eval_matthews_correlation": 0.7825720644338475, |
| "eval_precision": 0.8417551592867161, |
| "eval_recall": 0.9582592035034898, |
| "eval_runtime": 541.3245, |
| "eval_samples_per_second": 79.961, |
| "eval_steps_per_second": 4.999, |
| "step": 129855 |
| }, |
| { |
| "epoch": 3.0033498902622155, |
| "grad_norm": 8.645707130432129, |
| "learning_rate": 4.203489248697734e-06, |
| "loss": 0.2205, |
| "step": 130000 |
| }, |
| { |
| "epoch": 3.0149012359939933, |
| "grad_norm": 0.025636987760663033, |
| "learning_rate": 4.179170537394882e-06, |
| "loss": 0.1589, |
| "step": 130500 |
| }, |
| { |
| "epoch": 3.026452581725771, |
| "grad_norm": 9.634745597839355, |
| "learning_rate": 4.154851826092032e-06, |
| "loss": 0.1591, |
| "step": 131000 |
| }, |
| { |
| "epoch": 3.0380039274575488, |
| "grad_norm": 0.9399522542953491, |
| "learning_rate": 4.130533114789182e-06, |
| "loss": 0.1473, |
| "step": 131500 |
| }, |
| { |
| "epoch": 3.0495552731893265, |
| "grad_norm": 0.17956668138504028, |
| "learning_rate": 4.106214403486331e-06, |
| "loss": 0.1727, |
| "step": 132000 |
| }, |
| { |
| "epoch": 3.0611066189211042, |
| "grad_norm": 36.57072067260742, |
| "learning_rate": 4.08189569218348e-06, |
| "loss": 0.1547, |
| "step": 132500 |
| }, |
| { |
| "epoch": 3.072657964652882, |
| "grad_norm": 0.30588293075561523, |
| "learning_rate": 4.0575769808806295e-06, |
| "loss": 0.1727, |
| "step": 133000 |
| }, |
| { |
| "epoch": 3.0842093103846597, |
| "grad_norm": 2.21580171585083, |
| "learning_rate": 4.033258269577779e-06, |
| "loss": 0.1519, |
| "step": 133500 |
| }, |
| { |
| "epoch": 3.0957606561164375, |
| "grad_norm": 35.42125701904297, |
| "learning_rate": 4.008939558274928e-06, |
| "loss": 0.162, |
| "step": 134000 |
| }, |
| { |
| "epoch": 3.107312001848215, |
| "grad_norm": 19.89476776123047, |
| "learning_rate": 3.984620846972077e-06, |
| "loss": 0.1489, |
| "step": 134500 |
| }, |
| { |
| "epoch": 3.118863347579993, |
| "grad_norm": 0.5846690535545349, |
| "learning_rate": 3.960302135669227e-06, |
| "loss": 0.1838, |
| "step": 135000 |
| }, |
| { |
| "epoch": 3.1304146933117707, |
| "grad_norm": 33.746910095214844, |
| "learning_rate": 3.935983424366377e-06, |
| "loss": 0.1657, |
| "step": 135500 |
| }, |
| { |
| "epoch": 3.1419660390435484, |
| "grad_norm": 0.1903204321861267, |
| "learning_rate": 3.911664713063525e-06, |
| "loss": 0.1431, |
| "step": 136000 |
| }, |
| { |
| "epoch": 3.153517384775326, |
| "grad_norm": 1.768623948097229, |
| "learning_rate": 3.887346001760675e-06, |
| "loss": 0.157, |
| "step": 136500 |
| }, |
| { |
| "epoch": 3.165068730507104, |
| "grad_norm": 9.588990211486816, |
| "learning_rate": 3.8630272904578245e-06, |
| "loss": 0.1508, |
| "step": 137000 |
| }, |
| { |
| "epoch": 3.1766200762388817, |
| "grad_norm": 0.5915334820747375, |
| "learning_rate": 3.838708579154974e-06, |
| "loss": 0.1769, |
| "step": 137500 |
| }, |
| { |
| "epoch": 3.1881714219706594, |
| "grad_norm": 8.681124687194824, |
| "learning_rate": 3.814389867852123e-06, |
| "loss": 0.1802, |
| "step": 138000 |
| }, |
| { |
| "epoch": 3.199722767702437, |
| "grad_norm": 9.283745765686035, |
| "learning_rate": 3.7900711565492724e-06, |
| "loss": 0.1584, |
| "step": 138500 |
| }, |
| { |
| "epoch": 3.2112741134342153, |
| "grad_norm": 0.6678259968757629, |
| "learning_rate": 3.7657524452464216e-06, |
| "loss": 0.1554, |
| "step": 139000 |
| }, |
| { |
| "epoch": 3.2228254591659926, |
| "grad_norm": 0.31929489970207214, |
| "learning_rate": 3.741433733943571e-06, |
| "loss": 0.1702, |
| "step": 139500 |
| }, |
| { |
| "epoch": 3.234376804897771, |
| "grad_norm": 2.6308133602142334, |
| "learning_rate": 3.7171150226407203e-06, |
| "loss": 0.1597, |
| "step": 140000 |
| }, |
| { |
| "epoch": 3.2459281506295485, |
| "grad_norm": 0.37557047605514526, |
| "learning_rate": 3.69279631133787e-06, |
| "loss": 0.1542, |
| "step": 140500 |
| }, |
| { |
| "epoch": 3.2574794963613263, |
| "grad_norm": 0.47243937849998474, |
| "learning_rate": 3.668477600035019e-06, |
| "loss": 0.1595, |
| "step": 141000 |
| }, |
| { |
| "epoch": 3.269030842093104, |
| "grad_norm": 0.026057422161102295, |
| "learning_rate": 3.6441588887321686e-06, |
| "loss": 0.1917, |
| "step": 141500 |
| }, |
| { |
| "epoch": 3.2805821878248818, |
| "grad_norm": 0.18791376054286957, |
| "learning_rate": 3.619840177429318e-06, |
| "loss": 0.1325, |
| "step": 142000 |
| }, |
| { |
| "epoch": 3.2921335335566595, |
| "grad_norm": 0.5374308824539185, |
| "learning_rate": 3.5955214661264674e-06, |
| "loss": 0.1703, |
| "step": 142500 |
| }, |
| { |
| "epoch": 3.3036848792884372, |
| "grad_norm": 0.0012748383451253176, |
| "learning_rate": 3.5712027548236165e-06, |
| "loss": 0.1509, |
| "step": 143000 |
| }, |
| { |
| "epoch": 3.315236225020215, |
| "grad_norm": 0.19805267453193665, |
| "learning_rate": 3.5468840435207657e-06, |
| "loss": 0.1437, |
| "step": 143500 |
| }, |
| { |
| "epoch": 3.3267875707519927, |
| "grad_norm": 15.450338363647461, |
| "learning_rate": 3.5225653322179153e-06, |
| "loss": 0.1741, |
| "step": 144000 |
| }, |
| { |
| "epoch": 3.3383389164837705, |
| "grad_norm": 0.3308798670768738, |
| "learning_rate": 3.4982466209150645e-06, |
| "loss": 0.1392, |
| "step": 144500 |
| }, |
| { |
| "epoch": 3.349890262215548, |
| "grad_norm": 21.04158592224121, |
| "learning_rate": 3.473927909612214e-06, |
| "loss": 0.1704, |
| "step": 145000 |
| }, |
| { |
| "epoch": 3.361441607947326, |
| "grad_norm": 8.919427871704102, |
| "learning_rate": 3.449609198309363e-06, |
| "loss": 0.158, |
| "step": 145500 |
| }, |
| { |
| "epoch": 3.3729929536791037, |
| "grad_norm": 0.04234965518116951, |
| "learning_rate": 3.425290487006513e-06, |
| "loss": 0.1676, |
| "step": 146000 |
| }, |
| { |
| "epoch": 3.3845442994108814, |
| "grad_norm": 0.7735577821731567, |
| "learning_rate": 3.400971775703662e-06, |
| "loss": 0.1717, |
| "step": 146500 |
| }, |
| { |
| "epoch": 3.396095645142659, |
| "grad_norm": 0.0031218251679092646, |
| "learning_rate": 3.3766530644008115e-06, |
| "loss": 0.1852, |
| "step": 147000 |
| }, |
| { |
| "epoch": 3.407646990874437, |
| "grad_norm": 0.2701342701911926, |
| "learning_rate": 3.3523343530979607e-06, |
| "loss": 0.178, |
| "step": 147500 |
| }, |
| { |
| "epoch": 3.4191983366062146, |
| "grad_norm": 14.016953468322754, |
| "learning_rate": 3.3280156417951103e-06, |
| "loss": 0.1832, |
| "step": 148000 |
| }, |
| { |
| "epoch": 3.4307496823379924, |
| "grad_norm": 0.002833212958648801, |
| "learning_rate": 3.3036969304922594e-06, |
| "loss": 0.1431, |
| "step": 148500 |
| }, |
| { |
| "epoch": 3.44230102806977, |
| "grad_norm": 0.38238525390625, |
| "learning_rate": 3.279378219189409e-06, |
| "loss": 0.1736, |
| "step": 149000 |
| }, |
| { |
| "epoch": 3.453852373801548, |
| "grad_norm": 0.019360702484846115, |
| "learning_rate": 3.255059507886558e-06, |
| "loss": 0.1589, |
| "step": 149500 |
| }, |
| { |
| "epoch": 3.4654037195333256, |
| "grad_norm": 316.6242370605469, |
| "learning_rate": 3.2307407965837078e-06, |
| "loss": 0.1813, |
| "step": 150000 |
| }, |
| { |
| "epoch": 3.4769550652651033, |
| "grad_norm": 0.6166911125183105, |
| "learning_rate": 3.206422085280857e-06, |
| "loss": 0.1562, |
| "step": 150500 |
| }, |
| { |
| "epoch": 3.488506410996881, |
| "grad_norm": 0.12754186987876892, |
| "learning_rate": 3.1821033739780065e-06, |
| "loss": 0.1826, |
| "step": 151000 |
| }, |
| { |
| "epoch": 3.500057756728659, |
| "grad_norm": 1.4088376760482788, |
| "learning_rate": 3.1577846626751557e-06, |
| "loss": 0.1751, |
| "step": 151500 |
| }, |
| { |
| "epoch": 3.5116091024604366, |
| "grad_norm": 0.6573951840400696, |
| "learning_rate": 3.1334659513723053e-06, |
| "loss": 0.1612, |
| "step": 152000 |
| }, |
| { |
| "epoch": 3.5231604481922143, |
| "grad_norm": 0.6012342572212219, |
| "learning_rate": 3.1091472400694544e-06, |
| "loss": 0.1734, |
| "step": 152500 |
| }, |
| { |
| "epoch": 3.534711793923992, |
| "grad_norm": 0.6947406530380249, |
| "learning_rate": 3.084828528766604e-06, |
| "loss": 0.1574, |
| "step": 153000 |
| }, |
| { |
| "epoch": 3.54626313965577, |
| "grad_norm": 0.49752572178840637, |
| "learning_rate": 3.060509817463753e-06, |
| "loss": 0.142, |
| "step": 153500 |
| }, |
| { |
| "epoch": 3.5578144853875475, |
| "grad_norm": 0.6474779844284058, |
| "learning_rate": 3.0361911061609028e-06, |
| "loss": 0.1511, |
| "step": 154000 |
| }, |
| { |
| "epoch": 3.5693658311193253, |
| "grad_norm": 1.3100844621658325, |
| "learning_rate": 3.011872394858052e-06, |
| "loss": 0.1652, |
| "step": 154500 |
| }, |
| { |
| "epoch": 3.580917176851103, |
| "grad_norm": 0.9856226444244385, |
| "learning_rate": 2.987553683555201e-06, |
| "loss": 0.1884, |
| "step": 155000 |
| }, |
| { |
| "epoch": 3.5924685225828807, |
| "grad_norm": 73.24075317382812, |
| "learning_rate": 2.9632349722523507e-06, |
| "loss": 0.1629, |
| "step": 155500 |
| }, |
| { |
| "epoch": 3.6040198683146585, |
| "grad_norm": 8.039386749267578, |
| "learning_rate": 2.9389162609495e-06, |
| "loss": 0.1628, |
| "step": 156000 |
| }, |
| { |
| "epoch": 3.6155712140464367, |
| "grad_norm": 0.0022829582449048758, |
| "learning_rate": 2.9145975496466494e-06, |
| "loss": 0.1532, |
| "step": 156500 |
| }, |
| { |
| "epoch": 3.627122559778214, |
| "grad_norm": 0.43138188123703003, |
| "learning_rate": 2.8902788383437986e-06, |
| "loss": 0.1711, |
| "step": 157000 |
| }, |
| { |
| "epoch": 3.638673905509992, |
| "grad_norm": 7.881419658660889, |
| "learning_rate": 2.865960127040948e-06, |
| "loss": 0.155, |
| "step": 157500 |
| }, |
| { |
| "epoch": 3.6502252512417694, |
| "grad_norm": 117.18584442138672, |
| "learning_rate": 2.8416414157380973e-06, |
| "loss": 0.155, |
| "step": 158000 |
| }, |
| { |
| "epoch": 3.6617765969735476, |
| "grad_norm": 0.0010274089872837067, |
| "learning_rate": 2.817322704435247e-06, |
| "loss": 0.1717, |
| "step": 158500 |
| }, |
| { |
| "epoch": 3.673327942705325, |
| "grad_norm": 109.89828491210938, |
| "learning_rate": 2.793003993132396e-06, |
| "loss": 0.1608, |
| "step": 159000 |
| }, |
| { |
| "epoch": 3.684879288437103, |
| "grad_norm": 0.7722026109695435, |
| "learning_rate": 2.7686852818295457e-06, |
| "loss": 0.1789, |
| "step": 159500 |
| }, |
| { |
| "epoch": 3.6964306341688804, |
| "grad_norm": 0.0013100790092721581, |
| "learning_rate": 2.744366570526695e-06, |
| "loss": 0.1601, |
| "step": 160000 |
| }, |
| { |
| "epoch": 3.7079819799006586, |
| "grad_norm": 0.25156673789024353, |
| "learning_rate": 2.7200478592238444e-06, |
| "loss": 0.1588, |
| "step": 160500 |
| }, |
| { |
| "epoch": 3.7195333256324363, |
| "grad_norm": 0.18242871761322021, |
| "learning_rate": 2.6957291479209936e-06, |
| "loss": 0.1626, |
| "step": 161000 |
| }, |
| { |
| "epoch": 3.731084671364214, |
| "grad_norm": 0.008753698319196701, |
| "learning_rate": 2.671410436618143e-06, |
| "loss": 0.1654, |
| "step": 161500 |
| }, |
| { |
| "epoch": 3.742636017095992, |
| "grad_norm": 0.6226741671562195, |
| "learning_rate": 2.6470917253152923e-06, |
| "loss": 0.174, |
| "step": 162000 |
| }, |
| { |
| "epoch": 3.7541873628277695, |
| "grad_norm": 12.760992050170898, |
| "learning_rate": 2.622773014012442e-06, |
| "loss": 0.1656, |
| "step": 162500 |
| }, |
| { |
| "epoch": 3.7657387085595473, |
| "grad_norm": 16.012075424194336, |
| "learning_rate": 2.598454302709591e-06, |
| "loss": 0.15, |
| "step": 163000 |
| }, |
| { |
| "epoch": 3.777290054291325, |
| "grad_norm": 7.900235652923584, |
| "learning_rate": 2.5741355914067407e-06, |
| "loss": 0.1705, |
| "step": 163500 |
| }, |
| { |
| "epoch": 3.7888414000231028, |
| "grad_norm": 2.0820932388305664, |
| "learning_rate": 2.54981688010389e-06, |
| "loss": 0.1752, |
| "step": 164000 |
| }, |
| { |
| "epoch": 3.8003927457548805, |
| "grad_norm": 1.189961552619934, |
| "learning_rate": 2.5254981688010394e-06, |
| "loss": 0.1578, |
| "step": 164500 |
| }, |
| { |
| "epoch": 3.8119440914866582, |
| "grad_norm": 0.005685470532625914, |
| "learning_rate": 2.5011794574981886e-06, |
| "loss": 0.147, |
| "step": 165000 |
| }, |
| { |
| "epoch": 3.823495437218436, |
| "grad_norm": 0.33695000410079956, |
| "learning_rate": 2.4768607461953377e-06, |
| "loss": 0.1835, |
| "step": 165500 |
| }, |
| { |
| "epoch": 3.8350467829502137, |
| "grad_norm": 18.306541442871094, |
| "learning_rate": 2.4525420348924873e-06, |
| "loss": 0.1963, |
| "step": 166000 |
| }, |
| { |
| "epoch": 3.8465981286819915, |
| "grad_norm": 0.6380665302276611, |
| "learning_rate": 2.4282233235896365e-06, |
| "loss": 0.1696, |
| "step": 166500 |
| }, |
| { |
| "epoch": 3.858149474413769, |
| "grad_norm": 0.31257039308547974, |
| "learning_rate": 2.403904612286786e-06, |
| "loss": 0.166, |
| "step": 167000 |
| }, |
| { |
| "epoch": 3.869700820145547, |
| "grad_norm": 0.09384506940841675, |
| "learning_rate": 2.3795859009839352e-06, |
| "loss": 0.1734, |
| "step": 167500 |
| }, |
| { |
| "epoch": 3.8812521658773247, |
| "grad_norm": 0.12250012159347534, |
| "learning_rate": 2.355267189681085e-06, |
| "loss": 0.1705, |
| "step": 168000 |
| }, |
| { |
| "epoch": 3.8928035116091024, |
| "grad_norm": 0.3609682619571686, |
| "learning_rate": 2.330948478378234e-06, |
| "loss": 0.1548, |
| "step": 168500 |
| }, |
| { |
| "epoch": 3.90435485734088, |
| "grad_norm": 9.560044288635254, |
| "learning_rate": 2.3066297670753836e-06, |
| "loss": 0.1468, |
| "step": 169000 |
| }, |
| { |
| "epoch": 3.915906203072658, |
| "grad_norm": 1.053947925567627, |
| "learning_rate": 2.2823110557725327e-06, |
| "loss": 0.1686, |
| "step": 169500 |
| }, |
| { |
| "epoch": 3.9274575488044356, |
| "grad_norm": 0.032445378601551056, |
| "learning_rate": 2.2579923444696823e-06, |
| "loss": 0.1487, |
| "step": 170000 |
| }, |
| { |
| "epoch": 3.9390088945362134, |
| "grad_norm": 8.668307304382324, |
| "learning_rate": 2.2336736331668315e-06, |
| "loss": 0.1791, |
| "step": 170500 |
| }, |
| { |
| "epoch": 3.950560240267991, |
| "grad_norm": 9.52789306640625, |
| "learning_rate": 2.2093549218639806e-06, |
| "loss": 0.1841, |
| "step": 171000 |
| }, |
| { |
| "epoch": 3.962111585999769, |
| "grad_norm": 0.011110074818134308, |
| "learning_rate": 2.18503621056113e-06, |
| "loss": 0.1625, |
| "step": 171500 |
| }, |
| { |
| "epoch": 3.9736629317315466, |
| "grad_norm": 82.39501190185547, |
| "learning_rate": 2.1607174992582794e-06, |
| "loss": 0.1714, |
| "step": 172000 |
| }, |
| { |
| "epoch": 3.9852142774633244, |
| "grad_norm": 1.0335499048233032, |
| "learning_rate": 2.136398787955429e-06, |
| "loss": 0.15, |
| "step": 172500 |
| }, |
| { |
| "epoch": 3.996765623195102, |
| "grad_norm": 2.844334363937378, |
| "learning_rate": 2.112080076652578e-06, |
| "loss": 0.1628, |
| "step": 173000 |
| }, |
| { |
| "epoch": 4.0, |
| "eval_accuracy": 0.8855723691810096, |
| "eval_f1": 0.895216738242823, |
| "eval_loss": 0.9363193511962891, |
| "eval_matthews_correlation": 0.7805306223945409, |
| "eval_precision": 0.8347009626005997, |
| "eval_recall": 0.9651931937411614, |
| "eval_runtime": 550.7934, |
| "eval_samples_per_second": 78.587, |
| "eval_steps_per_second": 4.913, |
| "step": 173140 |
| } |
| ], |
| "logging_steps": 500, |
| "max_steps": 216425, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 3.851751926497198e+17, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |