| { |
| "best_metric": 0.883420247381804, |
| "best_model_checkpoint": "finetune/models/plant-dnamamba-BPE-NoduleExpDup/checkpoint-86570", |
| "epoch": 2.0, |
| "eval_steps": 500, |
| "global_step": 86570, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.011551345731777752, |
| "grad_norm": 29.45172119140625, |
| "learning_rate": 4.620218074293107e-07, |
| "loss": 1.0687, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.023102691463555505, |
| "grad_norm": 31.642974853515625, |
| "learning_rate": 9.240436148586214e-07, |
| "loss": 0.9807, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.034654037195333255, |
| "grad_norm": 12.195789337158203, |
| "learning_rate": 1.3860654222879322e-06, |
| "loss": 0.8373, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.04620538292711101, |
| "grad_norm": 13.536879539489746, |
| "learning_rate": 1.8480872297172428e-06, |
| "loss": 0.7818, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.05775672865888876, |
| "grad_norm": 13.85306453704834, |
| "learning_rate": 2.3101090371465534e-06, |
| "loss": 0.7234, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.06930807439066651, |
| "grad_norm": 11.797886848449707, |
| "learning_rate": 2.7721308445758644e-06, |
| "loss": 0.6925, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.08085942012244426, |
| "grad_norm": 6.770650386810303, |
| "learning_rate": 3.2341526520051746e-06, |
| "loss": 0.6751, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.09241076585422202, |
| "grad_norm": 10.289820671081543, |
| "learning_rate": 3.6961744594344856e-06, |
| "loss": 0.6636, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.10396211158599977, |
| "grad_norm": 15.198243141174316, |
| "learning_rate": 4.158196266863797e-06, |
| "loss": 0.6563, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.11551345731777753, |
| "grad_norm": 13.971009254455566, |
| "learning_rate": 4.620218074293107e-06, |
| "loss": 0.6481, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.12706480304955528, |
| "grad_norm": 8.441276550292969, |
| "learning_rate": 5.082239881722417e-06, |
| "loss": 0.6549, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.13861614878133302, |
| "grad_norm": 14.8948335647583, |
| "learning_rate": 5.544261689151729e-06, |
| "loss": 0.6507, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.1501674945131108, |
| "grad_norm": 7.725317478179932, |
| "learning_rate": 6.006283496581039e-06, |
| "loss": 0.6544, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.16171884024488853, |
| "grad_norm": 17.12333106994629, |
| "learning_rate": 6.468305304010349e-06, |
| "loss": 0.6476, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.17327018597666627, |
| "grad_norm": 7.033656120300293, |
| "learning_rate": 6.930327111439661e-06, |
| "loss": 0.6496, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.18482153170844404, |
| "grad_norm": 7.053399085998535, |
| "learning_rate": 7.392348918868971e-06, |
| "loss": 0.6523, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.19637287744022178, |
| "grad_norm": 6.655728340148926, |
| "learning_rate": 7.854370726298282e-06, |
| "loss": 0.6455, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.20792422317199954, |
| "grad_norm": 8.08369255065918, |
| "learning_rate": 8.316392533727593e-06, |
| "loss": 0.6415, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.21947556890377729, |
| "grad_norm": 3.947148084640503, |
| "learning_rate": 8.778414341156903e-06, |
| "loss": 0.6514, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.23102691463555505, |
| "grad_norm": 8.264023780822754, |
| "learning_rate": 9.240436148586214e-06, |
| "loss": 0.6431, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.2425782603673328, |
| "grad_norm": 4.74052095413208, |
| "learning_rate": 9.702457956015525e-06, |
| "loss": 0.6331, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.25412960609911056, |
| "grad_norm": 12.625064849853516, |
| "learning_rate": 9.991342538776186e-06, |
| "loss": 0.6449, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.2656809518308883, |
| "grad_norm": 17.814157485961914, |
| "learning_rate": 9.967023827473336e-06, |
| "loss": 0.6414, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.27723229756266604, |
| "grad_norm": 15.34868049621582, |
| "learning_rate": 9.942705116170485e-06, |
| "loss": 0.6287, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.2887836432944438, |
| "grad_norm": 5.785658836364746, |
| "learning_rate": 9.918386404867634e-06, |
| "loss": 0.6417, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.3003349890262216, |
| "grad_norm": 3.7156598567962646, |
| "learning_rate": 9.894067693564784e-06, |
| "loss": 0.6436, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.3118863347579993, |
| "grad_norm": 12.272802352905273, |
| "learning_rate": 9.869748982261933e-06, |
| "loss": 0.6505, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.32343768048977706, |
| "grad_norm": 15.592353820800781, |
| "learning_rate": 9.845430270959082e-06, |
| "loss": 0.6313, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.3349890262215548, |
| "grad_norm": 4.684946537017822, |
| "learning_rate": 9.821111559656232e-06, |
| "loss": 0.6457, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.34654037195333254, |
| "grad_norm": 6.0165300369262695, |
| "learning_rate": 9.79679284835338e-06, |
| "loss": 0.641, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.35809171768511033, |
| "grad_norm": 10.924590110778809, |
| "learning_rate": 9.772474137050531e-06, |
| "loss": 0.6293, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.3696430634168881, |
| "grad_norm": 7.296951770782471, |
| "learning_rate": 9.74815542574768e-06, |
| "loss": 0.627, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.3811944091486658, |
| "grad_norm": 14.750964164733887, |
| "learning_rate": 9.723836714444829e-06, |
| "loss": 0.6254, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.39274575488044355, |
| "grad_norm": 7.377384662628174, |
| "learning_rate": 9.699518003141977e-06, |
| "loss": 0.6287, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.40429710061222135, |
| "grad_norm": 6.540673732757568, |
| "learning_rate": 9.675199291839128e-06, |
| "loss": 0.6309, |
| "step": 17500 |
| }, |
| { |
| "epoch": 0.4158484463439991, |
| "grad_norm": 6.4445295333862305, |
| "learning_rate": 9.650880580536277e-06, |
| "loss": 0.6302, |
| "step": 18000 |
| }, |
| { |
| "epoch": 0.42739979207577683, |
| "grad_norm": 7.85426664352417, |
| "learning_rate": 9.626561869233427e-06, |
| "loss": 0.6201, |
| "step": 18500 |
| }, |
| { |
| "epoch": 0.43895113780755457, |
| "grad_norm": 5.960766792297363, |
| "learning_rate": 9.602243157930576e-06, |
| "loss": 0.626, |
| "step": 19000 |
| }, |
| { |
| "epoch": 0.4505024835393323, |
| "grad_norm": 9.376029968261719, |
| "learning_rate": 9.577924446627726e-06, |
| "loss": 0.6164, |
| "step": 19500 |
| }, |
| { |
| "epoch": 0.4620538292711101, |
| "grad_norm": 17.05916976928711, |
| "learning_rate": 9.553605735324875e-06, |
| "loss": 0.6232, |
| "step": 20000 |
| }, |
| { |
| "epoch": 0.47360517500288785, |
| "grad_norm": 7.523278713226318, |
| "learning_rate": 9.529287024022024e-06, |
| "loss": 0.601, |
| "step": 20500 |
| }, |
| { |
| "epoch": 0.4851565207346656, |
| "grad_norm": 6.75208044052124, |
| "learning_rate": 9.504968312719172e-06, |
| "loss": 0.6157, |
| "step": 21000 |
| }, |
| { |
| "epoch": 0.4967078664664433, |
| "grad_norm": 12.919069290161133, |
| "learning_rate": 9.480649601416323e-06, |
| "loss": 0.6098, |
| "step": 21500 |
| }, |
| { |
| "epoch": 0.5082592121982211, |
| "grad_norm": 19.89571189880371, |
| "learning_rate": 9.456330890113472e-06, |
| "loss": 0.6131, |
| "step": 22000 |
| }, |
| { |
| "epoch": 0.5198105579299989, |
| "grad_norm": 7.436350345611572, |
| "learning_rate": 9.432012178810622e-06, |
| "loss": 0.6121, |
| "step": 22500 |
| }, |
| { |
| "epoch": 0.5313619036617766, |
| "grad_norm": 10.576221466064453, |
| "learning_rate": 9.40769346750777e-06, |
| "loss": 0.6005, |
| "step": 23000 |
| }, |
| { |
| "epoch": 0.5429132493935543, |
| "grad_norm": 12.731452941894531, |
| "learning_rate": 9.383374756204921e-06, |
| "loss": 0.6217, |
| "step": 23500 |
| }, |
| { |
| "epoch": 0.5544645951253321, |
| "grad_norm": 9.219985961914062, |
| "learning_rate": 9.35905604490207e-06, |
| "loss": 0.5985, |
| "step": 24000 |
| }, |
| { |
| "epoch": 0.5660159408571098, |
| "grad_norm": 12.659988403320312, |
| "learning_rate": 9.334737333599219e-06, |
| "loss": 0.5951, |
| "step": 24500 |
| }, |
| { |
| "epoch": 0.5775672865888876, |
| "grad_norm": 32.879234313964844, |
| "learning_rate": 9.310418622296367e-06, |
| "loss": 0.5961, |
| "step": 25000 |
| }, |
| { |
| "epoch": 0.5891186323206653, |
| "grad_norm": 9.763121604919434, |
| "learning_rate": 9.286099910993518e-06, |
| "loss": 0.6081, |
| "step": 25500 |
| }, |
| { |
| "epoch": 0.6006699780524432, |
| "grad_norm": 9.057079315185547, |
| "learning_rate": 9.261781199690667e-06, |
| "loss": 0.5863, |
| "step": 26000 |
| }, |
| { |
| "epoch": 0.6122213237842209, |
| "grad_norm": 17.381582260131836, |
| "learning_rate": 9.237462488387817e-06, |
| "loss": 0.5857, |
| "step": 26500 |
| }, |
| { |
| "epoch": 0.6237726695159986, |
| "grad_norm": 18.3979549407959, |
| "learning_rate": 9.213143777084966e-06, |
| "loss": 0.5853, |
| "step": 27000 |
| }, |
| { |
| "epoch": 0.6353240152477764, |
| "grad_norm": 11.13846206665039, |
| "learning_rate": 9.188825065782114e-06, |
| "loss": 0.582, |
| "step": 27500 |
| }, |
| { |
| "epoch": 0.6468753609795541, |
| "grad_norm": 8.506060600280762, |
| "learning_rate": 9.164506354479265e-06, |
| "loss": 0.5773, |
| "step": 28000 |
| }, |
| { |
| "epoch": 0.6584267067113319, |
| "grad_norm": 9.814123153686523, |
| "learning_rate": 9.140187643176414e-06, |
| "loss": 0.5797, |
| "step": 28500 |
| }, |
| { |
| "epoch": 0.6699780524431096, |
| "grad_norm": 11.70206069946289, |
| "learning_rate": 9.115868931873562e-06, |
| "loss": 0.5587, |
| "step": 29000 |
| }, |
| { |
| "epoch": 0.6815293981748873, |
| "grad_norm": 11.949383735656738, |
| "learning_rate": 9.091550220570711e-06, |
| "loss": 0.5769, |
| "step": 29500 |
| }, |
| { |
| "epoch": 0.6930807439066651, |
| "grad_norm": 11.51278018951416, |
| "learning_rate": 9.067231509267862e-06, |
| "loss": 0.5798, |
| "step": 30000 |
| }, |
| { |
| "epoch": 0.7046320896384429, |
| "grad_norm": 13.705997467041016, |
| "learning_rate": 9.042912797965012e-06, |
| "loss": 0.5585, |
| "step": 30500 |
| }, |
| { |
| "epoch": 0.7161834353702207, |
| "grad_norm": 18.50054168701172, |
| "learning_rate": 9.01859408666216e-06, |
| "loss": 0.5656, |
| "step": 31000 |
| }, |
| { |
| "epoch": 0.7277347811019984, |
| "grad_norm": 23.920692443847656, |
| "learning_rate": 8.99427537535931e-06, |
| "loss": 0.5493, |
| "step": 31500 |
| }, |
| { |
| "epoch": 0.7392861268337761, |
| "grad_norm": 15.425186157226562, |
| "learning_rate": 8.96995666405646e-06, |
| "loss": 0.5538, |
| "step": 32000 |
| }, |
| { |
| "epoch": 0.7508374725655539, |
| "grad_norm": 11.077018737792969, |
| "learning_rate": 8.945637952753609e-06, |
| "loss": 0.5553, |
| "step": 32500 |
| }, |
| { |
| "epoch": 0.7623888182973316, |
| "grad_norm": 17.128128051757812, |
| "learning_rate": 8.921319241450757e-06, |
| "loss": 0.5657, |
| "step": 33000 |
| }, |
| { |
| "epoch": 0.7739401640291094, |
| "grad_norm": 37.22261428833008, |
| "learning_rate": 8.897000530147906e-06, |
| "loss": 0.5573, |
| "step": 33500 |
| }, |
| { |
| "epoch": 0.7854915097608871, |
| "grad_norm": 18.649255752563477, |
| "learning_rate": 8.872681818845056e-06, |
| "loss": 0.5337, |
| "step": 34000 |
| }, |
| { |
| "epoch": 0.7970428554926648, |
| "grad_norm": 11.49511432647705, |
| "learning_rate": 8.848363107542205e-06, |
| "loss": 0.5442, |
| "step": 34500 |
| }, |
| { |
| "epoch": 0.8085942012244427, |
| "grad_norm": 20.64067840576172, |
| "learning_rate": 8.824044396239356e-06, |
| "loss": 0.5632, |
| "step": 35000 |
| }, |
| { |
| "epoch": 0.8201455469562204, |
| "grad_norm": 18.51673126220703, |
| "learning_rate": 8.799725684936504e-06, |
| "loss": 0.5454, |
| "step": 35500 |
| }, |
| { |
| "epoch": 0.8316968926879982, |
| "grad_norm": 18.139997482299805, |
| "learning_rate": 8.775406973633655e-06, |
| "loss": 0.5371, |
| "step": 36000 |
| }, |
| { |
| "epoch": 0.8432482384197759, |
| "grad_norm": 21.852693557739258, |
| "learning_rate": 8.751088262330804e-06, |
| "loss": 0.539, |
| "step": 36500 |
| }, |
| { |
| "epoch": 0.8547995841515537, |
| "grad_norm": 31.914161682128906, |
| "learning_rate": 8.726769551027952e-06, |
| "loss": 0.5383, |
| "step": 37000 |
| }, |
| { |
| "epoch": 0.8663509298833314, |
| "grad_norm": 9.24451732635498, |
| "learning_rate": 8.702450839725101e-06, |
| "loss": 0.548, |
| "step": 37500 |
| }, |
| { |
| "epoch": 0.8779022756151091, |
| "grad_norm": 27.3262882232666, |
| "learning_rate": 8.678132128422251e-06, |
| "loss": 0.524, |
| "step": 38000 |
| }, |
| { |
| "epoch": 0.8894536213468869, |
| "grad_norm": 23.231952667236328, |
| "learning_rate": 8.6538134171194e-06, |
| "loss": 0.5121, |
| "step": 38500 |
| }, |
| { |
| "epoch": 0.9010049670786646, |
| "grad_norm": 28.35572052001953, |
| "learning_rate": 8.62949470581655e-06, |
| "loss": 0.5333, |
| "step": 39000 |
| }, |
| { |
| "epoch": 0.9125563128104425, |
| "grad_norm": 24.61405372619629, |
| "learning_rate": 8.6051759945137e-06, |
| "loss": 0.5015, |
| "step": 39500 |
| }, |
| { |
| "epoch": 0.9241076585422202, |
| "grad_norm": 30.62819480895996, |
| "learning_rate": 8.580857283210848e-06, |
| "loss": 0.5092, |
| "step": 40000 |
| }, |
| { |
| "epoch": 0.935659004273998, |
| "grad_norm": 19.295331954956055, |
| "learning_rate": 8.556538571907999e-06, |
| "loss": 0.5125, |
| "step": 40500 |
| }, |
| { |
| "epoch": 0.9472103500057757, |
| "grad_norm": 10.785584449768066, |
| "learning_rate": 8.532219860605147e-06, |
| "loss": 0.5161, |
| "step": 41000 |
| }, |
| { |
| "epoch": 0.9587616957375534, |
| "grad_norm": 24.81365394592285, |
| "learning_rate": 8.507901149302296e-06, |
| "loss": 0.498, |
| "step": 41500 |
| }, |
| { |
| "epoch": 0.9703130414693312, |
| "grad_norm": 48.026973724365234, |
| "learning_rate": 8.483582437999446e-06, |
| "loss": 0.4916, |
| "step": 42000 |
| }, |
| { |
| "epoch": 0.9818643872011089, |
| "grad_norm": 6.653892517089844, |
| "learning_rate": 8.459263726696595e-06, |
| "loss": 0.4944, |
| "step": 42500 |
| }, |
| { |
| "epoch": 0.9934157329328867, |
| "grad_norm": 39.814945220947266, |
| "learning_rate": 8.434945015393746e-06, |
| "loss": 0.4954, |
| "step": 43000 |
| }, |
| { |
| "epoch": 1.0, |
| "eval_accuracy": 0.7796003234376805, |
| "eval_f1": 0.7890500619140279, |
| "eval_loss": 0.4737609326839447, |
| "eval_matthews_correlation": 0.5599099791389662, |
| "eval_precision": 0.7656524910955671, |
| "eval_recall": 0.8139227225035354, |
| "eval_runtime": 550.4755, |
| "eval_samples_per_second": 78.632, |
| "eval_steps_per_second": 4.916, |
| "step": 43285 |
| }, |
| { |
| "epoch": 1.0049670786646645, |
| "grad_norm": 22.5259952545166, |
| "learning_rate": 8.410626304090894e-06, |
| "loss": 0.4339, |
| "step": 43500 |
| }, |
| { |
| "epoch": 1.0165184243964422, |
| "grad_norm": 25.5583438873291, |
| "learning_rate": 8.386307592788043e-06, |
| "loss": 0.3274, |
| "step": 44000 |
| }, |
| { |
| "epoch": 1.02806977012822, |
| "grad_norm": 9.524903297424316, |
| "learning_rate": 8.361988881485194e-06, |
| "loss": 0.3372, |
| "step": 44500 |
| }, |
| { |
| "epoch": 1.0396211158599977, |
| "grad_norm": 37.77333068847656, |
| "learning_rate": 8.337670170182342e-06, |
| "loss": 0.3471, |
| "step": 45000 |
| }, |
| { |
| "epoch": 1.0511724615917755, |
| "grad_norm": 47.2765007019043, |
| "learning_rate": 8.313351458879491e-06, |
| "loss": 0.3436, |
| "step": 45500 |
| }, |
| { |
| "epoch": 1.0627238073235532, |
| "grad_norm": 58.79914474487305, |
| "learning_rate": 8.289032747576641e-06, |
| "loss": 0.3524, |
| "step": 46000 |
| }, |
| { |
| "epoch": 1.074275153055331, |
| "grad_norm": 23.910385131835938, |
| "learning_rate": 8.26471403627379e-06, |
| "loss": 0.3283, |
| "step": 46500 |
| }, |
| { |
| "epoch": 1.0858264987871087, |
| "grad_norm": 65.18335723876953, |
| "learning_rate": 8.24039532497094e-06, |
| "loss": 0.3529, |
| "step": 47000 |
| }, |
| { |
| "epoch": 1.0973778445188864, |
| "grad_norm": 29.333236694335938, |
| "learning_rate": 8.21607661366809e-06, |
| "loss": 0.3471, |
| "step": 47500 |
| }, |
| { |
| "epoch": 1.1089291902506642, |
| "grad_norm": 41.33262252807617, |
| "learning_rate": 8.191757902365238e-06, |
| "loss": 0.3507, |
| "step": 48000 |
| }, |
| { |
| "epoch": 1.120480535982442, |
| "grad_norm": 91.7591781616211, |
| "learning_rate": 8.167439191062387e-06, |
| "loss": 0.3506, |
| "step": 48500 |
| }, |
| { |
| "epoch": 1.1320318817142196, |
| "grad_norm": 7.366957187652588, |
| "learning_rate": 8.143120479759537e-06, |
| "loss": 0.3526, |
| "step": 49000 |
| }, |
| { |
| "epoch": 1.1435832274459974, |
| "grad_norm": 31.159626007080078, |
| "learning_rate": 8.118801768456686e-06, |
| "loss": 0.3423, |
| "step": 49500 |
| }, |
| { |
| "epoch": 1.1551345731777751, |
| "grad_norm": 15.73024845123291, |
| "learning_rate": 8.094483057153836e-06, |
| "loss": 0.3516, |
| "step": 50000 |
| }, |
| { |
| "epoch": 1.1666859189095529, |
| "grad_norm": 66.58766174316406, |
| "learning_rate": 8.070164345850985e-06, |
| "loss": 0.3435, |
| "step": 50500 |
| }, |
| { |
| "epoch": 1.1782372646413308, |
| "grad_norm": 48.182308197021484, |
| "learning_rate": 8.045845634548136e-06, |
| "loss": 0.344, |
| "step": 51000 |
| }, |
| { |
| "epoch": 1.1897886103731086, |
| "grad_norm": 42.61834716796875, |
| "learning_rate": 8.021526923245284e-06, |
| "loss": 0.3739, |
| "step": 51500 |
| }, |
| { |
| "epoch": 1.2013399561048863, |
| "grad_norm": 59.38914489746094, |
| "learning_rate": 7.997208211942433e-06, |
| "loss": 0.3399, |
| "step": 52000 |
| }, |
| { |
| "epoch": 1.212891301836664, |
| "grad_norm": 5.7647857666015625, |
| "learning_rate": 7.972889500639582e-06, |
| "loss": 0.3518, |
| "step": 52500 |
| }, |
| { |
| "epoch": 1.2244426475684418, |
| "grad_norm": 73.71373748779297, |
| "learning_rate": 7.948570789336732e-06, |
| "loss": 0.3523, |
| "step": 53000 |
| }, |
| { |
| "epoch": 1.2359939933002195, |
| "grad_norm": 14.03942584991455, |
| "learning_rate": 7.924252078033881e-06, |
| "loss": 0.3512, |
| "step": 53500 |
| }, |
| { |
| "epoch": 1.2475453390319973, |
| "grad_norm": 53.254791259765625, |
| "learning_rate": 7.899933366731031e-06, |
| "loss": 0.3454, |
| "step": 54000 |
| }, |
| { |
| "epoch": 1.259096684763775, |
| "grad_norm": 4.009112358093262, |
| "learning_rate": 7.87561465542818e-06, |
| "loss": 0.3406, |
| "step": 54500 |
| }, |
| { |
| "epoch": 1.2706480304955527, |
| "grad_norm": 37.73891067504883, |
| "learning_rate": 7.85129594412533e-06, |
| "loss": 0.3474, |
| "step": 55000 |
| }, |
| { |
| "epoch": 1.2821993762273305, |
| "grad_norm": 84.36405181884766, |
| "learning_rate": 7.82697723282248e-06, |
| "loss": 0.3551, |
| "step": 55500 |
| }, |
| { |
| "epoch": 1.2937507219591082, |
| "grad_norm": 43.2830810546875, |
| "learning_rate": 7.802658521519628e-06, |
| "loss": 0.3521, |
| "step": 56000 |
| }, |
| { |
| "epoch": 1.305302067690886, |
| "grad_norm": 143.8572998046875, |
| "learning_rate": 7.778339810216777e-06, |
| "loss": 0.3544, |
| "step": 56500 |
| }, |
| { |
| "epoch": 1.3168534134226637, |
| "grad_norm": 19.368745803833008, |
| "learning_rate": 7.754021098913927e-06, |
| "loss": 0.332, |
| "step": 57000 |
| }, |
| { |
| "epoch": 1.3284047591544415, |
| "grad_norm": 37.57652282714844, |
| "learning_rate": 7.729702387611076e-06, |
| "loss": 0.3614, |
| "step": 57500 |
| }, |
| { |
| "epoch": 1.3399561048862192, |
| "grad_norm": 12.710687637329102, |
| "learning_rate": 7.705383676308226e-06, |
| "loss": 0.3397, |
| "step": 58000 |
| }, |
| { |
| "epoch": 1.351507450617997, |
| "grad_norm": 9.734477043151855, |
| "learning_rate": 7.681064965005375e-06, |
| "loss": 0.3337, |
| "step": 58500 |
| }, |
| { |
| "epoch": 1.3630587963497747, |
| "grad_norm": 50.98896408081055, |
| "learning_rate": 7.656746253702524e-06, |
| "loss": 0.3466, |
| "step": 59000 |
| }, |
| { |
| "epoch": 1.3746101420815524, |
| "grad_norm": 36.90542221069336, |
| "learning_rate": 7.632427542399674e-06, |
| "loss": 0.3345, |
| "step": 59500 |
| }, |
| { |
| "epoch": 1.3861614878133302, |
| "grad_norm": 87.90083312988281, |
| "learning_rate": 7.608108831096823e-06, |
| "loss": 0.3268, |
| "step": 60000 |
| }, |
| { |
| "epoch": 1.397712833545108, |
| "grad_norm": 33.43013000488281, |
| "learning_rate": 7.583790119793973e-06, |
| "loss": 0.3572, |
| "step": 60500 |
| }, |
| { |
| "epoch": 1.4092641792768856, |
| "grad_norm": 10.33069896697998, |
| "learning_rate": 7.559471408491121e-06, |
| "loss": 0.3565, |
| "step": 61000 |
| }, |
| { |
| "epoch": 1.4208155250086634, |
| "grad_norm": 1.686011791229248, |
| "learning_rate": 7.535152697188272e-06, |
| "loss": 0.3647, |
| "step": 61500 |
| }, |
| { |
| "epoch": 1.4323668707404413, |
| "grad_norm": 20.76148223876953, |
| "learning_rate": 7.5108339858854206e-06, |
| "loss": 0.3284, |
| "step": 62000 |
| }, |
| { |
| "epoch": 1.443918216472219, |
| "grad_norm": 9.261433601379395, |
| "learning_rate": 7.48651527458257e-06, |
| "loss": 0.3474, |
| "step": 62500 |
| }, |
| { |
| "epoch": 1.4554695622039968, |
| "grad_norm": 38.97019577026367, |
| "learning_rate": 7.462196563279719e-06, |
| "loss": 0.3433, |
| "step": 63000 |
| }, |
| { |
| "epoch": 1.4670209079357746, |
| "grad_norm": 43.138450622558594, |
| "learning_rate": 7.437877851976869e-06, |
| "loss": 0.3406, |
| "step": 63500 |
| }, |
| { |
| "epoch": 1.4785722536675523, |
| "grad_norm": 10.66066837310791, |
| "learning_rate": 7.413559140674018e-06, |
| "loss": 0.3533, |
| "step": 64000 |
| }, |
| { |
| "epoch": 1.49012359939933, |
| "grad_norm": 32.289588928222656, |
| "learning_rate": 7.389240429371168e-06, |
| "loss": 0.3377, |
| "step": 64500 |
| }, |
| { |
| "epoch": 1.5016749451311078, |
| "grad_norm": 85.28985595703125, |
| "learning_rate": 7.364921718068316e-06, |
| "loss": 0.3598, |
| "step": 65000 |
| }, |
| { |
| "epoch": 1.5132262908628855, |
| "grad_norm": 56.37942123413086, |
| "learning_rate": 7.340603006765467e-06, |
| "loss": 0.3354, |
| "step": 65500 |
| }, |
| { |
| "epoch": 1.5247776365946633, |
| "grad_norm": 91.6646499633789, |
| "learning_rate": 7.3162842954626156e-06, |
| "loss": 0.3557, |
| "step": 66000 |
| }, |
| { |
| "epoch": 1.536328982326441, |
| "grad_norm": 22.282794952392578, |
| "learning_rate": 7.291965584159765e-06, |
| "loss": 0.3375, |
| "step": 66500 |
| }, |
| { |
| "epoch": 1.5478803280582187, |
| "grad_norm": 23.59638214111328, |
| "learning_rate": 7.267646872856914e-06, |
| "loss": 0.3613, |
| "step": 67000 |
| }, |
| { |
| "epoch": 1.5594316737899967, |
| "grad_norm": 101.47071838378906, |
| "learning_rate": 7.243328161554064e-06, |
| "loss": 0.3625, |
| "step": 67500 |
| }, |
| { |
| "epoch": 1.5709830195217744, |
| "grad_norm": 10.680140495300293, |
| "learning_rate": 7.219009450251213e-06, |
| "loss": 0.3447, |
| "step": 68000 |
| }, |
| { |
| "epoch": 1.5825343652535522, |
| "grad_norm": 3.526810646057129, |
| "learning_rate": 7.194690738948363e-06, |
| "loss": 0.3752, |
| "step": 68500 |
| }, |
| { |
| "epoch": 1.59408571098533, |
| "grad_norm": 63.79883575439453, |
| "learning_rate": 7.170372027645511e-06, |
| "loss": 0.3556, |
| "step": 69000 |
| }, |
| { |
| "epoch": 1.6056370567171077, |
| "grad_norm": 57.77899169921875, |
| "learning_rate": 7.14605331634266e-06, |
| "loss": 0.3734, |
| "step": 69500 |
| }, |
| { |
| "epoch": 1.6171884024488854, |
| "grad_norm": 12.811071395874023, |
| "learning_rate": 7.1217346050398105e-06, |
| "loss": 0.3314, |
| "step": 70000 |
| }, |
| { |
| "epoch": 1.6287397481806631, |
| "grad_norm": 45.64273452758789, |
| "learning_rate": 7.09741589373696e-06, |
| "loss": 0.3603, |
| "step": 70500 |
| }, |
| { |
| "epoch": 1.6402910939124409, |
| "grad_norm": 0.7724096775054932, |
| "learning_rate": 7.073097182434109e-06, |
| "loss": 0.3531, |
| "step": 71000 |
| }, |
| { |
| "epoch": 1.6518424396442186, |
| "grad_norm": 111.21202850341797, |
| "learning_rate": 7.048778471131258e-06, |
| "loss": 0.3687, |
| "step": 71500 |
| }, |
| { |
| "epoch": 1.6633937853759964, |
| "grad_norm": 2.9768288135528564, |
| "learning_rate": 7.024459759828408e-06, |
| "loss": 0.362, |
| "step": 72000 |
| }, |
| { |
| "epoch": 1.674945131107774, |
| "grad_norm": 27.48809242248535, |
| "learning_rate": 7.000141048525558e-06, |
| "loss": 0.3616, |
| "step": 72500 |
| }, |
| { |
| "epoch": 1.6864964768395518, |
| "grad_norm": 48.34687805175781, |
| "learning_rate": 6.975822337222706e-06, |
| "loss": 0.3367, |
| "step": 73000 |
| }, |
| { |
| "epoch": 1.6980478225713296, |
| "grad_norm": 39.69007110595703, |
| "learning_rate": 6.951503625919855e-06, |
| "loss": 0.3621, |
| "step": 73500 |
| }, |
| { |
| "epoch": 1.7095991683031073, |
| "grad_norm": 110.33565521240234, |
| "learning_rate": 6.9271849146170055e-06, |
| "loss": 0.3539, |
| "step": 74000 |
| }, |
| { |
| "epoch": 1.721150514034885, |
| "grad_norm": 41.324222564697266, |
| "learning_rate": 6.902866203314154e-06, |
| "loss": 0.3433, |
| "step": 74500 |
| }, |
| { |
| "epoch": 1.7327018597666628, |
| "grad_norm": 73.07483673095703, |
| "learning_rate": 6.878547492011304e-06, |
| "loss": 0.3581, |
| "step": 75000 |
| }, |
| { |
| "epoch": 1.7442532054984405, |
| "grad_norm": 2.784154176712036, |
| "learning_rate": 6.854228780708453e-06, |
| "loss": 0.3506, |
| "step": 75500 |
| }, |
| { |
| "epoch": 1.7558045512302183, |
| "grad_norm": 46.016876220703125, |
| "learning_rate": 6.829910069405603e-06, |
| "loss": 0.3687, |
| "step": 76000 |
| }, |
| { |
| "epoch": 1.767355896961996, |
| "grad_norm": 10.431674003601074, |
| "learning_rate": 6.805591358102752e-06, |
| "loss": 0.3448, |
| "step": 76500 |
| }, |
| { |
| "epoch": 1.7789072426937738, |
| "grad_norm": 77.33363342285156, |
| "learning_rate": 6.781272646799901e-06, |
| "loss": 0.3688, |
| "step": 77000 |
| }, |
| { |
| "epoch": 1.7904585884255515, |
| "grad_norm": 36.162261962890625, |
| "learning_rate": 6.75695393549705e-06, |
| "loss": 0.3481, |
| "step": 77500 |
| }, |
| { |
| "epoch": 1.8020099341573292, |
| "grad_norm": 34.184844970703125, |
| "learning_rate": 6.7326352241942005e-06, |
| "loss": 0.3538, |
| "step": 78000 |
| }, |
| { |
| "epoch": 1.813561279889107, |
| "grad_norm": 89.07086944580078, |
| "learning_rate": 6.708316512891349e-06, |
| "loss": 0.3735, |
| "step": 78500 |
| }, |
| { |
| "epoch": 1.8251126256208847, |
| "grad_norm": 3.9982004165649414, |
| "learning_rate": 6.683997801588499e-06, |
| "loss": 0.3394, |
| "step": 79000 |
| }, |
| { |
| "epoch": 1.8366639713526625, |
| "grad_norm": 81.374267578125, |
| "learning_rate": 6.659679090285648e-06, |
| "loss": 0.3174, |
| "step": 79500 |
| }, |
| { |
| "epoch": 1.8482153170844402, |
| "grad_norm": 0.20297826826572418, |
| "learning_rate": 6.635360378982797e-06, |
| "loss": 0.3153, |
| "step": 80000 |
| }, |
| { |
| "epoch": 1.859766662816218, |
| "grad_norm": 59.6692008972168, |
| "learning_rate": 6.611041667679947e-06, |
| "loss": 0.3545, |
| "step": 80500 |
| }, |
| { |
| "epoch": 1.8713180085479957, |
| "grad_norm": 1.2928632497787476, |
| "learning_rate": 6.586722956377096e-06, |
| "loss": 0.3373, |
| "step": 81000 |
| }, |
| { |
| "epoch": 1.8828693542797736, |
| "grad_norm": 94.39313507080078, |
| "learning_rate": 6.562404245074245e-06, |
| "loss": 0.3222, |
| "step": 81500 |
| }, |
| { |
| "epoch": 1.8944207000115514, |
| "grad_norm": 29.352657318115234, |
| "learning_rate": 6.538085533771395e-06, |
| "loss": 0.3418, |
| "step": 82000 |
| }, |
| { |
| "epoch": 1.9059720457433291, |
| "grad_norm": 11.170536041259766, |
| "learning_rate": 6.513766822468544e-06, |
| "loss": 0.3423, |
| "step": 82500 |
| }, |
| { |
| "epoch": 1.9175233914751069, |
| "grad_norm": 90.75904846191406, |
| "learning_rate": 6.489448111165694e-06, |
| "loss": 0.3253, |
| "step": 83000 |
| }, |
| { |
| "epoch": 1.9290747372068846, |
| "grad_norm": 79.5523681640625, |
| "learning_rate": 6.4651293998628426e-06, |
| "loss": 0.3423, |
| "step": 83500 |
| }, |
| { |
| "epoch": 1.9406260829386623, |
| "grad_norm": 2.362600564956665, |
| "learning_rate": 6.440810688559992e-06, |
| "loss": 0.3233, |
| "step": 84000 |
| }, |
| { |
| "epoch": 1.95217742867044, |
| "grad_norm": 106.5050277709961, |
| "learning_rate": 6.416491977257142e-06, |
| "loss": 0.3291, |
| "step": 84500 |
| }, |
| { |
| "epoch": 1.9637287744022178, |
| "grad_norm": 44.49625015258789, |
| "learning_rate": 6.392173265954291e-06, |
| "loss": 0.3462, |
| "step": 85000 |
| }, |
| { |
| "epoch": 1.9752801201339956, |
| "grad_norm": 65.74842071533203, |
| "learning_rate": 6.36785455465144e-06, |
| "loss": 0.3356, |
| "step": 85500 |
| }, |
| { |
| "epoch": 1.9868314658657735, |
| "grad_norm": 25.529869079589844, |
| "learning_rate": 6.34353584334859e-06, |
| "loss": 0.3418, |
| "step": 86000 |
| }, |
| { |
| "epoch": 1.9983828115975513, |
| "grad_norm": 0.7819985151290894, |
| "learning_rate": 6.319217132045739e-06, |
| "loss": 0.3299, |
| "step": 86500 |
| }, |
| { |
| "epoch": 2.0, |
| "eval_accuracy": 0.8750144391821647, |
| "eval_f1": 0.883420247381804, |
| "eval_loss": 0.5118392705917358, |
| "eval_matthews_correlation": 0.7549403916374646, |
| "eval_precision": 0.8371656115989381, |
| "eval_recall": 0.9350850782354819, |
| "eval_runtime": 543.2127, |
| "eval_samples_per_second": 79.683, |
| "eval_steps_per_second": 4.981, |
| "step": 86570 |
| } |
| ], |
| "logging_steps": 500, |
| "max_steps": 216425, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 5, |
| "save_steps": 200, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": false |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.925875963248599e+17, |
| "train_batch_size": 8, |
| "trial_name": null, |
| "trial_params": null |
| } |