{
  "best_metric": 0.7890500619140279,
  "best_model_checkpoint": "finetune/models/plant-dnamamba-BPE-NoduleExpDup/checkpoint-43285",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 43285,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.011551345731777752,
      "grad_norm": 29.45172119140625,
      "learning_rate": 4.620218074293107e-07,
      "loss": 1.0687,
      "step": 500
    },
    {
      "epoch": 0.023102691463555505,
      "grad_norm": 31.642974853515625,
      "learning_rate": 9.240436148586214e-07,
      "loss": 0.9807,
      "step": 1000
    },
    {
      "epoch": 0.034654037195333255,
      "grad_norm": 12.195789337158203,
      "learning_rate": 1.3860654222879322e-06,
      "loss": 0.8373,
      "step": 1500
    },
    {
      "epoch": 0.04620538292711101,
      "grad_norm": 13.536879539489746,
      "learning_rate": 1.8480872297172428e-06,
      "loss": 0.7818,
      "step": 2000
    },
    {
      "epoch": 0.05775672865888876,
      "grad_norm": 13.85306453704834,
      "learning_rate": 2.3101090371465534e-06,
      "loss": 0.7234,
      "step": 2500
    },
    {
      "epoch": 0.06930807439066651,
      "grad_norm": 11.797886848449707,
      "learning_rate": 2.7721308445758644e-06,
      "loss": 0.6925,
      "step": 3000
    },
    {
      "epoch": 0.08085942012244426,
      "grad_norm": 6.770650386810303,
      "learning_rate": 3.2341526520051746e-06,
      "loss": 0.6751,
      "step": 3500
    },
    {
      "epoch": 0.09241076585422202,
      "grad_norm": 10.289820671081543,
      "learning_rate": 3.6961744594344856e-06,
      "loss": 0.6636,
      "step": 4000
    },
    {
      "epoch": 0.10396211158599977,
      "grad_norm": 15.198243141174316,
      "learning_rate": 4.158196266863797e-06,
      "loss": 0.6563,
      "step": 4500
    },
    {
      "epoch": 0.11551345731777753,
      "grad_norm": 13.971009254455566,
      "learning_rate": 4.620218074293107e-06,
      "loss": 0.6481,
      "step": 5000
    },
    {
      "epoch": 0.12706480304955528,
      "grad_norm": 8.441276550292969,
      "learning_rate": 5.082239881722417e-06,
      "loss": 0.6549,
      "step": 5500
    },
    {
      "epoch": 0.13861614878133302,
      "grad_norm": 14.8948335647583,
      "learning_rate": 5.544261689151729e-06,
      "loss": 0.6507,
      "step": 6000
    },
    {
      "epoch": 0.1501674945131108,
      "grad_norm": 7.725317478179932,
      "learning_rate": 6.006283496581039e-06,
      "loss": 0.6544,
      "step": 6500
    },
    {
      "epoch": 0.16171884024488853,
      "grad_norm": 17.12333106994629,
      "learning_rate": 6.468305304010349e-06,
      "loss": 0.6476,
      "step": 7000
    },
    {
      "epoch": 0.17327018597666627,
      "grad_norm": 7.033656120300293,
      "learning_rate": 6.930327111439661e-06,
      "loss": 0.6496,
      "step": 7500
    },
    {
      "epoch": 0.18482153170844404,
      "grad_norm": 7.053399085998535,
      "learning_rate": 7.392348918868971e-06,
      "loss": 0.6523,
      "step": 8000
    },
    {
      "epoch": 0.19637287744022178,
      "grad_norm": 6.655728340148926,
      "learning_rate": 7.854370726298282e-06,
      "loss": 0.6455,
      "step": 8500
    },
    {
      "epoch": 0.20792422317199954,
      "grad_norm": 8.08369255065918,
      "learning_rate": 8.316392533727593e-06,
      "loss": 0.6415,
      "step": 9000
    },
    {
      "epoch": 0.21947556890377729,
      "grad_norm": 3.947148084640503,
      "learning_rate": 8.778414341156903e-06,
      "loss": 0.6514,
      "step": 9500
    },
    {
      "epoch": 0.23102691463555505,
      "grad_norm": 8.264023780822754,
      "learning_rate": 9.240436148586214e-06,
      "loss": 0.6431,
      "step": 10000
    },
    {
      "epoch": 0.2425782603673328,
      "grad_norm": 4.74052095413208,
      "learning_rate": 9.702457956015525e-06,
      "loss": 0.6331,
      "step": 10500
    },
    {
      "epoch": 0.25412960609911056,
      "grad_norm": 12.625064849853516,
      "learning_rate": 9.991342538776186e-06,
      "loss": 0.6449,
      "step": 11000
    },
    {
      "epoch": 0.2656809518308883,
      "grad_norm": 17.814157485961914,
      "learning_rate": 9.967023827473336e-06,
      "loss": 0.6414,
      "step": 11500
    },
    {
      "epoch": 0.27723229756266604,
      "grad_norm": 15.34868049621582,
      "learning_rate": 9.942705116170485e-06,
      "loss": 0.6287,
      "step": 12000
    },
    {
      "epoch": 0.2887836432944438,
      "grad_norm": 5.785658836364746,
      "learning_rate": 9.918386404867634e-06,
      "loss": 0.6417,
      "step": 12500
    },
    {
      "epoch": 0.3003349890262216,
      "grad_norm": 3.7156598567962646,
      "learning_rate": 9.894067693564784e-06,
      "loss": 0.6436,
      "step": 13000
    },
    {
      "epoch": 0.3118863347579993,
      "grad_norm": 12.272802352905273,
      "learning_rate": 9.869748982261933e-06,
      "loss": 0.6505,
      "step": 13500
    },
    {
      "epoch": 0.32343768048977706,
      "grad_norm": 15.592353820800781,
      "learning_rate": 9.845430270959082e-06,
      "loss": 0.6313,
      "step": 14000
    },
    {
      "epoch": 0.3349890262215548,
      "grad_norm": 4.684946537017822,
      "learning_rate": 9.821111559656232e-06,
      "loss": 0.6457,
      "step": 14500
    },
    {
      "epoch": 0.34654037195333254,
      "grad_norm": 6.0165300369262695,
      "learning_rate": 9.79679284835338e-06,
      "loss": 0.641,
      "step": 15000
    },
    {
      "epoch": 0.35809171768511033,
      "grad_norm": 10.924590110778809,
      "learning_rate": 9.772474137050531e-06,
      "loss": 0.6293,
      "step": 15500
    },
    {
      "epoch": 0.3696430634168881,
      "grad_norm": 7.296951770782471,
      "learning_rate": 9.74815542574768e-06,
      "loss": 0.627,
      "step": 16000
    },
    {
      "epoch": 0.3811944091486658,
      "grad_norm": 14.750964164733887,
      "learning_rate": 9.723836714444829e-06,
      "loss": 0.6254,
      "step": 16500
    },
    {
      "epoch": 0.39274575488044355,
      "grad_norm": 7.377384662628174,
      "learning_rate": 9.699518003141977e-06,
      "loss": 0.6287,
      "step": 17000
    },
    {
      "epoch": 0.40429710061222135,
      "grad_norm": 6.540673732757568,
      "learning_rate": 9.675199291839128e-06,
      "loss": 0.6309,
      "step": 17500
    },
    {
      "epoch": 0.4158484463439991,
      "grad_norm": 6.4445295333862305,
      "learning_rate": 9.650880580536277e-06,
      "loss": 0.6302,
      "step": 18000
    },
    {
      "epoch": 0.42739979207577683,
      "grad_norm": 7.85426664352417,
      "learning_rate": 9.626561869233427e-06,
      "loss": 0.6201,
      "step": 18500
    },
    {
      "epoch": 0.43895113780755457,
      "grad_norm": 5.960766792297363,
      "learning_rate": 9.602243157930576e-06,
      "loss": 0.626,
      "step": 19000
    },
    {
      "epoch": 0.4505024835393323,
      "grad_norm": 9.376029968261719,
      "learning_rate": 9.577924446627726e-06,
      "loss": 0.6164,
      "step": 19500
    },
    {
      "epoch": 0.4620538292711101,
      "grad_norm": 17.05916976928711,
      "learning_rate": 9.553605735324875e-06,
      "loss": 0.6232,
      "step": 20000
    },
    {
      "epoch": 0.47360517500288785,
      "grad_norm": 7.523278713226318,
      "learning_rate": 9.529287024022024e-06,
      "loss": 0.601,
      "step": 20500
    },
    {
      "epoch": 0.4851565207346656,
      "grad_norm": 6.75208044052124,
      "learning_rate": 9.504968312719172e-06,
      "loss": 0.6157,
      "step": 21000
    },
    {
      "epoch": 0.4967078664664433,
      "grad_norm": 12.919069290161133,
      "learning_rate": 9.480649601416323e-06,
      "loss": 0.6098,
      "step": 21500
    },
    {
      "epoch": 0.5082592121982211,
      "grad_norm": 19.89571189880371,
      "learning_rate": 9.456330890113472e-06,
      "loss": 0.6131,
      "step": 22000
    },
    {
      "epoch": 0.5198105579299989,
      "grad_norm": 7.436350345611572,
      "learning_rate": 9.432012178810622e-06,
      "loss": 0.6121,
      "step": 22500
    },
    {
      "epoch": 0.5313619036617766,
      "grad_norm": 10.576221466064453,
      "learning_rate": 9.40769346750777e-06,
      "loss": 0.6005,
      "step": 23000
    },
    {
      "epoch": 0.5429132493935543,
      "grad_norm": 12.731452941894531,
      "learning_rate": 9.383374756204921e-06,
      "loss": 0.6217,
      "step": 23500
    },
    {
      "epoch": 0.5544645951253321,
      "grad_norm": 9.219985961914062,
      "learning_rate": 9.35905604490207e-06,
      "loss": 0.5985,
      "step": 24000
    },
    {
      "epoch": 0.5660159408571098,
      "grad_norm": 12.659988403320312,
      "learning_rate": 9.334737333599219e-06,
      "loss": 0.5951,
      "step": 24500
    },
    {
      "epoch": 0.5775672865888876,
      "grad_norm": 32.879234313964844,
      "learning_rate": 9.310418622296367e-06,
      "loss": 0.5961,
      "step": 25000
    },
    {
      "epoch": 0.5891186323206653,
      "grad_norm": 9.763121604919434,
      "learning_rate": 9.286099910993518e-06,
      "loss": 0.6081,
      "step": 25500
    },
    {
      "epoch": 0.6006699780524432,
      "grad_norm": 9.057079315185547,
      "learning_rate": 9.261781199690667e-06,
      "loss": 0.5863,
      "step": 26000
    },
    {
      "epoch": 0.6122213237842209,
      "grad_norm": 17.381582260131836,
      "learning_rate": 9.237462488387817e-06,
      "loss": 0.5857,
      "step": 26500
    },
    {
      "epoch": 0.6237726695159986,
      "grad_norm": 18.3979549407959,
      "learning_rate": 9.213143777084966e-06,
      "loss": 0.5853,
      "step": 27000
    },
    {
      "epoch": 0.6353240152477764,
      "grad_norm": 11.13846206665039,
      "learning_rate": 9.188825065782114e-06,
      "loss": 0.582,
      "step": 27500
    },
    {
      "epoch": 0.6468753609795541,
      "grad_norm": 8.506060600280762,
      "learning_rate": 9.164506354479265e-06,
      "loss": 0.5773,
      "step": 28000
    },
    {
      "epoch": 0.6584267067113319,
      "grad_norm": 9.814123153686523,
      "learning_rate": 9.140187643176414e-06,
      "loss": 0.5797,
      "step": 28500
    },
    {
      "epoch": 0.6699780524431096,
      "grad_norm": 11.70206069946289,
      "learning_rate": 9.115868931873562e-06,
      "loss": 0.5587,
      "step": 29000
    },
    {
      "epoch": 0.6815293981748873,
      "grad_norm": 11.949383735656738,
      "learning_rate": 9.091550220570711e-06,
      "loss": 0.5769,
      "step": 29500
    },
    {
      "epoch": 0.6930807439066651,
      "grad_norm": 11.51278018951416,
      "learning_rate": 9.067231509267862e-06,
      "loss": 0.5798,
      "step": 30000
    },
    {
      "epoch": 0.7046320896384429,
      "grad_norm": 13.705997467041016,
      "learning_rate": 9.042912797965012e-06,
      "loss": 0.5585,
      "step": 30500
    },
    {
      "epoch": 0.7161834353702207,
      "grad_norm": 18.50054168701172,
      "learning_rate": 9.01859408666216e-06,
      "loss": 0.5656,
      "step": 31000
    },
    {
      "epoch": 0.7277347811019984,
      "grad_norm": 23.920692443847656,
      "learning_rate": 8.99427537535931e-06,
      "loss": 0.5493,
      "step": 31500
    },
    {
      "epoch": 0.7392861268337761,
      "grad_norm": 15.425186157226562,
      "learning_rate": 8.96995666405646e-06,
      "loss": 0.5538,
      "step": 32000
    },
    {
      "epoch": 0.7508374725655539,
      "grad_norm": 11.077018737792969,
      "learning_rate": 8.945637952753609e-06,
      "loss": 0.5553,
      "step": 32500
    },
    {
      "epoch": 0.7623888182973316,
      "grad_norm": 17.128128051757812,
      "learning_rate": 8.921319241450757e-06,
      "loss": 0.5657,
      "step": 33000
    },
    {
      "epoch": 0.7739401640291094,
      "grad_norm": 37.22261428833008,
      "learning_rate": 8.897000530147906e-06,
      "loss": 0.5573,
      "step": 33500
    },
    {
      "epoch": 0.7854915097608871,
      "grad_norm": 18.649255752563477,
      "learning_rate": 8.872681818845056e-06,
      "loss": 0.5337,
      "step": 34000
    },
    {
      "epoch": 0.7970428554926648,
      "grad_norm": 11.49511432647705,
      "learning_rate": 8.848363107542205e-06,
      "loss": 0.5442,
      "step": 34500
    },
    {
      "epoch": 0.8085942012244427,
      "grad_norm": 20.64067840576172,
      "learning_rate": 8.824044396239356e-06,
      "loss": 0.5632,
      "step": 35000
    },
    {
      "epoch": 0.8201455469562204,
      "grad_norm": 18.51673126220703,
      "learning_rate": 8.799725684936504e-06,
      "loss": 0.5454,
      "step": 35500
    },
    {
      "epoch": 0.8316968926879982,
      "grad_norm": 18.139997482299805,
      "learning_rate": 8.775406973633655e-06,
      "loss": 0.5371,
      "step": 36000
    },
    {
      "epoch": 0.8432482384197759,
      "grad_norm": 21.852693557739258,
      "learning_rate": 8.751088262330804e-06,
      "loss": 0.539,
      "step": 36500
    },
    {
      "epoch": 0.8547995841515537,
      "grad_norm": 31.914161682128906,
      "learning_rate": 8.726769551027952e-06,
      "loss": 0.5383,
      "step": 37000
    },
    {
      "epoch": 0.8663509298833314,
      "grad_norm": 9.24451732635498,
      "learning_rate": 8.702450839725101e-06,
      "loss": 0.548,
      "step": 37500
    },
    {
      "epoch": 0.8779022756151091,
      "grad_norm": 27.3262882232666,
      "learning_rate": 8.678132128422251e-06,
      "loss": 0.524,
      "step": 38000
    },
    {
      "epoch": 0.8894536213468869,
      "grad_norm": 23.231952667236328,
      "learning_rate": 8.6538134171194e-06,
      "loss": 0.5121,
      "step": 38500
    },
    {
      "epoch": 0.9010049670786646,
      "grad_norm": 28.35572052001953,
      "learning_rate": 8.62949470581655e-06,
      "loss": 0.5333,
      "step": 39000
    },
    {
      "epoch": 0.9125563128104425,
      "grad_norm": 24.61405372619629,
      "learning_rate": 8.6051759945137e-06,
      "loss": 0.5015,
      "step": 39500
    },
    {
      "epoch": 0.9241076585422202,
      "grad_norm": 30.62819480895996,
      "learning_rate": 8.580857283210848e-06,
      "loss": 0.5092,
      "step": 40000
    },
    {
      "epoch": 0.935659004273998,
      "grad_norm": 19.295331954956055,
      "learning_rate": 8.556538571907999e-06,
      "loss": 0.5125,
      "step": 40500
    },
    {
      "epoch": 0.9472103500057757,
      "grad_norm": 10.785584449768066,
      "learning_rate": 8.532219860605147e-06,
      "loss": 0.5161,
      "step": 41000
    },
    {
      "epoch": 0.9587616957375534,
      "grad_norm": 24.81365394592285,
      "learning_rate": 8.507901149302296e-06,
      "loss": 0.498,
      "step": 41500
    },
    {
      "epoch": 0.9703130414693312,
      "grad_norm": 48.026973724365234,
      "learning_rate": 8.483582437999446e-06,
      "loss": 0.4916,
      "step": 42000
    },
    {
      "epoch": 0.9818643872011089,
      "grad_norm": 6.653892517089844,
      "learning_rate": 8.459263726696595e-06,
      "loss": 0.4944,
      "step": 42500
    },
    {
      "epoch": 0.9934157329328867,
      "grad_norm": 39.814945220947266,
      "learning_rate": 8.434945015393746e-06,
      "loss": 0.4954,
      "step": 43000
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7796003234376805,
      "eval_f1": 0.7890500619140279,
      "eval_loss": 0.4737609326839447,
      "eval_matthews_correlation": 0.5599099791389662,
      "eval_precision": 0.7656524910955671,
      "eval_recall": 0.8139227225035354,
      "eval_runtime": 550.4755,
      "eval_samples_per_second": 78.632,
      "eval_steps_per_second": 4.916,
      "step": 43285
    }
  ],
  "logging_steps": 500,
  "max_steps": 216425,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 5,
  "save_steps": 200,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": false
      },
      "attributes": {}
    }
  },
  "total_flos": 9.629379816242995e+16,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
|
|