| { | |
| "best_metric": 0.11368879675865173, | |
| "best_model_checkpoint": "BBC/checkpoint-446", | |
| "epoch": 2.0, | |
| "eval_steps": 500, | |
| "global_step": 446, | |
| "is_hyper_param_search": false, | |
| "is_local_process_zero": true, | |
| "is_world_process_zero": true, | |
| "log_history": [ | |
| { | |
| "epoch": 0.04932735426008968, | |
| "grad_norm": 2.2345685958862305, | |
| "learning_rate": 8.208955223880597e-06, | |
| "loss": 1.6224, | |
| "step": 11 | |
| }, | |
| { | |
| "epoch": 0.09865470852017937, | |
| "grad_norm": 3.739938974380493, | |
| "learning_rate": 1.6417910447761194e-05, | |
| "loss": 1.5896, | |
| "step": 22 | |
| }, | |
| { | |
| "epoch": 0.14798206278026907, | |
| "grad_norm": 2.8613524436950684, | |
| "learning_rate": 2.4626865671641793e-05, | |
| "loss": 1.4769, | |
| "step": 33 | |
| }, | |
| { | |
| "epoch": 0.19730941704035873, | |
| "grad_norm": 4.428678035736084, | |
| "learning_rate": 3.283582089552239e-05, | |
| "loss": 1.2341, | |
| "step": 44 | |
| }, | |
| { | |
| "epoch": 0.24663677130044842, | |
| "grad_norm": 2.953808069229126, | |
| "learning_rate": 4.104477611940299e-05, | |
| "loss": 0.9053, | |
| "step": 55 | |
| }, | |
| { | |
| "epoch": 0.29596412556053814, | |
| "grad_norm": 4.068609237670898, | |
| "learning_rate": 4.850746268656717e-05, | |
| "loss": 0.5467, | |
| "step": 66 | |
| }, | |
| { | |
| "epoch": 0.3452914798206278, | |
| "grad_norm": null, | |
| "learning_rate": 4.933554817275748e-05, | |
| "loss": 0.4172, | |
| "step": 77 | |
| }, | |
| { | |
| "epoch": 0.39461883408071746, | |
| "grad_norm": 17.124122619628906, | |
| "learning_rate": 4.8421926910299006e-05, | |
| "loss": 0.2072, | |
| "step": 88 | |
| }, | |
| { | |
| "epoch": 0.4439461883408072, | |
| "grad_norm": 4.019614219665527, | |
| "learning_rate": 4.750830564784053e-05, | |
| "loss": 0.2032, | |
| "step": 99 | |
| }, | |
| { | |
| "epoch": 0.49327354260089684, | |
| "grad_norm": 16.75712776184082, | |
| "learning_rate": 4.659468438538207e-05, | |
| "loss": 0.2285, | |
| "step": 110 | |
| }, | |
| { | |
| "epoch": 0.5426008968609866, | |
| "grad_norm": 0.29395201802253723, | |
| "learning_rate": 4.568106312292359e-05, | |
| "loss": 0.1339, | |
| "step": 121 | |
| }, | |
| { | |
| "epoch": 0.5919282511210763, | |
| "grad_norm": 0.19628354907035828, | |
| "learning_rate": 4.476744186046512e-05, | |
| "loss": 0.2383, | |
| "step": 132 | |
| }, | |
| { | |
| "epoch": 0.6412556053811659, | |
| "grad_norm": 11.740421295166016, | |
| "learning_rate": 4.385382059800665e-05, | |
| "loss": 0.1953, | |
| "step": 143 | |
| }, | |
| { | |
| "epoch": 0.6905829596412556, | |
| "grad_norm": 11.316059112548828, | |
| "learning_rate": 4.2940199335548175e-05, | |
| "loss": 0.1348, | |
| "step": 154 | |
| }, | |
| { | |
| "epoch": 0.7399103139013453, | |
| "grad_norm": 0.13278043270111084, | |
| "learning_rate": 4.20265780730897e-05, | |
| "loss": 0.0165, | |
| "step": 165 | |
| }, | |
| { | |
| "epoch": 0.7892376681614349, | |
| "grad_norm": 0.08719899505376816, | |
| "learning_rate": 4.111295681063123e-05, | |
| "loss": 0.0113, | |
| "step": 176 | |
| }, | |
| { | |
| "epoch": 0.8385650224215246, | |
| "grad_norm": 0.10285717248916626, | |
| "learning_rate": 4.019933554817276e-05, | |
| "loss": 0.0562, | |
| "step": 187 | |
| }, | |
| { | |
| "epoch": 0.8878923766816144, | |
| "grad_norm": 0.17199571430683136, | |
| "learning_rate": 3.928571428571429e-05, | |
| "loss": 0.1757, | |
| "step": 198 | |
| }, | |
| { | |
| "epoch": 0.9372197309417041, | |
| "grad_norm": 4.296013832092285, | |
| "learning_rate": 3.837209302325582e-05, | |
| "loss": 0.1623, | |
| "step": 209 | |
| }, | |
| { | |
| "epoch": 0.9865470852017937, | |
| "grad_norm": 0.0789671242237091, | |
| "learning_rate": 3.7458471760797344e-05, | |
| "loss": 0.1756, | |
| "step": 220 | |
| }, | |
| { | |
| "epoch": 1.0, | |
| "eval_accuracy": 0.9415730337078652, | |
| "eval_f1_macro": 0.9405622158426933, | |
| "eval_f1_micro": 0.9415730337078652, | |
| "eval_f1_weighted": 0.9410207995888995, | |
| "eval_loss": 0.2357553094625473, | |
| "eval_precision_macro": 0.9403873157660169, | |
| "eval_precision_micro": 0.9415730337078652, | |
| "eval_precision_weighted": 0.945045999282218, | |
| "eval_recall_macro": 0.9449872676343263, | |
| "eval_recall_micro": 0.9415730337078652, | |
| "eval_recall_weighted": 0.9415730337078652, | |
| "eval_runtime": 1.1567, | |
| "eval_samples_per_second": 384.701, | |
| "eval_steps_per_second": 24.206, | |
| "step": 223 | |
| }, | |
| { | |
| "epoch": 1.0358744394618835, | |
| "grad_norm": 36.847206115722656, | |
| "learning_rate": 3.662790697674418e-05, | |
| "loss": 0.2338, | |
| "step": 231 | |
| }, | |
| { | |
| "epoch": 1.0852017937219731, | |
| "grad_norm": 0.21538594365119934, | |
| "learning_rate": 3.571428571428572e-05, | |
| "loss": 0.034, | |
| "step": 242 | |
| }, | |
| { | |
| "epoch": 1.1345291479820627, | |
| "grad_norm": 0.09186477214097977, | |
| "learning_rate": 3.4800664451827244e-05, | |
| "loss": 0.0824, | |
| "step": 253 | |
| }, | |
| { | |
| "epoch": 1.1838565022421526, | |
| "grad_norm": 0.10708645731210709, | |
| "learning_rate": 3.397009966777408e-05, | |
| "loss": 0.0314, | |
| "step": 264 | |
| }, | |
| { | |
| "epoch": 1.2331838565022422, | |
| "grad_norm": 0.061287783086299896, | |
| "learning_rate": 3.305647840531562e-05, | |
| "loss": 0.0139, | |
| "step": 275 | |
| }, | |
| { | |
| "epoch": 1.2825112107623318, | |
| "grad_norm": 26.10398292541504, | |
| "learning_rate": 3.2142857142857144e-05, | |
| "loss": 0.1447, | |
| "step": 286 | |
| }, | |
| { | |
| "epoch": 1.3318385650224216, | |
| "grad_norm": 0.06678097695112228, | |
| "learning_rate": 3.122923588039867e-05, | |
| "loss": 0.0048, | |
| "step": 297 | |
| }, | |
| { | |
| "epoch": 1.3811659192825112, | |
| "grad_norm": 0.03612040355801582, | |
| "learning_rate": 3.03156146179402e-05, | |
| "loss": 0.0365, | |
| "step": 308 | |
| }, | |
| { | |
| "epoch": 1.4304932735426008, | |
| "grad_norm": 0.05210395157337189, | |
| "learning_rate": 2.940199335548173e-05, | |
| "loss": 0.1038, | |
| "step": 319 | |
| }, | |
| { | |
| "epoch": 1.4798206278026906, | |
| "grad_norm": 0.04557207226753235, | |
| "learning_rate": 2.848837209302326e-05, | |
| "loss": 0.0745, | |
| "step": 330 | |
| }, | |
| { | |
| "epoch": 1.5291479820627802, | |
| "grad_norm": 0.09987442195415497, | |
| "learning_rate": 2.7574750830564782e-05, | |
| "loss": 0.1806, | |
| "step": 341 | |
| }, | |
| { | |
| "epoch": 1.57847533632287, | |
| "grad_norm": 0.0828678086400032, | |
| "learning_rate": 2.6661129568106313e-05, | |
| "loss": 0.0048, | |
| "step": 352 | |
| }, | |
| { | |
| "epoch": 1.6278026905829597, | |
| "grad_norm": 0.05174608528614044, | |
| "learning_rate": 2.5747508305647843e-05, | |
| "loss": 0.136, | |
| "step": 363 | |
| }, | |
| { | |
| "epoch": 1.6771300448430493, | |
| "grad_norm": 0.05517464876174927, | |
| "learning_rate": 2.483388704318937e-05, | |
| "loss": 0.1924, | |
| "step": 374 | |
| }, | |
| { | |
| "epoch": 1.726457399103139, | |
| "grad_norm": 0.0699392706155777, | |
| "learning_rate": 2.39202657807309e-05, | |
| "loss": 0.0129, | |
| "step": 385 | |
| }, | |
| { | |
| "epoch": 1.7757847533632287, | |
| "grad_norm": 0.04404047504067421, | |
| "learning_rate": 2.3006644518272428e-05, | |
| "loss": 0.0032, | |
| "step": 396 | |
| }, | |
| { | |
| "epoch": 1.8251121076233185, | |
| "grad_norm": 0.048387836664915085, | |
| "learning_rate": 2.2093023255813955e-05, | |
| "loss": 0.0437, | |
| "step": 407 | |
| }, | |
| { | |
| "epoch": 1.8744394618834082, | |
| "grad_norm": 0.06495353579521179, | |
| "learning_rate": 2.1179401993355482e-05, | |
| "loss": 0.0095, | |
| "step": 418 | |
| }, | |
| { | |
| "epoch": 1.9237668161434978, | |
| "grad_norm": 1.6995466947555542, | |
| "learning_rate": 2.026578073089701e-05, | |
| "loss": 0.0047, | |
| "step": 429 | |
| }, | |
| { | |
| "epoch": 1.9730941704035874, | |
| "grad_norm": 0.02669903263449669, | |
| "learning_rate": 1.935215946843854e-05, | |
| "loss": 0.0239, | |
| "step": 440 | |
| }, | |
| { | |
| "epoch": 2.0, | |
| "eval_accuracy": 0.9752808988764045, | |
| "eval_f1_macro": 0.9748328397861948, | |
| "eval_f1_micro": 0.9752808988764045, | |
| "eval_f1_weighted": 0.9752071164560256, | |
| "eval_loss": 0.11368879675865173, | |
| "eval_precision_macro": 0.9752973544608207, | |
| "eval_precision_micro": 0.9752808988764045, | |
| "eval_precision_weighted": 0.9756012580457148, | |
| "eval_recall_macro": 0.9748949579831934, | |
| "eval_recall_micro": 0.9752808988764045, | |
| "eval_recall_weighted": 0.9752808988764045, | |
| "eval_runtime": 1.1607, | |
| "eval_samples_per_second": 383.394, | |
| "eval_steps_per_second": 24.124, | |
| "step": 446 | |
| } | |
| ], | |
| "logging_steps": 11, | |
| "max_steps": 669, | |
| "num_input_tokens_seen": 0, | |
| "num_train_epochs": 3, | |
| "save_steps": 500, | |
| "total_flos": 117902292326400.0, | |
| "train_batch_size": 8, | |
| "trial_name": null, | |
| "trial_params": null | |
| } | |