{
  "best_global_step": 2783,
  "best_metric": 0.7164420485175202,
  "best_model_checkpoint": "models/affectnet_finetuned\\checkpoint-2783",
  "epoch": 1.0,
  "eval_steps": 500,
  "global_step": 2783,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 0.0035932446999640674,
      "grad_norm": 5.816065311431885,
      "learning_rate": 1.9935321595400647e-05,
      "loss": 1.9356,
      "step": 10
    },
    {
      "epoch": 0.007186489399928135,
      "grad_norm": 16.718177795410156,
      "learning_rate": 1.9863456701401367e-05,
      "loss": 1.7984,
      "step": 20
    },
    {
      "epoch": 0.010779734099892203,
      "grad_norm": 14.972896575927734,
      "learning_rate": 1.9791591807402086e-05,
      "loss": 1.847,
      "step": 30
    },
    {
      "epoch": 0.01437297879985627,
      "grad_norm": 18.699365615844727,
      "learning_rate": 1.9719726913402805e-05,
      "loss": 1.9015,
      "step": 40
    },
    {
      "epoch": 0.017966223499820338,
      "grad_norm": 14.717846870422363,
      "learning_rate": 1.9647862019403524e-05,
      "loss": 1.2742,
      "step": 50
    },
    {
      "epoch": 0.021559468199784406,
      "grad_norm": 13.507522583007812,
      "learning_rate": 1.9575997125404243e-05,
      "loss": 1.0376,
      "step": 60
    },
    {
      "epoch": 0.025152712899748474,
      "grad_norm": 14.181863784790039,
      "learning_rate": 1.950413223140496e-05,
      "loss": 1.6585,
      "step": 70
    },
    {
      "epoch": 0.02874595759971254,
      "grad_norm": 9.426386833190918,
      "learning_rate": 1.9432267337405678e-05,
      "loss": 1.2604,
      "step": 80
    },
    {
      "epoch": 0.03233920229967661,
      "grad_norm": 6.423051834106445,
      "learning_rate": 1.93604024434064e-05,
      "loss": 1.2368,
      "step": 90
    },
    {
      "epoch": 0.035932446999640676,
      "grad_norm": 11.146883964538574,
      "learning_rate": 1.9288537549407116e-05,
      "loss": 1.3696,
      "step": 100
    },
    {
      "epoch": 0.039525691699604744,
      "grad_norm": 10.348139762878418,
      "learning_rate": 1.9216672655407835e-05,
      "loss": 1.2237,
      "step": 110
    },
    {
      "epoch": 0.04311893639956881,
      "grad_norm": 9.791911125183105,
      "learning_rate": 1.9144807761408554e-05,
      "loss": 1.3294,
      "step": 120
    },
    {
      "epoch": 0.04671218109953288,
      "grad_norm": 6.4929585456848145,
      "learning_rate": 1.9072942867409273e-05,
      "loss": 1.2819,
      "step": 130
    },
    {
      "epoch": 0.05030542579949695,
      "grad_norm": 13.437801361083984,
      "learning_rate": 1.9001077973409992e-05,
      "loss": 1.1734,
      "step": 140
    },
    {
      "epoch": 0.05389867049946101,
      "grad_norm": 9.22572135925293,
      "learning_rate": 1.8929213079410708e-05,
      "loss": 1.0934,
      "step": 150
    },
    {
      "epoch": 0.05749191519942508,
      "grad_norm": 12.908567428588867,
      "learning_rate": 1.8857348185411427e-05,
      "loss": 1.1791,
      "step": 160
    },
    {
      "epoch": 0.061085159899389146,
      "grad_norm": 9.806719779968262,
      "learning_rate": 1.8785483291412146e-05,
      "loss": 1.2094,
      "step": 170
    },
    {
      "epoch": 0.06467840459935321,
      "grad_norm": 6.507504463195801,
      "learning_rate": 1.8713618397412865e-05,
      "loss": 1.1484,
      "step": 180
    },
    {
      "epoch": 0.06827164929931728,
      "grad_norm": 13.343854904174805,
      "learning_rate": 1.8641753503413585e-05,
      "loss": 1.0378,
      "step": 190
    },
    {
      "epoch": 0.07186489399928135,
      "grad_norm": 9.213022232055664,
      "learning_rate": 1.85698886094143e-05,
      "loss": 1.149,
      "step": 200
    },
    {
      "epoch": 0.07545813869924542,
      "grad_norm": 6.583609580993652,
      "learning_rate": 1.849802371541502e-05,
      "loss": 0.9091,
      "step": 210
    },
    {
      "epoch": 0.07905138339920949,
      "grad_norm": 13.28250789642334,
      "learning_rate": 1.8426158821415742e-05,
      "loss": 1.104,
      "step": 220
    },
    {
      "epoch": 0.08264462809917356,
      "grad_norm": 9.129759788513184,
      "learning_rate": 1.8354293927416458e-05,
      "loss": 1.175,
      "step": 230
    },
    {
      "epoch": 0.08623787279913762,
      "grad_norm": 7.381193161010742,
      "learning_rate": 1.8282429033417177e-05,
      "loss": 0.9531,
      "step": 240
    },
    {
      "epoch": 0.08983111749910169,
      "grad_norm": 8.401652336120605,
      "learning_rate": 1.8210564139417896e-05,
      "loss": 1.0652,
      "step": 250
    },
    {
      "epoch": 0.09342436219906576,
      "grad_norm": 6.572427272796631,
      "learning_rate": 1.8138699245418615e-05,
      "loss": 0.8957,
      "step": 260
    },
    {
      "epoch": 0.09701760689902983,
      "grad_norm": 13.609588623046875,
      "learning_rate": 1.8066834351419334e-05,
      "loss": 1.0328,
      "step": 270
    },
    {
      "epoch": 0.1006108515989939,
      "grad_norm": 9.873039245605469,
      "learning_rate": 1.7994969457420053e-05,
      "loss": 0.8974,
      "step": 280
    },
    {
      "epoch": 0.10420409629895797,
      "grad_norm": 6.486234188079834,
      "learning_rate": 1.792310456342077e-05,
      "loss": 0.7499,
      "step": 290
    },
    {
      "epoch": 0.10779734099892202,
      "grad_norm": 8.80713176727295,
      "learning_rate": 1.7851239669421488e-05,
      "loss": 1.0299,
      "step": 300
    },
    {
      "epoch": 0.11139058569888609,
      "grad_norm": 11.095004081726074,
      "learning_rate": 1.7779374775422207e-05,
      "loss": 0.9532,
      "step": 310
    },
    {
      "epoch": 0.11498383039885016,
      "grad_norm": 8.18423843383789,
      "learning_rate": 1.7707509881422926e-05,
      "loss": 0.9966,
      "step": 320
    },
    {
      "epoch": 0.11857707509881422,
      "grad_norm": 7.924959182739258,
      "learning_rate": 1.7635644987423645e-05,
      "loss": 1.0115,
      "step": 330
    },
    {
      "epoch": 0.12217031979877829,
      "grad_norm": 9.02883529663086,
      "learning_rate": 1.7563780093424364e-05,
      "loss": 0.961,
      "step": 340
    },
    {
      "epoch": 0.12576356449874238,
      "grad_norm": 11.199103355407715,
      "learning_rate": 1.7491915199425083e-05,
      "loss": 1.0795,
      "step": 350
    },
    {
      "epoch": 0.12935680919870643,
      "grad_norm": 4.646785259246826,
      "learning_rate": 1.7420050305425803e-05,
      "loss": 0.9701,
      "step": 360
    },
    {
      "epoch": 0.1329500538986705,
      "grad_norm": 8.631613731384277,
      "learning_rate": 1.7348185411426518e-05,
      "loss": 1.0499,
      "step": 370
    },
    {
      "epoch": 0.13654329859863457,
      "grad_norm": 10.40414047241211,
      "learning_rate": 1.7276320517427237e-05,
      "loss": 1.0192,
      "step": 380
    },
    {
      "epoch": 0.14013654329859865,
      "grad_norm": 6.724309921264648,
      "learning_rate": 1.7204455623427956e-05,
      "loss": 0.7622,
      "step": 390
    },
    {
      "epoch": 0.1437297879985627,
      "grad_norm": 10.001465797424316,
      "learning_rate": 1.7132590729428676e-05,
      "loss": 1.0283,
      "step": 400
    },
    {
      "epoch": 0.14732303269852676,
      "grad_norm": 9.960790634155273,
      "learning_rate": 1.7060725835429395e-05,
      "loss": 0.9756,
      "step": 410
    },
    {
      "epoch": 0.15091627739849084,
      "grad_norm": 10.12285041809082,
      "learning_rate": 1.698886094143011e-05,
      "loss": 0.9986,
      "step": 420
    },
    {
      "epoch": 0.1545095220984549,
      "grad_norm": 11.869123458862305,
      "learning_rate": 1.6916996047430833e-05,
      "loss": 0.846,
      "step": 430
    },
    {
      "epoch": 0.15810276679841898,
      "grad_norm": 5.06349515914917,
      "learning_rate": 1.6845131153431552e-05,
      "loss": 1.0217,
      "step": 440
    },
    {
      "epoch": 0.16169601149838303,
      "grad_norm": 9.066526412963867,
      "learning_rate": 1.6773266259432268e-05,
      "loss": 0.9221,
      "step": 450
    },
    {
      "epoch": 0.1652892561983471,
      "grad_norm": 9.309064865112305,
      "learning_rate": 1.6701401365432987e-05,
      "loss": 0.7854,
      "step": 460
    },
    {
      "epoch": 0.16888250089831117,
      "grad_norm": 12.74048900604248,
      "learning_rate": 1.6629536471433706e-05,
      "loss": 1.0911,
      "step": 470
    },
    {
      "epoch": 0.17247574559827525,
      "grad_norm": 6.426724433898926,
      "learning_rate": 1.6557671577434425e-05,
      "loss": 0.9062,
      "step": 480
    },
    {
      "epoch": 0.1760689902982393,
      "grad_norm": 10.126945495605469,
      "learning_rate": 1.6485806683435144e-05,
      "loss": 0.9157,
      "step": 490
    },
    {
      "epoch": 0.17966223499820339,
      "grad_norm": 4.212762832641602,
      "learning_rate": 1.641394178943586e-05,
      "loss": 0.8974,
      "step": 500
    },
    {
      "epoch": 0.18325547969816744,
      "grad_norm": 9.264360427856445,
      "learning_rate": 1.634207689543658e-05,
      "loss": 0.9294,
      "step": 510
    },
    {
      "epoch": 0.18684872439813152,
      "grad_norm": 8.043506622314453,
      "learning_rate": 1.62702120014373e-05,
      "loss": 0.8889,
      "step": 520
    },
    {
      "epoch": 0.19044196909809558,
      "grad_norm": 7.407253742218018,
      "learning_rate": 1.6198347107438017e-05,
      "loss": 0.888,
      "step": 530
    },
    {
      "epoch": 0.19403521379805966,
      "grad_norm": 3.76177716255188,
      "learning_rate": 1.6126482213438736e-05,
      "loss": 0.7739,
      "step": 540
    },
    {
      "epoch": 0.1976284584980237,
      "grad_norm": 6.74066686630249,
      "learning_rate": 1.6054617319439455e-05,
      "loss": 0.8938,
      "step": 550
    },
    {
      "epoch": 0.2012217031979878,
      "grad_norm": 8.588924407958984,
      "learning_rate": 1.5982752425440174e-05,
      "loss": 1.0615,
      "step": 560
    },
    {
      "epoch": 0.20481494789795185,
      "grad_norm": 7.100722312927246,
      "learning_rate": 1.5910887531440894e-05,
      "loss": 0.788,
      "step": 570
    },
    {
      "epoch": 0.20840819259791593,
      "grad_norm": 8.932958602905273,
      "learning_rate": 1.583902263744161e-05,
      "loss": 0.9478,
      "step": 580
    },
    {
      "epoch": 0.21200143729787999,
      "grad_norm": 5.2799787521362305,
      "learning_rate": 1.576715774344233e-05,
      "loss": 0.9483,
      "step": 590
    },
    {
      "epoch": 0.21559468199784404,
      "grad_norm": 6.58836030960083,
      "learning_rate": 1.5695292849443047e-05,
      "loss": 0.9327,
      "step": 600
    },
    {
      "epoch": 0.21918792669780812,
      "grad_norm": 6.726273059844971,
      "learning_rate": 1.5623427955443767e-05,
      "loss": 0.8143,
      "step": 610
    },
    {
      "epoch": 0.22278117139777218,
      "grad_norm": 13.367673873901367,
      "learning_rate": 1.5551563061444486e-05,
      "loss": 0.7846,
      "step": 620
    },
    {
      "epoch": 0.22637441609773626,
      "grad_norm": 7.093170642852783,
      "learning_rate": 1.5479698167445205e-05,
      "loss": 0.8274,
      "step": 630
    },
    {
      "epoch": 0.2299676607977003,
      "grad_norm": 7.058470726013184,
      "learning_rate": 1.5407833273445924e-05,
      "loss": 0.8349,
      "step": 640
    },
    {
      "epoch": 0.2335609054976644,
      "grad_norm": 7.336752414703369,
      "learning_rate": 1.5335968379446643e-05,
      "loss": 1.0872,
      "step": 650
    },
    {
      "epoch": 0.23715415019762845,
      "grad_norm": 7.090172290802002,
      "learning_rate": 1.5264103485447362e-05,
      "loss": 0.8561,
      "step": 660
    },
    {
      "epoch": 0.24074739489759253,
      "grad_norm": 7.062121391296387,
      "learning_rate": 1.5192238591448078e-05,
      "loss": 0.8298,
      "step": 670
    },
    {
      "epoch": 0.24434063959755659,
      "grad_norm": 4.680623531341553,
      "learning_rate": 1.5120373697448799e-05,
      "loss": 0.7485,
      "step": 680
    },
    {
      "epoch": 0.24793388429752067,
      "grad_norm": 4.166479110717773,
      "learning_rate": 1.5048508803449516e-05,
      "loss": 0.8893,
      "step": 690
    },
    {
      "epoch": 0.25152712899748475,
      "grad_norm": 7.121062278747559,
      "learning_rate": 1.4976643909450235e-05,
      "loss": 0.6611,
      "step": 700
    },
    {
      "epoch": 0.2551203736974488,
      "grad_norm": 5.491664886474609,
      "learning_rate": 1.4904779015450954e-05,
      "loss": 1.0562,
      "step": 710
    },
    {
      "epoch": 0.25871361839741286,
      "grad_norm": 9.273249626159668,
      "learning_rate": 1.4832914121451672e-05,
      "loss": 1.0254,
      "step": 720
    },
    {
      "epoch": 0.26230686309737694,
      "grad_norm": 10.43608570098877,
      "learning_rate": 1.476104922745239e-05,
      "loss": 0.8357,
      "step": 730
    },
    {
      "epoch": 0.265900107797341,
      "grad_norm": 9.218886375427246,
      "learning_rate": 1.468918433345311e-05,
      "loss": 0.8972,
      "step": 740
    },
    {
      "epoch": 0.26949335249730505,
      "grad_norm": 6.967902660369873,
      "learning_rate": 1.4617319439453827e-05,
      "loss": 0.9041,
      "step": 750
    },
    {
      "epoch": 0.27308659719726913,
      "grad_norm": 9.598542213439941,
      "learning_rate": 1.4545454545454546e-05,
      "loss": 0.9741,
      "step": 760
    },
    {
      "epoch": 0.2766798418972332,
      "grad_norm": 7.814625263214111,
      "learning_rate": 1.4473589651455264e-05,
      "loss": 0.9535,
      "step": 770
    },
    {
      "epoch": 0.2802730865971973,
      "grad_norm": 5.9632744789123535,
      "learning_rate": 1.4401724757455985e-05,
      "loss": 0.8045,
      "step": 780
    },
    {
      "epoch": 0.2838663312971613,
      "grad_norm": 8.069158554077148,
      "learning_rate": 1.4329859863456704e-05,
      "loss": 0.8547,
      "step": 790
    },
    {
      "epoch": 0.2874595759971254,
      "grad_norm": 8.814532279968262,
      "learning_rate": 1.4257994969457421e-05,
      "loss": 1.1339,
      "step": 800
    },
    {
      "epoch": 0.2910528206970895,
      "grad_norm": 7.119789123535156,
      "learning_rate": 1.418613007545814e-05,
      "loss": 0.8693,
      "step": 810
    },
    {
      "epoch": 0.2946460653970535,
      "grad_norm": 6.96940279006958,
      "learning_rate": 1.411426518145886e-05,
      "loss": 0.7695,
      "step": 820
    },
    {
      "epoch": 0.2982393100970176,
      "grad_norm": 6.049989223480225,
      "learning_rate": 1.4042400287459577e-05,
      "loss": 0.7869,
      "step": 830
    },
    {
      "epoch": 0.3018325547969817,
      "grad_norm": 6.798286437988281,
      "learning_rate": 1.3970535393460296e-05,
      "loss": 0.8735,
      "step": 840
    },
    {
      "epoch": 0.30542579949694576,
      "grad_norm": 7.636628150939941,
      "learning_rate": 1.3898670499461015e-05,
      "loss": 0.5762,
      "step": 850
    },
    {
      "epoch": 0.3090190441969098,
      "grad_norm": 8.8123140335083,
      "learning_rate": 1.3826805605461732e-05,
      "loss": 0.8944,
      "step": 860
    },
    {
      "epoch": 0.31261228889687387,
      "grad_norm": 8.178088188171387,
      "learning_rate": 1.3754940711462453e-05,
      "loss": 0.8161,
      "step": 870
    },
    {
      "epoch": 0.31620553359683795,
      "grad_norm": 5.31364107131958,
      "learning_rate": 1.3683075817463169e-05,
      "loss": 0.7076,
      "step": 880
    },
    {
      "epoch": 0.31979877829680203,
      "grad_norm": 5.974244594573975,
      "learning_rate": 1.361121092346389e-05,
      "loss": 0.7999,
      "step": 890
    },
    {
      "epoch": 0.32339202299676606,
      "grad_norm": 9.109082221984863,
      "learning_rate": 1.3539346029464609e-05,
      "loss": 0.9564,
      "step": 900
    },
    {
      "epoch": 0.32698526769673014,
      "grad_norm": 9.144074440002441,
      "learning_rate": 1.3467481135465326e-05,
      "loss": 0.9651,
      "step": 910
    },
    {
      "epoch": 0.3305785123966942,
      "grad_norm": 11.100114822387695,
      "learning_rate": 1.3395616241466045e-05,
      "loss": 0.9018,
      "step": 920
    },
    {
      "epoch": 0.3341717570966583,
      "grad_norm": 5.25788688659668,
      "learning_rate": 1.3323751347466764e-05,
      "loss": 0.6115,
      "step": 930
    },
    {
      "epoch": 0.33776500179662233,
      "grad_norm": 3.6270971298217773,
      "learning_rate": 1.3251886453467482e-05,
      "loss": 0.6783,
      "step": 940
    },
    {
      "epoch": 0.3413582464965864,
      "grad_norm": 9.1345853805542,
      "learning_rate": 1.31800215594682e-05,
      "loss": 0.9194,
      "step": 950
    },
    {
      "epoch": 0.3449514911965505,
      "grad_norm": 7.9600090980529785,
      "learning_rate": 1.3108156665468918e-05,
      "loss": 0.7061,
      "step": 960
    },
    {
      "epoch": 0.3485447358965146,
      "grad_norm": 11.217706680297852,
      "learning_rate": 1.3036291771469637e-05,
      "loss": 0.8675,
      "step": 970
    },
    {
      "epoch": 0.3521379805964786,
      "grad_norm": 4.390411376953125,
      "learning_rate": 1.2964426877470358e-05,
      "loss": 0.928,
      "step": 980
    },
    {
      "epoch": 0.3557312252964427,
      "grad_norm": 6.773019790649414,
      "learning_rate": 1.2892561983471074e-05,
      "loss": 0.8153,
      "step": 990
    },
    {
      "epoch": 0.35932446999640677,
      "grad_norm": 8.140026092529297,
      "learning_rate": 1.2820697089471795e-05,
      "loss": 0.7,
      "step": 1000
    },
    {
      "epoch": 0.3629177146963708,
      "grad_norm": 5.35282039642334,
      "learning_rate": 1.2748832195472514e-05,
      "loss": 0.8485,
      "step": 1010
    },
    {
      "epoch": 0.3665109593963349,
      "grad_norm": 8.486083984375,
      "learning_rate": 1.2676967301473231e-05,
      "loss": 0.7128,
      "step": 1020
    },
    {
      "epoch": 0.37010420409629896,
      "grad_norm": 8.271173477172852,
      "learning_rate": 1.260510240747395e-05,
      "loss": 0.8304,
      "step": 1030
    },
    {
      "epoch": 0.37369744879626304,
      "grad_norm": 7.8794331550598145,
      "learning_rate": 1.2533237513474668e-05,
      "loss": 0.7342,
      "step": 1040
    },
    {
      "epoch": 0.37729069349622707,
      "grad_norm": 7.080440044403076,
      "learning_rate": 1.2461372619475387e-05,
      "loss": 0.8883,
      "step": 1050
    },
    {
      "epoch": 0.38088393819619115,
      "grad_norm": 7.4731903076171875,
      "learning_rate": 1.2389507725476106e-05,
      "loss": 0.8742,
      "step": 1060
    },
    {
      "epoch": 0.38447718289615523,
      "grad_norm": 11.303037643432617,
      "learning_rate": 1.2317642831476823e-05,
      "loss": 0.9059,
      "step": 1070
    },
    {
      "epoch": 0.3880704275961193,
      "grad_norm": 3.480085611343384,
      "learning_rate": 1.2245777937477542e-05,
      "loss": 0.7729,
      "step": 1080
    },
    {
      "epoch": 0.39166367229608334,
      "grad_norm": 8.819855690002441,
      "learning_rate": 1.2173913043478263e-05,
      "loss": 0.9153,
      "step": 1090
    },
    {
      "epoch": 0.3952569169960474,
      "grad_norm": 7.252877235412598,
      "learning_rate": 1.210204814947898e-05,
      "loss": 0.7462,
      "step": 1100
    },
    {
      "epoch": 0.3988501616960115,
      "grad_norm": 9.326582908630371,
      "learning_rate": 1.20301832554797e-05,
      "loss": 0.8735,
      "step": 1110
    },
    {
      "epoch": 0.4024434063959756,
      "grad_norm": 7.847351551055908,
      "learning_rate": 1.1958318361480419e-05,
      "loss": 0.9442,
      "step": 1120
    },
    {
      "epoch": 0.4060366510959396,
      "grad_norm": 9.304997444152832,
      "learning_rate": 1.1886453467481136e-05,
      "loss": 0.7541,
      "step": 1130
    },
    {
      "epoch": 0.4096298957959037,
      "grad_norm": 6.713771820068359,
      "learning_rate": 1.1814588573481855e-05,
      "loss": 0.7521,
      "step": 1140
    },
    {
      "epoch": 0.4132231404958678,
      "grad_norm": 6.436892986297607,
      "learning_rate": 1.1742723679482573e-05,
      "loss": 0.7816,
      "step": 1150
    },
    {
      "epoch": 0.41681638519583186,
      "grad_norm": 1.2891592979431152,
      "learning_rate": 1.1670858785483292e-05,
      "loss": 0.8019,
      "step": 1160
    },
    {
      "epoch": 0.4204096298957959,
      "grad_norm": 5.085257530212402,
      "learning_rate": 1.1598993891484011e-05,
      "loss": 0.7397,
      "step": 1170
    },
    {
      "epoch": 0.42400287459575997,
      "grad_norm": 3.4146158695220947,
      "learning_rate": 1.1527128997484728e-05,
      "loss": 0.857,
      "step": 1180
    },
    {
      "epoch": 0.42759611929572405,
      "grad_norm": 3.6032421588897705,
      "learning_rate": 1.1455264103485449e-05,
      "loss": 0.7684,
      "step": 1190
    },
    {
      "epoch": 0.4311893639956881,
      "grad_norm": 5.2717390060424805,
      "learning_rate": 1.1383399209486168e-05,
      "loss": 0.7619,
      "step": 1200
    },
    {
      "epoch": 0.43478260869565216,
      "grad_norm": 6.56957483291626,
      "learning_rate": 1.1311534315486886e-05,
      "loss": 0.7833,
      "step": 1210
    },
    {
      "epoch": 0.43837585339561624,
      "grad_norm": 3.006985664367676,
      "learning_rate": 1.1239669421487605e-05,
      "loss": 0.6434,
      "step": 1220
    },
    {
      "epoch": 0.4419690980955803,
      "grad_norm": 7.374829292297363,
      "learning_rate": 1.1167804527488322e-05,
      "loss": 0.6415,
      "step": 1230
    },
    {
      "epoch": 0.44556234279554435,
      "grad_norm": 5.94835901260376,
      "learning_rate": 1.1095939633489041e-05,
      "loss": 0.771,
      "step": 1240
    },
    {
      "epoch": 0.44915558749550843,
      "grad_norm": 8.715871810913086,
      "learning_rate": 1.102407473948976e-05,
      "loss": 0.9368,
      "step": 1250
    },
    {
      "epoch": 0.4527488321954725,
      "grad_norm": 3.8362183570861816,
      "learning_rate": 1.0952209845490478e-05,
      "loss": 0.5955,
      "step": 1260
    },
    {
      "epoch": 0.4563420768954366,
      "grad_norm": 8.438674926757812,
      "learning_rate": 1.0880344951491197e-05,
      "loss": 0.9797,
      "step": 1270
    },
    {
      "epoch": 0.4599353215954006,
      "grad_norm": 7.920464515686035,
      "learning_rate": 1.0808480057491918e-05,
      "loss": 0.9476,
      "step": 1280
    },
    {
      "epoch": 0.4635285662953647,
      "grad_norm": 5.010650157928467,
      "learning_rate": 1.0736615163492633e-05,
      "loss": 0.8197,
      "step": 1290
    },
    {
      "epoch": 0.4671218109953288,
      "grad_norm": 9.06809139251709,
      "learning_rate": 1.0664750269493354e-05,
      "loss": 0.7204,
      "step": 1300
    },
    {
      "epoch": 0.47071505569529287,
      "grad_norm": 8.399813652038574,
      "learning_rate": 1.0592885375494073e-05,
      "loss": 0.9134,
      "step": 1310
    },
    {
      "epoch": 0.4743083003952569,
      "grad_norm": 8.060338020324707,
      "learning_rate": 1.052102048149479e-05,
      "loss": 0.8562,
      "step": 1320
    },
    {
      "epoch": 0.477901545095221,
      "grad_norm": 4.944212436676025,
      "learning_rate": 1.044915558749551e-05,
      "loss": 0.7243,
      "step": 1330
    },
    {
      "epoch": 0.48149478979518506,
      "grad_norm": 8.986311912536621,
      "learning_rate": 1.0377290693496227e-05,
      "loss": 0.7713,
      "step": 1340
    },
    {
      "epoch": 0.48508803449514915,
      "grad_norm": 7.62788200378418,
      "learning_rate": 1.0305425799496946e-05,
      "loss": 0.788,
      "step": 1350
    },
    {
      "epoch": 0.48868127919511317,
      "grad_norm": 4.31091833114624,
      "learning_rate": 1.0233560905497665e-05,
      "loss": 0.7138,
      "step": 1360
    },
    {
      "epoch": 0.49227452389507725,
      "grad_norm": 4.296084880828857,
      "learning_rate": 1.0161696011498383e-05,
      "loss": 0.8723,
      "step": 1370
    },
    {
      "epoch": 0.49586776859504134,
      "grad_norm": 7.611781597137451,
      "learning_rate": 1.0089831117499102e-05,
      "loss": 0.8424,
      "step": 1380
    },
    {
      "epoch": 0.49946101329500536,
      "grad_norm": 5.098887920379639,
      "learning_rate": 1.0017966223499823e-05,
      "loss": 0.5966,
      "step": 1390
    },
    {
      "epoch": 0.5030542579949695,
      "grad_norm": 7.735159873962402,
      "learning_rate": 9.94610132950054e-06,
      "loss": 0.8796,
      "step": 1400
    },
    {
      "epoch": 0.5066475026949335,
      "grad_norm": 12.891678810119629,
      "learning_rate": 9.87423643550126e-06,
      "loss": 0.9252,
      "step": 1410
    },
    {
      "epoch": 0.5102407473948976,
      "grad_norm": 4.764705181121826,
      "learning_rate": 9.802371541501977e-06,
      "loss": 0.7076,
      "step": 1420
    },
    {
      "epoch": 0.5138339920948617,
      "grad_norm": 8.446547508239746,
      "learning_rate": 9.730506647502696e-06,
      "loss": 0.6827,
      "step": 1430
    },
    {
      "epoch": 0.5174272367948257,
      "grad_norm": 7.600401878356934,
      "learning_rate": 9.658641753503413e-06,
      "loss": 0.7225,
      "step": 1440
    },
    {
      "epoch": 0.5210204814947897,
      "grad_norm": 5.719719409942627,
      "learning_rate": 9.586776859504134e-06,
      "loss": 0.6234,
      "step": 1450
    },
    {
      "epoch": 0.5246137261947539,
      "grad_norm": 6.583295822143555,
      "learning_rate": 9.514911965504851e-06,
      "loss": 0.7279,
      "step": 1460
    },
    {
      "epoch": 0.5282069708947179,
      "grad_norm": 6.228325843811035,
      "learning_rate": 9.44304707150557e-06,
      "loss": 0.7585,
      "step": 1470
    },
    {
      "epoch": 0.531800215594682,
      "grad_norm": 7.8453688621521,
      "learning_rate": 9.37118217750629e-06,
      "loss": 0.8482,
      "step": 1480
    },
    {
      "epoch": 0.5353934602946461,
      "grad_norm": 7.193871974945068,
      "learning_rate": 9.299317283507007e-06,
      "loss": 1.026,
      "step": 1490
    },
    {
      "epoch": 0.5389867049946101,
      "grad_norm": 4.170319080352783,
      "learning_rate": 9.227452389507726e-06,
      "loss": 0.9686,
      "step": 1500
    },
    {
      "epoch": 0.5425799496945742,
      "grad_norm": 4.138383388519287,
      "learning_rate": 9.155587495508445e-06,
      "loss": 0.7255,
      "step": 1510
    },
    {
      "epoch": 0.5461731943945383,
      "grad_norm": 3.0961215496063232,
      "learning_rate": 9.083722601509164e-06,
      "loss": 0.8167,
      "step": 1520
    },
    {
      "epoch": 0.5497664390945023,
      "grad_norm": 4.225940704345703,
      "learning_rate": 9.011857707509882e-06,
      "loss": 0.9055,
      "step": 1530
    },
    {
      "epoch": 0.5533596837944664,
      "grad_norm": 3.334969997406006,
      "learning_rate": 8.9399928135106e-06,
      "loss": 0.6123,
      "step": 1540
    },
    {
      "epoch": 0.5569529284944305,
      "grad_norm": 6.848125457763672,
      "learning_rate": 8.868127919511318e-06,
      "loss": 0.814,
      "step": 1550
    },
    {
      "epoch": 0.5605461731943946,
      "grad_norm": 6.627513408660889,
      "learning_rate": 8.796263025512039e-06,
      "loss": 0.668,
      "step": 1560
    },
    {
      "epoch": 0.5641394178943586,
      "grad_norm": 4.6623382568359375,
      "learning_rate": 8.724398131512756e-06,
      "loss": 0.5911,
      "step": 1570
    },
    {
      "epoch": 0.5677326625943226,
      "grad_norm": 6.965736389160156,
      "learning_rate": 8.652533237513476e-06,
      "loss": 0.8645,
      "step": 1580
    },
    {
      "epoch": 0.5713259072942868,
      "grad_norm": 5.471493244171143,
      "learning_rate": 8.580668343514193e-06,
      "loss": 0.7217,
      "step": 1590
    },
    {
      "epoch": 0.5749191519942508,
      "grad_norm": 10.030547142028809,
      "learning_rate": 8.508803449514914e-06,
      "loss": 0.8575,
      "step": 1600
    },
    {
      "epoch": 0.5785123966942148,
      "grad_norm": 6.807426452636719,
      "learning_rate": 8.436938555515631e-06,
      "loss": 0.913,
      "step": 1610
    },
    {
      "epoch": 0.582105641394179,
      "grad_norm": 6.466938018798828,
      "learning_rate": 8.36507366151635e-06,
      "loss": 0.9222,
      "step": 1620
    },
    {
      "epoch": 0.585698886094143,
      "grad_norm": 4.761947154998779,
      "learning_rate": 8.293208767517068e-06,
      "loss": 0.8348,
      "step": 1630
    },
    {
      "epoch": 0.589292130794107,
      "grad_norm": 6.450058460235596,
      "learning_rate": 8.221343873517787e-06,
      "loss": 0.6135,
      "step": 1640
    },
    {
      "epoch": 0.5928853754940712,
      "grad_norm": 6.998159885406494,
      "learning_rate": 8.149478979518506e-06,
      "loss": 0.694,
      "step": 1650
    },
    {
      "epoch": 0.5964786201940352,
      "grad_norm": 6.691666126251221,
      "learning_rate": 8.077614085519225e-06,
      "loss": 0.7971,
      "step": 1660
    },
    {
      "epoch": 0.6000718648939993,
      "grad_norm": 7.232285976409912,
      "learning_rate": 8.005749191519944e-06,
      "loss": 0.7618,
      "step": 1670
    },
    {
      "epoch": 0.6036651095939634,
      "grad_norm": 6.530168056488037,
      "learning_rate": 7.933884297520661e-06,
      "loss": 0.6388,
      "step": 1680
    },
    {
      "epoch": 0.6072583542939274,
      "grad_norm": 3.380647659301758,
      "learning_rate": 7.86201940352138e-06,
      "loss": 0.6414,
      "step": 1690
    },
    {
      "epoch": 0.6108515989938915,
      "grad_norm": 7.021971225738525,
      "learning_rate": 7.790154509522098e-06,
      "loss": 0.8261,
      "step": 1700
    },
    {
      "epoch": 0.6144448436938555,
      "grad_norm": 8.17015552520752,
      "learning_rate": 7.718289615522819e-06,
      "loss": 0.722,
      "step": 1710
    },
    {
      "epoch": 0.6180380883938196,
      "grad_norm": 11.588608741760254,
      "learning_rate": 7.646424721523536e-06,
      "loss": 0.8205,
      "step": 1720
    },
    {
      "epoch": 0.6216313330937837,
      "grad_norm": 5.765032768249512,
      "learning_rate": 7.574559827524255e-06,
      "loss": 0.6734,
      "step": 1730
    },
    {
      "epoch": 0.6252245777937477,
      "grad_norm": 8.35347843170166,
      "learning_rate": 7.5026949335249736e-06,
      "loss": 0.8921,
      "step": 1740
    },
    {
      "epoch": 0.6288178224937119,
      "grad_norm": 5.127620220184326,
      "learning_rate": 7.430830039525693e-06,
      "loss": 0.6934,
      "step": 1750
    },
    {
      "epoch": 0.6324110671936759,
      "grad_norm": 4.722314834594727,
      "learning_rate": 7.358965145526411e-06,
      "loss": 0.6978,
      "step": 1760
    },
    {
      "epoch": 0.6360043118936399,
      "grad_norm": 7.5209455490112305,
      "learning_rate": 7.287100251527129e-06,
      "loss": 0.7246,
      "step": 1770
    },
    {
      "epoch": 0.6395975565936041,
      "grad_norm": 7.9390435218811035,
      "learning_rate": 7.2152353575278474e-06,
      "loss": 0.7407,
      "step": 1780
    },
    {
      "epoch": 0.6431908012935681,
      "grad_norm": 4.685247898101807,
      "learning_rate": 7.143370463528567e-06,
      "loss": 0.8586,
      "step": 1790
    },
    {
      "epoch": 0.6467840459935321,
      "grad_norm": 6.8963623046875,
      "learning_rate": 7.071505569529286e-06,
      "loss": 0.7048,
      "step": 1800
    },
    {
      "epoch": 0.6503772906934963,
      "grad_norm": 7.452021598815918,
      "learning_rate": 6.999640675530004e-06,
      "loss": 0.7082,
      "step": 1810
    },
    {
      "epoch": 0.6539705353934603,
      "grad_norm": 7.505745887756348,
      "learning_rate": 6.927775781530722e-06,
      "loss": 0.683,
      "step": 1820
    },
    {
      "epoch": 0.6575637800934243,
      "grad_norm": 7.095577716827393,
      "learning_rate": 6.855910887531442e-06,
      "loss": 0.7954,
      "step": 1830
    },
    {
      "epoch": 0.6611570247933884,
      "grad_norm": 5.106074333190918,
      "learning_rate": 6.78404599353216e-06,
      "loss": 0.7707,
      "step": 1840
    },
    {
      "epoch": 0.6647502694933525,
      "grad_norm": 4.525816917419434,
      "learning_rate": 6.712181099532879e-06,
      "loss": 0.7274,
      "step": 1850
    },
    {
      "epoch": 0.6683435141933166,
      "grad_norm": 9.403148651123047,
      "learning_rate": 6.640316205533598e-06,
      "loss": 0.7981,
      "step": 1860
    },
    {
      "epoch": 0.6719367588932806,
      "grad_norm": 2.272888422012329,
      "learning_rate": 6.568451311534316e-06,
      "loss": 0.6679,
      "step": 1870
    },
    {
      "epoch": 0.6755300035932447,
      "grad_norm": 3.779609203338623,
      "learning_rate": 6.496586417535034e-06,
      "loss": 0.6771,
      "step": 1880
    },
    {
      "epoch": 0.6791232482932088,
      "grad_norm": 6.928153038024902,
      "learning_rate": 6.424721523535753e-06,
      "loss": 0.8161,
      "step": 1890
    },
    {
      "epoch": 0.6827164929931728,
      "grad_norm": 9.023139953613281,
      "learning_rate": 6.3528566295364724e-06,
      "loss": 0.7828,
      "step": 1900
    },
    {
      "epoch": 0.6863097376931369,
      "grad_norm": 6.370776653289795,
      "learning_rate": 6.280991735537191e-06,
      "loss": 0.7397,
      "step": 1910
    },
    {
      "epoch": 0.689902982393101,
      "grad_norm": 4.26768159866333,
      "learning_rate": 6.209126841537909e-06,
      "loss": 0.671,
      "step": 1920
    },
    {
      "epoch": 0.693496227093065,
      "grad_norm": 11.181378364562988,
      "learning_rate": 6.137261947538627e-06,
      "loss": 0.8204,
      "step": 1930
    },
    {
      "epoch": 0.6970894717930292,
      "grad_norm": 11.705507278442383,
      "learning_rate": 6.065397053539347e-06,
      "loss": 0.6734,
      "step": 1940
    },
    {
      "epoch": 0.7006827164929932,
      "grad_norm": 8.077978134155273,
      "learning_rate": 5.993532159540065e-06,
      "loss": 0.9679,
      "step": 1950
    },
    {
      "epoch": 0.7042759611929572,
      "grad_norm": 3.5401253700256348,
      "learning_rate": 5.921667265540784e-06,
      "loss": 0.7084,
      "step": 1960
    },
    {
      "epoch": 0.7078692058929213,
      "grad_norm": 7.2463555335998535,
      "learning_rate": 5.849802371541502e-06,
      "loss": 0.7845,
      "step": 1970
    },
    {
      "epoch": 0.7114624505928854,
      "grad_norm": 3.2862935066223145,
      "learning_rate": 5.777937477542221e-06,
      "loss": 0.5628,
      "step": 1980
    },
    {
      "epoch": 0.7150556952928494,
      "grad_norm": 11.543691635131836,
      "learning_rate": 5.70607258354294e-06,
      "loss": 0.7221,
      "step": 1990
    },
    {
      "epoch": 0.7186489399928135,
      "grad_norm": 8.188749313354492,
      "learning_rate": 5.634207689543658e-06,
      "loss": 0.7816,
      "step": 2000
    },
    {
      "epoch": 0.7222421846927776,
      "grad_norm": 5.3175740242004395,
      "learning_rate": 5.562342795544377e-06,
      "loss": 0.6603,
      "step": 2010
    },
    {
      "epoch": 0.7258354293927416,
      "grad_norm": 6.805153846740723,
      "learning_rate": 5.490477901545096e-06,
      "loss": 0.8621,
      "step": 2020
    },
    {
      "epoch": 0.7294286740927057,
      "grad_norm": 11.035388946533203,
      "learning_rate": 5.418613007545814e-06,
      "loss": 0.8959,
      "step": 2030
    },
    {
      "epoch": 0.7330219187926698,
      "grad_norm": 6.663356304168701,
      "learning_rate": 5.346748113546532e-06,
      "loss": 0.7328,
      "step": 2040
    },
    {
      "epoch": 0.7366151634926339,
      "grad_norm": 8.319828987121582,
      "learning_rate": 5.274883219547252e-06,
      "loss": 0.8377,
      "step": 2050
    },
    {
      "epoch": 0.7402084081925979,
      "grad_norm": 12.84389591217041,
      "learning_rate": 5.2030183255479705e-06,
      "loss": 0.8101,
      "step": 2060
    },
    {
      "epoch": 0.743801652892562,
      "grad_norm": 9.387964248657227,
      "learning_rate": 5.131153431548689e-06,
      "loss": 0.819,
      "step": 2070
    },
    {
      "epoch": 0.7473948975925261,
      "grad_norm": 13.591238021850586,
      "learning_rate": 5.059288537549407e-06,
      "loss": 0.5644,
      "step": 2080
    },
    {
      "epoch": 0.7509881422924901,
      "grad_norm": 7.670563697814941,
      "learning_rate": 4.987423643550126e-06,
      "loss": 0.8611,
      "step": 2090
    },
    {
      "epoch": 0.7545813869924541,
      "grad_norm": 7.460164546966553,
      "learning_rate": 4.915558749550845e-06,
      "loss": 0.5785,
      "step": 2100
    },
    {
      "epoch": 0.7581746316924183,
      "grad_norm": 10.167394638061523,
      "learning_rate": 4.8436938555515634e-06,
      "loss": 0.8433,
      "step": 2110
    },
    {
      "epoch": 0.7617678763923823,
      "grad_norm": 5.6100945472717285,
      "learning_rate": 4.7718289615522825e-06,
      "loss": 0.6128,
      "step": 2120
    },
    {
      "epoch": 0.7653611210923464,
      "grad_norm": 7.009652614593506,
      "learning_rate": 4.699964067553001e-06,
      "loss": 0.6752,
      "step": 2130
    },
    {
      "epoch": 0.7689543657923105,
      "grad_norm": 6.665421485900879,
      "learning_rate": 4.62809917355372e-06,
      "loss": 0.551,
      "step": 2140
    },
    {
      "epoch": 0.7725476104922745,
      "grad_norm": 7.132537364959717,
      "learning_rate": 4.556234279554438e-06,
      "loss": 0.7817,
      "step": 2150
    },
    {
      "epoch": 0.7761408551922386,
      "grad_norm": 7.3072943687438965,
      "learning_rate": 4.484369385555156e-06,
      "loss": 0.7506,
      "step": 2160
    },
    {
      "epoch": 0.7797340998922027,
      "grad_norm": 6.452977657318115,
      "learning_rate": 4.4125044915558755e-06,
      "loss": 0.7703,
      "step": 2170
    },
    {
      "epoch": 0.7833273445921667,
      "grad_norm": 9.874847412109375,
      "learning_rate": 4.340639597556594e-06,
      "loss": 0.8214,
      "step": 2180
    },
    {
      "epoch": 0.7869205892921308,
      "grad_norm": 4.646761417388916,
      "learning_rate": 4.268774703557312e-06,
      "loss": 0.8222,
      "step": 2190
    },
    {
      "epoch": 0.7905138339920948,
      "grad_norm": 4.57820463180542,
      "learning_rate": 4.196909809558031e-06,
      "loss": 0.6275,
      "step": 2200
    },
    {
      "epoch": 0.7941070786920589,
      "grad_norm": 6.416154861450195,
      "learning_rate": 4.125044915558749e-06,
      "loss": 0.6548,
      "step": 2210
    },
    {
      "epoch": 0.797700323392023,
      "grad_norm": 6.637320041656494,
      "learning_rate": 4.0531800215594685e-06,
      "loss": 0.8877,
      "step": 2220
    },
    {
      "epoch": 0.801293568091987,
      "grad_norm": 5.396492004394531,
      "learning_rate": 3.981315127560187e-06,
      "loss": 0.8622,
      "step": 2230
    },
    {
      "epoch": 0.8048868127919512,
      "grad_norm": 7.009629726409912,
      "learning_rate": 3.909450233560906e-06,
      "loss": 0.7937,
      "step": 2240
    },
    {
      "epoch": 0.8084800574919152,
      "grad_norm": 6.630249977111816,
      "learning_rate": 3.837585339561625e-06,
      "loss": 0.8945,
      "step": 2250
    },
    {
      "epoch": 0.8120733021918792,
      "grad_norm": 7.130268096923828,
      "learning_rate": 3.7657204455623432e-06,
      "loss": 0.8868,
      "step": 2260
    },
    {
      "epoch": 0.8156665468918434,
      "grad_norm": 5.933340072631836,
      "learning_rate": 3.693855551563062e-06,
      "loss": 0.8686,
      "step": 2270
    },
    {
      "epoch": 0.8192597915918074,
      "grad_norm": 3.7892746925354004,
      "learning_rate": 3.62199065756378e-06,
      "loss": 0.6425,
      "step": 2280
    },
    {
      "epoch": 0.8228530362917714,
      "grad_norm": 11.255279541015625,
      "learning_rate": 3.5501257635644993e-06,
      "loss": 0.7639,
      "step": 2290
    },
    {
      "epoch": 0.8264462809917356,
      "grad_norm": 6.262332916259766,
      "learning_rate": 3.4782608695652175e-06,
      "loss": 0.6272,
      "step": 2300
    },
    {
      "epoch": 0.8300395256916996,
      "grad_norm": 4.936995506286621,
      "learning_rate": 3.4063959755659366e-06,
      "loss": 0.635,
      "step": 2310
    },
    {
      "epoch": 0.8336327703916637,
      "grad_norm": 5.9594926834106445,
      "learning_rate": 3.334531081566655e-06,
      "loss": 0.6994,
      "step": 2320
    },
    {
      "epoch": 0.8372260150916278,
      "grad_norm": 3.9269344806671143,
      "learning_rate": 3.2626661875673735e-06,
      "loss": 0.5302,
      "step": 2330
    },
    {
      "epoch": 0.8408192597915918,
      "grad_norm": 6.650905609130859,
      "learning_rate": 3.1908012935680922e-06,
      "loss": 0.8061,
      "step": 2340
    },
    {
      "epoch": 0.8444125044915559,
      "grad_norm": 6.4028449058532715,
      "learning_rate": 3.118936399568811e-06,
      "loss": 0.6817,
      "step": 2350
    },
    {
      "epoch": 0.8480057491915199,
      "grad_norm": 6.031358242034912,
      "learning_rate": 3.047071505569529e-06,
      "loss": 0.7775,
      "step": 2360
    },
    {
      "epoch": 0.851598993891484,
      "grad_norm": 8.679971694946289,
      "learning_rate": 2.9752066115702483e-06,
      "loss": 0.9736,
      "step": 2370
    },
    {
      "epoch": 0.8551922385914481,
      "grad_norm": 9.925468444824219,
      "learning_rate": 2.9033417175709665e-06,
      "loss": 0.6576,
      "step": 2380
    },
    {
      "epoch": 0.8587854832914121,
      "grad_norm": 6.079770565032959,
      "learning_rate": 2.8314768235716856e-06,
      "loss": 0.9628,
      "step": 2390
    },
    {
      "epoch": 0.8623787279913762,
      "grad_norm": 5.954907417297363,
      "learning_rate": 2.759611929572404e-06,
      "loss": 0.7555,
      "step": 2400
    },
    {
      "epoch": 0.8659719726913403,
      "grad_norm": 5.5433783531188965,
      "learning_rate": 2.687747035573123e-06,
      "loss": 0.5379,
      "step": 2410
    },
    {
      "epoch": 0.8695652173913043,
      "grad_norm": 9.579656600952148,
      "learning_rate": 2.6158821415738412e-06,
      "loss": 0.6478,
      "step": 2420
    },
    {
      "epoch": 0.8731584620912685,
      "grad_norm": 5.076817035675049,
      "learning_rate": 2.54401724757456e-06,
      "loss": 0.6276,
      "step": 2430
    },
    {
      "epoch": 0.8767517067912325,
      "grad_norm": 8.012504577636719,
      "learning_rate": 2.4721523535752786e-06,
      "loss": 0.6421,
      "step": 2440
    },
    {
      "epoch": 0.8803449514911965,
      "grad_norm": 7.541149139404297,
      "learning_rate": 2.4002874595759973e-06,
      "loss": 0.6516,
      "step": 2450
    },
    {
      "epoch": 0.8839381961911607,
      "grad_norm": 6.304592132568359,
      "learning_rate": 2.328422565576716e-06,
      "loss": 0.6013,
      "step": 2460
    },
    {
      "epoch": 0.8875314408911247,
      "grad_norm": 4.353475570678711,
      "learning_rate": 2.2565576715774346e-06,
      "loss": 0.8041,
      "step": 2470
    },
    {
      "epoch": 0.8911246855910887,
      "grad_norm": 7.181800365447998,
      "learning_rate": 2.1846927775781533e-06,
      "loss": 0.8463,
      "step": 2480
    },
    {
      "epoch": 0.8947179302910528,
      "grad_norm": 3.317331552505493,
      "learning_rate": 2.112827883578872e-06,
      "loss": 0.6592,
      "step": 2490
    },
    {
      "epoch": 0.8983111749910169,
      "grad_norm": 5.976993083953857,
      "learning_rate": 2.0409629895795903e-06,
      "loss": 0.701,
      "step": 2500
    },
    {
      "epoch": 0.901904419690981,
      "grad_norm": 5.5971832275390625,
      "learning_rate": 1.969098095580309e-06,
      "loss": 0.7341,
      "step": 2510
    },
    {
      "epoch": 0.905497664390945,
      "grad_norm": 10.143183708190918,
      "learning_rate": 1.8972332015810276e-06,
      "loss": 0.8767,
      "step": 2520
    },
    {
      "epoch": 0.9090909090909091,
      "grad_norm": 7.039025783538818,
      "learning_rate": 1.8253683075817465e-06,
      "loss": 0.6021,
      "step": 2530
    },
    {
      "epoch": 0.9126841537908732,
      "grad_norm": 5.272881984710693,
      "learning_rate": 1.7535034135824652e-06,
      "loss": 0.7078,
      "step": 2540
    },
    {
      "epoch": 0.9162773984908372,
      "grad_norm": 6.3512349128723145,
      "learning_rate": 1.6816385195831839e-06,
      "loss": 0.8479,
      "step": 2550
    },
    {
      "epoch": 0.9198706431908013,
      "grad_norm": 5.514537811279297,
      "learning_rate": 1.6097736255839025e-06,
      "loss": 0.7474,
      "step": 2560
    },
    {
      "epoch": 0.9234638878907654,
      "grad_norm": 7.537227630615234,
      "learning_rate": 1.537908731584621e-06,
      "loss": 0.7484,
      "step": 2570
    },
    {
      "epoch": 0.9270571325907294,
      "grad_norm": 6.216177940368652,
      "learning_rate": 1.4660438375853397e-06,
      "loss": 0.7374,
      "step": 2580
    },
    {
      "epoch": 0.9306503772906934,
      "grad_norm": 5.915536403656006,
      "learning_rate": 1.3941789435860584e-06,
      "loss": 0.7044,
      "step": 2590
    },
    {
      "epoch": 0.9342436219906576,
      "grad_norm": 7.503188133239746,
      "learning_rate": 1.322314049586777e-06,
      "loss": 0.7071,
      "step": 2600
    },
    {
      "epoch": 0.9378368666906216,
      "grad_norm": 6.930708408355713,
      "learning_rate": 1.2504491555874957e-06,
      "loss": 0.8625,
      "step": 2610
    },
    {
      "epoch": 0.9414301113905857,
      "grad_norm": 7.8779296875,
      "learning_rate": 1.1785842615882142e-06,
      "loss": 0.614,
      "step": 2620
    },
    {
      "epoch": 0.9450233560905498,
      "grad_norm": 6.569777488708496,
      "learning_rate": 1.1067193675889329e-06,
      "loss": 0.6891,
      "step": 2630
    },
    {
      "epoch": 0.9486166007905138,
      "grad_norm": 6.492995738983154,
      "learning_rate": 1.0348544735896516e-06,
      "loss": 0.7514,
      "step": 2640
    },
    {
      "epoch": 0.9522098454904779,
      "grad_norm": 5.516550540924072,
      "learning_rate": 9.629895795903702e-07,
      "loss": 0.6322,
      "step": 2650
    },
    {
      "epoch": 0.955803090190442,
      "grad_norm": 8.970290184020996,
      "learning_rate": 8.911246855910888e-07,
      "loss": 0.8457,
      "step": 2660
    },
    {
      "epoch": 0.959396334890406,
      "grad_norm": 7.239262580871582,
      "learning_rate": 8.192597915918074e-07,
      "loss": 0.7673,
      "step": 2670
    },
    {
      "epoch": 0.9629895795903701,
      "grad_norm": 6.02666711807251,
      "learning_rate": 7.473948975925262e-07,
      "loss": 0.619,
      "step": 2680
    },
    {
      "epoch": 0.9665828242903342,
      "grad_norm": 5.881245136260986,
      "learning_rate": 6.755300035932449e-07,
      "loss": 0.7459,
      "step": 2690
    },
    {
      "epoch": 0.9701760689902983,
      "grad_norm": 6.145602703094482,
      "learning_rate": 6.036651095939633e-07,
      "loss": 0.7081,
      "step": 2700
    },
    {
      "epoch": 0.9737693136902623,
      "grad_norm": 7.62270450592041,
      "learning_rate": 5.318002155946821e-07,
      "loss": 0.7145,
      "step": 2710
    },
    {
      "epoch": 0.9773625583902263,
      "grad_norm": 7.651326656341553,
      "learning_rate": 4.599353215954007e-07,
      "loss": 0.6958,
      "step": 2720
    },
    {
      "epoch": 0.9809558030901905,
      "grad_norm": 5.604285717010498,
      "learning_rate": 3.880704275961193e-07,
      "loss": 0.8249,
      "step": 2730
    },
    {
      "epoch": 0.9845490477901545,
      "grad_norm": 3.623215436935425,
      "learning_rate": 3.1620553359683794e-07,
      "loss": 0.5312,
      "step": 2740
    },
    {
      "epoch": 0.9881422924901185,
      "grad_norm": 4.056583881378174,
      "learning_rate": 2.443406395975566e-07,
      "loss": 0.7,
      "step": 2750
    },
    {
      "epoch": 0.9917355371900827,
      "grad_norm": 6.8995466232299805,
      "learning_rate": 1.7247574559827524e-07,
      "loss": 0.6235,
      "step": 2760
    },
    {
      "epoch": 0.9953287818900467,
      "grad_norm": 5.702864646911621,
      "learning_rate": 1.0061085159899391e-07,
      "loss": 0.7579,
      "step": 2770
    },
    {
      "epoch": 0.9989220265900107,
      "grad_norm": 5.987502574920654,
      "learning_rate": 2.8745957599712543e-08,
      "loss": 0.627,
      "step": 2780
    },
    {
      "epoch": 1.0,
      "eval_accuracy": 0.7164420485175202,
      "eval_loss": 0.7538194060325623,
      "eval_runtime": 708.1853,
      "eval_samples_per_second": 7.858,
      "eval_steps_per_second": 0.983,
      "step": 2783
    }
  ],
  "logging_steps": 10,
  "max_steps": 2783,
  "num_input_tokens_seen": 0,
  "num_train_epochs": 1,
  "save_steps": 500,
  "stateful_callbacks": {
    "TrainerControl": {
      "args": {
        "should_epoch_stop": false,
        "should_evaluate": false,
        "should_log": false,
        "should_save": true,
        "should_training_stop": true
      },
      "attributes": {}
    }
  },
  "total_flos": 1.7248939997743964e+18,
  "train_batch_size": 8,
  "trial_name": null,
  "trial_params": null
}
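
The file above has the shape of a Hugging Face Trainer `trainer_state.json`. As a minimal sketch (assuming that layout, and using a placeholder path derived from `best_model_checkpoint`), the log can be loaded and the training-loss curve plotted like this:

```python
# Minimal sketch: load a Trainer state file and plot the logged training loss.
# The path below is a placeholder; adjust it to wherever the file actually lives.
import json

import matplotlib.pyplot as plt

with open("models/affectnet_finetuned/checkpoint-2783/trainer_state.json") as f:
    state = json.load(f)

# Entries logged every `logging_steps` carry a "loss" key; the final entry
# instead holds the end-of-epoch eval metrics (eval_accuracy, eval_loss, ...).
train_logs = [entry for entry in state["log_history"] if "loss" in entry]
steps = [entry["step"] for entry in train_logs]
losses = [entry["loss"] for entry in train_logs]

plt.plot(steps, losses)
plt.xlabel("step")
plt.ylabel("training loss")
plt.title(f"best eval accuracy: {state['best_metric']:.4f}")
plt.show()
```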