| { |
| "best_global_step": 32500, |
| "best_metric": 0.8929190695762744, |
| "best_model_checkpoint": "./lang-ner-xlmr/checkpoint-32500", |
| "epoch": 2.0, |
| "eval_steps": 2500, |
| "global_step": 34550, |
| "is_hyper_param_search": false, |
| "is_local_process_zero": true, |
| "is_world_process_zero": true, |
| "log_history": [ |
| { |
| "epoch": 0.005788879562360705, |
| "grad_norm": 2.855613946914673, |
| "learning_rate": 4.9856729377713464e-05, |
| "loss": 2.9271701049804686, |
| "step": 100 |
| }, |
| { |
| "epoch": 0.01157775912472141, |
| "grad_norm": 1.3880442380905151, |
| "learning_rate": 4.9712011577424025e-05, |
| "loss": 0.34735004425048827, |
| "step": 200 |
| }, |
| { |
| "epoch": 0.017366638687082114, |
| "grad_norm": 2.103525400161743, |
| "learning_rate": 4.9567293777134585e-05, |
| "loss": 0.14239531517028808, |
| "step": 300 |
| }, |
| { |
| "epoch": 0.02315551824944282, |
| "grad_norm": 1.3836640119552612, |
| "learning_rate": 4.942257597684515e-05, |
| "loss": 0.10217083930969238, |
| "step": 400 |
| }, |
| { |
| "epoch": 0.028944397811803524, |
| "grad_norm": 2.793973445892334, |
| "learning_rate": 4.927785817655572e-05, |
| "loss": 0.08668763160705567, |
| "step": 500 |
| }, |
| { |
| "epoch": 0.03473327737416423, |
| "grad_norm": 0.9237315654754639, |
| "learning_rate": 4.913314037626629e-05, |
| "loss": 0.07951077461242675, |
| "step": 600 |
| }, |
| { |
| "epoch": 0.04052215693652494, |
| "grad_norm": 1.0332331657409668, |
| "learning_rate": 4.898842257597685e-05, |
| "loss": 0.07264325618743897, |
| "step": 700 |
| }, |
| { |
| "epoch": 0.04631103649888564, |
| "grad_norm": 0.7395448088645935, |
| "learning_rate": 4.884370477568741e-05, |
| "loss": 0.0672088623046875, |
| "step": 800 |
| }, |
| { |
| "epoch": 0.052099916061246344, |
| "grad_norm": 0.4162349998950958, |
| "learning_rate": 4.8698986975397976e-05, |
| "loss": 0.06357911109924316, |
| "step": 900 |
| }, |
| { |
| "epoch": 0.05788879562360705, |
| "grad_norm": 1.134838342666626, |
| "learning_rate": 4.855426917510854e-05, |
| "loss": 0.06361278057098389, |
| "step": 1000 |
| }, |
| { |
| "epoch": 0.06367767518596776, |
| "grad_norm": 0.9246254563331604, |
| "learning_rate": 4.8409551374819104e-05, |
| "loss": 0.06069922924041748, |
| "step": 1100 |
| }, |
| { |
| "epoch": 0.06946655474832845, |
| "grad_norm": 0.4614603519439697, |
| "learning_rate": 4.826483357452967e-05, |
| "loss": 0.0631745719909668, |
| "step": 1200 |
| }, |
| { |
| "epoch": 0.07525543431068916, |
| "grad_norm": 1.585123062133789, |
| "learning_rate": 4.812011577424023e-05, |
| "loss": 0.05860543251037598, |
| "step": 1300 |
| }, |
| { |
| "epoch": 0.08104431387304988, |
| "grad_norm": 0.521318793296814, |
| "learning_rate": 4.79753979739508e-05, |
| "loss": 0.056866631507873536, |
| "step": 1400 |
| }, |
| { |
| "epoch": 0.08683319343541057, |
| "grad_norm": 0.9092019200325012, |
| "learning_rate": 4.7830680173661366e-05, |
| "loss": 0.06285685062408447, |
| "step": 1500 |
| }, |
| { |
| "epoch": 0.09262207299777128, |
| "grad_norm": 0.612201452255249, |
| "learning_rate": 4.7685962373371927e-05, |
| "loss": 0.054396772384643556, |
| "step": 1600 |
| }, |
| { |
| "epoch": 0.09841095256013199, |
| "grad_norm": 0.6812730431556702, |
| "learning_rate": 4.7541244573082494e-05, |
| "loss": 0.05651024341583252, |
| "step": 1700 |
| }, |
| { |
| "epoch": 0.10419983212249269, |
| "grad_norm": 0.8203144073486328, |
| "learning_rate": 4.7396526772793054e-05, |
| "loss": 0.061346669197082516, |
| "step": 1800 |
| }, |
| { |
| "epoch": 0.1099887116848534, |
| "grad_norm": 1.9262521266937256, |
| "learning_rate": 4.725180897250362e-05, |
| "loss": 0.05345334053039551, |
| "step": 1900 |
| }, |
| { |
| "epoch": 0.1157775912472141, |
| "grad_norm": 1.366341471672058, |
| "learning_rate": 4.710709117221418e-05, |
| "loss": 0.049096508026123045, |
| "step": 2000 |
| }, |
| { |
| "epoch": 0.1215664708095748, |
| "grad_norm": 1.9058470726013184, |
| "learning_rate": 4.696237337192475e-05, |
| "loss": 0.04964198589324951, |
| "step": 2100 |
| }, |
| { |
| "epoch": 0.12735535037193552, |
| "grad_norm": 0.20036761462688446, |
| "learning_rate": 4.681765557163532e-05, |
| "loss": 0.048866801261901856, |
| "step": 2200 |
| }, |
| { |
| "epoch": 0.13314422993429623, |
| "grad_norm": 0.6949800252914429, |
| "learning_rate": 4.667293777134588e-05, |
| "loss": 0.049001736640930174, |
| "step": 2300 |
| }, |
| { |
| "epoch": 0.1389331094966569, |
| "grad_norm": 1.4212485551834106, |
| "learning_rate": 4.6528219971056445e-05, |
| "loss": 0.05099475383758545, |
| "step": 2400 |
| }, |
| { |
| "epoch": 0.14472198905901762, |
| "grad_norm": 0.5998656749725342, |
| "learning_rate": 4.6383502170767005e-05, |
| "loss": 0.046523327827453616, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.14472198905901762, |
| "eval_accuracy": 0.9827913999033898, |
| "eval_f1": 0.8260160577581378, |
| "eval_loss": 0.08191228657960892, |
| "eval_precision": 0.794459210305773, |
| "eval_recall": 0.8601835528362543, |
| "eval_runtime": 41.7733, |
| "eval_samples_per_second": 299.234, |
| "eval_steps_per_second": 8.331, |
| "step": 2500 |
| }, |
| { |
| "epoch": 0.15051086862137833, |
| "grad_norm": 0.26346391439437866, |
| "learning_rate": 4.623878437047757e-05, |
| "loss": 0.04394114971160889, |
| "step": 2600 |
| }, |
| { |
| "epoch": 0.15629974818373904, |
| "grad_norm": 0.6683024168014526, |
| "learning_rate": 4.609406657018814e-05, |
| "loss": 0.04297324180603027, |
| "step": 2700 |
| }, |
| { |
| "epoch": 0.16208862774609975, |
| "grad_norm": 0.792713463306427, |
| "learning_rate": 4.59493487698987e-05, |
| "loss": 0.044585137367248534, |
| "step": 2800 |
| }, |
| { |
| "epoch": 0.16787750730846046, |
| "grad_norm": 0.8176506161689758, |
| "learning_rate": 4.580463096960926e-05, |
| "loss": 0.04621150970458984, |
| "step": 2900 |
| }, |
| { |
| "epoch": 0.17366638687082114, |
| "grad_norm": 0.21292151510715485, |
| "learning_rate": 4.565991316931983e-05, |
| "loss": 0.04335964679718018, |
| "step": 3000 |
| }, |
| { |
| "epoch": 0.17945526643318185, |
| "grad_norm": 0.5183402299880981, |
| "learning_rate": 4.5515195369030395e-05, |
| "loss": 0.045092535018920896, |
| "step": 3100 |
| }, |
| { |
| "epoch": 0.18524414599554256, |
| "grad_norm": 0.8505319356918335, |
| "learning_rate": 4.537047756874096e-05, |
| "loss": 0.03996892213821411, |
| "step": 3200 |
| }, |
| { |
| "epoch": 0.19103302555790327, |
| "grad_norm": 2.187335252761841, |
| "learning_rate": 4.522575976845152e-05, |
| "loss": 0.048550710678100586, |
| "step": 3300 |
| }, |
| { |
| "epoch": 0.19682190512026398, |
| "grad_norm": 0.5401766896247864, |
| "learning_rate": 4.5081041968162084e-05, |
| "loss": 0.03830349683761597, |
| "step": 3400 |
| }, |
| { |
| "epoch": 0.20261078468262467, |
| "grad_norm": 3.9655873775482178, |
| "learning_rate": 4.493632416787265e-05, |
| "loss": 0.04094114303588867, |
| "step": 3500 |
| }, |
| { |
| "epoch": 0.20839966424498538, |
| "grad_norm": 2.4231624603271484, |
| "learning_rate": 4.479160636758322e-05, |
| "loss": 0.050821685791015626, |
| "step": 3600 |
| }, |
| { |
| "epoch": 0.2141885438073461, |
| "grad_norm": 0.8014847040176392, |
| "learning_rate": 4.464688856729378e-05, |
| "loss": 0.04339597225189209, |
| "step": 3700 |
| }, |
| { |
| "epoch": 0.2199774233697068, |
| "grad_norm": 0.6645370125770569, |
| "learning_rate": 4.450217076700434e-05, |
| "loss": 0.04568838119506836, |
| "step": 3800 |
| }, |
| { |
| "epoch": 0.2257663029320675, |
| "grad_norm": 0.7347345948219299, |
| "learning_rate": 4.435745296671491e-05, |
| "loss": 0.038632638454437256, |
| "step": 3900 |
| }, |
| { |
| "epoch": 0.2315551824944282, |
| "grad_norm": 0.5599935054779053, |
| "learning_rate": 4.4212735166425474e-05, |
| "loss": 0.048587980270385744, |
| "step": 4000 |
| }, |
| { |
| "epoch": 0.2373440620567889, |
| "grad_norm": 0.6407144665718079, |
| "learning_rate": 4.406801736613604e-05, |
| "loss": 0.04263918876647949, |
| "step": 4100 |
| }, |
| { |
| "epoch": 0.2431329416191496, |
| "grad_norm": 0.6048207879066467, |
| "learning_rate": 4.39232995658466e-05, |
| "loss": 0.039798662662506104, |
| "step": 4200 |
| }, |
| { |
| "epoch": 0.24892182118151032, |
| "grad_norm": 1.7978732585906982, |
| "learning_rate": 4.377858176555716e-05, |
| "loss": 0.04390395164489746, |
| "step": 4300 |
| }, |
| { |
| "epoch": 0.25471070074387103, |
| "grad_norm": 0.5556352734565735, |
| "learning_rate": 4.363386396526773e-05, |
| "loss": 0.03865460634231568, |
| "step": 4400 |
| }, |
| { |
| "epoch": 0.2604995803062317, |
| "grad_norm": 1.2710750102996826, |
| "learning_rate": 4.34891461649783e-05, |
| "loss": 0.041819486618041996, |
| "step": 4500 |
| }, |
| { |
| "epoch": 0.26628845986859245, |
| "grad_norm": 0.5540153384208679, |
| "learning_rate": 4.334442836468886e-05, |
| "loss": 0.03990748643875122, |
| "step": 4600 |
| }, |
| { |
| "epoch": 0.27207733943095314, |
| "grad_norm": 0.24696502089500427, |
| "learning_rate": 4.319971056439942e-05, |
| "loss": 0.039545681476593014, |
| "step": 4700 |
| }, |
| { |
| "epoch": 0.2778662189933138, |
| "grad_norm": 0.2856484353542328, |
| "learning_rate": 4.3054992764109985e-05, |
| "loss": 0.04045855045318603, |
| "step": 4800 |
| }, |
| { |
| "epoch": 0.28365509855567456, |
| "grad_norm": 0.4371595084667206, |
| "learning_rate": 4.291027496382055e-05, |
| "loss": 0.04244920253753662, |
| "step": 4900 |
| }, |
| { |
| "epoch": 0.28944397811803524, |
| "grad_norm": 0.42544451355934143, |
| "learning_rate": 4.276555716353112e-05, |
| "loss": 0.04395281791687012, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.28944397811803524, |
| "eval_accuracy": 0.9842557123690646, |
| "eval_f1": 0.8330216670406662, |
| "eval_loss": 0.07031189650297165, |
| "eval_precision": 0.8023149704625138, |
| "eval_recall": 0.8661723530021778, |
| "eval_runtime": 32.2916, |
| "eval_samples_per_second": 387.098, |
| "eval_steps_per_second": 10.777, |
| "step": 5000 |
| }, |
| { |
| "epoch": 0.295232857680396, |
| "grad_norm": 0.46878165006637573, |
| "learning_rate": 4.262083936324168e-05, |
| "loss": 0.041322021484375, |
| "step": 5100 |
| }, |
| { |
| "epoch": 0.30102173724275666, |
| "grad_norm": 0.6579214930534363, |
| "learning_rate": 4.247612156295224e-05, |
| "loss": 0.03830409288406372, |
| "step": 5200 |
| }, |
| { |
| "epoch": 0.30681061680511734, |
| "grad_norm": 0.42139366269111633, |
| "learning_rate": 4.233140376266281e-05, |
| "loss": 0.03726105690002442, |
| "step": 5300 |
| }, |
| { |
| "epoch": 0.3125994963674781, |
| "grad_norm": 0.2871117889881134, |
| "learning_rate": 4.2186685962373376e-05, |
| "loss": 0.04068236351013184, |
| "step": 5400 |
| }, |
| { |
| "epoch": 0.31838837592983876, |
| "grad_norm": 0.5657113790512085, |
| "learning_rate": 4.2041968162083936e-05, |
| "loss": 0.041853628158569335, |
| "step": 5500 |
| }, |
| { |
| "epoch": 0.3241772554921995, |
| "grad_norm": 1.3855081796646118, |
| "learning_rate": 4.1897250361794504e-05, |
| "loss": 0.039269649982452394, |
| "step": 5600 |
| }, |
| { |
| "epoch": 0.3299661350545602, |
| "grad_norm": 0.8465105295181274, |
| "learning_rate": 4.1752532561505064e-05, |
| "loss": 0.03952852487564087, |
| "step": 5700 |
| }, |
| { |
| "epoch": 0.3357550146169209, |
| "grad_norm": 0.3637921214103699, |
| "learning_rate": 4.160781476121563e-05, |
| "loss": 0.03640928745269775, |
| "step": 5800 |
| }, |
| { |
| "epoch": 0.3415438941792816, |
| "grad_norm": 1.1824228763580322, |
| "learning_rate": 4.14630969609262e-05, |
| "loss": 0.03920279264450073, |
| "step": 5900 |
| }, |
| { |
| "epoch": 0.3473327737416423, |
| "grad_norm": 0.18278339505195618, |
| "learning_rate": 4.131837916063676e-05, |
| "loss": 0.03853697776794433, |
| "step": 6000 |
| }, |
| { |
| "epoch": 0.353121653304003, |
| "grad_norm": 0.6014581322669983, |
| "learning_rate": 4.1173661360347326e-05, |
| "loss": 0.03705440759658814, |
| "step": 6100 |
| }, |
| { |
| "epoch": 0.3589105328663637, |
| "grad_norm": 0.46531063318252563, |
| "learning_rate": 4.102894356005789e-05, |
| "loss": 0.03999743700027466, |
| "step": 6200 |
| }, |
| { |
| "epoch": 0.36469941242872445, |
| "grad_norm": 0.1958639770746231, |
| "learning_rate": 4.0884225759768454e-05, |
| "loss": 0.03664716958999634, |
| "step": 6300 |
| }, |
| { |
| "epoch": 0.37048829199108513, |
| "grad_norm": 0.8862726092338562, |
| "learning_rate": 4.0739507959479015e-05, |
| "loss": 0.0379261326789856, |
| "step": 6400 |
| }, |
| { |
| "epoch": 0.3762771715534458, |
| "grad_norm": 0.9592189192771912, |
| "learning_rate": 4.059479015918958e-05, |
| "loss": 0.03712235450744629, |
| "step": 6500 |
| }, |
| { |
| "epoch": 0.38206605111580655, |
| "grad_norm": 0.6839034557342529, |
| "learning_rate": 4.045007235890015e-05, |
| "loss": 0.03996901988983154, |
| "step": 6600 |
| }, |
| { |
| "epoch": 0.38785493067816723, |
| "grad_norm": 0.4922831952571869, |
| "learning_rate": 4.030535455861071e-05, |
| "loss": 0.03477381706237793, |
| "step": 6700 |
| }, |
| { |
| "epoch": 0.39364381024052797, |
| "grad_norm": 0.7703688740730286, |
| "learning_rate": 4.016063675832128e-05, |
| "loss": 0.03694391012191772, |
| "step": 6800 |
| }, |
| { |
| "epoch": 0.39943268980288865, |
| "grad_norm": 0.9563616514205933, |
| "learning_rate": 4.001591895803184e-05, |
| "loss": 0.038102331161499026, |
| "step": 6900 |
| }, |
| { |
| "epoch": 0.40522156936524933, |
| "grad_norm": 0.5397209525108337, |
| "learning_rate": 3.9871201157742405e-05, |
| "loss": 0.03435644626617432, |
| "step": 7000 |
| }, |
| { |
| "epoch": 0.4110104489276101, |
| "grad_norm": 0.33264386653900146, |
| "learning_rate": 3.972648335745297e-05, |
| "loss": 0.034106507301330566, |
| "step": 7100 |
| }, |
| { |
| "epoch": 0.41679932848997076, |
| "grad_norm": 0.4908270239830017, |
| "learning_rate": 3.958176555716353e-05, |
| "loss": 0.03293986797332764, |
| "step": 7200 |
| }, |
| { |
| "epoch": 0.4225882080523315, |
| "grad_norm": 1.4851772785186768, |
| "learning_rate": 3.9437047756874093e-05, |
| "loss": 0.041512365341186526, |
| "step": 7300 |
| }, |
| { |
| "epoch": 0.4283770876146922, |
| "grad_norm": 1.109512209892273, |
| "learning_rate": 3.929232995658466e-05, |
| "loss": 0.03690081834793091, |
| "step": 7400 |
| }, |
| { |
| "epoch": 0.43416596717705286, |
| "grad_norm": 1.2585190534591675, |
| "learning_rate": 3.914761215629523e-05, |
| "loss": 0.035063812732696535, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.43416596717705286, |
| "eval_accuracy": 0.9860376945037634, |
| "eval_f1": 0.8609184178154288, |
| "eval_loss": 0.06113845109939575, |
| "eval_precision": 0.8426553461929942, |
| "eval_recall": 0.8799906668049362, |
| "eval_runtime": 32.811, |
| "eval_samples_per_second": 380.97, |
| "eval_steps_per_second": 10.606, |
| "step": 7500 |
| }, |
| { |
| "epoch": 0.4399548467394136, |
| "grad_norm": 0.5866221189498901, |
| "learning_rate": 3.9002894356005795e-05, |
| "loss": 0.03833510398864746, |
| "step": 7600 |
| }, |
| { |
| "epoch": 0.4457437263017743, |
| "grad_norm": 0.3515453636646271, |
| "learning_rate": 3.885817655571635e-05, |
| "loss": 0.03402453660964966, |
| "step": 7700 |
| }, |
| { |
| "epoch": 0.451532605864135, |
| "grad_norm": 0.5338568687438965, |
| "learning_rate": 3.8713458755426916e-05, |
| "loss": 0.03593117237091065, |
| "step": 7800 |
| }, |
| { |
| "epoch": 0.4573214854264957, |
| "grad_norm": 0.42428648471832275, |
| "learning_rate": 3.8568740955137484e-05, |
| "loss": 0.036249592304229736, |
| "step": 7900 |
| }, |
| { |
| "epoch": 0.4631103649888564, |
| "grad_norm": 0.5833432078361511, |
| "learning_rate": 3.842402315484805e-05, |
| "loss": 0.033896231651306154, |
| "step": 8000 |
| }, |
| { |
| "epoch": 0.4688992445512171, |
| "grad_norm": 0.24067725241184235, |
| "learning_rate": 3.827930535455861e-05, |
| "loss": 0.03889668464660644, |
| "step": 8100 |
| }, |
| { |
| "epoch": 0.4746881241135778, |
| "grad_norm": 1.6670092344284058, |
| "learning_rate": 3.813458755426917e-05, |
| "loss": 0.036442911624908446, |
| "step": 8200 |
| }, |
| { |
| "epoch": 0.48047700367593854, |
| "grad_norm": 0.6004341244697571, |
| "learning_rate": 3.798986975397974e-05, |
| "loss": 0.037013137340545656, |
| "step": 8300 |
| }, |
| { |
| "epoch": 0.4862658832382992, |
| "grad_norm": 0.3944297432899475, |
| "learning_rate": 3.784515195369031e-05, |
| "loss": 0.036278882026672364, |
| "step": 8400 |
| }, |
| { |
| "epoch": 0.4920547628006599, |
| "grad_norm": 0.6234197020530701, |
| "learning_rate": 3.7700434153400874e-05, |
| "loss": 0.038663127422332765, |
| "step": 8500 |
| }, |
| { |
| "epoch": 0.49784364236302064, |
| "grad_norm": 0.403590589761734, |
| "learning_rate": 3.7555716353111435e-05, |
| "loss": 0.037761948108673095, |
| "step": 8600 |
| }, |
| { |
| "epoch": 0.5036325219253813, |
| "grad_norm": 0.44773924350738525, |
| "learning_rate": 3.7410998552821995e-05, |
| "loss": 0.03594711542129517, |
| "step": 8700 |
| }, |
| { |
| "epoch": 0.5094214014877421, |
| "grad_norm": 0.8247023820877075, |
| "learning_rate": 3.726628075253256e-05, |
| "loss": 0.03601913452148438, |
| "step": 8800 |
| }, |
| { |
| "epoch": 0.5152102810501028, |
| "grad_norm": 1.1590162515640259, |
| "learning_rate": 3.712156295224313e-05, |
| "loss": 0.03685137271881103, |
| "step": 8900 |
| }, |
| { |
| "epoch": 0.5209991606124634, |
| "grad_norm": 0.6263375878334045, |
| "learning_rate": 3.697684515195369e-05, |
| "loss": 0.036658039093017576, |
| "step": 9000 |
| }, |
| { |
| "epoch": 0.5267880401748242, |
| "grad_norm": 0.5682932138442993, |
| "learning_rate": 3.683212735166426e-05, |
| "loss": 0.03221390724182129, |
| "step": 9100 |
| }, |
| { |
| "epoch": 0.5325769197371849, |
| "grad_norm": 0.4283082187175751, |
| "learning_rate": 3.668740955137482e-05, |
| "loss": 0.03447196245193482, |
| "step": 9200 |
| }, |
| { |
| "epoch": 0.5383657992995455, |
| "grad_norm": 0.513534426689148, |
| "learning_rate": 3.6542691751085385e-05, |
| "loss": 0.03253072500228882, |
| "step": 9300 |
| }, |
| { |
| "epoch": 0.5441546788619063, |
| "grad_norm": 0.1790156066417694, |
| "learning_rate": 3.639797395079595e-05, |
| "loss": 0.03559961318969727, |
| "step": 9400 |
| }, |
| { |
| "epoch": 0.549943558424267, |
| "grad_norm": 1.3116710186004639, |
| "learning_rate": 3.625325615050651e-05, |
| "loss": 0.0339055609703064, |
| "step": 9500 |
| }, |
| { |
| "epoch": 0.5557324379866276, |
| "grad_norm": 0.7609701752662659, |
| "learning_rate": 3.610853835021708e-05, |
| "loss": 0.033302297592163084, |
| "step": 9600 |
| }, |
| { |
| "epoch": 0.5615213175489884, |
| "grad_norm": 0.6684858798980713, |
| "learning_rate": 3.596382054992764e-05, |
| "loss": 0.03894152641296387, |
| "step": 9700 |
| }, |
| { |
| "epoch": 0.5673101971113491, |
| "grad_norm": 0.25678926706314087, |
| "learning_rate": 3.581910274963821e-05, |
| "loss": 0.033086774349212644, |
| "step": 9800 |
| }, |
| { |
| "epoch": 0.5730990766737099, |
| "grad_norm": 1.2110711336135864, |
| "learning_rate": 3.567438494934877e-05, |
| "loss": 0.037588162422180174, |
| "step": 9900 |
| }, |
| { |
| "epoch": 0.5788879562360705, |
| "grad_norm": 0.7344034910202026, |
| "learning_rate": 3.5529667149059336e-05, |
| "loss": 0.03140888452529907, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.5788879562360705, |
| "eval_accuracy": 0.9871526392423338, |
| "eval_f1": 0.8693991367126322, |
| "eval_loss": 0.0593421570956707, |
| "eval_precision": 0.8542474665332166, |
| "eval_recall": 0.8850979985481696, |
| "eval_runtime": 32.4321, |
| "eval_samples_per_second": 385.42, |
| "eval_steps_per_second": 10.73, |
| "step": 10000 |
| }, |
| { |
| "epoch": 0.5846768357984312, |
| "grad_norm": 1.8443788290023804, |
| "learning_rate": 3.5384949348769903e-05, |
| "loss": 0.034446282386779783, |
| "step": 10100 |
| }, |
| { |
| "epoch": 0.590465715360792, |
| "grad_norm": 3.829798698425293, |
| "learning_rate": 3.5240231548480464e-05, |
| "loss": 0.031233868598937987, |
| "step": 10200 |
| }, |
| { |
| "epoch": 0.5962545949231526, |
| "grad_norm": 0.22466252744197845, |
| "learning_rate": 3.5095513748191025e-05, |
| "loss": 0.03304744958877563, |
| "step": 10300 |
| }, |
| { |
| "epoch": 0.6020434744855133, |
| "grad_norm": 0.2421451061964035, |
| "learning_rate": 3.495079594790159e-05, |
| "loss": 0.032493886947631834, |
| "step": 10400 |
| }, |
| { |
| "epoch": 0.6078323540478741, |
| "grad_norm": 6.648340225219727, |
| "learning_rate": 3.480607814761216e-05, |
| "loss": 0.034116878509521484, |
| "step": 10500 |
| }, |
| { |
| "epoch": 0.6136212336102347, |
| "grad_norm": 0.5613166093826294, |
| "learning_rate": 3.4661360347322726e-05, |
| "loss": 0.03445070266723633, |
| "step": 10600 |
| }, |
| { |
| "epoch": 0.6194101131725954, |
| "grad_norm": 0.24381904304027557, |
| "learning_rate": 3.451664254703329e-05, |
| "loss": 0.03339603900909424, |
| "step": 10700 |
| }, |
| { |
| "epoch": 0.6251989927349562, |
| "grad_norm": 0.5657049417495728, |
| "learning_rate": 3.437192474674385e-05, |
| "loss": 0.03580770492553711, |
| "step": 10800 |
| }, |
| { |
| "epoch": 0.6309878722973169, |
| "grad_norm": 0.16176201403141022, |
| "learning_rate": 3.4227206946454415e-05, |
| "loss": 0.03211415767669678, |
| "step": 10900 |
| }, |
| { |
| "epoch": 0.6367767518596775, |
| "grad_norm": 0.9659017324447632, |
| "learning_rate": 3.408248914616498e-05, |
| "loss": 0.032091012001037596, |
| "step": 11000 |
| }, |
| { |
| "epoch": 0.6425656314220383, |
| "grad_norm": 0.37151074409484863, |
| "learning_rate": 3.393777134587555e-05, |
| "loss": 0.030981764793395997, |
| "step": 11100 |
| }, |
| { |
| "epoch": 0.648354510984399, |
| "grad_norm": 0.24677230417728424, |
| "learning_rate": 3.37930535455861e-05, |
| "loss": 0.031599972248077396, |
| "step": 11200 |
| }, |
| { |
| "epoch": 0.6541433905467596, |
| "grad_norm": 0.5559372901916504, |
| "learning_rate": 3.364833574529667e-05, |
| "loss": 0.030684573650360106, |
| "step": 11300 |
| }, |
| { |
| "epoch": 0.6599322701091204, |
| "grad_norm": 0.309211790561676, |
| "learning_rate": 3.350361794500724e-05, |
| "loss": 0.03191032409667969, |
| "step": 11400 |
| }, |
| { |
| "epoch": 0.6657211496714811, |
| "grad_norm": 0.1990319788455963, |
| "learning_rate": 3.3358900144717805e-05, |
| "loss": 0.030000259876251222, |
| "step": 11500 |
| }, |
| { |
| "epoch": 0.6715100292338418, |
| "grad_norm": 0.6448608636856079, |
| "learning_rate": 3.3214182344428366e-05, |
| "loss": 0.03020137071609497, |
| "step": 11600 |
| }, |
| { |
| "epoch": 0.6772989087962025, |
| "grad_norm": 0.4042809009552002, |
| "learning_rate": 3.3069464544138926e-05, |
| "loss": 0.0288107705116272, |
| "step": 11700 |
| }, |
| { |
| "epoch": 0.6830877883585632, |
| "grad_norm": 0.48739156126976013, |
| "learning_rate": 3.2924746743849493e-05, |
| "loss": 0.02975822925567627, |
| "step": 11800 |
| }, |
| { |
| "epoch": 0.688876667920924, |
| "grad_norm": 0.3511544466018677, |
| "learning_rate": 3.278002894356006e-05, |
| "loss": 0.030011208057403566, |
| "step": 11900 |
| }, |
| { |
| "epoch": 0.6946655474832846, |
| "grad_norm": 0.20981945097446442, |
| "learning_rate": 3.263531114327063e-05, |
| "loss": 0.030392658710479737, |
| "step": 12000 |
| }, |
| { |
| "epoch": 0.7004544270456453, |
| "grad_norm": 1.725892186164856, |
| "learning_rate": 3.249059334298119e-05, |
| "loss": 0.0313503360748291, |
| "step": 12100 |
| }, |
| { |
| "epoch": 0.706243306608006, |
| "grad_norm": 0.16606950759887695, |
| "learning_rate": 3.234587554269175e-05, |
| "loss": 0.03735621213912964, |
| "step": 12200 |
| }, |
| { |
| "epoch": 0.7120321861703667, |
| "grad_norm": 0.4616575539112091, |
| "learning_rate": 3.2201157742402316e-05, |
| "loss": 0.03038395643234253, |
| "step": 12300 |
| }, |
| { |
| "epoch": 0.7178210657327274, |
| "grad_norm": 0.40673601627349854, |
| "learning_rate": 3.2056439942112884e-05, |
| "loss": 0.035007147789001464, |
| "step": 12400 |
| }, |
| { |
| "epoch": 0.7236099452950882, |
| "grad_norm": 0.27561140060424805, |
| "learning_rate": 3.1911722141823444e-05, |
| "loss": 0.032888281345367434, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.7236099452950882, |
| "eval_accuracy": 0.9868453058965968, |
| "eval_f1": 0.8583013393139975, |
| "eval_loss": 0.056312501430511475, |
| "eval_precision": 0.8394012243178269, |
| "eval_recall": 0.8780721767084932, |
| "eval_runtime": 32.461, |
| "eval_samples_per_second": 385.078, |
| "eval_steps_per_second": 10.721, |
| "step": 12500 |
| }, |
| { |
| "epoch": 0.7293988248574489, |
| "grad_norm": 0.7054789662361145, |
| "learning_rate": 3.176700434153401e-05, |
| "loss": 0.031692807674407956, |
| "step": 12600 |
| }, |
| { |
| "epoch": 0.7351877044198095, |
| "grad_norm": 0.31711819767951965, |
| "learning_rate": 3.162228654124457e-05, |
| "loss": 0.03108912706375122, |
| "step": 12700 |
| }, |
| { |
| "epoch": 0.7409765839821703, |
| "grad_norm": 0.24462510645389557, |
| "learning_rate": 3.147756874095514e-05, |
| "loss": 0.03314091444015503, |
| "step": 12800 |
| }, |
| { |
| "epoch": 0.746765463544531, |
| "grad_norm": 0.37591752409935, |
| "learning_rate": 3.133285094066571e-05, |
| "loss": 0.031636784076690676, |
| "step": 12900 |
| }, |
| { |
| "epoch": 0.7525543431068916, |
| "grad_norm": 0.16671085357666016, |
| "learning_rate": 3.118813314037627e-05, |
| "loss": 0.027464551925659178, |
| "step": 13000 |
| }, |
| { |
| "epoch": 0.7583432226692524, |
| "grad_norm": 1.5406149625778198, |
| "learning_rate": 3.1043415340086835e-05, |
| "loss": 0.03511166572570801, |
| "step": 13100 |
| }, |
| { |
| "epoch": 0.7641321022316131, |
| "grad_norm": 0.1813717931509018, |
| "learning_rate": 3.0898697539797395e-05, |
| "loss": 0.0282421875, |
| "step": 13200 |
| }, |
| { |
| "epoch": 0.7699209817939737, |
| "grad_norm": 0.25257691740989685, |
| "learning_rate": 3.075397973950796e-05, |
| "loss": 0.029794423580169677, |
| "step": 13300 |
| }, |
| { |
| "epoch": 0.7757098613563345, |
| "grad_norm": 0.534304141998291, |
| "learning_rate": 3.060926193921852e-05, |
| "loss": 0.030250344276428223, |
| "step": 13400 |
| }, |
| { |
| "epoch": 0.7814987409186952, |
| "grad_norm": 0.8739162087440491, |
| "learning_rate": 3.046454413892909e-05, |
| "loss": 0.031027588844299316, |
| "step": 13500 |
| }, |
| { |
| "epoch": 0.7872876204810559, |
| "grad_norm": 0.540971577167511, |
| "learning_rate": 3.0319826338639658e-05, |
| "loss": 0.03038400173187256, |
| "step": 13600 |
| }, |
| { |
| "epoch": 0.7930765000434166, |
| "grad_norm": 0.7075596451759338, |
| "learning_rate": 3.0175108538350215e-05, |
| "loss": 0.03126862049102783, |
| "step": 13700 |
| }, |
| { |
| "epoch": 0.7988653796057773, |
| "grad_norm": 0.22685867547988892, |
| "learning_rate": 3.0030390738060782e-05, |
| "loss": 0.03233437061309814, |
| "step": 13800 |
| }, |
| { |
| "epoch": 0.804654259168138, |
| "grad_norm": 3.8470654487609863, |
| "learning_rate": 2.9885672937771346e-05, |
| "loss": 0.03189040184020996, |
| "step": 13900 |
| }, |
| { |
| "epoch": 0.8104431387304987, |
| "grad_norm": 0.16255338490009308, |
| "learning_rate": 2.9740955137481913e-05, |
| "loss": 0.030559961795806886, |
| "step": 14000 |
| }, |
| { |
| "epoch": 0.8162320182928594, |
| "grad_norm": 0.4145297110080719, |
| "learning_rate": 2.9596237337192477e-05, |
| "loss": 0.027420687675476074, |
| "step": 14100 |
| }, |
| { |
| "epoch": 0.8220208978552201, |
| "grad_norm": 0.23214294016361237, |
| "learning_rate": 2.9451519536903038e-05, |
| "loss": 0.02794215202331543, |
| "step": 14200 |
| }, |
| { |
| "epoch": 0.8278097774175808, |
| "grad_norm": 0.18062900006771088, |
| "learning_rate": 2.9306801736613605e-05, |
| "loss": 0.028336293697357177, |
| "step": 14300 |
| }, |
| { |
| "epoch": 0.8335986569799415, |
| "grad_norm": 0.7959395051002502, |
| "learning_rate": 2.916208393632417e-05, |
| "loss": 0.028267955780029295, |
| "step": 14400 |
| }, |
| { |
| "epoch": 0.8393875365423022, |
| "grad_norm": 0.5956525802612305, |
| "learning_rate": 2.9017366136034736e-05, |
| "loss": 0.028834567070007325, |
| "step": 14500 |
| }, |
| { |
| "epoch": 0.845176416104663, |
| "grad_norm": 0.3449164628982544, |
| "learning_rate": 2.88726483357453e-05, |
| "loss": 0.03145886182785034, |
| "step": 14600 |
| }, |
| { |
| "epoch": 0.8509652956670236, |
| "grad_norm": 0.16741275787353516, |
| "learning_rate": 2.872793053545586e-05, |
| "loss": 0.026835625171661378, |
| "step": 14700 |
| }, |
| { |
| "epoch": 0.8567541752293844, |
| "grad_norm": 0.46983611583709717, |
| "learning_rate": 2.8583212735166425e-05, |
| "loss": 0.03117095708847046, |
| "step": 14800 |
| }, |
| { |
| "epoch": 0.8625430547917451, |
| "grad_norm": 0.22663377225399017, |
| "learning_rate": 2.8438494934876992e-05, |
| "loss": 0.028009552955627442, |
| "step": 14900 |
| }, |
| { |
| "epoch": 0.8683319343541057, |
| "grad_norm": 0.23122131824493408, |
| "learning_rate": 2.8293777134587556e-05, |
| "loss": 0.028059725761413575, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.8683319343541057, |
| "eval_accuracy": 0.9886279771195148, |
| "eval_f1": 0.8722127148731833, |
| "eval_loss": 0.04878264293074608, |
| "eval_precision": 0.8595016360432922, |
| "eval_recall": 0.8853054028829203, |
| "eval_runtime": 32.7448, |
| "eval_samples_per_second": 381.74, |
| "eval_steps_per_second": 10.628, |
| "step": 15000 |
| }, |
| { |
| "epoch": 0.8741208139164665, |
| "grad_norm": 0.6293914318084717, |
| "learning_rate": 2.8149059334298123e-05, |
| "loss": 0.030007996559143067, |
| "step": 15100 |
| }, |
| { |
| "epoch": 0.8799096934788272, |
| "grad_norm": 0.9233108758926392, |
| "learning_rate": 2.8004341534008684e-05, |
| "loss": 0.030494704246520996, |
| "step": 15200 |
| }, |
| { |
| "epoch": 0.8856985730411879, |
| "grad_norm": 0.28081145882606506, |
| "learning_rate": 2.7859623733719247e-05, |
| "loss": 0.026717009544372557, |
| "step": 15300 |
| }, |
| { |
| "epoch": 0.8914874526035486, |
| "grad_norm": 0.6658106446266174, |
| "learning_rate": 2.7714905933429815e-05, |
| "loss": 0.031118297576904298, |
| "step": 15400 |
| }, |
| { |
| "epoch": 0.8972763321659093, |
| "grad_norm": 0.940838098526001, |
| "learning_rate": 2.757018813314038e-05, |
| "loss": 0.03049607515335083, |
| "step": 15500 |
| }, |
| { |
| "epoch": 0.90306521172827, |
| "grad_norm": 0.12419965863227844, |
| "learning_rate": 2.7425470332850946e-05, |
| "loss": 0.026502671241760253, |
| "step": 15600 |
| }, |
| { |
| "epoch": 0.9088540912906307, |
| "grad_norm": 1.1096726655960083, |
| "learning_rate": 2.7280752532561503e-05, |
| "loss": 0.02529672861099243, |
| "step": 15700 |
| }, |
| { |
| "epoch": 0.9146429708529914, |
| "grad_norm": 0.3621964752674103, |
| "learning_rate": 2.713603473227207e-05, |
| "loss": 0.02636525869369507, |
| "step": 15800 |
| }, |
| { |
| "epoch": 0.9204318504153521, |
| "grad_norm": 0.21582575142383575, |
| "learning_rate": 2.6991316931982634e-05, |
| "loss": 0.03223026037216187, |
| "step": 15900 |
| }, |
| { |
| "epoch": 0.9262207299777128, |
| "grad_norm": 0.25821372866630554, |
| "learning_rate": 2.68465991316932e-05, |
| "loss": 0.026826431751251222, |
| "step": 16000 |
| }, |
| { |
| "epoch": 0.9320096095400735, |
| "grad_norm": 0.399935245513916, |
| "learning_rate": 2.6701881331403766e-05, |
| "loss": 0.028431081771850587, |
| "step": 16100 |
| }, |
| { |
| "epoch": 0.9377984891024342, |
| "grad_norm": 0.5942548513412476, |
| "learning_rate": 2.6557163531114326e-05, |
| "loss": 0.027340149879455565, |
| "step": 16200 |
| }, |
| { |
| "epoch": 0.943587368664795, |
| "grad_norm": 0.3555188775062561, |
| "learning_rate": 2.6412445730824893e-05, |
| "loss": 0.02627647399902344, |
| "step": 16300 |
| }, |
| { |
| "epoch": 0.9493762482271556, |
| "grad_norm": 0.14022284746170044, |
| "learning_rate": 2.6267727930535457e-05, |
| "loss": 0.027896411418914795, |
| "step": 16400 |
| }, |
| { |
| "epoch": 0.9551651277895163, |
| "grad_norm": 0.5751340389251709, |
| "learning_rate": 2.612301013024602e-05, |
| "loss": 0.02570687770843506, |
| "step": 16500 |
| }, |
| { |
| "epoch": 0.9609540073518771, |
| "grad_norm": 0.44881781935691833, |
| "learning_rate": 2.597829232995659e-05, |
| "loss": 0.030154764652252197, |
| "step": 16600 |
| }, |
| { |
| "epoch": 0.9667428869142377, |
| "grad_norm": 0.1963057667016983, |
| "learning_rate": 2.583357452966715e-05, |
| "loss": 0.031277601718902585, |
| "step": 16700 |
| }, |
| { |
| "epoch": 0.9725317664765984, |
| "grad_norm": 0.4031456410884857, |
| "learning_rate": 2.5688856729377713e-05, |
| "loss": 0.030245101451873778, |
| "step": 16800 |
| }, |
| { |
| "epoch": 0.9783206460389592, |
| "grad_norm": 0.29256272315979004, |
| "learning_rate": 2.554413892908828e-05, |
| "loss": 0.031824679374694825, |
| "step": 16900 |
| }, |
| { |
| "epoch": 0.9841095256013198, |
| "grad_norm": 0.42400193214416504, |
| "learning_rate": 2.5399421128798844e-05, |
| "loss": 0.028290491104125976, |
| "step": 17000 |
| }, |
| { |
| "epoch": 0.9898984051636806, |
| "grad_norm": 1.173027753829956, |
| "learning_rate": 2.525470332850941e-05, |
| "loss": 0.02786562204360962, |
| "step": 17100 |
| }, |
| { |
| "epoch": 0.9956872847260413, |
| "grad_norm": 0.13319961726665497, |
| "learning_rate": 2.510998552821997e-05, |
| "loss": 0.030247974395751952, |
| "step": 17200 |
| }, |
| { |
| "epoch": 1.0014472198905902, |
| "grad_norm": 0.8692908883094788, |
| "learning_rate": 2.4965267727930536e-05, |
| "loss": 0.029876837730407713, |
| "step": 17300 |
| }, |
| { |
| "epoch": 1.0072360994529508, |
| "grad_norm": 0.132135272026062, |
| "learning_rate": 2.48205499276411e-05, |
| "loss": 0.028613386154174806, |
| "step": 17400 |
| }, |
| { |
| "epoch": 1.0130249790153116, |
| "grad_norm": 0.43961101770401, |
| "learning_rate": 2.4675832127351664e-05, |
| "loss": 0.02742033004760742, |
| "step": 17500 |
| }, |
| { |
| "epoch": 1.0130249790153116, |
| "eval_accuracy": 0.9893659906022149, |
| "eval_f1": 0.8761479591836735, |
| "eval_loss": 0.04772739112377167, |
| "eval_precision": 0.8623330320377623, |
| "eval_recall": 0.8904127346261537, |
| "eval_runtime": 32.383, |
| "eval_samples_per_second": 386.005, |
| "eval_steps_per_second": 10.746, |
| "step": 17500 |
| }, |
| { |
| "epoch": 1.0188138585776723, |
| "grad_norm": 1.3907451629638672, |
| "learning_rate": 2.453111432706223e-05, |
| "loss": 0.026595606803894042, |
| "step": 17600 |
| }, |
| { |
| "epoch": 1.0246027381400329, |
| "grad_norm": 0.15164747834205627, |
| "learning_rate": 2.4386396526772795e-05, |
| "loss": 0.024490804672241212, |
| "step": 17700 |
| }, |
| { |
| "epoch": 1.0303916177023937, |
| "grad_norm": 0.44126424193382263, |
| "learning_rate": 2.424167872648336e-05, |
| "loss": 0.023133702278137207, |
| "step": 17800 |
| }, |
| { |
| "epoch": 1.0361804972647544, |
| "grad_norm": 0.14820177853107452, |
| "learning_rate": 2.4096960926193923e-05, |
| "loss": 0.02218240737915039, |
| "step": 17900 |
| }, |
| { |
| "epoch": 1.0419693768271152, |
| "grad_norm": 0.5905808210372925, |
| "learning_rate": 2.3952243125904487e-05, |
| "loss": 0.020209569931030274, |
| "step": 18000 |
| }, |
| { |
| "epoch": 1.0477582563894758, |
| "grad_norm": 0.17811790108680725, |
| "learning_rate": 2.380752532561505e-05, |
| "loss": 0.023463377952575682, |
| "step": 18100 |
| }, |
| { |
| "epoch": 1.0535471359518365, |
| "grad_norm": 0.18444955348968506, |
| "learning_rate": 2.3662807525325618e-05, |
| "loss": 0.02178111791610718, |
| "step": 18200 |
| }, |
| { |
| "epoch": 1.0593360155141973, |
| "grad_norm": 0.3301508128643036, |
| "learning_rate": 2.351808972503618e-05, |
| "loss": 0.02543956518173218, |
| "step": 18300 |
| }, |
| { |
| "epoch": 1.065124895076558, |
| "grad_norm": 0.1861371099948883, |
| "learning_rate": 2.3373371924746746e-05, |
| "loss": 0.024148540496826174, |
| "step": 18400 |
| }, |
| { |
| "epoch": 1.0709137746389186, |
| "grad_norm": 0.3149067163467407, |
| "learning_rate": 2.3228654124457306e-05, |
| "loss": 0.023476483821868895, |
| "step": 18500 |
| }, |
| { |
| "epoch": 1.0767026542012794, |
| "grad_norm": 0.17988641560077667, |
| "learning_rate": 2.3083936324167874e-05, |
| "loss": 0.02149003028869629, |
| "step": 18600 |
| }, |
| { |
| "epoch": 1.08249153376364, |
| "grad_norm": 0.1312856823205948, |
| "learning_rate": 2.2939218523878438e-05, |
| "loss": 0.023462765216827393, |
| "step": 18700 |
| }, |
| { |
| "epoch": 1.0882804133260007, |
| "grad_norm": 0.20052380859851837, |
| "learning_rate": 2.2794500723589e-05, |
| "loss": 0.025409071445465087, |
| "step": 18800 |
| }, |
| { |
| "epoch": 1.0940692928883615, |
| "grad_norm": 0.2507052421569824, |
| "learning_rate": 2.264978292329957e-05, |
| "loss": 0.026879096031188966, |
| "step": 18900 |
| }, |
| { |
| "epoch": 1.0998581724507221, |
| "grad_norm": 0.15323978662490845, |
| "learning_rate": 2.250506512301013e-05, |
| "loss": 0.023210296630859373, |
| "step": 19000 |
| }, |
| { |
| "epoch": 1.1056470520130828, |
| "grad_norm": 0.1345296949148178, |
| "learning_rate": 2.2360347322720697e-05, |
| "loss": 0.020816826820373537, |
| "step": 19100 |
| }, |
| { |
| "epoch": 1.1114359315754436, |
| "grad_norm": 0.2258956879377365, |
| "learning_rate": 2.221562952243126e-05, |
| "loss": 0.023039839267730712, |
| "step": 19200 |
| }, |
| { |
| "epoch": 1.1172248111378043, |
| "grad_norm": 0.17817442119121552, |
| "learning_rate": 2.2070911722141824e-05, |
| "loss": 0.02491870164871216, |
| "step": 19300 |
| }, |
| { |
| "epoch": 1.1230136907001649, |
| "grad_norm": 0.23916760087013245, |
| "learning_rate": 2.192619392185239e-05, |
| "loss": 0.02417280673980713, |
| "step": 19400 |
| }, |
| { |
| "epoch": 1.1288025702625257, |
| "grad_norm": 0.5979987978935242, |
| "learning_rate": 2.1781476121562952e-05, |
| "loss": 0.025201258659362794, |
| "step": 19500 |
| }, |
| { |
| "epoch": 1.1345914498248864, |
| "grad_norm": 0.7608785033226013, |
| "learning_rate": 2.1636758321273516e-05, |
| "loss": 0.02450251579284668, |
| "step": 19600 |
| }, |
| { |
| "epoch": 1.1403803293872472, |
| "grad_norm": 1.2524911165237427, |
| "learning_rate": 2.1492040520984084e-05, |
| "loss": 0.02275413990020752, |
| "step": 19700 |
| }, |
| { |
| "epoch": 1.1461692089496078, |
| "grad_norm": 0.4412006139755249, |
| "learning_rate": 2.1347322720694647e-05, |
| "loss": 0.023493762016296386, |
| "step": 19800 |
| }, |
| { |
| "epoch": 1.1519580885119685, |
| "grad_norm": 0.1437130719423294, |
| "learning_rate": 2.120260492040521e-05, |
| "loss": 0.024696297645568847, |
| "step": 19900 |
| }, |
| { |
| "epoch": 1.1577469680743293, |
| "grad_norm": 0.6046096086502075, |
| "learning_rate": 2.1057887120115775e-05, |
| "loss": 0.023616561889648436, |
| "step": 20000 |
| }, |
| { |
| "epoch": 1.1577469680743293, |
| "eval_accuracy": 0.9893949323074188, |
| "eval_f1": 0.8802002835647409, |
| "eval_loss": 0.04831967130303383, |
| "eval_precision": 0.8675126520129919, |
| "eval_recall": 0.8932645442289744, |
| "eval_runtime": 32.3669, |
| "eval_samples_per_second": 386.197, |
| "eval_steps_per_second": 10.752, |
| "step": 20000 |
| }, |
| { |
| "epoch": 1.16353584763669, |
| "grad_norm": 0.3236995339393616, |
| "learning_rate": 2.091316931982634e-05, |
| "loss": 0.024802815914154053, |
| "step": 20100 |
| }, |
| { |
| "epoch": 1.1693247271990506, |
| "grad_norm": 1.4749183654785156, |
| "learning_rate": 2.0768451519536907e-05, |
| "loss": 0.0224468994140625, |
| "step": 20200 |
| }, |
| { |
| "epoch": 1.1751136067614114, |
| "grad_norm": 0.7911006212234497, |
| "learning_rate": 2.0623733719247467e-05, |
| "loss": 0.021509246826171877, |
| "step": 20300 |
| }, |
| { |
| "epoch": 1.180902486323772, |
| "grad_norm": 0.23929397761821747, |
| "learning_rate": 2.0479015918958034e-05, |
| "loss": 0.02354356527328491, |
| "step": 20400 |
| }, |
| { |
| "epoch": 1.1866913658861327, |
| "grad_norm": 0.28904756903648376, |
| "learning_rate": 2.0334298118668595e-05, |
| "loss": 0.021830506324768066, |
| "step": 20500 |
| }, |
| { |
| "epoch": 1.1924802454484935, |
| "grad_norm": 0.24184764921665192, |
| "learning_rate": 2.0189580318379162e-05, |
| "loss": 0.022603347301483154, |
| "step": 20600 |
| }, |
| { |
| "epoch": 1.1982691250108541, |
| "grad_norm": 0.9602212905883789, |
| "learning_rate": 2.0044862518089726e-05, |
| "loss": 0.024089906215667724, |
| "step": 20700 |
| }, |
| { |
| "epoch": 1.2040580045732148, |
| "grad_norm": 0.18168984353542328, |
| "learning_rate": 1.990014471780029e-05, |
| "loss": 0.02312016725540161, |
| "step": 20800 |
| }, |
| { |
| "epoch": 1.2098468841355756, |
| "grad_norm": 0.5313889384269714, |
| "learning_rate": 1.9755426917510854e-05, |
| "loss": 0.022191174030303955, |
| "step": 20900 |
| }, |
| { |
| "epoch": 1.2156357636979362, |
| "grad_norm": 1.4984052181243896, |
| "learning_rate": 1.9610709117221418e-05, |
| "loss": 0.020048980712890626, |
| "step": 21000 |
| }, |
| { |
| "epoch": 1.2214246432602969, |
| "grad_norm": 0.9525922536849976, |
| "learning_rate": 1.9465991316931985e-05, |
| "loss": 0.023054752349853515, |
| "step": 21100 |
| }, |
| { |
| "epoch": 1.2272135228226577, |
| "grad_norm": 0.20787988603115082, |
| "learning_rate": 1.932127351664255e-05, |
| "loss": 0.02273550271987915, |
| "step": 21200 |
| }, |
| { |
| "epoch": 1.2330024023850183, |
| "grad_norm": 0.19591836631298065, |
| "learning_rate": 1.9176555716353113e-05, |
| "loss": 0.02390467405319214, |
| "step": 21300 |
| }, |
| { |
| "epoch": 1.2387912819473792, |
| "grad_norm": 0.1931363046169281, |
| "learning_rate": 1.9031837916063677e-05, |
| "loss": 0.023615493774414062, |
| "step": 21400 |
| }, |
| { |
| "epoch": 1.2445801615097398, |
| "grad_norm": 0.16858945786952972, |
| "learning_rate": 1.888712011577424e-05, |
| "loss": 0.02515049457550049, |
| "step": 21500 |
| }, |
| { |
| "epoch": 1.2503690410721005, |
| "grad_norm": 0.1835613250732422, |
| "learning_rate": 1.8742402315484805e-05, |
| "loss": 0.02049088478088379, |
| "step": 21600 |
| }, |
| { |
| "epoch": 1.256157920634461, |
| "grad_norm": 1.2275546789169312, |
| "learning_rate": 1.8597684515195372e-05, |
| "loss": 0.022141406536102293, |
| "step": 21700 |
| }, |
| { |
| "epoch": 1.261946800196822, |
| "grad_norm": 1.089022159576416, |
| "learning_rate": 1.8452966714905933e-05, |
| "loss": 0.022219130992889403, |
| "step": 21800 |
| }, |
| { |
| "epoch": 1.2677356797591826, |
| "grad_norm": 0.32157450914382935, |
| "learning_rate": 1.83082489146165e-05, |
| "loss": 0.025665369033813477, |
| "step": 21900 |
| }, |
| { |
| "epoch": 1.2735245593215434, |
| "grad_norm": 0.28916555643081665, |
| "learning_rate": 1.816353111432706e-05, |
| "loss": 0.022141153812408446, |
| "step": 22000 |
| }, |
| { |
| "epoch": 1.279313438883904, |
| "grad_norm": 0.4547838866710663, |
| "learning_rate": 1.8018813314037628e-05, |
| "loss": 0.02140897512435913, |
| "step": 22100 |
| }, |
| { |
| "epoch": 1.2851023184462647, |
| "grad_norm": 0.2740997076034546, |
| "learning_rate": 1.787409551374819e-05, |
| "loss": 0.01996962308883667, |
| "step": 22200 |
| }, |
| { |
| "epoch": 1.2908911980086255, |
| "grad_norm": 0.17777739465236664, |
| "learning_rate": 1.7729377713458756e-05, |
| "loss": 0.020210413932800292, |
| "step": 22300 |
| }, |
| { |
| "epoch": 1.2966800775709861, |
| "grad_norm": 0.2631765604019165, |
| "learning_rate": 1.7584659913169323e-05, |
| "loss": 0.02249443531036377, |
| "step": 22400 |
| }, |
| { |
| "epoch": 1.3024689571333468, |
| "grad_norm": 2.136570453643799, |
| "learning_rate": 1.7439942112879883e-05, |
| "loss": 0.02353119134902954, |
| "step": 22500 |
| }, |
| { |
| "epoch": 1.3024689571333468, |
| "eval_accuracy": 0.9900812641736834, |
| "eval_f1": 0.8825427722569409, |
| "eval_loss": 0.046070199459791183, |
| "eval_precision": 0.8720012146978439, |
| "eval_recall": 0.8933423208545058, |
| "eval_runtime": 32.9824, |
| "eval_samples_per_second": 378.99, |
| "eval_steps_per_second": 10.551, |
| "step": 22500 |
| }, |
| { |
| "epoch": 1.3082578366957076, |
| "grad_norm": 0.16276246309280396, |
| "learning_rate": 1.729522431259045e-05, |
| "loss": 0.021528096199035646, |
| "step": 22600 |
| }, |
| { |
| "epoch": 1.3140467162580682, |
| "grad_norm": 0.1779128611087799, |
| "learning_rate": 1.7150506512301015e-05, |
| "loss": 0.021381580829620363, |
| "step": 22700 |
| }, |
| { |
| "epoch": 1.3198355958204289, |
| "grad_norm": 0.12842315435409546, |
| "learning_rate": 1.700578871201158e-05, |
| "loss": 0.022963883876800536, |
| "step": 22800 |
| }, |
| { |
| "epoch": 1.3256244753827897, |
| "grad_norm": 0.38589125871658325, |
| "learning_rate": 1.6861070911722142e-05, |
| "loss": 0.025010223388671874, |
| "step": 22900 |
| }, |
| { |
| "epoch": 1.3314133549451503, |
| "grad_norm": 0.17700180411338806, |
| "learning_rate": 1.6716353111432706e-05, |
| "loss": 0.025014562606811522, |
| "step": 23000 |
| }, |
| { |
| "epoch": 1.3372022345075112, |
| "grad_norm": 0.1977829933166504, |
| "learning_rate": 1.657163531114327e-05, |
| "loss": 0.019888348579406738, |
| "step": 23100 |
| }, |
| { |
| "epoch": 1.3429911140698718, |
| "grad_norm": 0.21066619455814362, |
| "learning_rate": 1.6426917510853838e-05, |
| "loss": 0.021409926414489747, |
| "step": 23200 |
| }, |
| { |
| "epoch": 1.3487799936322324, |
| "grad_norm": 0.1378607600927353, |
| "learning_rate": 1.6282199710564398e-05, |
| "loss": 0.02372417688369751, |
| "step": 23300 |
| }, |
| { |
| "epoch": 1.354568873194593, |
| "grad_norm": 0.3123644292354584, |
| "learning_rate": 1.6137481910274965e-05, |
| "loss": 0.021678264141082763, |
| "step": 23400 |
| }, |
| { |
| "epoch": 1.360357752756954, |
| "grad_norm": 0.19485028088092804, |
| "learning_rate": 1.599276410998553e-05, |
| "loss": 0.024895586967468263, |
| "step": 23500 |
| }, |
| { |
| "epoch": 1.3661466323193145, |
| "grad_norm": 0.2177608162164688, |
| "learning_rate": 1.5848046309696093e-05, |
| "loss": 0.020630009174346924, |
| "step": 23600 |
| }, |
| { |
| "epoch": 1.3719355118816754, |
| "grad_norm": 1.222109317779541, |
| "learning_rate": 1.570332850940666e-05, |
| "loss": 0.022696774005889892, |
| "step": 23700 |
| }, |
| { |
| "epoch": 1.377724391444036, |
| "grad_norm": 0.5391749143600464, |
| "learning_rate": 1.555861070911722e-05, |
| "loss": 0.019349946975708007, |
| "step": 23800 |
| }, |
| { |
| "epoch": 1.3835132710063966, |
| "grad_norm": 0.8667725920677185, |
| "learning_rate": 1.541389290882779e-05, |
| "loss": 0.021944737434387206, |
| "step": 23900 |
| }, |
| { |
| "epoch": 1.3893021505687575, |
| "grad_norm": 0.2695491909980774, |
| "learning_rate": 1.526917510853835e-05, |
| "loss": 0.023090031147003174, |
| "step": 24000 |
| }, |
| { |
| "epoch": 1.3950910301311181, |
| "grad_norm": 0.470904678106308, |
| "learning_rate": 1.5124457308248916e-05, |
| "loss": 0.023335537910461425, |
| "step": 24100 |
| }, |
| { |
| "epoch": 1.4008799096934788, |
| "grad_norm": 0.12368060648441315, |
| "learning_rate": 1.4979739507959482e-05, |
| "loss": 0.020203118324279786, |
| "step": 24200 |
| }, |
| { |
| "epoch": 1.4066687892558396, |
| "grad_norm": 0.8601803779602051, |
| "learning_rate": 1.4835021707670044e-05, |
| "loss": 0.024733288288116453, |
| "step": 24300 |
| }, |
| { |
| "epoch": 1.4124576688182002, |
| "grad_norm": 0.12211176753044128, |
| "learning_rate": 1.469030390738061e-05, |
| "loss": 0.0239523983001709, |
| "step": 24400 |
| }, |
| { |
| "epoch": 1.4182465483805609, |
| "grad_norm": 0.15338654816150665, |
| "learning_rate": 1.4545586107091172e-05, |
| "loss": 0.02348433494567871, |
| "step": 24500 |
| }, |
| { |
| "epoch": 1.4240354279429217, |
| "grad_norm": 0.14509518444538116, |
| "learning_rate": 1.4400868306801738e-05, |
| "loss": 0.024761919975280763, |
| "step": 24600 |
| }, |
| { |
| "epoch": 1.4298243075052823, |
| "grad_norm": 0.3267483115196228, |
| "learning_rate": 1.4256150506512303e-05, |
| "loss": 0.02035109281539917, |
| "step": 24700 |
| }, |
| { |
| "epoch": 1.4356131870676432, |
| "grad_norm": 0.7865790128707886, |
| "learning_rate": 1.4111432706222865e-05, |
| "loss": 0.020213403701782227, |
| "step": 24800 |
| }, |
| { |
| "epoch": 1.4414020666300038, |
| "grad_norm": 0.6819100975990295, |
| "learning_rate": 1.3966714905933431e-05, |
| "loss": 0.02440610408782959, |
| "step": 24900 |
| }, |
| { |
| "epoch": 1.4471909461923644, |
| "grad_norm": 0.22851134836673737, |
| "learning_rate": 1.3821997105643993e-05, |
| "loss": 0.019472746849060057, |
| "step": 25000 |
| }, |
| { |
| "epoch": 1.4471909461923644, |
| "eval_accuracy": 0.9902693852575088, |
| "eval_f1": 0.8853341536568484, |
| "eval_loss": 0.04390441253781319, |
| "eval_precision": 0.8755260355929625, |
| "eval_recall": 0.8953645131183242, |
| "eval_runtime": 32.2884, |
| "eval_samples_per_second": 387.136, |
| "eval_steps_per_second": 10.778, |
| "step": 25000 |
| }, |
| { |
| "epoch": 1.452979825754725, |
| "grad_norm": 0.3433781564235687, |
| "learning_rate": 1.3677279305354559e-05, |
| "loss": 0.021682107448577882, |
| "step": 25100 |
| }, |
| { |
| "epoch": 1.458768705317086, |
| "grad_norm": 0.2263897955417633, |
| "learning_rate": 1.3532561505065124e-05, |
| "loss": 0.020724060535430907, |
| "step": 25200 |
| }, |
| { |
| "epoch": 1.4645575848794465, |
| "grad_norm": 0.5709189772605896, |
| "learning_rate": 1.3387843704775688e-05, |
| "loss": 0.023897509574890136, |
| "step": 25300 |
| }, |
| { |
| "epoch": 1.4703464644418074, |
| "grad_norm": 0.13530676066875458, |
| "learning_rate": 1.3243125904486254e-05, |
| "loss": 0.022753942012786865, |
| "step": 25400 |
| }, |
| { |
| "epoch": 1.476135344004168, |
| "grad_norm": 0.42096275091171265, |
| "learning_rate": 1.3098408104196816e-05, |
| "loss": 0.021938772201538087, |
| "step": 25500 |
| }, |
| { |
| "epoch": 1.4819242235665286, |
| "grad_norm": 2.0374867916107178, |
| "learning_rate": 1.2953690303907382e-05, |
| "loss": 0.023144633769989015, |
| "step": 25600 |
| }, |
| { |
| "epoch": 1.4877131031288893, |
| "grad_norm": 0.22001959383487701, |
| "learning_rate": 1.2808972503617947e-05, |
| "loss": 0.021838405132293702, |
| "step": 25700 |
| }, |
| { |
| "epoch": 1.4935019826912501, |
| "grad_norm": 0.2641585171222687, |
| "learning_rate": 1.266425470332851e-05, |
| "loss": 0.021520249843597412, |
| "step": 25800 |
| }, |
| { |
| "epoch": 1.4992908622536107, |
| "grad_norm": 0.191110759973526, |
| "learning_rate": 1.2519536903039075e-05, |
| "loss": 0.01960566997528076, |
| "step": 25900 |
| }, |
| { |
| "epoch": 1.5050797418159716, |
| "grad_norm": 1.1695928573608398, |
| "learning_rate": 1.2374819102749639e-05, |
| "loss": 0.022222683429718018, |
| "step": 26000 |
| }, |
| { |
| "epoch": 1.5108686213783322, |
| "grad_norm": 0.19455936551094055, |
| "learning_rate": 1.2230101302460203e-05, |
| "loss": 0.020654516220092775, |
| "step": 26100 |
| }, |
| { |
| "epoch": 1.5166575009406928, |
| "grad_norm": 0.15471012890338898, |
| "learning_rate": 1.2085383502170767e-05, |
| "loss": 0.022886896133422853, |
| "step": 26200 |
| }, |
| { |
| "epoch": 1.5224463805030535, |
| "grad_norm": 1.4628580808639526, |
| "learning_rate": 1.1940665701881331e-05, |
| "loss": 0.02138796329498291, |
| "step": 26300 |
| }, |
| { |
| "epoch": 1.5282352600654143, |
| "grad_norm": 0.49336832761764526, |
| "learning_rate": 1.1795947901591896e-05, |
| "loss": 0.021676077842712402, |
| "step": 26400 |
| }, |
| { |
| "epoch": 1.5340241396277752, |
| "grad_norm": 0.18711985647678375, |
| "learning_rate": 1.1651230101302462e-05, |
| "loss": 0.022171633243560793, |
| "step": 26500 |
| }, |
| { |
| "epoch": 1.5398130191901358, |
| "grad_norm": 0.197096049785614, |
| "learning_rate": 1.1506512301013026e-05, |
| "loss": 0.021774094104766845, |
| "step": 26600 |
| }, |
| { |
| "epoch": 1.5456018987524964, |
| "grad_norm": 0.17335055768489838, |
| "learning_rate": 1.136179450072359e-05, |
| "loss": 0.019333064556121826, |
| "step": 26700 |
| }, |
| { |
| "epoch": 1.551390778314857, |
| "grad_norm": 0.9639333486557007, |
| "learning_rate": 1.1217076700434154e-05, |
| "loss": 0.021504545211791994, |
| "step": 26800 |
| }, |
| { |
| "epoch": 1.557179657877218, |
| "grad_norm": 0.7893505692481995, |
| "learning_rate": 1.1072358900144718e-05, |
| "loss": 0.01952719211578369, |
| "step": 26900 |
| }, |
| { |
| "epoch": 1.5629685374395785, |
| "grad_norm": 0.11000892519950867, |
| "learning_rate": 1.0927641099855283e-05, |
| "loss": 0.023864905834197998, |
| "step": 27000 |
| }, |
| { |
| "epoch": 1.5687574170019394, |
| "grad_norm": 1.0035220384597778, |
| "learning_rate": 1.0782923299565847e-05, |
| "loss": 0.022396252155303956, |
| "step": 27100 |
| }, |
| { |
| "epoch": 1.5745462965643, |
| "grad_norm": 0.18122516572475433, |
| "learning_rate": 1.0638205499276411e-05, |
| "loss": 0.02149169921875, |
| "step": 27200 |
| }, |
| { |
| "epoch": 1.5803351761266606, |
| "grad_norm": 0.4648451805114746, |
| "learning_rate": 1.0493487698986975e-05, |
| "loss": 0.02050316572189331, |
| "step": 27300 |
| }, |
| { |
| "epoch": 1.5861240556890213, |
| "grad_norm": 0.7127793431282043, |
| "learning_rate": 1.0348769898697539e-05, |
| "loss": 0.021997590065002442, |
| "step": 27400 |
| }, |
| { |
| "epoch": 1.591912935251382, |
| "grad_norm": 0.41411492228507996, |
| "learning_rate": 1.0204052098408105e-05, |
| "loss": 0.02217501640319824, |
| "step": 27500 |
| }, |
| { |
| "epoch": 1.591912935251382, |
| "eval_accuracy": 0.9900791969090259, |
| "eval_f1": 0.886322562452736, |
| "eval_loss": 0.04419637843966484, |
| "eval_precision": 0.8764925089360408, |
| "eval_recall": 0.8963756092502333, |
| "eval_runtime": 32.2485, |
| "eval_samples_per_second": 387.615, |
| "eval_steps_per_second": 10.791, |
| "step": 27500 |
| }, |
| { |
| "epoch": 1.597701814813743, |
| "grad_norm": 0.7603561878204346, |
| "learning_rate": 1.005933429811867e-05, |
| "loss": 0.019264650344848634, |
| "step": 27600 |
| }, |
| { |
| "epoch": 1.6034906943761036, |
| "grad_norm": 0.44727957248687744, |
| "learning_rate": 9.914616497829234e-06, |
| "loss": 0.024952189922332765, |
| "step": 27700 |
| }, |
| { |
| "epoch": 1.6092795739384642, |
| "grad_norm": 0.3509667217731476, |
| "learning_rate": 9.769898697539798e-06, |
| "loss": 0.02343012571334839, |
| "step": 27800 |
| }, |
| { |
| "epoch": 1.6150684535008248, |
| "grad_norm": 0.1554228961467743, |
| "learning_rate": 9.625180897250362e-06, |
| "loss": 0.022485718727111817, |
| "step": 27900 |
| }, |
| { |
| "epoch": 1.6208573330631855, |
| "grad_norm": 0.48960787057876587, |
| "learning_rate": 9.480463096960928e-06, |
| "loss": 0.022733292579650878, |
| "step": 28000 |
| }, |
| { |
| "epoch": 1.6266462126255463, |
| "grad_norm": 0.4882723093032837, |
| "learning_rate": 9.335745296671492e-06, |
| "loss": 0.02101508617401123, |
| "step": 28100 |
| }, |
| { |
| "epoch": 1.6324350921879072, |
| "grad_norm": 0.3346649408340454, |
| "learning_rate": 9.191027496382055e-06, |
| "loss": 0.02136120557785034, |
| "step": 28200 |
| }, |
| { |
| "epoch": 1.6382239717502678, |
| "grad_norm": 0.43525007367134094, |
| "learning_rate": 9.04630969609262e-06, |
| "loss": 0.019716554880142213, |
| "step": 28300 |
| }, |
| { |
| "epoch": 1.6440128513126284, |
| "grad_norm": 1.4369014501571655, |
| "learning_rate": 8.901591895803183e-06, |
| "loss": 0.021362924575805665, |
| "step": 28400 |
| }, |
| { |
| "epoch": 1.649801730874989, |
| "grad_norm": 0.36247485876083374, |
| "learning_rate": 8.756874095513747e-06, |
| "loss": 0.022539958953857422, |
| "step": 28500 |
| }, |
| { |
| "epoch": 1.65559061043735, |
| "grad_norm": 0.1396828442811966, |
| "learning_rate": 8.612156295224313e-06, |
| "loss": 0.01977320432662964, |
| "step": 28600 |
| }, |
| { |
| "epoch": 1.6613794899997105, |
| "grad_norm": 0.09776929765939713, |
| "learning_rate": 8.467438494934877e-06, |
| "loss": 0.017783541679382325, |
| "step": 28700 |
| }, |
| { |
| "epoch": 1.6671683695620714, |
| "grad_norm": 0.20341573655605316, |
| "learning_rate": 8.322720694645442e-06, |
| "loss": 0.01956522822380066, |
| "step": 28800 |
| }, |
| { |
| "epoch": 1.672957249124432, |
| "grad_norm": 0.24861733615398407, |
| "learning_rate": 8.178002894356006e-06, |
| "loss": 0.02077195405960083, |
| "step": 28900 |
| }, |
| { |
| "epoch": 1.6787461286867926, |
| "grad_norm": 0.14596840739250183, |
| "learning_rate": 8.03328509406657e-06, |
| "loss": 0.019236416816711427, |
| "step": 29000 |
| }, |
| { |
| "epoch": 1.6845350082491533, |
| "grad_norm": 0.1839938908815384, |
| "learning_rate": 7.888567293777136e-06, |
| "loss": 0.023349857330322264, |
| "step": 29100 |
| }, |
| { |
| "epoch": 1.690323887811514, |
| "grad_norm": 0.5248807668685913, |
| "learning_rate": 7.7438494934877e-06, |
| "loss": 0.020743234157562254, |
| "step": 29200 |
| }, |
| { |
| "epoch": 1.696112767373875, |
| "grad_norm": 0.23738259077072144, |
| "learning_rate": 7.599131693198264e-06, |
| "loss": 0.02022059917449951, |
| "step": 29300 |
| }, |
| { |
| "epoch": 1.7019016469362356, |
| "grad_norm": 0.6698647737503052, |
| "learning_rate": 7.4544138929088275e-06, |
| "loss": 0.01865126848220825, |
| "step": 29400 |
| }, |
| { |
| "epoch": 1.7076905264985962, |
| "grad_norm": 1.6622151136398315, |
| "learning_rate": 7.309696092619392e-06, |
| "loss": 0.020002634525299073, |
| "step": 29500 |
| }, |
| { |
| "epoch": 1.7134794060609568, |
| "grad_norm": 0.3179205358028412, |
| "learning_rate": 7.164978292329957e-06, |
| "loss": 0.021126785278320313, |
| "step": 29600 |
| }, |
| { |
| "epoch": 1.7192682856233175, |
| "grad_norm": 0.10927774757146835, |
| "learning_rate": 7.020260492040522e-06, |
| "loss": 0.018858327865600585, |
| "step": 29700 |
| }, |
| { |
| "epoch": 1.7250571651856783, |
| "grad_norm": 0.186599463224411, |
| "learning_rate": 6.875542691751086e-06, |
| "loss": 0.01988905429840088, |
| "step": 29800 |
| }, |
| { |
| "epoch": 1.7308460447480392, |
| "grad_norm": 0.22831496596336365, |
| "learning_rate": 6.73082489146165e-06, |
| "loss": 0.020202479362487792, |
| "step": 29900 |
| }, |
| { |
| "epoch": 1.7366349243103998, |
| "grad_norm": 0.3853333294391632, |
| "learning_rate": 6.586107091172214e-06, |
| "loss": 0.01944092631340027, |
| "step": 30000 |
| }, |
| { |
| "epoch": 1.7366349243103998, |
| "eval_accuracy": 0.9902431999051815, |
| "eval_f1": 0.8897238929427909, |
| "eval_loss": 0.043809473514556885, |
| "eval_precision": 0.8803197563760944, |
| "eval_recall": 0.8993311210204293, |
| "eval_runtime": 32.3718, |
| "eval_samples_per_second": 386.139, |
| "eval_steps_per_second": 10.75, |
| "step": 30000 |
| }, |
| { |
| "epoch": 1.7424238038727604, |
| "grad_norm": 0.5895107984542847, |
| "learning_rate": 6.441389290882779e-06, |
| "loss": 0.020335075855255128, |
| "step": 30100 |
| }, |
| { |
| "epoch": 1.748212683435121, |
| "grad_norm": 0.10662174969911575, |
| "learning_rate": 6.296671490593344e-06, |
| "loss": 0.020258052349090575, |
| "step": 30200 |
| }, |
| { |
| "epoch": 1.7540015629974819, |
| "grad_norm": 3.988093376159668, |
| "learning_rate": 6.151953690303908e-06, |
| "loss": 0.0205714750289917, |
| "step": 30300 |
| }, |
| { |
| "epoch": 1.7597904425598425, |
| "grad_norm": 0.5093377828598022, |
| "learning_rate": 6.007235890014472e-06, |
| "loss": 0.020437090396881102, |
| "step": 30400 |
| }, |
| { |
| "epoch": 1.7655793221222034, |
| "grad_norm": 0.29351383447647095, |
| "learning_rate": 5.8625180897250366e-06, |
| "loss": 0.01850534439086914, |
| "step": 30500 |
| }, |
| { |
| "epoch": 1.771368201684564, |
| "grad_norm": 0.199212908744812, |
| "learning_rate": 5.7178002894356005e-06, |
| "loss": 0.02155954599380493, |
| "step": 30600 |
| }, |
| { |
| "epoch": 1.7771570812469246, |
| "grad_norm": 0.346227765083313, |
| "learning_rate": 5.573082489146165e-06, |
| "loss": 0.019093557596206664, |
| "step": 30700 |
| }, |
| { |
| "epoch": 1.7829459608092852, |
| "grad_norm": 0.28586652874946594, |
| "learning_rate": 5.42836468885673e-06, |
| "loss": 0.022267727851867675, |
| "step": 30800 |
| }, |
| { |
| "epoch": 1.788734840371646, |
| "grad_norm": 0.16195492446422577, |
| "learning_rate": 5.283646888567294e-06, |
| "loss": 0.019566291570663454, |
| "step": 30900 |
| }, |
| { |
| "epoch": 1.7945237199340067, |
| "grad_norm": 0.2971166968345642, |
| "learning_rate": 5.138929088277859e-06, |
| "loss": 0.01942929744720459, |
| "step": 31000 |
| }, |
| { |
| "epoch": 1.8003125994963676, |
| "grad_norm": 0.17818105220794678, |
| "learning_rate": 4.994211287988423e-06, |
| "loss": 0.02038184642791748, |
| "step": 31100 |
| }, |
| { |
| "epoch": 1.8061014790587282, |
| "grad_norm": 0.29902341961860657, |
| "learning_rate": 4.849493487698987e-06, |
| "loss": 0.019359084367752074, |
| "step": 31200 |
| }, |
| { |
| "epoch": 1.8118903586210888, |
| "grad_norm": 0.25175297260284424, |
| "learning_rate": 4.704775687409551e-06, |
| "loss": 0.02133084535598755, |
| "step": 31300 |
| }, |
| { |
| "epoch": 1.8176792381834495, |
| "grad_norm": 0.37808459997177124, |
| "learning_rate": 4.560057887120116e-06, |
| "loss": 0.01949509859085083, |
| "step": 31400 |
| }, |
| { |
| "epoch": 1.8234681177458103, |
| "grad_norm": 0.6678466200828552, |
| "learning_rate": 4.415340086830681e-06, |
| "loss": 0.018650727272033693, |
| "step": 31500 |
| }, |
| { |
| "epoch": 1.8292569973081712, |
| "grad_norm": 0.5661903619766235, |
| "learning_rate": 4.270622286541245e-06, |
| "loss": 0.020224461555480956, |
| "step": 31600 |
| }, |
| { |
| "epoch": 1.8350458768705318, |
| "grad_norm": 0.295162558555603, |
| "learning_rate": 4.125904486251809e-06, |
| "loss": 0.0190382981300354, |
| "step": 31700 |
| }, |
| { |
| "epoch": 1.8408347564328924, |
| "grad_norm": 0.5357158184051514, |
| "learning_rate": 3.981186685962373e-06, |
| "loss": 0.02294764518737793, |
| "step": 31800 |
| }, |
| { |
| "epoch": 1.846623635995253, |
| "grad_norm": 0.2613368630409241, |
| "learning_rate": 3.836468885672938e-06, |
| "loss": 0.019658113718032836, |
| "step": 31900 |
| }, |
| { |
| "epoch": 1.8524125155576137, |
| "grad_norm": 0.3176723122596741, |
| "learning_rate": 3.6917510853835025e-06, |
| "loss": 0.01859914779663086, |
| "step": 32000 |
| }, |
| { |
| "epoch": 1.8582013951199745, |
| "grad_norm": 0.1181308925151825, |
| "learning_rate": 3.547033285094067e-06, |
| "loss": 0.02106498718261719, |
| "step": 32100 |
| }, |
| { |
| "epoch": 1.8639902746823354, |
| "grad_norm": 0.6285836100578308, |
| "learning_rate": 3.4023154848046308e-06, |
| "loss": 0.019543524980545044, |
| "step": 32200 |
| }, |
| { |
| "epoch": 1.869779154244696, |
| "grad_norm": 0.221060112118721, |
| "learning_rate": 3.2575976845151955e-06, |
| "loss": 0.019698797464370726, |
| "step": 32300 |
| }, |
| { |
| "epoch": 1.8755680338070566, |
| "grad_norm": 0.46701422333717346, |
| "learning_rate": 3.11287988422576e-06, |
| "loss": 0.02131035327911377, |
| "step": 32400 |
| }, |
| { |
| "epoch": 1.8813569133694172, |
| "grad_norm": 0.8717579245567322, |
| "learning_rate": 2.968162083936324e-06, |
| "loss": 0.0199610698223114, |
| "step": 32500 |
| }, |
| { |
| "epoch": 1.8813569133694172, |
| "eval_accuracy": 0.9909171281835014, |
| "eval_f1": 0.8929190695762744, |
| "eval_loss": 0.04036634415388107, |
| "eval_precision": 0.8848160875652284, |
| "eval_recall": 0.9011718344913409, |
| "eval_runtime": 32.8086, |
| "eval_samples_per_second": 380.998, |
| "eval_steps_per_second": 10.607, |
| "step": 32500 |
| }, |
| { |
| "epoch": 1.887145792931778, |
| "grad_norm": 0.42211058735847473, |
| "learning_rate": 2.8234442836468885e-06, |
| "loss": 0.01953967571258545, |
| "step": 32600 |
| }, |
| { |
| "epoch": 1.8929346724941387, |
| "grad_norm": 0.14807343482971191, |
| "learning_rate": 2.6787264833574533e-06, |
| "loss": 0.02133486270904541, |
| "step": 32700 |
| }, |
| { |
| "epoch": 1.8987235520564996, |
| "grad_norm": 0.2488560974597931, |
| "learning_rate": 2.5340086830680177e-06, |
| "loss": 0.02061129570007324, |
| "step": 32800 |
| }, |
| { |
| "epoch": 1.9045124316188602, |
| "grad_norm": 0.12760600447654724, |
| "learning_rate": 2.3892908827785816e-06, |
| "loss": 0.018405574560165405, |
| "step": 32900 |
| }, |
| { |
| "epoch": 1.9103013111812208, |
| "grad_norm": 0.14860856533050537, |
| "learning_rate": 2.2445730824891463e-06, |
| "loss": 0.018643089532852174, |
| "step": 33000 |
| }, |
| { |
| "epoch": 1.9160901907435814, |
| "grad_norm": 0.21985752880573273, |
| "learning_rate": 2.0998552821997107e-06, |
| "loss": 0.01826516628265381, |
| "step": 33100 |
| }, |
| { |
| "epoch": 1.9218790703059423, |
| "grad_norm": 0.12793774902820587, |
| "learning_rate": 1.955137481910275e-06, |
| "loss": 0.018108237981796265, |
| "step": 33200 |
| }, |
| { |
| "epoch": 1.9276679498683031, |
| "grad_norm": 0.6992884278297424, |
| "learning_rate": 1.8104196816208394e-06, |
| "loss": 0.018568735122680664, |
| "step": 33300 |
| }, |
| { |
| "epoch": 1.9334568294306638, |
| "grad_norm": 0.16569721698760986, |
| "learning_rate": 1.6657018813314037e-06, |
| "loss": 0.020773212909698486, |
| "step": 33400 |
| }, |
| { |
| "epoch": 1.9392457089930244, |
| "grad_norm": 0.2137010395526886, |
| "learning_rate": 1.5209840810419682e-06, |
| "loss": 0.020217933654785157, |
| "step": 33500 |
| }, |
| { |
| "epoch": 1.945034588555385, |
| "grad_norm": 0.2512764036655426, |
| "learning_rate": 1.3762662807525328e-06, |
| "loss": 0.021186413764953612, |
| "step": 33600 |
| }, |
| { |
| "epoch": 1.9508234681177457, |
| "grad_norm": 1.0330688953399658, |
| "learning_rate": 1.231548480463097e-06, |
| "loss": 0.02138917684555054, |
| "step": 33700 |
| }, |
| { |
| "epoch": 1.9566123476801065, |
| "grad_norm": 0.3796994090080261, |
| "learning_rate": 1.0868306801736615e-06, |
| "loss": 0.017367198467254638, |
| "step": 33800 |
| }, |
| { |
| "epoch": 1.9624012272424673, |
| "grad_norm": 0.35818561911582947, |
| "learning_rate": 9.421128798842257e-07, |
| "loss": 0.019348199367523192, |
| "step": 33900 |
| }, |
| { |
| "epoch": 1.968190106804828, |
| "grad_norm": 0.4799649715423584, |
| "learning_rate": 7.973950795947903e-07, |
| "loss": 0.022617738246917724, |
| "step": 34000 |
| }, |
| { |
| "epoch": 1.9739789863671886, |
| "grad_norm": 0.8583610653877258, |
| "learning_rate": 6.526772793053546e-07, |
| "loss": 0.019666857719421386, |
| "step": 34100 |
| }, |
| { |
| "epoch": 1.9797678659295492, |
| "grad_norm": 0.6525281667709351, |
| "learning_rate": 5.079594790159189e-07, |
| "loss": 0.020464680194854735, |
| "step": 34200 |
| }, |
| { |
| "epoch": 1.98555674549191, |
| "grad_norm": 1.2383188009262085, |
| "learning_rate": 3.632416787264834e-07, |
| "loss": 0.018764994144439696, |
| "step": 34300 |
| }, |
| { |
| "epoch": 1.9913456250542707, |
| "grad_norm": 0.19601252675056458, |
| "learning_rate": 2.1852387843704776e-07, |
| "loss": 0.018834112882614135, |
| "step": 34400 |
| }, |
| { |
| "epoch": 1.9971345046166316, |
| "grad_norm": 0.25431913137435913, |
| "learning_rate": 7.380607814761216e-08, |
| "loss": 0.018961663246154784, |
| "step": 34500 |
| }, |
| { |
| "epoch": 2.0, |
| "step": 34550, |
| "total_flos": 1.1376036548759528e+18, |
| "train_loss": 0.03949730815487212, |
| "train_runtime": 14944.1573, |
| "train_samples_per_second": 332.906, |
| "train_steps_per_second": 2.312 |
| } |
| ], |
| "logging_steps": 100, |
| "max_steps": 34550, |
| "num_input_tokens_seen": 0, |
| "num_train_epochs": 2, |
| "save_steps": 2500, |
| "stateful_callbacks": { |
| "TrainerControl": { |
| "args": { |
| "should_epoch_stop": false, |
| "should_evaluate": false, |
| "should_log": false, |
| "should_save": true, |
| "should_training_stop": true |
| }, |
| "attributes": {} |
| } |
| }, |
| "total_flos": 1.1376036548759528e+18, |
| "train_batch_size": 72, |
| "trial_name": null, |
| "trial_params": null |
| } |
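
A minimal sketch of how this state file can be consumed, assuming it is the `trainer_state.json` that `transformers.Trainer` writes alongside each checkpoint; the file path below is an assumption inferred from the `best_model_checkpoint` field above:

```python
# Minimal sketch: parse this trainer_state.json and confirm the best checkpoint.
# The path is an assumption inferred from "best_model_checkpoint" in the file.
import json

with open("lang-ner-xlmr/checkpoint-32500/trainer_state.json") as f:
    state = json.load(f)

# "log_history" interleaves training entries (keyed by "loss") with
# evaluation entries (keyed by "eval_*"); keep only the eval rows.
evals = [e for e in state["log_history"] if "eval_f1" in e]
best = max(evals, key=lambda e: e["eval_f1"])

print(f"{len(evals)} evaluations, best F1 {best['eval_f1']:.4f} at step {best['step']}")
assert best["step"] == state["best_global_step"]  # 32500 in this run
```

Filtering on `eval_f1` works here because `best_metric` (0.8929…) equals the step-32500 `eval_f1` logged above; if the run had been configured with a different `metric_for_best_model`, the filter key would need to change accordingly.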