```json
{
  "best_epoch": 33,
  "best_stage": "stage1",
  "best_val_acc": 0.9545232273838631,
  "test_loss": 0.646228389306502,
  "test_acc": 0.9613691931540342,
  "test_macro_f1": 0.9320283953570693,
  "hard_mining": {
    "enabled": true,
    "executed": true,
    "num_samples": 16334,
    "confidence_threshold": 0.85,
    "misclassified": 545,
    "low_confidence": 1700,
    "both": 451,
    "weight_min": 1.0,
    "weight_max": 4.0,
    "weight_mean": 1.176564221868495
  },
  "history": [
    {
      "stage": "stage1",
      "epoch": 1,
      "train_loss": 4.313515394232994,
      "train_acc": 0.032019101261173015,
      "train_macro_f1": 0.02712417475898117,
      "val_loss": 7.769173968922008,
      "val_acc": 0.014180929095354523,
      "val_macro_f1": 0.003789197040298735,
      "duration_seconds": 41.54798349970952
    },
    {
      "stage": "stage1",
      "epoch": 2,
      "train_loss": 3.444204147471938,
      "train_acc": 0.09850618342108486,
      "train_macro_f1": 0.105352354413626,
      "val_loss": 3.6144719557328657,
      "val_acc": 0.10073349633251834,
      "val_macro_f1": 0.07000853003986085,
      "duration_seconds": 38.47417518682778
    },
    {
      "stage": "stage1",
      "epoch": 3,
      "train_loss": 2.8109747060509616,
      "train_acc": 0.20154279417166646,
      "train_macro_f1": 0.21854176612220216,
      "val_loss": 2.98252272605896,
      "val_acc": 0.17946210268948656,
      "val_macro_f1": 0.14989537030978706,
      "duration_seconds": 38.17656293977052
    },
    {
      "stage": "stage1",
      "epoch": 4,
      "train_loss": 2.2996954086214996,
      "train_acc": 0.34761846455246725,
      "train_macro_f1": 0.37294280895416104,
      "val_loss": 2.3156512650576504,
      "val_acc": 0.37457212713936433,
      "val_macro_f1": 0.36651348862376204,
      "duration_seconds": 37.970143370795995
    },
    {
      "stage": "stage1",
      "epoch": 5,
      "train_loss": 1.6524738683256992,
      "train_acc": 0.6124035753642708,
      "train_macro_f1": 0.6176766827413694,
      "val_loss": 1.4232586188749834,
      "val_acc": 0.7530562347188264,
      "val_macro_f1": 0.7361482815159818,
      "duration_seconds": 37.93143594684079
    },
    {
      "stage": "stage1",
      "epoch": 6,
      "train_loss": 1.208171897156294,
      "train_acc": 0.8022529692665605,
      "train_macro_f1": 0.791375137567527,
      "val_loss": 1.096138444813815,
      "val_acc": 0.8557457212713936,
      "val_macro_f1": 0.8341570413748813,
      "duration_seconds": 38.05841494211927
    },
    {
      "stage": "stage1",
      "epoch": 7,
      "train_loss": 1.0467550879301026,
      "train_acc": 0.8506183421084854,
      "train_macro_f1": 0.8464827124195602,
      "val_loss": 0.912368427623402,
      "val_acc": 0.9080684596577017,
      "val_macro_f1": 0.8785396246910446,
      "duration_seconds": 37.41684066830203
    },
    {
      "stage": "stage1",
      "epoch": 8,
      "train_loss": 0.9626071349132893,
      "train_acc": 0.8759030243663524,
      "train_macro_f1": 0.8737804943842077,
      "val_loss": 0.9386219707402316,
      "val_acc": 0.8948655256723717,
      "val_macro_f1": 0.8682503489532023,
      "duration_seconds": 38.04319545486942
    },
    {
      "stage": "stage1",
      "epoch": 9,
      "train_loss": 0.8973523711049279,
      "train_acc": 0.8907799681645647,
      "train_macro_f1": 0.8875551854478719,
      "val_loss": 0.8646871664307334,
      "val_acc": 0.9198044009779951,
      "val_macro_f1": 0.8949053659799362,
      "duration_seconds": 37.64396993210539
    },
    {
      "stage": "stage1",
      "epoch": 10,
      "train_loss": 0.8764715506586918,
      "train_acc": 0.8992286029141667,
      "train_macro_f1": 0.8913087240589926,
      "val_loss": 0.8934382850473578,
      "val_acc": 0.908557457212714,
      "val_macro_f1": 0.8811464151370096,
      "duration_seconds": 38.161499897018075
    },
    {
      "stage": "stage1",
      "epoch": 11,
      "train_loss": 0.8411068756913029,
      "train_acc": 0.9013101506060978,
      "train_macro_f1": 0.8970095282070171,
      "val_loss": 0.8103193749081005,
      "val_acc": 0.9315403422982885,
      "val_macro_f1": 0.900308708225663,
      "duration_seconds": 38.78951836982742
    },
    {
      "stage": "stage1",
      "epoch": 12,
      "train_loss": 0.801855732535207,
      "train_acc": 0.9101261173013346,
      "train_macro_f1": 0.9095695021002316,
      "val_loss": 0.7941674590110779,
      "val_acc": 0.9339853300733496,
      "val_macro_f1": 0.8975888047900716,
      "duration_seconds": 37.50118718901649
    },
    {
      "stage": "stage1",
      "epoch": 13,
      "train_loss": 0.7851105207620666,
      "train_acc": 0.9166768703318232,
      "train_macro_f1": 0.9156568805546416,
      "val_loss": 0.7914232449098066,
      "val_acc": 0.9364303178484108,
      "val_macro_f1": 0.9084976516741216,
      "duration_seconds": 37.85148463025689
    },
    {
      "stage": "stage1",
      "epoch": 14,
      "train_loss": 0.7669343338456265,
      "train_acc": 0.9204726337700502,
      "train_macro_f1": 0.9229367514640775,
      "val_loss": 0.7820644216103987,
      "val_acc": 0.9354523227383863,
      "val_macro_f1": 0.9072646736307985,
      "duration_seconds": 38.37839019019157
    },
    {
      "stage": "stage1",
      "epoch": 15,
      "train_loss": 0.7542976293452951,
      "train_acc": 0.925003061099547,
      "train_macro_f1": 0.9252287661475155,
      "val_loss": 0.7722757241942666,
      "val_acc": 0.9378973105134474,
      "val_macro_f1": 0.9088258270013261,
      "duration_seconds": 38.02175585925579
    },
    {
      "stage": "stage1",
      "epoch": 16,
      "train_loss": 0.724993243466976,
      "train_acc": 0.9310640382025224,
      "train_macro_f1": 0.9306218869346541,
      "val_loss": 0.7765102765776895,
      "val_acc": 0.9325183374083129,
      "val_macro_f1": 0.8957598016047307,
      "duration_seconds": 38.236370380967855
    },
    {
      "stage": "stage1",
      "epoch": 17,
      "train_loss": 0.7193509769994159,
      "train_acc": 0.9331455858944533,
      "train_macro_f1": 0.9339855859569205,
      "val_loss": 0.7548327012495561,
      "val_acc": 0.9422982885085575,
      "val_macro_f1": 0.9146078158676976,
      "duration_seconds": 37.599709355738014
    },
    {
      "stage": "stage1",
      "epoch": 18,
      "train_loss": 0.7072599779727847,
      "train_acc": 0.933880249785723,
      "train_macro_f1": 0.93310010660679,
      "val_loss": 0.7916218855164268,
      "val_acc": 0.9330073349633252,
      "val_macro_f1": 0.8968142015045353,
      "duration_seconds": 38.33785875607282
    },
    {
      "stage": "stage1",
      "epoch": 19,
      "train_loss": 0.7052893208902936,
      "train_acc": 0.9335741398310273,
      "train_macro_f1": 0.9343232005341593,
      "val_loss": 0.7364852699366483,
      "val_acc": 0.9452322738386308,
      "val_macro_f1": 0.9182284675084373,
      "duration_seconds": 37.837123352102935
    },
    {
      "stage": "stage1",
      "epoch": 20,
      "train_loss": 0.6814761432104333,
      "train_acc": 0.9384106771152197,
      "train_macro_f1": 0.9398322150903461,
      "val_loss": 0.7354252338409424,
      "val_acc": 0.943276283618582,
      "val_macro_f1": 0.9160987470807599,
      "duration_seconds": 38.50653554406017
    },
    {
      "stage": "stage1",
      "epoch": 21,
      "train_loss": 0.6830035545105158,
      "train_acc": 0.9377984572058283,
      "train_macro_f1": 0.9395218264544717,
      "val_loss": 0.7638225717978044,
      "val_acc": 0.9354523227383863,
      "val_macro_f1": 0.8995650263429223,
      "duration_seconds": 37.889779151417315
    },
    {
      "stage": "stage1",
      "epoch": 22,
      "train_loss": 0.6549201711665752,
      "train_acc": 0.9458185380188564,
      "train_macro_f1": 0.9479852966726539,
      "val_loss": 0.748274640603499,
      "val_acc": 0.9408312958435208,
      "val_macro_f1": 0.913678433990217,
      "duration_seconds": 37.69011677801609
    },
    {
      "stage": "stage1",
      "epoch": 23,
      "train_loss": 0.6476058356983717,
      "train_acc": 0.9478388637198482,
      "train_macro_f1": 0.9474735112676054,
      "val_loss": 0.7422939701513811,
      "val_acc": 0.9393643031784841,
      "val_macro_f1": 0.9118126326501514,
      "duration_seconds": 37.879671565722674
    },
    {
      "stage": "stage1",
      "epoch": 24,
      "train_loss": 0.6383854310179866,
      "train_acc": 0.9452063181094649,
      "train_macro_f1": 0.9456692832105631,
      "val_loss": 0.70200622623617,
      "val_acc": 0.952078239608802,
      "val_macro_f1": 0.9221311577849888,
      "duration_seconds": 39.13599858107045
    },
    {
      "stage": "stage1",
      "epoch": 25,
      "train_loss": 0.6240727319273838,
      "train_acc": 0.9534100648953104,
      "train_macro_f1": 0.9560553119679458,
      "val_loss": 0.7065791866996072,
      "val_acc": 0.9525672371638142,
      "val_macro_f1": 0.9233286511154518,
      "duration_seconds": 37.369449513964355
    },
    {
      "stage": "stage1",
      "epoch": 26,
      "train_loss": 0.6299820653227872,
      "train_acc": 0.9505938533121097,
      "train_macro_f1": 0.9530535174633679,
      "val_loss": 0.7115304686806418,
      "val_acc": 0.9476772616136919,
      "val_macro_f1": 0.9190713459538334,
      "duration_seconds": 38.107163506094366
    },
    {
      "stage": "stage1",
      "epoch": 27,
      "train_loss": 0.620378524758095,
      "train_acc": 0.9531039549406146,
      "train_macro_f1": 0.9549947894496518,
      "val_loss": 0.6997524770823392,
      "val_acc": 0.9486552567237164,
      "val_macro_f1": 0.9201905369641149,
      "duration_seconds": 37.054033300839365
    },
    {
      "stage": "stage1",
      "epoch": 28,
      "train_loss": 0.6110506050808485,
      "train_acc": 0.9556140565691196,
      "train_macro_f1": 0.958136157178548,
      "val_loss": 0.6986935463818637,
      "val_acc": 0.9491442542787286,
      "val_macro_f1": 0.9200785518230558,
      "duration_seconds": 38.087362293154
    },
    {
      "stage": "stage1",
      "epoch": 29,
      "train_loss": 0.598165498916493,
      "train_acc": 0.959532263989225,
      "train_macro_f1": 0.9600834433079148,
      "val_loss": 0.7002380436116998,
      "val_acc": 0.9506112469437653,
      "val_macro_f1": 0.9219051963307497,
      "duration_seconds": 37.679548679850996
    },
    {
      "stage": "stage1",
      "epoch": 30,
      "train_loss": 0.5900751698848813,
      "train_acc": 0.959593485980164,
      "train_macro_f1": 0.9605770687335475,
      "val_loss": 0.695245005867698,
      "val_acc": 0.9511002444987775,
      "val_macro_f1": 0.9216505884105199,
      "duration_seconds": 38.43997255899012
    },
    {
      "stage": "stage1",
      "epoch": 31,
      "train_loss": 0.5878149402696032,
      "train_acc": 0.9606342598261296,
      "train_macro_f1": 0.9621756383388788,
      "val_loss": 0.6914988647807728,
      "val_acc": 0.9525672371638142,
      "val_macro_f1": 0.9230561654384433,
      "duration_seconds": 37.00720879295841
    },
    {
      "stage": "stage1",
      "epoch": 32,
      "train_loss": 0.5783637138300164,
      "train_acc": 0.9626545855271214,
      "train_macro_f1": 0.9650474377206499,
      "val_loss": 0.6921304247596047,
      "val_acc": 0.9525672371638142,
      "val_macro_f1": 0.9242248262820537,
      "duration_seconds": 38.078143508173525
    },
    {
      "stage": "stage1",
      "epoch": 33,
      "train_loss": 0.5720760066841923,
      "train_acc": 0.9652871311375046,
      "train_macro_f1": 0.9668956502827502,
      "val_loss": 0.6832193461331454,
      "val_acc": 0.9545232273838631,
      "val_macro_f1": 0.9254622457557515,
      "duration_seconds": 37.823320649098605
    },
    {
      "stage": "stage1",
      "epoch": 34,
      "train_loss": 0.5669168850710226,
      "train_acc": 0.9660217950287744,
      "train_macro_f1": 0.9679376980718958,
      "val_loss": 0.6903422366489064,
      "val_acc": 0.952078239608802,
      "val_macro_f1": 0.9141290090107064,
      "duration_seconds": 37.19663280015811
    },
    {
      "stage": "stage1",
      "epoch": 35,
      "train_loss": 0.5721360119276269,
      "train_acc": 0.9652259091465655,
      "train_macro_f1": 0.9672558713264995,
      "val_loss": 0.6860878196629611,
      "val_acc": 0.9535452322738386,
      "val_macro_f1": 0.9246890611512996,
      "duration_seconds": 37.865042545832694
    },
    {
      "stage": "stage1",
      "epoch": 36,
      "train_loss": 0.5751806615396987,
      "train_acc": 0.9634504714093303,
      "train_macro_f1": 0.9650922342936058,
      "val_loss": 0.6858713843605735,
      "val_acc": 0.9506112469437653,
      "val_macro_f1": 0.9215963057574214,
      "duration_seconds": 38.06451524980366
    },
    {
      "stage": "stage1",
      "epoch": 37,
      "train_loss": 0.5691629336323849,
      "train_acc": 0.963756581364026,
      "train_macro_f1": 0.9659098190263928,
      "val_loss": 0.6852591091936285,
      "val_acc": 0.9511002444987775,
      "val_macro_f1": 0.9221256983893021,
      "duration_seconds": 37.62006014632061
    },
    {
      "stage": "stage1",
      "epoch": 38,
      "train_loss": 0.563821169526078,
      "train_acc": 0.9661442390106526,
      "train_macro_f1": 0.9676408629024923,
      "val_loss": 0.6863496682860635,
      "val_acc": 0.9515892420537897,
      "val_macro_f1": 0.9228874257523916,
      "duration_seconds": 37.83984856121242
    },
    {
      "stage": "stage1",
      "epoch": 39,
      "train_loss": 0.5663612668597421,
      "train_acc": 0.9646136892371739,
      "train_macro_f1": 0.966505548310987,
      "val_loss": 0.685564474626021,
      "val_acc": 0.952078239608802,
      "val_macro_f1": 0.9230060614495823,
      "duration_seconds": 38.010506270918995
    },
    {
      "stage": "stage1",
      "epoch": 40,
      "train_loss": 0.5743493382320848,
      "train_acc": 0.9646749112281131,
      "train_macro_f1": 0.9661682215935604,
      "val_loss": 0.685387134552002,
      "val_acc": 0.9515892420537897,
      "val_macro_f1": 0.9225555286063664,
      "duration_seconds": 37.642080747988075
    },
    {
      "stage": "stage2",
      "epoch": 1,
      "train_loss": 0.6768135927444281,
      "train_acc": 0.9281253826374434,
      "train_macro_f1": 0.9207764039512437,
      "val_loss": 0.7352005947719921,
      "val_acc": 0.9408312958435208,
      "val_macro_f1": 0.908830505411882,
      "duration_seconds": 40.97610312793404
    },
    {
      "stage": "stage2",
      "epoch": 2,
      "train_loss": 0.6523073114628015,
      "train_acc": 0.9372474592873761,
      "train_macro_f1": 0.9393616328475114,
      "val_loss": 0.7405287569219415,
      "val_acc": 0.943276283618582,
      "val_macro_f1": 0.9145481739672323,
      "duration_seconds": 40.34182390011847
    },
    {
      "stage": "stage2",
      "epoch": 3,
      "train_loss": 0.6370004353135131,
      "train_acc": 0.9430635484265948,
      "train_macro_f1": 0.9449550202236057,
      "val_loss": 0.7183141654187982,
      "val_acc": 0.9418092909535453,
      "val_macro_f1": 0.9110062617729418,
      "duration_seconds": 39.924425372853875
    },
    {
      "stage": "stage2",
      "epoch": 4,
      "train_loss": 0.6195670698964318,
      "train_acc": 0.9518182931308926,
      "train_macro_f1": 0.9531957634054529,
      "val_loss": 0.722674245184118,
      "val_acc": 0.9427872860635697,
      "val_macro_f1": 0.9122724208628346,
      "duration_seconds": 39.511029839050025
    }
  ]
}
```
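For reference, the snippet below is a minimal sketch of how this results file could be inspected downstream. It assumes the JSON above is saved as `results.json` (a hypothetical path) and that the `both` counter under `hard_mining` denotes samples that are both misclassified and low-confidence; it relies only on the keys visible in the file.

```python
import json

# Hypothetical path -- adjust to wherever this results JSON is stored.
RESULTS_PATH = "results.json"

with open(RESULTS_PATH) as f:
    results = json.load(f)

# Headline metrics recorded by the training run.
print(f"best epoch    : {results['best_epoch']} ({results['best_stage']})")
print(f"best val acc  : {results['best_val_acc']:.4f}")
print(f"test acc      : {results['test_acc']:.4f}")
print(f"test macro-F1 : {results['test_macro_f1']:.4f}")

# Hard-mining summary. The union of flagged samples is computed by
# inclusion-exclusion, assuming 'both' counts samples that are
# simultaneously misclassified and low-confidence.
hm = results.get("hard_mining", {})
if hm.get("executed"):
    flagged = hm["misclassified"] + hm["low_confidence"] - hm["both"]
    print(f"hard mining   : {flagged}/{hm['num_samples']} samples flagged, "
          f"mean weight {hm['weight_mean']:.3f}")

# Best validation accuracy per stage from the epoch-by-epoch history.
best_per_stage = {}
for entry in results["history"]:
    stage = entry["stage"]
    if stage not in best_per_stage or entry["val_acc"] > best_per_stage[stage]["val_acc"]:
        best_per_stage[stage] = entry

for stage, entry in best_per_stage.items():
    print(f"{stage}: best val_acc {entry['val_acc']:.4f} at epoch {entry['epoch']}")
```

Under that reading of `both`, the script would report 545 + 1700 - 451 = 1794 of 16334 samples flagged for hard mining, and stage1 (epoch 33) as the strongest validation checkpoint, consistent with the recorded `best_stage` and `best_epoch`.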