{
  "best_metric": 0.681284212993957,
  "best_model_checkpoint": "/bigwork/nhwpziet/appropriateness-style-transfer/data/models/multilabel-debertav3-conservative/fold0/fold0.0/checkpoint-1250",
  "epoch": 18.229166666666668,
  "global_step": 1750,
  "is_hyper_param_search": false,
  "is_local_process_zero": true,
  "is_world_process_zero": true,
  "log_history": [
    {
      "epoch": 2.6,
      "eval_Appropriateness_macroF1": 0.6499349052509764,
      "eval_Appropriateness_precision": 0.663745660824655,
      "eval_Appropriateness_recall": 0.6624663978494624,
      "eval_Commitment_macroF1": 0.48068768147730356,
      "eval_Commitment_precision": 0.6111764705882352,
      "eval_Commitment_recall": 0.5880626223091976,
      "eval_Committed Openness_macroF1": 0.5892457057505601,
      "eval_Committed Openness_precision": 0.6571428571428571,
      "eval_Committed Openness_recall": 0.6731601731601732,
      "eval_Committed Seriousness_macroF1": 0.6182764603817235,
      "eval_Committed Seriousness_precision": 0.6083439761600681,
      "eval_Committed Seriousness_recall": 0.77997799779978,
      "eval_Emotional Intensity_macroF1": 0.574343023682986,
      "eval_Emotional Intensity_precision": 0.5939801879603759,
      "eval_Emotional Intensity_recall": 0.6541666666666667,
      "eval_Emotional Typology_macroF1": 0.5431247790738777,
      "eval_Emotional Typology_precision": 0.6008333333333333,
      "eval_Emotional Typology_recall": 0.65625,
      "eval_Emotions_macroF1": 0.530779524924724,
      "eval_Emotions_precision": 0.5917211328976035,
      "eval_Emotions_recall": 0.6085163418909165,
      "eval_Intelligibility_macroF1": 0.6261437908496732,
      "eval_Intelligibility_precision": 0.625,
      "eval_Intelligibility_recall": 0.6328999638858794,
      "eval_Intelligible Organization_macroF1": 0.5602359709424212,
      "eval_Intelligible Organization_precision": 0.5534420289855072,
      "eval_Intelligible Organization_recall": 0.6025789626195306,
      "eval_Intelligible Position_macroF1": 0.6010933398284214,
      "eval_Intelligible Position_precision": 0.5964783923519656,
      "eval_Intelligible Position_recall": 0.6235632183908046,
      "eval_Intelligible Relevance_macroF1": 0.528479573942976,
      "eval_Intelligible Relevance_precision": 0.5802107728337236,
      "eval_Intelligible Relevance_recall": 0.6112658080983873,
      "eval_Not classified_macroF1": 0.4965675057208238,
      "eval_Not classified_precision": 0.49318181818181817,
      "eval_Not classified_recall": 0.5,
      "eval_Orthography_macroF1": 0.5675185241191593,
      "eval_Orthography_precision": 0.5641177243463408,
      "eval_Orthography_recall": 0.7877358490566038,
      "eval_Other_macroF1": 0.6333333333333333,
      "eval_Other_precision": 0.6477093206951027,
      "eval_Other_recall": 0.6220095693779903,
      "eval_loss": 1.0271475315093994,
      "eval_mean_F1": 0.5714117228056399,
      "eval_mean_precision": 0.5990774054501132,
      "eval_mean_recall": 0.6430466836503852,
      "eval_runtime": 2.5376,
      "eval_samples_per_second": 86.696,
      "eval_steps_per_second": 5.517,
      "step": 250
    },
    {
      "epoch": 5.21,
      "learning_rate": 2.483101851851852e-06,
      "loss": 0.9858,
      "step": 500
    },
    {
      "epoch": 5.21,
      "eval_Appropriateness_macroF1": 0.665130632415188,
      "eval_Appropriateness_precision": 0.7096551724137932,
      "eval_Appropriateness_recall": 0.6915322580645161,
      "eval_Commitment_macroF1": 0.7509093975090939,
      "eval_Commitment_precision": 0.7618189102564102,
      "eval_Commitment_recall": 0.7435933277420557,
      "eval_Committed Openness_macroF1": 0.7043862174872828,
      "eval_Committed Openness_precision": 0.7002627627627627,
      "eval_Committed Openness_recall": 0.7099567099567099,
      "eval_Committed Seriousness_macroF1": 0.7135416666666666,
      "eval_Committed Seriousness_precision": 0.6732215153267784,
      "eval_Committed Seriousness_recall": 0.8294829482948295,
      "eval_Emotional Intensity_macroF1": 0.679588485017508,
      "eval_Emotional Intensity_precision": 0.6660829284826573,
      "eval_Emotional Intensity_recall": 0.7041666666666666,
      "eval_Emotional Typology_macroF1": 0.6851543561435789,
      "eval_Emotional Typology_precision": 0.672855451512216,
      "eval_Emotional Typology_recall": 0.7073863636363636,
      "eval_Emotions_macroF1": 0.714746897409436,
      "eval_Emotions_precision": 0.7212543554006968,
      "eval_Emotions_recall": 0.709506134653057,
      "eval_Intelligibility_macroF1": 0.634313796343138,
      "eval_Intelligibility_precision": 0.6514896304874198,
      "eval_Intelligibility_recall": 0.6299205489346335,
      "eval_Intelligible Organization_macroF1": 0.5869602321215225,
      "eval_Intelligible Organization_precision": 0.5771428571428572,
      "eval_Intelligible Organization_recall": 0.676035931614025,
      "eval_Intelligible Position_macroF1": 0.6466018283244253,
      "eval_Intelligible Position_precision": 0.6393939393939394,
      "eval_Intelligible Position_recall": 0.6580459770114943,
      "eval_Intelligible Relevance_macroF1": 0.6084230126783319,
      "eval_Intelligible Relevance_precision": 0.6041666666666666,
      "eval_Intelligible Relevance_recall": 0.6160227404571296,
      "eval_Not classified_macroF1": 0.48717948717948717,
      "eval_Not classified_precision": 0.49292452830188677,
      "eval_Not classified_recall": 0.4815668202764977,
      "eval_Orthography_macroF1": 0.5915611814345991,
      "eval_Orthography_precision": 0.5756166002067642,
      "eval_Orthography_recall": 0.8018867924528301,
      "eval_Other_macroF1": 0.6100006112843084,
      "eval_Other_precision": 0.5868421052631579,
      "eval_Other_recall": 0.7153110047846889,
      "eval_loss": 0.9632763266563416,
      "eval_mean_F1": 0.6484641287153262,
      "eval_mean_precision": 0.6451948159727147,
      "eval_mean_recall": 0.6910295874675355,
      "eval_runtime": 2.3793,
      "eval_samples_per_second": 92.466,
      "eval_steps_per_second": 5.884,
      "step": 500
    },
    {
      "epoch": 7.81,
      "eval_Appropriateness_macroF1": 0.6687855470056876,
      "eval_Appropriateness_precision": 0.7210960960960962,
      "eval_Appropriateness_recall": 0.6979166666666667,
      "eval_Commitment_macroF1": 0.7442119098170052,
      "eval_Commitment_precision": 0.778556095356457,
      "eval_Commitment_recall": 0.7297549156648961,
      "eval_Committed Openness_macroF1": 0.6861471861471862,
      "eval_Committed Openness_precision": 0.6861471861471862,
      "eval_Committed Openness_recall": 0.6861471861471862,
      "eval_Committed Seriousness_macroF1": 0.7894233070112466,
      "eval_Committed Seriousness_precision": 0.758078231292517,
      "eval_Committed Seriousness_recall": 0.8338833883388339,
      "eval_Emotional Intensity_macroF1": 0.671913835956918,
      "eval_Emotional Intensity_precision": 0.665079365079365,
      "eval_Emotional Intensity_recall": 0.6805555555555556,
      "eval_Emotional Typology_macroF1": 0.6675863881224543,
      "eval_Emotional Typology_precision": 0.6706795077581594,
      "eval_Emotional Typology_recall": 0.6647727272727273,
      "eval_Emotions_macroF1": 0.6727568690185514,
      "eval_Emotions_precision": 0.6957771114442779,
      "eval_Emotions_recall": 0.6615630477368801,
      "eval_Intelligibility_macroF1": 0.6358605696781047,
      "eval_Intelligibility_precision": 0.6641622415546267,
      "eval_Intelligibility_recall": 0.6311845431563741,
      "eval_Intelligible Organization_macroF1": 0.6031397786553557,
      "eval_Intelligible Organization_precision": 0.5866545893719807,
      "eval_Intelligible Organization_recall": 0.6663286004056794,
      "eval_Intelligible Position_macroF1": 0.6535852713178294,
      "eval_Intelligible Position_precision": 0.6494117647058824,
      "eval_Intelligible Position_recall": 0.6586706646676661,
      "eval_Intelligible Relevance_macroF1": 0.6247058823529412,
      "eval_Intelligible Relevance_precision": 0.6265664160401002,
      "eval_Intelligible Relevance_recall": 0.6230421162547859,
      "eval_Not classified_macroF1": 0.46987951807228917,
      "eval_Not classified_precision": 0.49242424242424243,
      "eval_Not classified_recall": 0.44930875576036866,
      "eval_Orthography_macroF1": 0.6369636963696369,
      "eval_Orthography_precision": 0.6019345238095238,
      "eval_Orthography_recall": 0.8231132075471699,
      "eval_Other_macroF1": 0.6201803184346825,
      "eval_Other_precision": 0.5941220238095238,
      "eval_Other_recall": 0.7200956937799043,
      "eval_loss": 1.0039398670196533,
      "eval_mean_F1": 0.6532242912828492,
      "eval_mean_precision": 0.6564778139207099,
      "eval_mean_recall": 0.6804526477824782,
      "eval_runtime": 2.3736,
      "eval_samples_per_second": 92.687,
      "eval_steps_per_second": 5.898,
      "step": 750
    },
    {
      "epoch": 10.42,
      "learning_rate": 1.6439814814814818e-06,
      "loss": 0.6344,
      "step": 1000
    },
    {
      "epoch": 10.42,
      "eval_Appropriateness_macroF1": 0.7227215438335501,
      "eval_Appropriateness_precision": 0.7335093454027324,
      "eval_Appropriateness_recall": 0.7340389784946236,
      "eval_Commitment_macroF1": 0.7471264367816092,
      "eval_Commitment_precision": 0.7442557442557443,
      "eval_Commitment_recall": 0.75062901873078,
      "eval_Committed Openness_macroF1": 0.7027304072313857,
      "eval_Committed Openness_precision": 0.6964285714285714,
      "eval_Committed Openness_recall": 0.7164502164502164,
      "eval_Committed Seriousness_macroF1": 0.7809150020502607,
      "eval_Committed Seriousness_precision": 0.7471794871794872,
      "eval_Committed Seriousness_recall": 0.8314081408140814,
      "eval_Emotional Intensity_macroF1": 0.7108560413563481,
      "eval_Emotional Intensity_precision": 0.6917723476647077,
      "eval_Emotional Intensity_recall": 0.7583333333333333,
      "eval_Emotional Typology_macroF1": 0.6484848484848484,
      "eval_Emotional Typology_precision": 0.6385281385281385,
      "eval_Emotional Typology_recall": 0.6818181818181819,
      "eval_Emotions_macroF1": 0.7161290322580646,
      "eval_Emotions_precision": 0.7097610135329686,
      "eval_Emotions_recall": 0.7253325085060316,
      "eval_Intelligibility_macroF1": 0.6560919540229884,
      "eval_Intelligibility_precision": 0.6597222222222222,
      "eval_Intelligibility_recall": 0.653665583243048,
      "eval_Intelligible Organization_macroF1": 0.6163150492264416,
      "eval_Intelligible Organization_precision": 0.5989583333333333,
      "eval_Intelligible Organization_recall": 0.6541582150101419,
      "eval_Intelligible Position_macroF1": 0.6809374637428935,
      "eval_Intelligible Position_precision": 0.6711672473867596,
      "eval_Intelligible Position_recall": 0.6964017991004497,
      "eval_Intelligible Relevance_macroF1": 0.6410148436677545,
      "eval_Intelligible Relevance_precision": 0.6337918251877865,
      "eval_Intelligible Relevance_recall": 0.6591251885369533,
      "eval_Not classified_macroF1": 0.4835680751173709,
      "eval_Not classified_precision": 0.49282296650717705,
      "eval_Not classified_recall": 0.47465437788018433,
      "eval_Orthography_macroF1": 0.6474358974358975,
      "eval_Orthography_precision": 0.6151960784313726,
      "eval_Orthography_recall": 0.7216981132075472,
      "eval_Other_macroF1": 0.6251457268406422,
      "eval_Other_precision": 0.607843137254902,
      "eval_Other_recall": 0.65311004784689,
      "eval_loss": 1.042500615119934,
      "eval_mean_F1": 0.6699623087178611,
      "eval_mean_precision": 0.6600668898797073,
      "eval_mean_recall": 0.6936302644980331,
      "eval_runtime": 2.3751,
      "eval_samples_per_second": 92.628,
      "eval_steps_per_second": 5.895,
      "step": 1000
    },
    {
      "epoch": 13.02,
      "eval_Appropriateness_macroF1": 0.7133461562325495,
      "eval_Appropriateness_precision": 0.7343254138433828,
      "eval_Appropriateness_recall": 0.729502688172043,
      "eval_Commitment_macroF1": 0.7317334894156667,
      "eval_Commitment_precision": 0.7467780183524075,
      "eval_Commitment_recall": 0.7230453825365762,
      "eval_Committed Openness_macroF1": 0.6888108477221735,
      "eval_Committed Openness_precision": 0.6873065015479876,
      "eval_Committed Openness_recall": 0.6904761904761905,
      "eval_Committed Seriousness_macroF1": 0.8377037086714506,
      "eval_Committed Seriousness_precision": 0.8296674522126211,
      "eval_Committed Seriousness_recall": 0.8462596259625963,
      "eval_Emotional Intensity_macroF1": 0.7021316792090431,
      "eval_Emotional Intensity_precision": 0.68795683954055,
      "eval_Emotional Intensity_recall": 0.725,
      "eval_Emotional Typology_macroF1": 0.6717985439789951,
      "eval_Emotional Typology_precision": 0.6619812583668006,
      "eval_Emotional Typology_recall": 0.6875,
      "eval_Emotions_macroF1": 0.7263898811886428,
      "eval_Emotions_precision": 0.7332317073170731,
      "eval_Emotions_recall": 0.7208475100525827,
      "eval_Intelligibility_macroF1": 0.6780801872988002,
      "eval_Intelligibility_precision": 0.7053571428571428,
      "eval_Intelligibility_recall": 0.670278078728783,
      "eval_Intelligible Organization_macroF1": 0.6212121212121213,
      "eval_Intelligible Organization_precision": 0.6037228938783343,
      "eval_Intelligible Organization_recall": 0.6566212691973341,
      "eval_Intelligible Position_macroF1": 0.6759941089837997,
      "eval_Intelligible Position_precision": 0.6702053602506091,
      "eval_Intelligible Position_recall": 0.6832833583208395,
      "eval_Intelligible Relevance_macroF1": 0.6514073348506237,
      "eval_Intelligible Relevance_precision": 0.6504120879120879,
      "eval_Intelligible Relevance_recall": 0.6524538809606684,
      "eval_Not classified_macroF1": 0.48598130841121495,
      "eval_Not classified_precision": 0.4928909952606635,
      "eval_Not classified_recall": 0.4792626728110599,
      "eval_Orthography_macroF1": 0.6974621827728466,
      "eval_Orthography_precision": 0.6722488038277512,
      "eval_Orthography_recall": 0.7334905660377358,
      "eval_Other_macroF1": 0.6559274319674695,
      "eval_Other_precision": 0.6498397435897436,
      "eval_Other_recall": 0.6626794258373205,
      "eval_loss": 1.1269534826278687,
      "eval_mean_F1": 0.681284212993957,
      "eval_mean_precision": 0.6804231584826539,
      "eval_mean_recall": 0.690050046363838,
      "eval_runtime": 2.3748,
      "eval_samples_per_second": 92.64,
      "eval_steps_per_second": 5.895,
      "step": 1250
    },
    {
      "epoch": 15.62,
      "learning_rate": 8.048611111111111e-07,
      "loss": 0.4806,
      "step": 1500
    },
    {
      "epoch": 15.62,
      "eval_Appropriateness_macroF1": 0.7136304469100602,
      "eval_Appropriateness_precision": 0.7265263157894737,
      "eval_Appropriateness_recall": 0.7259744623655914,
      "eval_Commitment_macroF1": 0.7376972530683812,
      "eval_Commitment_precision": 0.7385510510510511,
      "eval_Commitment_recall": 0.7368837946137359,
      "eval_Committed Openness_macroF1": 0.6961904761904762,
      "eval_Committed Openness_precision": 0.6914105886708626,
      "eval_Committed Openness_recall": 0.7034632034632035,
      "eval_Committed Seriousness_macroF1": 0.8271798900235664,
      "eval_Committed Seriousness_precision": 0.8125,
      "eval_Committed Seriousness_recall": 0.8437843784378438,
      "eval_Emotional Intensity_macroF1": 0.715986019710948,
      "eval_Emotional Intensity_precision": 0.7007193409908342,
      "eval_Emotional Intensity_recall": 0.7402777777777778,
      "eval_Emotional Typology_macroF1": 0.6764705882352942,
      "eval_Emotional Typology_precision": 0.6652874564459931,
      "eval_Emotional Typology_recall": 0.6960227272727273,
      "eval_Emotions_macroF1": 0.7250000000000001,
      "eval_Emotions_precision": 0.7274449942099168,
      "eval_Emotions_recall": 0.7227549231879575,
      "eval_Intelligibility_macroF1": 0.655406195207481,
      "eval_Intelligibility_precision": 0.6635937957272462,
      "eval_Intelligibility_recall": 0.6514084507042254,
      "eval_Intelligible Organization_macroF1": 0.6212121212121213,
      "eval_Intelligible Organization_precision": 0.6037228938783343,
      "eval_Intelligible Organization_recall": 0.6566212691973341,
      "eval_Intelligible Position_macroF1": 0.6520167212744323,
      "eval_Intelligible Position_precision": 0.6427083333333333,
      "eval_Intelligible Position_recall": 0.6711644177911045,
      "eval_Intelligible Relevance_macroF1": 0.6329781508987191,
      "eval_Intelligible Relevance_precision": 0.626731371954302,
      "eval_Intelligible Relevance_recall": 0.6454345051630119,
      "eval_Not classified_macroF1": 0.48598130841121495,
      "eval_Not classified_precision": 0.4928909952606635,
      "eval_Not classified_recall": 0.4792626728110599,
      "eval_Orthography_macroF1": 0.6524486571879936,
      "eval_Orthography_precision": 0.638095238095238,
      "eval_Orthography_recall": 0.6709905660377358,
      "eval_Other_macroF1": 0.6249573815206273,
      "eval_Other_precision": 0.6309523809523809,
      "eval_Other_recall": 0.6196172248803828,
      "eval_loss": 1.1886050701141357,
      "eval_mean_F1": 0.6726539435608083,
      "eval_mean_precision": 0.6686524825971165,
      "eval_mean_recall": 0.6831185981216922,
      "eval_runtime": 2.3741,
      "eval_samples_per_second": 92.666,
      "eval_steps_per_second": 5.897,
      "step": 1500
    },
    {
      "epoch": 18.23,
      "eval_Appropriateness_macroF1": 0.7089947089947091,
      "eval_Appropriateness_precision": 0.7166666666666667,
      "eval_Appropriateness_recall": 0.7184139784946236,
      "eval_Commitment_macroF1": 0.7352832284339134,
      "eval_Commitment_precision": 0.7337931034482759,
      "eval_Commitment_recall": 0.7369303885937937,
      "eval_Committed Openness_macroF1": 0.684019684019684,
      "eval_Committed Openness_precision": 0.6788211788211789,
      "eval_Committed Openness_recall": 0.6937229437229437,
      "eval_Committed Seriousness_macroF1": 0.8075,
      "eval_Committed Seriousness_precision": 0.7828282828282829,
      "eval_Committed Seriousness_recall": 0.8388338833883389,
      "eval_Emotional Intensity_macroF1": 0.715986019710948,
      "eval_Emotional Intensity_precision": 0.7007193409908342,
      "eval_Emotional Intensity_recall": 0.7402777777777778,
      "eval_Emotional Typology_macroF1": 0.6764705882352942,
      "eval_Emotional Typology_precision": 0.6652874564459931,
      "eval_Emotional Typology_recall": 0.6960227272727273,
      "eval_Emotions_macroF1": 0.7293122856619283,
      "eval_Emotions_precision": 0.7331843337590465,
      "eval_Emotions_recall": 0.7258995772760078,
      "eval_Intelligibility_macroF1": 0.666206896551724,
      "eval_Intelligibility_precision": 0.670045045045045,
      "eval_Intelligibility_recall": 0.6635969664138678,
      "eval_Intelligible Organization_macroF1": 0.6464646464646464,
      "eval_Intelligible Organization_precision": 0.6248320859719823,
      "eval_Intelligible Organization_recall": 0.6884960880904085,
      "eval_Intelligible Position_macroF1": 0.6686656261124346,
      "eval_Intelligible Position_precision": 0.6573200992555832,
      "eval_Intelligible Position_recall": 0.6980259870064968,
      "eval_Intelligible Relevance_macroF1": 0.6294346773344563,
      "eval_Intelligible Relevance_precision": 0.6230611647644132,
      "eval_Intelligible Relevance_recall": 0.646362687086669,
      "eval_Not classified_macroF1": 0.48598130841121495,
      "eval_Not classified_precision": 0.4928909952606635,
      "eval_Not classified_recall": 0.4792626728110599,
      "eval_Orthography_macroF1": 0.6974621827728466,
      "eval_Orthography_precision": 0.6722488038277512,
      "eval_Orthography_recall": 0.7334905660377358,
      "eval_Other_macroF1": 0.6559274319674695,
      "eval_Other_precision": 0.6498397435897436,
      "eval_Other_recall": 0.6626794258373205,
      "eval_loss": 1.1870639324188232,
      "eval_mean_F1": 0.6791220917622336,
      "eval_mean_precision": 0.6715384500482472,
      "eval_mean_recall": 0.6944296907006979,
      "eval_runtime": 2.3724,
      "eval_samples_per_second": 92.734,
      "eval_steps_per_second": 5.901,
      "step": 1750
    }
  ],
  "max_steps": 1920,
  "num_train_epochs": 20,
  "total_flos": 9734558105656692.0,
  "trial_name": null,
  "trial_params": null
}