{
    "epoch": 4.000946297610598,
    "eval_accuracy": 0.6889576471371062,
    "eval_animal_abuse/accuracy": 0.994510430182653,
    "eval_animal_abuse/f1": 0.7730398899587345,
    "eval_animal_abuse/fpr": 0.0034328408440749783,
    "eval_animal_abuse/precision": 0.7336814621409922,
    "eval_animal_abuse/recall": 0.8168604651162791,
    "eval_animal_abuse/threshold": 0.5,
    "eval_child_abuse/accuracy": 0.9964234620886981,
    "eval_child_abuse/f1": 0.6570972886762361,
    "eval_child_abuse/fpr": 0.0014720396112477183,
    "eval_child_abuse/precision": 0.7006802721088435,
    "eval_child_abuse/recall": 0.6186186186186187,
    "eval_child_abuse/threshold": 0.5,
    "eval_controversial_topics,politics/accuracy": 0.9715041421299531,
    "eval_controversial_topics,politics/f1": 0.46850760161340366,
    "eval_controversial_topics,politics/fpr": 0.010742723778143858,
    "eval_controversial_topics,politics/precision": 0.5467052860246199,
    "eval_controversial_topics,politics/recall": 0.40988056460369166,
    "eval_controversial_topics,politics/threshold": 0.5,
    "eval_discrimination,stereotype,injustice/accuracy": 0.9564327777223276,
    "eval_discrimination,stereotype,injustice/f1": 0.7228864670405248,
    "eval_discrimination,stereotype,injustice/fpr": 0.02268044963313691,
    "eval_discrimination,stereotype,injustice/precision": 0.7313209162920146,
    "eval_discrimination,stereotype,injustice/recall": 0.7146443514644352,
    "eval_discrimination,stereotype,injustice/threshold": 0.5,
    "eval_drug_abuse,weapons,banned_substance/accuracy": 0.974199021858469,
    "eval_drug_abuse,weapons,banned_substance/f1": 0.7740713765477057,
    "eval_drug_abuse,weapons,banned_substance/fpr": 0.014490198843604543,
    "eval_drug_abuse,weapons,banned_substance/precision": 0.7637252083932164,
    "eval_drug_abuse,weapons,banned_substance/recall": 0.7847017129356173,
    "eval_drug_abuse,weapons,banned_substance/threshold": 0.5,
    "eval_financial_crime,property_crime,theft/accuracy": 0.9601091259939448,
    "eval_financial_crime,property_crime,theft/f1": 0.8050723459600065,
    "eval_financial_crime,property_crime,theft/fpr": 0.02762471665775938,
    "eval_financial_crime,property_crime,theft/precision": 0.7676329251278872,
    "eval_financial_crime,property_crime,theft/recall": 0.8463510511023756,
    "eval_financial_crime,property_crime,theft/threshold": 0.5,
    "eval_flagged/accuracy": 0.8562065409056127,
    "eval_flagged/aucpr": 0.9090708301585808,
    "eval_flagged/f1": 0.8688912482936447,
    "eval_flagged/fpr": 0.14386978209503754,
    "eval_flagged/precision": 0.8818929154222729,
    "eval_flagged/recall": 0.8562673761621477,
    "eval_hate_speech,offensive_language/accuracy": 0.9506271417639818,
    "eval_hate_speech,offensive_language/f1": 0.7009873060648801,
    "eval_hate_speech,offensive_language/fpr": 0.019422620135209172,
    "eval_hate_speech,offensive_language/precision": 0.765962131219727,
    "eval_hate_speech,offensive_language/recall": 0.6461738484398217,
    "eval_hate_speech,offensive_language/threshold": 0.5,
    "eval_loss": 0.08228794485330582,
    "eval_macro_f1": 0.6418844118541835,
    "eval_macro_precision": 0.704528612500405,
    "eval_macro_recall": 0.6272299800362929,
    "eval_micro_f1": 0.7567282129720675,
    "eval_micro_precision": 0.7753502863859401,
    "eval_micro_recall": 0.7389796753027367,
    "eval_misinformation_regarding_ethics,laws_and_safety/accuracy": 0.9878730412216788,
    "eval_misinformation_regarding_ethics,laws_and_safety/f1": 0.07369758576874205,
    "eval_misinformation_regarding_ethics,laws_and_safety/fpr": 0.00045467558055335624,
    "eval_misinformation_regarding_ethics,laws_and_safety/precision": 0.5178571428571429,
    "eval_misinformation_regarding_ethics,laws_and_safety/recall": 0.03967168262653899,
    "eval_misinformation_regarding_ethics,laws_and_safety/threshold": 0.5,
    "eval_non_violent_unethical_behavior/accuracy": 0.8879628705459627,
    "eval_non_violent_unethical_behavior/f1": 0.6949590108247656,
    "eval_non_violent_unethical_behavior/fpr": 0.05109201129380491,
    "eval_non_violent_unethical_behavior/precision": 0.7571301687555512,
    "eval_non_violent_unethical_behavior/recall": 0.642223338355935,
    "eval_non_violent_unethical_behavior/threshold": 0.5,
    "eval_privacy_violation/accuracy": 0.9808530458794956,
    "eval_privacy_violation/f1": 0.8130583076173461,
    "eval_privacy_violation/fpr": 0.012038916497515203,
    "eval_privacy_violation/precision": 0.784393607019743,
    "eval_privacy_violation/recall": 0.8438975050573162,
    "eval_privacy_violation/threshold": 0.5,
    "eval_runtime": 598.0644,
    "eval_samples_per_second": 100.514,
    "eval_self_harm/accuracy": 0.9965399075090661,
    "eval_self_harm/f1": 0.7360406091370558,
    "eval_self_harm/fpr": 0.0014739380946000244,
    "eval_self_harm/precision": 0.7671957671957672,
    "eval_self_harm/recall": 0.7073170731707317,
    "eval_self_harm/threshold": 0.5,
    "eval_sexually_explicit,adult_content/accuracy": 0.984296503310377,
    "eval_sexually_explicit,adult_content/f1": 0.678254942058623,
    "eval_sexually_explicit,adult_content/fpr": 0.00838631598684097,
    "eval_sexually_explicit,adult_content/precision": 0.6691324815063887,
    "eval_sexually_explicit,adult_content/recall": 0.6876295784381479,
    "eval_sexually_explicit,adult_content/threshold": 0.5,
    "eval_steps_per_second": 1.572,
    "eval_terrorism,organized_crime/accuracy": 0.9920817114149783,
    "eval_terrorism,organized_crime/f1": 0.23225806451612904,
    "eval_terrorism,organized_crime/fpr": 0.001123538980094912,
    "eval_terrorism,organized_crime/precision": 0.5179856115107914,
    "eval_terrorism,organized_crime/recall": 0.1496881496881497,
    "eval_terrorism,organized_crime/threshold": 0.5,
    "eval_violence,aiding_and_abetting,incitement/accuracy": 0.9220980137738297,
    "eval_violence,aiding_and_abetting,incitement/f1": 0.8564509701744168,
    "eval_violence,aiding_and_abetting,incitement/fpr": 0.06031004940845822,
    "eval_violence,aiding_and_abetting,incitement/precision": 0.8399975948529854,
    "eval_violence,aiding_and_abetting,incitement/recall": 0.8735617808904452,
    "eval_violence,aiding_and_abetting,incitement/threshold": 0.5,
    "test_accuracy": 0.6766379207090669,
    "test_animal_abuse/accuracy": 0.9931578632171517,
    "test_animal_abuse/f1": 0.7676664972038637,
    "test_animal_abuse/fpr": 0.005037019055710637,
    "test_animal_abuse/precision": 0.6945722171113156,
    "test_animal_abuse/recall": 0.8579545454545454,
    "test_animal_abuse/threshold": 0.5,
    "test_child_abuse/accuracy": 0.9963169241825368,
    "test_child_abuse/f1": 0.6070287539936102,
    "test_child_abuse/fpr": 0.0012642986152919908,
    "test_child_abuse/precision": 0.6934306569343066,
    "test_child_abuse/recall": 0.5397727272727273,
    "test_child_abuse/threshold": 0.5,
    "test_controversial_topics,politics/accuracy": 0.9715085638998683,
    "test_controversial_topics,politics/f1": 0.45519610649871173,
    "test_controversial_topics,politics/fpr": 0.011352768779885837,
    "test_controversial_topics,politics/precision": 0.519268451992162,
    "test_controversial_topics,politics/recall": 0.40519877675840976,
    "test_controversial_topics,politics/threshold": 0.5,
    "test_discrimination,stereotype,injustice/accuracy": 0.9543957360162894,
    "test_discrimination,stereotype,injustice/f1": 0.7218772826880935,
    "test_discrimination,stereotype,injustice/fpr": 0.023755877742946668,
    "test_discrimination,stereotype,injustice/precision": 0.7309541420118343,
    "test_discrimination,stereotype,injustice/recall": 0.713023088023088,
    "test_discrimination,stereotype,injustice/threshold": 0.5,
    "test_drug_abuse,weapons,banned_substance/accuracy": 0.9734848484848485,
    "test_drug_abuse,weapons,banned_substance/f1": 0.7634566582075598,
    "test_drug_abuse,weapons,banned_substance/fpr": 0.014630821418381233,
    "test_drug_abuse,weapons,banned_substance/precision": 0.755884686590849,
    "test_drug_abuse,weapons,banned_substance/recall": 0.7711818672423097,
    "test_drug_abuse,weapons,banned_substance/threshold": 0.5,
    "test_financial_crime,property_crime,theft/accuracy": 0.9579590370104204,
    "test_financial_crime,property_crime,theft/f1": 0.7996575342465754,
    "test_financial_crime,property_crime,theft/fpr": 0.0271945610877824,
    "test_financial_crime,property_crime,theft/precision": 0.7744610281923715,
    "test_financial_crime,property_crime,theft/recall": 0.8265486725663717,
    "test_financial_crime,property_crime,theft/threshold": 0.5,
    "test_flagged/accuracy": 0.8487393699844292,
    "test_flagged/aucpr": 0.9047138471115324,
    "test_flagged/f1": 0.8630417395312266,
    "test_flagged/fpr": 0.15492622560685337,
    "test_flagged/precision": 0.8747698480309983,
    "test_flagged/recall": 0.8516239499170635,
    "test_hate_speech,offensive_language/accuracy": 0.9497844053180021,
    "test_hate_speech,offensive_language/f1": 0.686307519640853,
    "test_hate_speech,offensive_language/fpr": 0.017700424021299645,
    "test_hate_speech,offensive_language/precision": 0.7730720606826802,
    "test_hate_speech,offensive_language/recall": 0.6170534813319879,
    "test_hate_speech,offensive_language/threshold": 0.5,
    "test_loss": 0.08679678291082382,
    "test_macro_f1": 0.6252685023565255,
    "test_macro_precision": 0.6847558918566842,
    "test_macro_recall": 0.6093259557964672,
    "test_micro_f1": 0.7468949012290593,
    "test_micro_precision": 0.7705170480223624,
    "test_micro_recall": 0.724678064292686,
    "test_misinformation_regarding_ethics,laws_and_safety/accuracy": 0.987633249490957,
    "test_misinformation_regarding_ethics,laws_and_safety/f1": 0.05707762557077625,
    "test_misinformation_regarding_ethics,laws_and_safety/fpr": 0.0005304959379168174,
    "test_misinformation_regarding_ethics,laws_and_safety/precision": 0.4166666666666667,
    "test_misinformation_regarding_ethics,laws_and_safety/recall": 0.030637254901960783,
    "test_misinformation_regarding_ethics,laws_and_safety/threshold": 0.5,
    "test_non_violent_unethical_behavior/accuracy": 0.8818571086357647,
    "test_non_violent_unethical_behavior/f1": 0.6774575924790517,
    "test_non_violent_unethical_behavior/fpr": 0.050999362507968556,
    "test_non_violent_unethical_behavior/precision": 0.7528845280276187,
    "test_non_violent_unethical_behavior/recall": 0.6157675731906672,
    "test_non_violent_unethical_behavior/threshold": 0.5,
    "test_privacy_violation/accuracy": 0.9793687866810397,
    "test_privacy_violation/f1": 0.8086111111111111,
    "test_privacy_violation/fpr": 0.01268442169778534,
    "test_privacy_violation/precision": 0.7837910608508347,
    "test_privacy_violation/recall": 0.8350545037292025,
    "test_privacy_violation/threshold": 0.5,
    "test_runtime": 664.4818,
    "test_samples_per_second": 100.517,
    "test_self_harm/accuracy": 0.9963917834471194,
    "test_self_harm/f1": 0.714792899408284,
    "test_self_harm/fpr": 0.0011910505367265691,
    "test_self_harm/precision": 0.7926509186351706,
    "test_self_harm/recall": 0.6508620689655172,
    "test_self_harm/threshold": 0.5,
    "test_sexually_explicit,adult_content/accuracy": 0.9838902862618277,
    "test_sexually_explicit,adult_content/f1": 0.6515544041450777,
    "test_sexually_explicit,adult_content/fpr": 0.009186954524575089,
    "test_sexually_explicit,adult_content/precision": 0.6264009962640099,
    "test_sexually_explicit,adult_content/recall": 0.6788124156545209,
    "test_sexually_explicit,adult_content/threshold": 0.5,
    "test_steps_per_second": 1.571,
    "test_terrorism,organized_crime/accuracy": 0.9913911845730028,
    "test_terrorism,organized_crime/f1": 0.1958041958041958,
    "test_terrorism,organized_crime/fpr": 0.001343680173923544,
    "test_terrorism,organized_crime/precision": 0.44025157232704404,
    "test_terrorism,organized_crime/recall": 0.12589928057553956,
    "test_terrorism,organized_crime/threshold": 0.5,
    "test_violence,aiding_and_abetting,incitement/accuracy": 0.9157533836387591,
    "test_violence,aiding_and_abetting,incitement/f1": 0.8472708519935944,
    "test_violence,aiding_and_abetting,incitement/fpr": 0.06457640343312376,
    "test_violence,aiding_and_abetting,incitement/precision": 0.8322934997067136,
    "test_violence,aiding_and_abetting,incitement/recall": 0.8627971254836927,
    "test_violence,aiding_and_abetting,incitement/threshold": 0.5,
    "total_flos": 0.0,
    "train_loss": 0.09237335174007019,
    "train_runtime": 91030.0055,
    "train_samples_per_second": 59.433,
    "train_steps_per_second": 0.929
}