```json
{
    "epoch": 4.000946297610598,
    "eval_accuracy": 0.6889576471371062,
    "eval_animal_abuse/accuracy": 0.994510430182653,
    "eval_animal_abuse/f1": 0.7730398899587345,
    "eval_animal_abuse/fpr": 0.0034328408440749783,
    "eval_animal_abuse/precision": 0.7336814621409922,
    "eval_animal_abuse/recall": 0.8168604651162791,
    "eval_animal_abuse/threshold": 0.5,
    "eval_child_abuse/accuracy": 0.9964234620886981,
    "eval_child_abuse/f1": 0.6570972886762361,
    "eval_child_abuse/fpr": 0.0014720396112477183,
    "eval_child_abuse/precision": 0.7006802721088435,
    "eval_child_abuse/recall": 0.6186186186186187,
    "eval_child_abuse/threshold": 0.5,
    "eval_controversial_topics,politics/accuracy": 0.9715041421299531,
    "eval_controversial_topics,politics/f1": 0.46850760161340366,
    "eval_controversial_topics,politics/fpr": 0.010742723778143858,
    "eval_controversial_topics,politics/precision": 0.5467052860246199,
    "eval_controversial_topics,politics/recall": 0.40988056460369166,
    "eval_controversial_topics,politics/threshold": 0.5,
    "eval_discrimination,stereotype,injustice/accuracy": 0.9564327777223276,
    "eval_discrimination,stereotype,injustice/f1": 0.7228864670405248,
    "eval_discrimination,stereotype,injustice/fpr": 0.02268044963313691,
    "eval_discrimination,stereotype,injustice/precision": 0.7313209162920146,
    "eval_discrimination,stereotype,injustice/recall": 0.7146443514644352,
    "eval_discrimination,stereotype,injustice/threshold": 0.5,
    "eval_drug_abuse,weapons,banned_substance/accuracy": 0.974199021858469,
    "eval_drug_abuse,weapons,banned_substance/f1": 0.7740713765477057,
    "eval_drug_abuse,weapons,banned_substance/fpr": 0.014490198843604543,
    "eval_drug_abuse,weapons,banned_substance/precision": 0.7637252083932164,
    "eval_drug_abuse,weapons,banned_substance/recall": 0.7847017129356173,
    "eval_drug_abuse,weapons,banned_substance/threshold": 0.5,
    "eval_financial_crime,property_crime,theft/accuracy": 0.9601091259939448,
    "eval_financial_crime,property_crime,theft/f1": 0.8050723459600065,
    "eval_financial_crime,property_crime,theft/fpr": 0.02762471665775938,
    "eval_financial_crime,property_crime,theft/precision": 0.7676329251278872,
    "eval_financial_crime,property_crime,theft/recall": 0.8463510511023756,
    "eval_financial_crime,property_crime,theft/threshold": 0.5,
    "eval_flagged/accuracy": 0.8562065409056127,
    "eval_flagged/aucpr": 0.9090708301585808,
    "eval_flagged/f1": 0.8688912482936447,
    "eval_flagged/fpr": 0.14386978209503754,
    "eval_flagged/precision": 0.8818929154222729,
    "eval_flagged/recall": 0.8562673761621477,
    "eval_hate_speech,offensive_language/accuracy": 0.9506271417639818,
    "eval_hate_speech,offensive_language/f1": 0.7009873060648801,
    "eval_hate_speech,offensive_language/fpr": 0.019422620135209172,
    "eval_hate_speech,offensive_language/precision": 0.765962131219727,
    "eval_hate_speech,offensive_language/recall": 0.6461738484398217,
    "eval_hate_speech,offensive_language/threshold": 0.5,
    "eval_loss": 0.08228794485330582,
    "eval_macro_f1": 0.6418844118541835,
    "eval_macro_precision": 0.704528612500405,
    "eval_macro_recall": 0.6272299800362929,
    "eval_micro_f1": 0.7567282129720675,
    "eval_micro_precision": 0.7753502863859401,
    "eval_micro_recall": 0.7389796753027367,
    "eval_misinformation_regarding_ethics,laws_and_safety/accuracy": 0.9878730412216788,
    "eval_misinformation_regarding_ethics,laws_and_safety/f1": 0.07369758576874205,
    "eval_misinformation_regarding_ethics,laws_and_safety/fpr": 0.00045467558055335624,
    "eval_misinformation_regarding_ethics,laws_and_safety/precision": 0.5178571428571429,
    "eval_misinformation_regarding_ethics,laws_and_safety/recall": 0.03967168262653899,
    "eval_misinformation_regarding_ethics,laws_and_safety/threshold": 0.5,
    "eval_non_violent_unethical_behavior/accuracy": 0.8879628705459627,
    "eval_non_violent_unethical_behavior/f1": 0.6949590108247656,
    "eval_non_violent_unethical_behavior/fpr": 0.05109201129380491,
    "eval_non_violent_unethical_behavior/precision": 0.7571301687555512,
    "eval_non_violent_unethical_behavior/recall": 0.642223338355935,
    "eval_non_violent_unethical_behavior/threshold": 0.5,
    "eval_privacy_violation/accuracy": 0.9808530458794956,
    "eval_privacy_violation/f1": 0.8130583076173461,
    "eval_privacy_violation/fpr": 0.012038916497515203,
    "eval_privacy_violation/precision": 0.784393607019743,
    "eval_privacy_violation/recall": 0.8438975050573162,
    "eval_privacy_violation/threshold": 0.5,
    "eval_runtime": 598.0644,
    "eval_samples_per_second": 100.514,
    "eval_self_harm/accuracy": 0.9965399075090661,
    "eval_self_harm/f1": 0.7360406091370558,
    "eval_self_harm/fpr": 0.0014739380946000244,
    "eval_self_harm/precision": 0.7671957671957672,
    "eval_self_harm/recall": 0.7073170731707317,
    "eval_self_harm/threshold": 0.5,
    "eval_sexually_explicit,adult_content/accuracy": 0.984296503310377,
    "eval_sexually_explicit,adult_content/f1": 0.678254942058623,
    "eval_sexually_explicit,adult_content/fpr": 0.00838631598684097,
    "eval_sexually_explicit,adult_content/precision": 0.6691324815063887,
    "eval_sexually_explicit,adult_content/recall": 0.6876295784381479,
    "eval_sexually_explicit,adult_content/threshold": 0.5,
    "eval_steps_per_second": 1.572,
    "eval_terrorism,organized_crime/accuracy": 0.9920817114149783,
    "eval_terrorism,organized_crime/f1": 0.23225806451612904,
    "eval_terrorism,organized_crime/fpr": 0.001123538980094912,
    "eval_terrorism,organized_crime/precision": 0.5179856115107914,
    "eval_terrorism,organized_crime/recall": 0.1496881496881497,
    "eval_terrorism,organized_crime/threshold": 0.5,
    "eval_violence,aiding_and_abetting,incitement/accuracy": 0.9220980137738297,
    "eval_violence,aiding_and_abetting,incitement/f1": 0.8564509701744168,
    "eval_violence,aiding_and_abetting,incitement/fpr": 0.06031004940845822,
    "eval_violence,aiding_and_abetting,incitement/precision": 0.8399975948529854,
    "eval_violence,aiding_and_abetting,incitement/recall": 0.8735617808904452,
    "eval_violence,aiding_and_abetting,incitement/threshold": 0.5
}
```
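These fields have the shape of a Hugging Face `Trainer.evaluate()` output with a custom `compute_metrics` for a multi-label safety classifier; the per-category keys match the BeaverTails harm taxonomy, and `eval_flagged` appears to be an any-category binary label scored separately (it reports `aucpr` instead of a fixed threshold). As a minimal, hypothetical sketch of how metrics in this layout could be reproduced, assuming sigmoid scores `probs` and binary targets `labels` as `(n_samples, n_labels)` NumPy arrays (the function and variable names here are illustrative, not from the source):

```python
import numpy as np
from sklearn.metrics import precision_recall_fscore_support

def multilabel_eval_metrics(probs, labels, names, threshold=0.5):
    """Per-label accuracy/precision/recall/F1/FPR plus macro/micro averages,
    mirroring the key layout of the JSON above (illustrative sketch)."""
    preds = (probs >= threshold).astype(int)
    out = {}
    # Per-label scores: average=None returns one value per label column.
    p, r, f1, _ = precision_recall_fscore_support(
        labels, preds, average=None, zero_division=0
    )
    for i, name in enumerate(names):
        tn = np.sum((preds[:, i] == 0) & (labels[:, i] == 0))
        fp = np.sum((preds[:, i] == 1) & (labels[:, i] == 0))
        out[f"{name}/accuracy"] = float(np.mean(preds[:, i] == labels[:, i]))
        out[f"{name}/precision"] = float(p[i])
        out[f"{name}/recall"] = float(r[i])
        out[f"{name}/f1"] = float(f1[i])
        out[f"{name}/fpr"] = float(fp / (fp + tn)) if (fp + tn) else 0.0
        out[f"{name}/threshold"] = threshold
    # Macro = unweighted mean over labels; micro = pooled over every
    # (sample, label) decision.
    for avg in ("macro", "micro"):
        p_a, r_a, f_a, _ = precision_recall_fscore_support(
            labels, preds, average=avg, zero_division=0
        )
        out[f"{avg}_precision"] = float(p_a)
        out[f"{avg}_recall"] = float(r_a)
        out[f"{avg}_f1"] = float(f_a)
    # Subset accuracy: a sample counts only if every label is correct.
    # This would explain why "eval_accuracy" (0.689) sits well below the
    # individual per-category accuracies.
    out["accuracy"] = float(np.mean(np.all(preds == labels, axis=1)))
    return out
```

The gap between `eval_macro_f1` (0.642) and `eval_micro_f1` (0.757) is consistent with this aggregation: macro averaging weights weak, rare categories such as `misinformation_regarding_ethics,laws_and_safety` (F1 0.074) and `terrorism,organized_crime` (F1 0.232) equally with the strong, common ones.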