{
    "test_accuracy": 0.6766379207090669,
    "test_animal_abuse/accuracy": 0.9931578632171517,
    "test_animal_abuse/f1": 0.7676664972038637,
    "test_animal_abuse/fpr": 0.005037019055710637,
    "test_animal_abuse/precision": 0.6945722171113156,
    "test_animal_abuse/recall": 0.8579545454545454,
    "test_animal_abuse/threshold": 0.5,
    "test_child_abuse/accuracy": 0.9963169241825368,
    "test_child_abuse/f1": 0.6070287539936102,
    "test_child_abuse/fpr": 0.0012642986152919908,
    "test_child_abuse/precision": 0.6934306569343066,
    "test_child_abuse/recall": 0.5397727272727273,
    "test_child_abuse/threshold": 0.5,
    "test_controversial_topics,politics/accuracy": 0.9715085638998683,
    "test_controversial_topics,politics/f1": 0.45519610649871173,
    "test_controversial_topics,politics/fpr": 0.011352768779885837,
    "test_controversial_topics,politics/precision": 0.519268451992162,
    "test_controversial_topics,politics/recall": 0.40519877675840976,
    "test_controversial_topics,politics/threshold": 0.5,
    "test_discrimination,stereotype,injustice/accuracy": 0.9543957360162894,
    "test_discrimination,stereotype,injustice/f1": 0.7218772826880935,
    "test_discrimination,stereotype,injustice/fpr": 0.023755877742946668,
    "test_discrimination,stereotype,injustice/precision": 0.7309541420118343,
    "test_discrimination,stereotype,injustice/recall": 0.713023088023088,
    "test_discrimination,stereotype,injustice/threshold": 0.5,
    "test_drug_abuse,weapons,banned_substance/accuracy": 0.9734848484848485,
    "test_drug_abuse,weapons,banned_substance/f1": 0.7634566582075598,
    "test_drug_abuse,weapons,banned_substance/fpr": 0.014630821418381233,
    "test_drug_abuse,weapons,banned_substance/precision": 0.755884686590849,
    "test_drug_abuse,weapons,banned_substance/recall": 0.7711818672423097,
    "test_drug_abuse,weapons,banned_substance/threshold": 0.5,
    "test_financial_crime,property_crime,theft/accuracy": 0.9579590370104204,
    "test_financial_crime,property_crime,theft/f1": 0.7996575342465754,
    "test_financial_crime,property_crime,theft/fpr": 0.0271945610877824,
    "test_financial_crime,property_crime,theft/precision": 0.7744610281923715,
    "test_financial_crime,property_crime,theft/recall": 0.8265486725663717,
    "test_financial_crime,property_crime,theft/threshold": 0.5,
    "test_flagged/accuracy": 0.8487393699844292,
    "test_flagged/aucpr": 0.9047138471115324,
    "test_flagged/f1": 0.8630417395312266,
    "test_flagged/fpr": 0.15492622560685337,
    "test_flagged/precision": 0.8747698480309983,
    "test_flagged/recall": 0.8516239499170635,
    "test_hate_speech,offensive_language/accuracy": 0.9497844053180021,
    "test_hate_speech,offensive_language/f1": 0.686307519640853,
    "test_hate_speech,offensive_language/fpr": 0.017700424021299645,
    "test_hate_speech,offensive_language/precision": 0.7730720606826802,
    "test_hate_speech,offensive_language/recall": 0.6170534813319879,
    "test_hate_speech,offensive_language/threshold": 0.5,
    "test_loss": 0.08679678291082382,
    "test_macro_f1": 0.6252685023565255,
    "test_macro_precision": 0.6847558918566842,
    "test_macro_recall": 0.6093259557964672,
    "test_micro_f1": 0.7468949012290593,
    "test_micro_precision": 0.7705170480223624,
    "test_micro_recall": 0.724678064292686,
    "test_misinformation_regarding_ethics,laws_and_safety/accuracy": 0.987633249490957,
    "test_misinformation_regarding_ethics,laws_and_safety/f1": 0.05707762557077625,
    "test_misinformation_regarding_ethics,laws_and_safety/fpr": 0.0005304959379168174,
    "test_misinformation_regarding_ethics,laws_and_safety/precision": 0.4166666666666667,
    "test_misinformation_regarding_ethics,laws_and_safety/recall": 0.030637254901960783,
    "test_misinformation_regarding_ethics,laws_and_safety/threshold": 0.5,
    "test_non_violent_unethical_behavior/accuracy": 0.8818571086357647,
    "test_non_violent_unethical_behavior/f1": 0.6774575924790517,
    "test_non_violent_unethical_behavior/fpr": 0.050999362507968556,
    "test_non_violent_unethical_behavior/precision": 0.7528845280276187,
    "test_non_violent_unethical_behavior/recall": 0.6157675731906672,
    "test_non_violent_unethical_behavior/threshold": 0.5,
    "test_privacy_violation/accuracy": 0.9793687866810397,
    "test_privacy_violation/f1": 0.8086111111111111,
    "test_privacy_violation/fpr": 0.01268442169778534,
    "test_privacy_violation/precision": 0.7837910608508347,
    "test_privacy_violation/recall": 0.8350545037292025,
    "test_privacy_violation/threshold": 0.5,
    "test_runtime": 664.4818,
    "test_samples_per_second": 100.517,
    "test_self_harm/accuracy": 0.9963917834471194,
    "test_self_harm/f1": 0.714792899408284,
    "test_self_harm/fpr": 0.0011910505367265691,
    "test_self_harm/precision": 0.7926509186351706,
    "test_self_harm/recall": 0.6508620689655172,
    "test_self_harm/threshold": 0.5,
    "test_sexually_explicit,adult_content/accuracy": 0.9838902862618277,
    "test_sexually_explicit,adult_content/f1": 0.6515544041450777,
    "test_sexually_explicit,adult_content/fpr": 0.009186954524575089,
    "test_sexually_explicit,adult_content/precision": 0.6264009962640099,
    "test_sexually_explicit,adult_content/recall": 0.6788124156545209,
    "test_sexually_explicit,adult_content/threshold": 0.5,
    "test_steps_per_second": 1.571,
    "test_terrorism,organized_crime/accuracy": 0.9913911845730028,
    "test_terrorism,organized_crime/f1": 0.1958041958041958,
    "test_terrorism,organized_crime/fpr": 0.001343680173923544,
    "test_terrorism,organized_crime/precision": 0.44025157232704404,
    "test_terrorism,organized_crime/recall": 0.12589928057553956,
    "test_terrorism,organized_crime/threshold": 0.5,
    "test_violence,aiding_and_abetting,incitement/accuracy": 0.9157533836387591,
    "test_violence,aiding_and_abetting,incitement/f1": 0.8472708519935944,
    "test_violence,aiding_and_abetting,incitement/fpr": 0.06457640343312376,
    "test_violence,aiding_and_abetting,incitement/precision": 0.8322934997067136,
    "test_violence,aiding_and_abetting,incitement/recall": 0.8627971254836927,
    "test_violence,aiding_and_abetting,incitement/threshold": 0.5
}