bitlabsdb committed on
Commit
197d477
·
verified ·
1 Parent(s): b4e0b87

Upload trained model (Acc: 70.59%)

Browse files
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ training_dashboard.png filter=lfs diff=lfs merge=lfs -text
README.md ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ tags:
4
+ - fairsteer
5
+ - bias-detection
6
+ - tinyllama
7
+ library_name: pytorch
8
+ pipeline_tag: text-classification
9
+ ---
10
+
11
+ # BAD Classifier for FairSteer
12
+
13
+ Biased Activation Detection (BAD) classifier for TinyLlama-1.1B.
14
+
15
+ ## Artifacts
16
+ - **Model**: `pytorch_model.bin`
17
+ - **Scaler**: `scaler.pkl` (StandardScaler)
18
+ - **Config**: `config.json`
19
+
20
+ ## Stats
21
+ - **Balanced Accuracy**: 70.59%
22
+ - **Best Layer**: 14
23
+ - **Training Date**: 2025-11-22
config.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "input_dim": 2048,
3
+ "layer_idx": 14,
4
+ "base_model": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
5
+ "best_val_bal_acc": 0.7058823529411764,
6
+ "training_method": "GroupSplit + Standardized + Balanced",
7
+ "training_date": "2025-11-22T06:17:21.814381"
8
+ }
pytorch_model.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:ab0e56eb041389264683115dc61767582955e4c0ae6c0765ff371365f70a1648
3
+ size 10197
scaler.pkl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:528aeca9a4e03cc19f21fc5909fed0de91cc4aa03a3fd7369126fa7ced1e8d23
3
+ size 49615
training_dashboard.png ADDED

Git LFS Details

  • SHA256: 9c872d9e43b922c1161f455baee640c8b336ed85fbc0a43dfc966f74e15b8335
  • Pointer size: 131 Bytes
  • Size of remote file: 146 kB
training_history.json ADDED
@@ -0,0 +1,83 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "val_auc": [
3
+ 0.6211072664359861,
4
+ 0.6920415224913495,
5
+ 0.7136678200692042,
6
+ 0.7344290657439446,
7
+ 0.741349480968858,
8
+ 0.754325259515571,
9
+ 0.7525951557093425,
10
+ 0.7517301038062284,
11
+ 0.7482698961937717,
12
+ 0.7465397923875432,
13
+ 0.7508650519031141,
14
+ 0.7551903114186851,
15
+ 0.7577854671280277,
16
+ 0.7603806228373702,
17
+ 0.7603806228373703,
18
+ 0.7586505190311419,
19
+ 0.7595155709342561,
20
+ 0.7586505190311418,
21
+ 0.7577854671280276,
22
+ 0.7586505190311419,
23
+ 0.759515570934256,
24
+ 0.7586505190311418,
25
+ 0.7577854671280276,
26
+ 0.7577854671280276,
27
+ 0.7569204152249135
28
+ ],
29
+ "val_bal_acc": [
30
+ 0.6176470588235294,
31
+ 0.6470588235294118,
32
+ 0.6911764705882353,
33
+ 0.6764705882352942,
34
+ 0.6764705882352942,
35
+ 0.6764705882352942,
36
+ 0.6764705882352942,
37
+ 0.6764705882352942,
38
+ 0.6764705882352942,
39
+ 0.7058823529411764,
40
+ 0.7058823529411764,
41
+ 0.7058823529411764,
42
+ 0.7058823529411764,
43
+ 0.7058823529411764,
44
+ 0.7058823529411764,
45
+ 0.6911764705882353,
46
+ 0.6911764705882353,
47
+ 0.6911764705882353,
48
+ 0.6911764705882353,
49
+ 0.6911764705882353,
50
+ 0.6911764705882353,
51
+ 0.6911764705882353,
52
+ 0.6911764705882353,
53
+ 0.6911764705882353,
54
+ 0.6911764705882353
55
+ ],
56
+ "train_loss": [
57
+ 0.8083319664001465,
58
+ 0.6498552362124125,
59
+ 0.5912849505742391,
60
+ 0.5488456885019938,
61
+ 0.5162904063860575,
62
+ 0.49281708399454754,
63
+ 0.47320712606112164,
64
+ 0.4591151773929596,
65
+ 0.45622848471005756,
66
+ 0.450364351272583,
67
+ 0.44668323795000714,
68
+ 0.4440191388130188,
69
+ 0.4374308983484904,
70
+ 0.4289304514726003,
71
+ 0.43392103910446167,
72
+ 0.43235499660174054,
73
+ 0.42323625087738037,
74
+ 0.43220386902491253,
75
+ 0.42419079939524335,
76
+ 0.4267427921295166,
77
+ 0.4278755187988281,
78
+ 0.42331310113271076,
79
+ 0.4220277667045593,
80
+ 0.4177366296450297,
81
+ 0.42305230100949603
82
+ ]
83
+ }