Upload config.json with huggingface_hub
Browse files- config.json +16 -7
config.json
CHANGED
|
@@ -3,7 +3,7 @@
|
|
| 3 |
"input_dim": 2048,
|
| 4 |
"output_dim": 2,
|
| 5 |
"hidden_size": 2048,
|
| 6 | - "target_layer":
|
| 7 |
"num_layers": 22,
|
| 8 |
"base_model_name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
|
| 9 |
"label_mapping": {
|
|
@@ -18,16 +18,25 @@
|
|
| 18 |
},
|
| 19 |
"layer_selection": {
|
| 20 |
"candidates": [
|
| 21 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 22 |
],
|
| 23 | - "selected":
|
| 24 |
"accuracies": {
|
| 25 | - "
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 26 |
}
|
| 27 |
},
|
| 28 |
"performance": {
|
| 29 | - "
|
| 30 | - "best_epoch": 5
|
| 31 |
},
|
| 32 | - "trained_date": "2025-11-04
|
| 33 |
}
|
|
|
|
| 3 |
"input_dim": 2048,
|
| 4 |
"output_dim": 2,
|
| 5 |
"hidden_size": 2048,
|
| 6 | + "target_layer": 15,
|
| 7 |
"num_layers": 22,
|
| 8 |
"base_model_name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
|
| 9 |
"label_mapping": {
|
|
|
|
| 18 |
},
|
| 19 |
"layer_selection": {
|
| 20 |
"candidates": [
|
| 21 | + 11,
|
| 22 | + 12,
|
| 23 | + 13,
|
| 24 | + 14,
|
| 25 | + 15,
|
| 26 | + 16
|
| 27 |
],
|
| 28 | + "selected": 15,
|
| 29 |
"accuracies": {
|
| 30 | + "11": 0.8152321577072144,
|
| 31 | + "12": 0.8395493626594543,
|
| 32 | + "13": 0.8271411657333374,
|
| 33 | + "14": 0.8292092084884644,
|
| 34 | + "15": 0.8414747714996338,
|
| 35 | + "16": 0.8393354415893555
|
| 36 |
}
|
| 37 |
},
|
| 38 |
"performance": {
|
| 39 | + "best_epoch": 50
|
|
|
|
| 40 |
},
|
| 41 | + "trained_date": "2025-11-04 13:03:45"
|
| 42 |
}
|