defefekt committed on
Commit
8ee668b
·
verified ·
1 Parent(s): c735c89

ViTAMIn-O Auto-Upload: Seed 42

Browse files
README.md CHANGED
@@ -1,3 +1,28 @@
1
- ---
2
- license: mit
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - image-classification
4
+ - biology
5
+ - organoids
6
+ - vitamin-o
7
+ library_name: transformers
8
+ ---
9
+
10
+ # ViTAMIn-O Custom Organoid Model
11
+
12
+ This model was trained using the **ColabViTAMIn-O** code-free infrastructure.
13
+
14
+ ## Model Details
15
+ * **Base Architecture:** `ViTAMIn-O/ViTAMIn-O_base_model`
16
+ * **Task Type:** `Classification`
17
+ * **Repository:** `ViTAMIn-O/PDLO_classifier`
18
+
19
+ ## Training Hyperparameters
20
+ * **Seed:** `42`
21
+ * **Epochs:** `20`
22
+ * **Batch Size:** `32`
23
+
24
+ ## Evaluation Metrics (Test Set)
25
+ * **Accuracy:** `0.9250`
26
+ * **Global AUROC:** `0.9881`
27
+
28
+ *This model card was auto-generated by the ViTAMIn-O pipeline to ensure reproducibility and open-science transparency.*
best_metric_tracker.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"best_auroc": 0.988125, "seed": 42}
config.json ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "SwinForImageClassification"
4
+ ],
5
+ "attention_probs_dropout_prob": 0.0,
6
+ "depths": [
7
+ 2,
8
+ 2,
9
+ 18,
10
+ 2
11
+ ],
12
+ "drop_path_rate": 0.1,
13
+ "dtype": "float32",
14
+ "embed_dim": 192,
15
+ "encoder_stride": 32,
16
+ "hidden_act": "gelu",
17
+ "hidden_dropout_prob": 0.0,
18
+ "hidden_size": 1536,
19
+ "id2label": {
20
+ "0": "satisfactory",
21
+ "1": "unsatisfactory"
22
+ },
23
+ "image_size": 224,
24
+ "initializer_range": 0.02,
25
+ "label2id": {
26
+ "satisfactory": 0,
27
+ "unsatisfactory": 1
28
+ },
29
+ "layer_norm_eps": 1e-05,
30
+ "mlp_ratio": 4.0,
31
+ "model_type": "swin",
32
+ "num_channels": 3,
33
+ "num_heads": [
34
+ 6,
35
+ 12,
36
+ 24,
37
+ 48
38
+ ],
39
+ "num_layers": 4,
40
+ "out_features": [
41
+ "stage4"
42
+ ],
43
+ "out_indices": [
44
+ 4
45
+ ],
46
+ "patch_size": 4,
47
+ "path_norm": true,
48
+ "qkv_bias": true,
49
+ "stage_names": [
50
+ "stem",
51
+ "stage1",
52
+ "stage2",
53
+ "stage3",
54
+ "stage4"
55
+ ],
56
+ "transformers_version": "5.0.0",
57
+ "use_absolute_embeddings": false,
58
+ "window_size": 7
59
+ }
history_seed_42.csv ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ epoch,train_loss,val_loss
2
+ 1,0.47701140120625496,0.27136044204235077
3
+ 2,0.23053773492574692,0.14618618041276932
4
+ 3,0.15947299357503653,0.18215838074684143
5
+ 4,0.14106942061334848,0.10485737025737762
6
+ 5,0.10757852531969547,0.06512117385864258
7
+ 6,0.08166468795388937,0.06458092108368874
8
+ 7,0.05907565797679126,0.04530151002109051
9
+ 8,0.05296348361298442,0.08381620422005653
10
+ 9,0.030643414589576423,0.05677217058837414
11
+ 10,0.02560428116703406,0.046924264170229435
12
+ 11,0.02832902316004038,0.04248674865812063
13
+ 12,0.04046634410042316,0.04350257199257612
14
+ 13,0.039843140286393464,0.04167834855616093
15
+ 14,0.04443829064257443,0.04984385333955288
16
+ 15,0.0256515754153952,0.042679313570261
17
+ 16,0.01758114353287965,0.04563109390437603
18
+ 17,0.018423314642859623,0.04632285609841347
19
+ 18,0.012032014259602875,0.05606958921998739
20
+ 19,0.02342665100877639,0.057493677362799644
21
+ 20,0.013762574351858348,0.028420954942703247
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9edd84bb8d05044392bd402b8c1b4ea2c5bd82ef5c446cce46db4fcb63e1a615
3
+ size 780512112
predictions_seed_42.csv ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ true_label,pred_label,prob_satisfactory,prob_unsatisfactory
2
+ satisfactory,satisfactory,0.99975115,0.00024879188
3
+ satisfactory,satisfactory,0.999995,4.9860037e-06
4
+ satisfactory,satisfactory,0.9999865,1.3447506e-05
5
+ satisfactory,satisfactory,0.99999833,1.6752168e-06
6
+ satisfactory,satisfactory,0.99990296,9.7011376e-05
7
+ satisfactory,satisfactory,0.999845,0.00015495297
8
+ satisfactory,satisfactory,0.9999678,3.221187e-05
9
+ satisfactory,satisfactory,0.99982387,0.00017614503
10
+ satisfactory,satisfactory,0.99612844,0.0038714865
11
+ satisfactory,satisfactory,0.9111792,0.088820815
12
+ satisfactory,satisfactory,0.9998123,0.00018768643
13
+ satisfactory,satisfactory,0.9996138,0.00038619278
14
+ satisfactory,satisfactory,0.9998698,0.00013016172
15
+ satisfactory,satisfactory,0.9997104,0.00028954403
16
+ satisfactory,satisfactory,0.9999722,2.7753875e-05
17
+ satisfactory,satisfactory,0.9998447,0.00015533656
18
+ satisfactory,satisfactory,0.9999825,1.7504377e-05
19
+ satisfactory,satisfactory,0.9995271,0.0004729762
20
+ satisfactory,satisfactory,0.99984634,0.00015365175
21
+ satisfactory,satisfactory,0.99323976,0.006760193
22
+ satisfactory,satisfactory,0.9999471,5.2959807e-05
23
+ satisfactory,satisfactory,0.9302152,0.069784835
24
+ satisfactory,satisfactory,0.99997544,2.45773e-05
25
+ satisfactory,satisfactory,0.99957603,0.0004239332
26
+ satisfactory,satisfactory,0.9859832,0.0140167605
27
+ satisfactory,unsatisfactory,0.088003166,0.91199684
28
+ satisfactory,satisfactory,0.9993351,0.00066482223
29
+ satisfactory,satisfactory,0.9528593,0.04714072
30
+ satisfactory,satisfactory,0.9995402,0.00045984852
31
+ satisfactory,satisfactory,0.997577,0.0024230187
32
+ satisfactory,satisfactory,0.5349651,0.46503487
33
+ satisfactory,satisfactory,0.99999213,7.889113e-06
34
+ satisfactory,unsatisfactory,0.4313945,0.5686055
35
+ satisfactory,satisfactory,0.9997942,0.00020581657
36
+ satisfactory,satisfactory,0.5057574,0.49424264
37
+ satisfactory,satisfactory,0.99994934,5.062743e-05
38
+ satisfactory,satisfactory,0.9517021,0.04829788
39
+ satisfactory,satisfactory,0.99940085,0.0005991365
40
+ satisfactory,satisfactory,0.98058194,0.019418033
41
+ satisfactory,satisfactory,0.99962807,0.00037187806
42
+ unsatisfactory,satisfactory,0.79206705,0.20793296
43
+ unsatisfactory,unsatisfactory,0.0047677523,0.9952323
44
+ unsatisfactory,satisfactory,0.85552657,0.14447348
45
+ unsatisfactory,unsatisfactory,0.0009917954,0.99900824
46
+ unsatisfactory,unsatisfactory,0.00016868734,0.9998313
47
+ unsatisfactory,unsatisfactory,0.00013977822,0.99986017
48
+ unsatisfactory,unsatisfactory,0.050983462,0.9490166
49
+ unsatisfactory,unsatisfactory,0.000707345,0.9992926
50
+ unsatisfactory,unsatisfactory,0.000112059475,0.99988794
51
+ unsatisfactory,unsatisfactory,6.7700625e-05,0.9999323
52
+ unsatisfactory,unsatisfactory,0.22897442,0.7710256
53
+ unsatisfactory,unsatisfactory,9.719304e-05,0.99990284
54
+ unsatisfactory,unsatisfactory,0.017098254,0.9829017
55
+ unsatisfactory,unsatisfactory,2.8999524e-05,0.99997103
56
+ unsatisfactory,unsatisfactory,0.0001132711,0.99988675
57
+ unsatisfactory,satisfactory,0.5715112,0.4284888
58
+ unsatisfactory,unsatisfactory,0.08374097,0.91625905
59
+ unsatisfactory,unsatisfactory,0.00025100916,0.999749
60
+ unsatisfactory,unsatisfactory,7.1715695e-06,0.99999285
61
+ unsatisfactory,unsatisfactory,6.0524697e-05,0.99993944
62
+ unsatisfactory,unsatisfactory,7.107401e-05,0.99992895
63
+ unsatisfactory,unsatisfactory,0.0008521297,0.9991479
64
+ unsatisfactory,unsatisfactory,0.011060039,0.98893994
65
+ unsatisfactory,unsatisfactory,0.0031355496,0.9968644
66
+ unsatisfactory,unsatisfactory,0.00011907512,0.9998809
67
+ unsatisfactory,unsatisfactory,0.00013133489,0.99986863
68
+ unsatisfactory,satisfactory,0.6512135,0.34878644
69
+ unsatisfactory,unsatisfactory,0.0007176918,0.9992823
70
+ unsatisfactory,unsatisfactory,2.2451277e-05,0.9999776
71
+ unsatisfactory,unsatisfactory,1.7993949e-05,0.999982
72
+ unsatisfactory,unsatisfactory,0.00086633215,0.99913365
73
+ unsatisfactory,unsatisfactory,0.00020952737,0.9997905
74
+ unsatisfactory,unsatisfactory,0.00024627327,0.9997538
75
+ unsatisfactory,unsatisfactory,0.34562668,0.65437335
76
+ unsatisfactory,unsatisfactory,0.03512636,0.9648737
77
+ unsatisfactory,unsatisfactory,8.823718e-05,0.9999118
78
+ unsatisfactory,unsatisfactory,0.19964169,0.8003583
79
+ unsatisfactory,unsatisfactory,0.00011612113,0.9998839
80
+ unsatisfactory,unsatisfactory,1.17525e-05,0.9999882
81
+ unsatisfactory,unsatisfactory,0.0004509651,0.999549
preprocessor_config.json ADDED
@@ -0,0 +1,23 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "data_format": "channels_first",
3
+ "do_normalize": true,
4
+ "do_rescale": true,
5
+ "do_resize": true,
6
+ "image_mean": [
7
+ 0.5,
8
+ 0.5,
9
+ 0.5
10
+ ],
11
+ "image_processor_type": "ViTImageProcessorFast",
12
+ "image_std": [
13
+ 0.5,
14
+ 0.5,
15
+ 0.5
16
+ ],
17
+ "resample": 3,
18
+ "rescale_factor": 0.00392156862745098,
19
+ "size": {
20
+ "height": 224,
21
+ "width": 224
22
+ }
23
+ }
results_seed_42.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {"Accuracy": 0.925, "Balanced_Accuracy": 0.925, "MCC": 0.8510644963469901, "Cohen_Kappa": 0.85, "Brier_Score": 0.04958225747752335, "Global_AUROC": 0.988125, "satisfactory_AUROC": 0.988125, "unsatisfactory_AUROC": 0.988125, "satisfactory_Precision": 0.9047619047619048, "satisfactory_Recall": 0.95, "satisfactory_Specificity": 0.9, "satisfactory_NPV": 0.9473684210526315, "satisfactory_F1": 0.9268292682926829, "unsatisfactory_Precision": 0.9473684210526315, "unsatisfactory_Recall": 0.9, "unsatisfactory_Specificity": 0.95, "unsatisfactory_NPV": 0.9047619047619048, "unsatisfactory_F1": 0.9230769230769231, "Macro_Precision": 0.9260651629072681, "Macro_Recall": 0.925, "Macro_Specificity": 0.925, "Macro_F1": 0.924953095684803, "Seed": 42}