Upload DeiT3 model from experiment b2
Browse files. This view is limited to 50 files because it contains too many changes.
See raw diff
- .gitattributes +2 -0
- README.md +161 -0
- config.json +76 -0
- confusion_matrices/DeiT3_Confusion_Matrix_a.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_b.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_c.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_d.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_e.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_f.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_g.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_h.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_i.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_j.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_k.png +0 -0
- confusion_matrices/DeiT3_Confusion_Matrix_l.png +0 -0
- deit3-gravit-b2.pth +3 -0
- evaluation_results.csv +133 -0
- model.safetensors +3 -0
- pytorch_model.bin +3 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_a.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_b.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_c.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_d.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_e.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_f.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_g.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_h.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_i.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_j.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_k.png +0 -0
- roc_confusion_matrix/DeiT3_roc_confusion_matrix_l.png +0 -0
- roc_curves/DeiT3_ROC_a.png +0 -0
- roc_curves/DeiT3_ROC_b.png +0 -0
- roc_curves/DeiT3_ROC_c.png +0 -0
- roc_curves/DeiT3_ROC_d.png +0 -0
- roc_curves/DeiT3_ROC_e.png +0 -0
- roc_curves/DeiT3_ROC_f.png +0 -0
- roc_curves/DeiT3_ROC_g.png +0 -0
- roc_curves/DeiT3_ROC_h.png +0 -0
- roc_curves/DeiT3_ROC_i.png +0 -0
- roc_curves/DeiT3_ROC_j.png +0 -0
- roc_curves/DeiT3_ROC_k.png +0 -0
- roc_curves/DeiT3_ROC_l.png +0 -0
- training_curves/DeiT3_accuracy.png +0 -0
- training_curves/DeiT3_auc.png +0 -0
- training_curves/DeiT3_combined_metrics.png +3 -0
- training_curves/DeiT3_f1.png +0 -0
- training_curves/DeiT3_loss.png +0 -0
- training_curves/DeiT3_metrics.csv +34 -0
- training_metrics.csv +34 -0
.gitattributes
CHANGED
|
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
|
|
|
|
|
|
|
|
| 33 |
*.zip filter=lfs diff=lfs merge=lfs -text
|
| 34 |
*.zst filter=lfs diff=lfs merge=lfs -text
|
| 35 |
*tfevents* filter=lfs diff=lfs merge=lfs -text
|
| 36 |
+
training_curves/DeiT3_combined_metrics.png filter=lfs diff=lfs merge=lfs -text
|
| 37 |
+
training_notebook_b2.ipynb filter=lfs diff=lfs merge=lfs -text
|
README.md
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
---
|
| 2 |
+
license: apache-2.0
|
| 3 |
+
tags:
|
| 4 |
+
- vision-transformer
|
| 5 |
+
- image-classification
|
| 6 |
+
- pytorch
|
| 7 |
+
- timm
|
| 8 |
+
- deit3
|
| 9 |
+
- gravitational-lensing
|
| 10 |
+
- strong-lensing
|
| 11 |
+
- astronomy
|
| 12 |
+
- astrophysics
|
| 13 |
+
datasets:
|
| 14 |
+
- J24
|
| 15 |
+
metrics:
|
| 16 |
+
- accuracy
|
| 17 |
+
- auc
|
| 18 |
+
- f1
|
| 19 |
+
model-index:
|
| 20 |
+
- name: DeiT3-b2
|
| 21 |
+
results:
|
| 22 |
+
- task:
|
| 23 |
+
type: image-classification
|
| 24 |
+
name: Strong Gravitational Lens Discovery
|
| 25 |
+
dataset:
|
| 26 |
+
type: common-test-sample
|
| 27 |
+
name: Common Test Sample (More et al. 2024)
|
| 28 |
+
metrics:
|
| 29 |
+
- type: accuracy
|
| 30 |
+
value: 0.8174
|
| 31 |
+
name: Average Accuracy
|
| 32 |
+
- type: auc
|
| 33 |
+
value: 0.7716
|
| 34 |
+
name: Average AUC-ROC
|
| 35 |
+
- type: f1
|
| 36 |
+
value: 0.5031
|
| 37 |
+
name: Average F1-Score
|
| 38 |
+
---
|
| 39 |
+
|
| 40 |
+
# 🌌 deit3-gravit-b2
|
| 41 |
+
|
| 42 |
+
🔭 This model is part of **GraViT**: Transfer Learning with Vision Transformers and MLP-Mixer for Strong Gravitational Lens Discovery
|
| 43 |
+
|
| 44 |
+
🔗 **GitHub Repository**: [https://github.com/parlange/gravit](https://github.com/parlange/gravit)
|
| 45 |
+
|
| 46 |
+
## 🛰️ Model Details
|
| 47 |
+
|
| 48 |
+
- **🤖 Model Type**: DeiT3
|
| 49 |
+
- **🧪 Experiment**: B2 - J24-half
|
| 50 |
+
- **🌌 Dataset**: J24
|
| 51 |
+
- **🪐 Fine-tuning Strategy**: half
|
| 52 |
+
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
## 💻 Quick Start
|
| 56 |
+
|
| 57 |
+
```python
|
| 58 |
+
import torch
|
| 59 |
+
import timm
|
| 60 |
+
|
| 61 |
+
# Load the model directly from the Hub
|
| 62 |
+
model = timm.create_model(
|
| 63 |
+
'hf-hub:parlange/deit3-gravit-b2',
|
| 64 |
+
pretrained=True
|
| 65 |
+
)
|
| 66 |
+
model.eval()
|
| 67 |
+
|
| 68 |
+
# Example inference
|
| 69 |
+
dummy_input = torch.randn(1, 3, 224, 224)
|
| 70 |
+
with torch.no_grad():
|
| 71 |
+
output = model(dummy_input)
|
| 72 |
+
predictions = torch.softmax(output, dim=1)
|
| 73 |
+
print(f"Lens probability: {predictions[0][1]:.4f}")
|
| 74 |
+
```
|
| 75 |
+
|
| 76 |
+
## ⚡️ Training Configuration
|
| 77 |
+
|
| 78 |
+
**Training Dataset:** J24 (Jaelani et al. 2024)
|
| 79 |
+
**Fine-tuning Strategy:** half
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
| 🔧 Parameter | 📝 Value |
|
| 83 |
+
|--------------|----------|
|
| 84 |
+
| Batch Size | 192 |
|
| 85 |
+
| Learning Rate | AdamW with ReduceLROnPlateau |
|
| 86 |
+
| Epochs | 100 |
|
| 87 |
+
| Patience | 10 |
|
| 88 |
+
| Optimizer | AdamW |
|
| 89 |
+
| Scheduler | ReduceLROnPlateau |
|
| 90 |
+
| Image Size | 224x224 |
|
| 91 |
+
| Fine Tune Mode | half |
|
| 92 |
+
| Stochastic Depth Probability | 0.1 |
|
| 93 |
+
|
| 94 |
+
|
| 95 |
+
## 📈 Training Curves
|
| 96 |
+
|
| 97 |
+

|
| 98 |
+
|
| 99 |
+
|
| 100 |
+
## 🏁 Final Epoch Training Metrics
|
| 101 |
+
|
| 102 |
+
| Metric | Training | Validation |
|
| 103 |
+
|:---------:|:-----------:|:-------------:|
|
| 104 |
+
| 📉 Loss | 0.0200 | 0.0879 |
|
| 105 |
+
| 🎯 Accuracy | 0.9928 | 0.9786 |
|
| 106 |
+
| 📊 AUC-ROC | 0.9997 | 0.9968 |
|
| 107 |
+
| ⚖️ F1 Score | 0.9928 | 0.9784 |
|
| 108 |
+
|
| 109 |
+
|
| 110 |
+
## ☑️ Evaluation Results
|
| 111 |
+
|
| 112 |
+
### ROC Curves and Confusion Matrices
|
| 113 |
+
|
| 114 |
+
Performance across all test datasets (a through l) in the Common Test Sample (More et al. 2024):
|
| 115 |
+
|
| 116 |
+

|
| 117 |
+

|
| 118 |
+

|
| 119 |
+

|
| 120 |
+

|
| 121 |
+

|
| 122 |
+

|
| 123 |
+

|
| 124 |
+

|
| 125 |
+

|
| 126 |
+

|
| 127 |
+

|
| 128 |
+
|
| 129 |
+
### 📋 Performance Summary
|
| 130 |
+
|
| 131 |
+
Average performance across 12 test datasets from the Common Test Sample (More et al. 2024):
|
| 132 |
+
|
| 133 |
+
| Metric | Value |
|
| 134 |
+
|-----------|----------|
|
| 135 |
+
| 🎯 Average Accuracy | 0.8174 |
|
| 136 |
+
| 📈 Average AUC-ROC | 0.7716 |
|
| 137 |
+
| ⚖️ Average F1-Score | 0.5031 |
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
## 📘 Citation
|
| 141 |
+
|
| 142 |
+
If you use this model in your research, please cite:
|
| 143 |
+
|
| 144 |
+
```bibtex
|
| 145 |
+
@misc{parlange2025gravit,
|
| 146 |
+
title={GraViT: Transfer Learning with Vision Transformers and MLP-Mixer for Strong Gravitational Lens Discovery},
|
| 147 |
+
author={René Parlange and Juan C. Cuevas-Tello and Octavio Valenzuela and Omar de J. Cabrera-Rosas and Tomás Verdugo and Anupreeta More and Anton T. Jaelani},
|
| 148 |
+
year={2025},
|
| 149 |
+
eprint={2509.00226},
|
| 150 |
+
archivePrefix={arXiv},
|
| 151 |
+
primaryClass={cs.CV},
|
| 152 |
+
url={https://arxiv.org/abs/2509.00226},
|
| 153 |
+
}
|
| 154 |
+
```
|
| 155 |
+
|
| 156 |
+
---
|
| 157 |
+
|
| 158 |
+
|
| 159 |
+
## Model Card Contact
|
| 160 |
+
|
| 161 |
+
For questions about this model, please contact the author through: https://github.com/parlange/
|
config.json
ADDED
|
@@ -0,0 +1,76 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"architecture": "deit3_base_patch16_224",
|
| 3 |
+
"num_classes": 2,
|
| 4 |
+
"num_features": 1000,
|
| 5 |
+
"global_pool": "avg",
|
| 6 |
+
"crop_pct": 0.875,
|
| 7 |
+
"interpolation": "bicubic",
|
| 8 |
+
"mean": [
|
| 9 |
+
0.485,
|
| 10 |
+
0.456,
|
| 11 |
+
0.406
|
| 12 |
+
],
|
| 13 |
+
"std": [
|
| 14 |
+
0.229,
|
| 15 |
+
0.224,
|
| 16 |
+
0.225
|
| 17 |
+
],
|
| 18 |
+
"first_conv": "conv1",
|
| 19 |
+
"classifier": "fc",
|
| 20 |
+
"input_size": [
|
| 21 |
+
3,
|
| 22 |
+
224,
|
| 23 |
+
224
|
| 24 |
+
],
|
| 25 |
+
"pool_size": [
|
| 26 |
+
7,
|
| 27 |
+
7
|
| 28 |
+
],
|
| 29 |
+
"pretrained_cfg": {
|
| 30 |
+
"tag": "gravit_b2",
|
| 31 |
+
"custom_load": false,
|
| 32 |
+
"input_size": [
|
| 33 |
+
3,
|
| 34 |
+
224,
|
| 35 |
+
224
|
| 36 |
+
],
|
| 37 |
+
"fixed_input_size": true,
|
| 38 |
+
"interpolation": "bicubic",
|
| 39 |
+
"crop_pct": 0.875,
|
| 40 |
+
"crop_mode": "center",
|
| 41 |
+
"mean": [
|
| 42 |
+
0.485,
|
| 43 |
+
0.456,
|
| 44 |
+
0.406
|
| 45 |
+
],
|
| 46 |
+
"std": [
|
| 47 |
+
0.229,
|
| 48 |
+
0.224,
|
| 49 |
+
0.225
|
| 50 |
+
],
|
| 51 |
+
"num_classes": 2,
|
| 52 |
+
"pool_size": [
|
| 53 |
+
7,
|
| 54 |
+
7
|
| 55 |
+
],
|
| 56 |
+
"first_conv": "conv1",
|
| 57 |
+
"classifier": "fc"
|
| 58 |
+
},
|
| 59 |
+
"model_name": "deit3_gravit_b2",
|
| 60 |
+
"experiment": "b2",
|
| 61 |
+
"training_strategy": "half",
|
| 62 |
+
"dataset": "J24",
|
| 63 |
+
"hyperparameters": {
|
| 64 |
+
"batch_size": "192",
|
| 65 |
+
"learning_rate": "AdamW with ReduceLROnPlateau",
|
| 66 |
+
"epochs": "100",
|
| 67 |
+
"patience": "10",
|
| 68 |
+
"optimizer": "AdamW",
|
| 69 |
+
"scheduler": "ReduceLROnPlateau",
|
| 70 |
+
"image_size": "224x224",
|
| 71 |
+
"fine_tune_mode": "half",
|
| 72 |
+
"stochastic_depth_probability": "0.1"
|
| 73 |
+
},
|
| 74 |
+
"hf_hub_id": "parlange/deit3-gravit-b2",
|
| 75 |
+
"license": "apache-2.0"
|
| 76 |
+
}
|
confusion_matrices/DeiT3_Confusion_Matrix_a.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_b.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_c.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_d.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_e.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_f.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_g.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_h.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_i.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_j.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_k.png
ADDED
|
confusion_matrices/DeiT3_Confusion_Matrix_l.png
ADDED
|
deit3-gravit-b2.pth
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:85b4b7aeeaa4109666ac00df83a1b14b6eca8d97cb32116d2bbc98772fe3d455
|
| 3 |
+
size 343337390
|
evaluation_results.csv
ADDED
|
@@ -0,0 +1,133 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
Model,Dataset,Loss,Accuracy,AUCROC,F1
|
| 2 |
+
ViT,a,0.3504258575622333,0.8975165042439485,0.7724705340699815,0.3607843137254902
|
| 3 |
+
ViT,b,0.6139323879378804,0.8443885570575291,0.7591445672191528,0.2709867452135493
|
| 4 |
+
ViT,c,0.2921286642045504,0.9195221628418736,0.7922799263351749,0.41818181818181815
|
| 5 |
+
ViT,d,0.26607740657091966,0.9192077962904747,0.7989585635359117,0.41723356009070295
|
| 6 |
+
ViT,e,0.5051617136084026,0.8562019758507134,0.8035646711571935,0.5841269841269842
|
| 7 |
+
ViT,f,0.29864826543026823,0.9141042521880567,0.7820238007404096,0.1423047177107502
|
| 8 |
+
ViT,g,1.732062186717987,0.5718333333333333,0.6607354444444444,0.3945321706339854
|
| 9 |
+
ViT,h,1.5614525699615478,0.6116666666666667,0.696484388888889,0.4180819180819181
|
| 10 |
+
ViT,i,1.5476410572528838,0.6115,0.7046536666666666,0.41797752808988764
|
| 11 |
+
ViT,j,0.45324009704589846,0.8708333333333333,0.9427406111111111,0.8716249792943515
|
| 12 |
+
ViT,k,0.2688189628124237,0.9105,0.9655912222222223,0.9073978272115882
|
| 13 |
+
ViT,l,0.7289746560802081,0.8074665538575432,0.8200002274918716,0.6616485456741938
|
| 14 |
+
MLP-Mixer,a,0.47016938226997235,0.8553913863564917,0.726208103130755,0.2966360856269113
|
| 15 |
+
MLP-Mixer,b,0.648904620120826,0.8333857277585665,0.7598747697974219,0.26795580110497236
|
| 16 |
+
MLP-Mixer,c,0.3869087039131445,0.8874567745991826,0.733244935543278,0.35144927536231885
|
| 17 |
+
MLP-Mixer,d,0.3633993222574062,0.8997170701037409,0.7682688766114181,0.37816764132553604
|
| 18 |
+
MLP-Mixer,e,0.5312218908486329,0.8474204171240395,0.8186331643078787,0.5825825825825826
|
| 19 |
+
MLP-Mixer,f,0.3886312065722082,0.8861435984819146,0.7510127466766198,0.11658653846153846
|
| 20 |
+
MLP-Mixer,g,1.9292033066749572,0.5541666666666667,0.6170044444444445,0.36566279345506286
|
| 21 |
+
MLP-Mixer,h,1.7903018260002137,0.5828333333333333,0.5599299999999999,0.3812113720642769
|
| 22 |
+
MLP-Mixer,i,1.7778379353284837,0.5893333333333334,0.6098859999999999,0.3849226160758862
|
| 23 |
+
MLP-Mixer,j,0.5891341290473938,0.8386666666666667,0.9153434444444444,0.8365968939905469
|
| 24 |
+
MLP-Mixer,k,0.4377687557935715,0.8738333333333334,0.9274168333333332,0.8674951864169438
|
| 25 |
+
MLP-Mixer,l,0.8833867266855338,0.7767965734228756,0.7550110803792328,0.6132135984605517
|
| 26 |
+
CvT,a,0.710060378377897,0.7095253065073877,0.4610672191528545,0.09941520467836257
|
| 27 |
+
CvT,b,0.6279841153848774,0.7516504243948444,0.5819742173112339,0.11434977578475336
|
| 28 |
+
CvT,c,0.7670251699747135,0.6592266582835586,0.4085635359116022,0.08600337268128162
|
| 29 |
+
CvT,d,0.45408995114606493,0.8179817667400189,0.5189116022099447,0.14977973568281938
|
| 30 |
+
CvT,e,0.6853549914551096,0.756311745334797,0.6546734276848558,0.3148148148148148
|
| 31 |
+
CvT,f,0.5548892200417859,0.7615211834869491,0.5019215495653457,0.03206538824269098
|
| 32 |
+
CvT,g,1.6338303427696228,0.46316666666666667,0.5036535,0.21419858502073677
|
| 33 |
+
CvT,h,1.7075452818870545,0.4141666666666667,0.3043033333333333,0.1998634190758024
|
| 34 |
+
CvT,i,1.5416374638080597,0.49833333333333335,0.4133462222222223,0.22582304526748972
|
| 35 |
+
CvT,j,0.6718657946586609,0.7025,0.7951043333333334,0.6775067750677507
|
| 36 |
+
CvT,k,0.5796729214191437,0.7376666666666667,0.8160571111111111,0.7043576258452291
|
| 37 |
+
CvT,l,0.9414389114699858,0.6422716937232299,0.5856238290999722,0.41148325358851673
|
| 38 |
+
Swin,a,0.38957844183659335,0.9173215969820812,0.7264815837937385,0.30971128608923887
|
| 39 |
+
Swin,b,0.506205921398323,0.8714240804778371,0.7070128913443832,0.2239089184060721
|
| 40 |
+
Swin,c,0.33732365382351015,0.9254951273184533,0.7497605893186002,0.3323943661971831
|
| 41 |
+
Swin,d,0.25108740707354066,0.9465576862621817,0.823268876611418,0.4097222222222222
|
| 42 |
+
Swin,e,0.7929391115167793,0.8419319429198683,0.7817982290168772,0.45038167938931295
|
| 43 |
+
Swin,f,0.21592594718169755,0.9430717992409573,0.7533609214757848,0.13833528722157093
|
| 44 |
+
Swin,g,2.702541620135307,0.5423333333333333,0.5866217222222223,0.2826541274817137
|
| 45 |
+
Swin,h,2.613005870103836,0.571,0.6225122222222222,0.2959518599562363
|
| 46 |
+
Swin,i,2.567286303862929,0.5821666666666667,0.7211619444444445,0.3014767344664252
|
| 47 |
+
Swin,j,0.42502203929424287,0.8855,0.9479796111111113,0.8833021912688975
|
| 48 |
+
Swin,k,0.28976670680940153,0.9253333333333333,0.9796539999999999,0.9206798866855525
|
| 49 |
+
Swin,l,1.03963865723415,0.8099518798582835,0.7976283082751249,0.6403842305383229
|
| 50 |
+
CaiT,a,0.3913202127339292,0.8953159383841559,0.6907965009208104,0.31901840490797545
|
| 51 |
+
CaiT,b,0.5226519536631019,0.8626218170386671,0.7338508287292819,0.2630691399662732
|
| 52 |
+
CaiT,c,0.3735890439830086,0.8984596038981453,0.6786878453038674,0.325678496868476
|
| 53 |
+
CaiT,d,0.2843286003735934,0.9254951273184533,0.76402394106814,0.3969465648854962
|
| 54 |
+
CaiT,e,0.6834587411940687,0.8463227222832053,0.7675773859078181,0.527027027027027
|
| 55 |
+
CaiT,f,0.27040889076227814,0.918054372240725,0.7197493196998433,0.128500823723229
|
| 56 |
+
CaiT,g,2.0468120236396787,0.5761666666666667,0.6781162222222221,0.38351515151515153
|
| 57 |
+
CaiT,h,1.967783824443817,0.5951666666666666,0.5982157777777778,0.3944153577661431
|
| 58 |
+
CaiT,i,1.92046093159914,0.6095,0.7142172222222223,0.40305732484076434
|
| 59 |
+
CaiT,j,0.30098878836631776,0.9125,0.9733297777777777,0.9145368712355526
|
| 60 |
+
CaiT,k,0.1746376877427101,0.9458333333333333,0.9841325555555557,0.9453138145717651
|
| 61 |
+
CaiT,l,0.8100430545029764,0.817143461477447,0.813651736379369,0.6802293323469576
|
| 62 |
+
DeiT,a,0.3698357029348677,0.9119773656082992,0.7087136279926335,0.37777777777777777
|
| 63 |
+
DeiT,b,0.5088012874857205,0.8833700094309965,0.7757348066298342,0.3142329020332717
|
| 64 |
+
DeiT,c,0.3891148048258922,0.9160641307764854,0.7151408839779005,0.3890160183066362
|
| 65 |
+
DeiT,d,0.32573777145838745,0.9352404904118202,0.8077476979742173,0.4521276595744681
|
| 66 |
+
DeiT,e,0.7152948476881215,0.862788144895719,0.814546280178612,0.576271186440678
|
| 67 |
+
DeiT,f,0.2608369113050038,0.9330028657733715,0.7554304661629335,0.1642512077294686
|
| 68 |
+
DeiT,g,2.54885491502285,0.5731666666666667,0.6792770555555556,0.35798445725745803
|
| 69 |
+
DeiT,h,2.485401116847992,0.5905,0.5861576111111111,0.3675675675675676
|
| 70 |
+
DeiT,i,2.451800708413124,0.6006666666666667,0.7191378888888889,0.37343096234309625
|
| 71 |
+
DeiT,j,0.43799715077877044,0.9003333333333333,0.9592409444444444,0.8995295698924731
|
| 72 |
+
DeiT,k,0.3409429641962051,0.9278333333333333,0.9696212777777777,0.9251771211335753
|
| 73 |
+
DeiT,l,1.0167739923843866,0.816297393051663,0.8008547085670262,0.6667945520813351
|
| 74 |
+
DeiT3,a,0.41754333621036777,0.9192077962904747,0.7515147329650091,0.43015521064301554
|
| 75 |
+
DeiT3,b,0.5932955155673173,0.8773970449544168,0.7794677716390424,0.3321917808219178
|
| 76 |
+
DeiT3,c,0.4092358484072567,0.9154353976736875,0.7444696132596684,0.4190064794816415
|
| 77 |
+
DeiT3,d,0.5747989024035925,0.8783401446086136,0.7588406998158379,0.33390705679862304
|
| 78 |
+
DeiT3,e,0.7725568269302764,0.8759604829857299,0.8239688185877545,0.6319218241042345
|
| 79 |
+
DeiT3,f,0.3546846674270619,0.916350398884672,0.7623233064106626,0.152276295133438
|
| 80 |
+
DeiT3,g,2.8653497416973113,0.5903333333333334,0.6676445555555556,0.40828117477130477
|
| 81 |
+
DeiT3,h,2.767767428398132,0.6105,0.5985711666666667,0.4205306223654848
|
| 82 |
+
DeiT3,i,2.8555434824228287,0.5908333333333333,0.6332408888888889,0.40857624668754516
|
| 83 |
+
DeiT3,j,0.407473158121109,0.9088333333333334,0.9701894444444443,0.9098103874690849
|
| 84 |
+
DeiT3,k,0.39766689217090606,0.9093333333333333,0.9684031111111111,0.9102606400527878
|
| 85 |
+
DeiT3,l,1.1462537638319459,0.8163502723282745,0.8007222930468951,0.6808197775939712
|
| 86 |
+
Twins_SVT,a,0.4471702475530552,0.8126375353662371,0.6335423572744014,0.1989247311827957
|
| 87 |
+
Twins_SVT,b,0.4493988096264315,0.8060358377868595,0.6959318600368325,0.1934640522875817
|
| 88 |
+
Twins_SVT,c,0.5063522113864807,0.7780572147123546,0.5922486187845304,0.17330210772833723
|
| 89 |
+
Twins_SVT,d,0.3254203815124425,0.8849418421879912,0.7219650092081031,0.28793774319066145
|
| 90 |
+
Twins_SVT,e,0.5195407480099591,0.7771679473106476,0.7089003254370696,0.42165242165242167
|
| 91 |
+
Twins_SVT,f,0.3887786737239737,0.8404461312059485,0.6636732736434142,0.06702898550724638
|
| 92 |
+
Twins_SVT,g,1.2475184862613677,0.4825,0.532602,0.20689655172413793
|
| 93 |
+
Twins_SVT,h,1.277713261127472,0.4676666666666667,0.36107222222222224,0.2022977022977023
|
| 94 |
+
Twins_SVT,i,1.1817892324924468,0.5243333333333333,0.5369667222222223,0.2210698689956332
|
| 95 |
+
Twins_SVT,j,0.5273123075962066,0.7598333333333334,0.8436975000000001,0.7417099838680767
|
| 96 |
+
Twins_SVT,k,0.46158305954933165,0.8016666666666666,0.8776625,0.7766516516516516
|
| 97 |
+
Twins_SVT,l,0.7025143162813111,0.7046163608481836,0.6522568273932748,0.47706422018348627
|
| 98 |
+
Twins_PCPVT,a,0.45982081515914197,0.7900031436655139,0.6319235727440148,0.17326732673267325
|
| 99 |
+
Twins_PCPVT,b,0.37307003830934016,0.8333857277585665,0.729316758747698,0.208955223880597
|
| 100 |
+
Twins_PCPVT,c,0.5298199271376273,0.7510216912920465,0.5787163904235728,0.15021459227467812
|
| 101 |
+
Twins_PCPVT,d,0.4890483941382786,0.7840301791889343,0.6198968692449357,0.16928657799274485
|
| 102 |
+
Twins_PCPVT,e,0.4528412980515138,0.8068057080131723,0.7623628244910315,0.4430379746835443
|
| 103 |
+
Twins_PCPVT,f,0.41534574994755774,0.8134149175121989,0.6469823751264034,0.05492349941153393
|
| 104 |
+
Twins_PCPVT,g,0.97830464220047,0.5461666666666667,0.6621347777777777,0.33827460510328067
|
| 105 |
+
Twins_PCPVT,h,1.0614082341194153,0.5025,0.47131744444444446,0.31802604523646333
|
| 106 |
+
Twins_PCPVT,i,1.0397925007343292,0.52,0.5257673333333334,0.3258426966292135
|
| 107 |
+
Twins_PCPVT,j,0.36769862127304076,0.8383333333333334,0.9181693333333335,0.834696659850034
|
| 108 |
+
Twins_PCPVT,k,0.42918648648262026,0.8121666666666667,0.8860434444444444,0.8129460580912863
|
| 109 |
+
Twins_PCPVT,l,0.6099785904964774,0.7216434879170853,0.7243182138507473,0.5498546263040875
|
| 110 |
+
PiT,a,0.37776082014932605,0.8651367494498585,0.6834337016574586,0.25906735751295334
|
| 111 |
+
PiT,b,0.44755573657390196,0.8365293932725558,0.7427127071823205,0.22388059701492538
|
| 112 |
+
PiT,c,0.40049510616170875,0.8528764539453002,0.6488581952117863,0.24271844660194175
|
| 113 |
+
PiT,d,0.23405979966281606,0.9214083621502672,0.7678987108655617,0.375
|
| 114 |
+
PiT,e,0.4743333708670739,0.8430296377607025,0.8043820479830468,0.5119453924914675
|
| 115 |
+
PiT,f,0.2892587873664891,0.8926496785686624,0.716096531011705,0.09765625
|
| 116 |
+
PiT,g,1.521324759721756,0.547,0.6473084444444445,0.3386861313868613
|
| 117 |
+
PiT,h,1.4963747837543488,0.5556666666666666,0.5119071111111111,0.3430261212419911
|
| 118 |
+
PiT,i,1.408136343061924,0.592,0.658547,0.3625
|
| 119 |
+
PiT,j,0.6195285122394562,0.7638333333333334,0.8635346666666667,0.7381260395490667
|
| 120 |
+
PiT,k,0.5063400955796242,0.8088333333333333,0.902523,0.7768916553199766
|
| 121 |
+
PiT,l,0.7616236460134194,0.7518375548622495,0.7319390263322477,0.5412063740346075
|
| 122 |
+
Ensemble,a,,0.9179503300848789,0.7188591160220995,0.38588235294117645
|
| 123 |
+
Ensemble,b,,0.8682804149638479,0.760756906077348,0.28130360205831906
|
| 124 |
+
Ensemble,c,,0.9358692235146181,0.6998213627992634,0.44565217391304346
|
| 125 |
+
Ensemble,d,,0.9440427538509902,0.7630147329650093,0.47953216374269003
|
| 126 |
+
Ensemble,e,,0.8770581778265643,0.808151063346704,0.5942028985507246
|
| 127 |
+
Ensemble,f,,0.9397413058632174,0.7397727124771607,0.1740976645435244
|
| 128 |
+
Ensemble,g,,0.5465,0.6648292222222223,0.30569022709874966
|
| 129 |
+
Ensemble,h,,0.5823333333333334,0.5448976666666666,0.3234341252699784
|
| 130 |
+
Ensemble,i,,0.5866666666666667,0.6481456666666666,0.32572050027188687
|
| 131 |
+
Ensemble,j,,0.8898333333333334,0.9515273333333334,0.8894463957183476
|
| 132 |
+
Ensemble,k,,0.93,0.9758612222222223,0.926803764377832
|
| 133 |
+
Ensemble,l,,0.8138649463275343,0.7884954119718932,0.6549019607843137
|
model.safetensors
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:20db63b7499b771560c443eb2969b594fa273923c236444cf231c46a251b943d
|
| 3 |
+
size 343287616
|
pytorch_model.bin
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
version https://git-lfs.github.com/spec/v1
|
| 2 |
+
oid sha256:85b4b7aeeaa4109666ac00df83a1b14b6eca8d97cb32116d2bbc98772fe3d455
|
| 3 |
+
size 343337390
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_a.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_b.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_c.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_d.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_e.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_f.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_g.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_h.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_i.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_j.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_k.png
ADDED
|
roc_confusion_matrix/DeiT3_roc_confusion_matrix_l.png
ADDED
|
roc_curves/DeiT3_ROC_a.png
ADDED
|
roc_curves/DeiT3_ROC_b.png
ADDED
|
roc_curves/DeiT3_ROC_c.png
ADDED
|
roc_curves/DeiT3_ROC_d.png
ADDED
|
roc_curves/DeiT3_ROC_e.png
ADDED
|
roc_curves/DeiT3_ROC_f.png
ADDED
|
roc_curves/DeiT3_ROC_g.png
ADDED
|
roc_curves/DeiT3_ROC_h.png
ADDED
|
roc_curves/DeiT3_ROC_i.png
ADDED
|
roc_curves/DeiT3_ROC_j.png
ADDED
|
roc_curves/DeiT3_ROC_k.png
ADDED
|
roc_curves/DeiT3_ROC_l.png
ADDED
|
training_curves/DeiT3_accuracy.png
ADDED
|
training_curves/DeiT3_auc.png
ADDED
|
training_curves/DeiT3_combined_metrics.png
ADDED
|
Git LFS Details
|
training_curves/DeiT3_f1.png
ADDED
|
training_curves/DeiT3_loss.png
ADDED
|
training_curves/DeiT3_metrics.csv
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
epoch,train_loss,val_loss,train_accuracy,val_accuracy,train_auc,val_auc,train_f1,val_f1
|
| 2 |
+
1,0.20823453406474376,0.09757183578428348,0.9119422350087437,0.9630225080385852,0.9725903702821805,0.9915828919147744,0.910297666934835,0.9624796084828712
|
| 3 |
+
2,0.09916189847298737,0.09389385130627746,0.9656456253173126,0.9640943193997856,0.9919289132386885,0.9941578813747216,0.9652635181382615,0.9632070291048874
|
| 4 |
+
3,0.08628897206001343,0.07643812209655235,0.9700456930106617,0.9726688102893891,0.9939255987772581,0.995206142754245,0.9697263397947549,0.972327726532827
|
| 5 |
+
4,0.07218970677123938,0.07539547409658647,0.9752355164438428,0.9732047159699893,0.9956400170771655,0.9953945426076607,0.9750298617826062,0.9729437229437229
|
| 6 |
+
5,0.06614316622907083,0.08399687376819623,0.9775483725390647,0.9732047159699893,0.9961454641874501,0.9956823118959574,0.9773361425886908,0.9727965179542981
|
| 7 |
+
6,0.061957231697081554,0.09183556833259546,0.9776611947876122,0.9710610932475884,0.9967495806453283,0.9954910400935566,0.9774692762858443,0.9705561613958561
|
| 8 |
+
7,0.054957266779070195,0.09187950299292132,0.9799740508828341,0.969989281886388,0.9975748148995466,0.9954336011138567,0.9798421441144739,0.9701492537313433
|
| 9 |
+
8,0.05209087519541592,0.07983304823709837,0.9814971512382242,0.9705251875669882,0.9977005648962951,0.9962555529133624,0.9813921824473819,0.9706353443673251
|
| 10 |
+
9,0.04740831561803939,0.09881889566156259,0.9840074462684041,0.9748124330117899,0.9980009214357941,0.9957426228246422,0.9839107857325274,0.9743589743589743
|
| 11 |
+
10,0.0446742644357306,0.07986885764399525,0.9833587183392565,0.9758842443729904,0.9983884865757738,0.9962113248989936,0.9832681073109864,0.9755035383777899
|
| 12 |
+
11,0.035358944823620266,0.0793747516211206,0.9879562249675636,0.9785637727759914,0.9989876737827071,0.9963147150624534,0.9879019691174388,0.9784714747039828
|
| 13 |
+
12,0.031669339039948494,0.07909312547211478,0.9896203531336379,0.9769560557341908,0.9991141684177519,0.9963985759728153,0.9895786135024921,0.9768692845615923
|
| 14 |
+
13,0.028682257606570522,0.074580498111593,0.9902126699385119,0.9764201500535906,0.9992601159327363,0.996554235607802,0.990182486914698,0.9763440860215054
|
| 15 |
+
14,0.028616402251874458,0.0833205767095664,0.9899024087550065,0.9785637727759914,0.9992961354236629,0.996564574624148,0.9898704091449267,0.9783783783783784
|
| 16 |
+
15,0.02636249609232292,0.07616832004291069,0.9915665369210809,0.977491961414791,0.999344236193616,0.9966432660263369,0.99154292179324,0.9773706896551724
|
| 17 |
+
16,0.027063428668989625,0.07865303261295392,0.9900716421278276,0.9785637727759914,0.9994422261383196,0.9967035769550219,0.9900384876613085,0.9783783783783784
|
| 18 |
+
17,0.025056365976369527,0.0766807585667184,0.9915101257968071,0.9764201500535906,0.9994092424804475,0.9967512513081729,0.9914892413832104,0.9763694951664876
|
| 19 |
+
18,0.022788198721839563,0.0873115486749882,0.9922716759745022,0.9748124330117899,0.9995251053353718,0.9965404502526741,0.9922493776872595,0.974771873322598
|
| 20 |
+
19,0.0201940940704684,0.08487435485389072,0.9928639927793761,0.9796355841371919,0.9996676510625834,0.9966283318916149,0.9928537129621784,0.9795037756202805
|
| 21 |
+
20,0.024204948161444823,0.08423052416737056,0.9918203869803125,0.9785637727759914,0.999543313969269,0.9967001306162397,0.9917935367026997,0.9784250269687162
|
| 22 |
+
21,0.022210648509376364,0.08479755786262524,0.9925255260337339,0.9785637727759914,0.9995650707728263,0.9967236805979167,0.9925096808841403,0.9784250269687162
|
| 23 |
+
22,0.021209317767076265,0.08378840901460678,0.9924691149094601,0.9790996784565916,0.9996395186913903,0.9967661854428946,0.9924484543371892,0.978953049109552
|
| 24 |
+
23,0.020506948225085437,0.08634610890958853,0.9930332261521972,0.9796355841371919,0.9996642317726285,0.9967748012898497,0.9930220075147611,0.9794816414686826
|
| 25 |
+
24,0.02156740725595226,0.08542097731226893,0.9925255260337339,0.9790996784565916,0.9996177809811226,0.9967535488673607,0.99250247559768,0.978953049109552
|
| 26 |
+
25,0.022897444096062744,0.0846650462921026,0.9917921814181757,0.9785637727759914,0.9995248539403916,0.9967661854428946,0.9917668694299052,0.9784250269687162
|
| 27 |
+
26,0.020436251746264584,0.08710799287178125,0.992497320471597,0.9796355841371919,0.9996710130726694,0.9967816939674139,0.992476950053736,0.9794816414686826
|
| 28 |
+
27,0.01992749585198731,0.08673783346196079,0.9933434873357025,0.9785637727759914,0.9996570892911916,0.9967460817999998,0.9933288104929896,0.9784250269687162
|
| 29 |
+
28,0.021403753744647282,0.08858360019527448,0.9928075816551024,0.9796355841371919,0.999606274091893,0.9967058745142099,0.9927849927849928,0.9794816414686826
|
| 30 |
+
29,0.020299959013733132,0.08722626937738952,0.9927229649686918,0.9790996784565916,0.9996462984003238,0.9966846220917209,0.9927077444884115,0.978953049109552
|
| 31 |
+
30,0.020390506141604238,0.0871722036523451,0.9928075816551024,0.9790996784565916,0.9996539166228944,0.9967156391407588,0.992791519434629,0.978953049109552
|
| 32 |
+
31,0.02156839072644423,0.08853877165693179,0.9918485925424494,0.9790996784565916,0.9996199401139606,0.9967432098510148,0.9918326974706797,0.9789303079416531
|
| 33 |
+
32,0.021945808510259637,0.0874848713637165,0.9926101427201444,0.9785637727759914,0.9995596260030608,0.9967196598693377,0.9925892402556995,0.9784250269687162
|
| 34 |
+
33,0.019981902686257737,0.08787980412171968,0.9928075816551024,0.9785637727759914,0.9996844881618405,0.9967598671551277,0.992789888879464,0.978401727861771
|
training_metrics.csv
ADDED
|
@@ -0,0 +1,34 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
epoch,train_loss,val_loss,train_accuracy,val_accuracy,train_auc,val_auc,train_f1,val_f1
|
| 2 |
+
1,0.20823453406474376,0.09757183578428348,0.9119422350087437,0.9630225080385852,0.9725903702821805,0.9915828919147744,0.910297666934835,0.9624796084828712
|
| 3 |
+
2,0.09916189847298737,0.09389385130627746,0.9656456253173126,0.9640943193997856,0.9919289132386885,0.9941578813747216,0.9652635181382615,0.9632070291048874
|
| 4 |
+
3,0.08628897206001343,0.07643812209655235,0.9700456930106617,0.9726688102893891,0.9939255987772581,0.995206142754245,0.9697263397947549,0.972327726532827
|
| 5 |
+
4,0.07218970677123938,0.07539547409658647,0.9752355164438428,0.9732047159699893,0.9956400170771655,0.9953945426076607,0.9750298617826062,0.9729437229437229
|
| 6 |
+
5,0.06614316622907083,0.08399687376819623,0.9775483725390647,0.9732047159699893,0.9961454641874501,0.9956823118959574,0.9773361425886908,0.9727965179542981
|
| 7 |
+
6,0.061957231697081554,0.09183556833259546,0.9776611947876122,0.9710610932475884,0.9967495806453283,0.9954910400935566,0.9774692762858443,0.9705561613958561
|
| 8 |
+
7,0.054957266779070195,0.09187950299292132,0.9799740508828341,0.969989281886388,0.9975748148995466,0.9954336011138567,0.9798421441144739,0.9701492537313433
|
| 9 |
+
8,0.05209087519541592,0.07983304823709837,0.9814971512382242,0.9705251875669882,0.9977005648962951,0.9962555529133624,0.9813921824473819,0.9706353443673251
|
| 10 |
+
9,0.04740831561803939,0.09881889566156259,0.9840074462684041,0.9748124330117899,0.9980009214357941,0.9957426228246422,0.9839107857325274,0.9743589743589743
|
| 11 |
+
10,0.0446742644357306,0.07986885764399525,0.9833587183392565,0.9758842443729904,0.9983884865757738,0.9962113248989936,0.9832681073109864,0.9755035383777899
|
| 12 |
+
11,0.035358944823620266,0.0793747516211206,0.9879562249675636,0.9785637727759914,0.9989876737827071,0.9963147150624534,0.9879019691174388,0.9784714747039828
|
| 13 |
+
12,0.031669339039948494,0.07909312547211478,0.9896203531336379,0.9769560557341908,0.9991141684177519,0.9963985759728153,0.9895786135024921,0.9768692845615923
|
| 14 |
+
13,0.028682257606570522,0.074580498111593,0.9902126699385119,0.9764201500535906,0.9992601159327363,0.996554235607802,0.990182486914698,0.9763440860215054
|
| 15 |
+
14,0.028616402251874458,0.0833205767095664,0.9899024087550065,0.9785637727759914,0.9992961354236629,0.996564574624148,0.9898704091449267,0.9783783783783784
|
| 16 |
+
15,0.02636249609232292,0.07616832004291069,0.9915665369210809,0.977491961414791,0.999344236193616,0.9966432660263369,0.99154292179324,0.9773706896551724
|
| 17 |
+
16,0.027063428668989625,0.07865303261295392,0.9900716421278276,0.9785637727759914,0.9994422261383196,0.9967035769550219,0.9900384876613085,0.9783783783783784
|
| 18 |
+
17,0.025056365976369527,0.0766807585667184,0.9915101257968071,0.9764201500535906,0.9994092424804475,0.9967512513081729,0.9914892413832104,0.9763694951664876
|
| 19 |
+
18,0.022788198721839563,0.0873115486749882,0.9922716759745022,0.9748124330117899,0.9995251053353718,0.9965404502526741,0.9922493776872595,0.974771873322598
|
| 20 |
+
19,0.0201940940704684,0.08487435485389072,0.9928639927793761,0.9796355841371919,0.9996676510625834,0.9966283318916149,0.9928537129621784,0.9795037756202805
|
| 21 |
+
20,0.024204948161444823,0.08423052416737056,0.9918203869803125,0.9785637727759914,0.999543313969269,0.9967001306162397,0.9917935367026997,0.9784250269687162
|
| 22 |
+
21,0.022210648509376364,0.08479755786262524,0.9925255260337339,0.9785637727759914,0.9995650707728263,0.9967236805979167,0.9925096808841403,0.9784250269687162
|
| 23 |
+
22,0.021209317767076265,0.08378840901460678,0.9924691149094601,0.9790996784565916,0.9996395186913903,0.9967661854428946,0.9924484543371892,0.978953049109552
|
| 24 |
+
23,0.020506948225085437,0.08634610890958853,0.9930332261521972,0.9796355841371919,0.9996642317726285,0.9967748012898497,0.9930220075147611,0.9794816414686826
|
| 25 |
+
24,0.02156740725595226,0.08542097731226893,0.9925255260337339,0.9790996784565916,0.9996177809811226,0.9967535488673607,0.99250247559768,0.978953049109552
|
| 26 |
+
25,0.022897444096062744,0.0846650462921026,0.9917921814181757,0.9785637727759914,0.9995248539403916,0.9967661854428946,0.9917668694299052,0.9784250269687162
|
| 27 |
+
26,0.020436251746264584,0.08710799287178125,0.992497320471597,0.9796355841371919,0.9996710130726694,0.9967816939674139,0.992476950053736,0.9794816414686826
|
| 28 |
+
27,0.01992749585198731,0.08673783346196079,0.9933434873357025,0.9785637727759914,0.9996570892911916,0.9967460817999998,0.9933288104929896,0.9784250269687162
|
| 29 |
+
28,0.021403753744647282,0.08858360019527448,0.9928075816551024,0.9796355841371919,0.999606274091893,0.9967058745142099,0.9927849927849928,0.9794816414686826
|
| 30 |
+
29,0.020299959013733132,0.08722626937738952,0.9927229649686918,0.9790996784565916,0.9996462984003238,0.9966846220917209,0.9927077444884115,0.978953049109552
|
| 31 |
+
30,0.020390506141604238,0.0871722036523451,0.9928075816551024,0.9790996784565916,0.9996539166228944,0.9967156391407588,0.992791519434629,0.978953049109552
|
| 32 |
+
31,0.02156839072644423,0.08853877165693179,0.9918485925424494,0.9790996784565916,0.9996199401139606,0.9967432098510148,0.9918326974706797,0.9789303079416531
|
| 33 |
+
32,0.021945808510259637,0.0874848713637165,0.9926101427201444,0.9785637727759914,0.9995596260030608,0.9967196598693377,0.9925892402556995,0.9784250269687162
|
| 34 |
+
33,0.019981902686257737,0.08787980412171968,0.9928075816551024,0.9785637727759914,0.9996844881618405,0.9967598671551277,0.992789888879464,0.978401727861771
|