Upload PiT model from experiment a3
This view is limited to 50 files because it contains too many changes.
- .gitattributes +2 -0
- README.md +161 -0
- config.json +76 -0
- confusion_matrices/PiT_Confusion_Matrix_a.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_b.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_c.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_d.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_e.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_f.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_g.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_h.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_i.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_j.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_k.png +0 -0
- confusion_matrices/PiT_Confusion_Matrix_l.png +0 -0
- evaluation_results.csv +145 -0
- model.safetensors +3 -0
- pit-gravit-a3.pth +3 -0
- pytorch_model.bin +3 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_a.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_b.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_c.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_d.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_e.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_f.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_g.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_h.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_i.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_j.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_k.png +0 -0
- roc_confusion_matrix/PiT_roc_confusion_matrix_l.png +0 -0
- roc_curves/PiT_ROC_a.png +0 -0
- roc_curves/PiT_ROC_b.png +0 -0
- roc_curves/PiT_ROC_c.png +0 -0
- roc_curves/PiT_ROC_d.png +0 -0
- roc_curves/PiT_ROC_e.png +0 -0
- roc_curves/PiT_ROC_f.png +0 -0
- roc_curves/PiT_ROC_g.png +0 -0
- roc_curves/PiT_ROC_h.png +0 -0
- roc_curves/PiT_ROC_i.png +0 -0
- roc_curves/PiT_ROC_j.png +0 -0
- roc_curves/PiT_ROC_k.png +0 -0
- roc_curves/PiT_ROC_l.png +0 -0
- training_curves/PiT_accuracy.png +0 -0
- training_curves/PiT_auc.png +0 -0
- training_curves/PiT_combined_metrics.png +3 -0
- training_curves/PiT_f1.png +0 -0
- training_curves/PiT_loss.png +0 -0
- training_curves/PiT_metrics.csv +51 -0
- training_metrics.csv +51 -0
.gitattributes
CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
+training_curves/PiT_combined_metrics.png filter=lfs diff=lfs merge=lfs -text
+training_notebook_a3.ipynb filter=lfs diff=lfs merge=lfs -text
README.md
ADDED
@@ -0,0 +1,161 @@
---
license: apache-2.0
tags:
- vision-transformer
- image-classification
- pytorch
- timm
- pit
- gravitational-lensing
- strong-lensing
- astronomy
- astrophysics
datasets:
- C21
metrics:
- accuracy
- auc
- f1
model-index:
- name: PiT-a3
  results:
  - task:
      type: image-classification
      name: Strong Gravitational Lens Discovery
    dataset:
      type: common-test-sample
      name: Common Test Sample (More et al. 2024)
    metrics:
    - type: accuracy
      value: 0.7590
      name: Average Accuracy
    - type: auc
      value: 0.8687
      name: Average AUC-ROC
    - type: f1
      value: 0.5159
      name: Average F1-Score
---

# 🌌 pit-gravit-a3

🔭 This model is part of **GraViT**: Transfer Learning with Vision Transformers and MLP-Mixer for Strong Gravitational Lens Discovery

🔗 **GitHub Repository**: [https://github.com/parlange/gravit](https://github.com/parlange/gravit)

## 🛰️ Model Details

- **🤖 Model Type**: PiT
- **🧪 Experiment**: A3 - C21-all-blocks-ResNet18
- **🌌 Dataset**: C21
- **🪐 Fine-tuning Strategy**: all-blocks

## 💻 Quick Start

```python
import torch
import timm

# Load the model directly from the Hub
model = timm.create_model(
    'hf-hub:parlange/pit-gravit-a3',
    pretrained=True
)
model.eval()

# Example inference
dummy_input = torch.randn(1, 3, 224, 224)
with torch.no_grad():
    output = model(dummy_input)
    predictions = torch.softmax(output, dim=1)
    print(f"Lens probability: {predictions[0][1]:.4f}")
```

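For real survey cutouts, the preprocessing should match the `pretrained_cfg` stored in `config.json` (224x224 input, bicubic interpolation, ImageNet mean/std). A minimal sketch using timm's data helpers; it assumes a recent timm release, and the image path is a placeholder that is not part of this repository:

```python
from PIL import Image
import timm
import torch

model = timm.create_model('hf-hub:parlange/pit-gravit-a3', pretrained=True)
model.eval()

# Build the evaluation transform (resize, center crop, normalize) from the model's pretrained_cfg
data_config = timm.data.resolve_model_data_config(model)
transform = timm.data.create_transform(**data_config, is_training=False)

image = Image.open('candidate_cutout.png').convert('RGB')  # placeholder path
batch = transform(image).unsqueeze(0)                       # shape: (1, 3, 224, 224)

with torch.no_grad():
    probabilities = torch.softmax(model(batch), dim=1)
print(f"Lens probability: {probabilities[0][1]:.4f}")
```
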
## ⚡️ Training Configuration

**Training Dataset:** C21 (Cañameras et al. 2021)
**Fine-tuning Strategy:** all-blocks

| 🔧 Parameter | 📝 Value |
|--------------|----------|
| Batch Size | 192 |
| Learning Rate | AdamW with ReduceLROnPlateau |
| Epochs | 100 |
| Patience | 10 |
| Optimizer | AdamW |
| Scheduler | ReduceLROnPlateau |
| Image Size | 224x224 |
| Fine Tune Mode | all_blocks |
| Stochastic Depth Probability | 0.1 |

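The table above maps onto a standard PyTorch fine-tuning loop: AdamW over all blocks, `ReduceLROnPlateau` lowering the learning rate, and training stopping after 10 epochs without validation improvement. A minimal sketch; the initial learning rate, the scheduler arguments, and the `train_one_epoch`/`evaluate` helpers are illustrative assumptions, not values taken from this repository:

```python
import torch

# All blocks are fine-tuned, so every parameter goes to the optimizer
optimizer = torch.optim.AdamW(model.parameters(), lr=1e-4)  # lr is an assumed placeholder
scheduler = torch.optim.lr_scheduler.ReduceLROnPlateau(optimizer, mode='min')

best_val_loss, bad_epochs = float('inf'), 0
for epoch in range(100):                  # Epochs = 100
    train_one_epoch(model, optimizer)     # hypothetical training helper
    val_loss = evaluate(model)            # hypothetical validation helper
    scheduler.step(val_loss)              # reduce the LR when validation loss plateaus
    if val_loss < best_val_loss:
        best_val_loss, bad_epochs = val_loss, 0
    else:
        bad_epochs += 1
        if bad_epochs >= 10:              # Patience = 10
            break
```
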
## 📈 Training Curves

![Combined Training Metrics](training_curves/PiT_combined_metrics.png)

## 🏁 Final Epoch Training Metrics

| Metric | Training | Validation |
|:---------:|:-----------:|:-------------:|
| 📉 Loss | 0.0038 | 0.0166 |
| 🎯 Accuracy | 0.9987 | 0.9960 |
| 📊 AUC-ROC | 1.0000 | 0.9999 |
| ⚖️ F1 Score | 0.9987 | 0.9960 |

## ☑️ Evaluation Results

### ROC Curves and Confusion Matrices

Performance across all test datasets (a through l) in the Common Test Sample (More et al. 2024):

![ROC and Confusion Matrix a](roc_confusion_matrix/PiT_roc_confusion_matrix_a.png)
![ROC and Confusion Matrix b](roc_confusion_matrix/PiT_roc_confusion_matrix_b.png)
![ROC and Confusion Matrix c](roc_confusion_matrix/PiT_roc_confusion_matrix_c.png)
![ROC and Confusion Matrix d](roc_confusion_matrix/PiT_roc_confusion_matrix_d.png)
![ROC and Confusion Matrix e](roc_confusion_matrix/PiT_roc_confusion_matrix_e.png)
![ROC and Confusion Matrix f](roc_confusion_matrix/PiT_roc_confusion_matrix_f.png)
![ROC and Confusion Matrix g](roc_confusion_matrix/PiT_roc_confusion_matrix_g.png)
![ROC and Confusion Matrix h](roc_confusion_matrix/PiT_roc_confusion_matrix_h.png)
![ROC and Confusion Matrix i](roc_confusion_matrix/PiT_roc_confusion_matrix_i.png)
![ROC and Confusion Matrix j](roc_confusion_matrix/PiT_roc_confusion_matrix_j.png)
![ROC and Confusion Matrix k](roc_confusion_matrix/PiT_roc_confusion_matrix_k.png)
![ROC and Confusion Matrix l](roc_confusion_matrix/PiT_roc_confusion_matrix_l.png)

### 📋 Performance Summary

Average performance across 12 test datasets from the Common Test Sample (More et al. 2024):

| Metric | Value |
|-----------|----------|
| 🎯 Average Accuracy | 0.7590 |
| 📈 Average AUC-ROC | 0.8687 |
| ⚖️ Average F1-Score | 0.5159 |

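These averages can be recomputed from `evaluation_results.csv` in this repository (one row per model and test set); a minimal pandas sketch:

```python
import pandas as pd

results = pd.read_csv('evaluation_results.csv')  # columns: Model,Dataset,Loss,Accuracy,AUCROC,F1
pit = results[results['Model'] == 'PiT']         # the 12 test sets (a through l) for this model
print(pit[['Accuracy', 'AUCROC', 'F1']].mean().round(4))
# Accuracy ≈ 0.7590, AUCROC ≈ 0.8687, F1 ≈ 0.5159, matching the table above
```
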
## 📘 Citation

If you use this model in your research, please cite:

```bibtex
@misc{parlange2025gravit,
      title={GraViT: Transfer Learning with Vision Transformers and MLP-Mixer for Strong Gravitational Lens Discovery},
      author={René Parlange and Juan C. Cuevas-Tello and Octavio Valenzuela and Omar de J. Cabrera-Rosas and Tomás Verdugo and Anupreeta More and Anton T. Jaelani},
      year={2025},
      eprint={2509.00226},
      archivePrefix={arXiv},
      primaryClass={cs.CV},
      url={https://arxiv.org/abs/2509.00226},
}
```

---

## Model Card Contact

For questions about this model, please contact the author through: https://github.com/parlange/
config.json
ADDED
@@ -0,0 +1,76 @@
{
    "architecture": "vit_base_patch16_224",
    "num_classes": 2,
    "num_features": 1000,
    "global_pool": "avg",
    "crop_pct": 0.875,
    "interpolation": "bicubic",
    "mean": [
        0.485,
        0.456,
        0.406
    ],
    "std": [
        0.229,
        0.224,
        0.225
    ],
    "first_conv": "conv1",
    "classifier": "fc",
    "input_size": [
        3,
        224,
        224
    ],
    "pool_size": [
        7,
        7
    ],
    "pretrained_cfg": {
        "tag": "gravit_a3",
        "custom_load": false,
        "input_size": [
            3,
            224,
            224
        ],
        "fixed_input_size": true,
        "interpolation": "bicubic",
        "crop_pct": 0.875,
        "crop_mode": "center",
        "mean": [
            0.485,
            0.456,
            0.406
        ],
        "std": [
            0.229,
            0.224,
            0.225
        ],
        "num_classes": 2,
        "pool_size": [
            7,
            7
        ],
        "first_conv": "conv1",
        "classifier": "fc"
    },
    "model_name": "pit_gravit_a3",
    "experiment": "a3",
    "training_strategy": "all-blocks",
    "dataset": "C21",
    "hyperparameters": {
        "batch_size": "192",
        "learning_rate": "AdamW with ReduceLROnPlateau",
        "epochs": "100",
        "patience": "10",
        "optimizer": "AdamW",
        "scheduler": "ReduceLROnPlateau",
        "image_size": "224x224",
        "fine_tune_mode": "all_blocks",
        "stochastic_depth_probability": "0.1"
    },
    "hf_hub_id": "parlange/pit-gravit-a3",
    "license": "apache-2.0"
}
confusion_matrices/PiT_Confusion_Matrix_a.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_b.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_c.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_d.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_e.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_f.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_g.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_h.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_i.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_j.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_k.png
ADDED
confusion_matrices/PiT_Confusion_Matrix_l.png
ADDED
evaluation_results.csv
ADDED
@@ -0,0 +1,145 @@
Model,Dataset,Loss,Accuracy,AUCROC,F1
ViT,a,0.3652414279945689,0.8912291732159698,0.8917799263351749,0.4383116883116883
ViT,b,0.21351260967325467,0.9459289531593839,0.9444456721915285,0.6108597285067874
ViT,c,0.850089883324636,0.758252121974222,0.8170580110497236,0.2598652550529355
ViT,d,0.125797027155771,0.9723357434768941,0.9632872928176796,0.7541899441340782
ViT,e,0.5662868288882347,0.8803512623490669,0.9096268826156058,0.712401055408971
ViT,f,0.3309973750718474,0.8993106653241422,0.9044572137856804,0.17197452229299362
ViT,g,0.06683265567384661,0.9775,0.9995110555555556,0.9779303580186366
ViT,h,0.4043246931489557,0.878,0.9965371111111111,0.8909740840035746
ViT,i,0.020328776298090816,0.9915,0.999877,0.991546494281452
ViT,j,7.168746640741825,0.509,0.4538551111111111,0.1088929219600726
ViT,k,7.1222426953688265,0.523,0.48649888888888887,0.11173184357541899
ViT,l,2.481225689433318,0.7816614668711332,0.6814674636422873,0.6155851410483195
MLP-Mixer,a,0.16029433381890995,0.9534737503929582,0.8991058931860036,0.6084656084656085
MLP-Mixer,b,0.1373700322756056,0.9629047469349261,0.9401123388581952,0.6609195402298851
MLP-Mixer,c,0.27720538959380253,0.9141779314680918,0.8577974217311233,0.4572564612326044
MLP-Mixer,d,0.11598697743656425,0.9682489783087079,0.955084714548803,0.6948640483383686
MLP-Mixer,e,0.40732662754315313,0.8957189901207464,0.9144403239234089,0.7076923076923077
MLP-Mixer,f,0.10986769875007599,0.9635194795135931,0.9131062483453624,0.3281027104136947
MLP-Mixer,g,0.028587095644325017,0.9901666666666666,0.9997029999999999,0.9902398676592225
MLP-Mixer,h,0.10272313961014151,0.9643333333333334,0.998619888888889,0.9654838709677419
MLP-Mixer,i,0.01725051350519061,0.993,0.9998728888888889,0.9930325149303252
MLP-Mixer,j,3.9579579369425772,0.5163333333333333,0.6010726111111112,0.09369144284821987
MLP-Mixer,k,3.9466213275045154,0.5191666666666667,0.6683313333333333,0.09419152276295134
MLP-Mixer,l,1.3240398510736613,0.8240177674369414,0.744620509879184,0.6619260463226331
CvT,a,0.2081917969545678,0.9352404904118202,0.9240874769797423,0.5672268907563025
CvT,b,0.15224700975965233,0.9575605155611443,0.9504567219152855,0.6666666666666666
CvT,c,0.49155200616401235,0.8597925180760767,0.8735598526703499,0.3770949720670391
CvT,d,0.09181369168063785,0.9739075762338887,0.9780386740331491,0.7648725212464589
CvT,e,0.32513720903680565,0.9143798024149287,0.9261106486036479,0.7758620689655172
CvT,f,0.18988747454602042,0.940825652544342,0.9312245836823443,0.2611218568665377
CvT,g,0.04789180162362754,0.9845,0.9996506666666666,0.9847165160230074
CvT,h,0.22777999278716743,0.9326666666666666,0.9980902777777778,0.9368355222013759
CvT,i,0.01585208964161575,0.9931666666666666,0.9999184444444444,0.9932040444223438
CvT,j,4.557504334926605,0.5136666666666667,0.4219552222222222,0.1049079754601227
CvT,k,4.525464609175921,0.5223333333333333,0.7008621111111111,0.10660847880299251
CvT,l,1.5619914421430425,0.809793242028449,0.7106891758223672,0.6473875110283306
Swin,a,0.13412107322078123,0.9607041810751336,0.921658379373849,0.6753246753246753
Swin,b,0.12724596003046465,0.9556743162527507,0.9398747697974218,0.6483790523690773
Swin,c,0.17965890587556665,0.9443571204023892,0.9029281767955801,0.5949656750572082
Swin,d,0.06030154695547883,0.9833385727758567,0.9931565377532229,0.8306709265175719
Swin,e,0.4499740816497384,0.8562019758507134,0.8587376069022933,0.6649616368286445
Swin,f,0.09911939381673643,0.9672372395631632,0.9347786366220656,0.3806734992679356
Swin,g,0.038731103701516986,0.985,0.9998738888888888,0.9852216748768473
Swin,h,0.06651869903318584,0.979,0.999785,0.9794319294809011
Swin,i,0.0032394034853205087,0.9996666666666667,0.9999967777777777,0.9996667777407531
Swin,j,5.625512751281262,0.496,0.10115866666666667,0.04182509505703422
Swin,k,5.590021039650775,0.5106666666666667,0.37088833333333326,0.04302477183833116
Swin,l,1.8411567582560506,0.822484268415208,0.5854755526940438,0.65565699046056
CaiT,a,0.20098744187104706,0.9478151524677775,0.9047292817679559,0.6047619047619047
CaiT,b,0.12978406120776775,0.9685633448601069,0.9467771639042358,0.7175141242937854
CaiT,c,0.2854890818960897,0.9248663942156554,0.8821031307550645,0.5152129817444219
CaiT,d,0.10324450797222483,0.9789374410562716,0.9716519337016575,0.7912772585669782
CaiT,e,0.4533339374577044,0.8990120746432492,0.9009763112086581,0.7341040462427746
CaiT,f,0.11921021888981406,0.9653009062040121,0.9248623124563286,0.36182336182336183
CaiT,g,0.02093089486740064,0.9923333333333333,0.9999474444444445,0.9923916639100232
CaiT,h,0.10348050882376265,0.9691666666666666,0.9995327777777778,0.9700889248181084
CaiT,i,0.006860514354542829,0.9978333333333333,0.9999788888888889,0.9978380176284717
CaiT,j,4.86097429022938,0.5145,0.5177155,0.08367411135577225
CaiT,k,4.8469039183312566,0.52,0.7220807222222222,0.0845518118245391
CaiT,l,1.617374696626156,0.8247051980328909,0.7258400455085416,0.6629384850025419
DeiT,a,0.27381800207860135,0.8953159383841559,0.8986252302025783,0.43463497453310695
DeiT,b,0.1319006348473214,0.9534737503929582,0.9522191528545119,0.6336633663366337
DeiT,c,0.5039299008644665,0.7972335743476894,0.8374493554327808,0.28412874583795783
DeiT,d,0.07908134242881022,0.9742219427852876,0.9686187845303867,0.757396449704142
DeiT,e,0.3087258418422892,0.8869374313940724,0.9196775902520246,0.713091922005571
DeiT,f,0.2156604077872147,0.9148787855317171,0.9145406292179694,0.18892988929889298
DeiT,g,0.04722818533703685,0.9821666666666666,0.9995671111111111,0.9824099950682229
DeiT,h,0.24446571580693124,0.8993333333333333,0.9973094444444446,0.9082066869300912
DeiT,i,0.01922516017779708,0.9931666666666666,0.9998760000000001,0.9931859730762839
DeiT,j,3.201324864923954,0.5038333333333334,0.5165665555555555,0.07345160286336756
DeiT,k,3.173321856930852,0.5148333333333334,0.5144209444444445,0.074992055926279
DeiT,l,1.1505240741554759,0.7888530484902967,0.6992895835538881,0.6182965299684543
DeiT3,a,0.14936432559529178,0.9519019176359635,0.9349825046040517,0.6240786240786241
DeiT3,b,0.14933169767358925,0.9515875510845646,0.9439152854511971,0.6225490196078431
DeiT3,c,0.22061521056612046,0.9368123231688148,0.9148646408839779,0.5582417582417583
DeiT3,d,0.10169743415564346,0.9682489783087079,0.9643388581952119,0.7154929577464789
DeiT3,e,0.49460477683729975,0.8518111964873765,0.8834708241882994,0.6529562982005142
DeiT3,f,0.13154652377257423,0.9591046394547286,0.9363108852365101,0.3248081841432225
DeiT3,g,0.05130978459212929,0.9823333333333333,0.9997385555555556,0.982605841811618
DeiT3,h,0.08910193234775216,0.9745,0.9995217222222222,0.975085490962384
DeiT3,i,0.02605568784568459,0.9911666666666666,0.9998692222222222,0.9912266181095845
DeiT3,j,3.271038075208664,0.5056666666666667,0.4370591666666666,0.08286951144094001
DeiT3,k,3.245784008204937,0.5145,0.5090863888888889,0.08425023577491354
DeiT3,l,1.112910136327876,0.8202104595209138,0.7017805623573649,0.656912209889001
Twins_SVT,a,0.18666393640786363,0.9355548569632192,0.9178057090239411,0.5665961945031712
Twins_SVT,b,0.12907026239917624,0.9559886828041496,0.958646408839779,0.6568627450980392
Twins_SVT,c,0.37308393637551635,0.8491040553285131,0.8678618784530387,0.3582887700534759
Twins_SVT,d,0.07745791368212238,0.9789374410562716,0.982377532228361,0.8
Twins_SVT,e,0.46567638177510545,0.8572996706915478,0.8989404374479679,0.6733668341708543
Twins_SVT,f,0.1620182347072841,0.9354039191387189,0.9297958448525039,0.24319419237749546
Twins_SVT,g,0.03985891605913639,0.9833333333333333,0.9995775555555556,0.9835688465330266
Twins_SVT,h,0.16922680978477,0.9266666666666666,0.9980967777777777,0.9315281668222845
Twins_SVT,i,0.012495769090950489,0.9955,0.9999184444444444,0.9955097289206719
Twins_SVT,j,5.5489834444224835,0.49816666666666665,0.40547916666666667,0.051653543307086616
Twins_SVT,k,5.521620307348669,0.5103333333333333,0.5116922222222222,0.052869116698903935
Twins_SVT,l,1.8607527904559573,0.8012267992173867,0.6587701738584388,0.6306377124889456
Twins_PCPVT,a,0.5452342244934938,0.7969192077962904,0.895244014732965,0.3228511530398323
Twins_PCPVT,b,0.33666214395135274,0.8836843759823955,0.926718232044199,0.45427728613569324
Twins_PCPVT,c,0.8452249476460681,0.680289217227287,0.8652670349907919,0.23245283018867924
Twins_PCPVT,d,0.14044462036633035,0.9487582521219742,0.9665690607734807,0.6539278131634819
Twins_PCPVT,e,0.9610598787387299,0.6893523600439078,0.849345341708923,0.5211505922165821
Twins_PCPVT,f,0.4996267252911643,0.8163581442181086,0.909773537083411,0.11496827174318776
Twins_PCPVT,g,0.16858429829776286,0.9403333333333334,0.9974785,0.9434260429835651
Twins_PCPVT,h,0.4382073585242033,0.8325,0.9937448888888889,0.8559139784946237
Twins_PCPVT,i,0.06455629007518292,0.9748333333333333,0.999179,0.97533082829603
Twins_PCPVT,j,2.452636483669281,0.5051666666666667,0.3821618888888889,0.20123755716976055
Twins_PCPVT,k,2.348608487725258,0.5396666666666666,0.6640067222222222,0.21310541310541312
Twins_PCPVT,l,1.069163286557427,0.7349690656231823,0.6925868024466941,0.5836517693969098
PiT,a,1.4542810870805876,0.7161270040867652,0.9052854511970534,0.26645004061738425
PiT,b,0.48083062532262133,0.8921722728701666,0.9610782688766114,0.488822652757079
PiT,c,3.0596617273248543,0.5278214397988054,0.823461325966851,0.17923497267759564
PiT,d,0.0542862427054065,0.9864822382898459,0.9955837937384899,0.8840970350404312
PiT,e,1.2118028251844757,0.7727771679473107,0.9089305986528419,0.6130841121495327
PiT,f,1.3020594693266185,0.7730617303074897,0.9206398944503998,0.10067526089625538
PiT,g,0.2426671743527986,0.9443333333333334,0.9978501666666667,0.9471351693573916
PiT,h,1.609877428545151,0.7511666666666666,0.97719,0.8003209843520128
PiT,i,0.01652756105083972,0.9943333333333333,0.999909,0.9943502824858758
PiT,j,4.793344738483429,0.4985,0.4088527777777778,0.1740323908866319
PiT,k,4.567205076335464,0.5485,0.8206032222222223,0.18965001495662578
PiT,l,2.3343544872733246,0.7027655861667812,0.7048351046138528,0.5527174345508077
ResNet-18,a,1.1835124935604648,0.6365922665828356,0.9285580110497238,0.22830440587449932
ResNet-18,b,1.0141342383478842,0.7117258723671801,0.936340699815838,0.2716441620333598
ResNet-18,c,1.8204121201415364,0.5328513046211883,0.8883425414364641,0.18708971553610504
ResNet-18,d,0.011235734144227473,0.9949701351776171,0.9996408839779005,0.9553072625698324
ResNet-18,e,1.056551858831839,0.6125137211855104,0.9263868916975707,0.4920863309352518
ResNet-18,f,1.0591482175922742,0.6988614359848192,0.9375419355678716,0.08085106382978724
ResNet-18,g,0.5340945276358107,0.8486666666666667,0.9978233333333334,0.8685201274254272
ResNet-18,h,0.9615578431227186,0.7538333333333334,0.9955927777777778,0.8024080267558529
ResNet-18,i,0.002392592921940377,0.9988333333333334,0.9999976666666667,0.998834304746045
ResNet-18,j,7.01291295003891,0.35383333333333333,0.07525633333333334,0.01524003048006096
ResNet-18,k,6.481211027059704,0.504,0.7462510555555556,0.019762845849802372
ResNet-18,l,2.7791932088225773,0.6373010417217493,0.6165565470444929,0.4826910023380345
Ensemble,a,,0.9487582521219742,0.9267523020257826,0.6320541760722348
Ensemble,b,,0.9698208110657026,0.9539668508287292,0.7446808510638298
Ensemble,c,,0.8924866394215656,0.8899631675874772,0.45016077170418006
Ensemble,d,,0.9823954731216599,0.9802780847145488,0.8333333333333334
Ensemble,e,,0.9023051591657519,0.9130326193899946,0.7588075880758808
Ensemble,f,,0.9549221593989621,0.9363232543302679,0.3248259860788863
Ensemble,g,,0.9905,0.999917,0.9905831818932761
Ensemble,h,,0.9495,0.9994414444444444,0.9518971265280203
Ensemble,i,,0.9971666666666666,0.9999844444444443,0.997172792283386
Ensemble,j,,0.5115,0.3640104444444444,0.07801195344447939
Ensemble,k,,0.5181666666666667,0.6087856666666667,0.07900605288308378
Ensemble,l,,0.817037702924224,0.6790663441280981,0.6534455128205128
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:26af6ee601d9a75b5e9d2c48d0e96aa0250d9e0c94669aa9f0e557dbd75355aa
size 290985688
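The three lines above are a Git LFS pointer; the actual weight file (about 291 MB) is fetched with `git lfs pull`. For inspecting the raw checkpoint without going through timm, a minimal sketch assuming the `safetensors` package and a local clone with the LFS object present:

```python
from safetensors.torch import load_file

# Load the raw state dict shipped as model.safetensors
state_dict = load_file('model.safetensors')
print(f"{len(state_dict)} tensors")
for name, tensor in list(state_dict.items())[:5]:  # peek at the first few parameter shapes
    print(name, tuple(tensor.shape))
```
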
pit-gravit-a3.pth
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:41ba251989ae702d3d2de143066bab0d50d70052bb27a1aa9051ada0e24785d8
size 291039882
pytorch_model.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:41ba251989ae702d3d2de143066bab0d50d70052bb27a1aa9051ada0e24785d8
size 291039882
roc_confusion_matrix/PiT_roc_confusion_matrix_a.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_b.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_c.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_d.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_e.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_f.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_g.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_h.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_i.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_j.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_k.png
ADDED
roc_confusion_matrix/PiT_roc_confusion_matrix_l.png
ADDED
roc_curves/PiT_ROC_a.png
ADDED
roc_curves/PiT_ROC_b.png
ADDED
roc_curves/PiT_ROC_c.png
ADDED
roc_curves/PiT_ROC_d.png
ADDED
roc_curves/PiT_ROC_e.png
ADDED
roc_curves/PiT_ROC_f.png
ADDED
roc_curves/PiT_ROC_g.png
ADDED
roc_curves/PiT_ROC_h.png
ADDED
roc_curves/PiT_ROC_i.png
ADDED
roc_curves/PiT_ROC_j.png
ADDED
roc_curves/PiT_ROC_k.png
ADDED
roc_curves/PiT_ROC_l.png
ADDED
training_curves/PiT_accuracy.png
ADDED
training_curves/PiT_auc.png
ADDED
training_curves/PiT_combined_metrics.png
ADDED
training_curves/PiT_f1.png
ADDED
training_curves/PiT_loss.png
ADDED
training_curves/PiT_metrics.csv
ADDED
@@ -0,0 +1,51 @@
epoch,train_loss,val_loss,train_accuracy,val_accuracy,train_auc,val_auc,train_f1,val_f1
1,0.08380111252069473,0.05348298209905624,0.9663625,0.983,0.9956030796875,0.9983500000000001,0.9663612385464455,0.9830845771144279
2,0.041526970613375305,0.057092710956931116,0.9851375,0.976,0.9987992937500001,0.997856,0.9851350844512233,0.9763779527559056
3,0.03518724364750087,0.05157612532377243,0.9869125,0.978,0.9991720912500001,0.998972,0.9869107002212805,0.9775967413441955
4,0.030401692363340407,0.027769247174146586,0.988775,0.99,0.999388499375,0.99954,0.9887733159973996,0.9900596421471173
5,0.026774657212290914,0.029289492189884185,0.9906,0.988,0.9994664218750001,0.9994879999999999,0.9905945918903369,0.9880239520958084
6,0.02564074798002839,0.03437352496385574,0.9904875,0.984,0.99955344875,0.999422,0.9904873810922636,0.9838709677419355
7,0.022729650076944382,0.03372676639258861,0.9919,0.991,0.9996243059375,0.9994419999999999,0.9919028340080972,0.9909729187562688
8,0.02426139419497922,0.026328840628266334,0.9912625,0.994,0.9996046865625,0.999582,0.991261953872117,0.9940119760479041
9,0.022508089120825754,0.027908500015735627,0.9914875,0.991,0.9996199100000001,0.9995580000000001,0.9914863293702885,0.9909729187562688
10,0.021495544563932344,0.029435926083475353,0.99205,0.991,0.999693424375,0.9986379999999999,0.9920480120030007,0.9910269192422732
11,0.019883352918596938,0.04117086360044778,0.9928875,0.989,0.999712243125,0.999426,0.992888833343748,0.9889224572004028
12,0.021287586880242453,0.03494359707832336,0.9922875,0.988,0.9996557534375,0.999516,0.992286825097196,0.988
13,0.019410972280753776,0.031757158495485785,0.99295,0.99,0.9997082581250001,0.999506,0.9929524666366771,0.9900199600798403
14,0.018788172438275068,0.01990511727356352,0.99325,0.992,0.999754944375,0.999768,0.9932479743923177,0.9920159680638723
15,0.019693653361592442,0.029755046516656877,0.9928875,0.989,0.9997258168750001,0.9986579999999999,0.9928865218967609,0.9890547263681592
16,0.018050431178323924,0.035269855976104734,0.9931625,0.987,0.9997742821875,0.999514,0.9931619016663958,0.9869083585095669
17,0.01741551666567102,0.01681467080116272,0.9936,0.996,0.999789191875,0.999856,0.9935993599359936,0.9959919839679359
18,0.018152127362787724,0.0352143643796444,0.9931875,0.987,0.9997540181250001,0.999164,0.9931891175845736,0.9869608826479438
19,0.01749123232383281,0.027128501921892166,0.9935625,0.99,0.99979617125,0.99966,0.9935612927423892,0.9900398406374502
20,0.017235335105098785,0.02004218239337206,0.9937,0.992,0.9997753318749999,0.999792,0.9937,0.9919839679358717
21,0.01637661460810341,0.024918013259768487,0.994125,0.991,0.9998114468749999,0.99964,0.994124853121328,0.9910625620655412
22,0.015613745130226016,0.0389368434548378,0.9942375,0.985,0.9998256103125001,0.9987199999999999,0.9942378601337416,0.9851924975320829
23,0.015016568828211166,0.03343766522407532,0.9947,0.99,0.9998105456249999,0.998522,0.9946998674966874,0.9899598393574297
24,0.009668744673673063,0.015450777329489938,0.9963875,0.997,0.9999366387499999,0.9989,0.996386641827434,0.997002997002997
25,0.007466233547689626,0.014600379575043917,0.997275,0.997,0.999963644375,0.999846,0.9972747956096707,0.996996996996997
26,0.006449911724647973,0.015622713446675334,0.9975875,0.996,0.9999733146875,0.9989319999999999,0.9975877110752809,0.9959919839679359
27,0.006039172103576129,0.016120399681851267,0.9978375,0.997,0.9999748803124999,0.999862,0.9978374189032089,0.996996996996997
28,0.005462758123714593,0.01527087467815727,0.99775,0.996,0.999980760625,0.998926,0.9977497187148393,0.9959919839679359
29,0.005839177356922301,0.015614406049251557,0.99785,0.996,0.9999779015624999,0.9999180000000001,0.9978499462486562,0.9959919839679359
30,0.006277850946440594,0.013740215204656124,0.9975875,0.997,0.9999744187499999,0.99894,0.9975879522589515,0.996996996996997
31,0.006149465677636908,0.019031632061931304,0.9979375,0.995,0.9999592309375,0.999892,0.9979375773408498,0.9949849548645938
32,0.005350587552280922,0.01742541548621375,0.99805,0.997,0.9999688412500001,0.9998840000000001,0.9980499512487813,0.996996996996997
33,0.005217145767689362,0.01744158554074238,0.9983625,0.996,0.9999800975,0.999882,0.9983625204684942,0.9959919839679359
34,0.005266412850524648,0.01763286733628047,0.9979625,0.996,0.999982090625,0.9998940000000001,0.9979624745309317,0.9959919839679359
35,0.005074514870402345,0.015134185580594931,0.998225,0.996,0.9999814871875,0.99988,0.9982247337100565,0.9959919839679359
36,0.004922963310018531,0.021650766939041204,0.9981625,0.993,0.999981743125,0.999892,0.9981627066954968,0.992964824120603
37,0.00401174271954078,0.017578382298350334,0.9985,0.995,0.9999899465625,0.999894,0.9984997374540545,0.994994994994995
38,0.004837633376332815,0.016524362772695894,0.9984,0.997,0.9999843865624998,0.9999,0.9984,0.996996996996997
39,0.00429279389311996,0.01624877165351063,0.998575,0.996,0.9999878940624999,0.999898,0.9985748574857486,0.9959919839679359
40,0.004438498888333561,0.015829742405563594,0.998275,0.996,0.9999877690624999,0.999908,0.9982752587111933,0.9959919839679359
41,0.004388659512973391,0.015052482064813376,0.9983,0.997,0.9999875365624998,0.999888,0.9982998299829983,0.996996996996997
42,0.003829265673486225,0.01608032351732436,0.9985125,0.996,0.9999909409375,0.999892,0.9985125929629398,0.996
43,0.0037422880321450066,0.01568331126915291,0.9986625,0.996,0.9999902384374999,0.9998960000000001,0.9986626838809664,0.9959919839679359
44,0.0038982069094134204,0.016396091327565955,0.998525,0.995,0.9999778949999999,0.999892,0.9985251106167038,0.994994994994995
45,0.003952378381142626,0.016937893855385484,0.9985375,0.996,0.9999901646874999,0.999892,0.9985374451541933,0.9959919839679359
46,0.0035518867560203945,0.01758141304552373,0.998725,0.996,0.9999923165625,0.9998959999999999,0.9987251274872513,0.9959919839679359
47,0.004056843713918352,0.017710735796021254,0.9984625,0.995,0.9999893496875,0.9998940000000001,0.9984627113771857,0.994994994994995
48,0.004265557135605923,0.016513990357521834,0.9984375,0.996,0.9999873125,0.999904,0.9984375976501468,0.9959919839679359
49,0.004287949476872745,0.0173485397323966,0.9984125,0.996,0.999988038125,0.999894,0.9984124007750484,0.9959919839679359
50,0.003753211729459872,0.01663763615489006,0.9987125,0.996,0.9999905503125,0.999902,0.9987124195262204,0.9959919839679359
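The PNGs under `training_curves/` are plotted from this per-epoch CSV. A minimal matplotlib sketch that regenerates a loss panel; the output filename is a placeholder, not a file in this repository:

```python
import matplotlib.pyplot as plt
import pandas as pd

metrics = pd.read_csv('training_curves/PiT_metrics.csv')
plt.plot(metrics['epoch'], metrics['train_loss'], label='train loss')
plt.plot(metrics['epoch'], metrics['val_loss'], label='validation loss')
plt.xlabel('epoch')
plt.ylabel('loss')
plt.legend()
plt.savefig('PiT_loss_reproduced.png')  # placeholder output name
```
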
training_metrics.csv
ADDED
@@ -0,0 +1,51 @@
epoch,train_loss,val_loss,train_accuracy,val_accuracy,train_auc,val_auc,train_f1,val_f1
1,0.08380111252069473,0.05348298209905624,0.9663625,0.983,0.9956030796875,0.9983500000000001,0.9663612385464455,0.9830845771144279
2,0.041526970613375305,0.057092710956931116,0.9851375,0.976,0.9987992937500001,0.997856,0.9851350844512233,0.9763779527559056
3,0.03518724364750087,0.05157612532377243,0.9869125,0.978,0.9991720912500001,0.998972,0.9869107002212805,0.9775967413441955
4,0.030401692363340407,0.027769247174146586,0.988775,0.99,0.999388499375,0.99954,0.9887733159973996,0.9900596421471173
5,0.026774657212290914,0.029289492189884185,0.9906,0.988,0.9994664218750001,0.9994879999999999,0.9905945918903369,0.9880239520958084
6,0.02564074798002839,0.03437352496385574,0.9904875,0.984,0.99955344875,0.999422,0.9904873810922636,0.9838709677419355
7,0.022729650076944382,0.03372676639258861,0.9919,0.991,0.9996243059375,0.9994419999999999,0.9919028340080972,0.9909729187562688
8,0.02426139419497922,0.026328840628266334,0.9912625,0.994,0.9996046865625,0.999582,0.991261953872117,0.9940119760479041
9,0.022508089120825754,0.027908500015735627,0.9914875,0.991,0.9996199100000001,0.9995580000000001,0.9914863293702885,0.9909729187562688
10,0.021495544563932344,0.029435926083475353,0.99205,0.991,0.999693424375,0.9986379999999999,0.9920480120030007,0.9910269192422732
11,0.019883352918596938,0.04117086360044778,0.9928875,0.989,0.999712243125,0.999426,0.992888833343748,0.9889224572004028
12,0.021287586880242453,0.03494359707832336,0.9922875,0.988,0.9996557534375,0.999516,0.992286825097196,0.988
13,0.019410972280753776,0.031757158495485785,0.99295,0.99,0.9997082581250001,0.999506,0.9929524666366771,0.9900199600798403
14,0.018788172438275068,0.01990511727356352,0.99325,0.992,0.999754944375,0.999768,0.9932479743923177,0.9920159680638723
15,0.019693653361592442,0.029755046516656877,0.9928875,0.989,0.9997258168750001,0.9986579999999999,0.9928865218967609,0.9890547263681592
16,0.018050431178323924,0.035269855976104734,0.9931625,0.987,0.9997742821875,0.999514,0.9931619016663958,0.9869083585095669
17,0.01741551666567102,0.01681467080116272,0.9936,0.996,0.999789191875,0.999856,0.9935993599359936,0.9959919839679359
18,0.018152127362787724,0.0352143643796444,0.9931875,0.987,0.9997540181250001,0.999164,0.9931891175845736,0.9869608826479438
19,0.01749123232383281,0.027128501921892166,0.9935625,0.99,0.99979617125,0.99966,0.9935612927423892,0.9900398406374502
20,0.017235335105098785,0.02004218239337206,0.9937,0.992,0.9997753318749999,0.999792,0.9937,0.9919839679358717
21,0.01637661460810341,0.024918013259768487,0.994125,0.991,0.9998114468749999,0.99964,0.994124853121328,0.9910625620655412
22,0.015613745130226016,0.0389368434548378,0.9942375,0.985,0.9998256103125001,0.9987199999999999,0.9942378601337416,0.9851924975320829
23,0.015016568828211166,0.03343766522407532,0.9947,0.99,0.9998105456249999,0.998522,0.9946998674966874,0.9899598393574297
24,0.009668744673673063,0.015450777329489938,0.9963875,0.997,0.9999366387499999,0.9989,0.996386641827434,0.997002997002997
25,0.007466233547689626,0.014600379575043917,0.997275,0.997,0.999963644375,0.999846,0.9972747956096707,0.996996996996997
26,0.006449911724647973,0.015622713446675334,0.9975875,0.996,0.9999733146875,0.9989319999999999,0.9975877110752809,0.9959919839679359
27,0.006039172103576129,0.016120399681851267,0.9978375,0.997,0.9999748803124999,0.999862,0.9978374189032089,0.996996996996997
28,0.005462758123714593,0.01527087467815727,0.99775,0.996,0.999980760625,0.998926,0.9977497187148393,0.9959919839679359
29,0.005839177356922301,0.015614406049251557,0.99785,0.996,0.9999779015624999,0.9999180000000001,0.9978499462486562,0.9959919839679359
30,0.006277850946440594,0.013740215204656124,0.9975875,0.997,0.9999744187499999,0.99894,0.9975879522589515,0.996996996996997
31,0.006149465677636908,0.019031632061931304,0.9979375,0.995,0.9999592309375,0.999892,0.9979375773408498,0.9949849548645938
32,0.005350587552280922,0.01742541548621375,0.99805,0.997,0.9999688412500001,0.9998840000000001,0.9980499512487813,0.996996996996997
33,0.005217145767689362,0.01744158554074238,0.9983625,0.996,0.9999800975,0.999882,0.9983625204684942,0.9959919839679359
34,0.005266412850524648,0.01763286733628047,0.9979625,0.996,0.999982090625,0.9998940000000001,0.9979624745309317,0.9959919839679359
35,0.005074514870402345,0.015134185580594931,0.998225,0.996,0.9999814871875,0.99988,0.9982247337100565,0.9959919839679359
36,0.004922963310018531,0.021650766939041204,0.9981625,0.993,0.999981743125,0.999892,0.9981627066954968,0.992964824120603
37,0.00401174271954078,0.017578382298350334,0.9985,0.995,0.9999899465625,0.999894,0.9984997374540545,0.994994994994995
38,0.004837633376332815,0.016524362772695894,0.9984,0.997,0.9999843865624998,0.9999,0.9984,0.996996996996997
39,0.00429279389311996,0.01624877165351063,0.998575,0.996,0.9999878940624999,0.999898,0.9985748574857486,0.9959919839679359
40,0.004438498888333561,0.015829742405563594,0.998275,0.996,0.9999877690624999,0.999908,0.9982752587111933,0.9959919839679359
41,0.004388659512973391,0.015052482064813376,0.9983,0.997,0.9999875365624998,0.999888,0.9982998299829983,0.996996996996997
42,0.003829265673486225,0.01608032351732436,0.9985125,0.996,0.9999909409375,0.999892,0.9985125929629398,0.996
43,0.0037422880321450066,0.01568331126915291,0.9986625,0.996,0.9999902384374999,0.9998960000000001,0.9986626838809664,0.9959919839679359
44,0.0038982069094134204,0.016396091327565955,0.998525,0.995,0.9999778949999999,0.999892,0.9985251106167038,0.994994994994995
45,0.003952378381142626,0.016937893855385484,0.9985375,0.996,0.9999901646874999,0.999892,0.9985374451541933,0.9959919839679359
46,0.0035518867560203945,0.01758141304552373,0.998725,0.996,0.9999923165625,0.9998959999999999,0.9987251274872513,0.9959919839679359
47,0.004056843713918352,0.017710735796021254,0.9984625,0.995,0.9999893496875,0.9998940000000001,0.9984627113771857,0.994994994994995
48,0.004265557135605923,0.016513990357521834,0.9984375,0.996,0.9999873125,0.999904,0.9984375976501468,0.9959919839679359
49,0.004287949476872745,0.0173485397323966,0.9984125,0.996,0.999988038125,0.999894,0.9984124007750484,0.9959919839679359
50,0.003753211729459872,0.01663763615489006,0.9987125,0.996,0.9999905503125,0.999902,0.9987124195262204,0.9959919839679359