eacortes commited on
Commit
0133b75
·
verified ·
1 Parent(s): ebc2445

Upload 19 files

Browse files
Files changed (19) hide show
  1. README.md +315 -3
  2. config.json +54 -0
  3. configuration_modchembert.py +84 -0
  4. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bace_classification_epochs100_batch_size32_20250923_153237.log +357 -0
  5. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bbbp_epochs100_batch_size32_20250923_160210.log +337 -0
  6. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_clintox_epochs100_batch_size32_20250923_163713.log +373 -0
  7. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_hiv_epochs100_batch_size32_20250923_153257.log +331 -0
  8. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_sider_epochs100_batch_size32_20250923_170602.log +355 -0
  9. logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_tox21_epochs100_batch_size32_20250923_173331.log +329 -0
  10. logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bace_regression_epochs100_batch_size32_20250923_153243.log +327 -0
  11. logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_clearance_epochs100_batch_size32_20250923_155847.log +343 -0
  12. logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_delaney_epochs100_batch_size32_20250923_161546.log +349 -0
  13. logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_freesolv_epochs100_batch_size32_20250923_163344.log +367 -0
  14. logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_lipo_epochs100_batch_size32_20250923_164701.log +349 -0
  15. model.safetensors +3 -0
  16. modeling_modchembert.py +554 -0
  17. special_tokens_map.json +37 -0
  18. tokenizer.json +2554 -0
  19. tokenizer_config.json +53 -0
README.md CHANGED
@@ -1,3 +1,315 @@
1
- ---
2
- license: apache-2.0
3
- ---
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ license: apache-2.0
3
+ base_model: Derify/ModChemBERT-MLM
4
+ datasets:
5
+ - Derify/augmented_canonical_druglike_QED_Pfizer_15M
6
+ metrics:
7
+ - roc_auc
8
+ - rmse
9
+ library_name: transformers
10
+ tags:
11
+ - modernbert
12
+ - ModChemBERT
13
+ - cheminformatics
14
+ - chemical-language-model
15
+ - molecular-property-prediction
16
+ - mergekit
17
+ - merge
18
+ pipeline_tag: fill-mask
19
+ model-index:
20
+ - name: Derify/ModChemBERT-MLM-TAFT
21
+ results:
22
+ - task:
23
+ type: text-classification
24
+ name: Classification (ROC AUC)
25
+ dataset:
26
+ name: BACE
27
+ type: BACE
28
+ metrics:
29
+ - type: roc_auc
30
+ value: 0.7924
31
+ - task:
32
+ type: text-classification
33
+ name: Classification (ROC AUC)
34
+ dataset:
35
+ name: BBBP
36
+ type: BBBP
37
+ metrics:
38
+ - type: roc_auc
39
+ value: 0.7282
40
+ - task:
41
+ type: text-classification
42
+ name: Classification (ROC AUC)
43
+ dataset:
44
+ name: CLINTOX
45
+ type: CLINTOX
46
+ metrics:
47
+ - type: roc_auc
48
+ value: 0.9725
49
+ - task:
50
+ type: text-classification
51
+ name: Classification (ROC AUC)
52
+ dataset:
53
+ name: HIV
54
+ type: HIV
55
+ metrics:
56
+ - type: roc_auc
57
+ value: 0.7770
58
+ - task:
59
+ type: text-classification
60
+ name: Classification (ROC AUC)
61
+ dataset:
62
+ name: SIDER
63
+ type: SIDER
64
+ metrics:
65
+ - type: roc_auc
66
+ value: 0.6542
67
+ - task:
68
+ type: text-classification
69
+ name: Classification (ROC AUC)
70
+ dataset:
71
+ name: TOX21
72
+ type: TOX21
73
+ metrics:
74
+ - type: roc_auc
75
+ value: 0.7646
76
+ - task:
77
+ type: regression
78
+ name: Regression (RMSE)
79
+ dataset:
80
+ name: BACE
81
+ type: BACE
82
+ metrics:
83
+ - type: rmse
84
+ value: 1.0304
85
+ - task:
86
+ type: regression
87
+ name: Regression (RMSE)
88
+ dataset:
89
+ name: CLEARANCE
90
+ type: CLEARANCE
91
+ metrics:
92
+ - type: rmse
93
+ value: 47.8418
94
+ - task:
95
+ type: regression
96
+ name: Regression (RMSE)
97
+ dataset:
98
+ name: ESOL
99
+ type: ESOL
100
+ metrics:
101
+ - type: rmse
102
+ value: 0.7669
103
+ - task:
104
+ type: regression
105
+ name: Regression (RMSE)
106
+ dataset:
107
+ name: FREESOLV
108
+ type: FREESOLV
109
+ metrics:
110
+ - type: rmse
111
+ value: 0.5293
112
+ - task:
113
+ type: regression
114
+ name: Regression (RMSE)
115
+ dataset:
116
+ name: LIPO
117
+ type: LIPO
118
+ metrics:
119
+ - type: rmse
120
+ value: 0.6708
121
+ ---
122
+
123
+ # ModChemBERT: ModernBERT as a Chemical Language Model
124
+ ModChemBERT is a ModernBERT-based chemical language model (CLM), trained on SMILES strings for masked language modeling (MLM) and downstream molecular property prediction (classification & regression).
125
+
126
+ ## Usage
127
+ ### Load Model
128
+ ```python
129
+ from transformers import AutoModelForMaskedLM, AutoTokenizer
130
+
131
+ model_id = "Derify/ModChemBERT-MLM-TAFT"
132
+ tokenizer = AutoTokenizer.from_pretrained(model_id)
133
+ model = AutoModelForMaskedLM.from_pretrained(
134
+ model_id,
135
+ trust_remote_code=True,
136
+ dtype="float16",
137
+ device_map="auto",
138
+ )
139
+ ```
140
+
141
+ ### Fill-Mask Pipeline
142
+ ```python
143
+ from transformers import pipeline
144
+
145
+ fill = pipeline("fill-mask", model=model, tokenizer=tokenizer)
146
+ print(fill("c1ccccc1[MASK]"))
147
+ ```
148
+
149
+ ## Intended Use
150
+ * Primary: Research and development for molecular property prediction, experimentation with pooling strategies, and as a foundational model for downstream applications.
151
+ * Appropriate for: Binary / multi-class classification (e.g., toxicity, activity) and single-task or multi-task regression (e.g., solubility, clearance) after fine-tuning.
152
+ * Not intended for generating novel molecules.
153
+
154
+ ## Limitations
155
+ - Out-of-domain performance may degrade for very long (>128 token) SMILES, inorganic / organometallic compounds, polymers, and charged / enumerated tautomers, as these are not well represented in the training data.
156
+ - No guarantee of synthesizability, safety, or biological efficacy.
157
+
158
+ ## Ethical Considerations & Responsible Use
159
+ - Potential biases arise from training corpora skewed to drug-like space.
160
+ - Do not deploy in clinical or regulatory settings without rigorous, domain-specific validation.
161
+
162
+ ## Architecture
163
+ - Backbone: ModernBERT
164
+ - Hidden size: 768
165
+ - Intermediate size: 1152
166
+ - Encoder Layers: 22
167
+ - Attention heads: 12
168
+ - Max sequence length: 256 tokens (MLM primarily trained with 128-token sequences)
169
+ - Vocabulary: BPE tokenizer using [MolFormer's vocab](https://github.com/emapco/ModChemBERT/blob/main/modchembert/tokenizers/molformer/vocab.json) (2362 tokens)
170
+
171
+ ## Pooling (Classifier / Regressor Head)
172
+ Kallergis et al. [1] demonstrated that the CLM embedding method prior to the prediction head can significantly impact downstream performance.
173
+
174
+ Behrendt et al. [2] noted that the last few layers contain task-specific information and that pooling methods leveraging information from multiple layers can enhance model performance. Their results further demonstrated that the `max_seq_mha` pooling method was particularly effective in low-data regimes, which is often the case for molecular property prediction tasks.
175
+
176
+ Multiple pooling strategies are supported by ModChemBERT to explore their impact on downstream performance:
177
+ - `cls`: Last layer [CLS]
178
+ - `mean`: Mean over last hidden layer
179
+ - `max_cls`: Max over last k layers of [CLS]
180
+ - `cls_mha`: MHA with [CLS] as query
181
+ - `max_seq_mha`: MHA with max pooled sequence as KV and max pooled [CLS] as query
182
+ - `sum_mean`: Sum over all layers then mean tokens
183
+ - `sum_sum`: Sum over all layers then sum tokens
184
+ - `mean_mean`: Mean over all layers then mean tokens
185
+ - `mean_sum`: Mean over all layers then sum tokens
186
+ - `max_seq_mean`: Max over last k layers then mean tokens
187
+
188
+ ## Training Pipeline
189
+ <div align="center">
190
+ <img src="https://cdn-uploads.huggingface.co/production/uploads/656892962693fa22e18b5331/bxNbpgMkU8m60ypyEJoWQ.png" alt="ModChemBERT Training Pipeline" width="650"/>
191
+ </div>
192
+
193
+ ### Rationale for MTR Stage
194
+ Following Sultan et al. [3], multi-task regression (physicochemical properties) biases the latent space toward ADME-related representations prior to narrow TAFT specialization. Sultan et al. observed that MLM + DAPT (MTR) outperforms MLM-only, MTR-only, and MTR + DAPT (MTR).
195
+
196
+ ### Checkpoint Averaging Motivation
197
+ Inspired by ModernBERT [4], JaColBERTv2.5 [5], and Llama 3.1 [6], where results show that model merging can enhance generalization or performance while mitigating overfitting to any single fine-tune or annealing checkpoint.
198
+
199
+ ## Datasets
200
+ - Pretraining: [Derify/augmented_canonical_druglike_QED_Pfizer_15M](https://huggingface.co/datasets/Derify/augmented_canonical_druglike_QED_Pfizer_15M)
201
+ - Domain Adaptive Pretraining (DAPT) & Task Adaptive Fine-tuning (TAFT): ADME + AstraZeneca datasets (10 tasks) with scaffold splits from DA4MT pipeline (see [domain-adaptation-molecular-transformers](https://github.com/emapco/ModChemBERT/tree/main/domain-adaptation-molecular-transformers))
202
+ - Benchmarking: ChemBERTa-3 [7] tasks (BACE, BBBP, TOX21, HIV, SIDER, CLINTOX for classification; ESOL, FREESOLV, LIPO, BACE, CLEARANCE for regression)
203
+
204
+ ## Benchmarking
205
+ Benchmarks were conducted with the ChemBERTa-3 framework using DeepChem scaffold splits. Each task was trained for 100 epochs with 3 random seeds.
206
+
207
+ ### Evaluation Methodology
208
+ - Classification Metric: ROC AUC.
209
+ - Regression Metric: RMSE.
210
+ - Aggregation: Mean ± standard deviation of the triplicate results.
211
+ - Input Constraints: SMILES truncated / filtered to ≤200 tokens, following the MolFormer paper's recommendation.
212
+
213
+ ### Results
214
+ <details><summary>Click to expand</summary>
215
+
216
+ #### Classification Datasets (ROC AUC - Higher is better)
217
+
218
+ | Model | BACE↑ | BBBP↑ | CLINTOX↑ | HIV↑ | SIDER↑ | TOX21↑ | AVG† |
219
+ | ---------------------------------------------------------------------------- | ----------------- | ----------------- | --------------------- | --------------------- | --------------------- | ----------------- | ------ |
220
+ | **Tasks** | 1 | 1 | 2 | 1 | 27 | 12 | |
221
+ | [ChemBERTa-100M-MLM](https://huggingface.co/DeepChem/ChemBERTa-100M-MLM)* | 0.781 ± 0.019 | 0.700 ± 0.027 | 0.979 ± 0.022 | 0.740 ± 0.013 | 0.611 ± 0.002 | 0.718 ± 0.011 | 0.7548 |
222
+ | [c3-MoLFormer-1.1B](https://huggingface.co/DeepChem/MoLFormer-c3-1.1B)* | 0.819 ± 0.019 | 0.735 ± 0.019 | 0.839 ± 0.013 | 0.762 ± 0.005 | 0.618 ± 0.005 | 0.723 ± 0.012 | 0.7493 |
223
+ | MoLFormer-LHPC* | **0.887 ± 0.004** | **0.908 ± 0.013** | 0.993 ± 0.004 | 0.750 ± 0.003 | 0.622 ± 0.007 | **0.791 ± 0.014** | 0.8252 |
224
+ | ------------------------- | ----------------- | ----------------- | ------------------- | ------------------- | ------------------- | ----------------- | ------ |
225
+ | [MLM](https://huggingface.co/Derify/ModChemBERT-MLM) | 0.8065 ± 0.0103 | 0.7222 ± 0.0150 | 0.9709 ± 0.0227 | ***0.7800 ± 0.0133*** | 0.6419 ± 0.0113 | 0.7400 ± 0.0044 | 0.7769 |
226
+ | [MLM + DAPT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT) | 0.8224 ± 0.0156 | 0.7402 ± 0.0095 | 0.9820 ± 0.0138 | 0.7702 ± 0.0020 | 0.6303 ± 0.0039 | 0.7360 ± 0.0036 | 0.7802 |
227
+ | [MLM + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-TAFT) | 0.7924 ± 0.0155 | 0.7282 ± 0.0058 | 0.9725 ± 0.0213 | 0.7770 ± 0.0047 | 0.6542 ± 0.0128 | *0.7646 ± 0.0039* | 0.7815 |
228
+ | [MLM + DAPT + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT-TAFT) | 0.8213 ± 0.0051 | 0.7356 ± 0.0094 | 0.9664 ± 0.0202 | 0.7750 ± 0.0048 | 0.6415 ± 0.0094 | 0.7263 ± 0.0036 | 0.7777 |
229
+ | [MLM + DAPT + TAFT OPT](https://huggingface.co/Derify/ModChemBERT) | *0.8346 ± 0.0045* | *0.7573 ± 0.0120* | ***0.9938 ± 0.0017*** | 0.7737 ± 0.0034 | ***0.6600 ± 0.0061*** | 0.7518 ± 0.0047 | 0.7952 |
230
+
231
+ #### Regression Datasets (RMSE - Lower is better)
232
+
233
+ | Model | BACE↓ | CLEARANCE↓ | ESOL↓ | FREESOLV↓ | LIPO↓ | AVG‡ |
234
+ | ---------------------------------------------------------------------------- | --------------------- | ---------------------- | --------------------- | --------------------- | --------------------- | ---------------- |
235
+ | **Tasks** | 1 | 1 | 1 | 1 | 1 | |
236
+ | [ChemBERTa-100M-MLM](https://huggingface.co/DeepChem/ChemBERTa-100M-MLM)* | 1.011 ± 0.038 | 51.582 ± 3.079 | 0.920 ± 0.011 | 0.536 ± 0.016 | 0.758 ± 0.013 | 0.8063 / 10.9614 |
237
+ | [c3-MoLFormer-1.1B](https://huggingface.co/DeepChem/MoLFormer-c3-1.1B)* | 1.094 ± 0.126 | 52.058 ± 2.767 | 0.829 ± 0.019 | 0.572 ± 0.023 | 0.728 ± 0.016 | 0.8058 / 11.0562 |
238
+ | MoLFormer-LHPC* | 1.201 ± 0.100 | 45.74 ± 2.637 | 0.848 ± 0.031 | 0.683 ± 0.040 | 0.895 ± 0.080 | 0.9068 / 9.8734 |
239
+ | ------------------------- | ------------------- | -------------------- | ------------------- | ------------------- | ------------------- | ---------------- |
240
+ | [MLM](https://huggingface.co/Derify/ModChemBERT-MLM) | 1.0893 ± 0.1319 | 49.0005 ± 1.2787 | 0.8456 ± 0.0406 | 0.5491 ± 0.0134 | 0.7147 ± 0.0062 | 0.7997 / 10.4398 |
241
+ | [MLM + DAPT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT) | 0.9931 ± 0.0258 | 45.4951 ± 0.7112 | 0.9319 ± 0.0153 | 0.6049 ± 0.0666 | 0.6874 ± 0.0040 | 0.8043 / 9.7425 |
242
+ | [MLM + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-TAFT) | 1.0304 ± 0.1146 | 47.8418 ± 0.4070 | ***0.7669 ± 0.0024*** | 0.5293 ± 0.0267 | 0.6708 ± 0.0074 | 0.7493 / 10.1678 |
243
+ | [MLM + DAPT + TAFT](https://huggingface.co/Derify/ModChemBERT-MLM-DAPT-TAFT) | 0.9713 ± 0.0224 | ***42.8010 ± 3.3475*** | 0.8169 ± 0.0268 | 0.5445 ± 0.0257 | 0.6820 ± 0.0028 | 0.7537 / 9.1631 |
244
+ | [MLM + DAPT + TAFT OPT](https://huggingface.co/Derify/ModChemBERT) | ***0.9665 ± 0.0250*** | 44.0137 ± 1.1110 | 0.8158 ± 0.0115 | ***0.4979 ± 0.0158*** | ***0.6505 ± 0.0126*** | 0.7327 / 9.3889 |
245
+
246
+ **Bold** indicates the best result in the column; *italic* indicates the best result among ModChemBERT checkpoints.<br/>
247
+ \* Published results from the ChemBERTa-3 [7] paper for optimized chemical language models using DeepChem scaffold splits.<br/>
248
+ † AVG column shows the mean score across all classification tasks.<br/>
249
+ ‡ AVG column shows the mean scores across all regression tasks without and with the clearance score.
250
+
251
+ </details>
252
+
253
+ ## Optimized ModChemBERT Hyperparameters
254
+
255
+ <details><summary>Click to expand</summary>
256
+
257
+ ### TAFT Datasets
258
+ Optimal parameters (per dataset) for the `MLM + DAPT + TAFT OPT` merged model:
259
+
260
+ | Dataset | Learning Rate | Batch Size | Warmup Ratio | Classifier Pooling | Last k Layers |
261
+ | ---------------------- | ------------- | ---------- | ------------ | ------------------ | ------------- |
262
+ | adme_microsom_stab_h | 3e-5 | 8 | 0.0 | max_seq_mean | 5 |
263
+ | adme_microsom_stab_r | 3e-5 | 16 | 0.2 | max_cls | 3 |
264
+ | adme_permeability | 3e-5 | 8 | 0.0 | max_cls | 3 |
265
+ | adme_ppb_h | 1e-5 | 32 | 0.1 | max_seq_mean | 5 |
266
+ | adme_ppb_r | 1e-5 | 32 | 0.0 | sum_mean | N/A |
267
+ | adme_solubility | 3e-5 | 32 | 0.0 | sum_mean | N/A |
268
+ | astrazeneca_CL | 3e-5 | 8 | 0.1 | max_seq_mha | 3 |
269
+ | astrazeneca_LogD74 | 1e-5 | 8 | 0.0 | max_seq_mean | 5 |
270
+ | astrazeneca_PPB | 1e-5 | 32 | 0.0 | max_cls | 3 |
271
+ | astrazeneca_Solubility | 1e-5 | 32 | 0.0 | max_seq_mean | 5 |
272
+
273
+ ### Benchmarking Datasets
274
+ Optimal parameters (per dataset) for the `MLM + DAPT + TAFT OPT` merged model:
275
+
276
+ | Dataset | Batch Size | Classifier Pooling | Last k Layers | Pooling Attention Dropout | Classifier Dropout | Embedding Dropout |
277
+ | ------------------- | ---------- | ------------------ | ------------- | ------------------------- | ------------------ | ----------------- |
278
+ | bace_classification | 32 | max_seq_mha | 3 | 0.0 | 0.0 | 0.0 |
279
+ | bbbp | 64 | max_cls | 3 | 0.1 | 0.0 | 0.0 |
280
+ | clintox | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
281
+ | hiv | 32 | max_seq_mha | 3 | 0.0 | 0.0 | 0.0 |
282
+ | sider | 32 | mean | N/A | 0.1 | 0.0 | 0.1 |
283
+ | tox21 | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
284
+ | bace_regression | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
285
+ | clearance | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
286
+ | esol | 64 | sum_mean | N/A | 0.1 | 0.0 | 0.1 |
287
+ | freesolv | 32 | max_seq_mha | 5 | 0.1 | 0.0 | 0.0 |
288
+ | lipo | 32 | max_seq_mha | 3 | 0.1 | 0.1 | 0.1 |
289
+
290
+ </details>
291
+
292
+ ## Hardware
293
+ Training and experiments were performed on 2 NVIDIA RTX 3090 GPUs.
294
+
295
+ ## Citation
296
+ If you use ModChemBERT in your research, please cite the checkpoint and the following:
297
+ ```
298
+ @software{cortes-2025-modchembert,
299
+ author = {Emmanuel Cortes},
300
+ title = {ModChemBERT: ModernBERT as a Chemical Language Model},
301
+ year = {2025},
302
+ publisher = {GitHub},
303
+ howpublished = {GitHub repository},
304
+ url = {https://github.com/emapco/ModChemBERT}
305
+ }
306
+ ```
307
+
308
+ ## References
309
+ 1. Kallergis, Georgios, et al. "Domain adaptable language modeling of chemical compounds identifies potent pathoblockers for Pseudomonas aeruginosa." Communications Chemistry 8.1 (2025): 114.
310
+ 2. Behrendt, Maike, Stefan Sylvius Wagner, and Stefan Harmeling. "MaxPoolBERT: Enhancing BERT Classification via Layer-and Token-Wise Aggregation." arXiv preprint arXiv:2505.15696 (2025).
311
+ 3. Sultan, Afnan, et al. "Transformers for molecular property prediction: Domain adaptation efficiently improves performance." arXiv preprint arXiv:2503.03360 (2025).
312
+ 4. Warner, Benjamin, et al. "Smarter, better, faster, longer: A modern bidirectional encoder for fast, memory efficient, and long context finetuning and inference." arXiv preprint arXiv:2412.13663 (2024).
313
+ 5. Clavié, Benjamin. "JaColBERTv2.5: Optimising Multi-Vector Retrievers to Create State-of-the-Art Japanese Retrievers with Constrained Resources." Journal of Natural Language Processing 32.1 (2025): 176-218.
314
+ 6. Grattafiori, Aaron, et al. "The llama 3 herd of models." arXiv preprint arXiv:2407.21783 (2024).
315
+ 7. Singh, Riya, et al. "ChemBERTa-3: An Open Source Training Framework for Chemical Foundation Models." (2025).
config.json ADDED
@@ -0,0 +1,54 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "architectures": [
3
+ "ModChemBertForMaskedLM",
4
+ "ModChemBertForSequenceClassification"
5
+ ],
6
+ "attention_bias": false,
7
+ "attention_dropout": 0.1,
8
+ "auto_map": {
9
+ "AutoConfig": "configuration_modchembert.ModChemBertConfig",
10
+ "AutoModelForMaskedLM": "modeling_modchembert.ModChemBertForMaskedLM",
11
+ "AutoModelForSequenceClassification": "modeling_modchembert.ModChemBertForSequenceClassification"
12
+ },
13
+ "bos_token_id": 0,
14
+ "classifier_activation": "gelu",
15
+ "classifier_bias": false,
16
+ "classifier_dropout": 0.0,
17
+ "classifier_pooling": "max_seq_mha",
18
+ "classifier_pooling_attention_dropout": 0.1,
19
+ "classifier_pooling_last_k": 3,
20
+ "classifier_pooling_num_attention_heads": 4,
21
+ "cls_token_id": 0,
22
+ "decoder_bias": true,
23
+ "deterministic_flash_attn": false,
24
+ "dtype": "float32",
25
+ "embedding_dropout": 0.1,
26
+ "eos_token_id": 1,
27
+ "global_attn_every_n_layers": 3,
28
+ "global_rope_theta": 160000.0,
29
+ "hidden_activation": "gelu",
30
+ "hidden_size": 768,
31
+ "initializer_cutoff_factor": 2.0,
32
+ "initializer_range": 0.02,
33
+ "intermediate_size": 1152,
34
+ "layer_norm_eps": 1e-05,
35
+ "local_attention": 8,
36
+ "local_rope_theta": 10000.0,
37
+ "max_position_embeddings": 256,
38
+ "mlp_bias": false,
39
+ "mlp_dropout": 0.1,
40
+ "model_type": "modchembert",
41
+ "norm_bias": false,
42
+ "norm_eps": 1e-05,
43
+ "num_attention_heads": 12,
44
+ "num_hidden_layers": 22,
45
+ "num_labels": 1,
46
+ "pad_token_id": 2,
47
+ "position_embedding_type": "absolute",
48
+ "repad_logits_with_grad": false,
49
+ "sep_token_id": 1,
50
+ "sparse_pred_ignore_index": -100,
51
+ "sparse_prediction": false,
52
+ "transformers_version": "4.56.1",
53
+ "vocab_size": 2362
54
+ }
configuration_modchembert.py ADDED
@@ -0,0 +1,84 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2025 Emmanuel Cortes, All Rights Reserved.
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+
15
+ from typing import Literal
16
+
17
+ from transformers.models.modernbert.configuration_modernbert import ModernBertConfig
18
+
19
+
20
class ModChemBertConfig(ModernBertConfig):
    """
    Configuration class for ModChemBert models.

    This configuration class extends ModernBertConfig with additional parameters specific to
    chemical molecule modeling and custom pooling strategies for classification/regression tasks.
    It accepts all arguments and keyword arguments from ModernBertConfig.

    Args:
        classifier_pooling (str, optional): Pooling strategy for sequence classification.
            Available options:
            - "cls": Use CLS token representation
            - "mean": Attention-weighted average pooling
            - "sum_mean": Sum all hidden states across layers, then mean pool over sequence (ChemLM approach)
            - "sum_sum": Sum all hidden states across layers, then sum pool over sequence
            - "mean_mean": Mean all hidden states across layers, then mean pool over sequence
            - "mean_sum": Mean all hidden states across layers, then sum pool over sequence
            - "max_cls": Element-wise max pooling over last k hidden states, then take CLS token
            - "cls_mha": Multi-head attention with CLS token as query and full sequence as keys/values
            - "max_seq_mha": Max pooling over last k states + multi-head attention with CLS as query
            - "max_seq_mean": Max pooling over last k hidden states, then mean pooling over sequence
            Defaults to "max_seq_mha".
        classifier_pooling_num_attention_heads (int, optional): Number of attention heads for multi-head attention
            pooling strategies (cls_mha, max_seq_mha). Defaults to 4.
        classifier_pooling_attention_dropout (float, optional): Dropout probability for multi-head attention
            pooling strategies (cls_mha, max_seq_mha). Defaults to 0.0.
        classifier_pooling_last_k (int, optional): Number of last hidden layers to use for max pooling
            strategies (max_cls, max_seq_mha, max_seq_mean). Defaults to 8.
        *args: Variable length argument list passed to ModernBertConfig.
        **kwargs: Arbitrary keyword arguments passed to ModernBertConfig.

    Note:
        This class inherits all configuration parameters from ModernBertConfig including
        hidden_size, num_hidden_layers, num_attention_heads, intermediate_size, etc.
    """

    model_type = "modchembert"

    def __init__(
        self,
        *args,
        classifier_pooling: Literal[
            "cls",
            "mean",
            "sum_mean",
            "sum_sum",
            "mean_mean",
            "mean_sum",
            "max_cls",
            "cls_mha",
            "max_seq_mha",
            "max_seq_mean",
        ] = "max_seq_mha",
        classifier_pooling_num_attention_heads: int = 4,
        classifier_pooling_attention_dropout: float = 0.0,
        classifier_pooling_last_k: int = 8,
        **kwargs,
    ):
        # Pass classifier_pooling="cls" to circumvent ValueError in ModernBertConfig init,
        # which only accepts its own built-in pooling options.
        super().__init__(*args, classifier_pooling="cls", **kwargs)
        # Override with the custom (extended) pooling value and related knobs.
        self.classifier_pooling = classifier_pooling
        self.classifier_pooling_num_attention_heads = classifier_pooling_num_attention_heads
        self.classifier_pooling_attention_dropout = classifier_pooling_attention_dropout
        self.classifier_pooling_last_k = classifier_pooling_last_k
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bace_classification_epochs100_batch_size32_20250923_153237.log ADDED
@@ -0,0 +1,357 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 15:32:37,439 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Running benchmark for dataset: bace_classification
2
+ 2025-09-23 15:32:37,439 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - dataset: bace_classification, tasks: ['Class'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 15:32:37,444 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset bace_classification at 2025-09-23_15-32-37
4
+ 2025-09-23 15:32:45,151 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.7599 | Val mean-roc_auc_score: 0.6470
5
+ 2025-09-23 15:32:45,152 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
6
+ 2025-09-23 15:32:45,685 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.6470
7
+ 2025-09-23 15:32:50,852 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5132 | Val mean-roc_auc_score: 0.7121
8
+ 2025-09-23 15:32:51,045 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 76
9
+ 2025-09-23 15:32:51,578 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7121
10
+ 2025-09-23 15:32:57,045 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.4397 | Val mean-roc_auc_score: 0.7233
11
+ 2025-09-23 15:32:57,224 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 114
12
+ 2025-09-23 15:32:57,824 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7233
13
+ 2025-09-23 15:33:03,432 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.3355 | Val mean-roc_auc_score: 0.7555
14
+ 2025-09-23 15:33:03,623 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 152
15
+ 2025-09-23 15:33:04,166 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.7555
16
+ 2025-09-23 15:33:09,804 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2796 | Val mean-roc_auc_score: 0.7464
17
+ 2025-09-23 15:33:15,500 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2623 | Val mean-roc_auc_score: 0.7489
18
+ 2025-09-23 15:33:21,629 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.2081 | Val mean-roc_auc_score: 0.7254
19
+ 2025-09-23 15:33:27,511 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.2480 | Val mean-roc_auc_score: 0.7598
20
+ 2025-09-23 15:33:27,693 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 304
21
+ 2025-09-23 15:33:28,213 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.7598
22
+ 2025-09-23 15:33:34,104 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1941 | Val mean-roc_auc_score: 0.7567
23
+ 2025-09-23 15:33:39,928 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1439 | Val mean-roc_auc_score: 0.7603
24
+ 2025-09-23 15:33:40,112 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 380
25
+ 2025-09-23 15:33:40,631 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.7603
26
+ 2025-09-23 15:33:46,474 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1970 | Val mean-roc_auc_score: 0.7313
27
+ 2025-09-23 15:33:52,494 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1390 | Val mean-roc_auc_score: 0.7470
28
+ 2025-09-23 15:33:58,341 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1012 | Val mean-roc_auc_score: 0.7528
29
+ 2025-09-23 15:34:04,234 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1064 | Val mean-roc_auc_score: 0.7611
30
+ 2025-09-23 15:34:04,430 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 532
31
+ 2025-09-23 15:34:05,007 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val mean-roc_auc_score: 0.7611
32
+ 2025-09-23 15:34:10,847 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0752 | Val mean-roc_auc_score: 0.7524
33
+ 2025-09-23 15:34:16,609 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0908 | Val mean-roc_auc_score: 0.7476
34
+ 2025-09-23 15:34:22,192 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0695 | Val mean-roc_auc_score: 0.7555
35
+ 2025-09-23 15:34:27,993 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0736 | Val mean-roc_auc_score: 0.7466
36
+ 2025-09-23 15:34:33,306 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0593 | Val mean-roc_auc_score: 0.7513
37
+ 2025-09-23 15:34:39,124 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0409 | Val mean-roc_auc_score: 0.7385
38
+ 2025-09-23 15:34:44,965 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0454 | Val mean-roc_auc_score: 0.7350
39
+ 2025-09-23 15:34:51,033 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0690 | Val mean-roc_auc_score: 0.7448
40
+ 2025-09-23 15:34:56,801 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0584 | Val mean-roc_auc_score: 0.7795
41
+ 2025-09-23 15:34:56,952 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 874
42
+ 2025-09-23 15:34:57,494 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 23 with val mean-roc_auc_score: 0.7795
43
+ 2025-09-23 15:35:03,240 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0159 | Val mean-roc_auc_score: 0.7755
44
+ 2025-09-23 15:35:09,072 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0143 | Val mean-roc_auc_score: 0.7771
45
+ 2025-09-23 15:35:14,893 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0183 | Val mean-roc_auc_score: 0.7713
46
+ 2025-09-23 15:35:22,228 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.7719
47
+ 2025-09-23 15:35:28,169 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7717
48
+ 2025-09-23 15:35:33,859 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0354 | Val mean-roc_auc_score: 0.7694
49
+ 2025-09-23 15:35:39,451 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0331 | Val mean-roc_auc_score: 0.7480
50
+ 2025-09-23 15:35:45,116 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0232 | Val mean-roc_auc_score: 0.7395
51
+ 2025-09-23 15:35:50,475 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.7299
52
+ 2025-09-23 15:35:55,944 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0331 | Val mean-roc_auc_score: 0.7417
53
+ 2025-09-23 15:36:01,265 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0227 | Val mean-roc_auc_score: 0.7545
54
+ 2025-09-23 15:36:07,137 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0104 | Val mean-roc_auc_score: 0.7482
55
+ 2025-09-23 15:36:13,038 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7404
56
+ 2025-09-23 15:36:18,990 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7385
57
+ 2025-09-23 15:36:24,688 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0181 | Val mean-roc_auc_score: 0.7815
58
+ 2025-09-23 15:36:24,839 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1444
59
+ 2025-09-23 15:36:25,378 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 38 with val mean-roc_auc_score: 0.7815
60
+ 2025-09-23 15:36:31,092 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.1143 | Val mean-roc_auc_score: 0.7536
61
+ 2025-09-23 15:36:36,860 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0186 | Val mean-roc_auc_score: 0.7611
62
+ 2025-09-23 15:36:42,689 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.7538
63
+ 2025-09-23 15:36:48,923 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.7571
64
+ 2025-09-23 15:36:54,849 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.7467
65
+ 2025-09-23 15:37:00,609 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7565
66
+ 2025-09-23 15:37:06,465 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0248 | Val mean-roc_auc_score: 0.7439
67
+ 2025-09-23 15:37:12,313 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0151 | Val mean-roc_auc_score: 0.7355
68
+ 2025-09-23 15:37:18,132 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7631
69
+ 2025-09-23 15:37:23,368 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0127 | Val mean-roc_auc_score: 0.7590
70
+ 2025-09-23 15:37:28,719 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7612
71
+ 2025-09-23 15:37:34,691 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0119 | Val mean-roc_auc_score: 0.7559
72
+ 2025-09-23 15:37:40,507 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0273 | Val mean-roc_auc_score: 0.7596
73
+ 2025-09-23 15:37:46,552 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0229 | Val mean-roc_auc_score: 0.7613
74
+ 2025-09-23 15:37:53,657 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7642
75
+ 2025-09-23 15:37:59,535 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7620
76
+ 2025-09-23 15:38:05,327 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7579
77
+ 2025-09-23 15:38:11,183 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7566
78
+ 2025-09-23 15:38:17,260 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7584
79
+ 2025-09-23 15:38:23,163 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7604
80
+ 2025-09-23 15:38:28,928 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7599
81
+ 2025-09-23 15:38:34,608 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7603
82
+ 2025-09-23 15:38:40,253 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7609
83
+ 2025-09-23 15:38:46,148 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7535
84
+ 2025-09-23 15:38:51,409 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7584
85
+ 2025-09-23 15:38:56,644 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7604
86
+ 2025-09-23 15:39:02,208 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7546
87
+ 2025-09-23 15:39:07,953 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7564
88
+ 2025-09-23 15:39:14,163 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7565
89
+ 2025-09-23 15:39:20,024 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.7559
90
+ 2025-09-23 15:39:25,848 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7592
91
+ 2025-09-23 15:39:31,693 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7571
92
+ 2025-09-23 15:39:37,559 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7561
93
+ 2025-09-23 15:39:43,703 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7561
94
+ 2025-09-23 15:39:49,599 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7552
95
+ 2025-09-23 15:39:55,443 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.7564
96
+ 2025-09-23 15:40:01,341 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7557
97
+ 2025-09-23 15:40:07,152 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7536
98
+ 2025-09-23 15:40:13,233 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.7574
99
+ 2025-09-23 15:40:18,504 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7597
100
+ 2025-09-23 15:40:25,053 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7614
101
+ 2025-09-23 15:40:30,873 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.7620
102
+ 2025-09-23 15:40:36,731 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7618
103
+ 2025-09-23 15:40:42,785 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7665
104
+ 2025-09-23 15:40:48,660 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7574
105
+ 2025-09-23 15:40:54,752 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7624
106
+ 2025-09-23 15:41:00,680 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0100 | Val mean-roc_auc_score: 0.7622
107
+ 2025-09-23 15:41:06,558 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7635
108
+ 2025-09-23 15:41:12,608 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.7639
109
+ 2025-09-23 15:41:18,443 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7662
110
+ 2025-09-23 15:41:24,290 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7672
111
+ 2025-09-23 15:41:30,105 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7673
112
+ 2025-09-23 15:41:35,766 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7668
113
+ 2025-09-23 15:41:41,692 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.7662
114
+ 2025-09-23 15:41:46,952 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7660
115
+ 2025-09-23 15:41:52,102 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7655
116
+ 2025-09-23 15:41:57,251 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.7635
117
+ 2025-09-23 15:42:03,105 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7648
118
+ 2025-09-23 15:42:09,140 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7646
119
+ 2025-09-23 15:42:14,816 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.7644
120
+ 2025-09-23 15:42:20,705 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7642
121
+ 2025-09-23 15:42:26,620 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7647
122
+ 2025-09-23 15:42:27,511 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.8057
123
+ 2025-09-23 15:42:27,826 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset bace_classification at 2025-09-23_15-42-27
124
+ 2025-09-23 15:42:33,199 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.6645 | Val mean-roc_auc_score: 0.7057
125
+ 2025-09-23 15:42:33,199 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
126
+ 2025-09-23 15:42:33,716 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7057
127
+ 2025-09-23 15:42:39,523 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.4408 | Val mean-roc_auc_score: 0.7241
128
+ 2025-09-23 15:42:39,694 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 76
129
+ 2025-09-23 15:42:40,262 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7241
130
+ 2025-09-23 15:42:46,107 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3438 | Val mean-roc_auc_score: 0.7394
131
+ 2025-09-23 15:42:46,285 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 114
132
+ 2025-09-23 15:42:46,811 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7394
133
+ 2025-09-23 15:42:52,674 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.3174 | Val mean-roc_auc_score: 0.7539
134
+ 2025-09-23 15:42:52,853 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 152
135
+ 2025-09-23 15:42:53,380 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.7539
136
+ 2025-09-23 15:42:59,242 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2599 | Val mean-roc_auc_score: 0.7348
137
+ 2025-09-23 15:43:05,023 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2366 | Val mean-roc_auc_score: 0.7308
138
+ 2025-09-23 15:43:11,000 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.2253 | Val mean-roc_auc_score: 0.7552
139
+ 2025-09-23 15:43:11,190 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 266
140
+ 2025-09-23 15:43:11,731 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.7552
141
+ 2025-09-23 15:43:17,491 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1650 | Val mean-roc_auc_score: 0.7365
142
+ 2025-09-23 15:43:22,804 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1941 | Val mean-roc_auc_score: 0.7526
143
+ 2025-09-23 15:43:28,603 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1661 | Val mean-roc_auc_score: 0.7162
144
+ 2025-09-23 15:43:34,276 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1684 | Val mean-roc_auc_score: 0.7259
145
+ 2025-09-23 15:43:40,165 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1340 | Val mean-roc_auc_score: 0.7312
146
+ 2025-09-23 15:43:45,859 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1192 | Val mean-roc_auc_score: 0.7440
147
+ 2025-09-23 15:43:51,737 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1299 | Val mean-roc_auc_score: 0.7255
148
+ 2025-09-23 15:43:57,604 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0991 | Val mean-roc_auc_score: 0.7319
149
+ 2025-09-23 15:44:03,496 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1006 | Val mean-roc_auc_score: 0.7245
150
+ 2025-09-23 15:44:09,545 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0662 | Val mean-roc_auc_score: 0.7197
151
+ 2025-09-23 15:44:15,462 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.1456 | Val mean-roc_auc_score: 0.7348
152
+ 2025-09-23 15:44:21,265 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0767 | Val mean-roc_auc_score: 0.7225
153
+ 2025-09-23 15:44:27,156 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0516 | Val mean-roc_auc_score: 0.7732
154
+ 2025-09-23 15:44:27,308 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 760
155
+ 2025-09-23 15:44:27,831 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 20 with val mean-roc_auc_score: 0.7732
156
+ 2025-09-23 15:44:33,700 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.1053 | Val mean-roc_auc_score: 0.7130
157
+ 2025-09-23 15:44:39,774 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0751 | Val mean-roc_auc_score: 0.7246
158
+ 2025-09-23 15:44:45,528 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0335 | Val mean-roc_auc_score: 0.7345
159
+ 2025-09-23 15:44:50,953 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0456 | Val mean-roc_auc_score: 0.7353
160
+ 2025-09-23 15:44:56,139 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0298 | Val mean-roc_auc_score: 0.7436
161
+ 2025-09-23 15:45:01,612 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0292 | Val mean-roc_auc_score: 0.7413
162
+ 2025-09-23 15:45:08,808 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0294 | Val mean-roc_auc_score: 0.7401
163
+ 2025-09-23 15:45:14,490 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0405 | Val mean-roc_auc_score: 0.7326
164
+ 2025-09-23 15:45:20,357 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0854 | Val mean-roc_auc_score: 0.7399
165
+ 2025-09-23 15:45:26,221 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0337 | Val mean-roc_auc_score: 0.7670
166
+ 2025-09-23 15:45:32,084 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.1308 | Val mean-roc_auc_score: 0.7358
167
+ 2025-09-23 15:45:38,232 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0566 | Val mean-roc_auc_score: 0.7727
168
+ 2025-09-23 15:45:44,039 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0290 | Val mean-roc_auc_score: 0.7657
169
+ 2025-09-23 15:45:49,840 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.7591
170
+ 2025-09-23 15:45:55,671 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0149 | Val mean-roc_auc_score: 0.7691
171
+ 2025-09-23 15:46:01,504 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0395 | Val mean-roc_auc_score: 0.7357
172
+ 2025-09-23 15:46:07,627 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0360 | Val mean-roc_auc_score: 0.7226
173
+ 2025-09-23 15:46:13,315 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0237 | Val mean-roc_auc_score: 0.7303
174
+ 2025-09-23 15:46:19,218 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0163 | Val mean-roc_auc_score: 0.7413
175
+ 2025-09-23 15:46:24,362 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0136 | Val mean-roc_auc_score: 0.7428
176
+ 2025-09-23 15:46:30,140 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.7354
177
+ 2025-09-23 15:46:36,158 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7733
178
+ 2025-09-23 15:46:36,308 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1596
179
+ 2025-09-23 15:46:36,837 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 42 with val mean-roc_auc_score: 0.7733
180
+ 2025-09-23 15:46:42,617 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0708 | Val mean-roc_auc_score: 0.7700
181
+ 2025-09-23 15:46:48,407 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0260 | Val mean-roc_auc_score: 0.7719
182
+ 2025-09-23 15:46:54,047 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0418 | Val mean-roc_auc_score: 0.7683
183
+ 2025-09-23 15:46:59,772 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0150 | Val mean-roc_auc_score: 0.7621
184
+ 2025-09-23 15:47:05,729 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7583
185
+ 2025-09-23 15:47:11,529 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7568
186
+ 2025-09-23 15:47:17,414 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7554
187
+ 2025-09-23 15:47:23,265 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7562
188
+ 2025-09-23 15:47:29,167 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.7552
189
+ 2025-09-23 15:47:35,236 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0230 | Val mean-roc_auc_score: 0.7539
190
+ 2025-09-23 15:47:42,051 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.7428
191
+ 2025-09-23 15:47:47,921 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7508
192
+ 2025-09-23 15:47:53,068 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.7513
193
+ 2025-09-23 15:47:58,556 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7516
194
+ 2025-09-23 15:48:04,713 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7484
195
+ 2025-09-23 15:48:10,462 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7492
196
+ 2025-09-23 15:48:16,246 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7531
197
+ 2025-09-23 15:48:21,941 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7461
198
+ 2025-09-23 15:48:27,613 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7490
199
+ 2025-09-23 15:48:33,517 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7482
200
+ 2025-09-23 15:48:39,298 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7398
201
+ 2025-09-23 15:48:45,076 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0161 | Val mean-roc_auc_score: 0.7299
202
+ 2025-09-23 15:48:50,901 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7422
203
+ 2025-09-23 15:48:56,770 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7450
204
+ 2025-09-23 15:49:03,228 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7446
205
+ 2025-09-23 15:49:08,981 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7453
206
+ 2025-09-23 15:49:14,934 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7448
207
+ 2025-09-23 15:49:20,856 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7452
208
+ 2025-09-23 15:49:26,162 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.7448
209
+ 2025-09-23 15:49:32,266 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7446
210
+ 2025-09-23 15:49:38,157 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7431
211
+ 2025-09-23 15:49:43,986 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0259 | Val mean-roc_auc_score: 0.7420
212
+ 2025-09-23 15:49:49,842 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0160 | Val mean-roc_auc_score: 0.7445
213
+ 2025-09-23 15:49:55,637 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7452
214
+ 2025-09-23 15:50:01,757 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7465
215
+ 2025-09-23 15:50:07,598 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7456
216
+ 2025-09-23 15:50:14,517 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7452
217
+ 2025-09-23 15:50:20,132 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7436
218
+ 2025-09-23 15:50:25,820 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7444
219
+ 2025-09-23 15:50:31,833 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7436
220
+ 2025-09-23 15:50:37,482 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7438
221
+ 2025-09-23 15:50:43,317 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7436
222
+ 2025-09-23 15:50:49,267 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7432
223
+ 2025-09-23 15:50:54,512 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7440
224
+ 2025-09-23 15:51:00,236 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7443
225
+ 2025-09-23 15:51:06,230 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7440
226
+ 2025-09-23 15:51:12,141 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7446
227
+ 2025-09-23 15:51:18,063 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7427
228
+ 2025-09-23 15:51:23,956 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7412
229
+ 2025-09-23 15:51:30,287 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7522
230
+ 2025-09-23 15:51:36,222 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7479
231
+ 2025-09-23 15:51:42,004 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7494
232
+ 2025-09-23 15:51:47,763 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0266 | Val mean-roc_auc_score: 0.7507
233
+ 2025-09-23 15:51:53,458 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7504
234
+ 2025-09-23 15:51:59,421 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7516
235
+ 2025-09-23 15:52:04,758 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7510
236
+ 2025-09-23 15:52:10,625 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7526
237
+ 2025-09-23 15:52:16,594 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7515
238
+ 2025-09-23 15:52:17,466 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7706
239
+ 2025-09-23 15:52:17,817 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset bace_classification at 2025-09-23_15-52-17
240
+ 2025-09-23 15:52:22,537 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.7368 | Val mean-roc_auc_score: 0.6879
241
+ 2025-09-23 15:52:22,537 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 38
242
+ 2025-09-23 15:52:23,070 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.6879
243
+ 2025-09-23 15:52:28,398 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5033 | Val mean-roc_auc_score: 0.7126
244
+ 2025-09-23 15:52:28,570 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 76
245
+ 2025-09-23 15:52:29,088 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7126
246
+ 2025-09-23 15:52:34,827 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.4174 | Val mean-roc_auc_score: 0.7462
247
+ 2025-09-23 15:52:35,001 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 114
248
+ 2025-09-23 15:52:35,518 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7462
249
+ 2025-09-23 15:52:41,359 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.3289 | Val mean-roc_auc_score: 0.7367
250
+ 2025-09-23 15:52:47,115 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2845 | Val mean-roc_auc_score: 0.7516
251
+ 2025-09-23 15:52:47,306 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 190
252
+ 2025-09-23 15:52:47,858 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.7516
253
+ 2025-09-23 15:52:53,820 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2533 | Val mean-roc_auc_score: 0.7308
254
+ 2025-09-23 15:52:59,926 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.2220 | Val mean-roc_auc_score: 0.7065
255
+ 2025-09-23 15:53:05,758 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.2256 | Val mean-roc_auc_score: 0.7148
256
+ 2025-09-23 15:53:11,538 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1719 | Val mean-roc_auc_score: 0.7478
257
+ 2025-09-23 15:53:17,239 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1612 | Val mean-roc_auc_score: 0.7209
258
+ 2025-09-23 15:53:22,951 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1701 | Val mean-roc_auc_score: 0.7282
259
+ 2025-09-23 15:53:28,786 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1127 | Val mean-roc_auc_score: 0.7010
260
+ 2025-09-23 15:53:34,775 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1201 | Val mean-roc_auc_score: 0.7374
261
+ 2025-09-23 15:53:40,703 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1191 | Val mean-roc_auc_score: 0.7358
262
+ 2025-09-23 15:53:46,634 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0929 | Val mean-roc_auc_score: 0.7420
263
+ 2025-09-23 15:53:52,452 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0718 | Val mean-roc_auc_score: 0.7370
264
+ 2025-09-23 15:53:58,080 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0633 | Val mean-roc_auc_score: 0.7039
265
+ 2025-09-23 15:54:03,693 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0765 | Val mean-roc_auc_score: 0.7241
266
+ 2025-09-23 15:54:09,408 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0437 | Val mean-roc_auc_score: 0.7162
267
+ 2025-09-23 15:54:15,146 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0516 | Val mean-roc_auc_score: 0.7190
268
+ 2025-09-23 15:54:21,040 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0475 | Val mean-roc_auc_score: 0.7324
269
+ 2025-09-23 15:54:27,352 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0365 | Val mean-roc_auc_score: 0.7235
270
+ 2025-09-23 15:54:33,304 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.7148
271
+ 2025-09-23 15:54:39,248 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0172 | Val mean-roc_auc_score: 0.7074
272
+ 2025-09-23 15:54:45,036 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0442 | Val mean-roc_auc_score: 0.7279
273
+ 2025-09-23 15:54:50,875 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0245 | Val mean-roc_auc_score: 0.7067
274
+ 2025-09-23 15:54:57,777 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0436 | Val mean-roc_auc_score: 0.7185
275
+ 2025-09-23 15:55:03,707 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0942 | Val mean-roc_auc_score: 0.7316
276
+ 2025-09-23 15:55:09,643 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0359 | Val mean-roc_auc_score: 0.7284
277
+ 2025-09-23 15:55:15,500 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0139 | Val mean-roc_auc_score: 0.7308
278
+ 2025-09-23 15:55:21,348 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0147 | Val mean-roc_auc_score: 0.7279
279
+ 2025-09-23 15:55:27,151 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0427 | Val mean-roc_auc_score: 0.7197
280
+ 2025-09-23 15:55:32,973 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0212 | Val mean-roc_auc_score: 0.7153
281
+ 2025-09-23 15:55:38,895 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7203
282
+ 2025-09-23 15:55:44,715 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7255
283
+ 2025-09-23 15:55:50,587 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7216
284
+ 2025-09-23 15:55:56,725 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7218
285
+ 2025-09-23 15:56:02,621 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7234
286
+ 2025-09-23 15:56:08,510 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0267 | Val mean-roc_auc_score: 0.7429
287
+ 2025-09-23 15:56:14,214 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0535 | Val mean-roc_auc_score: 0.7463
288
+ 2025-09-23 15:56:19,884 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0333 | Val mean-roc_auc_score: 0.7542
289
+ 2025-09-23 15:56:20,300 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 1558
290
+ 2025-09-23 15:56:20,821 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 41 with val mean-roc_auc_score: 0.7542
291
+ 2025-09-23 15:56:26,230 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0164 | Val mean-roc_auc_score: 0.7442
292
+ 2025-09-23 15:56:31,892 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.7401
293
+ 2025-09-23 15:56:37,971 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.7389
294
+ 2025-09-23 15:56:43,840 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7424
295
+ 2025-09-23 15:56:49,696 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0230 | Val mean-roc_auc_score: 0.7472
296
+ 2025-09-23 15:56:55,109 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0130 | Val mean-roc_auc_score: 0.7411
297
+ 2025-09-23 15:57:00,221 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0135 | Val mean-roc_auc_score: 0.7252
298
+ 2025-09-23 15:57:06,037 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0296 | Val mean-roc_auc_score: 0.7461
299
+ 2025-09-23 15:57:11,926 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0195 | Val mean-roc_auc_score: 0.7297
300
+ 2025-09-23 15:57:17,900 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0097 | Val mean-roc_auc_score: 0.7413
301
+ 2025-09-23 15:57:23,797 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7445
302
+ 2025-09-23 15:57:30,793 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7446
303
+ 2025-09-23 15:57:36,457 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7454
304
+ 2025-09-23 15:57:42,321 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7462
305
+ 2025-09-23 15:57:48,133 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7463
306
+ 2025-09-23 15:57:54,016 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7510
307
+ 2025-09-23 15:57:59,914 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7528
308
+ 2025-09-23 15:58:05,725 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7509
309
+ 2025-09-23 15:58:11,576 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.7507
310
+ 2025-09-23 15:58:17,429 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7522
311
+ 2025-09-23 15:58:23,482 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7525
312
+ 2025-09-23 15:58:28,832 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7528
313
+ 2025-09-23 15:58:34,693 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.7530
314
+ 2025-09-23 15:58:40,556 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7522
315
+ 2025-09-23 15:58:46,468 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7500
316
+ 2025-09-23 15:58:52,571 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7459
317
+ 2025-09-23 15:58:58,457 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7482
318
+ 2025-09-23 15:59:04,291 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7473
319
+ 2025-09-23 15:59:10,152 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7476
320
+ 2025-09-23 15:59:16,103 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7476
321
+ 2025-09-23 15:59:22,266 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7443
322
+ 2025-09-23 15:59:28,088 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7473
323
+ 2025-09-23 15:59:34,062 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.7454
324
+ 2025-09-23 15:59:39,793 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7494
325
+ 2025-09-23 15:59:45,538 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7435
326
+ 2025-09-23 15:59:51,503 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7513
327
+ 2025-09-23 15:59:56,679 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7476
328
+ 2025-09-23 16:00:03,279 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7492
329
+ 2025-09-23 16:00:08,996 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7473
330
+ 2025-09-23 16:00:14,656 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.7498
331
+ 2025-09-23 16:00:20,664 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.7487
332
+ 2025-09-23 16:00:26,493 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.7480
333
+ 2025-09-23 16:00:32,302 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7489
334
+ 2025-09-23 16:00:38,166 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7418
335
+ 2025-09-23 16:00:44,045 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0158 | Val mean-roc_auc_score: 0.7440
336
+ 2025-09-23 16:00:50,120 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0167 | Val mean-roc_auc_score: 0.7570
337
+ 2025-09-23 16:00:50,263 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 3306
338
+ 2025-09-23 16:00:50,812 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 87 with val mean-roc_auc_score: 0.7570
339
+ 2025-09-23 16:00:56,722 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7583
340
+ 2025-09-23 16:00:56,901 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 3344
341
+ 2025-09-23 16:00:57,440 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 88 with val mean-roc_auc_score: 0.7583
342
+ 2025-09-23 16:01:03,255 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7584
343
+ 2025-09-23 16:01:03,449 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Global step of best model: 3382
344
+ 2025-09-23 16:01:04,001 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Best model saved at epoch 89 with val mean-roc_auc_score: 0.7584
345
+ 2025-09-23 16:01:09,935 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7575
346
+ 2025-09-23 16:01:15,934 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7561
347
+ 2025-09-23 16:01:22,312 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7584
348
+ 2025-09-23 16:01:27,932 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0152 | Val mean-roc_auc_score: 0.7482
349
+ 2025-09-23 16:01:33,813 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.7498
350
+ 2025-09-23 16:01:39,800 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7516
351
+ 2025-09-23 16:01:45,608 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7534
352
+ 2025-09-23 16:01:51,665 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7487
353
+ 2025-09-23 16:01:57,529 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7496
354
+ 2025-09-23 16:02:03,236 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7509
355
+ 2025-09-23 16:02:08,901 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7508
356
+ 2025-09-23 16:02:09,776 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.8009
357
+ 2025-09-23 16:02:10,125 - logs_modchembert_bace_classification_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7924, Std Dev: 0.0155
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bbbp_epochs100_batch_size32_20250923_160210.log ADDED
@@ -0,0 +1,337 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 16:02:10,127 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Running benchmark for dataset: bbbp
2
+ 2025-09-23 16:02:10,127 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - dataset: bbbp, tasks: ['p_np'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 16:02:10,131 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset bbbp at 2025-09-23_16-02-10
4
+ 2025-09-23 16:02:16,412 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.2993 | Val mean-roc_auc_score: 0.9929
5
+ 2025-09-23 16:02:16,412 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 52
6
+ 2025-09-23 16:02:16,953 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9929
7
+ 2025-09-23 16:02:24,073 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1001 | Val mean-roc_auc_score: 0.9902
8
+ 2025-09-23 16:02:31,156 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1280 | Val mean-roc_auc_score: 0.9922
9
+ 2025-09-23 16:02:38,245 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1035 | Val mean-roc_auc_score: 0.9941
10
+ 2025-09-23 16:02:38,381 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 208
11
+ 2025-09-23 16:02:38,908 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9941
12
+ 2025-09-23 16:02:45,830 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0604 | Val mean-roc_auc_score: 0.9961
13
+ 2025-09-23 16:02:46,009 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 260
14
+ 2025-09-23 16:02:46,550 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.9961
15
+ 2025-09-23 16:02:53,516 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0592 | Val mean-roc_auc_score: 0.9781
16
+ 2025-09-23 16:03:00,095 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0430 | Val mean-roc_auc_score: 0.9897
17
+ 2025-09-23 16:03:07,062 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0278 | Val mean-roc_auc_score: 0.9801
18
+ 2025-09-23 16:03:13,867 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0383 | Val mean-roc_auc_score: 0.9887
19
+ 2025-09-23 16:03:20,737 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.9856
20
+ 2025-09-23 16:03:27,425 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0118 | Val mean-roc_auc_score: 0.9864
21
+ 2025-09-23 16:03:34,514 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.9870
22
+ 2025-09-23 16:03:41,235 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.9834
23
+ 2025-09-23 16:03:47,661 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.9879
24
+ 2025-09-23 16:03:54,715 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0113 | Val mean-roc_auc_score: 0.9811
25
+ 2025-09-23 16:04:01,694 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0173 | Val mean-roc_auc_score: 0.9879
26
+ 2025-09-23 16:04:08,944 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0613 | Val mean-roc_auc_score: 0.9921
27
+ 2025-09-23 16:04:15,886 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0153 | Val mean-roc_auc_score: 0.9935
28
+ 2025-09-23 16:04:22,801 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.9913
29
+ 2025-09-23 16:04:29,989 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9900
30
+ 2025-09-23 16:04:37,118 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9905
31
+ 2025-09-23 16:04:44,439 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.9899
32
+ 2025-09-23 16:04:51,564 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9886
33
+ 2025-09-23 16:04:58,581 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9877
34
+ 2025-09-23 16:05:05,558 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9884
35
+ 2025-09-23 16:05:12,479 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9889
36
+ 2025-09-23 16:05:19,304 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9906
37
+ 2025-09-23 16:05:26,247 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9922
38
+ 2025-09-23 16:05:33,007 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9918
39
+ 2025-09-23 16:05:39,811 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9921
40
+ 2025-09-23 16:05:46,739 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9912
41
+ 2025-09-23 16:05:53,884 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9915
42
+ 2025-09-23 16:06:00,023 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9914
43
+ 2025-09-23 16:06:06,944 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9906
44
+ 2025-09-23 16:06:13,864 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0147 | Val mean-roc_auc_score: 0.9935
45
+ 2025-09-23 16:06:20,774 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0141 | Val mean-roc_auc_score: 0.9889
46
+ 2025-09-23 16:06:28,139 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9883
47
+ 2025-09-23 16:06:35,161 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.9927
48
+ 2025-09-23 16:06:42,962 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.9918
49
+ 2025-09-23 16:06:49,969 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9891
50
+ 2025-09-23 16:06:56,919 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.9883
51
+ 2025-09-23 16:07:04,249 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9919
52
+ 2025-09-23 16:07:10,932 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.9897
53
+ 2025-09-23 16:07:17,599 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9903
54
+ 2025-09-23 16:07:24,048 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9905
55
+ 2025-09-23 16:07:30,413 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9906
56
+ 2025-09-23 16:07:37,537 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9916
57
+ 2025-09-23 16:07:44,612 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9917
58
+ 2025-09-23 16:07:51,624 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9911
59
+ 2025-09-23 16:07:58,740 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9918
60
+ 2025-09-23 16:08:05,656 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9910
61
+ 2025-09-23 16:08:12,479 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9918
62
+ 2025-09-23 16:08:19,573 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9917
63
+ 2025-09-23 16:08:26,598 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9919
64
+ 2025-09-23 16:08:33,639 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9912
65
+ 2025-09-23 16:08:40,685 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9912
66
+ 2025-09-23 16:08:47,693 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9912
67
+ 2025-09-23 16:08:55,508 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9911
68
+ 2025-09-23 16:09:02,131 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9911
69
+ 2025-09-23 16:09:09,244 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9911
70
+ 2025-09-23 16:09:16,183 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9911
71
+ 2025-09-23 16:09:23,609 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9912
72
+ 2025-09-23 16:09:30,550 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9911
73
+ 2025-09-23 16:09:37,111 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9910
74
+ 2025-09-23 16:09:43,970 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9911
75
+ 2025-09-23 16:09:50,771 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9910
76
+ 2025-09-23 16:09:57,792 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9915
77
+ 2025-09-23 16:10:04,659 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9905
78
+ 2025-09-23 16:10:11,612 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0157 | Val mean-roc_auc_score: 0.9895
79
+ 2025-09-23 16:10:18,447 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0150 | Val mean-roc_auc_score: 0.9919
80
+ 2025-09-23 16:10:24,747 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.9910
81
+ 2025-09-23 16:10:31,575 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9924
82
+ 2025-09-23 16:10:38,544 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9926
83
+ 2025-09-23 16:10:45,408 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9927
84
+ 2025-09-23 16:10:52,213 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9927
85
+ 2025-09-23 16:10:58,917 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9925
86
+ 2025-09-23 16:11:06,798 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9925
87
+ 2025-09-23 16:11:13,840 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9925
88
+ 2025-09-23 16:11:20,913 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9916
89
+ 2025-09-23 16:11:28,064 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9924
90
+ 2025-09-23 16:11:35,265 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9919
91
+ 2025-09-23 16:11:42,696 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9921
92
+ 2025-09-23 16:11:49,588 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9921
93
+ 2025-09-23 16:11:55,578 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9921
94
+ 2025-09-23 16:12:02,388 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9921
95
+ 2025-09-23 16:12:09,371 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9921
96
+ 2025-09-23 16:12:16,730 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9920
97
+ 2025-09-23 16:12:23,723 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9921
98
+ 2025-09-23 16:12:30,534 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9923
99
+ 2025-09-23 16:12:37,342 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9897
100
+ 2025-09-23 16:12:44,233 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0167 | Val mean-roc_auc_score: 0.9924
101
+ 2025-09-23 16:12:51,552 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9924
102
+ 2025-09-23 16:12:58,654 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9927
103
+ 2025-09-23 16:13:05,485 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9926
104
+ 2025-09-23 16:13:12,134 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9927
105
+ 2025-09-23 16:13:18,834 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9926
106
+ 2025-09-23 16:13:26,679 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9927
107
+ 2025-09-23 16:13:33,882 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9926
108
+ 2025-09-23 16:13:40,844 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9927
109
+ 2025-09-23 16:13:47,948 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9926
110
+ 2025-09-23 16:13:48,861 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7235
111
+ 2025-09-23 16:13:49,207 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset bbbp at 2025-09-23_16-13-49
112
+ 2025-09-23 16:13:55,677 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.2728 | Val mean-roc_auc_score: 0.9926
113
+ 2025-09-23 16:13:55,677 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 52
114
+ 2025-09-23 16:13:56,198 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9926
115
+ 2025-09-23 16:14:02,554 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1914 | Val mean-roc_auc_score: 0.9926
116
+ 2025-09-23 16:14:02,732 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 104
117
+ 2025-09-23 16:14:03,278 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9926
118
+ 2025-09-23 16:14:10,143 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1178 | Val mean-roc_auc_score: 0.9895
119
+ 2025-09-23 16:14:17,180 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1138 | Val mean-roc_auc_score: 0.9951
120
+ 2025-09-23 16:14:17,330 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 208
121
+ 2025-09-23 16:14:17,866 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9951
122
+ 2025-09-23 16:14:24,834 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0610 | Val mean-roc_auc_score: 0.9910
123
+ 2025-09-23 16:14:31,795 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0723 | Val mean-roc_auc_score: 0.9891
124
+ 2025-09-23 16:14:38,815 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0359 | Val mean-roc_auc_score: 0.9907
125
+ 2025-09-23 16:14:45,512 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0410 | Val mean-roc_auc_score: 0.9908
126
+ 2025-09-23 16:14:51,957 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0308 | Val mean-roc_auc_score: 0.9876
127
+ 2025-09-23 16:14:58,333 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0176 | Val mean-roc_auc_score: 0.9861
128
+ 2025-09-23 16:15:05,150 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0174 | Val mean-roc_auc_score: 0.9867
129
+ 2025-09-23 16:15:12,417 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0154 | Val mean-roc_auc_score: 0.9909
130
+ 2025-09-23 16:15:19,428 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.9911
131
+ 2025-09-23 16:15:26,336 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.9924
132
+ 2025-09-23 16:15:33,221 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.9909
133
+ 2025-09-23 16:15:40,086 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.9948
134
+ 2025-09-23 16:15:47,441 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.9912
135
+ 2025-09-23 16:15:54,514 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.9951
136
+ 2025-09-23 16:15:54,665 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 936
137
+ 2025-09-23 16:15:55,241 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 18 with val mean-roc_auc_score: 0.9951
138
+ 2025-09-23 16:16:02,310 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.9936
139
+ 2025-09-23 16:16:10,262 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.9950
140
+ 2025-09-23 16:16:17,033 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9943
141
+ 2025-09-23 16:16:23,677 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9940
142
+ 2025-09-23 16:16:30,410 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9902
143
+ 2025-09-23 16:16:37,548 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9870
144
+ 2025-09-23 16:16:44,546 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.9872
145
+ 2025-09-23 16:16:51,617 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0157 | Val mean-roc_auc_score: 0.9905
146
+ 2025-09-23 16:16:58,553 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.9881
147
+ 2025-09-23 16:17:05,609 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9893
148
+ 2025-09-23 16:17:12,491 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9903
149
+ 2025-09-23 16:17:19,252 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9917
150
+ 2025-09-23 16:17:26,035 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9921
151
+ 2025-09-23 16:17:33,139 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9918
152
+ 2025-09-23 16:17:40,091 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9927
153
+ 2025-09-23 16:17:46,973 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9930
154
+ 2025-09-23 16:17:53,188 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9933
155
+ 2025-09-23 16:17:59,145 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9937
156
+ 2025-09-23 16:18:06,368 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0641 | Val mean-roc_auc_score: 0.9908
157
+ 2025-09-23 16:18:13,541 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0332 | Val mean-roc_auc_score: 0.9908
158
+ 2025-09-23 16:18:21,932 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0272 | Val mean-roc_auc_score: 0.9911
159
+ 2025-09-23 16:18:29,069 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0123 | Val mean-roc_auc_score: 0.9928
160
+ 2025-09-23 16:18:36,251 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9928
161
+ 2025-09-23 16:18:43,574 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9929
162
+ 2025-09-23 16:18:50,603 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9928
163
+ 2025-09-23 16:18:57,658 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9928
164
+ 2025-09-23 16:19:04,890 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9928
165
+ 2025-09-23 16:19:12,134 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9927
166
+ 2025-09-23 16:19:18,752 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9928
167
+ 2025-09-23 16:19:24,842 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9928
168
+ 2025-09-23 16:19:32,017 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9925
169
+ 2025-09-23 16:19:39,142 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9927
170
+ 2025-09-23 16:19:46,190 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9924
171
+ 2025-09-23 16:19:53,289 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9922
172
+ 2025-09-23 16:20:00,140 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9909
173
+ 2025-09-23 16:20:06,970 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9916
174
+ 2025-09-23 16:20:13,939 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9909
175
+ 2025-09-23 16:20:20,914 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9914
176
+ 2025-09-23 16:20:28,186 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9920
177
+ 2025-09-23 16:20:36,322 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9913
178
+ 2025-09-23 16:20:42,941 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9912
179
+ 2025-09-23 16:20:49,229 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9913
180
+ 2025-09-23 16:20:55,131 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9916
181
+ 2025-09-23 16:21:02,488 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9915
182
+ 2025-09-23 16:21:09,541 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9916
183
+ 2025-09-23 16:21:16,693 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9905
184
+ 2025-09-23 16:21:23,438 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9916
185
+ 2025-09-23 16:21:30,151 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9906
186
+ 2025-09-23 16:21:37,362 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9905
187
+ 2025-09-23 16:21:44,362 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9906
188
+ 2025-09-23 16:21:51,446 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9906
189
+ 2025-09-23 16:21:58,428 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9905
190
+ 2025-09-23 16:22:05,337 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9931
191
+ 2025-09-23 16:22:12,050 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.9867
192
+ 2025-09-23 16:22:18,985 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9927
193
+ 2025-09-23 16:22:25,015 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9922
194
+ 2025-09-23 16:22:31,816 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9928
195
+ 2025-09-23 16:22:38,584 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9928
196
+ 2025-09-23 16:22:47,074 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9929
197
+ 2025-09-23 16:22:54,000 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9928
198
+ 2025-09-23 16:23:00,951 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9927
199
+ 2025-09-23 16:23:07,885 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9926
200
+ 2025-09-23 16:23:15,111 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9929
201
+ 2025-09-23 16:23:22,500 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9932
202
+ 2025-09-23 16:23:29,540 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9932
203
+ 2025-09-23 16:23:36,594 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9931
204
+ 2025-09-23 16:23:43,109 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9931
205
+ 2025-09-23 16:23:49,820 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9931
206
+ 2025-09-23 16:23:56,592 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9931
207
+ 2025-09-23 16:24:03,628 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9931
208
+ 2025-09-23 16:24:10,648 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9932
209
+ 2025-09-23 16:24:17,519 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9931
210
+ 2025-09-23 16:24:24,212 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9932
211
+ 2025-09-23 16:24:31,243 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9932
212
+ 2025-09-23 16:24:38,085 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9932
213
+ 2025-09-23 16:24:45,122 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9932
214
+ 2025-09-23 16:24:52,266 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9933
215
+ 2025-09-23 16:24:59,378 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9932
216
+ 2025-09-23 16:25:07,715 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9933
217
+ 2025-09-23 16:25:14,695 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9932
218
+ 2025-09-23 16:25:20,997 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9932
219
+ 2025-09-23 16:25:27,433 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9932
220
+ 2025-09-23 16:25:28,306 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7249
221
+ 2025-09-23 16:25:28,673 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset bbbp at 2025-09-23_16-25-28
222
+ 2025-09-23 16:25:35,097 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.3101 | Val mean-roc_auc_score: 0.9935
223
+ 2025-09-23 16:25:35,097 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 52
224
+ 2025-09-23 16:25:35,675 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9935
225
+ 2025-09-23 16:25:42,649 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.4551 | Val mean-roc_auc_score: 0.9925
226
+ 2025-09-23 16:25:49,758 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1238 | Val mean-roc_auc_score: 0.9919
227
+ 2025-09-23 16:25:56,920 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1113 | Val mean-roc_auc_score: 0.9930
228
+ 2025-09-23 16:26:04,068 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0622 | Val mean-roc_auc_score: 0.9907
229
+ 2025-09-23 16:26:10,845 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0892 | Val mean-roc_auc_score: 0.9938
230
+ 2025-09-23 16:26:11,297 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 312
231
+ 2025-09-23 16:26:11,842 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.9938
232
+ 2025-09-23 16:26:18,567 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0580 | Val mean-roc_auc_score: 0.9946
233
+ 2025-09-23 16:26:18,750 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 364
234
+ 2025-09-23 16:26:19,287 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.9946
235
+ 2025-09-23 16:26:26,007 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0298 | Val mean-roc_auc_score: 0.9939
236
+ 2025-09-23 16:26:32,905 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0183 | Val mean-roc_auc_score: 0.9937
237
+ 2025-09-23 16:26:39,761 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0181 | Val mean-roc_auc_score: 0.9914
238
+ 2025-09-23 16:26:46,708 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0221 | Val mean-roc_auc_score: 0.9926
239
+ 2025-09-23 16:26:53,277 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0226 | Val mean-roc_auc_score: 0.9891
240
+ 2025-09-23 16:27:00,352 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0159 | Val mean-roc_auc_score: 0.9942
241
+ 2025-09-23 16:27:07,422 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0130 | Val mean-roc_auc_score: 0.9948
242
+ 2025-09-23 16:27:07,574 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 728
243
+ 2025-09-23 16:27:08,130 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val mean-roc_auc_score: 0.9948
244
+ 2025-09-23 16:27:15,178 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0101 | Val mean-roc_auc_score: 0.9942
245
+ 2025-09-23 16:27:22,117 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.9947
246
+ 2025-09-23 16:27:29,424 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.9941
247
+ 2025-09-23 16:27:36,410 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9941
248
+ 2025-09-23 16:27:43,521 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.9938
249
+ 2025-09-23 16:27:51,857 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9937
250
+ 2025-09-23 16:27:58,740 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9938
251
+ 2025-09-23 16:28:05,426 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9938
252
+ 2025-09-23 16:28:12,119 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.9952
253
+ 2025-09-23 16:28:12,261 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 1196
254
+ 2025-09-23 16:28:12,838 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 23 with val mean-roc_auc_score: 0.9952
255
+ 2025-09-23 16:28:19,232 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.9940
256
+ 2025-09-23 16:28:25,596 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0565 | Val mean-roc_auc_score: 0.9945
257
+ 2025-09-23 16:28:32,614 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0273 | Val mean-roc_auc_score: 0.9951
258
+ 2025-09-23 16:28:40,012 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.9951
259
+ 2025-09-23 16:28:47,027 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.9948
260
+ 2025-09-23 16:28:53,944 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0352 | Val mean-roc_auc_score: 0.9953
261
+ 2025-09-23 16:28:54,094 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 1508
262
+ 2025-09-23 16:28:54,650 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 29 with val mean-roc_auc_score: 0.9953
263
+ 2025-09-23 16:29:01,651 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.9953
264
+ 2025-09-23 16:29:01,838 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Global step of best model: 1560
265
+ 2025-09-23 16:29:02,365 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Best model saved at epoch 30 with val mean-roc_auc_score: 0.9953
266
+ 2025-09-23 16:29:09,149 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9947
267
+ 2025-09-23 16:29:16,233 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9949
268
+ 2025-09-23 16:29:22,955 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9950
269
+ 2025-09-23 16:29:29,670 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9950
270
+ 2025-09-23 16:29:36,734 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9949
271
+ 2025-09-23 16:29:43,904 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9949
272
+ 2025-09-23 16:29:50,408 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0104 | Val mean-roc_auc_score: 0.9948
273
+ 2025-09-23 16:29:57,086 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9950
274
+ 2025-09-23 16:30:05,222 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9951
275
+ 2025-09-23 16:30:12,258 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9951
276
+ 2025-09-23 16:30:19,203 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9951
277
+ 2025-09-23 16:30:26,404 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9951
278
+ 2025-09-23 16:30:33,355 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9951
279
+ 2025-09-23 16:30:40,516 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9949
280
+ 2025-09-23 16:30:47,602 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9950
281
+ 2025-09-23 16:30:54,658 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.9952
282
+ 2025-09-23 16:31:01,414 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9949
283
+ 2025-09-23 16:31:08,136 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9949
284
+ 2025-09-23 16:31:14,864 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9949
285
+ 2025-09-23 16:31:20,958 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9948
286
+ 2025-09-23 16:31:27,484 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9949
287
+ 2025-09-23 16:31:34,513 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9950
288
+ 2025-09-23 16:31:41,190 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9950
289
+ 2025-09-23 16:31:48,285 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9950
290
+ 2025-09-23 16:31:55,363 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9949
291
+ 2025-09-23 16:32:02,462 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9949
292
+ 2025-09-23 16:32:09,825 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9950
293
+ 2025-09-23 16:32:18,033 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9949
294
+ 2025-09-23 16:32:24,802 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9950
295
+ 2025-09-23 16:32:31,703 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9950
296
+ 2025-09-23 16:32:38,983 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9949
297
+ 2025-09-23 16:32:46,222 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9950
298
+ 2025-09-23 16:32:52,494 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9949
299
+ 2025-09-23 16:32:59,240 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9950
300
+ 2025-09-23 16:33:05,882 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9950
301
+ 2025-09-23 16:33:12,700 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9950
302
+ 2025-09-23 16:33:20,047 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9950
303
+ 2025-09-23 16:33:27,028 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9950
304
+ 2025-09-23 16:33:34,018 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9949
305
+ 2025-09-23 16:33:41,029 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9949
306
+ 2025-09-23 16:33:47,977 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9949
307
+ 2025-09-23 16:33:54,648 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9951
308
+ 2025-09-23 16:34:01,650 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9949
309
+ 2025-09-23 16:34:08,693 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9949
310
+ 2025-09-23 16:34:15,731 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9950
311
+ 2025-09-23 16:34:21,746 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9949
312
+ 2025-09-23 16:34:30,535 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9949
313
+ 2025-09-23 16:34:37,740 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9949
314
+ 2025-09-23 16:34:44,854 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9949
315
+ 2025-09-23 16:34:51,646 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9949
316
+ 2025-09-23 16:34:58,495 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9950
317
+ 2025-09-23 16:35:05,554 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9950
318
+ 2025-09-23 16:35:12,538 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.9949
319
+ 2025-09-23 16:35:19,424 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9951
320
+ 2025-09-23 16:35:26,331 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.9944
321
+ 2025-09-23 16:35:33,277 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9942
322
+ 2025-09-23 16:35:40,566 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0010 | Val mean-roc_auc_score: 0.9936
323
+ 2025-09-23 16:35:47,827 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9938
324
+ 2025-09-23 16:35:54,102 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9936
325
+ 2025-09-23 16:36:01,187 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9941
326
+ 2025-09-23 16:36:08,234 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0008 | Val mean-roc_auc_score: 0.9940
327
+ 2025-09-23 16:36:15,602 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9941
328
+ 2025-09-23 16:36:22,306 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9942
329
+ 2025-09-23 16:36:29,092 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9942
330
+ 2025-09-23 16:36:35,776 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9944
331
+ 2025-09-23 16:36:42,680 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.9940
332
+ 2025-09-23 16:36:50,687 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0007 | Val mean-roc_auc_score: 0.9944
333
+ 2025-09-23 16:36:57,739 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9944
334
+ 2025-09-23 16:37:04,808 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9944
335
+ 2025-09-23 16:37:11,823 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9945
336
+ 2025-09-23 16:37:12,716 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7363
337
+ 2025-09-23 16:37:13,074 - logs_modchembert_bbbp_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7282, Std Dev: 0.0058
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_clintox_epochs100_batch_size32_20250923_163713.log ADDED
@@ -0,0 +1,373 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 16:37:13,075 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Running benchmark for dataset: clintox
2
+ 2025-09-23 16:37:13,075 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - dataset: clintox, tasks: ['FDA_APPROVED', 'CT_TOX'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 16:37:13,100 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset clintox at 2025-09-23_16-37-13
4
+ 2025-09-23 16:37:18,172 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1326 | Val mean-roc_auc_score: 0.9127
5
+ 2025-09-23 16:37:18,172 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
6
+ 2025-09-23 16:37:18,725 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9127
7
+ 2025-09-23 16:37:23,911 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0410 | Val mean-roc_auc_score: 0.9767
8
+ 2025-09-23 16:37:24,105 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
9
+ 2025-09-23 16:37:24,641 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9767
10
+ 2025-09-23 16:37:30,165 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0202 | Val mean-roc_auc_score: 0.9775
11
+ 2025-09-23 16:37:30,351 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 111
12
+ 2025-09-23 16:37:30,883 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9775
13
+ 2025-09-23 16:37:36,669 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0279 | Val mean-roc_auc_score: 0.9741
14
+ 2025-09-23 16:37:42,391 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0266 | Val mean-roc_auc_score: 0.9816
15
+ 2025-09-23 16:37:42,548 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 185
16
+ 2025-09-23 16:37:43,138 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.9816
17
+ 2025-09-23 16:37:48,945 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0234 | Val mean-roc_auc_score: 0.9807
18
+ 2025-09-23 16:37:54,916 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0188 | Val mean-roc_auc_score: 0.9813
19
+ 2025-09-23 16:38:00,629 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0179 | Val mean-roc_auc_score: 0.9822
20
+ 2025-09-23 16:38:00,818 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 296
21
+ 2025-09-23 16:38:01,385 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.9822
22
+ 2025-09-23 16:38:06,952 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0165 | Val mean-roc_auc_score: 0.9816
23
+ 2025-09-23 16:38:12,442 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0141 | Val mean-roc_auc_score: 0.9828
24
+ 2025-09-23 16:38:12,636 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 370
25
+ 2025-09-23 16:38:13,211 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.9828
26
+ 2025-09-23 16:38:18,279 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0157 | Val mean-roc_auc_score: 0.9856
27
+ 2025-09-23 16:38:18,771 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 407
28
+ 2025-09-23 16:38:19,322 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val mean-roc_auc_score: 0.9856
29
+ 2025-09-23 16:38:24,917 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.9866
30
+ 2025-09-23 16:38:25,104 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 444
31
+ 2025-09-23 16:38:25,692 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val mean-roc_auc_score: 0.9866
32
+ 2025-09-23 16:38:31,474 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.9918
33
+ 2025-09-23 16:38:31,668 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 481
34
+ 2025-09-23 16:38:32,245 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 13 with val mean-roc_auc_score: 0.9918
35
+ 2025-09-23 16:38:37,872 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0184 | Val mean-roc_auc_score: 0.9874
36
+ 2025-09-23 16:38:43,541 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.9883
37
+ 2025-09-23 16:38:48,598 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9886
38
+ 2025-09-23 16:38:54,722 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.9845
39
+ 2025-09-23 16:39:00,513 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0197 | Val mean-roc_auc_score: 0.9857
40
+ 2025-09-23 16:39:06,291 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0006 | Val mean-roc_auc_score: 0.9868
41
+ 2025-09-23 16:39:12,089 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.9892
42
+ 2025-09-23 16:39:17,646 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.9877
43
+ 2025-09-23 16:39:23,498 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.9880
44
+ 2025-09-23 16:39:29,064 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.9874
45
+ 2025-09-23 16:39:34,724 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.9857
46
+ 2025-09-23 16:39:40,324 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0472 | Val mean-roc_auc_score: 0.9867
47
+ 2025-09-23 16:39:46,178 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0332 | Val mean-roc_auc_score: 0.9845
48
+ 2025-09-23 16:39:53,509 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0139 | Val mean-roc_auc_score: 0.9839
49
+ 2025-09-23 16:39:59,165 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0123 | Val mean-roc_auc_score: 0.9867
50
+ 2025-09-23 16:40:04,827 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0111 | Val mean-roc_auc_score: 0.9890
51
+ 2025-09-23 16:40:10,521 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.9903
52
+ 2025-09-23 16:40:15,752 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0137 | Val mean-roc_auc_score: 0.9862
53
+ 2025-09-23 16:40:21,279 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.9874
54
+ 2025-09-23 16:40:26,960 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.9874
55
+ 2025-09-23 16:40:32,321 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9886
56
+ 2025-09-23 16:40:38,000 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9886
57
+ 2025-09-23 16:40:43,715 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9892
58
+ 2025-09-23 16:40:49,543 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9886
59
+ 2025-09-23 16:40:55,020 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.9892
60
+ 2025-09-23 16:41:00,544 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9892
61
+ 2025-09-23 16:41:06,179 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9892
62
+ 2025-09-23 16:41:11,635 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9886
63
+ 2025-09-23 16:41:17,624 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9886
64
+ 2025-09-23 16:41:23,360 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9886
65
+ 2025-09-23 16:41:29,124 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9889
66
+ 2025-09-23 16:41:34,885 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9905
67
+ 2025-09-23 16:41:40,588 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0003 | Val mean-roc_auc_score: 0.9892
68
+ 2025-09-23 16:41:46,011 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9892
69
+ 2025-09-23 16:41:51,075 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9892
70
+ 2025-09-23 16:41:56,816 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.9892
71
+ 2025-09-23 16:42:02,548 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9889
72
+ 2025-09-23 16:42:08,288 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9886
73
+ 2025-09-23 16:42:14,376 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9886
74
+ 2025-09-23 16:42:20,009 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9892
75
+ 2025-09-23 16:42:25,631 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9908
76
+ 2025-09-23 16:42:32,533 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9903
77
+ 2025-09-23 16:42:37,618 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9903
78
+ 2025-09-23 16:42:43,559 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9903
79
+ 2025-09-23 16:42:49,019 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9903
80
+ 2025-09-23 16:42:54,692 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9888
81
+ 2025-09-23 16:43:00,365 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9891
82
+ 2025-09-23 16:43:05,923 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.9892
83
+ 2025-09-23 16:43:11,859 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9897
84
+ 2025-09-23 16:43:16,915 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9897
85
+ 2025-09-23 16:43:22,529 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9891
86
+ 2025-09-23 16:43:28,241 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9891
87
+ 2025-09-23 16:43:34,094 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9891
88
+ 2025-09-23 16:43:40,227 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9891
89
+ 2025-09-23 16:43:45,884 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9891
90
+ 2025-09-23 16:43:51,624 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9891
91
+ 2025-09-23 16:43:57,332 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9891
92
+ 2025-09-23 16:44:03,021 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9891
93
+ 2025-09-23 16:44:08,903 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9896
94
+ 2025-09-23 16:44:14,707 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9896
95
+ 2025-09-23 16:44:20,303 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9894
96
+ 2025-09-23 16:44:25,939 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9896
97
+ 2025-09-23 16:44:31,422 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9896
98
+ 2025-09-23 16:44:37,374 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9896
99
+ 2025-09-23 16:44:42,871 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9896
100
+ 2025-09-23 16:44:48,169 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9896
101
+ 2025-09-23 16:44:53,706 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9896
102
+ 2025-09-23 16:44:59,501 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9896
103
+ 2025-09-23 16:45:06,811 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9896
104
+ 2025-09-23 16:45:12,521 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9896
105
+ 2025-09-23 16:45:18,267 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.9896
106
+ 2025-09-23 16:45:23,865 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9896
107
+ 2025-09-23 16:45:29,326 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9896
108
+ 2025-09-23 16:45:35,216 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9886
109
+ 2025-09-23 16:45:40,879 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9913
110
+ 2025-09-23 16:45:46,547 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9908
111
+ 2025-09-23 16:45:52,315 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9913
112
+ 2025-09-23 16:45:58,093 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9908
113
+ 2025-09-23 16:46:04,268 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9887
114
+ 2025-09-23 16:46:10,057 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9900
115
+ 2025-09-23 16:46:15,159 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9913
116
+ 2025-09-23 16:46:20,469 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9908
117
+ 2025-09-23 16:46:26,122 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9908
118
+ 2025-09-23 16:46:32,099 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9905
119
+ 2025-09-23 16:46:37,768 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9908
120
+ 2025-09-23 16:46:43,311 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9908
121
+ 2025-09-23 16:46:48,815 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9910
122
+ 2025-09-23 16:46:49,690 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9945
123
+ 2025-09-23 16:46:50,084 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset clintox at 2025-09-23_16-46-50
124
+ 2025-09-23 16:46:55,086 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1444 | Val mean-roc_auc_score: 0.9743
125
+ 2025-09-23 16:46:55,086 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
126
+ 2025-09-23 16:46:55,638 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9743
127
+ 2025-09-23 16:47:00,956 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0538 | Val mean-roc_auc_score: 0.9762
128
+ 2025-09-23 16:47:01,138 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
129
+ 2025-09-23 16:47:01,682 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9762
130
+ 2025-09-23 16:47:07,529 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0272 | Val mean-roc_auc_score: 0.9790
131
+ 2025-09-23 16:47:07,714 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 111
132
+ 2025-09-23 16:47:08,239 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9790
133
+ 2025-09-23 16:47:13,877 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0254 | Val mean-roc_auc_score: 0.9823
134
+ 2025-09-23 16:47:14,080 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 148
135
+ 2025-09-23 16:47:14,623 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9823
136
+ 2025-09-23 16:47:20,325 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0245 | Val mean-roc_auc_score: 0.9811
137
+ 2025-09-23 16:47:26,037 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0170 | Val mean-roc_auc_score: 0.9839
138
+ 2025-09-23 16:47:26,510 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 222
139
+ 2025-09-23 16:47:27,042 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.9839
140
+ 2025-09-23 16:47:32,827 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0202 | Val mean-roc_auc_score: 0.9874
141
+ 2025-09-23 16:47:33,024 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 259
142
+ 2025-09-23 16:47:33,561 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.9874
143
+ 2025-09-23 16:47:39,300 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0151 | Val mean-roc_auc_score: 0.9840
144
+ 2025-09-23 16:47:44,310 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0210 | Val mean-roc_auc_score: 0.9863
145
+ 2025-09-23 16:47:49,877 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0140 | Val mean-roc_auc_score: 0.9897
146
+ 2025-09-23 16:47:50,056 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 370
147
+ 2025-09-23 16:47:50,591 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.9897
148
+ 2025-09-23 16:47:56,233 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0195 | Val mean-roc_auc_score: 0.9857
149
+ 2025-09-23 16:48:02,190 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.9864
150
+ 2025-09-23 16:48:07,902 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.9874
151
+ 2025-09-23 16:48:13,385 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.9868
152
+ 2025-09-23 16:48:18,872 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.9893
153
+ 2025-09-23 16:48:24,299 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.9887
154
+ 2025-09-23 16:48:30,120 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9887
155
+ 2025-09-23 16:48:35,835 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9895
156
+ 2025-09-23 16:48:41,571 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.9868
157
+ 2025-09-23 16:48:47,310 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9886
158
+ 2025-09-23 16:48:53,015 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9881
159
+ 2025-09-23 16:48:58,921 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0126 | Val mean-roc_auc_score: 0.9887
160
+ 2025-09-23 16:49:04,517 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.9784
161
+ 2025-09-23 16:49:10,062 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0266 | Val mean-roc_auc_score: 0.9761
162
+ 2025-09-23 16:49:15,330 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0126 | Val mean-roc_auc_score: 0.9878
163
+ 2025-09-23 16:49:20,911 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.9885
164
+ 2025-09-23 16:49:27,977 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.9885
165
+ 2025-09-23 16:49:33,530 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9885
166
+ 2025-09-23 16:49:39,101 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9885
167
+ 2025-09-23 16:49:44,836 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.9890
168
+ 2025-09-23 16:49:50,415 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9885
169
+ 2025-09-23 16:49:56,368 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9896
170
+ 2025-09-23 16:50:02,041 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9868
171
+ 2025-09-23 16:50:07,656 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.9913
172
+ 2025-09-23 16:50:07,814 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 1258
173
+ 2025-09-23 16:50:08,377 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 34 with val mean-roc_auc_score: 0.9913
174
+ 2025-09-23 16:50:14,013 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9896
175
+ 2025-09-23 16:50:19,688 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9903
176
+ 2025-09-23 16:50:25,740 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9903
177
+ 2025-09-23 16:50:31,520 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.9903
178
+ 2025-09-23 16:50:37,256 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9908
179
+ 2025-09-23 16:50:42,379 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9891
180
+ 2025-09-23 16:50:47,589 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9896
181
+ 2025-09-23 16:50:53,370 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9903
182
+ 2025-09-23 16:50:58,886 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9891
183
+ 2025-09-23 16:51:04,349 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9897
184
+ 2025-09-23 16:51:10,064 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9896
185
+ 2025-09-23 16:51:15,750 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.9897
186
+ 2025-09-23 16:51:21,316 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9890
187
+ 2025-09-23 16:51:27,052 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.9908
188
+ 2025-09-23 16:51:32,697 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9908
189
+ 2025-09-23 16:51:38,387 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9891
190
+ 2025-09-23 16:51:43,981 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9896
191
+ 2025-09-23 16:51:49,964 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9903
192
+ 2025-09-23 16:51:55,649 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9897
193
+ 2025-09-23 16:52:01,435 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9885
194
+ 2025-09-23 16:52:08,394 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9897
195
+ 2025-09-23 16:52:13,678 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9897
196
+ 2025-09-23 16:52:19,725 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9897
197
+ 2025-09-23 16:52:25,422 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9897
198
+ 2025-09-23 16:52:31,123 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9886
199
+ 2025-09-23 16:52:36,848 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9880
200
+ 2025-09-23 16:52:42,551 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9848
201
+ 2025-09-23 16:52:48,060 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.9925
202
+ 2025-09-23 16:52:48,206 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 2294
203
+ 2025-09-23 16:52:48,738 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 62 with val mean-roc_auc_score: 0.9925
204
+ 2025-09-23 16:52:54,414 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0115 | Val mean-roc_auc_score: 0.9931
205
+ 2025-09-23 16:52:54,625 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 2331
206
+ 2025-09-23 16:52:55,150 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 63 with val mean-roc_auc_score: 0.9931
207
+ 2025-09-23 16:53:00,973 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.9931
208
+ 2025-09-23 16:53:06,675 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.9920
209
+ 2025-09-23 16:53:12,399 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9920
210
+ 2025-09-23 16:53:18,445 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9926
211
+ 2025-09-23 16:53:23,984 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9920
212
+ 2025-09-23 16:53:29,545 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9920
213
+ 2025-09-23 16:53:35,046 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9920
214
+ 2025-09-23 16:53:40,124 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9920
215
+ 2025-09-23 16:53:45,730 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9920
216
+ 2025-09-23 16:53:51,431 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.9920
217
+ 2025-09-23 16:53:57,089 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9920
218
+ 2025-09-23 16:54:02,801 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9920
219
+ 2025-09-23 16:54:08,440 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.9920
220
+ 2025-09-23 16:54:14,127 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9920
221
+ 2025-09-23 16:54:19,822 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9920
222
+ 2025-09-23 16:54:25,487 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9920
223
+ 2025-09-23 16:54:30,928 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9920
224
+ 2025-09-23 16:54:36,338 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9920
225
+ 2025-09-23 16:54:43,421 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9920
226
+ 2025-09-23 16:54:49,144 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9920
227
+ 2025-09-23 16:54:54,835 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9920
228
+ 2025-09-23 16:55:00,542 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9920
229
+ 2025-09-23 16:55:06,243 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9920
230
+ 2025-09-23 16:55:11,958 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9920
231
+ 2025-09-23 16:55:17,657 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9920
232
+ 2025-09-23 16:55:23,389 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9920
233
+ 2025-09-23 16:55:29,139 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9920
234
+ 2025-09-23 16:55:34,752 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9920
235
+ 2025-09-23 16:55:40,067 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9920
236
+ 2025-09-23 16:55:45,546 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9920
237
+ 2025-09-23 16:55:51,092 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9903
238
+ 2025-09-23 16:55:56,785 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.9909
239
+ 2025-09-23 16:56:02,466 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9909
240
+ 2025-09-23 16:56:08,428 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9909
241
+ 2025-09-23 16:56:14,144 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.9909
242
+ 2025-09-23 16:56:19,884 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9903
243
+ 2025-09-23 16:56:25,547 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9909
244
+ 2025-09-23 16:56:26,395 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9793
245
+ 2025-09-23 16:56:26,754 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset clintox at 2025-09-23_16-56-26
246
+ 2025-09-23 16:56:31,935 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1503 | Val mean-roc_auc_score: 0.9637
247
+ 2025-09-23 16:56:31,935 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 37
248
+ 2025-09-23 16:56:32,452 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.9637
249
+ 2025-09-23 16:56:37,580 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0427 | Val mean-roc_auc_score: 0.9758
250
+ 2025-09-23 16:56:37,748 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 74
251
+ 2025-09-23 16:56:38,271 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.9758
252
+ 2025-09-23 16:56:43,339 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0182 | Val mean-roc_auc_score: 0.9805
253
+ 2025-09-23 16:56:43,523 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 111
254
+ 2025-09-23 16:56:44,076 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.9805
255
+ 2025-09-23 16:56:49,881 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0338 | Val mean-roc_auc_score: 0.9861
256
+ 2025-09-23 16:56:50,106 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 148
257
+ 2025-09-23 16:56:50,638 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.9861
258
+ 2025-09-23 16:56:56,364 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0329 | Val mean-roc_auc_score: 0.9788
259
+ 2025-09-23 16:57:02,115 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0254 | Val mean-roc_auc_score: 0.9834
260
+ 2025-09-23 16:57:07,589 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0208 | Val mean-roc_auc_score: 0.9846
261
+ 2025-09-23 16:57:13,155 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0139 | Val mean-roc_auc_score: 0.9779
262
+ 2025-09-23 16:57:18,847 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0312 | Val mean-roc_auc_score: 0.9811
263
+ 2025-09-23 16:57:24,494 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0211 | Val mean-roc_auc_score: 0.9834
264
+ 2025-09-23 16:57:30,112 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9863
265
+ 2025-09-23 16:57:30,558 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 407
266
+ 2025-09-23 16:57:31,111 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val mean-roc_auc_score: 0.9863
267
+ 2025-09-23 16:57:36,640 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.9851
268
+ 2025-09-23 16:57:42,132 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0223 | Val mean-roc_auc_score: 0.9854
269
+ 2025-09-23 16:57:47,825 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0248 | Val mean-roc_auc_score: 0.9890
270
+ 2025-09-23 16:57:48,005 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 518
271
+ 2025-09-23 16:57:48,528 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val mean-roc_auc_score: 0.9890
272
+ 2025-09-23 16:57:54,182 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.9903
273
+ 2025-09-23 16:57:54,365 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 555
274
+ 2025-09-23 16:57:54,899 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val mean-roc_auc_score: 0.9903
275
+ 2025-09-23 16:58:00,596 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0098 | Val mean-roc_auc_score: 0.9887
276
+ 2025-09-23 16:58:05,888 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9899
277
+ 2025-09-23 16:58:11,129 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0150 | Val mean-roc_auc_score: 0.9886
278
+ 2025-09-23 16:58:16,792 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0334 | Val mean-roc_auc_score: 0.9892
279
+ 2025-09-23 16:58:22,416 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.9892
280
+ 2025-09-23 16:58:28,024 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.9914
281
+ 2025-09-23 16:58:28,497 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 777
282
+ 2025-09-23 16:58:29,044 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val mean-roc_auc_score: 0.9914
283
+ 2025-09-23 16:58:34,503 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9914
284
+ 2025-09-23 16:58:34,686 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 814
285
+ 2025-09-23 16:58:35,228 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 22 with val mean-roc_auc_score: 0.9914
286
+ 2025-09-23 16:58:40,901 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.9936
287
+ 2025-09-23 16:58:41,097 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 851
288
+ 2025-09-23 16:58:41,696 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 23 with val mean-roc_auc_score: 0.9936
289
+ 2025-09-23 16:58:47,379 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.9930
290
+ 2025-09-23 16:58:53,086 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.9914
291
+ 2025-09-23 16:58:58,675 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.9887
292
+ 2025-09-23 16:59:05,908 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0182 | Val mean-roc_auc_score: 0.9881
293
+ 2025-09-23 16:59:11,587 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.9903
294
+ 2025-09-23 16:59:17,006 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0184 | Val mean-roc_auc_score: 0.9925
295
+ 2025-09-23 16:59:22,552 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9910
296
+ 2025-09-23 16:59:28,116 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.9921
297
+ 2025-09-23 16:59:33,553 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9918
298
+ 2025-09-23 16:59:38,813 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.9915
299
+ 2025-09-23 16:59:44,318 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9918
300
+ 2025-09-23 16:59:50,144 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.9910
301
+ 2025-09-23 16:59:55,835 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9931
302
+ 2025-09-23 17:00:01,577 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9918
303
+ 2025-09-23 17:00:07,268 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.9925
304
+ 2025-09-23 17:00:12,904 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.9915
305
+ 2025-09-23 17:00:18,575 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9918
306
+ 2025-09-23 17:00:24,052 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.9918
307
+ 2025-09-23 17:00:29,823 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.9931
308
+ 2025-09-23 17:00:35,441 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.9925
309
+ 2025-09-23 17:00:40,981 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.9928
310
+ 2025-09-23 17:00:46,601 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9925
311
+ 2025-09-23 17:00:52,312 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0114 | Val mean-roc_auc_score: 0.9922
312
+ 2025-09-23 17:00:58,223 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9931
313
+ 2025-09-23 17:01:03,242 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.9937
314
+ 2025-09-23 17:01:03,397 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 1776
315
+ 2025-09-23 17:01:03,932 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 48 with val mean-roc_auc_score: 0.9937
316
+ 2025-09-23 17:01:09,588 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0012 | Val mean-roc_auc_score: 0.9931
317
+ 2025-09-23 17:01:15,199 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9937
318
+ 2025-09-23 17:01:20,780 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9937
319
+ 2025-09-23 17:01:26,121 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.9943
320
+ 2025-09-23 17:01:26,282 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 1924
321
+ 2025-09-23 17:01:26,845 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 52 with val mean-roc_auc_score: 0.9943
322
+ 2025-09-23 17:01:32,471 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9943
323
+ 2025-09-23 17:01:38,123 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.9943
324
+ 2025-09-23 17:01:45,129 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9937
325
+ 2025-09-23 17:01:50,688 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.9931
326
+ 2025-09-23 17:01:56,519 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9943
327
+ 2025-09-23 17:02:01,981 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9943
328
+ 2025-09-23 17:02:07,510 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9937
329
+ 2025-09-23 17:02:13,198 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.9937
330
+ 2025-09-23 17:02:18,936 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9931
331
+ 2025-09-23 17:02:24,986 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.9937
332
+ 2025-09-23 17:02:30,136 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9940
333
+ 2025-09-23 17:02:35,315 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9937
334
+ 2025-09-23 17:02:41,125 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0004 | Val mean-roc_auc_score: 0.9943
335
+ 2025-09-23 17:02:46,695 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9937
336
+ 2025-09-23 17:02:52,133 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9937
337
+ 2025-09-23 17:02:57,790 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.9940
338
+ 2025-09-23 17:03:03,487 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.9937
339
+ 2025-09-23 17:03:09,153 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.9920
340
+ 2025-09-23 17:03:14,888 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.9920
341
+ 2025-09-23 17:03:20,915 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9920
342
+ 2025-09-23 17:03:26,656 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.9920
343
+ 2025-09-23 17:03:32,326 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9920
344
+ 2025-09-23 17:03:38,037 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9920
345
+ 2025-09-23 17:03:43,592 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9920
346
+ 2025-09-23 17:03:49,389 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.9920
347
+ 2025-09-23 17:03:54,995 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.9931
348
+ 2025-09-23 17:04:00,087 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.9943
349
+ 2025-09-23 17:04:05,014 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.9943
350
+ 2025-09-23 17:04:10,734 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.9943
351
+ 2025-09-23 17:04:17,463 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.9943
352
+ 2025-09-23 17:04:23,072 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9943
353
+ 2025-09-23 17:04:28,846 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9938
354
+ 2025-09-23 17:04:34,587 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9938
355
+ 2025-09-23 17:04:40,335 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9938
356
+ 2025-09-23 17:04:46,361 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.9938
357
+ 2025-09-23 17:04:51,818 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9938
358
+ 2025-09-23 17:04:57,347 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.9938
359
+ 2025-09-23 17:05:02,975 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0015 | Val mean-roc_auc_score: 0.9938
360
+ 2025-09-23 17:05:08,618 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0021 | Val mean-roc_auc_score: 0.9938
361
+ 2025-09-23 17:05:14,600 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0000 | Val mean-roc_auc_score: 0.9938
362
+ 2025-09-23 17:05:20,272 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9938
363
+ 2025-09-23 17:05:25,937 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.9938
364
+ 2025-09-23 17:05:31,308 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.9938
365
+ 2025-09-23 17:05:37,069 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9938
366
+ 2025-09-23 17:05:43,091 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9944
367
+ 2025-09-23 17:05:43,246 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Global step of best model: 3589
368
+ 2025-09-23 17:05:43,791 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Best model saved at epoch 97 with val mean-roc_auc_score: 0.9944
369
+ 2025-09-23 17:05:49,443 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.9944
370
+ 2025-09-23 17:05:55,126 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.9944
371
+ 2025-09-23 17:06:00,883 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.9944
372
+ 2025-09-23 17:06:01,737 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.9437
373
+ 2025-09-23 17:06:02,089 - logs_modchembert_clintox_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.9725, Std Dev: 0.0213
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_hiv_epochs100_batch_size32_20250923_153257.log ADDED
@@ -0,0 +1,331 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 15:32:57,085 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Running benchmark for dataset: hiv
2
+ 2025-09-23 15:32:57,085 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - dataset: hiv, tasks: ['HIV_active'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 15:32:57,090 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset hiv at 2025-09-23_15-32-57
4
+ 2025-09-23 15:34:19,613 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1204 | Val mean-roc_auc_score: 0.8254
5
+ 2025-09-23 15:34:19,613 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
6
+ 2025-09-23 15:34:20,144 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.8254
7
+ 2025-09-23 15:35:46,004 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0920 | Val mean-roc_auc_score: 0.8208
8
+ 2025-09-23 15:37:11,944 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1173 | Val mean-roc_auc_score: 0.8369
9
+ 2025-09-23 15:37:12,108 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 3081
10
+ 2025-09-23 15:37:12,659 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.8369
11
+ 2025-09-23 15:38:39,216 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0732 | Val mean-roc_auc_score: 0.8483
12
+ 2025-09-23 15:38:39,363 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 4108
13
+ 2025-09-23 15:38:39,888 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.8483
14
+ 2025-09-23 15:40:07,310 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0516 | Val mean-roc_auc_score: 0.8387
15
+ 2025-09-23 15:41:34,106 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0484 | Val mean-roc_auc_score: 0.8457
16
+ 2025-09-23 15:43:01,785 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0463 | Val mean-roc_auc_score: 0.8315
17
+ 2025-09-23 15:44:28,844 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0156 | Val mean-roc_auc_score: 0.8134
18
+ 2025-09-23 15:45:55,737 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0382 | Val mean-roc_auc_score: 0.8129
19
+ 2025-09-23 15:47:22,995 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0286 | Val mean-roc_auc_score: 0.8159
20
+ 2025-09-23 15:48:51,155 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7577
21
+ 2025-09-23 15:50:17,552 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0172 | Val mean-roc_auc_score: 0.7877
22
+ 2025-09-23 15:51:44,991 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0300 | Val mean-roc_auc_score: 0.7850
23
+ 2025-09-23 15:53:11,728 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0187 | Val mean-roc_auc_score: 0.7388
24
+ 2025-09-23 15:54:39,305 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7234
25
+ 2025-09-23 15:56:06,685 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0150 | Val mean-roc_auc_score: 0.7709
26
+ 2025-09-23 15:57:33,387 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7484
27
+ 2025-09-23 15:58:59,222 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7739
28
+ 2025-09-23 16:00:19,101 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7526
29
+ 2025-09-23 16:01:38,697 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0225 | Val mean-roc_auc_score: 0.7962
30
+ 2025-09-23 16:02:59,206 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7711
31
+ 2025-09-23 16:04:20,055 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7627
32
+ 2025-09-23 16:05:39,488 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0139 | Val mean-roc_auc_score: 0.7700
33
+ 2025-09-23 16:07:00,674 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0108 | Val mean-roc_auc_score: 0.7510
34
+ 2025-09-23 16:08:20,943 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7572
35
+ 2025-09-23 16:09:41,664 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7557
36
+ 2025-09-23 16:11:01,636 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7617
37
+ 2025-09-23 16:12:22,003 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7723
38
+ 2025-09-23 16:13:42,531 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7589
39
+ 2025-09-23 16:15:02,867 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0112 | Val mean-roc_auc_score: 0.7603
40
+ 2025-09-23 16:16:19,461 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7545
41
+ 2025-09-23 16:17:32,293 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7692
42
+ 2025-09-23 16:18:44,956 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0087 | Val mean-roc_auc_score: 0.7941
43
+ 2025-09-23 16:19:58,078 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0145 | Val mean-roc_auc_score: 0.7605
44
+ 2025-09-23 16:21:11,017 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7549
45
+ 2025-09-23 16:22:24,069 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7740
46
+ 2025-09-23 16:23:39,024 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7600
47
+ 2025-09-23 16:24:51,958 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7716
48
+ 2025-09-23 16:26:05,044 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7574
49
+ 2025-09-23 16:27:18,181 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7646
50
+ 2025-09-23 16:28:31,100 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7592
51
+ 2025-09-23 16:29:44,149 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7556
52
+ 2025-09-23 16:30:58,079 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.7565
53
+ 2025-09-23 16:32:11,795 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0036 | Val mean-roc_auc_score: 0.7547
54
+ 2025-09-23 16:33:24,871 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7580
55
+ 2025-09-23 16:34:34,899 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7559
56
+ 2025-09-23 16:35:44,407 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7575
57
+ 2025-09-23 16:36:54,048 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7552
58
+ 2025-09-23 16:38:03,911 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7487
59
+ 2025-09-23 16:39:12,629 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7475
60
+ 2025-09-23 16:40:22,714 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7472
61
+ 2025-09-23 16:41:32,433 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7583
62
+ 2025-09-23 16:42:41,844 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7601
63
+ 2025-09-23 16:43:50,380 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7526
64
+ 2025-09-23 16:44:59,893 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7468
65
+ 2025-09-23 16:46:09,091 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7546
66
+ 2025-09-23 16:47:23,347 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7396
67
+ 2025-09-23 16:48:51,091 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7516
68
+ 2025-09-23 16:50:19,890 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7510
69
+ 2025-09-23 16:51:48,730 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7543
70
+ 2025-09-23 16:53:18,464 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7394
71
+ 2025-09-23 16:54:47,899 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7509
72
+ 2025-09-23 16:56:16,978 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.7474
73
+ 2025-09-23 16:57:45,893 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7561
74
+ 2025-09-23 16:59:14,691 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7468
75
+ 2025-09-23 17:00:44,442 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7469
76
+ 2025-09-23 17:02:13,591 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7421
77
+ 2025-09-23 17:03:41,864 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0017 | Val mean-roc_auc_score: 0.7423
78
+ 2025-09-23 17:05:10,526 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7504
79
+ 2025-09-23 17:06:38,758 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0031 | Val mean-roc_auc_score: 0.7481
80
+ 2025-09-23 17:08:07,514 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7432
81
+ 2025-09-23 17:09:37,298 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7413
82
+ 2025-09-23 17:11:06,651 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7566
83
+ 2025-09-23 17:12:36,313 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0033 | Val mean-roc_auc_score: 0.7547
84
+ 2025-09-23 17:14:05,895 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7699
85
+ 2025-09-23 17:15:35,703 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7608
86
+ 2025-09-23 17:17:05,389 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7509
87
+ 2025-09-23 17:18:33,800 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7544
88
+ 2025-09-23 17:20:02,870 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7478
89
+ 2025-09-23 17:21:32,544 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7533
90
+ 2025-09-23 17:23:01,616 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7553
91
+ 2025-09-23 17:24:29,836 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7694
92
+ 2025-09-23 17:25:58,687 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7631
93
+ 2025-09-23 17:27:27,924 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7546
94
+ 2025-09-23 17:28:56,019 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7514
95
+ 2025-09-23 17:30:25,061 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7488
96
+ 2025-09-23 17:31:55,210 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7501
97
+ 2025-09-23 17:33:24,426 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7607
98
+ 2025-09-23 17:34:53,423 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.7540
99
+ 2025-09-23 17:36:22,022 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7520
100
+ 2025-09-23 17:37:51,816 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7472
101
+ 2025-09-23 17:39:21,252 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7637
102
+ 2025-09-23 17:40:50,045 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7576
103
+ 2025-09-23 17:42:02,532 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7476
104
+ 2025-09-23 17:43:02,921 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7812
105
+ 2025-09-23 17:44:03,844 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7716
106
+ 2025-09-23 17:45:05,041 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0050 | Val mean-roc_auc_score: 0.7693
107
+ 2025-09-23 17:46:05,500 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7666
108
+ 2025-09-23 17:47:06,610 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0037 | Val mean-roc_auc_score: 0.7629
109
+ 2025-09-23 17:48:07,479 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7577
110
+ 2025-09-23 17:48:11,176 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7837
111
+ 2025-09-23 17:48:11,645 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset hiv at 2025-09-23_17-48-11
112
+ 2025-09-23 17:49:08,509 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1169 | Val mean-roc_auc_score: 0.8152
113
+ 2025-09-23 17:49:08,509 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
114
+ 2025-09-23 17:49:09,021 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.8152
115
+ 2025-09-23 17:50:10,026 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1001 | Val mean-roc_auc_score: 0.8315
116
+ 2025-09-23 17:50:10,163 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 2054
117
+ 2025-09-23 17:50:10,695 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.8315
118
+ 2025-09-23 17:51:11,338 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0590 | Val mean-roc_auc_score: 0.8272
119
+ 2025-09-23 17:52:12,005 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1245 | Val mean-roc_auc_score: 0.8339
120
+ 2025-09-23 17:52:12,155 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 4108
121
+ 2025-09-23 17:52:12,690 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.8339
122
+ 2025-09-23 17:53:13,789 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0790 | Val mean-roc_auc_score: 0.8173
123
+ 2025-09-23 17:54:14,273 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0441 | Val mean-roc_auc_score: 0.8433
124
+ 2025-09-23 17:54:14,871 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 6162
125
+ 2025-09-23 17:54:15,409 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.8433
126
+ 2025-09-23 17:55:15,938 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0562 | Val mean-roc_auc_score: 0.8204
127
+ 2025-09-23 17:56:16,165 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0234 | Val mean-roc_auc_score: 0.7905
128
+ 2025-09-23 17:57:16,712 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0334 | Val mean-roc_auc_score: 0.8001
129
+ 2025-09-23 17:58:17,279 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0344 | Val mean-roc_auc_score: 0.8020
130
+ 2025-09-23 17:59:18,531 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0126 | Val mean-roc_auc_score: 0.7389
131
+ 2025-09-23 18:00:19,536 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0203 | Val mean-roc_auc_score: 0.8060
132
+ 2025-09-23 18:01:19,881 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0152 | Val mean-roc_auc_score: 0.7259
133
+ 2025-09-23 18:02:21,078 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0169 | Val mean-roc_auc_score: 0.7829
134
+ 2025-09-23 18:03:21,346 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.8041
135
+ 2025-09-23 18:04:22,955 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0151 | Val mean-roc_auc_score: 0.7620
136
+ 2025-09-23 18:05:24,531 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0177 | Val mean-roc_auc_score: 0.7868
137
+ 2025-09-23 18:06:25,117 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0161 | Val mean-roc_auc_score: 0.7834
138
+ 2025-09-23 18:07:26,354 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7541
139
+ 2025-09-23 18:08:27,032 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.7164
140
+ 2025-09-23 18:09:28,219 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7754
141
+ 2025-09-23 18:10:29,425 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7299
142
+ 2025-09-23 18:11:30,126 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0141 | Val mean-roc_auc_score: 0.7370
143
+ 2025-09-23 18:12:31,008 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7276
144
+ 2025-09-23 18:13:31,611 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7128
145
+ 2025-09-23 18:14:32,740 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0083 | Val mean-roc_auc_score: 0.7102
146
+ 2025-09-23 18:15:33,742 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0079 | Val mean-roc_auc_score: 0.7094
147
+ 2025-09-23 18:16:34,159 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0179 | Val mean-roc_auc_score: 0.7395
148
+ 2025-09-23 18:17:35,042 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7702
149
+ 2025-09-23 18:18:35,847 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0152 | Val mean-roc_auc_score: 0.7094
150
+ 2025-09-23 18:19:37,221 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7011
151
+ 2025-09-23 18:20:38,121 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7313
152
+ 2025-09-23 18:21:38,550 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7133
153
+ 2025-09-23 18:22:39,433 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0094 | Val mean-roc_auc_score: 0.6993
154
+ 2025-09-23 18:23:40,199 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0137 | Val mean-roc_auc_score: 0.7236
155
+ 2025-09-23 18:24:41,196 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7025
156
+ 2025-09-23 18:25:43,385 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.6775
157
+ 2025-09-23 18:26:44,514 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7229
158
+ 2025-09-23 18:27:45,684 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7203
159
+ 2025-09-23 18:28:46,010 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.6948
160
+ 2025-09-23 18:29:47,232 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0011 | Val mean-roc_auc_score: 0.7252
161
+ 2025-09-23 18:30:48,780 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7146
162
+ 2025-09-23 18:31:49,611 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0089 | Val mean-roc_auc_score: 0.7187
163
+ 2025-09-23 18:32:51,012 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7256
164
+ 2025-09-23 18:33:51,967 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7098
165
+ 2025-09-23 18:34:53,202 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7162
166
+ 2025-09-23 18:35:54,168 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7068
167
+ 2025-09-23 18:36:54,832 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7074
168
+ 2025-09-23 18:37:56,389 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7250
169
+ 2025-09-23 18:38:57,069 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0128 | Val mean-roc_auc_score: 0.7370
170
+ 2025-09-23 18:39:58,059 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7390
171
+ 2025-09-23 18:40:59,252 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7291
172
+ 2025-09-23 18:41:59,825 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7242
173
+ 2025-09-23 18:43:01,212 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7056
174
+ 2025-09-23 18:44:01,927 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.6897
175
+ 2025-09-23 18:45:03,037 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0019 | Val mean-roc_auc_score: 0.7191
176
+ 2025-09-23 18:46:04,519 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0097 | Val mean-roc_auc_score: 0.6933
177
+ 2025-09-23 18:47:05,315 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7272
178
+ 2025-09-23 18:48:06,539 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7175
179
+ 2025-09-23 18:49:07,192 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7150
180
+ 2025-09-23 18:50:08,202 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7210
181
+ 2025-09-23 18:51:09,716 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.7388
182
+ 2025-09-23 18:52:10,590 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0002 | Val mean-roc_auc_score: 0.7251
183
+ 2025-09-23 18:53:11,954 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7205
184
+ 2025-09-23 18:54:12,855 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0026 | Val mean-roc_auc_score: 0.7195
185
+ 2025-09-23 18:55:15,013 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7149
186
+ 2025-09-23 18:56:16,307 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7152
187
+ 2025-09-23 18:57:16,796 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7020
188
+ 2025-09-23 18:58:18,088 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7183
189
+ 2025-09-23 18:59:18,424 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0039 | Val mean-roc_auc_score: 0.7015
190
+ 2025-09-23 19:00:19,536 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.6950
191
+ 2025-09-23 19:01:20,990 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7118
192
+ 2025-09-23 19:02:21,355 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.6883
193
+ 2025-09-23 19:03:22,468 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7360
194
+ 2025-09-23 19:04:24,045 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7152
195
+ 2025-09-23 19:05:25,225 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.6928
196
+ 2025-09-23 19:06:26,410 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.6990
197
+ 2025-09-23 19:07:27,086 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7093
198
+ 2025-09-23 19:08:26,398 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7139
199
+ 2025-09-23 19:09:25,397 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7414
200
+ 2025-09-23 19:10:24,810 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7085
201
+ 2025-09-23 19:11:24,294 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7153
202
+ 2025-09-23 19:12:22,936 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7044
203
+ 2025-09-23 19:13:22,067 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7290
204
+ 2025-09-23 19:14:20,903 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7357
205
+ 2025-09-23 19:15:20,617 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7252
206
+ 2025-09-23 19:16:19,791 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7222
207
+ 2025-09-23 19:17:18,801 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7084
208
+ 2025-09-23 19:18:18,029 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0001 | Val mean-roc_auc_score: 0.7137
209
+ 2025-09-23 19:19:17,247 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7113
210
+ 2025-09-23 19:20:16,728 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0023 | Val mean-roc_auc_score: 0.7061
211
+ 2025-09-23 19:21:16,009 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.6913
212
+ 2025-09-23 19:22:14,869 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7150
213
+ 2025-09-23 19:23:14,192 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7181
214
+ 2025-09-23 19:24:13,134 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7182
215
+ 2025-09-23 19:25:12,704 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7044
216
+ 2025-09-23 19:26:11,849 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7202
217
+ 2025-09-23 19:27:10,675 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7237
218
+ 2025-09-23 19:28:10,332 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7127
219
+ 2025-09-23 19:29:09,229 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7289
220
+ 2025-09-23 19:29:12,417 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7736
221
+ 2025-09-23 19:29:13,065 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset hiv at 2025-09-23_19-29-13
222
+ 2025-09-23 19:30:08,859 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1019 | Val mean-roc_auc_score: 0.7988
223
+ 2025-09-23 19:30:08,860 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 1027
224
+ 2025-09-23 19:30:09,374 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7988
225
+ 2025-09-23 19:31:08,578 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0972 | Val mean-roc_auc_score: 0.8244
226
+ 2025-09-23 19:31:08,716 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 2054
227
+ 2025-09-23 19:31:09,237 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.8244
228
+ 2025-09-23 19:32:08,195 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1173 | Val mean-roc_auc_score: 0.8269
229
+ 2025-09-23 19:32:08,336 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 3081
230
+ 2025-09-23 19:32:08,857 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.8269
231
+ 2025-09-23 19:33:07,539 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0354 | Val mean-roc_auc_score: 0.8350
232
+ 2025-09-23 19:33:07,680 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Global step of best model: 4108
233
+ 2025-09-23 19:33:08,201 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.8350
234
+ 2025-09-23 19:34:07,608 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0585 | Val mean-roc_auc_score: 0.8290
235
+ 2025-09-23 19:35:06,621 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0557 | Val mean-roc_auc_score: 0.8254
236
+ 2025-09-23 19:36:06,172 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0363 | Val mean-roc_auc_score: 0.8126
237
+ 2025-09-23 19:37:04,976 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0371 | Val mean-roc_auc_score: 0.8049
238
+ 2025-09-23 19:38:04,234 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0276 | Val mean-roc_auc_score: 0.8242
239
+ 2025-09-23 19:39:03,135 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0229 | Val mean-roc_auc_score: 0.8090
240
+ 2025-09-23 19:40:02,616 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0274 | Val mean-roc_auc_score: 0.7805
241
+ 2025-09-23 19:41:02,345 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0116 | Val mean-roc_auc_score: 0.7489
242
+ 2025-09-23 19:42:01,106 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0222 | Val mean-roc_auc_score: 0.7778
243
+ 2025-09-23 19:43:00,600 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0127 | Val mean-roc_auc_score: 0.7222
244
+ 2025-09-23 19:43:59,653 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7647
245
+ 2025-09-23 19:44:59,081 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0160 | Val mean-roc_auc_score: 0.7850
246
+ 2025-09-23 19:45:58,443 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0180 | Val mean-roc_auc_score: 0.7703
247
+ 2025-09-23 19:46:57,353 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.7379
248
+ 2025-09-23 19:47:56,918 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7277
249
+ 2025-09-23 19:48:56,033 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0120 | Val mean-roc_auc_score: 0.7273
250
+ 2025-09-23 19:49:55,532 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7433
251
+ 2025-09-23 19:50:54,860 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7345
252
+ 2025-09-23 19:51:53,917 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0142 | Val mean-roc_auc_score: 0.7613
253
+ 2025-09-23 19:52:53,402 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7450
254
+ 2025-09-23 19:53:52,503 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0134 | Val mean-roc_auc_score: 0.7792
255
+ 2025-09-23 19:54:52,110 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7080
256
+ 2025-09-23 19:55:51,660 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7414
257
+ 2025-09-23 19:56:50,512 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7499
258
+ 2025-09-23 19:57:49,775 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7515
259
+ 2025-09-23 19:58:48,722 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7394
260
+ 2025-09-23 19:59:47,946 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7169
261
+ 2025-09-23 20:00:47,486 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7095
262
+ 2025-09-23 20:01:46,840 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7325
263
+ 2025-09-23 20:02:46,533 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7367
264
+ 2025-09-23 20:03:45,599 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7050
265
+ 2025-09-23 20:04:45,131 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7210
266
+ 2025-09-23 20:05:45,627 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7084
267
+ 2025-09-23 20:06:44,768 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7134
268
+ 2025-09-23 20:07:44,396 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7098
269
+ 2025-09-23 20:08:43,348 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.6826
270
+ 2025-09-23 20:09:42,947 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0016 | Val mean-roc_auc_score: 0.7208
271
+ 2025-09-23 20:10:42,630 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7261
272
+ 2025-09-23 20:11:41,568 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0030 | Val mean-roc_auc_score: 0.7062
273
+ 2025-09-23 20:12:41,067 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7272
274
+ 2025-09-23 20:13:39,867 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0014 | Val mean-roc_auc_score: 0.7086
275
+ 2025-09-23 20:14:39,573 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7192
276
+ 2025-09-23 20:15:39,119 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7375
277
+ 2025-09-23 20:16:37,966 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7323
278
+ 2025-09-23 20:17:37,649 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7341
279
+ 2025-09-23 20:18:36,585 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7280
280
+ 2025-09-23 20:19:36,163 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7224
281
+ 2025-09-23 20:20:35,689 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0013 | Val mean-roc_auc_score: 0.7529
282
+ 2025-09-23 20:21:34,699 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7411
283
+ 2025-09-23 20:22:34,578 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7238
284
+ 2025-09-23 20:23:33,554 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7136
285
+ 2025-09-23 20:24:33,066 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0025 | Val mean-roc_auc_score: 0.6897
286
+ 2025-09-23 20:25:32,388 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0027 | Val mean-roc_auc_score: 0.7002
287
+ 2025-09-23 20:26:31,191 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7196
288
+ 2025-09-23 20:27:30,751 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7478
289
+ 2025-09-23 20:28:29,628 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0100 | Val mean-roc_auc_score: 0.7394
290
+ 2025-09-23 20:29:29,186 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7191
291
+ 2025-09-23 20:30:28,827 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7189
292
+ 2025-09-23 20:31:27,723 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0005 | Val mean-roc_auc_score: 0.7146
293
+ 2025-09-23 20:32:27,389 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7103
294
+ 2025-09-23 20:33:26,142 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7261
295
+ 2025-09-23 20:34:25,730 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7115
296
+ 2025-09-23 20:35:25,307 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0076 | Val mean-roc_auc_score: 0.7173
297
+ 2025-09-23 20:36:24,177 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7132
298
+ 2025-09-23 20:37:23,796 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7063
299
+ 2025-09-23 20:38:22,267 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.6928
300
+ 2025-09-23 20:39:19,887 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7194
301
+ 2025-09-23 20:40:17,262 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0020 | Val mean-roc_auc_score: 0.7022
302
+ 2025-09-23 20:41:13,554 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7108
303
+ 2025-09-23 20:42:10,432 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7174
304
+ 2025-09-23 20:43:07,475 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0116 | Val mean-roc_auc_score: 0.7123
305
+ 2025-09-23 20:44:04,226 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0032 | Val mean-roc_auc_score: 0.7098
306
+ 2025-09-23 20:45:00,792 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0022 | Val mean-roc_auc_score: 0.7000
307
+ 2025-09-23 20:45:56,860 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0142 | Val mean-roc_auc_score: 0.7147
308
+ 2025-09-23 20:46:53,649 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0018 | Val mean-roc_auc_score: 0.7172
309
+ 2025-09-23 20:47:49,737 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7130
310
+ 2025-09-23 20:48:46,485 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7181
311
+ 2025-09-23 20:49:43,309 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0086 | Val mean-roc_auc_score: 0.7092
312
+ 2025-09-23 20:50:39,283 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0034 | Val mean-roc_auc_score: 0.7016
313
+ 2025-09-23 20:51:36,108 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7167
314
+ 2025-09-23 20:52:32,109 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7079
315
+ 2025-09-23 20:53:28,802 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7081
316
+ 2025-09-23 20:54:25,532 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7325
317
+ 2025-09-23 20:55:21,433 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0029 | Val mean-roc_auc_score: 0.7087
318
+ 2025-09-23 20:56:18,326 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7128
319
+ 2025-09-23 20:57:14,337 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7070
320
+ 2025-09-23 20:58:11,046 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7064
321
+ 2025-09-23 20:59:07,841 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7093
322
+ 2025-09-23 21:00:03,838 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0009 | Val mean-roc_auc_score: 0.7151
323
+ 2025-09-23 21:01:00,612 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0028 | Val mean-roc_auc_score: 0.7155
324
+ 2025-09-23 21:01:56,548 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7335
325
+ 2025-09-23 21:02:53,377 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7400
326
+ 2025-09-23 21:03:50,186 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0024 | Val mean-roc_auc_score: 0.7243
327
+ 2025-09-23 21:04:46,377 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0040 | Val mean-roc_auc_score: 0.7309
328
+ 2025-09-23 21:05:43,105 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7152
329
+ 2025-09-23 21:06:39,201 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7565
330
+ 2025-09-23 21:06:42,287 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7738
331
+ 2025-09-23 21:06:43,090 - logs_modchembert_hiv_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7770, Std Dev: 0.0047
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_sider_epochs100_batch_size32_20250923_170602.log ADDED
@@ -0,0 +1,355 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 17:06:02,090 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Running benchmark for dataset: sider
2
+ 2025-09-23 17:06:02,091 - logs_modchembert_sider_epochs100_batch_size32 - INFO - dataset: sider, tasks: ['Hepatobiliary disorders', 'Metabolism and nutrition disorders', 'Product issues', 'Eye disorders', 'Investigations', 'Musculoskeletal and connective tissue disorders', 'Gastrointestinal disorders', 'Social circumstances', 'Immune system disorders', 'Reproductive system and breast disorders', 'Neoplasms benign, malignant and unspecified (incl cysts and polyps)', 'General disorders and administration site conditions', 'Endocrine disorders', 'Surgical and medical procedures', 'Vascular disorders', 'Blood and lymphatic system disorders', 'Skin and subcutaneous tissue disorders', 'Congenital, familial and genetic disorders', 'Infections and infestations', 'Respiratory, thoracic and mediastinal disorders', 'Psychiatric disorders', 'Renal and urinary disorders', 'Pregnancy, puerperium and perinatal conditions', 'Ear and labyrinth disorders', 'Cardiac disorders', 'Nervous system disorders', 'Injury, poisoning and procedural complications'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 17:06:02,104 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset sider at 2025-09-23_17-06-02
4
+ 2025-09-23 17:06:07,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5286 | Val mean-roc_auc_score: 0.5518
5
+ 2025-09-23 17:06:07,163 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
6
+ 2025-09-23 17:06:07,687 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5518
7
+ 2025-09-23 17:06:13,174 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5071 | Val mean-roc_auc_score: 0.5893
8
+ 2025-09-23 17:06:13,349 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
9
+ 2025-09-23 17:06:13,886 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5893
10
+ 2025-09-23 17:06:19,231 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.4906 | Val mean-roc_auc_score: 0.5857
11
+ 2025-09-23 17:06:24,534 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4750 | Val mean-roc_auc_score: 0.6008
12
+ 2025-09-23 17:06:24,714 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
13
+ 2025-09-23 17:06:25,236 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.6008
14
+ 2025-09-23 17:06:30,403 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4571 | Val mean-roc_auc_score: 0.6049
15
+ 2025-09-23 17:06:30,583 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 175
16
+ 2025-09-23 17:06:31,104 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val mean-roc_auc_score: 0.6049
17
+ 2025-09-23 17:06:36,469 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4156 | Val mean-roc_auc_score: 0.6089
18
+ 2025-09-23 17:06:36,989 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 210
19
+ 2025-09-23 17:06:37,568 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.6089
20
+ 2025-09-23 17:06:42,937 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.3875 | Val mean-roc_auc_score: 0.6101
21
+ 2025-09-23 17:06:43,122 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 245
22
+ 2025-09-23 17:06:43,661 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.6101
23
+ 2025-09-23 17:06:49,054 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.3571 | Val mean-roc_auc_score: 0.6182
24
+ 2025-09-23 17:06:49,252 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 280
25
+ 2025-09-23 17:06:49,835 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.6182
26
+ 2025-09-23 17:06:54,700 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3417 | Val mean-roc_auc_score: 0.6189
27
+ 2025-09-23 17:06:54,898 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 315
28
+ 2025-09-23 17:06:55,468 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val mean-roc_auc_score: 0.6189
29
+ 2025-09-23 17:07:00,441 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3089 | Val mean-roc_auc_score: 0.6268
30
+ 2025-09-23 17:07:00,629 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 350
31
+ 2025-09-23 17:07:01,211 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.6268
32
+ 2025-09-23 17:07:06,906 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.2911 | Val mean-roc_auc_score: 0.6187
33
+ 2025-09-23 17:07:12,592 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.2687 | Val mean-roc_auc_score: 0.6189
34
+ 2025-09-23 17:07:17,953 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.2500 | Val mean-roc_auc_score: 0.6071
35
+ 2025-09-23 17:07:23,414 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.2411 | Val mean-roc_auc_score: 0.6066
36
+ 2025-09-23 17:07:28,879 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.2263 | Val mean-roc_auc_score: 0.6128
37
+ 2025-09-23 17:07:34,358 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2062 | Val mean-roc_auc_score: 0.6020
38
+ 2025-09-23 17:07:40,187 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1902 | Val mean-roc_auc_score: 0.5981
39
+ 2025-09-23 17:07:45,436 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.1802 | Val mean-roc_auc_score: 0.6105
40
+ 2025-09-23 17:07:50,740 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.1661 | Val mean-roc_auc_score: 0.6068
41
+ 2025-09-23 17:07:55,957 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.1545 | Val mean-roc_auc_score: 0.6048
42
+ 2025-09-23 17:08:01,217 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.1536 | Val mean-roc_auc_score: 0.6045
43
+ 2025-09-23 17:08:07,007 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.1393 | Val mean-roc_auc_score: 0.6091
44
+ 2025-09-23 17:08:12,485 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.1492 | Val mean-roc_auc_score: 0.6033
45
+ 2025-09-23 17:08:17,930 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.1286 | Val mean-roc_auc_score: 0.6081
46
+ 2025-09-23 17:08:22,791 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.1205 | Val mean-roc_auc_score: 0.6006
47
+ 2025-09-23 17:08:27,640 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.1141 | Val mean-roc_auc_score: 0.6016
48
+ 2025-09-23 17:08:33,199 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1125 | Val mean-roc_auc_score: 0.5960
49
+ 2025-09-23 17:08:38,411 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.1076 | Val mean-roc_auc_score: 0.6001
50
+ 2025-09-23 17:08:44,918 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.1073 | Val mean-roc_auc_score: 0.5938
51
+ 2025-09-23 17:08:50,310 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0978 | Val mean-roc_auc_score: 0.6004
52
+ 2025-09-23 17:08:55,792 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0929 | Val mean-roc_auc_score: 0.6067
53
+ 2025-09-23 17:09:01,514 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0934 | Val mean-roc_auc_score: 0.6013
54
+ 2025-09-23 17:09:07,047 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0888 | Val mean-roc_auc_score: 0.6039
55
+ 2025-09-23 17:09:12,555 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0835 | Val mean-roc_auc_score: 0.6021
56
+ 2025-09-23 17:09:17,912 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0831 | Val mean-roc_auc_score: 0.6067
57
+ 2025-09-23 17:09:23,357 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0777 | Val mean-roc_auc_score: 0.6021
58
+ 2025-09-23 17:09:29,028 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0772 | Val mean-roc_auc_score: 0.6011
59
+ 2025-09-23 17:09:34,436 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0740 | Val mean-roc_auc_score: 0.6051
60
+ 2025-09-23 17:09:39,820 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0710 | Val mean-roc_auc_score: 0.6032
61
+ 2025-09-23 17:09:45,215 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0696 | Val mean-roc_auc_score: 0.6027
62
+ 2025-09-23 17:09:50,102 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0692 | Val mean-roc_auc_score: 0.6026
63
+ 2025-09-23 17:09:55,364 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0683 | Val mean-roc_auc_score: 0.6040
64
+ 2025-09-23 17:10:00,277 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0734 | Val mean-roc_auc_score: 0.6020
65
+ 2025-09-23 17:10:05,696 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0647 | Val mean-roc_auc_score: 0.5992
66
+ 2025-09-23 17:10:11,111 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0625 | Val mean-roc_auc_score: 0.6043
67
+ 2025-09-23 17:10:16,541 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0578 | Val mean-roc_auc_score: 0.6046
68
+ 2025-09-23 17:10:22,070 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0607 | Val mean-roc_auc_score: 0.5993
69
+ 2025-09-23 17:10:27,283 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0598 | Val mean-roc_auc_score: 0.6011
70
+ 2025-09-23 17:10:32,546 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0612 | Val mean-roc_auc_score: 0.6010
71
+ 2025-09-23 17:10:37,806 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0565 | Val mean-roc_auc_score: 0.5995
72
+ 2025-09-23 17:10:43,222 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0565 | Val mean-roc_auc_score: 0.5989
73
+ 2025-09-23 17:10:48,891 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0594 | Val mean-roc_auc_score: 0.6012
74
+ 2025-09-23 17:10:54,313 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0547 | Val mean-roc_auc_score: 0.6004
75
+ 2025-09-23 17:10:59,724 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0540 | Val mean-roc_auc_score: 0.6015
76
+ 2025-09-23 17:11:05,202 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0528 | Val mean-roc_auc_score: 0.5976
77
+ 2025-09-23 17:11:10,800 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0513 | Val mean-roc_auc_score: 0.5978
78
+ 2025-09-23 17:11:16,570 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0516 | Val mean-roc_auc_score: 0.5991
79
+ 2025-09-23 17:11:22,683 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0516 | Val mean-roc_auc_score: 0.6009
80
+ 2025-09-23 17:11:27,516 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0498 | Val mean-roc_auc_score: 0.5967
81
+ 2025-09-23 17:11:32,828 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0480 | Val mean-roc_auc_score: 0.5974
82
+ 2025-09-23 17:11:38,123 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0478 | Val mean-roc_auc_score: 0.6003
83
+ 2025-09-23 17:11:43,713 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0469 | Val mean-roc_auc_score: 0.5995
84
+ 2025-09-23 17:11:49,187 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0488 | Val mean-roc_auc_score: 0.5986
85
+ 2025-09-23 17:11:54,593 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0467 | Val mean-roc_auc_score: 0.5978
86
+ 2025-09-23 17:12:00,053 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0460 | Val mean-roc_auc_score: 0.5980
87
+ 2025-09-23 17:12:05,453 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0465 | Val mean-roc_auc_score: 0.5969
88
+ 2025-09-23 17:12:11,267 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.5954
89
+ 2025-09-23 17:12:16,769 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0453 | Val mean-roc_auc_score: 0.5949
90
+ 2025-09-23 17:12:22,167 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0464 | Val mean-roc_auc_score: 0.5950
91
+ 2025-09-23 17:12:27,623 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.5949
92
+ 2025-09-23 17:12:33,107 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0433 | Val mean-roc_auc_score: 0.5972
93
+ 2025-09-23 17:12:38,886 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0428 | Val mean-roc_auc_score: 0.5973
94
+ 2025-09-23 17:12:44,364 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0420 | Val mean-roc_auc_score: 0.5982
95
+ 2025-09-23 17:12:49,148 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0417 | Val mean-roc_auc_score: 0.5984
96
+ 2025-09-23 17:12:54,117 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0419 | Val mean-roc_auc_score: 0.5947
97
+ 2025-09-23 17:12:59,331 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0415 | Val mean-roc_auc_score: 0.5997
98
+ 2025-09-23 17:13:04,931 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0406 | Val mean-roc_auc_score: 0.5980
99
+ 2025-09-23 17:13:10,198 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0406 | Val mean-roc_auc_score: 0.5975
100
+ 2025-09-23 17:13:15,621 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0402 | Val mean-roc_auc_score: 0.5969
101
+ 2025-09-23 17:13:21,051 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0395 | Val mean-roc_auc_score: 0.5970
102
+ 2025-09-23 17:13:26,409 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0404 | Val mean-roc_auc_score: 0.5973
103
+ 2025-09-23 17:13:32,213 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0395 | Val mean-roc_auc_score: 0.5994
104
+ 2025-09-23 17:13:37,709 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0432 | Val mean-roc_auc_score: 0.5983
105
+ 2025-09-23 17:13:43,209 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.5991
106
+ 2025-09-23 17:13:48,637 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.5992
107
+ 2025-09-23 17:13:55,366 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.6005
108
+ 2025-09-23 17:14:01,128 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.5988
109
+ 2025-09-23 17:14:06,627 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0371 | Val mean-roc_auc_score: 0.5982
110
+ 2025-09-23 17:14:11,505 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.5963
111
+ 2025-09-23 17:14:16,452 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.5977
112
+ 2025-09-23 17:14:21,777 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0377 | Val mean-roc_auc_score: 0.5978
113
+ 2025-09-23 17:14:27,574 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0389 | Val mean-roc_auc_score: 0.5982
114
+ 2025-09-23 17:14:33,034 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0366 | Val mean-roc_auc_score: 0.5947
115
+ 2025-09-23 17:14:38,489 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0357 | Val mean-roc_auc_score: 0.5962
116
+ 2025-09-23 17:14:43,946 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0355 | Val mean-roc_auc_score: 0.5973
117
+ 2025-09-23 17:14:49,336 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0355 | Val mean-roc_auc_score: 0.5987
118
+ 2025-09-23 17:14:55,069 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0357 | Val mean-roc_auc_score: 0.5967
119
+ 2025-09-23 17:15:00,432 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0354 | Val mean-roc_auc_score: 0.5967
120
+ 2025-09-23 17:15:05,637 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0350 | Val mean-roc_auc_score: 0.5960
121
+ 2025-09-23 17:15:10,897 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0350 | Val mean-roc_auc_score: 0.5994
122
+ 2025-09-23 17:15:11,739 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6532
123
+ 2025-09-23 17:15:12,096 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset sider at 2025-09-23_17-15-12
124
+ 2025-09-23 17:15:16,945 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5357 | Val mean-roc_auc_score: 0.5486
125
+ 2025-09-23 17:15:16,945 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
126
+ 2025-09-23 17:15:17,469 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5486
127
+ 2025-09-23 17:15:22,889 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5000 | Val mean-roc_auc_score: 0.5611
128
+ 2025-09-23 17:15:23,066 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
129
+ 2025-09-23 17:15:23,600 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5611
130
+ 2025-09-23 17:15:29,088 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.4813 | Val mean-roc_auc_score: 0.5886
131
+ 2025-09-23 17:15:29,269 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 105
132
+ 2025-09-23 17:15:29,804 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.5886
133
+ 2025-09-23 17:15:35,369 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4750 | Val mean-roc_auc_score: 0.6082
134
+ 2025-09-23 17:15:35,582 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
135
+ 2025-09-23 17:15:36,127 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.6082
136
+ 2025-09-23 17:15:41,644 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4518 | Val mean-roc_auc_score: 0.6042
137
+ 2025-09-23 17:15:46,746 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4156 | Val mean-roc_auc_score: 0.6075
138
+ 2025-09-23 17:15:52,904 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.3911 | Val mean-roc_auc_score: 0.6218
139
+ 2025-09-23 17:15:53,050 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 245
140
+ 2025-09-23 17:15:53,603 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val mean-roc_auc_score: 0.6218
141
+ 2025-09-23 17:15:58,935 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.3661 | Val mean-roc_auc_score: 0.6377
142
+ 2025-09-23 17:15:59,124 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 280
143
+ 2025-09-23 17:15:59,670 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.6377
144
+ 2025-09-23 17:16:05,064 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3417 | Val mean-roc_auc_score: 0.6193
145
+ 2025-09-23 17:16:10,544 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3125 | Val mean-roc_auc_score: 0.6188
146
+ 2025-09-23 17:16:16,054 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.2946 | Val mean-roc_auc_score: 0.6294
147
+ 2025-09-23 17:16:21,998 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.2781 | Val mean-roc_auc_score: 0.6084
148
+ 2025-09-23 17:16:27,422 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.2571 | Val mean-roc_auc_score: 0.5993
149
+ 2025-09-23 17:16:32,718 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.2464 | Val mean-roc_auc_score: 0.6213
150
+ 2025-09-23 17:16:37,929 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.2275 | Val mean-roc_auc_score: 0.6036
151
+ 2025-09-23 17:16:43,145 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2107 | Val mean-roc_auc_score: 0.6138
152
+ 2025-09-23 17:16:48,761 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1964 | Val mean-roc_auc_score: 0.6161
153
+ 2025-09-23 17:16:54,166 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.1844 | Val mean-roc_auc_score: 0.6186
154
+ 2025-09-23 17:16:59,585 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.1723 | Val mean-roc_auc_score: 0.6215
155
+ 2025-09-23 17:17:04,455 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.1607 | Val mean-roc_auc_score: 0.6216
156
+ 2025-09-23 17:17:09,680 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.1545 | Val mean-roc_auc_score: 0.6280
157
+ 2025-09-23 17:17:14,831 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.1473 | Val mean-roc_auc_score: 0.6266
158
+ 2025-09-23 17:17:20,323 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.1375 | Val mean-roc_auc_score: 0.6247
159
+ 2025-09-23 17:17:25,510 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.1295 | Val mean-roc_auc_score: 0.6240
160
+ 2025-09-23 17:17:31,052 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.1205 | Val mean-roc_auc_score: 0.6206
161
+ 2025-09-23 17:17:36,421 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.1203 | Val mean-roc_auc_score: 0.6252
162
+ 2025-09-23 17:17:42,111 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1138 | Val mean-roc_auc_score: 0.6180
163
+ 2025-09-23 17:17:47,631 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.1071 | Val mean-roc_auc_score: 0.6223
164
+ 2025-09-23 17:17:54,258 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.1062 | Val mean-roc_auc_score: 0.6171
165
+ 2025-09-23 17:17:59,467 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0996 | Val mean-roc_auc_score: 0.6173
166
+ 2025-09-23 17:18:04,677 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0964 | Val mean-roc_auc_score: 0.6158
167
+ 2025-09-23 17:18:10,258 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0918 | Val mean-roc_auc_score: 0.6208
168
+ 2025-09-23 17:18:15,558 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0875 | Val mean-roc_auc_score: 0.6154
169
+ 2025-09-23 17:18:21,015 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0839 | Val mean-roc_auc_score: 0.6200
170
+ 2025-09-23 17:18:26,410 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0844 | Val mean-roc_auc_score: 0.6130
171
+ 2025-09-23 17:18:31,774 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0817 | Val mean-roc_auc_score: 0.6135
172
+ 2025-09-23 17:18:37,648 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0781 | Val mean-roc_auc_score: 0.6130
173
+ 2025-09-23 17:18:42,575 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0771 | Val mean-roc_auc_score: 0.6120
174
+ 2025-09-23 17:18:48,069 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0737 | Val mean-roc_auc_score: 0.6207
175
+ 2025-09-23 17:18:53,480 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0719 | Val mean-roc_auc_score: 0.6152
176
+ 2025-09-23 17:18:58,882 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0701 | Val mean-roc_auc_score: 0.6131
177
+ 2025-09-23 17:19:04,576 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0683 | Val mean-roc_auc_score: 0.6160
178
+ 2025-09-23 17:19:09,933 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0625 | Val mean-roc_auc_score: 0.6146
179
+ 2025-09-23 17:19:15,429 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0638 | Val mean-roc_auc_score: 0.6143
180
+ 2025-09-23 17:19:20,882 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0647 | Val mean-roc_auc_score: 0.6188
181
+ 2025-09-23 17:19:26,264 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0621 | Val mean-roc_auc_score: 0.6165
182
+ 2025-09-23 17:19:31,856 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0621 | Val mean-roc_auc_score: 0.6161
183
+ 2025-09-23 17:19:37,075 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0598 | Val mean-roc_auc_score: 0.6161
184
+ 2025-09-23 17:19:42,431 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0589 | Val mean-roc_auc_score: 0.6131
185
+ 2025-09-23 17:19:47,980 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0585 | Val mean-roc_auc_score: 0.6148
186
+ 2025-09-23 17:19:53,499 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0580 | Val mean-roc_auc_score: 0.6146
187
+ 2025-09-23 17:19:59,290 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0574 | Val mean-roc_auc_score: 0.6126
188
+ 2025-09-23 17:20:04,705 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0554 | Val mean-roc_auc_score: 0.6135
189
+ 2025-09-23 17:20:09,543 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0549 | Val mean-roc_auc_score: 0.6154
190
+ 2025-09-23 17:20:14,795 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0537 | Val mean-roc_auc_score: 0.6123
191
+ 2025-09-23 17:20:20,370 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0536 | Val mean-roc_auc_score: 0.6113
192
+ 2025-09-23 17:20:26,285 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0518 | Val mean-roc_auc_score: 0.6135
193
+ 2025-09-23 17:20:32,976 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0518 | Val mean-roc_auc_score: 0.6165
194
+ 2025-09-23 17:20:38,390 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0502 | Val mean-roc_auc_score: 0.6145
195
+ 2025-09-23 17:20:43,875 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0504 | Val mean-roc_auc_score: 0.6133
196
+ 2025-09-23 17:20:49,235 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0491 | Val mean-roc_auc_score: 0.6146
197
+ 2025-09-23 17:20:55,018 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0489 | Val mean-roc_auc_score: 0.6174
198
+ 2025-09-23 17:21:00,401 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0471 | Val mean-roc_auc_score: 0.6160
199
+ 2025-09-23 17:21:05,832 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0475 | Val mean-roc_auc_score: 0.6183
200
+ 2025-09-23 17:21:11,237 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0473 | Val mean-roc_auc_score: 0.6167
201
+ 2025-09-23 17:21:16,493 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0471 | Val mean-roc_auc_score: 0.6128
202
+ 2025-09-23 17:21:21,641 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0464 | Val mean-roc_auc_score: 0.6132
203
+ 2025-09-23 17:21:26,948 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0471 | Val mean-roc_auc_score: 0.6137
204
+ 2025-09-23 17:21:32,225 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0448 | Val mean-roc_auc_score: 0.6165
205
+ 2025-09-23 17:21:37,137 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.6138
206
+ 2025-09-23 17:21:42,511 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0437 | Val mean-roc_auc_score: 0.6159
207
+ 2025-09-23 17:21:48,056 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0439 | Val mean-roc_auc_score: 0.6141
208
+ 2025-09-23 17:21:53,418 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0444 | Val mean-roc_auc_score: 0.6189
209
+ 2025-09-23 17:21:58,812 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0440 | Val mean-roc_auc_score: 0.6126
210
+ 2025-09-23 17:22:04,279 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0431 | Val mean-roc_auc_score: 0.6151
211
+ 2025-09-23 17:22:09,824 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0431 | Val mean-roc_auc_score: 0.6123
212
+ 2025-09-23 17:22:15,675 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0422 | Val mean-roc_auc_score: 0.6166
213
+ 2025-09-23 17:22:21,203 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0424 | Val mean-roc_auc_score: 0.6155
214
+ 2025-09-23 17:22:26,629 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0413 | Val mean-roc_auc_score: 0.6153
215
+ 2025-09-23 17:22:32,010 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0408 | Val mean-roc_auc_score: 0.6128
216
+ 2025-09-23 17:22:37,515 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0404 | Val mean-roc_auc_score: 0.6152
217
+ 2025-09-23 17:22:43,219 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0404 | Val mean-roc_auc_score: 0.6152
218
+ 2025-09-23 17:22:48,088 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0385 | Val mean-roc_auc_score: 0.6136
219
+ 2025-09-23 17:22:53,401 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0397 | Val mean-roc_auc_score: 0.6140
220
+ 2025-09-23 17:22:58,712 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0393 | Val mean-roc_auc_score: 0.6124
221
+ 2025-09-23 17:23:04,936 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0387 | Val mean-roc_auc_score: 0.6116
222
+ 2025-09-23 17:23:10,612 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0397 | Val mean-roc_auc_score: 0.6116
223
+ 2025-09-23 17:23:15,999 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0393 | Val mean-roc_auc_score: 0.6132
224
+ 2025-09-23 17:23:21,399 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0383 | Val mean-roc_auc_score: 0.6121
225
+ 2025-09-23 17:23:26,823 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.6120
226
+ 2025-09-23 17:23:32,280 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0384 | Val mean-roc_auc_score: 0.6121
227
+ 2025-09-23 17:23:37,966 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0355 | Val mean-roc_auc_score: 0.6125
228
+ 2025-09-23 17:23:43,485 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0377 | Val mean-roc_auc_score: 0.6156
229
+ 2025-09-23 17:23:48,893 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0375 | Val mean-roc_auc_score: 0.6131
230
+ 2025-09-23 17:23:54,311 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0377 | Val mean-roc_auc_score: 0.6132
231
+ 2025-09-23 17:23:59,814 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.6121
232
+ 2025-09-23 17:24:05,636 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0368 | Val mean-roc_auc_score: 0.6145
233
+ 2025-09-23 17:24:11,005 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0357 | Val mean-roc_auc_score: 0.6118
234
+ 2025-09-23 17:24:16,221 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0366 | Val mean-roc_auc_score: 0.6100
235
+ 2025-09-23 17:24:21,446 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0364 | Val mean-roc_auc_score: 0.6127
236
+ 2025-09-23 17:24:22,292 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6391
237
+ 2025-09-23 17:24:22,654 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset sider at 2025-09-23_17-24-22
238
+ 2025-09-23 17:24:27,418 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5321 | Val mean-roc_auc_score: 0.5332
239
+ 2025-09-23 17:24:27,426 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 35
240
+ 2025-09-23 17:24:27,980 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.5332
241
+ 2025-09-23 17:24:33,124 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.5071 | Val mean-roc_auc_score: 0.5585
242
+ 2025-09-23 17:24:33,301 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 70
243
+ 2025-09-23 17:24:33,837 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.5585
244
+ 2025-09-23 17:24:39,415 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.4906 | Val mean-roc_auc_score: 0.5797
245
+ 2025-09-23 17:24:39,594 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 105
246
+ 2025-09-23 17:24:40,131 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.5797
247
+ 2025-09-23 17:24:45,539 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.4750 | Val mean-roc_auc_score: 0.6014
248
+ 2025-09-23 17:24:45,723 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 140
249
+ 2025-09-23 17:24:46,257 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.6014
250
+ 2025-09-23 17:24:51,609 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.4500 | Val mean-roc_auc_score: 0.5968
251
+ 2025-09-23 17:24:57,013 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.4062 | Val mean-roc_auc_score: 0.6113
252
+ 2025-09-23 17:24:57,521 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 210
253
+ 2025-09-23 17:24:58,051 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val mean-roc_auc_score: 0.6113
254
+ 2025-09-23 17:25:03,322 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.3786 | Val mean-roc_auc_score: 0.6089
255
+ 2025-09-23 17:25:08,523 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.3607 | Val mean-roc_auc_score: 0.6114
256
+ 2025-09-23 17:25:08,712 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 280
257
+ 2025-09-23 17:25:09,245 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val mean-roc_auc_score: 0.6114
258
+ 2025-09-23 17:25:14,489 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.3354 | Val mean-roc_auc_score: 0.6209
259
+ 2025-09-23 17:25:14,680 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 315
260
+ 2025-09-23 17:25:15,219 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val mean-roc_auc_score: 0.6209
261
+ 2025-09-23 17:25:20,690 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.3143 | Val mean-roc_auc_score: 0.6226
262
+ 2025-09-23 17:25:20,872 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Global step of best model: 350
263
+ 2025-09-23 17:25:21,395 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val mean-roc_auc_score: 0.6226
264
+ 2025-09-23 17:25:26,763 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.3000 | Val mean-roc_auc_score: 0.6121
265
+ 2025-09-23 17:25:32,552 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.2797 | Val mean-roc_auc_score: 0.6147
266
+ 2025-09-23 17:25:37,440 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.2589 | Val mean-roc_auc_score: 0.6093
267
+ 2025-09-23 17:25:42,876 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.2446 | Val mean-roc_auc_score: 0.6143
268
+ 2025-09-23 17:25:48,254 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.2313 | Val mean-roc_auc_score: 0.6032
269
+ 2025-09-23 17:25:53,722 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.2143 | Val mean-roc_auc_score: 0.6052
270
+ 2025-09-23 17:25:58,870 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.2018 | Val mean-roc_auc_score: 0.6119
271
+ 2025-09-23 17:26:03,753 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.1885 | Val mean-roc_auc_score: 0.6110
272
+ 2025-09-23 17:26:09,156 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.1777 | Val mean-roc_auc_score: 0.6066
273
+ 2025-09-23 17:26:14,575 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.1688 | Val mean-roc_auc_score: 0.6028
274
+ 2025-09-23 17:26:19,927 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.1580 | Val mean-roc_auc_score: 0.6047
275
+ 2025-09-23 17:26:25,567 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.1545 | Val mean-roc_auc_score: 0.6061
276
+ 2025-09-23 17:26:30,895 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.1430 | Val mean-roc_auc_score: 0.6077
277
+ 2025-09-23 17:26:36,170 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.1357 | Val mean-roc_auc_score: 0.6136
278
+ 2025-09-23 17:26:41,404 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.1268 | Val mean-roc_auc_score: 0.6158
279
+ 2025-09-23 17:26:46,906 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.1305 | Val mean-roc_auc_score: 0.6082
280
+ 2025-09-23 17:26:52,736 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1187 | Val mean-roc_auc_score: 0.6110
281
+ 2025-09-23 17:26:58,294 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.1143 | Val mean-roc_auc_score: 0.6071
282
+ 2025-09-23 17:27:04,701 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.1073 | Val mean-roc_auc_score: 0.6093
283
+ 2025-09-23 17:27:10,175 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.1040 | Val mean-roc_auc_score: 0.6118
284
+ 2025-09-23 17:27:15,995 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.1013 | Val mean-roc_auc_score: 0.6160
285
+ 2025-09-23 17:27:21,745 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0969 | Val mean-roc_auc_score: 0.6108
286
+ 2025-09-23 17:27:26,442 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0929 | Val mean-roc_auc_score: 0.6098
287
+ 2025-09-23 17:27:31,601 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0897 | Val mean-roc_auc_score: 0.6172
288
+ 2025-09-23 17:27:37,123 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0862 | Val mean-roc_auc_score: 0.6117
289
+ 2025-09-23 17:27:42,613 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0835 | Val mean-roc_auc_score: 0.6129
290
+ 2025-09-23 17:27:48,414 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0821 | Val mean-roc_auc_score: 0.6061
291
+ 2025-09-23 17:27:53,689 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0797 | Val mean-roc_auc_score: 0.6160
292
+ 2025-09-23 17:27:58,914 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0754 | Val mean-roc_auc_score: 0.6119
293
+ 2025-09-23 17:28:04,178 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0746 | Val mean-roc_auc_score: 0.6125
294
+ 2025-09-23 17:28:09,331 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0741 | Val mean-roc_auc_score: 0.6136
295
+ 2025-09-23 17:28:15,028 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0728 | Val mean-roc_auc_score: 0.6158
296
+ 2025-09-23 17:28:20,448 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0633 | Val mean-roc_auc_score: 0.6134
297
+ 2025-09-23 17:28:25,846 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0674 | Val mean-roc_auc_score: 0.6182
298
+ 2025-09-23 17:28:31,184 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0665 | Val mean-roc_auc_score: 0.6153
299
+ 2025-09-23 17:28:36,749 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0633 | Val mean-roc_auc_score: 0.6152
300
+ 2025-09-23 17:28:42,464 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0647 | Val mean-roc_auc_score: 0.6154
301
+ 2025-09-23 17:28:47,931 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0616 | Val mean-roc_auc_score: 0.6154
302
+ 2025-09-23 17:28:52,870 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0641 | Val mean-roc_auc_score: 0.6130
303
+ 2025-09-23 17:28:57,791 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0616 | Val mean-roc_auc_score: 0.6148
304
+ 2025-09-23 17:29:03,081 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0594 | Val mean-roc_auc_score: 0.6125
305
+ 2025-09-23 17:29:08,926 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0617 | Val mean-roc_auc_score: 0.6176
306
+ 2025-09-23 17:29:14,450 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0569 | Val mean-roc_auc_score: 0.6135
307
+ 2025-09-23 17:29:19,979 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0560 | Val mean-roc_auc_score: 0.6149
308
+ 2025-09-23 17:29:25,450 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0550 | Val mean-roc_auc_score: 0.6174
309
+ 2025-09-23 17:29:30,983 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0554 | Val mean-roc_auc_score: 0.6153
310
+ 2025-09-23 17:29:36,780 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0545 | Val mean-roc_auc_score: 0.6140
311
+ 2025-09-23 17:29:43,415 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0542 | Val mean-roc_auc_score: 0.6128
312
+ 2025-09-23 17:29:48,812 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0527 | Val mean-roc_auc_score: 0.6142
313
+ 2025-09-23 17:29:53,752 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0513 | Val mean-roc_auc_score: 0.6175
314
+ 2025-09-23 17:29:59,123 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0507 | Val mean-roc_auc_score: 0.6138
315
+ 2025-09-23 17:30:04,725 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0507 | Val mean-roc_auc_score: 0.6129
316
+ 2025-09-23 17:30:09,934 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0504 | Val mean-roc_auc_score: 0.6140
317
+ 2025-09-23 17:30:15,211 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0491 | Val mean-roc_auc_score: 0.6138
318
+ 2025-09-23 17:30:19,894 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0489 | Val mean-roc_auc_score: 0.6134
319
+ 2025-09-23 17:30:24,902 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0494 | Val mean-roc_auc_score: 0.6158
320
+ 2025-09-23 17:30:30,807 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0478 | Val mean-roc_auc_score: 0.6158
321
+ 2025-09-23 17:30:36,260 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0467 | Val mean-roc_auc_score: 0.6162
322
+ 2025-09-23 17:30:41,750 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0479 | Val mean-roc_auc_score: 0.6160
323
+ 2025-09-23 17:30:47,166 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0462 | Val mean-roc_auc_score: 0.6154
324
+ 2025-09-23 17:30:52,539 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0458 | Val mean-roc_auc_score: 0.6133
325
+ 2025-09-23 17:30:58,344 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0480 | Val mean-roc_auc_score: 0.6160
326
+ 2025-09-23 17:31:03,800 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0446 | Val mean-roc_auc_score: 0.6136
327
+ 2025-09-23 17:31:09,386 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0442 | Val mean-roc_auc_score: 0.6136
328
+ 2025-09-23 17:31:14,864 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0444 | Val mean-roc_auc_score: 0.6157
329
+ 2025-09-23 17:31:19,940 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0433 | Val mean-roc_auc_score: 0.6177
330
+ 2025-09-23 17:31:25,711 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0435 | Val mean-roc_auc_score: 0.6164
331
+ 2025-09-23 17:31:30,965 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0432 | Val mean-roc_auc_score: 0.6157
332
+ 2025-09-23 17:31:36,222 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0426 | Val mean-roc_auc_score: 0.6152
333
+ 2025-09-23 17:31:41,592 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0417 | Val mean-roc_auc_score: 0.6146
334
+ 2025-09-23 17:31:46,467 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0431 | Val mean-roc_auc_score: 0.6170
335
+ 2025-09-23 17:31:51,713 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0415 | Val mean-roc_auc_score: 0.6150
336
+ 2025-09-23 17:31:57,022 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0461 | Val mean-roc_auc_score: 0.6149
337
+ 2025-09-23 17:32:02,257 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0420 | Val mean-roc_auc_score: 0.6153
338
+ 2025-09-23 17:32:07,541 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0415 | Val mean-roc_auc_score: 0.6158
339
+ 2025-09-23 17:32:14,162 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0402 | Val mean-roc_auc_score: 0.6161
340
+ 2025-09-23 17:32:19,967 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0400 | Val mean-roc_auc_score: 0.6147
341
+ 2025-09-23 17:32:25,401 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0404 | Val mean-roc_auc_score: 0.6152
342
+ 2025-09-23 17:32:30,824 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0391 | Val mean-roc_auc_score: 0.6152
343
+ 2025-09-23 17:32:36,312 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0395 | Val mean-roc_auc_score: 0.6138
344
+ 2025-09-23 17:32:41,808 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0388 | Val mean-roc_auc_score: 0.6138
345
+ 2025-09-23 17:32:47,615 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0393 | Val mean-roc_auc_score: 0.6130
346
+ 2025-09-23 17:32:53,135 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0393 | Val mean-roc_auc_score: 0.6152
347
+ 2025-09-23 17:32:58,504 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.6146
348
+ 2025-09-23 17:33:03,838 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0386 | Val mean-roc_auc_score: 0.6133
349
+ 2025-09-23 17:33:09,250 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0384 | Val mean-roc_auc_score: 0.6131
350
+ 2025-09-23 17:33:14,451 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0375 | Val mean-roc_auc_score: 0.6139
351
+ 2025-09-23 17:33:19,130 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0375 | Val mean-roc_auc_score: 0.6157
352
+ 2025-09-23 17:33:24,585 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0368 | Val mean-roc_auc_score: 0.6142
353
+ 2025-09-23 17:33:30,178 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0379 | Val mean-roc_auc_score: 0.6144
354
+ 2025-09-23 17:33:31,026 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.6704
355
+ 2025-09-23 17:33:31,420 - logs_modchembert_sider_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.6542, Std Dev: 0.0128
logs_modchembert_classification_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_tox21_epochs100_batch_size32_20250923_173331.log ADDED
@@ -0,0 +1,329 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 17:33:31,421 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Running benchmark for dataset: tox21
2
+ 2025-09-23 17:33:31,421 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - dataset: tox21, tasks: ['NR-AR', 'NR-AR-LBD', 'NR-AhR', 'NR-Aromatase', 'NR-ER', 'NR-ER-LBD', 'NR-PPAR-gamma', 'SR-ARE', 'SR-ATAD5', 'SR-HSE', 'SR-MMP', 'SR-p53'], epochs: 100, learning rate: 3e-05
3
+ 2025-09-23 17:33:31,434 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset tox21 at 2025-09-23_17-33-31
4
+ 2025-09-23 17:33:49,274 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1732 | Val mean-roc_auc_score: 0.7399
5
+ 2025-09-23 17:33:49,274 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
6
+ 2025-09-23 17:33:49,817 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7399
7
+ 2025-09-23 17:34:08,656 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1617 | Val mean-roc_auc_score: 0.7591
8
+ 2025-09-23 17:34:08,831 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
9
+ 2025-09-23 17:34:09,357 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7591
10
+ 2025-09-23 17:34:28,061 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1435 | Val mean-roc_auc_score: 0.7756
11
+ 2025-09-23 17:34:28,238 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
12
+ 2025-09-23 17:34:28,808 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7756
13
+ 2025-09-23 17:34:46,151 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1376 | Val mean-roc_auc_score: 0.7742
14
+ 2025-09-23 17:35:04,403 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1297 | Val mean-roc_auc_score: 0.7677
15
+ 2025-09-23 17:35:24,388 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1168 | Val mean-roc_auc_score: 0.7702
16
+ 2025-09-23 17:35:43,450 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1042 | Val mean-roc_auc_score: 0.7673
17
+ 2025-09-23 17:36:02,744 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0919 | Val mean-roc_auc_score: 0.7590
18
+ 2025-09-23 17:36:20,575 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0830 | Val mean-roc_auc_score: 0.7518
19
+ 2025-09-23 17:36:39,536 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0708 | Val mean-roc_auc_score: 0.7424
20
+ 2025-09-23 17:36:59,351 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0617 | Val mean-roc_auc_score: 0.7377
21
+ 2025-09-23 17:37:18,505 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0508 | Val mean-roc_auc_score: 0.7406
22
+ 2025-09-23 17:37:37,738 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.7371
23
+ 2025-09-23 17:37:55,810 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0426 | Val mean-roc_auc_score: 0.7341
24
+ 2025-09-23 17:38:14,583 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0383 | Val mean-roc_auc_score: 0.7319
25
+ 2025-09-23 17:38:33,975 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0315 | Val mean-roc_auc_score: 0.7300
26
+ 2025-09-23 17:38:53,414 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0342 | Val mean-roc_auc_score: 0.7300
27
+ 2025-09-23 17:39:11,797 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0289 | Val mean-roc_auc_score: 0.7295
28
+ 2025-09-23 17:39:30,697 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0257 | Val mean-roc_auc_score: 0.7291
29
+ 2025-09-23 17:39:49,962 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0234 | Val mean-roc_auc_score: 0.7324
30
+ 2025-09-23 17:40:09,476 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0245 | Val mean-roc_auc_score: 0.7342
31
+ 2025-09-23 17:40:28,571 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0186 | Val mean-roc_auc_score: 0.7326
32
+ 2025-09-23 17:40:46,010 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0166 | Val mean-roc_auc_score: 0.7353
33
+ 2025-09-23 17:41:04,301 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0222 | Val mean-roc_auc_score: 0.7287
34
+ 2025-09-23 17:41:23,175 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0155 | Val mean-roc_auc_score: 0.7321
35
+ 2025-09-23 17:41:43,245 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0158 | Val mean-roc_auc_score: 0.7291
36
+ 2025-09-23 17:42:02,563 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0145 | Val mean-roc_auc_score: 0.7293
37
+ 2025-09-23 17:42:20,433 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0129 | Val mean-roc_auc_score: 0.7288
38
+ 2025-09-23 17:42:39,583 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0146 | Val mean-roc_auc_score: 0.7305
39
+ 2025-09-23 17:42:57,914 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7284
40
+ 2025-09-23 17:43:17,482 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0116 | Val mean-roc_auc_score: 0.7260
41
+ 2025-09-23 17:43:35,436 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7259
42
+ 2025-09-23 17:43:54,426 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7254
43
+ 2025-09-23 17:44:13,441 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0121 | Val mean-roc_auc_score: 0.7281
44
+ 2025-09-23 17:44:32,250 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7258
45
+ 2025-09-23 17:44:51,847 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7285
46
+ 2025-09-23 17:45:09,494 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7281
47
+ 2025-09-23 17:45:28,291 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7288
48
+ 2025-09-23 17:45:46,101 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7291
49
+ 2025-09-23 17:46:05,226 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7282
50
+ 2025-09-23 17:46:25,057 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7278
51
+ 2025-09-23 17:46:43,455 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7268
52
+ 2025-09-23 17:47:02,018 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7259
53
+ 2025-09-23 17:47:20,369 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0085 | Val mean-roc_auc_score: 0.7251
54
+ 2025-09-23 17:47:39,327 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7275
55
+ 2025-09-23 17:47:58,972 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0094 | Val mean-roc_auc_score: 0.7294
56
+ 2025-09-23 17:48:17,315 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7273
57
+ 2025-09-23 17:48:35,694 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0082 | Val mean-roc_auc_score: 0.7256
58
+ 2025-09-23 17:48:54,651 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0095 | Val mean-roc_auc_score: 0.7276
59
+ 2025-09-23 17:49:13,636 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7276
60
+ 2025-09-23 17:49:31,401 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0088 | Val mean-roc_auc_score: 0.7272
61
+ 2025-09-23 17:49:52,219 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7260
62
+ 2025-09-23 17:50:09,993 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7264
63
+ 2025-09-23 17:50:28,936 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7254
64
+ 2025-09-23 17:50:47,589 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7279
65
+ 2025-09-23 17:51:04,557 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0080 | Val mean-roc_auc_score: 0.7239
66
+ 2025-09-23 17:51:24,268 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0072 | Val mean-roc_auc_score: 0.7250
67
+ 2025-09-23 17:51:43,286 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7261
68
+ 2025-09-23 17:52:02,417 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7227
69
+ 2025-09-23 17:52:20,604 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7257
70
+ 2025-09-23 17:52:38,288 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7261
71
+ 2025-09-23 17:52:58,498 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7266
72
+ 2025-09-23 17:53:17,499 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7252
73
+ 2025-09-23 17:53:36,444 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7248
74
+ 2025-09-23 17:53:53,924 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7227
75
+ 2025-09-23 17:54:12,894 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7234
76
+ 2025-09-23 17:54:32,758 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7244
77
+ 2025-09-23 17:54:51,410 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7246
78
+ 2025-09-23 17:55:10,308 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7231
79
+ 2025-09-23 17:55:27,397 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7230
80
+ 2025-09-23 17:55:45,523 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7231
81
+ 2025-09-23 17:56:06,189 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7245
82
+ 2025-09-23 17:56:24,946 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7248
83
+ 2025-09-23 17:56:43,793 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7246
84
+ 2025-09-23 17:57:01,812 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7246
85
+ 2025-09-23 17:57:20,604 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7221
86
+ 2025-09-23 17:57:40,827 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7231
87
+ 2025-09-23 17:57:59,322 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7240
88
+ 2025-09-23 17:58:16,786 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7219
89
+ 2025-09-23 17:58:34,957 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7232
90
+ 2025-09-23 17:58:53,727 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7240
91
+ 2025-09-23 17:59:13,551 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7249
92
+ 2025-09-23 17:59:32,603 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7250
93
+ 2025-09-23 17:59:50,176 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7245
94
+ 2025-09-23 18:00:08,491 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7248
95
+ 2025-09-23 18:00:27,722 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7254
96
+ 2025-09-23 18:00:48,192 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7253
97
+ 2025-09-23 18:01:06,085 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7270
98
+ 2025-09-23 18:01:23,374 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0062 | Val mean-roc_auc_score: 0.7250
99
+ 2025-09-23 18:01:41,953 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7252
100
+ 2025-09-23 18:02:00,821 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7249
101
+ 2025-09-23 18:02:21,421 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7245
102
+ 2025-09-23 18:02:39,172 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7243
103
+ 2025-09-23 18:02:57,502 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7259
104
+ 2025-09-23 18:03:15,619 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7260
105
+ 2025-09-23 18:03:34,412 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7245
106
+ 2025-09-23 18:03:55,044 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7242
107
+ 2025-09-23 18:04:12,064 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0043 | Val mean-roc_auc_score: 0.7249
108
+ 2025-09-23 18:04:30,362 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7246
109
+ 2025-09-23 18:04:49,180 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7245
110
+ 2025-09-23 18:04:50,726 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7636
111
+ 2025-09-23 18:04:51,155 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset tox21 at 2025-09-23_18-04-51
112
+ 2025-09-23 18:05:08,649 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1745 | Val mean-roc_auc_score: 0.7520
113
+ 2025-09-23 18:05:08,649 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
114
+ 2025-09-23 18:05:09,176 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7520
115
+ 2025-09-23 18:05:27,941 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1583 | Val mean-roc_auc_score: 0.7595
116
+ 2025-09-23 18:05:28,109 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
117
+ 2025-09-23 18:05:28,633 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7595
118
+ 2025-09-23 18:05:46,375 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1449 | Val mean-roc_auc_score: 0.7720
119
+ 2025-09-23 18:05:46,548 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
120
+ 2025-09-23 18:05:47,077 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7720
121
+ 2025-09-23 18:06:05,622 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1310 | Val mean-roc_auc_score: 0.7539
122
+ 2025-09-23 18:06:24,225 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1266 | Val mean-roc_auc_score: 0.7647
123
+ 2025-09-23 18:06:44,308 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1176 | Val mean-roc_auc_score: 0.7575
124
+ 2025-09-23 18:07:02,227 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1011 | Val mean-roc_auc_score: 0.7458
125
+ 2025-09-23 18:07:21,228 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0873 | Val mean-roc_auc_score: 0.7509
126
+ 2025-09-23 18:07:39,881 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0771 | Val mean-roc_auc_score: 0.7495
127
+ 2025-09-23 18:07:59,102 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0648 | Val mean-roc_auc_score: 0.7486
128
+ 2025-09-23 18:08:18,694 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0622 | Val mean-roc_auc_score: 0.7457
129
+ 2025-09-23 18:08:36,225 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0508 | Val mean-roc_auc_score: 0.7368
130
+ 2025-09-23 18:08:54,890 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0449 | Val mean-roc_auc_score: 0.7290
131
+ 2025-09-23 18:09:13,086 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0421 | Val mean-roc_auc_score: 0.7355
132
+ 2025-09-23 18:09:31,792 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0359 | Val mean-roc_auc_score: 0.7323
133
+ 2025-09-23 18:09:51,253 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0321 | Val mean-roc_auc_score: 0.7277
134
+ 2025-09-23 18:10:09,484 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0275 | Val mean-roc_auc_score: 0.7304
135
+ 2025-09-23 18:10:27,859 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0264 | Val mean-roc_auc_score: 0.7241
136
+ 2025-09-23 18:10:46,512 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0254 | Val mean-roc_auc_score: 0.7251
137
+ 2025-09-23 18:11:05,654 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0228 | Val mean-roc_auc_score: 0.7215
138
+ 2025-09-23 18:11:24,445 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0210 | Val mean-roc_auc_score: 0.7310
139
+ 2025-09-23 18:11:43,944 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0201 | Val mean-roc_auc_score: 0.7266
140
+ 2025-09-23 18:12:02,660 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0171 | Val mean-roc_auc_score: 0.7241
141
+ 2025-09-23 18:12:20,899 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0151 | Val mean-roc_auc_score: 0.7259
142
+ 2025-09-23 18:12:39,585 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0158 | Val mean-roc_auc_score: 0.7263
143
+ 2025-09-23 18:12:58,391 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0147 | Val mean-roc_auc_score: 0.7230
144
+ 2025-09-23 18:13:17,116 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0144 | Val mean-roc_auc_score: 0.7246
145
+ 2025-09-23 18:13:36,230 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0138 | Val mean-roc_auc_score: 0.7230
146
+ 2025-09-23 18:13:54,986 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0126 | Val mean-roc_auc_score: 0.7204
147
+ 2025-09-23 18:14:13,469 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7197
148
+ 2025-09-23 18:14:32,259 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0125 | Val mean-roc_auc_score: 0.7215
149
+ 2025-09-23 18:14:51,629 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7214
150
+ 2025-09-23 18:15:10,608 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7216
151
+ 2025-09-23 18:15:29,062 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0117 | Val mean-roc_auc_score: 0.7176
152
+ 2025-09-23 18:15:46,538 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0105 | Val mean-roc_auc_score: 0.7168
153
+ 2025-09-23 18:16:06,538 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0100 | Val mean-roc_auc_score: 0.7186
154
+ 2025-09-23 18:16:25,699 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7225
155
+ 2025-09-23 18:16:44,564 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7200
156
+ 2025-09-23 18:17:02,581 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0105 | Val mean-roc_auc_score: 0.7187
157
+ 2025-09-23 18:17:19,434 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7194
158
+ 2025-09-23 18:17:39,478 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0102 | Val mean-roc_auc_score: 0.7207
159
+ 2025-09-23 18:17:58,075 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7158
160
+ 2025-09-23 18:18:17,033 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7180
161
+ 2025-09-23 18:18:35,575 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0092 | Val mean-roc_auc_score: 0.7209
162
+ 2025-09-23 18:18:52,896 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0115 | Val mean-roc_auc_score: 0.7194
163
+ 2025-09-23 18:19:12,569 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7177
164
+ 2025-09-23 18:19:31,960 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7184
165
+ 2025-09-23 18:19:50,511 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7192
166
+ 2025-09-23 18:20:08,839 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7186
167
+ 2025-09-23 18:20:27,224 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7189
168
+ 2025-09-23 18:20:45,961 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7187
169
+ 2025-09-23 18:21:05,782 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0078 | Val mean-roc_auc_score: 0.7191
170
+ 2025-09-23 18:21:24,157 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7185
171
+ 2025-09-23 18:21:41,696 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7180
172
+ 2025-09-23 18:22:00,666 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7206
173
+ 2025-09-23 18:22:19,416 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7198
174
+ 2025-09-23 18:22:39,789 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7190
175
+ 2025-09-23 18:22:57,978 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7202
176
+ 2025-09-23 18:23:15,061 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7201
177
+ 2025-09-23 18:23:33,251 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7205
178
+ 2025-09-23 18:23:52,230 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7183
179
+ 2025-09-23 18:24:12,564 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7199
180
+ 2025-09-23 18:24:30,983 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7195
181
+ 2025-09-23 18:24:49,350 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7189
182
+ 2025-09-23 18:25:08,266 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7189
183
+ 2025-09-23 18:25:26,551 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7183
184
+ 2025-09-23 18:25:46,988 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7189
185
+ 2025-09-23 18:26:04,714 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7181
186
+ 2025-09-23 18:26:23,572 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7181
187
+ 2025-09-23 18:26:42,780 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7175
188
+ 2025-09-23 18:27:01,295 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7193
189
+ 2025-09-23 18:27:21,276 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7181
190
+ 2025-09-23 18:27:38,906 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7178
191
+ 2025-09-23 18:27:57,580 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7177
192
+ 2025-09-23 18:28:15,962 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7166
193
+ 2025-09-23 18:28:34,335 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7174
194
+ 2025-09-23 18:28:54,206 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7175
195
+ 2025-09-23 18:29:13,121 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7180
196
+ 2025-09-23 18:29:31,947 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7174
197
+ 2025-09-23 18:29:50,278 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7169
198
+ 2025-09-23 18:30:08,753 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7182
199
+ 2025-09-23 18:30:27,894 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7176
200
+ 2025-09-23 18:30:46,318 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7175
201
+ 2025-09-23 18:31:04,566 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0056 | Val mean-roc_auc_score: 0.7174
202
+ 2025-09-23 18:31:23,142 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7172
203
+ 2025-09-23 18:31:42,386 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7171
204
+ 2025-09-23 18:32:01,584 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7190
205
+ 2025-09-23 18:32:20,595 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7168
206
+ 2025-09-23 18:32:38,679 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7170
207
+ 2025-09-23 18:32:57,067 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7188
208
+ 2025-09-23 18:33:15,614 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7151
209
+ 2025-09-23 18:33:35,968 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7185
210
+ 2025-09-23 18:33:55,012 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0041 | Val mean-roc_auc_score: 0.7179
211
+ 2025-09-23 18:34:13,472 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0044 | Val mean-roc_auc_score: 0.7168
212
+ 2025-09-23 18:34:31,821 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7173
213
+ 2025-09-23 18:34:48,901 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7170
214
+ 2025-09-23 18:35:08,668 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0038 | Val mean-roc_auc_score: 0.7168
215
+ 2025-09-23 18:35:27,429 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7183
216
+ 2025-09-23 18:35:46,285 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0054 | Val mean-roc_auc_score: 0.7167
217
+ 2025-09-23 18:36:05,274 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0047 | Val mean-roc_auc_score: 0.7159
218
+ 2025-09-23 18:36:06,786 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7604
219
+ 2025-09-23 18:36:07,249 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset tox21 at 2025-09-23_18-36-07
220
+ 2025-09-23 18:36:23,311 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.1771 | Val mean-roc_auc_score: 0.7512
221
+ 2025-09-23 18:36:23,311 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 196
222
+ 2025-09-23 18:36:23,878 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val mean-roc_auc_score: 0.7512
223
+ 2025-09-23 18:36:42,783 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1576 | Val mean-roc_auc_score: 0.7611
224
+ 2025-09-23 18:36:42,960 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 392
225
+ 2025-09-23 18:36:43,499 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val mean-roc_auc_score: 0.7611
226
+ 2025-09-23 18:37:02,046 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1477 | Val mean-roc_auc_score: 0.7746
227
+ 2025-09-23 18:37:02,241 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 588
228
+ 2025-09-23 18:37:02,777 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val mean-roc_auc_score: 0.7746
229
+ 2025-09-23 18:37:21,776 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1295 | Val mean-roc_auc_score: 0.7847
230
+ 2025-09-23 18:37:21,921 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Global step of best model: 784
231
+ 2025-09-23 18:37:22,444 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val mean-roc_auc_score: 0.7847
232
+ 2025-09-23 18:37:40,039 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1250 | Val mean-roc_auc_score: 0.7801
233
+ 2025-09-23 18:37:59,233 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1086 | Val mean-roc_auc_score: 0.7778
234
+ 2025-09-23 18:38:18,367 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0998 | Val mean-roc_auc_score: 0.7567
235
+ 2025-09-23 18:38:37,112 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0924 | Val mean-roc_auc_score: 0.7563
236
+ 2025-09-23 18:38:55,965 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0757 | Val mean-roc_auc_score: 0.7505
237
+ 2025-09-23 18:39:12,972 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0664 | Val mean-roc_auc_score: 0.7460
238
+ 2025-09-23 18:39:32,424 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0552 | Val mean-roc_auc_score: 0.7444
239
+ 2025-09-23 18:39:51,384 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0481 | Val mean-roc_auc_score: 0.7498
240
+ 2025-09-23 18:40:10,527 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0439 | Val mean-roc_auc_score: 0.7386
241
+ 2025-09-23 18:40:29,276 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0392 | Val mean-roc_auc_score: 0.7396
242
+ 2025-09-23 18:40:46,951 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0373 | Val mean-roc_auc_score: 0.7339
243
+ 2025-09-23 18:41:06,502 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0286 | Val mean-roc_auc_score: 0.7411
244
+ 2025-09-23 18:41:25,381 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0289 | Val mean-roc_auc_score: 0.7367
245
+ 2025-09-23 18:41:44,144 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0264 | Val mean-roc_auc_score: 0.7317
246
+ 2025-09-23 18:42:02,377 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0244 | Val mean-roc_auc_score: 0.7377
247
+ 2025-09-23 18:42:21,055 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0233 | Val mean-roc_auc_score: 0.7336
248
+ 2025-09-23 18:42:40,665 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0240 | Val mean-roc_auc_score: 0.7400
249
+ 2025-09-23 18:43:00,240 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0203 | Val mean-roc_auc_score: 0.7368
250
+ 2025-09-23 18:43:18,661 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0159 | Val mean-roc_auc_score: 0.7362
251
+ 2025-09-23 18:43:35,978 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0192 | Val mean-roc_auc_score: 0.7358
252
+ 2025-09-23 18:43:54,592 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0155 | Val mean-roc_auc_score: 0.7347
253
+ 2025-09-23 18:44:14,299 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0142 | Val mean-roc_auc_score: 0.7374
254
+ 2025-09-23 18:44:34,023 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0138 | Val mean-roc_auc_score: 0.7361
255
+ 2025-09-23 18:44:52,145 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.7385
256
+ 2025-09-23 18:45:08,680 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0131 | Val mean-roc_auc_score: 0.7388
257
+ 2025-09-23 18:45:27,546 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0135 | Val mean-roc_auc_score: 0.7351
258
+ 2025-09-23 18:45:47,566 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0135 | Val mean-roc_auc_score: 0.7311
259
+ 2025-09-23 18:46:06,924 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0133 | Val mean-roc_auc_score: 0.7366
260
+ 2025-09-23 18:46:24,239 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0110 | Val mean-roc_auc_score: 0.7360
261
+ 2025-09-23 18:46:42,412 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0109 | Val mean-roc_auc_score: 0.7359
262
+ 2025-09-23 18:47:00,676 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0104 | Val mean-roc_auc_score: 0.7348
263
+ 2025-09-23 18:47:20,696 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0096 | Val mean-roc_auc_score: 0.7328
264
+ 2025-09-23 18:47:40,147 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0099 | Val mean-roc_auc_score: 0.7316
265
+ 2025-09-23 18:47:56,970 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0107 | Val mean-roc_auc_score: 0.7330
266
+ 2025-09-23 18:48:15,123 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0091 | Val mean-roc_auc_score: 0.7333
267
+ 2025-09-23 18:48:33,797 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7335
268
+ 2025-09-23 18:48:54,183 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0101 | Val mean-roc_auc_score: 0.7336
269
+ 2025-09-23 18:49:13,175 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0103 | Val mean-roc_auc_score: 0.7325
270
+ 2025-09-23 18:49:30,873 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7323
271
+ 2025-09-23 18:49:49,778 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0090 | Val mean-roc_auc_score: 0.7335
272
+ 2025-09-23 18:50:08,137 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0084 | Val mean-roc_auc_score: 0.7355
273
+ 2025-09-23 18:50:28,306 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0077 | Val mean-roc_auc_score: 0.7325
274
+ 2025-09-23 18:50:46,880 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0071 | Val mean-roc_auc_score: 0.7320
275
+ 2025-09-23 18:51:04,753 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0093 | Val mean-roc_auc_score: 0.7329
276
+ 2025-09-23 18:51:23,550 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0066 | Val mean-roc_auc_score: 0.7327
277
+ 2025-09-23 18:51:41,812 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0073 | Val mean-roc_auc_score: 0.7290
278
+ 2025-09-23 18:52:00,300 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0074 | Val mean-roc_auc_score: 0.7346
279
+ 2025-09-23 18:52:19,858 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0081 | Val mean-roc_auc_score: 0.7343
280
+ 2025-09-23 18:52:38,498 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0075 | Val mean-roc_auc_score: 0.7335
281
+ 2025-09-23 18:52:57,683 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7336
282
+ 2025-09-23 18:53:16,024 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7330
283
+ 2025-09-23 18:53:34,294 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7346
284
+ 2025-09-23 18:53:53,370 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7338
285
+ 2025-09-23 18:54:12,198 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7347
286
+ 2025-09-23 18:54:31,226 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0067 | Val mean-roc_auc_score: 0.7348
287
+ 2025-09-23 18:54:50,302 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7346
288
+ 2025-09-23 18:55:08,088 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7351
289
+ 2025-09-23 18:55:27,599 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7357
290
+ 2025-09-23 18:55:45,975 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7346
291
+ 2025-09-23 18:56:04,721 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0070 | Val mean-roc_auc_score: 0.7333
292
+ 2025-09-23 18:56:23,264 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0064 | Val mean-roc_auc_score: 0.7321
293
+ 2025-09-23 18:56:41,452 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7331
294
+ 2025-09-23 18:57:02,278 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7336
295
+ 2025-09-23 18:57:20,516 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7327
296
+ 2025-09-23 18:57:39,571 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7346
297
+ 2025-09-23 18:57:57,919 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7325
298
+ 2025-09-23 18:58:15,674 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7311
299
+ 2025-09-23 18:58:36,416 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0063 | Val mean-roc_auc_score: 0.7312
300
+ 2025-09-23 18:58:55,518 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0068 | Val mean-roc_auc_score: 0.7332
301
+ 2025-09-23 18:59:13,810 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0065 | Val mean-roc_auc_score: 0.7345
302
+ 2025-09-23 18:59:31,905 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0059 | Val mean-roc_auc_score: 0.7352
303
+ 2025-09-23 18:59:48,937 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7341
304
+ 2025-09-23 19:00:09,265 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7364
305
+ 2025-09-23 19:00:28,137 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7356
306
+ 2025-09-23 19:00:46,606 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7361
307
+ 2025-09-23 19:01:04,823 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7342
308
+ 2025-09-23 19:01:23,100 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7362
309
+ 2025-09-23 19:01:44,054 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7341
310
+ 2025-09-23 19:02:02,210 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0051 | Val mean-roc_auc_score: 0.7343
311
+ 2025-09-23 19:02:21,204 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7340
312
+ 2025-09-23 19:02:38,914 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0060 | Val mean-roc_auc_score: 0.7314
313
+ 2025-09-23 19:02:57,701 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0053 | Val mean-roc_auc_score: 0.7340
314
+ 2025-09-23 19:03:18,543 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7341
315
+ 2025-09-23 19:03:36,948 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0061 | Val mean-roc_auc_score: 0.7345
316
+ 2025-09-23 19:03:55,031 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0055 | Val mean-roc_auc_score: 0.7352
317
+ 2025-09-23 19:04:11,858 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0048 | Val mean-roc_auc_score: 0.7351
318
+ 2025-09-23 19:04:30,112 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0046 | Val mean-roc_auc_score: 0.7341
319
+ 2025-09-23 19:04:50,608 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0042 | Val mean-roc_auc_score: 0.7349
320
+ 2025-09-23 19:05:09,141 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0045 | Val mean-roc_auc_score: 0.7339
321
+ 2025-09-23 19:05:27,468 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7324
322
+ 2025-09-23 19:05:45,882 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0052 | Val mean-roc_auc_score: 0.7347
323
+ 2025-09-23 19:06:04,908 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0058 | Val mean-roc_auc_score: 0.7336
324
+ 2025-09-23 19:06:24,522 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0069 | Val mean-roc_auc_score: 0.7315
325
+ 2025-09-23 19:06:43,316 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0035 | Val mean-roc_auc_score: 0.7334
326
+ 2025-09-23 19:07:00,711 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0057 | Val mean-roc_auc_score: 0.7339
327
+ 2025-09-23 19:07:19,539 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0049 | Val mean-roc_auc_score: 0.7328
328
+ 2025-09-23 19:07:21,101 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Test mean-roc_auc_score: 0.7697
329
+ 2025-09-23 19:07:21,601 - logs_modchembert_tox21_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg mean-roc_auc_score: 0.7646, Std Dev: 0.0039
logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_bace_regression_epochs100_batch_size32_20250923_153243.log ADDED
@@ -0,0 +1,327 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 15:32:43,020 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Running benchmark for dataset: bace_regression
2
+ 2025-09-23 15:32:43,020 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - dataset: bace_regression, tasks: ['pIC50'], epochs: 100, learning rate: 3e-05, transform: True
3
+ 2025-09-23 15:32:43,025 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset bace_regression at 2025-09-23_15-32-43
4
+ 2025-09-23 15:32:48,494 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.7632 | Val rms_score: 0.5624
5
+ 2025-09-23 15:32:48,494 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
6
+ 2025-09-23 15:32:49,035 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.5624
7
+ 2025-09-23 15:32:52,586 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3947 | Val rms_score: 0.6616
8
+ 2025-09-23 15:32:56,081 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3058 | Val rms_score: 0.7009
9
+ 2025-09-23 15:32:59,792 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2368 | Val rms_score: 0.7269
10
+ 2025-09-23 15:33:04,929 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2220 | Val rms_score: 0.6853
11
+ 2025-09-23 15:33:09,844 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1931 | Val rms_score: 0.7314
12
+ 2025-09-23 15:33:14,991 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1719 | Val rms_score: 0.6990
13
+ 2025-09-23 15:33:20,066 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1533 | Val rms_score: 0.5772
14
+ 2025-09-23 15:33:24,946 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1316 | Val rms_score: 0.6658
15
+ 2025-09-23 15:33:29,656 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1299 | Val rms_score: 0.7158
16
+ 2025-09-23 15:33:34,476 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1372 | Val rms_score: 0.5717
17
+ 2025-09-23 15:33:39,691 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1143 | Val rms_score: 0.7283
18
+ 2025-09-23 15:33:44,782 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0975 | Val rms_score: 0.7408
19
+ 2025-09-23 15:33:49,795 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1030 | Val rms_score: 0.7165
20
+ 2025-09-23 15:33:54,854 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0921 | Val rms_score: 0.6376
21
+ 2025-09-23 15:34:00,066 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0737 | Val rms_score: 0.7195
22
+ 2025-09-23 15:34:05,511 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0843 | Val rms_score: 0.6892
23
+ 2025-09-23 15:34:10,806 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0785 | Val rms_score: 0.7086
24
+ 2025-09-23 15:34:15,451 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0771 | Val rms_score: 0.6359
25
+ 2025-09-23 15:34:19,920 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0699 | Val rms_score: 0.6803
26
+ 2025-09-23 15:34:24,789 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0678 | Val rms_score: 0.5871
27
+ 2025-09-23 15:34:30,136 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0647 | Val rms_score: 0.6301
28
+ 2025-09-23 15:34:35,216 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0658 | Val rms_score: 0.7528
29
+ 2025-09-23 15:34:40,251 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0671 | Val rms_score: 0.6838
30
+ 2025-09-23 15:34:45,375 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0604 | Val rms_score: 0.6878
31
+ 2025-09-23 15:34:50,520 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0576 | Val rms_score: 0.6815
32
+ 2025-09-23 15:34:56,792 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0550 | Val rms_score: 0.7233
33
+ 2025-09-23 15:35:01,875 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0539 | Val rms_score: 0.6768
34
+ 2025-09-23 15:35:07,045 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0454 | Val rms_score: 0.7266
35
+ 2025-09-23 15:35:12,192 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0535 | Val rms_score: 0.6604
36
+ 2025-09-23 15:35:17,378 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0485 | Val rms_score: 0.6930
37
+ 2025-09-23 15:35:22,700 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0417 | Val rms_score: 0.6955
38
+ 2025-09-23 15:35:27,733 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0440 | Val rms_score: 0.6438
39
+ 2025-09-23 15:35:32,770 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0430 | Val rms_score: 0.6804
40
+ 2025-09-23 15:35:37,449 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0383 | Val rms_score: 0.6678
41
+ 2025-09-23 15:35:42,200 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0356 | Val rms_score: 0.6577
42
+ 2025-09-23 15:35:47,066 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0360 | Val rms_score: 0.6738
43
+ 2025-09-23 15:35:52,290 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0339 | Val rms_score: 0.6651
44
+ 2025-09-23 15:35:57,432 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0399 | Val rms_score: 0.6800
45
+ 2025-09-23 15:36:02,539 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0365 | Val rms_score: 0.6859
46
+ 2025-09-23 15:36:07,657 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0343 | Val rms_score: 0.6738
47
+ 2025-09-23 15:36:12,956 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0358 | Val rms_score: 0.6908
48
+ 2025-09-23 15:36:18,050 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0342 | Val rms_score: 0.6802
49
+ 2025-09-23 15:36:23,119 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0312 | Val rms_score: 0.6927
50
+ 2025-09-23 15:36:28,224 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0404 | Val rms_score: 0.6842
51
+ 2025-09-23 15:36:33,247 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0327 | Val rms_score: 0.6683
52
+ 2025-09-23 15:36:38,482 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0304 | Val rms_score: 0.6857
53
+ 2025-09-23 15:36:43,590 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0285 | Val rms_score: 0.6669
54
+ 2025-09-23 15:36:48,668 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0294 | Val rms_score: 0.6803
55
+ 2025-09-23 15:36:53,838 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0329 | Val rms_score: 0.7074
56
+ 2025-09-23 15:36:59,058 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0312 | Val rms_score: 0.6897
57
+ 2025-09-23 15:37:04,056 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0302 | Val rms_score: 0.6886
58
+ 2025-09-23 15:37:09,875 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0312 | Val rms_score: 0.6550
59
+ 2025-09-23 15:37:14,099 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0273 | Val rms_score: 0.6783
60
+ 2025-09-23 15:37:19,309 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0288 | Val rms_score: 0.6834
61
+ 2025-09-23 15:37:24,532 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0283 | Val rms_score: 0.6736
62
+ 2025-09-23 15:37:30,009 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0240 | Val rms_score: 0.6733
63
+ 2025-09-23 15:37:35,307 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0247 | Val rms_score: 0.6650
64
+ 2025-09-23 15:37:40,645 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0251 | Val rms_score: 0.6875
65
+ 2025-09-23 15:37:45,965 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0232 | Val rms_score: 0.6929
66
+ 2025-09-23 15:37:51,267 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0235 | Val rms_score: 0.6898
67
+ 2025-09-23 15:37:56,858 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0219 | Val rms_score: 0.6766
68
+ 2025-09-23 15:38:02,177 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0212 | Val rms_score: 0.6698
69
+ 2025-09-23 15:38:07,477 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0238 | Val rms_score: 0.6865
70
+ 2025-09-23 15:38:12,701 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0212 | Val rms_score: 0.6712
71
+ 2025-09-23 15:38:17,989 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0255 | Val rms_score: 0.7072
72
+ 2025-09-23 15:38:23,298 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0182 | Val rms_score: 0.7080
73
+ 2025-09-23 15:38:27,933 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0194 | Val rms_score: 0.6749
74
+ 2025-09-23 15:38:33,107 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0173 | Val rms_score: 0.6834
75
+ 2025-09-23 15:38:37,608 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0193 | Val rms_score: 0.6941
76
+ 2025-09-23 15:38:42,011 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0228 | Val rms_score: 0.6768
77
+ 2025-09-23 15:38:47,610 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0178 | Val rms_score: 0.6620
78
+ 2025-09-23 15:38:52,908 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0183 | Val rms_score: 0.6831
79
+ 2025-09-23 15:38:58,179 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0169 | Val rms_score: 0.6882
80
+ 2025-09-23 15:39:03,464 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0172 | Val rms_score: 0.6658
81
+ 2025-09-23 15:39:08,652 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0164 | Val rms_score: 0.6934
82
+ 2025-09-23 15:39:13,996 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0154 | Val rms_score: 0.6906
83
+ 2025-09-23 15:39:19,156 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0177 | Val rms_score: 0.6820
84
+ 2025-09-23 15:39:25,388 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0167 | Val rms_score: 0.6765
85
+ 2025-09-23 15:39:30,689 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0167 | Val rms_score: 0.7093
86
+ 2025-09-23 15:39:36,051 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0162 | Val rms_score: 0.6856
87
+ 2025-09-23 15:39:41,575 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0184 | Val rms_score: 0.6814
88
+ 2025-09-23 15:39:46,704 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0161 | Val rms_score: 0.7072
89
+ 2025-09-23 15:39:51,862 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0154 | Val rms_score: 0.7001
90
+ 2025-09-23 15:39:57,016 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0149 | Val rms_score: 0.6772
91
+ 2025-09-23 15:40:02,205 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0149 | Val rms_score: 0.7069
92
+ 2025-09-23 15:40:06,944 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0158 | Val rms_score: 0.6876
93
+ 2025-09-23 15:40:11,917 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0150 | Val rms_score: 0.6641
94
+ 2025-09-23 15:40:17,147 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0148 | Val rms_score: 0.6687
95
+ 2025-09-23 15:40:22,414 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0133 | Val rms_score: 0.6735
96
+ 2025-09-23 15:40:27,627 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0148 | Val rms_score: 0.6736
97
+ 2025-09-23 15:40:33,071 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0143 | Val rms_score: 0.6757
98
+ 2025-09-23 15:40:38,455 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0136 | Val rms_score: 0.6999
99
+ 2025-09-23 15:40:43,790 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0139 | Val rms_score: 0.6507
100
+ 2025-09-23 15:40:49,158 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0147 | Val rms_score: 0.7008
101
+ 2025-09-23 15:40:54,484 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0143 | Val rms_score: 0.6737
102
+ 2025-09-23 15:41:00,029 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0154 | Val rms_score: 0.7119
103
+ 2025-09-23 15:41:05,354 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0136 | Val rms_score: 0.6760
104
+ 2025-09-23 15:41:10,604 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0125 | Val rms_score: 0.6896
105
+ 2025-09-23 15:41:15,687 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0142 | Val rms_score: 0.7089
106
+ 2025-09-23 15:41:16,195 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 1.1898
107
+ 2025-09-23 15:41:16,518 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset bace_regression at 2025-09-23_15-41-16
108
+ 2025-09-23 15:41:20,980 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.6974 | Val rms_score: 0.9186
109
+ 2025-09-23 15:41:20,980 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
110
+ 2025-09-23 15:41:21,498 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.9186
111
+ 2025-09-23 15:41:26,676 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3964 | Val rms_score: 0.7411
112
+ 2025-09-23 15:41:26,846 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 76
113
+ 2025-09-23 15:41:27,364 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7411
114
+ 2025-09-23 15:41:31,966 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2489 | Val rms_score: 0.6450
115
+ 2025-09-23 15:41:32,144 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 114
116
+ 2025-09-23 15:41:32,661 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6450
117
+ 2025-09-23 15:41:37,292 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2434 | Val rms_score: 0.8176
118
+ 2025-09-23 15:41:42,578 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2122 | Val rms_score: 0.6907
119
+ 2025-09-23 15:41:47,890 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1908 | Val rms_score: 0.6564
120
+ 2025-09-23 15:41:53,369 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1711 | Val rms_score: 0.6508
121
+ 2025-09-23 15:41:58,708 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1426 | Val rms_score: 0.7617
122
+ 2025-09-23 15:42:04,097 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1365 | Val rms_score: 0.6979
123
+ 2025-09-23 15:42:09,437 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1332 | Val rms_score: 0.6527
124
+ 2025-09-23 15:42:14,748 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1354 | Val rms_score: 0.6267
125
+ 2025-09-23 15:42:15,139 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 418
126
+ 2025-09-23 15:42:15,657 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.6267
127
+ 2025-09-23 15:42:20,934 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1192 | Val rms_score: 0.7037
128
+ 2025-09-23 15:42:26,272 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1086 | Val rms_score: 0.7363
129
+ 2025-09-23 15:42:31,513 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1094 | Val rms_score: 0.6679
130
+ 2025-09-23 15:42:36,848 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0863 | Val rms_score: 0.6946
131
+ 2025-09-23 15:42:41,893 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1328 | Val rms_score: 0.7521
132
+ 2025-09-23 15:42:47,200 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0859 | Val rms_score: 0.7342
133
+ 2025-09-23 15:42:52,385 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0818 | Val rms_score: 0.7761
134
+ 2025-09-23 15:42:57,469 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0923 | Val rms_score: 0.7018
135
+ 2025-09-23 15:43:02,141 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0740 | Val rms_score: 0.6657
136
+ 2025-09-23 15:43:07,357 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0711 | Val rms_score: 0.6469
137
+ 2025-09-23 15:43:12,874 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0690 | Val rms_score: 0.6512
138
+ 2025-09-23 15:43:18,108 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0637 | Val rms_score: 0.7320
139
+ 2025-09-23 15:43:23,367 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0726 | Val rms_score: 0.7052
140
+ 2025-09-23 15:43:28,559 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0596 | Val rms_score: 0.6870
141
+ 2025-09-23 15:43:33,721 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0572 | Val rms_score: 0.6872
142
+ 2025-09-23 15:43:40,122 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0574 | Val rms_score: 0.6982
143
+ 2025-09-23 15:43:45,332 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0547 | Val rms_score: 0.7532
144
+ 2025-09-23 15:43:50,601 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0608 | Val rms_score: 0.6930
145
+ 2025-09-23 15:43:55,851 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0530 | Val rms_score: 0.7312
146
+ 2025-09-23 15:44:01,169 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0629 | Val rms_score: 0.7510
147
+ 2025-09-23 15:44:06,665 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0530 | Val rms_score: 0.6868
148
+ 2025-09-23 15:44:11,797 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0471 | Val rms_score: 0.7292
149
+ 2025-09-23 15:44:17,027 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0440 | Val rms_score: 0.7075
150
+ 2025-09-23 15:44:22,310 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0471 | Val rms_score: 0.7303
151
+ 2025-09-23 15:44:26,818 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0430 | Val rms_score: 0.7473
152
+ 2025-09-23 15:44:31,833 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0472 | Val rms_score: 0.6988
153
+ 2025-09-23 15:44:36,932 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0428 | Val rms_score: 0.6654
154
+ 2025-09-23 15:44:42,131 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0419 | Val rms_score: 0.6527
155
+ 2025-09-23 15:44:47,266 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0373 | Val rms_score: 0.7055
156
+ 2025-09-23 15:44:52,567 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0380 | Val rms_score: 0.7422
157
+ 2025-09-23 15:44:58,109 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0397 | Val rms_score: 0.6912
158
+ 2025-09-23 15:45:03,339 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0363 | Val rms_score: 0.7056
159
+ 2025-09-23 15:45:08,629 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0341 | Val rms_score: 0.6903
160
+ 2025-09-23 15:45:13,925 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0336 | Val rms_score: 0.7452
161
+ 2025-09-23 15:45:19,169 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0337 | Val rms_score: 0.7156
162
+ 2025-09-23 15:45:24,650 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0325 | Val rms_score: 0.6792
163
+ 2025-09-23 15:45:29,865 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0301 | Val rms_score: 0.7047
164
+ 2025-09-23 15:45:34,450 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0315 | Val rms_score: 0.6809
165
+ 2025-09-23 15:45:39,552 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0302 | Val rms_score: 0.7062
166
+ 2025-09-23 15:45:44,604 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0284 | Val rms_score: 0.7306
167
+ 2025-09-23 15:45:49,898 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0298 | Val rms_score: 0.7384
168
+ 2025-09-23 15:45:55,358 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0336 | Val rms_score: 0.7346
169
+ 2025-09-23 15:46:00,168 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0286 | Val rms_score: 0.7196
170
+ 2025-09-23 15:46:05,286 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0259 | Val rms_score: 0.7441
171
+ 2025-09-23 15:46:10,554 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0285 | Val rms_score: 0.7334
172
+ 2025-09-23 15:46:16,043 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0259 | Val rms_score: 0.7017
173
+ 2025-09-23 15:46:21,259 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0378 | Val rms_score: 0.7236
174
+ 2025-09-23 15:46:26,459 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0247 | Val rms_score: 0.6944
175
+ 2025-09-23 15:46:31,646 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0269 | Val rms_score: 0.7314
176
+ 2025-09-23 15:46:36,952 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0229 | Val rms_score: 0.7089
177
+ 2025-09-23 15:46:42,389 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0218 | Val rms_score: 0.7093
178
+ 2025-09-23 15:46:47,523 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0254 | Val rms_score: 0.7227
179
+ 2025-09-23 15:46:52,764 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0251 | Val rms_score: 0.7468
180
+ 2025-09-23 15:46:57,882 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0223 | Val rms_score: 0.7313
181
+ 2025-09-23 15:47:02,920 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0275 | Val rms_score: 0.7066
182
+ 2025-09-23 15:47:08,250 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0240 | Val rms_score: 0.6740
183
+ 2025-09-23 15:47:13,305 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0237 | Val rms_score: 0.7361
184
+ 2025-09-23 15:47:18,325 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0281 | Val rms_score: 0.7128
185
+ 2025-09-23 15:47:22,973 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0265 | Val rms_score: 0.6856
186
+ 2025-09-23 15:47:27,841 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0236 | Val rms_score: 0.7115
187
+ 2025-09-23 15:47:33,156 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0216 | Val rms_score: 0.7244
188
+ 2025-09-23 15:47:38,169 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0248 | Val rms_score: 0.7479
189
+ 2025-09-23 15:47:43,217 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0199 | Val rms_score: 0.7119
190
+ 2025-09-23 15:47:48,377 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0184 | Val rms_score: 0.6887
191
+ 2025-09-23 15:47:53,606 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0215 | Val rms_score: 0.7163
192
+ 2025-09-23 15:47:59,072 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0195 | Val rms_score: 0.7179
193
+ 2025-09-23 15:48:04,373 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0193 | Val rms_score: 0.7196
194
+ 2025-09-23 15:48:10,529 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0286 | Val rms_score: 0.7270
195
+ 2025-09-23 15:48:15,758 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0173 | Val rms_score: 0.7112
196
+ 2025-09-23 15:48:20,904 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0179 | Val rms_score: 0.6911
197
+ 2025-09-23 15:48:26,427 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0210 | Val rms_score: 0.6950
198
+ 2025-09-23 15:48:31,634 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0188 | Val rms_score: 0.6803
199
+ 2025-09-23 15:48:36,946 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0193 | Val rms_score: 0.6832
200
+ 2025-09-23 15:48:42,175 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0198 | Val rms_score: 0.7017
201
+ 2025-09-23 15:48:47,225 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0174 | Val rms_score: 0.7098
202
+ 2025-09-23 15:48:52,265 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0113 | Val rms_score: 0.7193
203
+ 2025-09-23 15:48:57,300 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0173 | Val rms_score: 0.7061
204
+ 2025-09-23 15:49:02,455 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0159 | Val rms_score: 0.7302
205
+ 2025-09-23 15:49:07,541 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0183 | Val rms_score: 0.7146
206
+ 2025-09-23 15:49:12,782 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0174 | Val rms_score: 0.6915
207
+ 2025-09-23 15:49:18,253 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0182 | Val rms_score: 0.6979
208
+ 2025-09-23 15:49:23,527 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0152 | Val rms_score: 0.6995
209
+ 2025-09-23 15:49:28,922 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0172 | Val rms_score: 0.6947
210
+ 2025-09-23 15:49:34,283 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0193 | Val rms_score: 0.7109
211
+ 2025-09-23 15:49:39,488 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0171 | Val rms_score: 0.7071
212
+ 2025-09-23 15:49:45,279 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0176 | Val rms_score: 0.6884
213
+ 2025-09-23 15:49:50,001 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0166 | Val rms_score: 0.6828
214
+ 2025-09-23 15:49:55,276 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0143 | Val rms_score: 0.6850
215
+ 2025-09-23 15:50:00,488 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0146 | Val rms_score: 0.6954
216
+ 2025-09-23 15:50:01,037 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 0.9253
217
+ 2025-09-23 15:50:01,366 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset bace_regression at 2025-09-23_15-50-01
218
+ 2025-09-23 15:50:06,289 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.7237 | Val rms_score: 0.7429
219
+ 2025-09-23 15:50:06,289 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 38
220
+ 2025-09-23 15:50:06,833 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7429
221
+ 2025-09-23 15:50:12,039 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.4161 | Val rms_score: 0.7291
222
+ 2025-09-23 15:50:12,205 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 76
223
+ 2025-09-23 15:50:12,763 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7291
224
+ 2025-09-23 15:50:17,224 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.3214 | Val rms_score: 0.6446
225
+ 2025-09-23 15:50:17,394 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 114
226
+ 2025-09-23 15:50:17,915 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6446
227
+ 2025-09-23 15:50:23,144 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.2615 | Val rms_score: 0.5852
228
+ 2025-09-23 15:50:23,321 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Global step of best model: 152
229
+ 2025-09-23 15:50:23,860 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.5852
230
+ 2025-09-23 15:50:29,121 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.2220 | Val rms_score: 0.5952
231
+ 2025-09-23 15:50:34,394 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.2065 | Val rms_score: 0.7065
232
+ 2025-09-23 15:50:39,910 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1743 | Val rms_score: 0.6663
233
+ 2025-09-23 15:50:45,283 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1973 | Val rms_score: 0.7485
234
+ 2025-09-23 15:50:50,592 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.1398 | Val rms_score: 0.6783
235
+ 2025-09-23 15:50:55,935 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1357 | Val rms_score: 0.7064
236
+ 2025-09-23 15:51:01,252 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1311 | Val rms_score: 0.6680
237
+ 2025-09-23 15:51:06,783 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1127 | Val rms_score: 0.7236
238
+ 2025-09-23 15:51:12,034 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1077 | Val rms_score: 0.7618
239
+ 2025-09-23 15:51:17,072 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0996 | Val rms_score: 0.8921
240
+ 2025-09-23 15:51:22,282 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0958 | Val rms_score: 0.8292
241
+ 2025-09-23 15:51:27,361 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1162 | Val rms_score: 0.7961
242
+ 2025-09-23 15:51:32,717 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0839 | Val rms_score: 0.8269
243
+ 2025-09-23 15:51:37,806 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0872 | Val rms_score: 0.6972
244
+ 2025-09-23 15:51:42,285 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0778 | Val rms_score: 0.6379
245
+ 2025-09-23 15:51:46,806 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0744 | Val rms_score: 0.6533
246
+ 2025-09-23 15:51:51,947 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0810 | Val rms_score: 0.6750
247
+ 2025-09-23 15:51:57,526 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0642 | Val rms_score: 0.6953
248
+ 2025-09-23 15:52:02,748 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0609 | Val rms_score: 0.6645
249
+ 2025-09-23 15:52:08,024 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0684 | Val rms_score: 0.6795
250
+ 2025-09-23 15:52:13,096 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0580 | Val rms_score: 0.6118
251
+ 2025-09-23 15:52:18,277 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0563 | Val rms_score: 0.6816
252
+ 2025-09-23 15:52:24,681 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0526 | Val rms_score: 0.6569
253
+ 2025-09-23 15:52:29,963 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0567 | Val rms_score: 0.6775
254
+ 2025-09-23 15:52:35,265 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0918 | Val rms_score: 0.6962
255
+ 2025-09-23 15:52:39,896 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0504 | Val rms_score: 0.6827
256
+ 2025-09-23 15:52:45,213 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0491 | Val rms_score: 0.6858
257
+ 2025-09-23 15:52:50,660 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0479 | Val rms_score: 0.6581
258
+ 2025-09-23 15:52:55,864 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0405 | Val rms_score: 0.7189
259
+ 2025-09-23 15:53:01,087 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0413 | Val rms_score: 0.6745
260
+ 2025-09-23 15:53:06,369 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0477 | Val rms_score: 0.6543
261
+ 2025-09-23 15:53:10,813 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0442 | Val rms_score: 0.7129
262
+ 2025-09-23 15:53:15,958 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0384 | Val rms_score: 0.6600
263
+ 2025-09-23 15:53:21,168 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0356 | Val rms_score: 0.7416
264
+ 2025-09-23 15:53:26,393 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0403 | Val rms_score: 0.7362
265
+ 2025-09-23 15:53:31,700 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0445 | Val rms_score: 0.6655
266
+ 2025-09-23 15:53:36,970 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0380 | Val rms_score: 0.6503
267
+ 2025-09-23 15:53:42,270 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0329 | Val rms_score: 0.6601
268
+ 2025-09-23 15:53:47,436 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0345 | Val rms_score: 0.6642
269
+ 2025-09-23 15:53:52,571 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0337 | Val rms_score: 0.7007
270
+ 2025-09-23 15:53:57,751 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0311 | Val rms_score: 0.7240
271
+ 2025-09-23 15:54:02,557 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0302 | Val rms_score: 0.6679
272
+ 2025-09-23 15:54:08,080 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0300 | Val rms_score: 0.7008
273
+ 2025-09-23 15:54:13,332 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0301 | Val rms_score: 0.6711
274
+ 2025-09-23 15:54:18,603 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0306 | Val rms_score: 0.6848
275
+ 2025-09-23 15:54:23,874 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0323 | Val rms_score: 0.7223
276
+ 2025-09-23 15:54:29,159 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0312 | Val rms_score: 0.7383
277
+ 2025-09-23 15:54:34,754 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0304 | Val rms_score: 0.6889
278
+ 2025-09-23 15:54:40,410 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0326 | Val rms_score: 0.7064
279
+ 2025-09-23 15:54:45,464 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0267 | Val rms_score: 0.6729
280
+ 2025-09-23 15:54:50,419 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0249 | Val rms_score: 0.6919
281
+ 2025-09-23 15:54:55,505 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0234 | Val rms_score: 0.7171
282
+ 2025-09-23 15:55:00,876 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0230 | Val rms_score: 0.6926
283
+ 2025-09-23 15:55:06,201 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0276 | Val rms_score: 0.7124
284
+ 2025-09-23 15:55:11,436 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0237 | Val rms_score: 0.7131
285
+ 2025-09-23 15:55:16,721 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0223 | Val rms_score: 0.7059
286
+ 2025-09-23 15:55:22,073 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0217 | Val rms_score: 0.7109
287
+ 2025-09-23 15:55:27,615 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0232 | Val rms_score: 0.7012
288
+ 2025-09-23 15:55:32,985 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0213 | Val rms_score: 0.7031
289
+ 2025-09-23 15:55:38,243 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0201 | Val rms_score: 0.6933
290
+ 2025-09-23 15:55:43,551 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0209 | Val rms_score: 0.7095
291
+ 2025-09-23 15:55:48,711 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0209 | Val rms_score: 0.7111
292
+ 2025-09-23 15:55:54,185 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0193 | Val rms_score: 0.7025
293
+ 2025-09-23 15:55:59,514 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0195 | Val rms_score: 0.7054
294
+ 2025-09-23 15:56:04,147 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0194 | Val rms_score: 0.7237
295
+ 2025-09-23 15:56:08,688 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0190 | Val rms_score: 0.6903
296
+ 2025-09-23 15:56:13,849 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0197 | Val rms_score: 0.6883
297
+ 2025-09-23 15:56:19,281 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0196 | Val rms_score: 0.7258
298
+ 2025-09-23 15:56:24,359 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0208 | Val rms_score: 0.7213
299
+ 2025-09-23 15:56:29,527 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0192 | Val rms_score: 0.6997
300
+ 2025-09-23 15:56:34,880 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0179 | Val rms_score: 0.6894
301
+ 2025-09-23 15:56:40,146 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0184 | Val rms_score: 0.7082
302
+ 2025-09-23 15:56:45,692 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0192 | Val rms_score: 0.6606
303
+ 2025-09-23 15:56:51,017 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0180 | Val rms_score: 0.7032
304
+ 2025-09-23 15:56:56,735 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0242 | Val rms_score: 0.7063
305
+ 2025-09-23 15:57:02,129 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0161 | Val rms_score: 0.6937
306
+ 2025-09-23 15:57:07,468 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0169 | Val rms_score: 0.6816
307
+ 2025-09-23 15:57:12,958 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0136 | Val rms_score: 0.7086
308
+ 2025-09-23 15:57:18,214 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0152 | Val rms_score: 0.6986
309
+ 2025-09-23 15:57:23,385 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0149 | Val rms_score: 0.7184
310
+ 2025-09-23 15:57:28,646 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0165 | Val rms_score: 0.7147
311
+ 2025-09-23 15:57:33,183 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0188 | Val rms_score: 0.6983
312
+ 2025-09-23 15:57:38,583 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0175 | Val rms_score: 0.6672
313
+ 2025-09-23 15:57:43,856 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0168 | Val rms_score: 0.6641
314
+ 2025-09-23 15:57:49,091 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0168 | Val rms_score: 0.6984
315
+ 2025-09-23 15:57:54,420 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0178 | Val rms_score: 0.7044
316
+ 2025-09-23 15:57:59,646 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0155 | Val rms_score: 0.6834
317
+ 2025-09-23 15:58:05,143 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0150 | Val rms_score: 0.6961
318
+ 2025-09-23 15:58:10,465 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0155 | Val rms_score: 0.6910
319
+ 2025-09-23 15:58:15,707 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0147 | Val rms_score: 0.6974
320
+ 2025-09-23 15:58:20,381 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0133 | Val rms_score: 0.7059
321
+ 2025-09-23 15:58:25,664 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0127 | Val rms_score: 0.6874
322
+ 2025-09-23 15:58:31,149 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0143 | Val rms_score: 0.7105
323
+ 2025-09-23 15:58:36,373 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0142 | Val rms_score: 0.7352
324
+ 2025-09-23 15:58:41,517 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0142 | Val rms_score: 0.7204
325
+ 2025-09-23 15:58:46,634 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0131 | Val rms_score: 0.6847
326
+ 2025-09-23 15:58:47,102 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Test rms_score: 0.9761
327
+ 2025-09-23 15:58:47,433 - logs_modchembert_bace_regression_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 1.0304, Std Dev: 0.1146
logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_clearance_epochs100_batch_size32_20250923_155847.log ADDED
@@ -0,0 +1,343 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 15:58:47,434 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Running benchmark for dataset: clearance
2
+ 2025-09-23 15:58:47,434 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - dataset: clearance, tasks: ['target'], epochs: 100, learning rate: 3e-05, transform: True
3
+ 2025-09-23 15:58:47,440 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset clearance at 2025-09-23_15-58-47
4
+ 2025-09-23 15:58:50,522 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.9048 | Val rms_score: 64.4146
5
+ 2025-09-23 15:58:50,522 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
6
+ 2025-09-23 15:58:51,078 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 64.4146
7
+ 2025-09-23 15:58:54,382 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.5476 | Val rms_score: 60.7774
8
+ 2025-09-23 15:58:54,553 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
9
+ 2025-09-23 15:58:55,123 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 60.7774
10
+ 2025-09-23 15:58:58,041 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 1.3036 | Val rms_score: 56.2072
11
+ 2025-09-23 15:58:58,224 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 63
12
+ 2025-09-23 15:58:58,763 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 56.2072
13
+ 2025-09-23 15:59:01,756 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.9524 | Val rms_score: 57.7758
14
+ 2025-09-23 15:59:04,881 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.5563 | Val rms_score: 56.6243
15
+ 2025-09-23 15:59:08,111 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.5685 | Val rms_score: 56.3067
16
+ 2025-09-23 15:59:11,532 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.4256 | Val rms_score: 52.7751
17
+ 2025-09-23 15:59:11,732 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 147
18
+ 2025-09-23 15:59:12,269 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val rms_score: 52.7751
19
+ 2025-09-23 15:59:15,443 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.2917 | Val rms_score: 53.8809
20
+ 2025-09-23 15:59:18,676 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.2545 | Val rms_score: 53.3727
21
+ 2025-09-23 15:59:22,142 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.1773 | Val rms_score: 54.1458
22
+ 2025-09-23 15:59:25,563 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1749 | Val rms_score: 55.0962
23
+ 2025-09-23 15:59:29,246 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1607 | Val rms_score: 54.6244
24
+ 2025-09-23 15:59:32,677 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1317 | Val rms_score: 54.9578
25
+ 2025-09-23 15:59:36,067 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1339 | Val rms_score: 53.2909
26
+ 2025-09-23 15:59:38,972 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1172 | Val rms_score: 54.5441
27
+ 2025-09-23 15:59:42,400 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1220 | Val rms_score: 54.4550
28
+ 2025-09-23 15:59:46,035 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.1362 | Val rms_score: 53.6734
29
+ 2025-09-23 15:59:49,388 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.1302 | Val rms_score: 53.4385
30
+ 2025-09-23 15:59:52,731 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.1064 | Val rms_score: 53.7716
31
+ 2025-09-23 15:59:56,128 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0863 | Val rms_score: 53.3175
32
+ 2025-09-23 15:59:59,536 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0785 | Val rms_score: 53.5502
33
+ 2025-09-23 16:00:03,152 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0792 | Val rms_score: 53.5879
34
+ 2025-09-23 16:00:06,515 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0744 | Val rms_score: 53.7084
35
+ 2025-09-23 16:00:09,897 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0986 | Val rms_score: 52.4849
36
+ 2025-09-23 16:00:10,055 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 504
37
+ 2025-09-23 16:00:10,606 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 24 with val rms_score: 52.4849
38
+ 2025-09-23 16:00:13,998 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0755 | Val rms_score: 53.4247
39
+ 2025-09-23 16:00:16,740 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0722 | Val rms_score: 52.7315
40
+ 2025-09-23 16:00:20,046 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0673 | Val rms_score: 53.2687
41
+ 2025-09-23 16:00:23,372 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0770 | Val rms_score: 53.1594
42
+ 2025-09-23 16:00:26,724 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0621 | Val rms_score: 54.4709
43
+ 2025-09-23 16:00:30,087 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0751 | Val rms_score: 52.0724
44
+ 2025-09-23 16:00:30,285 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 630
45
+ 2025-09-23 16:00:30,867 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 30 with val rms_score: 52.0724
46
+ 2025-09-23 16:00:34,301 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0640 | Val rms_score: 53.6212
47
+ 2025-09-23 16:00:37,994 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0658 | Val rms_score: 53.1400
48
+ 2025-09-23 16:00:41,372 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0655 | Val rms_score: 52.3414
49
+ 2025-09-23 16:00:44,770 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0578 | Val rms_score: 52.8821
50
+ 2025-09-23 16:00:48,137 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0562 | Val rms_score: 52.6956
51
+ 2025-09-23 16:00:51,598 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0543 | Val rms_score: 53.1207
52
+ 2025-09-23 16:00:55,202 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0480 | Val rms_score: 52.6687
53
+ 2025-09-23 16:00:58,197 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0525 | Val rms_score: 53.0538
54
+ 2025-09-23 16:01:01,576 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0409 | Val rms_score: 52.5507
55
+ 2025-09-23 16:01:04,951 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0424 | Val rms_score: 53.1289
56
+ 2025-09-23 16:01:08,307 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0502 | Val rms_score: 52.2980
57
+ 2025-09-23 16:01:11,897 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0454 | Val rms_score: 51.2038
58
+ 2025-09-23 16:01:12,041 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 882
59
+ 2025-09-23 16:01:12,581 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 42 with val rms_score: 51.2038
60
+ 2025-09-23 16:01:15,972 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0365 | Val rms_score: 52.3488
61
+ 2025-09-23 16:01:19,229 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0450 | Val rms_score: 52.7973
62
+ 2025-09-23 16:01:22,516 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0452 | Val rms_score: 52.5988
63
+ 2025-09-23 16:01:25,704 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0484 | Val rms_score: 51.7505
64
+ 2025-09-23 16:01:29,244 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0465 | Val rms_score: 52.4046
65
+ 2025-09-23 16:01:33,438 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0391 | Val rms_score: 52.3794
66
+ 2025-09-23 16:01:36,268 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0445 | Val rms_score: 52.2507
67
+ 2025-09-23 16:01:39,268 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0415 | Val rms_score: 53.2267
68
+ 2025-09-23 16:01:42,602 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0443 | Val rms_score: 51.8783
69
+ 2025-09-23 16:01:46,168 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0387 | Val rms_score: 51.9046
70
+ 2025-09-23 16:01:49,373 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0403 | Val rms_score: 52.3628
71
+ 2025-09-23 16:01:52,749 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0383 | Val rms_score: 52.2623
72
+ 2025-09-23 16:01:56,163 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0471 | Val rms_score: 53.2199
73
+ 2025-09-23 16:01:59,482 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0366 | Val rms_score: 52.4045
74
+ 2025-09-23 16:02:03,080 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0450 | Val rms_score: 51.6118
75
+ 2025-09-23 16:02:06,479 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0388 | Val rms_score: 52.0146
76
+ 2025-09-23 16:02:09,820 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0339 | Val rms_score: 52.1151
77
+ 2025-09-23 16:02:13,230 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0335 | Val rms_score: 51.5018
78
+ 2025-09-23 16:02:16,373 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0357 | Val rms_score: 51.3810
79
+ 2025-09-23 16:02:20,052 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0287 | Val rms_score: 51.5677
80
+ 2025-09-23 16:02:23,295 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0322 | Val rms_score: 52.0980
81
+ 2025-09-23 16:02:26,547 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0385 | Val rms_score: 51.0717
82
+ 2025-09-23 16:02:26,693 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 1344
83
+ 2025-09-23 16:02:27,236 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 64 with val rms_score: 51.0717
84
+ 2025-09-23 16:02:30,507 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0339 | Val rms_score: 51.5128
85
+ 2025-09-23 16:02:33,751 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0320 | Val rms_score: 52.5312
86
+ 2025-09-23 16:02:37,318 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0310 | Val rms_score: 51.9217
87
+ 2025-09-23 16:02:40,722 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0296 | Val rms_score: 52.1772
88
+ 2025-09-23 16:02:44,084 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0286 | Val rms_score: 51.8858
89
+ 2025-09-23 16:02:47,439 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0307 | Val rms_score: 51.9655
90
+ 2025-09-23 16:02:50,740 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0266 | Val rms_score: 52.3772
91
+ 2025-09-23 16:02:54,299 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0288 | Val rms_score: 52.5480
92
+ 2025-09-23 16:02:57,188 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0337 | Val rms_score: 51.4142
93
+ 2025-09-23 16:03:00,183 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0402 | Val rms_score: 52.0467
94
+ 2025-09-23 16:03:03,529 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0329 | Val rms_score: 51.6782
95
+ 2025-09-23 16:03:06,940 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0326 | Val rms_score: 52.6323
96
+ 2025-09-23 16:03:10,452 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0306 | Val rms_score: 52.2625
97
+ 2025-09-23 16:03:13,783 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0268 | Val rms_score: 52.0978
98
+ 2025-09-23 16:03:17,099 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0268 | Val rms_score: 51.6988
99
+ 2025-09-23 16:03:20,513 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0249 | Val rms_score: 51.3871
100
+ 2025-09-23 16:03:23,925 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0322 | Val rms_score: 51.7331
101
+ 2025-09-23 16:03:27,531 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0253 | Val rms_score: 51.2242
102
+ 2025-09-23 16:03:30,943 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0246 | Val rms_score: 52.0744
103
+ 2025-09-23 16:03:33,794 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0224 | Val rms_score: 52.2049
104
+ 2025-09-23 16:03:37,101 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0224 | Val rms_score: 51.5314
105
+ 2025-09-23 16:03:40,439 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0207 | Val rms_score: 51.1434
106
+ 2025-09-23 16:03:44,170 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0257 | Val rms_score: 52.4424
107
+ 2025-09-23 16:03:47,560 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0262 | Val rms_score: 51.5833
108
+ 2025-09-23 16:03:50,892 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0249 | Val rms_score: 51.1982
109
+ 2025-09-23 16:03:54,309 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0246 | Val rms_score: 51.3339
110
+ 2025-09-23 16:03:57,665 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0197 | Val rms_score: 51.8717
111
+ 2025-09-23 16:04:01,140 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0206 | Val rms_score: 51.4453
112
+ 2025-09-23 16:04:04,327 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0218 | Val rms_score: 50.8917
113
+ 2025-09-23 16:04:04,472 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 1953
114
+ 2025-09-23 16:04:05,017 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 93 with val rms_score: 50.8917
115
+ 2025-09-23 16:04:08,246 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0259 | Val rms_score: 52.5053
116
+ 2025-09-23 16:04:11,383 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0213 | Val rms_score: 51.4773
117
+ 2025-09-23 16:04:15,590 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0219 | Val rms_score: 52.1857
118
+ 2025-09-23 16:04:18,997 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0194 | Val rms_score: 51.6751
119
+ 2025-09-23 16:04:22,019 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0220 | Val rms_score: 51.8457
120
+ 2025-09-23 16:04:25,459 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0229 | Val rms_score: 51.8682
121
+ 2025-09-23 16:04:28,808 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0213 | Val rms_score: 51.3125
122
+ 2025-09-23 16:04:29,303 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 47.3413
123
+ 2025-09-23 16:04:29,625 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset clearance at 2025-09-23_16-04-29
124
+ 2025-09-23 16:04:32,806 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.8810 | Val rms_score: 64.1828
125
+ 2025-09-23 16:04:32,806 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
126
+ 2025-09-23 16:04:33,347 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 64.1828
127
+ 2025-09-23 16:04:36,736 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.5357 | Val rms_score: 61.6657
128
+ 2025-09-23 16:04:36,907 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
129
+ 2025-09-23 16:04:37,441 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 61.6657
130
+ 2025-09-23 16:04:40,784 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 1.3155 | Val rms_score: 51.6429
131
+ 2025-09-23 16:04:40,959 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 63
132
+ 2025-09-23 16:04:41,478 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 51.6429
133
+ 2025-09-23 16:04:44,803 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 1.0238 | Val rms_score: 55.3201
134
+ 2025-09-23 16:04:48,171 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.7688 | Val rms_score: 49.8112
135
+ 2025-09-23 16:04:48,350 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 105
136
+ 2025-09-23 16:04:48,877 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 49.8112
137
+ 2025-09-23 16:04:51,720 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.5536 | Val rms_score: 53.9356
138
+ 2025-09-23 16:04:55,343 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.3646 | Val rms_score: 53.4953
139
+ 2025-09-23 16:04:58,736 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.2723 | Val rms_score: 52.8911
140
+ 2025-09-23 16:05:02,151 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.2083 | Val rms_score: 52.3115
141
+ 2025-09-23 16:05:05,516 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.2062 | Val rms_score: 53.7355
142
+ 2025-09-23 16:05:08,829 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1644 | Val rms_score: 54.6787
143
+ 2025-09-23 16:05:12,444 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1585 | Val rms_score: 54.0003
144
+ 2025-09-23 16:05:15,797 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1406 | Val rms_score: 54.7218
145
+ 2025-09-23 16:05:19,090 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1310 | Val rms_score: 54.3926
146
+ 2025-09-23 16:05:22,299 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1125 | Val rms_score: 53.1509
147
+ 2025-09-23 16:05:25,481 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.1086 | Val rms_score: 53.6322
148
+ 2025-09-23 16:05:28,988 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0997 | Val rms_score: 54.2865
149
+ 2025-09-23 16:05:32,241 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0908 | Val rms_score: 52.0481
150
+ 2025-09-23 16:05:35,456 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0863 | Val rms_score: 53.5262
151
+ 2025-09-23 16:05:38,443 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0828 | Val rms_score: 53.1050
152
+ 2025-09-23 16:05:41,391 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0748 | Val rms_score: 52.7918
153
+ 2025-09-23 16:05:44,924 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0729 | Val rms_score: 52.0584
154
+ 2025-09-23 16:05:48,372 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0647 | Val rms_score: 53.1887
155
+ 2025-09-23 16:05:51,747 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.1079 | Val rms_score: 52.3895
156
+ 2025-09-23 16:05:55,166 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0733 | Val rms_score: 51.9434
157
+ 2025-09-23 16:05:58,513 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0647 | Val rms_score: 52.9971
158
+ 2025-09-23 16:06:02,019 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0636 | Val rms_score: 52.1252
159
+ 2025-09-23 16:06:05,218 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0792 | Val rms_score: 52.0694
160
+ 2025-09-23 16:06:08,448 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0738 | Val rms_score: 53.5043
161
+ 2025-09-23 16:06:11,543 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0859 | Val rms_score: 53.7143
162
+ 2025-09-23 16:06:14,737 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0755 | Val rms_score: 52.6572
163
+ 2025-09-23 16:06:18,190 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0551 | Val rms_score: 52.9711
164
+ 2025-09-23 16:06:21,439 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0547 | Val rms_score: 53.7424
165
+ 2025-09-23 16:06:24,891 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0572 | Val rms_score: 52.6225
166
+ 2025-09-23 16:06:28,266 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0528 | Val rms_score: 51.8708
167
+ 2025-09-23 16:06:31,516 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0599 | Val rms_score: 51.8018
168
+ 2025-09-23 16:06:35,150 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0456 | Val rms_score: 52.0364
169
+ 2025-09-23 16:06:38,479 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0480 | Val rms_score: 52.7463
170
+ 2025-09-23 16:06:41,840 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0452 | Val rms_score: 53.0674
171
+ 2025-09-23 16:06:45,160 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0432 | Val rms_score: 52.3117
172
+ 2025-09-23 16:06:48,572 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0452 | Val rms_score: 52.1652
173
+ 2025-09-23 16:06:52,319 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0459 | Val rms_score: 52.9385
174
+ 2025-09-23 16:06:55,700 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0550 | Val rms_score: 52.6301
175
+ 2025-09-23 16:06:58,477 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0441 | Val rms_score: 52.5454
176
+ 2025-09-23 16:07:01,406 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0420 | Val rms_score: 51.9439
177
+ 2025-09-23 16:07:04,759 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0448 | Val rms_score: 53.6195
178
+ 2025-09-23 16:07:08,376 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0428 | Val rms_score: 51.9563
179
+ 2025-09-23 16:07:12,671 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0417 | Val rms_score: 52.0420
180
+ 2025-09-23 16:07:16,065 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0426 | Val rms_score: 52.9924
181
+ 2025-09-23 16:07:19,462 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0350 | Val rms_score: 52.2841
182
+ 2025-09-23 16:07:22,828 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0450 | Val rms_score: 51.6702
183
+ 2025-09-23 16:07:26,466 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0376 | Val rms_score: 51.9973
184
+ 2025-09-23 16:07:29,343 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0383 | Val rms_score: 52.4745
185
+ 2025-09-23 16:07:32,687 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0378 | Val rms_score: 51.4141
186
+ 2025-09-23 16:07:36,020 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0355 | Val rms_score: 53.1024
187
+ 2025-09-23 16:07:39,421 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0339 | Val rms_score: 51.7457
188
+ 2025-09-23 16:07:43,026 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0363 | Val rms_score: 52.3038
189
+ 2025-09-23 16:07:46,404 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0310 | Val rms_score: 52.2082
190
+ 2025-09-23 16:07:49,744 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0368 | Val rms_score: 51.2399
191
+ 2025-09-23 16:07:53,080 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0407 | Val rms_score: 51.9361
192
+ 2025-09-23 16:07:56,412 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0348 | Val rms_score: 52.3489
193
+ 2025-09-23 16:07:59,957 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0261 | Val rms_score: 51.7292
194
+ 2025-09-23 16:08:03,199 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0335 | Val rms_score: 52.5782
195
+ 2025-09-23 16:08:06,483 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0299 | Val rms_score: 51.9017
196
+ 2025-09-23 16:08:09,719 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0327 | Val rms_score: 52.8559
197
+ 2025-09-23 16:08:12,956 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0335 | Val rms_score: 52.5423
198
+ 2025-09-23 16:08:16,596 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0346 | Val rms_score: 52.7408
199
+ 2025-09-23 16:08:19,513 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0314 | Val rms_score: 52.1002
200
+ 2025-09-23 16:08:22,428 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0305 | Val rms_score: 51.8033
201
+ 2025-09-23 16:08:25,796 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0320 | Val rms_score: 51.6772
202
+ 2025-09-23 16:08:29,118 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0355 | Val rms_score: 52.7855
203
+ 2025-09-23 16:08:32,563 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0277 | Val rms_score: 51.5858
204
+ 2025-09-23 16:08:35,912 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0264 | Val rms_score: 52.1851
205
+ 2025-09-23 16:08:39,156 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0264 | Val rms_score: 51.7117
206
+ 2025-09-23 16:08:42,512 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0268 | Val rms_score: 51.7072
207
+ 2025-09-23 16:08:45,834 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0277 | Val rms_score: 52.9206
208
+ 2025-09-23 16:08:49,507 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0262 | Val rms_score: 52.2689
209
+ 2025-09-23 16:08:52,884 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0242 | Val rms_score: 51.9277
210
+ 2025-09-23 16:08:56,310 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0246 | Val rms_score: 52.1394
211
+ 2025-09-23 16:08:59,632 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0257 | Val rms_score: 52.1480
212
+ 2025-09-23 16:09:02,981 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0177 | Val rms_score: 52.0657
213
+ 2025-09-23 16:09:06,541 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0228 | Val rms_score: 51.6482
214
+ 2025-09-23 16:09:09,791 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0236 | Val rms_score: 51.9890
215
+ 2025-09-23 16:09:13,001 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0240 | Val rms_score: 52.5238
216
+ 2025-09-23 16:09:16,236 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0234 | Val rms_score: 53.5937
217
+ 2025-09-23 16:09:19,496 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0241 | Val rms_score: 51.8436
218
+ 2025-09-23 16:09:23,147 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0219 | Val rms_score: 52.2614
219
+ 2025-09-23 16:09:26,526 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0221 | Val rms_score: 51.7228
220
+ 2025-09-23 16:09:29,896 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0275 | Val rms_score: 52.5084
221
+ 2025-09-23 16:09:33,301 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0255 | Val rms_score: 52.1294
222
+ 2025-09-23 16:09:36,629 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0225 | Val rms_score: 52.1346
223
+ 2025-09-23 16:09:39,708 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0253 | Val rms_score: 51.5442
224
+ 2025-09-23 16:09:42,665 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0230 | Val rms_score: 52.9107
225
+ 2025-09-23 16:09:46,072 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0228 | Val rms_score: 51.8405
226
+ 2025-09-23 16:09:49,475 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0262 | Val rms_score: 52.3715
227
+ 2025-09-23 16:09:53,821 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0245 | Val rms_score: 52.4280
228
+ 2025-09-23 16:09:57,456 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0217 | Val rms_score: 52.4344
229
+ 2025-09-23 16:10:00,865 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0231 | Val rms_score: 51.9628
230
+ 2025-09-23 16:10:04,166 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0247 | Val rms_score: 52.3798
231
+ 2025-09-23 16:10:07,132 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0221 | Val rms_score: 52.6243
232
+ 2025-09-23 16:10:07,605 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 47.8457
233
+ 2025-09-23 16:10:07,913 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset clearance at 2025-09-23_16-10-07
234
+ 2025-09-23 16:10:10,979 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 2.8333 | Val rms_score: 61.7199
235
+ 2025-09-23 16:10:10,979 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 21
236
+ 2025-09-23 16:10:11,517 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 61.7199
237
+ 2025-09-23 16:10:14,812 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 1.5595 | Val rms_score: 56.5517
238
+ 2025-09-23 16:10:14,987 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 42
239
+ 2025-09-23 16:10:15,532 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 56.5517
240
+ 2025-09-23 16:10:18,902 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 1.2917 | Val rms_score: 55.7577
241
+ 2025-09-23 16:10:19,085 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 63
242
+ 2025-09-23 16:10:19,615 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 55.7577
243
+ 2025-09-23 16:10:22,960 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.9940 | Val rms_score: 49.4852
244
+ 2025-09-23 16:10:23,140 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Global step of best model: 84
245
+ 2025-09-23 16:10:23,681 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 49.4852
246
+ 2025-09-23 16:10:27,013 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.8688 | Val rms_score: 57.8875
247
+ 2025-09-23 16:10:30,339 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.5774 | Val rms_score: 53.3043
248
+ 2025-09-23 16:10:34,030 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.4286 | Val rms_score: 51.4965
249
+ 2025-09-23 16:10:37,336 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.3423 | Val rms_score: 55.1529
250
+ 2025-09-23 16:10:40,601 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.2604 | Val rms_score: 54.9146
251
+ 2025-09-23 16:10:43,882 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.2437 | Val rms_score: 54.8047
252
+ 2025-09-23 16:10:47,074 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.1801 | Val rms_score: 56.2482
253
+ 2025-09-23 16:10:50,562 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.1726 | Val rms_score: 56.1988
254
+ 2025-09-23 16:10:53,893 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1399 | Val rms_score: 56.6824
255
+ 2025-09-23 16:10:57,129 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1176 | Val rms_score: 54.9101
256
+ 2025-09-23 16:11:00,104 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.1089 | Val rms_score: 55.0056
257
+ 2025-09-23 16:11:03,026 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0982 | Val rms_score: 55.9964
258
+ 2025-09-23 16:11:06,690 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0919 | Val rms_score: 55.5890
259
+ 2025-09-23 16:11:10,074 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0997 | Val rms_score: 56.1309
260
+ 2025-09-23 16:11:13,430 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0863 | Val rms_score: 55.2265
261
+ 2025-09-23 16:11:16,885 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0891 | Val rms_score: 55.2649
262
+ 2025-09-23 16:11:20,307 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0789 | Val rms_score: 57.0729
263
+ 2025-09-23 16:11:23,397 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0703 | Val rms_score: 54.5726
264
+ 2025-09-23 16:11:26,612 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0681 | Val rms_score: 56.2811
265
+ 2025-09-23 16:11:29,842 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0952 | Val rms_score: 55.9929
266
+ 2025-09-23 16:11:33,135 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0655 | Val rms_score: 55.1508
267
+ 2025-09-23 16:11:36,382 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0636 | Val rms_score: 54.9086
268
+ 2025-09-23 16:11:39,872 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0677 | Val rms_score: 54.4931
269
+ 2025-09-23 16:11:43,282 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0644 | Val rms_score: 56.3476
270
+ 2025-09-23 16:11:46,660 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0807 | Val rms_score: 54.9040
271
+ 2025-09-23 16:11:49,970 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0688 | Val rms_score: 54.6308
272
+ 2025-09-23 16:11:53,235 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0565 | Val rms_score: 54.4854
273
+ 2025-09-23 16:11:56,865 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0521 | Val rms_score: 54.8628
274
+ 2025-09-23 16:12:00,254 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0625 | Val rms_score: 53.2328
275
+ 2025-09-23 16:12:03,604 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0600 | Val rms_score: 53.9883
276
+ 2025-09-23 16:12:06,967 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0621 | Val rms_score: 53.6648
277
+ 2025-09-23 16:12:10,407 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0565 | Val rms_score: 54.0352
278
+ 2025-09-23 16:12:14,073 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0543 | Val rms_score: 53.7250
279
+ 2025-09-23 16:12:17,504 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0487 | Val rms_score: 54.4356
280
+ 2025-09-23 16:12:20,420 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0440 | Val rms_score: 54.8511
281
+ 2025-09-23 16:12:23,368 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0435 | Val rms_score: 54.0993
282
+ 2025-09-23 16:12:26,716 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0433 | Val rms_score: 55.2133
283
+ 2025-09-23 16:12:30,373 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0452 | Val rms_score: 53.6537
284
+ 2025-09-23 16:12:33,648 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0267 | Val rms_score: 55.3331
285
+ 2025-09-23 16:12:37,054 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0419 | Val rms_score: 53.9443
286
+ 2025-09-23 16:12:40,403 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0450 | Val rms_score: 55.0959
287
+ 2025-09-23 16:12:43,123 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0495 | Val rms_score: 54.3647
288
+ 2025-09-23 16:12:46,649 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0467 | Val rms_score: 54.2524
289
+ 2025-09-23 16:12:50,841 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0415 | Val rms_score: 54.5548
290
+ 2025-09-23 16:12:54,080 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0445 | Val rms_score: 53.7829
291
+ 2025-09-23 16:12:57,375 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0374 | Val rms_score: 55.2459
292
+ 2025-09-23 16:13:00,627 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0394 | Val rms_score: 54.7028
293
+ 2025-09-23 16:13:04,347 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0407 | Val rms_score: 53.4607
294
+ 2025-09-23 16:13:07,634 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0421 | Val rms_score: 53.8731
295
+ 2025-09-23 16:13:10,966 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0366 | Val rms_score: 54.2852
296
+ 2025-09-23 16:13:14,376 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0357 | Val rms_score: 54.1573
297
+ 2025-09-23 16:13:17,665 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0376 | Val rms_score: 54.3116
298
+ 2025-09-23 16:13:21,340 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0363 | Val rms_score: 53.4057
299
+ 2025-09-23 16:13:24,688 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0373 | Val rms_score: 53.2463
300
+ 2025-09-23 16:13:27,996 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0335 | Val rms_score: 53.9092
301
+ 2025-09-23 16:13:31,356 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0309 | Val rms_score: 53.6502
302
+ 2025-09-23 16:13:34,707 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0346 | Val rms_score: 53.7358
303
+ 2025-09-23 16:13:38,250 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0261 | Val rms_score: 53.9274
304
+ 2025-09-23 16:13:41,250 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0290 | Val rms_score: 53.6667
305
+ 2025-09-23 16:13:44,164 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0357 | Val rms_score: 53.8288
306
+ 2025-09-23 16:13:47,582 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0292 | Val rms_score: 54.1736
307
+ 2025-09-23 16:13:50,953 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0329 | Val rms_score: 53.5181
308
+ 2025-09-23 16:13:54,484 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0332 | Val rms_score: 54.2348
309
+ 2025-09-23 16:13:57,829 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0264 | Val rms_score: 53.3717
310
+ 2025-09-23 16:14:00,640 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0286 | Val rms_score: 54.4662
311
+ 2025-09-23 16:14:04,002 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0299 | Val rms_score: 54.6656
312
+ 2025-09-23 16:14:07,362 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0270 | Val rms_score: 53.6906
313
+ 2025-09-23 16:14:11,066 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0308 | Val rms_score: 54.1202
314
+ 2025-09-23 16:14:14,389 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0259 | Val rms_score: 53.9152
315
+ 2025-09-23 16:14:17,781 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0260 | Val rms_score: 53.9365
316
+ 2025-09-23 16:14:21,141 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0283 | Val rms_score: 53.8003
317
+ 2025-09-23 16:14:24,555 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0259 | Val rms_score: 53.5949
318
+ 2025-09-23 16:14:28,162 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0269 | Val rms_score: 53.8700
319
+ 2025-09-23 16:14:31,522 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0309 | Val rms_score: 54.2039
320
+ 2025-09-23 16:14:34,852 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0255 | Val rms_score: 54.8122
321
+ 2025-09-23 16:14:38,207 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0233 | Val rms_score: 54.0177
322
+ 2025-09-23 16:14:41,543 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0212 | Val rms_score: 53.8244
323
+ 2025-09-23 16:14:45,162 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0246 | Val rms_score: 53.2493
324
+ 2025-09-23 16:14:48,412 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0238 | Val rms_score: 53.1124
325
+ 2025-09-23 16:14:51,647 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0225 | Val rms_score: 53.0574
326
+ 2025-09-23 16:14:54,837 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0260 | Val rms_score: 54.3885
327
+ 2025-09-23 16:14:58,174 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0252 | Val rms_score: 53.1988
328
+ 2025-09-23 16:15:01,377 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0272 | Val rms_score: 53.4111
329
+ 2025-09-23 16:15:04,296 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0246 | Val rms_score: 53.1587
330
+ 2025-09-23 16:15:07,612 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0247 | Val rms_score: 53.5860
331
+ 2025-09-23 16:15:10,994 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0188 | Val rms_score: 53.5242
332
+ 2025-09-23 16:15:14,235 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0202 | Val rms_score: 53.9253
333
+ 2025-09-23 16:15:17,925 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0255 | Val rms_score: 53.0505
334
+ 2025-09-23 16:15:21,088 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0216 | Val rms_score: 53.0964
335
+ 2025-09-23 16:15:24,430 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0226 | Val rms_score: 53.4543
336
+ 2025-09-23 16:15:27,762 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0220 | Val rms_score: 53.6302
337
+ 2025-09-23 16:15:32,046 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0199 | Val rms_score: 53.2150
338
+ 2025-09-23 16:15:35,690 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0192 | Val rms_score: 53.4553
339
+ 2025-09-23 16:15:38,993 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0198 | Val rms_score: 53.0611
340
+ 2025-09-23 16:15:42,322 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0188 | Val rms_score: 54.2889
341
+ 2025-09-23 16:15:45,519 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0202 | Val rms_score: 52.6335
342
+ 2025-09-23 16:15:45,917 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Test rms_score: 48.3383
343
+ 2025-09-23 16:15:46,244 - logs_modchembert_clearance_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 47.8418, Std Dev: 0.4070
logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_delaney_epochs100_batch_size32_20250923_161546.log ADDED
@@ -0,0 +1,349 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 16:15:46,245 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Running benchmark for dataset: delaney
2
+ 2025-09-23 16:15:46,245 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - dataset: delaney, tasks: ['measured_log_solubility_in_mols_per_litre'], epochs: 100, learning rate: 3e-05, transform: True
3
+ 2025-09-23 16:15:46,256 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset delaney at 2025-09-23_16-15-46
4
+ 2025-09-23 16:15:49,467 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.2532 | Val rms_score: 1.0595
5
+ 2025-09-23 16:15:49,467 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 29
6
+ 2025-09-23 16:15:49,994 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0595
7
+ 2025-09-23 16:15:53,377 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1088 | Val rms_score: 0.9917
8
+ 2025-09-23 16:15:53,548 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 58
9
+ 2025-09-23 16:15:54,127 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.9917
10
+ 2025-09-23 16:15:57,535 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0760 | Val rms_score: 1.0079
11
+ 2025-09-23 16:16:01,071 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0605 | Val rms_score: 0.9093
12
+ 2025-09-23 16:16:01,245 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 116
13
+ 2025-09-23 16:16:01,773 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.9093
14
+ 2025-09-23 16:16:05,314 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0488 | Val rms_score: 0.9311
15
+ 2025-09-23 16:16:08,799 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0501 | Val rms_score: 0.9452
16
+ 2025-09-23 16:16:12,625 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0394 | Val rms_score: 0.9710
17
+ 2025-09-23 16:16:16,117 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0366 | Val rms_score: 0.9289
18
+ 2025-09-23 16:16:19,316 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0338 | Val rms_score: 0.9424
19
+ 2025-09-23 16:16:22,630 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0277 | Val rms_score: 0.9215
20
+ 2025-09-23 16:16:26,076 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0230 | Val rms_score: 0.9344
21
+ 2025-09-23 16:16:29,924 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0240 | Val rms_score: 0.9015
22
+ 2025-09-23 16:16:30,126 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 348
23
+ 2025-09-23 16:16:30,668 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val rms_score: 0.9015
24
+ 2025-09-23 16:16:33,791 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0249 | Val rms_score: 0.9404
25
+ 2025-09-23 16:16:37,300 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0181 | Val rms_score: 0.8927
26
+ 2025-09-23 16:16:37,506 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 406
27
+ 2025-09-23 16:16:38,078 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val rms_score: 0.8927
28
+ 2025-09-23 16:16:41,534 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0197 | Val rms_score: 0.9033
29
+ 2025-09-23 16:16:45,085 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0179 | Val rms_score: 0.8869
30
+ 2025-09-23 16:16:45,540 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 464
31
+ 2025-09-23 16:16:46,080 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 16 with val rms_score: 0.8869
32
+ 2025-09-23 16:16:49,561 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0157 | Val rms_score: 0.9576
33
+ 2025-09-23 16:16:53,199 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0162 | Val rms_score: 0.9403
34
+ 2025-09-23 16:16:56,741 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0154 | Val rms_score: 0.9107
35
+ 2025-09-23 16:17:00,261 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0167 | Val rms_score: 0.9372
36
+ 2025-09-23 16:17:03,761 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0135 | Val rms_score: 0.8995
37
+ 2025-09-23 16:17:07,622 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0133 | Val rms_score: 0.9455
38
+ 2025-09-23 16:17:11,176 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0131 | Val rms_score: 0.9300
39
+ 2025-09-23 16:17:14,593 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0163 | Val rms_score: 0.9167
40
+ 2025-09-23 16:17:17,988 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0170 | Val rms_score: 0.8995
41
+ 2025-09-23 16:17:21,400 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0151 | Val rms_score: 0.8814
42
+ 2025-09-23 16:17:21,815 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 754
43
+ 2025-09-23 16:17:22,362 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 26 with val rms_score: 0.8814
44
+ 2025-09-23 16:17:25,756 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0112 | Val rms_score: 0.8842
45
+ 2025-09-23 16:17:29,079 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0111 | Val rms_score: 0.8785
46
+ 2025-09-23 16:17:29,273 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 812
47
+ 2025-09-23 16:17:29,823 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 28 with val rms_score: 0.8785
48
+ 2025-09-23 16:17:33,046 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0111 | Val rms_score: 0.9217
49
+ 2025-09-23 16:17:36,481 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0123 | Val rms_score: 0.9012
50
+ 2025-09-23 16:17:39,792 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0098 | Val rms_score: 0.8997
51
+ 2025-09-23 16:17:43,123 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0103 | Val rms_score: 0.9153
52
+ 2025-09-23 16:17:46,561 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0106 | Val rms_score: 0.9164
53
+ 2025-09-23 16:17:50,007 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0135 | Val rms_score: 0.8998
54
+ 2025-09-23 16:17:54,553 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0086 | Val rms_score: 0.9056
55
+ 2025-09-23 16:17:58,107 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0088 | Val rms_score: 0.9209
56
+ 2025-09-23 16:18:02,054 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0090 | Val rms_score: 0.9135
57
+ 2025-09-23 16:18:05,595 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0103 | Val rms_score: 0.9080
58
+ 2025-09-23 16:18:09,142 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0102 | Val rms_score: 0.8904
59
+ 2025-09-23 16:18:12,683 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0118 | Val rms_score: 0.9341
60
+ 2025-09-23 16:18:16,248 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0093 | Val rms_score: 0.8920
61
+ 2025-09-23 16:18:20,095 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0066 | Val rms_score: 0.9142
62
+ 2025-09-23 16:18:23,656 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0080 | Val rms_score: 0.9040
63
+ 2025-09-23 16:18:27,146 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0071 | Val rms_score: 0.8906
64
+ 2025-09-23 16:18:30,713 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0060 | Val rms_score: 0.9007
65
+ 2025-09-23 16:18:34,326 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0065 | Val rms_score: 0.9232
66
+ 2025-09-23 16:18:38,144 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0070 | Val rms_score: 0.9031
67
+ 2025-09-23 16:18:41,634 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0068 | Val rms_score: 0.9233
68
+ 2025-09-23 16:18:44,840 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0072 | Val rms_score: 0.8960
69
+ 2025-09-23 16:18:48,260 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0059 | Val rms_score: 0.9120
70
+ 2025-09-23 16:18:51,594 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0065 | Val rms_score: 0.9102
71
+ 2025-09-23 16:18:55,071 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0076 | Val rms_score: 0.9142
72
+ 2025-09-23 16:18:58,552 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0067 | Val rms_score: 0.9331
73
+ 2025-09-23 16:19:01,991 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0060 | Val rms_score: 0.9216
74
+ 2025-09-23 16:19:05,462 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0062 | Val rms_score: 0.9100
75
+ 2025-09-23 16:19:09,000 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0063 | Val rms_score: 0.9076
76
+ 2025-09-23 16:19:12,821 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0067 | Val rms_score: 0.9118
77
+ 2025-09-23 16:19:16,369 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0071 | Val rms_score: 0.9189
78
+ 2025-09-23 16:19:19,995 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0072 | Val rms_score: 0.8984
79
+ 2025-09-23 16:19:23,427 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0065 | Val rms_score: 0.9465
80
+ 2025-09-23 16:19:26,923 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0065 | Val rms_score: 0.9021
81
+ 2025-09-23 16:19:30,737 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0064 | Val rms_score: 0.9052
82
+ 2025-09-23 16:19:34,235 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0063 | Val rms_score: 0.9129
83
+ 2025-09-23 16:19:37,812 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0061 | Val rms_score: 0.9316
84
+ 2025-09-23 16:19:41,411 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0062 | Val rms_score: 0.9237
85
+ 2025-09-23 16:19:44,937 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0082 | Val rms_score: 0.8822
86
+ 2025-09-23 16:19:48,845 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0067 | Val rms_score: 0.9417
87
+ 2025-09-23 16:19:52,452 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0057 | Val rms_score: 0.9204
88
+ 2025-09-23 16:19:56,622 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0032 | Val rms_score: 0.9129
89
+ 2025-09-23 16:19:59,785 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0050 | Val rms_score: 0.9154
90
+ 2025-09-23 16:20:03,299 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0048 | Val rms_score: 0.9252
91
+ 2025-09-23 16:20:06,728 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0047 | Val rms_score: 0.9271
92
+ 2025-09-23 16:20:10,187 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0055 | Val rms_score: 0.9369
93
+ 2025-09-23 16:20:13,761 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0057 | Val rms_score: 0.9141
94
+ 2025-09-23 16:20:17,320 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0054 | Val rms_score: 0.9140
95
+ 2025-09-23 16:20:20,774 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0038 | Val rms_score: 0.9193
96
+ 2025-09-23 16:20:24,469 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0052 | Val rms_score: 0.9146
97
+ 2025-09-23 16:20:27,887 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0046 | Val rms_score: 0.9219
98
+ 2025-09-23 16:20:31,265 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0049 | Val rms_score: 0.9168
99
+ 2025-09-23 16:20:34,699 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0048 | Val rms_score: 0.9192
100
+ 2025-09-23 16:20:38,187 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0049 | Val rms_score: 0.9207
101
+ 2025-09-23 16:20:41,984 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0046 | Val rms_score: 0.9178
102
+ 2025-09-23 16:20:45,493 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0047 | Val rms_score: 0.9178
103
+ 2025-09-23 16:20:49,143 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0048 | Val rms_score: 0.9264
104
+ 2025-09-23 16:20:52,643 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0043 | Val rms_score: 0.9241
105
+ 2025-09-23 16:20:56,174 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0047 | Val rms_score: 0.9117
106
+ 2025-09-23 16:20:59,974 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0047 | Val rms_score: 0.9187
107
+ 2025-09-23 16:21:03,533 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0044 | Val rms_score: 0.8958
108
+ 2025-09-23 16:21:07,134 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0047 | Val rms_score: 0.9062
109
+ 2025-09-23 16:21:10,229 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0049 | Val rms_score: 0.9249
110
+ 2025-09-23 16:21:13,521 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0049 | Val rms_score: 0.9067
111
+ 2025-09-23 16:21:17,039 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0044 | Val rms_score: 0.9276
112
+ 2025-09-23 16:21:20,606 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0042 | Val rms_score: 0.9204
113
+ 2025-09-23 16:21:24,063 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0043 | Val rms_score: 0.9195
114
+ 2025-09-23 16:21:27,641 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0046 | Val rms_score: 0.9180
115
+ 2025-09-23 16:21:31,198 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0050 | Val rms_score: 0.9224
116
+ 2025-09-23 16:21:35,018 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0045 | Val rms_score: 0.9083
117
+ 2025-09-23 16:21:38,540 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0043 | Val rms_score: 0.9300
118
+ 2025-09-23 16:21:42,032 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0038 | Val rms_score: 0.9289
119
+ 2025-09-23 16:21:45,602 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0038 | Val rms_score: 0.9279
120
+ 2025-09-23 16:21:46,049 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Test rms_score: 0.7681
121
+ 2025-09-23 16:21:46,350 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset delaney at 2025-09-23_16-21-46
122
+ 2025-09-23 16:21:49,632 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.2759 | Val rms_score: 1.0618
123
+ 2025-09-23 16:21:49,632 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 29
124
+ 2025-09-23 16:21:50,194 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0618
125
+ 2025-09-23 16:21:53,531 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.0964 | Val rms_score: 0.9919
126
+ 2025-09-23 16:21:53,700 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 58
127
+ 2025-09-23 16:21:54,252 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.9919
128
+ 2025-09-23 16:21:57,587 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0824 | Val rms_score: 0.9620
129
+ 2025-09-23 16:21:57,765 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 87
130
+ 2025-09-23 16:21:58,297 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.9620
131
+ 2025-09-23 16:22:01,672 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0718 | Val rms_score: 1.0094
132
+ 2025-09-23 16:22:05,138 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0566 | Val rms_score: 0.9508
133
+ 2025-09-23 16:22:05,318 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 145
134
+ 2025-09-23 16:22:05,866 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.9508
135
+ 2025-09-23 16:22:09,375 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0445 | Val rms_score: 0.8799
136
+ 2025-09-23 16:22:09,856 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 174
137
+ 2025-09-23 16:22:10,378 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.8799
138
+ 2025-09-23 16:22:13,875 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0492 | Val rms_score: 0.9140
139
+ 2025-09-23 16:22:17,398 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0407 | Val rms_score: 0.9117
140
+ 2025-09-23 16:22:20,891 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0330 | Val rms_score: 0.8867
141
+ 2025-09-23 16:22:24,025 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0290 | Val rms_score: 0.9152
142
+ 2025-09-23 16:22:27,212 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0286 | Val rms_score: 0.8915
143
+ 2025-09-23 16:22:31,047 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0269 | Val rms_score: 0.9190
144
+ 2025-09-23 16:22:34,532 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0267 | Val rms_score: 0.9315
145
+ 2025-09-23 16:22:38,028 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0190 | Val rms_score: 0.9368
146
+ 2025-09-23 16:22:41,580 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0203 | Val rms_score: 0.9127
147
+ 2025-09-23 16:22:45,098 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0197 | Val rms_score: 0.8896
148
+ 2025-09-23 16:22:48,967 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0214 | Val rms_score: 0.9217
149
+ 2025-09-23 16:22:52,461 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0198 | Val rms_score: 0.8936
150
+ 2025-09-23 16:22:55,998 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0182 | Val rms_score: 0.9184
151
+ 2025-09-23 16:22:59,518 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0151 | Val rms_score: 0.9274
152
+ 2025-09-23 16:23:02,893 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0124 | Val rms_score: 0.9456
153
+ 2025-09-23 16:23:06,614 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0136 | Val rms_score: 0.9319
154
+ 2025-09-23 16:23:10,068 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0145 | Val rms_score: 0.9224
155
+ 2025-09-23 16:23:13,500 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0127 | Val rms_score: 0.9331
156
+ 2025-09-23 16:23:16,904 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0111 | Val rms_score: 0.9615
157
+ 2025-09-23 16:23:20,449 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0187 | Val rms_score: 0.9467
158
+ 2025-09-23 16:23:24,235 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0144 | Val rms_score: 0.9343
159
+ 2025-09-23 16:23:27,871 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0133 | Val rms_score: 0.9131
160
+ 2025-09-23 16:23:31,484 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0110 | Val rms_score: 0.9446
161
+ 2025-09-23 16:23:34,909 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0101 | Val rms_score: 0.9385
162
+ 2025-09-23 16:23:38,046 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0108 | Val rms_score: 0.9403
163
+ 2025-09-23 16:23:41,585 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0119 | Val rms_score: 0.9260
164
+ 2025-09-23 16:23:44,979 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0106 | Val rms_score: 0.9347
165
+ 2025-09-23 16:23:48,381 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0106 | Val rms_score: 0.9170
166
+ 2025-09-23 16:23:52,759 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0117 | Val rms_score: 0.9408
167
+ 2025-09-23 16:23:56,184 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0111 | Val rms_score: 0.9186
168
+ 2025-09-23 16:24:00,032 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0094 | Val rms_score: 0.9303
169
+ 2025-09-23 16:24:03,691 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0087 | Val rms_score: 0.9296
170
+ 2025-09-23 16:24:07,208 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0088 | Val rms_score: 0.9688
171
+ 2025-09-23 16:24:10,828 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0106 | Val rms_score: 0.9434
172
+ 2025-09-23 16:24:14,368 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0099 | Val rms_score: 0.9745
173
+ 2025-09-23 16:24:18,135 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0107 | Val rms_score: 0.9210
174
+ 2025-09-23 16:24:21,700 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0086 | Val rms_score: 0.9398
175
+ 2025-09-23 16:24:25,209 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0083 | Val rms_score: 0.9290
176
+ 2025-09-23 16:24:28,612 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0059 | Val rms_score: 0.9387
177
+ 2025-09-23 16:24:32,232 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0078 | Val rms_score: 0.9419
178
+ 2025-09-23 16:24:36,069 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0083 | Val rms_score: 0.9377
179
+ 2025-09-23 16:24:39,664 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0084 | Val rms_score: 0.9362
180
+ 2025-09-23 16:24:43,176 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0080 | Val rms_score: 0.9444
181
+ 2025-09-23 16:24:46,493 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0073 | Val rms_score: 0.9496
182
+ 2025-09-23 16:24:49,639 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0072 | Val rms_score: 0.9417
183
+ 2025-09-23 16:24:53,107 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0075 | Val rms_score: 0.9594
184
+ 2025-09-23 16:24:56,542 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0071 | Val rms_score: 0.9548
185
+ 2025-09-23 16:25:00,023 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0067 | Val rms_score: 0.9324
186
+ 2025-09-23 16:25:03,572 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0066 | Val rms_score: 0.9415
187
+ 2025-09-23 16:25:07,060 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0070 | Val rms_score: 0.9358
188
+ 2025-09-23 16:25:10,858 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0080 | Val rms_score: 0.9972
189
+ 2025-09-23 16:25:14,376 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0068 | Val rms_score: 0.9395
190
+ 2025-09-23 16:25:17,938 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0056 | Val rms_score: 0.9666
191
+ 2025-09-23 16:25:21,665 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0061 | Val rms_score: 0.9792
192
+ 2025-09-23 16:25:25,197 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0061 | Val rms_score: 0.9614
193
+ 2025-09-23 16:25:29,066 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0066 | Val rms_score: 0.9687
194
+ 2025-09-23 16:25:32,581 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0061 | Val rms_score: 0.9532
195
+ 2025-09-23 16:25:36,185 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0054 | Val rms_score: 0.9606
196
+ 2025-09-23 16:25:39,686 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0057 | Val rms_score: 0.9470
197
+ 2025-09-23 16:25:43,256 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0064 | Val rms_score: 0.9510
198
+ 2025-09-23 16:25:46,959 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0059 | Val rms_score: 0.9634
199
+ 2025-09-23 16:25:50,326 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0052 | Val rms_score: 0.9453
200
+ 2025-09-23 16:25:54,663 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0032 | Val rms_score: 0.9671
201
+ 2025-09-23 16:25:57,948 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0057 | Val rms_score: 0.9636
202
+ 2025-09-23 16:26:01,351 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0053 | Val rms_score: 0.9387
203
+ 2025-09-23 16:26:04,838 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0052 | Val rms_score: 0.9597
204
+ 2025-09-23 16:26:08,241 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0051 | Val rms_score: 0.9544
205
+ 2025-09-23 16:26:11,734 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0050 | Val rms_score: 0.9574
206
+ 2025-09-23 16:26:15,251 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0053 | Val rms_score: 0.9123
207
+ 2025-09-23 16:26:18,726 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0054 | Val rms_score: 0.9544
208
+ 2025-09-23 16:26:22,556 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0052 | Val rms_score: 0.9548
209
+ 2025-09-23 16:26:26,120 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0051 | Val rms_score: 0.9401
210
+ 2025-09-23 16:26:29,626 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0053 | Val rms_score: 0.9416
211
+ 2025-09-23 16:26:33,195 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0058 | Val rms_score: 0.9609
212
+ 2025-09-23 16:26:36,765 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0055 | Val rms_score: 0.9310
213
+ 2025-09-23 16:26:40,607 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0059 | Val rms_score: 0.9339
214
+ 2025-09-23 16:26:44,062 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0042 | Val rms_score: 0.9285
215
+ 2025-09-23 16:26:47,423 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0048 | Val rms_score: 0.9491
216
+ 2025-09-23 16:26:50,797 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0049 | Val rms_score: 0.9375
217
+ 2025-09-23 16:26:54,225 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0046 | Val rms_score: 0.9492
218
+ 2025-09-23 16:26:57,923 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0042 | Val rms_score: 0.9359
219
+ 2025-09-23 16:27:01,513 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0048 | Val rms_score: 0.9494
220
+ 2025-09-23 16:27:05,022 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0043 | Val rms_score: 0.9481
221
+ 2025-09-23 16:27:08,551 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0048 | Val rms_score: 0.9486
222
+ 2025-09-23 16:27:12,095 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0048 | Val rms_score: 0.9508
223
+ 2025-09-23 16:27:15,621 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0044 | Val rms_score: 0.9402
224
+ 2025-09-23 16:27:18,741 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0044 | Val rms_score: 0.9338
225
+ 2025-09-23 16:27:22,243 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0043 | Val rms_score: 0.9609
226
+ 2025-09-23 16:27:25,792 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0044 | Val rms_score: 0.9486
227
+ 2025-09-23 16:27:29,123 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0045 | Val rms_score: 0.9360
228
+ 2025-09-23 16:27:32,992 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0052 | Val rms_score: 0.9592
229
+ 2025-09-23 16:27:36,541 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0044 | Val rms_score: 0.9433
230
+ 2025-09-23 16:27:40,009 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0042 | Val rms_score: 0.9441
231
+ 2025-09-23 16:27:43,557 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0044 | Val rms_score: 0.9530
232
+ 2025-09-23 16:27:43,998 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Test rms_score: 0.7691
233
+ 2025-09-23 16:27:44,299 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset delaney at 2025-09-23_16-27-44
234
+ 2025-09-23 16:27:47,521 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.2640 | Val rms_score: 1.1129
235
+ 2025-09-23 16:27:47,521 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 29
236
+ 2025-09-23 16:27:48,055 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.1129
237
+ 2025-09-23 16:27:51,610 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.1029 | Val rms_score: 0.9465
238
+ 2025-09-23 16:27:51,782 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 58
239
+ 2025-09-23 16:27:52,359 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.9465
240
+ 2025-09-23 16:27:55,887 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.0754 | Val rms_score: 0.9950
241
+ 2025-09-23 16:27:59,422 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.0625 | Val rms_score: 1.0158
242
+ 2025-09-23 16:28:02,990 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0520 | Val rms_score: 0.9715
243
+ 2025-09-23 16:28:06,535 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0447 | Val rms_score: 0.9159
244
+ 2025-09-23 16:28:07,012 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 174
245
+ 2025-09-23 16:28:07,577 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.9159
246
+ 2025-09-23 16:28:11,108 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0459 | Val rms_score: 0.9148
247
+ 2025-09-23 16:28:11,294 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 203
248
+ 2025-09-23 16:28:11,835 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 7 with val rms_score: 0.9148
249
+ 2025-09-23 16:28:15,239 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0412 | Val rms_score: 0.9233
250
+ 2025-09-23 16:28:18,672 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0292 | Val rms_score: 0.9217
251
+ 2025-09-23 16:28:21,643 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0268 | Val rms_score: 0.9164
252
+ 2025-09-23 16:28:25,028 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0300 | Val rms_score: 0.9301
253
+ 2025-09-23 16:28:28,641 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0252 | Val rms_score: 0.8913
254
+ 2025-09-23 16:28:28,806 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 348
255
+ 2025-09-23 16:28:29,362 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val rms_score: 0.8913
256
+ 2025-09-23 16:28:32,447 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0256 | Val rms_score: 0.9262
257
+ 2025-09-23 16:28:35,937 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0238 | Val rms_score: 0.9142
258
+ 2025-09-23 16:28:39,532 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0205 | Val rms_score: 0.8876
259
+ 2025-09-23 16:28:39,720 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 435
260
+ 2025-09-23 16:28:40,265 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 15 with val rms_score: 0.8876
261
+ 2025-09-23 16:28:43,846 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0179 | Val rms_score: 0.8729
262
+ 2025-09-23 16:28:44,317 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Global step of best model: 464
263
+ 2025-09-23 16:28:44,860 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Best model saved at epoch 16 with val rms_score: 0.8729
264
+ 2025-09-23 16:28:48,488 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0164 | Val rms_score: 0.8906
265
+ 2025-09-23 16:28:51,925 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0173 | Val rms_score: 0.9094
266
+ 2025-09-23 16:28:55,432 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0178 | Val rms_score: 0.8917
267
+ 2025-09-23 16:28:58,950 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0143 | Val rms_score: 0.8988
268
+ 2025-09-23 16:29:02,524 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0130 | Val rms_score: 0.8896
269
+ 2025-09-23 16:29:06,383 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0150 | Val rms_score: 0.8927
270
+ 2025-09-23 16:29:09,912 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0171 | Val rms_score: 0.9523
271
+ 2025-09-23 16:29:13,529 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0176 | Val rms_score: 0.9970
272
+ 2025-09-23 16:29:17,116 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0142 | Val rms_score: 0.9356
273
+ 2025-09-23 16:29:20,627 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0124 | Val rms_score: 0.9339
274
+ 2025-09-23 16:29:24,403 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0136 | Val rms_score: 0.9255
275
+ 2025-09-23 16:29:27,825 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0095 | Val rms_score: 0.9649
276
+ 2025-09-23 16:29:31,219 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0117 | Val rms_score: 0.9234
277
+ 2025-09-23 16:29:34,643 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0114 | Val rms_score: 0.9560
278
+ 2025-09-23 16:29:38,085 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0092 | Val rms_score: 0.9189
279
+ 2025-09-23 16:29:41,517 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0172 | Val rms_score: 0.9400
280
+ 2025-09-23 16:29:44,601 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0128 | Val rms_score: 0.9091
281
+ 2025-09-23 16:29:48,150 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0097 | Val rms_score: 0.9403
282
+ 2025-09-23 16:29:52,650 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0090 | Val rms_score: 0.9186
283
+ 2025-09-23 16:29:56,226 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0084 | Val rms_score: 0.9617
284
+ 2025-09-23 16:30:00,073 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0083 | Val rms_score: 0.9325
285
+ 2025-09-23 16:30:03,503 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0099 | Val rms_score: 0.9550
286
+ 2025-09-23 16:30:07,021 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0075 | Val rms_score: 0.9528
287
+ 2025-09-23 16:30:10,579 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0080 | Val rms_score: 0.9534
288
+ 2025-09-23 16:30:14,155 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0071 | Val rms_score: 0.9442
289
+ 2025-09-23 16:30:17,994 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0076 | Val rms_score: 0.9590
290
+ 2025-09-23 16:30:21,566 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0077 | Val rms_score: 0.9411
291
+ 2025-09-23 16:30:25,122 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0076 | Val rms_score: 0.9558
292
+ 2025-09-23 16:30:28,765 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0084 | Val rms_score: 0.9416
293
+ 2025-09-23 16:30:32,373 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0075 | Val rms_score: 0.9677
294
+ 2025-09-23 16:30:36,269 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0069 | Val rms_score: 0.9653
295
+ 2025-09-23 16:30:39,786 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0070 | Val rms_score: 0.9481
296
+ 2025-09-23 16:30:43,240 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0066 | Val rms_score: 0.9354
297
+ 2025-09-23 16:30:46,671 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0083 | Val rms_score: 0.9708
298
+ 2025-09-23 16:30:50,108 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0078 | Val rms_score: 0.9602
299
+ 2025-09-23 16:30:53,777 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0061 | Val rms_score: 0.9592
300
+ 2025-09-23 16:30:57,012 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0063 | Val rms_score: 0.9282
301
+ 2025-09-23 16:31:00,341 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0064 | Val rms_score: 0.9415
302
+ 2025-09-23 16:31:03,944 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0068 | Val rms_score: 0.9566
303
+ 2025-09-23 16:31:07,494 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0074 | Val rms_score: 0.9369
304
+ 2025-09-23 16:31:11,302 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0073 | Val rms_score: 0.9449
305
+ 2025-09-23 16:31:14,967 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0072 | Val rms_score: 0.9722
306
+ 2025-09-23 16:31:18,617 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0082 | Val rms_score: 0.9555
307
+ 2025-09-23 16:31:22,106 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0064 | Val rms_score: 0.9614
308
+ 2025-09-23 16:31:25,736 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0062 | Val rms_score: 0.9568
309
+ 2025-09-23 16:31:29,578 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0054 | Val rms_score: 0.9408
310
+ 2025-09-23 16:31:33,061 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0059 | Val rms_score: 0.9525
311
+ 2025-09-23 16:31:36,522 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0054 | Val rms_score: 0.9713
312
+ 2025-09-23 16:31:40,017 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0052 | Val rms_score: 0.9559
313
+ 2025-09-23 16:31:43,442 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0059 | Val rms_score: 0.9451
314
+ 2025-09-23 16:31:47,120 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0048 | Val rms_score: 0.9537
315
+ 2025-09-23 16:31:50,539 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0058 | Val rms_score: 0.9470
316
+ 2025-09-23 16:31:54,988 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0031 | Val rms_score: 0.9591
317
+ 2025-09-23 16:31:58,413 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0054 | Val rms_score: 0.9875
318
+ 2025-09-23 16:32:01,960 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0060 | Val rms_score: 0.9846
319
+ 2025-09-23 16:32:05,754 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0061 | Val rms_score: 0.9571
320
+ 2025-09-23 16:32:08,939 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0059 | Val rms_score: 0.9718
321
+ 2025-09-23 16:32:12,055 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0055 | Val rms_score: 0.9597
322
+ 2025-09-23 16:32:15,674 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0048 | Val rms_score: 0.9547
323
+ 2025-09-23 16:32:19,179 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0053 | Val rms_score: 0.9654
324
+ 2025-09-23 16:32:23,014 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0046 | Val rms_score: 0.9783
325
+ 2025-09-23 16:32:26,574 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0045 | Val rms_score: 0.9747
326
+ 2025-09-23 16:32:30,134 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0055 | Val rms_score: 0.9671
327
+ 2025-09-23 16:32:33,503 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0047 | Val rms_score: 0.9771
328
+ 2025-09-23 16:32:36,931 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0050 | Val rms_score: 0.9493
329
+ 2025-09-23 16:32:40,636 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0046 | Val rms_score: 0.9601
330
+ 2025-09-23 16:32:44,055 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0040 | Val rms_score: 0.9675
331
+ 2025-09-23 16:32:47,510 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0044 | Val rms_score: 0.9828
332
+ 2025-09-23 16:32:50,985 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0045 | Val rms_score: 0.9727
333
+ 2025-09-23 16:32:54,598 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0045 | Val rms_score: 0.9792
334
+ 2025-09-23 16:32:58,500 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0047 | Val rms_score: 0.9847
335
+ 2025-09-23 16:33:02,155 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0045 | Val rms_score: 0.9659
336
+ 2025-09-23 16:33:05,699 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0045 | Val rms_score: 0.9782
337
+ 2025-09-23 16:33:08,892 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0046 | Val rms_score: 0.9635
338
+ 2025-09-23 16:33:12,454 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0048 | Val rms_score: 0.9520
339
+ 2025-09-23 16:33:16,261 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0045 | Val rms_score: 0.9836
340
+ 2025-09-23 16:33:19,778 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0044 | Val rms_score: 0.9501
341
+ 2025-09-23 16:33:22,782 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0044 | Val rms_score: 0.9713
342
+ 2025-09-23 16:33:25,983 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0039 | Val rms_score: 0.9552
343
+ 2025-09-23 16:33:29,537 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0041 | Val rms_score: 0.9647
344
+ 2025-09-23 16:33:33,367 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0047 | Val rms_score: 0.9757
345
+ 2025-09-23 16:33:36,766 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0043 | Val rms_score: 0.9705
346
+ 2025-09-23 16:33:40,464 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0039 | Val rms_score: 0.9538
347
+ 2025-09-23 16:33:43,962 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0038 | Val rms_score: 0.9745
348
+ 2025-09-23 16:33:44,451 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Test rms_score: 0.7636
349
+ 2025-09-23 16:33:44,765 - logs_modchembert_delaney_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.7669, Std Dev: 0.0024
logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_freesolv_epochs100_batch_size32_20250923_163344.log ADDED
@@ -0,0 +1,367 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 16:33:44,766 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Running benchmark for dataset: freesolv
2
+ 2025-09-23 16:33:44,767 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - dataset: freesolv, tasks: ['y'], epochs: 100, learning rate: 3e-05, transform: True
3
+ 2025-09-23 16:33:44,786 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset freesolv at 2025-09-23_16-33-44
4
+ 2025-09-23 16:33:47,146 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.5331 | Val rms_score: 1.0297
5
+ 2025-09-23 16:33:47,146 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
6
+ 2025-09-23 16:33:47,726 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0297
7
+ 2025-09-23 16:33:50,223 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.2537 | Val rms_score: 1.1179
8
+ 2025-09-23 16:33:52,732 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1884 | Val rms_score: 0.9443
9
+ 2025-09-23 16:33:52,907 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 51
10
+ 2025-09-23 16:33:53,453 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.9443
11
+ 2025-09-23 16:33:56,008 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1581 | Val rms_score: 0.8542
12
+ 2025-09-23 16:33:56,190 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
13
+ 2025-09-23 16:33:56,747 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.8542
14
+ 2025-09-23 16:33:59,308 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1379 | Val rms_score: 0.8880
15
+ 2025-09-23 16:34:01,771 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0288 | Val rms_score: 0.7912
16
+ 2025-09-23 16:34:02,334 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 102
17
+ 2025-09-23 16:34:02,902 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.7912
18
+ 2025-09-23 16:34:05,302 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0703 | Val rms_score: 0.7949
19
+ 2025-09-23 16:34:07,816 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0616 | Val rms_score: 0.7294
20
+ 2025-09-23 16:34:08,000 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 136
21
+ 2025-09-23 16:34:08,573 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val rms_score: 0.7294
22
+ 2025-09-23 16:34:11,028 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0551 | Val rms_score: 0.7646
23
+ 2025-09-23 16:34:13,535 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0496 | Val rms_score: 0.7237
24
+ 2025-09-23 16:34:13,742 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 170
25
+ 2025-09-23 16:34:14,296 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 10 with val rms_score: 0.7237
26
+ 2025-09-23 16:34:16,594 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0616 | Val rms_score: 0.5909
27
+ 2025-09-23 16:34:17,083 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 187
28
+ 2025-09-23 16:34:17,639 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.5909
29
+ 2025-09-23 16:34:20,197 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.4121 | Val rms_score: 1.1368
30
+ 2025-09-23 16:34:22,714 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.1838 | Val rms_score: 1.0212
31
+ 2025-09-23 16:34:25,255 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.1085 | Val rms_score: 0.9405
32
+ 2025-09-23 16:34:27,875 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0846 | Val rms_score: 0.9135
33
+ 2025-09-23 16:34:30,444 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0671 | Val rms_score: 0.8700
34
+ 2025-09-23 16:34:33,076 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0554 | Val rms_score: 0.8197
35
+ 2025-09-23 16:34:35,482 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0265 | Val rms_score: 0.7514
36
+ 2025-09-23 16:34:37,976 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0453 | Val rms_score: 0.7831
37
+ 2025-09-23 16:34:40,448 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0375 | Val rms_score: 0.7463
38
+ 2025-09-23 16:34:42,844 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0377 | Val rms_score: 0.7322
39
+ 2025-09-23 16:34:45,617 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0322 | Val rms_score: 0.7362
40
+ 2025-09-23 16:34:48,045 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0284 | Val rms_score: 0.7028
41
+ 2025-09-23 16:34:50,548 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0287 | Val rms_score: 0.7351
42
+ 2025-09-23 16:34:53,040 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0262 | Val rms_score: 0.7048
43
+ 2025-09-23 16:34:55,560 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0294 | Val rms_score: 0.7558
44
+ 2025-09-23 16:34:58,381 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0308 | Val rms_score: 0.7432
45
+ 2025-09-23 16:35:00,931 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0331 | Val rms_score: 0.8672
46
+ 2025-09-23 16:35:03,486 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0478 | Val rms_score: 0.7091
47
+ 2025-09-23 16:35:06,058 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0202 | Val rms_score: 0.7078
48
+ 2025-09-23 16:35:08,544 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0206 | Val rms_score: 0.7213
49
+ 2025-09-23 16:35:11,411 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0250 | Val rms_score: 0.6831
50
+ 2025-09-23 16:35:13,976 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0178 | Val rms_score: 0.7171
51
+ 2025-09-23 16:35:16,562 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0169 | Val rms_score: 0.6897
52
+ 2025-09-23 16:35:19,191 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0141 | Val rms_score: 0.6886
53
+ 2025-09-23 16:35:21,790 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0172 | Val rms_score: 0.7528
54
+ 2025-09-23 16:35:24,429 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0292 | Val rms_score: 0.7092
55
+ 2025-09-23 16:35:27,025 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0216 | Val rms_score: 0.7163
56
+ 2025-09-23 16:35:29,575 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0147 | Val rms_score: 0.6871
57
+ 2025-09-23 16:35:32,059 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0135 | Val rms_score: 0.7001
58
+ 2025-09-23 16:35:34,592 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0133 | Val rms_score: 0.6771
59
+ 2025-09-23 16:35:37,415 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0136 | Val rms_score: 0.6989
60
+ 2025-09-23 16:35:40,039 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0142 | Val rms_score: 0.7200
61
+ 2025-09-23 16:35:42,239 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0184 | Val rms_score: 0.6859
62
+ 2025-09-23 16:35:44,530 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0142 | Val rms_score: 0.7051
63
+ 2025-09-23 16:35:47,013 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0114 | Val rms_score: 0.6933
64
+ 2025-09-23 16:35:49,897 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0109 | Val rms_score: 0.6929
65
+ 2025-09-23 16:35:52,455 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0106 | Val rms_score: 0.7075
66
+ 2025-09-23 16:35:54,950 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0109 | Val rms_score: 0.6972
67
+ 2025-09-23 16:35:57,444 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0096 | Val rms_score: 0.6937
68
+ 2025-09-23 16:36:00,001 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0095 | Val rms_score: 0.6969
69
+ 2025-09-23 16:36:02,799 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0109 | Val rms_score: 0.7026
70
+ 2025-09-23 16:36:05,330 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0017 | Val rms_score: 0.7022
71
+ 2025-09-23 16:36:07,822 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0080 | Val rms_score: 0.7090
72
+ 2025-09-23 16:36:10,313 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0092 | Val rms_score: 0.7101
73
+ 2025-09-23 16:36:12,763 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0098 | Val rms_score: 0.7225
74
+ 2025-09-23 16:36:15,619 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0109 | Val rms_score: 0.6845
75
+ 2025-09-23 16:36:18,164 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0101 | Val rms_score: 0.6998
76
+ 2025-09-23 16:36:21,719 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0068 | Val rms_score: 0.6976
77
+ 2025-09-23 16:36:24,274 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0086 | Val rms_score: 0.7093
78
+ 2025-09-23 16:36:26,836 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0102 | Val rms_score: 0.6987
79
+ 2025-09-23 16:36:29,723 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0083 | Val rms_score: 0.7049
80
+ 2025-09-23 16:36:31,942 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0075 | Val rms_score: 0.7030
81
+ 2025-09-23 16:36:34,525 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0074 | Val rms_score: 0.7031
82
+ 2025-09-23 16:36:37,068 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0064 | Val rms_score: 0.7006
83
+ 2025-09-23 16:36:39,624 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0066 | Val rms_score: 0.7039
84
+ 2025-09-23 16:36:42,460 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0067 | Val rms_score: 0.7051
85
+ 2025-09-23 16:36:44,976 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0060 | Val rms_score: 0.7132
86
+ 2025-09-23 16:36:47,577 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0057 | Val rms_score: 0.7068
87
+ 2025-09-23 16:36:50,169 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0064 | Val rms_score: 0.7149
88
+ 2025-09-23 16:36:52,396 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0098 | Val rms_score: 0.6899
89
+ 2025-09-23 16:36:54,968 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0064 | Val rms_score: 0.7286
90
+ 2025-09-23 16:36:57,602 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0075 | Val rms_score: 0.7135
91
+ 2025-09-23 16:36:59,974 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0066 | Val rms_score: 0.7124
92
+ 2025-09-23 16:37:02,554 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0064 | Val rms_score: 0.7225
93
+ 2025-09-23 16:37:05,161 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0076 | Val rms_score: 0.7156
94
+ 2025-09-23 16:37:08,033 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0068 | Val rms_score: 0.7111
95
+ 2025-09-23 16:37:10,625 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0064 | Val rms_score: 0.7169
96
+ 2025-09-23 16:37:13,101 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0065 | Val rms_score: 0.7100
97
+ 2025-09-23 16:37:15,559 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0053 | Val rms_score: 0.7147
98
+ 2025-09-23 16:37:17,997 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0057 | Val rms_score: 0.7093
99
+ 2025-09-23 16:37:20,821 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0055 | Val rms_score: 0.7125
100
+ 2025-09-23 16:37:23,385 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0054 | Val rms_score: 0.7169
101
+ 2025-09-23 16:37:25,887 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0051 | Val rms_score: 0.7173
102
+ 2025-09-23 16:37:28,440 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0060 | Val rms_score: 0.6837
103
+ 2025-09-23 16:37:30,998 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0075 | Val rms_score: 0.7281
104
+ 2025-09-23 16:37:33,853 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0065 | Val rms_score: 0.7127
105
+ 2025-09-23 16:37:36,424 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0068 | Val rms_score: 0.7288
106
+ 2025-09-23 16:37:38,982 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0171 | Val rms_score: 0.8272
107
+ 2025-09-23 16:37:41,432 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0420 | Val rms_score: 0.8557
108
+ 2025-09-23 16:37:44,035 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0386 | Val rms_score: 0.7458
109
+ 2025-09-23 16:37:46,876 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0156 | Val rms_score: 0.6890
110
+ 2025-09-23 16:37:49,399 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0124 | Val rms_score: 0.7461
111
+ 2025-09-23 16:37:51,937 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0229 | Val rms_score: 0.6774
112
+ 2025-09-23 16:37:54,499 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0130 | Val rms_score: 0.6914
113
+ 2025-09-23 16:37:57,113 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0105 | Val rms_score: 0.6999
114
+ 2025-09-23 16:37:59,949 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0121 | Val rms_score: 0.6837
115
+ 2025-09-23 16:38:02,174 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0109 | Val rms_score: 0.6712
116
+ 2025-09-23 16:38:04,587 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0094 | Val rms_score: 0.7292
117
+ 2025-09-23 16:38:07,051 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0069 | Val rms_score: 0.6941
118
+ 2025-09-23 16:38:07,480 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.5608
119
+ 2025-09-23 16:38:07,790 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset freesolv at 2025-09-23_16-38-07
120
+ 2025-09-23 16:38:10,005 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.4926 | Val rms_score: 1.0036
121
+ 2025-09-23 16:38:10,005 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
122
+ 2025-09-23 16:38:10,529 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 1.0036
123
+ 2025-09-23 16:38:13,101 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.2215 | Val rms_score: 1.0470
124
+ 2025-09-23 16:38:15,629 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1425 | Val rms_score: 0.8988
125
+ 2025-09-23 16:38:15,799 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 51
126
+ 2025-09-23 16:38:16,336 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.8988
127
+ 2025-09-23 16:38:18,867 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1075 | Val rms_score: 0.8497
128
+ 2025-09-23 16:38:19,059 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
129
+ 2025-09-23 16:38:19,613 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.8497
130
+ 2025-09-23 16:38:22,178 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.0947 | Val rms_score: 0.8595
131
+ 2025-09-23 16:38:24,725 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0498 | Val rms_score: 1.0429
132
+ 2025-09-23 16:38:27,563 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1195 | Val rms_score: 0.9290
133
+ 2025-09-23 16:38:30,076 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0699 | Val rms_score: 0.8619
134
+ 2025-09-23 16:38:32,586 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0545 | Val rms_score: 0.7865
135
+ 2025-09-23 16:38:32,768 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 153
136
+ 2025-09-23 16:38:33,323 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val rms_score: 0.7865
137
+ 2025-09-23 16:38:35,863 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0443 | Val rms_score: 0.8073
138
+ 2025-09-23 16:38:38,518 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0411 | Val rms_score: 0.7576
139
+ 2025-09-23 16:38:39,024 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 187
140
+ 2025-09-23 16:38:39,570 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.7576
141
+ 2025-09-23 16:38:42,175 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0322 | Val rms_score: 0.7660
142
+ 2025-09-23 16:38:44,724 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0368 | Val rms_score: 0.7095
143
+ 2025-09-23 16:38:44,909 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 221
144
+ 2025-09-23 16:38:45,474 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 13 with val rms_score: 0.7095
145
+ 2025-09-23 16:38:47,612 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0349 | Val rms_score: 0.7964
146
+ 2025-09-23 16:38:50,181 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0772 | Val rms_score: 0.7793
147
+ 2025-09-23 16:38:52,835 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0928 | Val rms_score: 0.8270
148
+ 2025-09-23 16:38:55,677 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0501 | Val rms_score: 0.7441
149
+ 2025-09-23 16:38:58,203 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0306 | Val rms_score: 0.7248
150
+ 2025-09-23 16:39:00,803 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0299 | Val rms_score: 0.7388
151
+ 2025-09-23 16:39:03,362 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0281 | Val rms_score: 0.7268
152
+ 2025-09-23 16:39:05,972 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0255 | Val rms_score: 0.7053
153
+ 2025-09-23 16:39:06,399 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 357
154
+ 2025-09-23 16:39:06,939 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val rms_score: 0.7053
155
+ 2025-09-23 16:39:09,420 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0290 | Val rms_score: 0.7644
156
+ 2025-09-23 16:39:11,722 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0455 | Val rms_score: 0.6656
157
+ 2025-09-23 16:39:11,920 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 391
158
+ 2025-09-23 16:39:12,467 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 23 with val rms_score: 0.6656
159
+ 2025-09-23 16:39:14,839 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0234 | Val rms_score: 0.6991
160
+ 2025-09-23 16:39:17,379 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0260 | Val rms_score: 0.7783
161
+ 2025-09-23 16:39:19,961 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0892 | Val rms_score: 0.8554
162
+ 2025-09-23 16:39:22,762 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.1287 | Val rms_score: 0.8236
163
+ 2025-09-23 16:39:25,301 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0602 | Val rms_score: 0.7665
164
+ 2025-09-23 16:39:27,795 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0315 | Val rms_score: 0.7142
165
+ 2025-09-23 16:39:30,394 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0186 | Val rms_score: 0.6998
166
+ 2025-09-23 16:39:32,951 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0179 | Val rms_score: 0.6821
167
+ 2025-09-23 16:39:35,809 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0159 | Val rms_score: 0.6820
168
+ 2025-09-23 16:39:38,417 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0151 | Val rms_score: 0.6736
169
+ 2025-09-23 16:39:40,973 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0200 | Val rms_score: 0.6532
170
+ 2025-09-23 16:39:41,156 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 578
171
+ 2025-09-23 16:39:41,733 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 34 with val rms_score: 0.6532
172
+ 2025-09-23 16:39:44,262 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0239 | Val rms_score: 0.7194
173
+ 2025-09-23 16:39:46,811 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0260 | Val rms_score: 0.6250
174
+ 2025-09-23 16:39:47,279 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 612
175
+ 2025-09-23 16:39:47,819 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 36 with val rms_score: 0.6250
176
+ 2025-09-23 16:39:50,297 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0680 | Val rms_score: 0.8003
177
+ 2025-09-23 16:39:52,804 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0427 | Val rms_score: 0.7936
178
+ 2025-09-23 16:39:55,185 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0208 | Val rms_score: 0.7144
179
+ 2025-09-23 16:39:57,617 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0144 | Val rms_score: 0.7282
180
+ 2025-09-23 16:40:00,112 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0114 | Val rms_score: 0.7194
181
+ 2025-09-23 16:40:02,883 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0107 | Val rms_score: 0.7164
182
+ 2025-09-23 16:40:05,484 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0126 | Val rms_score: 0.7259
183
+ 2025-09-23 16:40:07,999 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0137 | Val rms_score: 0.7259
184
+ 2025-09-23 16:40:10,520 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0110 | Val rms_score: 0.7219
185
+ 2025-09-23 16:40:13,125 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0090 | Val rms_score: 0.7230
186
+ 2025-09-23 16:40:16,095 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0101 | Val rms_score: 0.7149
187
+ 2025-09-23 16:40:18,597 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0090 | Val rms_score: 0.7199
188
+ 2025-09-23 16:40:20,911 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0086 | Val rms_score: 0.7232
189
+ 2025-09-23 16:40:23,233 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0110 | Val rms_score: 0.7320
190
+ 2025-09-23 16:40:25,753 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0129 | Val rms_score: 0.7122
191
+ 2025-09-23 16:40:28,629 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0182 | Val rms_score: 0.6851
192
+ 2025-09-23 16:40:31,130 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0359 | Val rms_score: 0.7598
193
+ 2025-09-23 16:40:33,667 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0247 | Val rms_score: 0.7497
194
+ 2025-09-23 16:40:36,217 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0178 | Val rms_score: 0.7211
195
+ 2025-09-23 16:40:38,736 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0185 | Val rms_score: 0.7140
196
+ 2025-09-23 16:40:41,605 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0207 | Val rms_score: 0.7235
197
+ 2025-09-23 16:40:44,295 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0213 | Val rms_score: 0.7579
198
+ 2025-09-23 16:40:47,833 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0360 | Val rms_score: 0.6994
199
+ 2025-09-23 16:40:50,465 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0162 | Val rms_score: 0.7262
200
+ 2025-09-23 16:40:53,047 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0139 | Val rms_score: 0.7195
201
+ 2025-09-23 16:40:55,975 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0132 | Val rms_score: 0.7118
202
+ 2025-09-23 16:40:58,435 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0119 | Val rms_score: 0.7140
203
+ 2025-09-23 16:41:01,023 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0119 | Val rms_score: 0.7244
204
+ 2025-09-23 16:41:03,493 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0069 | Val rms_score: 0.7159
205
+ 2025-09-23 16:41:06,069 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0117 | Val rms_score: 0.7106
206
+ 2025-09-23 16:41:08,874 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0147 | Val rms_score: 0.7380
207
+ 2025-09-23 16:41:11,408 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0114 | Val rms_score: 0.7138
208
+ 2025-09-23 16:41:13,949 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0101 | Val rms_score: 0.7114
209
+ 2025-09-23 16:41:16,437 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0104 | Val rms_score: 0.7385
210
+ 2025-09-23 16:41:18,930 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0073 | Val rms_score: 0.7190
211
+ 2025-09-23 16:41:21,769 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0097 | Val rms_score: 0.7247
212
+ 2025-09-23 16:41:24,308 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0090 | Val rms_score: 0.7262
213
+ 2025-09-23 16:41:26,786 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0089 | Val rms_score: 0.7350
214
+ 2025-09-23 16:41:29,243 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0095 | Val rms_score: 0.7176
215
+ 2025-09-23 16:41:31,487 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0083 | Val rms_score: 0.7205
216
+ 2025-09-23 16:41:34,155 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0087 | Val rms_score: 0.7161
217
+ 2025-09-23 16:41:36,789 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0080 | Val rms_score: 0.7108
218
+ 2025-09-23 16:41:39,273 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0088 | Val rms_score: 0.7021
219
+ 2025-09-23 16:41:41,939 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0074 | Val rms_score: 0.7150
220
+ 2025-09-23 16:41:44,479 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0076 | Val rms_score: 0.7028
221
+ 2025-09-23 16:41:47,431 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0079 | Val rms_score: 0.7060
222
+ 2025-09-23 16:41:49,945 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0062 | Val rms_score: 0.7174
223
+ 2025-09-23 16:41:52,431 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0070 | Val rms_score: 0.7113
224
+ 2025-09-23 16:41:54,944 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0069 | Val rms_score: 0.7089
225
+ 2025-09-23 16:41:57,473 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0075 | Val rms_score: 0.7134
226
+ 2025-09-23 16:42:00,184 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0098 | Val rms_score: 0.7097
227
+ 2025-09-23 16:42:02,705 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0092 | Val rms_score: 0.7219
228
+ 2025-09-23 16:42:05,209 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0091 | Val rms_score: 0.7120
229
+ 2025-09-23 16:42:07,844 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0065 | Val rms_score: 0.7189
230
+ 2025-09-23 16:42:10,349 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0067 | Val rms_score: 0.7159
231
+ 2025-09-23 16:42:13,223 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0080 | Val rms_score: 0.7133
232
+ 2025-09-23 16:42:15,758 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0071 | Val rms_score: 0.7170
233
+ 2025-09-23 16:42:18,310 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0065 | Val rms_score: 0.7094
234
+ 2025-09-23 16:42:20,948 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0071 | Val rms_score: 0.7111
235
+ 2025-09-23 16:42:23,474 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0061 | Val rms_score: 0.7097
236
+ 2025-09-23 16:42:26,272 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0063 | Val rms_score: 0.7150
237
+ 2025-09-23 16:42:28,822 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0064 | Val rms_score: 0.7124
238
+ 2025-09-23 16:42:31,371 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0061 | Val rms_score: 0.7124
239
+ 2025-09-23 16:42:33,939 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0063 | Val rms_score: 0.7114
240
+ 2025-09-23 16:42:34,403 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.4955
241
+ 2025-09-23 16:42:34,716 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset freesolv at 2025-09-23_16-42-34
242
+ 2025-09-23 16:42:37,067 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.6287 | Val rms_score: 0.9630
243
+ 2025-09-23 16:42:37,067 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 17
244
+ 2025-09-23 16:42:37,612 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.9630
245
+ 2025-09-23 16:42:39,832 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3180 | Val rms_score: 1.1704
246
+ 2025-09-23 16:42:42,099 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.1893 | Val rms_score: 1.0321
247
+ 2025-09-23 16:42:44,574 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1388 | Val rms_score: 0.9330
248
+ 2025-09-23 16:42:44,770 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 68
249
+ 2025-09-23 16:42:45,328 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.9330
250
+ 2025-09-23 16:42:47,725 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1135 | Val rms_score: 0.9161
251
+ 2025-09-23 16:42:47,911 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 85
252
+ 2025-09-23 16:42:48,471 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 5 with val rms_score: 0.9161
253
+ 2025-09-23 16:42:50,978 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.0962 | Val rms_score: 0.8425
254
+ 2025-09-23 16:42:51,437 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 102
255
+ 2025-09-23 16:42:51,965 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.8425
256
+ 2025-09-23 16:42:54,506 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1039 | Val rms_score: 0.9967
257
+ 2025-09-23 16:42:57,172 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.1397 | Val rms_score: 0.8364
258
+ 2025-09-23 16:42:57,354 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 136
259
+ 2025-09-23 16:42:57,907 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val rms_score: 0.8364
260
+ 2025-09-23 16:43:00,449 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0878 | Val rms_score: 0.8180
261
+ 2025-09-23 16:43:00,639 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 153
262
+ 2025-09-23 16:43:01,195 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val rms_score: 0.8180
263
+ 2025-09-23 16:43:03,746 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0956 | Val rms_score: 0.9201
264
+ 2025-09-23 16:43:06,304 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0901 | Val rms_score: 0.7999
265
+ 2025-09-23 16:43:06,780 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 187
266
+ 2025-09-23 16:43:07,336 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 11 with val rms_score: 0.7999
267
+ 2025-09-23 16:43:09,891 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0300 | Val rms_score: 0.7670
268
+ 2025-09-23 16:43:10,089 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 204
269
+ 2025-09-23 16:43:10,656 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 12 with val rms_score: 0.7670
270
+ 2025-09-23 16:43:13,206 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0551 | Val rms_score: 0.7642
271
+ 2025-09-23 16:43:13,442 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 221
272
+ 2025-09-23 16:43:14,044 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 13 with val rms_score: 0.7642
273
+ 2025-09-23 16:43:16,608 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0772 | Val rms_score: 0.7470
274
+ 2025-09-23 16:43:16,810 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 238
275
+ 2025-09-23 16:43:17,352 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 14 with val rms_score: 0.7470
276
+ 2025-09-23 16:43:19,603 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0584 | Val rms_score: 0.7532
277
+ 2025-09-23 16:43:22,166 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0581 | Val rms_score: 0.7122
278
+ 2025-09-23 16:43:22,651 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 272
279
+ 2025-09-23 16:43:23,187 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 16 with val rms_score: 0.7122
280
+ 2025-09-23 16:43:25,742 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0418 | Val rms_score: 0.7673
281
+ 2025-09-23 16:43:28,344 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0518 | Val rms_score: 0.7259
282
+ 2025-09-23 16:43:30,952 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0460 | Val rms_score: 0.7566
283
+ 2025-09-23 16:43:33,513 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0425 | Val rms_score: 0.7181
284
+ 2025-09-23 16:43:35,962 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0322 | Val rms_score: 0.6856
285
+ 2025-09-23 16:43:36,393 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Global step of best model: 357
286
+ 2025-09-23 16:43:36,954 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val rms_score: 0.6856
287
+ 2025-09-23 16:43:39,433 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0280 | Val rms_score: 0.7254
288
+ 2025-09-23 16:43:41,891 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0255 | Val rms_score: 0.7166
289
+ 2025-09-23 16:43:44,316 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0243 | Val rms_score: 0.7080
290
+ 2025-09-23 16:43:46,764 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0210 | Val rms_score: 0.7008
291
+ 2025-09-23 16:43:49,163 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0172 | Val rms_score: 0.7108
292
+ 2025-09-23 16:43:51,792 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0178 | Val rms_score: 0.7064
293
+ 2025-09-23 16:43:54,376 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0188 | Val rms_score: 0.6878
294
+ 2025-09-23 16:43:56,913 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0160 | Val rms_score: 0.7168
295
+ 2025-09-23 16:43:59,454 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0146 | Val rms_score: 0.7025
296
+ 2025-09-23 16:44:02,082 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0151 | Val rms_score: 0.7015
297
+ 2025-09-23 16:44:05,008 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0194 | Val rms_score: 0.7248
298
+ 2025-09-23 16:44:07,606 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0255 | Val rms_score: 0.7338
299
+ 2025-09-23 16:44:10,198 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0190 | Val rms_score: 0.7376
300
+ 2025-09-23 16:44:12,736 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0180 | Val rms_score: 0.7402
301
+ 2025-09-23 16:44:15,276 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0143 | Val rms_score: 0.7170
302
+ 2025-09-23 16:44:18,150 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0125 | Val rms_score: 0.7182
303
+ 2025-09-23 16:44:20,728 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0126 | Val rms_score: 0.6987
304
+ 2025-09-23 16:44:23,280 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0117 | Val rms_score: 0.7133
305
+ 2025-09-23 16:44:25,827 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0126 | Val rms_score: 0.7252
306
+ 2025-09-23 16:44:28,320 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0133 | Val rms_score: 0.7046
307
+ 2025-09-23 16:44:31,162 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0129 | Val rms_score: 0.7233
308
+ 2025-09-23 16:44:33,620 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0126 | Val rms_score: 0.7321
309
+ 2025-09-23 16:44:36,148 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0106 | Val rms_score: 0.7272
310
+ 2025-09-23 16:44:38,639 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0097 | Val rms_score: 0.7205
311
+ 2025-09-23 16:44:41,085 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0107 | Val rms_score: 0.7015
312
+ 2025-09-23 16:44:43,942 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0104 | Val rms_score: 0.7326
313
+ 2025-09-23 16:44:46,430 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0085 | Val rms_score: 0.7227
314
+ 2025-09-23 16:44:48,987 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0082 | Val rms_score: 0.7342
315
+ 2025-09-23 16:44:51,510 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0092 | Val rms_score: 0.7230
316
+ 2025-09-23 16:44:54,003 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0106 | Val rms_score: 0.7154
317
+ 2025-09-23 16:44:56,719 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0111 | Val rms_score: 0.7231
318
+ 2025-09-23 16:44:59,051 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0043 | Val rms_score: 0.7155
319
+ 2025-09-23 16:45:01,401 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0078 | Val rms_score: 0.7253
320
+ 2025-09-23 16:45:03,918 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0080 | Val rms_score: 0.7255
321
+ 2025-09-23 16:45:06,467 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0082 | Val rms_score: 0.7262
322
+ 2025-09-23 16:45:09,374 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0085 | Val rms_score: 0.7276
323
+ 2025-09-23 16:45:11,975 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0086 | Val rms_score: 0.7190
324
+ 2025-09-23 16:45:15,423 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0061 | Val rms_score: 0.7245
325
+ 2025-09-23 16:45:17,948 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0076 | Val rms_score: 0.7255
326
+ 2025-09-23 16:45:20,452 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0075 | Val rms_score: 0.7252
327
+ 2025-09-23 16:45:23,253 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0073 | Val rms_score: 0.7260
328
+ 2025-09-23 16:45:25,752 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0069 | Val rms_score: 0.7259
329
+ 2025-09-23 16:45:28,210 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0076 | Val rms_score: 0.7148
330
+ 2025-09-23 16:45:30,732 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0119 | Val rms_score: 0.7123
331
+ 2025-09-23 16:45:33,185 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0110 | Val rms_score: 0.7284
332
+ 2025-09-23 16:45:36,095 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0086 | Val rms_score: 0.7208
333
+ 2025-09-23 16:45:38,636 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0114 | Val rms_score: 0.7768
334
+ 2025-09-23 16:45:41,291 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0226 | Val rms_score: 0.7364
335
+ 2025-09-23 16:45:43,796 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0239 | Val rms_score: 0.6931
336
+ 2025-09-23 16:45:46,355 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0099 | Val rms_score: 0.7137
337
+ 2025-09-23 16:45:49,190 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0081 | Val rms_score: 0.7195
338
+ 2025-09-23 16:45:51,762 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0093 | Val rms_score: 0.7026
339
+ 2025-09-23 16:45:54,309 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0249 | Val rms_score: 0.7572
340
+ 2025-09-23 16:45:56,846 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0252 | Val rms_score: 0.7416
341
+ 2025-09-23 16:45:59,401 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0340 | Val rms_score: 0.7479
342
+ 2025-09-23 16:46:02,277 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0451 | Val rms_score: 0.7613
343
+ 2025-09-23 16:46:04,777 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0126 | Val rms_score: 0.7614
344
+ 2025-09-23 16:46:07,028 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0106 | Val rms_score: 0.7435
345
+ 2025-09-23 16:46:09,383 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0101 | Val rms_score: 0.7514
346
+ 2025-09-23 16:46:11,797 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0069 | Val rms_score: 0.7301
347
+ 2025-09-23 16:46:14,655 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0065 | Val rms_score: 0.7624
348
+ 2025-09-23 16:46:17,236 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0059 | Val rms_score: 0.7502
349
+ 2025-09-23 16:46:19,793 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0062 | Val rms_score: 0.7414
350
+ 2025-09-23 16:46:22,361 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0071 | Val rms_score: 0.7252
351
+ 2025-09-23 16:46:24,883 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0099 | Val rms_score: 0.7519
352
+ 2025-09-23 16:46:27,797 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0088 | Val rms_score: 0.7669
353
+ 2025-09-23 16:46:30,352 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0074 | Val rms_score: 0.7362
354
+ 2025-09-23 16:46:32,926 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0053 | Val rms_score: 0.7495
355
+ 2025-09-23 16:46:35,456 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0059 | Val rms_score: 0.7439
356
+ 2025-09-23 16:46:37,961 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0060 | Val rms_score: 0.7334
357
+ 2025-09-23 16:46:40,789 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0057 | Val rms_score: 0.7346
358
+ 2025-09-23 16:46:43,165 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0065 | Val rms_score: 0.7572
359
+ 2025-09-23 16:46:45,606 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0059 | Val rms_score: 0.7531
360
+ 2025-09-23 16:46:48,136 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0070 | Val rms_score: 0.7548
361
+ 2025-09-23 16:46:50,672 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0118 | Val rms_score: 0.7314
362
+ 2025-09-23 16:46:53,481 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0082 | Val rms_score: 0.7699
363
+ 2025-09-23 16:46:56,013 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0079 | Val rms_score: 0.7603
364
+ 2025-09-23 16:46:58,466 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0061 | Val rms_score: 0.7453
365
+ 2025-09-23 16:47:01,173 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0062 | Val rms_score: 0.7545
366
+ 2025-09-23 16:47:01,608 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Test rms_score: 0.5316
367
+ 2025-09-23 16:47:01,930 - logs_modchembert_freesolv_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.5293, Std Dev: 0.0267
logs_modchembert_regression_ModChemBERT-MLM-TAFT/modchembert_deepchem_splits_run_lipo_epochs100_batch_size32_20250923_164701.log ADDED
@@ -0,0 +1,349 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ 2025-09-23 16:47:01,931 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Running benchmark for dataset: lipo
2
+ 2025-09-23 16:47:01,931 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - dataset: lipo, tasks: ['exp'], epochs: 100, learning rate: 3e-05, transform: True
3
+ 2025-09-23 16:47:01,957 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 1 for dataset lipo at 2025-09-23_16-47-01
4
+ 2025-09-23 16:47:12,610 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.3453 | Val rms_score: 0.8830
5
+ 2025-09-23 16:47:12,610 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
6
+ 2025-09-23 16:47:13,151 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.8830
7
+ 2025-09-23 16:47:23,127 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.2516 | Val rms_score: 0.7136
8
+ 2025-09-23 16:47:23,303 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
9
+ 2025-09-23 16:47:23,840 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7136
10
+ 2025-09-23 16:47:34,755 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2156 | Val rms_score: 0.6932
11
+ 2025-09-23 16:47:34,933 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 315
12
+ 2025-09-23 16:47:35,452 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6932
13
+ 2025-09-23 16:47:46,485 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1609 | Val rms_score: 0.6754
14
+ 2025-09-23 16:47:46,679 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 420
15
+ 2025-09-23 16:47:47,219 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.6754
16
+ 2025-09-23 16:47:58,306 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1437 | Val rms_score: 0.6788
17
+ 2025-09-23 16:48:08,659 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1141 | Val rms_score: 0.6607
18
+ 2025-09-23 16:48:09,179 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 630
19
+ 2025-09-23 16:48:09,746 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6607
20
+ 2025-09-23 16:48:20,288 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.0969 | Val rms_score: 0.6684
21
+ 2025-09-23 16:48:30,968 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0930 | Val rms_score: 0.6690
22
+ 2025-09-23 16:48:41,886 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0760 | Val rms_score: 0.6655
23
+ 2025-09-23 16:48:52,551 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0681 | Val rms_score: 0.6687
24
+ 2025-09-23 16:49:03,421 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0676 | Val rms_score: 0.6722
25
+ 2025-09-23 16:49:14,769 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0565 | Val rms_score: 0.6761
26
+ 2025-09-23 16:49:25,217 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0526 | Val rms_score: 0.6774
27
+ 2025-09-23 16:49:35,404 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0464 | Val rms_score: 0.6694
28
+ 2025-09-23 16:49:46,538 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0406 | Val rms_score: 0.6762
29
+ 2025-09-23 16:49:57,639 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0426 | Val rms_score: 0.6623
30
+ 2025-09-23 16:50:08,818 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0379 | Val rms_score: 0.6664
31
+ 2025-09-23 16:50:18,827 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0385 | Val rms_score: 0.6807
32
+ 2025-09-23 16:50:29,540 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0345 | Val rms_score: 0.6616
33
+ 2025-09-23 16:50:41,586 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0359 | Val rms_score: 0.6753
34
+ 2025-09-23 16:50:52,713 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0312 | Val rms_score: 0.6581
35
+ 2025-09-23 16:50:53,167 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2205
36
+ 2025-09-23 16:50:53,706 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 21 with val rms_score: 0.6581
37
+ 2025-09-23 16:51:04,610 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0367 | Val rms_score: 0.6579
38
+ 2025-09-23 16:51:04,800 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2310
39
+ 2025-09-23 16:51:05,332 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 22 with val rms_score: 0.6579
40
+ 2025-09-23 16:51:15,783 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0297 | Val rms_score: 0.6654
41
+ 2025-09-23 16:51:26,386 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0285 | Val rms_score: 0.6618
42
+ 2025-09-23 16:51:37,362 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0297 | Val rms_score: 0.6719
43
+ 2025-09-23 16:51:47,551 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0258 | Val rms_score: 0.6610
44
+ 2025-09-23 16:51:58,550 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0247 | Val rms_score: 0.6599
45
+ 2025-09-23 16:52:09,376 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0242 | Val rms_score: 0.6721
46
+ 2025-09-23 16:52:21,308 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0248 | Val rms_score: 0.6591
47
+ 2025-09-23 16:52:32,223 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0262 | Val rms_score: 0.6710
48
+ 2025-09-23 16:52:42,757 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0247 | Val rms_score: 0.6660
49
+ 2025-09-23 16:52:53,489 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0234 | Val rms_score: 0.6679
50
+ 2025-09-23 16:53:04,630 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0230 | Val rms_score: 0.6739
51
+ 2025-09-23 16:53:15,488 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0233 | Val rms_score: 0.6679
52
+ 2025-09-23 16:53:25,255 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0210 | Val rms_score: 0.6687
53
+ 2025-09-23 16:53:36,148 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0229 | Val rms_score: 0.6801
54
+ 2025-09-23 16:53:47,573 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0232 | Val rms_score: 0.6639
55
+ 2025-09-23 16:53:58,248 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0207 | Val rms_score: 0.6770
56
+ 2025-09-23 16:54:10,277 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0207 | Val rms_score: 0.6648
57
+ 2025-09-23 16:54:21,370 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0209 | Val rms_score: 0.6646
58
+ 2025-09-23 16:54:32,013 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0229 | Val rms_score: 0.6742
59
+ 2025-09-23 16:54:42,679 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0215 | Val rms_score: 0.6749
60
+ 2025-09-23 16:54:52,201 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0215 | Val rms_score: 0.6635
61
+ 2025-09-23 16:55:03,072 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0190 | Val rms_score: 0.6674
62
+ 2025-09-23 16:55:14,075 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0166 | Val rms_score: 0.6664
63
+ 2025-09-23 16:55:24,619 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0184 | Val rms_score: 0.6730
64
+ 2025-09-23 16:55:35,926 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0183 | Val rms_score: 0.6713
65
+ 2025-09-23 16:55:47,369 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0182 | Val rms_score: 0.6652
66
+ 2025-09-23 16:55:58,201 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0182 | Val rms_score: 0.6615
67
+ 2025-09-23 16:56:09,188 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0192 | Val rms_score: 0.6736
68
+ 2025-09-23 16:56:18,646 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0172 | Val rms_score: 0.6678
69
+ 2025-09-23 16:56:29,324 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0177 | Val rms_score: 0.6681
70
+ 2025-09-23 16:56:39,879 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0171 | Val rms_score: 0.6685
71
+ 2025-09-23 16:56:50,313 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0171 | Val rms_score: 0.6664
72
+ 2025-09-23 16:57:01,348 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0164 | Val rms_score: 0.6638
73
+ 2025-09-23 16:57:12,579 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0162 | Val rms_score: 0.6643
74
+ 2025-09-23 16:57:23,956 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0183 | Val rms_score: 0.6613
75
+ 2025-09-23 16:57:35,777 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0167 | Val rms_score: 0.6638
76
+ 2025-09-23 16:57:45,565 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0166 | Val rms_score: 0.6675
77
+ 2025-09-23 16:57:56,306 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0156 | Val rms_score: 0.6603
78
+ 2025-09-23 16:58:07,359 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0130 | Val rms_score: 0.6661
79
+ 2025-09-23 16:58:18,036 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0114 | Val rms_score: 0.6682
80
+ 2025-09-23 16:58:28,958 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0168 | Val rms_score: 0.6709
81
+ 2025-09-23 16:58:40,014 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0151 | Val rms_score: 0.6669
82
+ 2025-09-23 16:58:51,213 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0144 | Val rms_score: 0.6635
83
+ 2025-09-23 16:59:01,766 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0147 | Val rms_score: 0.6658
84
+ 2025-09-23 16:59:12,951 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0140 | Val rms_score: 0.6628
85
+ 2025-09-23 16:59:22,907 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0149 | Val rms_score: 0.6627
86
+ 2025-09-23 16:59:33,176 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0149 | Val rms_score: 0.6579
87
+ 2025-09-23 16:59:43,439 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0154 | Val rms_score: 0.6623
88
+ 2025-09-23 16:59:54,404 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0141 | Val rms_score: 0.6666
89
+ 2025-09-23 17:00:05,717 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0140 | Val rms_score: 0.6629
90
+ 2025-09-23 17:00:16,762 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0141 | Val rms_score: 0.6586
91
+ 2025-09-23 17:00:27,868 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0143 | Val rms_score: 0.6641
92
+ 2025-09-23 17:00:38,989 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0143 | Val rms_score: 0.6644
93
+ 2025-09-23 17:00:48,028 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0147 | Val rms_score: 0.6638
94
+ 2025-09-23 17:00:59,841 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0140 | Val rms_score: 0.6644
95
+ 2025-09-23 17:01:09,934 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0128 | Val rms_score: 0.6630
96
+ 2025-09-23 17:01:20,816 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0141 | Val rms_score: 0.6661
97
+ 2025-09-23 17:01:31,711 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0134 | Val rms_score: 0.6615
98
+ 2025-09-23 17:01:42,666 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0155 | Val rms_score: 0.6636
99
+ 2025-09-23 17:01:54,122 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0143 | Val rms_score: 0.6699
100
+ 2025-09-23 17:02:05,241 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0132 | Val rms_score: 0.6676
101
+ 2025-09-23 17:02:14,896 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0160 | Val rms_score: 0.6694
102
+ 2025-09-23 17:02:25,815 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0128 | Val rms_score: 0.6659
103
+ 2025-09-23 17:02:37,289 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0128 | Val rms_score: 0.6619
104
+ 2025-09-23 17:02:48,685 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0134 | Val rms_score: 0.6618
105
+ 2025-09-23 17:02:59,643 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0125 | Val rms_score: 0.6603
106
+ 2025-09-23 17:03:10,110 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0140 | Val rms_score: 0.6642
107
+ 2025-09-23 17:03:20,804 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0130 | Val rms_score: 0.6695
108
+ 2025-09-23 17:03:31,912 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0128 | Val rms_score: 0.6637
109
+ 2025-09-23 17:03:42,184 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0135 | Val rms_score: 0.6636
110
+ 2025-09-23 17:03:52,409 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0132 | Val rms_score: 0.6616
111
+ 2025-09-23 17:04:02,566 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0132 | Val rms_score: 0.6669
112
+ 2025-09-23 17:04:13,577 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0124 | Val rms_score: 0.6601
113
+ 2025-09-23 17:04:25,587 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0123 | Val rms_score: 0.6651
114
+ 2025-09-23 17:04:36,782 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0131 | Val rms_score: 0.6666
115
+ 2025-09-23 17:04:47,724 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0120 | Val rms_score: 0.6641
116
+ 2025-09-23 17:04:58,933 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0130 | Val rms_score: 0.6663
117
+ 2025-09-23 17:05:09,309 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0126 | Val rms_score: 0.6640
118
+ 2025-09-23 17:05:10,007 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6810
119
+ 2025-09-23 17:05:10,376 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 2 for dataset lipo at 2025-09-23_17-05-10
120
+ 2025-09-23 17:05:20,621 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.3828 | Val rms_score: 0.7847
121
+ 2025-09-23 17:05:20,622 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
122
+ 2025-09-23 17:05:21,150 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7847
123
+ 2025-09-23 17:05:31,265 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.2672 | Val rms_score: 0.7279
124
+ 2025-09-23 17:05:31,446 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
125
+ 2025-09-23 17:05:31,972 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7279
126
+ 2025-09-23 17:05:42,958 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2167 | Val rms_score: 0.7805
127
+ 2025-09-23 17:05:53,920 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1492 | Val rms_score: 0.6888
128
+ 2025-09-23 17:05:54,102 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 420
129
+ 2025-09-23 17:05:54,630 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.6888
130
+ 2025-09-23 17:06:05,514 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1219 | Val rms_score: 0.6935
131
+ 2025-09-23 17:06:16,426 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1240 | Val rms_score: 0.6831
132
+ 2025-09-23 17:06:16,934 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 630
133
+ 2025-09-23 17:06:17,468 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6831
134
+ 2025-09-23 17:06:28,513 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1085 | Val rms_score: 0.6853
135
+ 2025-09-23 17:06:38,548 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0902 | Val rms_score: 0.6666
136
+ 2025-09-23 17:06:38,740 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 840
137
+ 2025-09-23 17:06:39,309 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val rms_score: 0.6666
138
+ 2025-09-23 17:06:49,578 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0764 | Val rms_score: 0.6573
139
+ 2025-09-23 17:06:49,761 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 945
140
+ 2025-09-23 17:06:50,305 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 9 with val rms_score: 0.6573
141
+ 2025-09-23 17:07:01,768 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0678 | Val rms_score: 0.6832
142
+ 2025-09-23 17:07:12,970 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0619 | Val rms_score: 0.6945
143
+ 2025-09-23 17:07:24,311 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0568 | Val rms_score: 0.6795
144
+ 2025-09-23 17:07:35,081 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0531 | Val rms_score: 0.6841
145
+ 2025-09-23 17:07:45,965 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0473 | Val rms_score: 0.6785
146
+ 2025-09-23 17:07:56,919 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0483 | Val rms_score: 0.6856
147
+ 2025-09-23 17:08:06,932 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0461 | Val rms_score: 0.7072
148
+ 2025-09-23 17:08:17,469 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0430 | Val rms_score: 0.6901
149
+ 2025-09-23 17:08:28,555 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0378 | Val rms_score: 0.6754
150
+ 2025-09-23 17:08:39,467 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0377 | Val rms_score: 0.6725
151
+ 2025-09-23 17:08:51,289 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0361 | Val rms_score: 0.6814
152
+ 2025-09-23 17:09:02,422 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0301 | Val rms_score: 0.6848
153
+ 2025-09-23 17:09:13,813 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0355 | Val rms_score: 0.6882
154
+ 2025-09-23 17:09:24,220 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0332 | Val rms_score: 0.6754
155
+ 2025-09-23 17:09:34,361 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0322 | Val rms_score: 0.6832
156
+ 2025-09-23 17:09:44,559 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0300 | Val rms_score: 0.6696
157
+ 2025-09-23 17:09:55,708 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0322 | Val rms_score: 0.6731
158
+ 2025-09-23 17:10:06,706 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0306 | Val rms_score: 0.6685
159
+ 2025-09-23 17:10:17,185 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0268 | Val rms_score: 0.6828
160
+ 2025-09-23 17:10:28,890 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0250 | Val rms_score: 0.6798
161
+ 2025-09-23 17:10:39,898 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0239 | Val rms_score: 0.6873
162
+ 2025-09-23 17:10:50,798 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0230 | Val rms_score: 0.6732
163
+ 2025-09-23 17:11:01,891 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0268 | Val rms_score: 0.6705
164
+ 2025-09-23 17:11:11,482 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0245 | Val rms_score: 0.6811
165
+ 2025-09-23 17:11:22,341 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0234 | Val rms_score: 0.6788
166
+ 2025-09-23 17:11:33,374 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0225 | Val rms_score: 0.6779
167
+ 2025-09-23 17:11:44,486 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0227 | Val rms_score: 0.6739
168
+ 2025-09-23 17:11:55,981 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0216 | Val rms_score: 0.6846
169
+ 2025-09-23 17:12:06,962 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0210 | Val rms_score: 0.6702
170
+ 2025-09-23 17:12:18,417 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0207 | Val rms_score: 0.6705
171
+ 2025-09-23 17:12:28,827 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0213 | Val rms_score: 0.6797
172
+ 2025-09-23 17:12:38,062 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0187 | Val rms_score: 0.6721
173
+ 2025-09-23 17:12:48,877 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0174 | Val rms_score: 0.6705
174
+ 2025-09-23 17:12:59,327 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0207 | Val rms_score: 0.6756
175
+ 2025-09-23 17:13:10,374 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0206 | Val rms_score: 0.6752
176
+ 2025-09-23 17:13:21,458 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0206 | Val rms_score: 0.6740
177
+ 2025-09-23 17:13:32,600 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0184 | Val rms_score: 0.6745
178
+ 2025-09-23 17:13:43,925 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0175 | Val rms_score: 0.6750
179
+ 2025-09-23 17:13:55,701 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0190 | Val rms_score: 0.6711
180
+ 2025-09-23 17:14:05,039 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0202 | Val rms_score: 0.6736
181
+ 2025-09-23 17:14:15,532 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0187 | Val rms_score: 0.6701
182
+ 2025-09-23 17:14:26,290 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0178 | Val rms_score: 0.6688
183
+ 2025-09-23 17:14:37,785 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0177 | Val rms_score: 0.6720
184
+ 2025-09-23 17:14:48,926 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0186 | Val rms_score: 0.6661
185
+ 2025-09-23 17:15:00,021 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0167 | Val rms_score: 0.6727
186
+ 2025-09-23 17:15:10,938 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0169 | Val rms_score: 0.6724
187
+ 2025-09-23 17:15:21,290 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0169 | Val rms_score: 0.6711
188
+ 2025-09-23 17:15:31,529 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0176 | Val rms_score: 0.6745
189
+ 2025-09-23 17:15:42,091 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0180 | Val rms_score: 0.6690
190
+ 2025-09-23 17:15:52,472 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0162 | Val rms_score: 0.6700
191
+ 2025-09-23 17:16:03,217 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0151 | Val rms_score: 0.6734
192
+ 2025-09-23 17:16:14,399 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0210 | Val rms_score: 0.6681
193
+ 2025-09-23 17:16:25,685 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0177 | Val rms_score: 0.6727
194
+ 2025-09-23 17:16:36,724 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0171 | Val rms_score: 0.6690
195
+ 2025-09-23 17:16:47,694 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0160 | Val rms_score: 0.6739
196
+ 2025-09-23 17:16:58,068 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0170 | Val rms_score: 0.6782
197
+ 2025-09-23 17:17:07,262 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0167 | Val rms_score: 0.6715
198
+ 2025-09-23 17:17:18,978 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0151 | Val rms_score: 0.6719
199
+ 2025-09-23 17:17:29,642 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0157 | Val rms_score: 0.6739
200
+ 2025-09-23 17:17:40,708 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0161 | Val rms_score: 0.6724
201
+ 2025-09-23 17:17:51,703 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0152 | Val rms_score: 0.6697
202
+ 2025-09-23 17:18:02,836 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0151 | Val rms_score: 0.6693
203
+ 2025-09-23 17:18:14,236 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0151 | Val rms_score: 0.6732
204
+ 2025-09-23 17:18:24,483 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0146 | Val rms_score: 0.6709
205
+ 2025-09-23 17:18:34,292 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0140 | Val rms_score: 0.6700
206
+ 2025-09-23 17:18:44,652 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0144 | Val rms_score: 0.6685
207
+ 2025-09-23 17:18:55,085 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0144 | Val rms_score: 0.6641
208
+ 2025-09-23 17:19:07,505 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0145 | Val rms_score: 0.6708
209
+ 2025-09-23 17:19:18,597 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0140 | Val rms_score: 0.6682
210
+ 2025-09-23 17:19:29,478 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0141 | Val rms_score: 0.6664
211
+ 2025-09-23 17:19:40,444 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0146 | Val rms_score: 0.6731
212
+ 2025-09-23 17:19:50,961 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0134 | Val rms_score: 0.6718
213
+ 2025-09-23 17:20:01,379 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0146 | Val rms_score: 0.6687
214
+ 2025-09-23 17:20:11,774 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0158 | Val rms_score: 0.6694
215
+ 2025-09-23 17:20:22,729 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0144 | Val rms_score: 0.6733
216
+ 2025-09-23 17:20:33,296 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0152 | Val rms_score: 0.6689
217
+ 2025-09-23 17:20:44,706 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0140 | Val rms_score: 0.6673
218
+ 2025-09-23 17:20:56,020 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0137 | Val rms_score: 0.6674
219
+ 2025-09-23 17:21:07,144 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0138 | Val rms_score: 0.6749
220
+ 2025-09-23 17:21:17,718 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0140 | Val rms_score: 0.6703
221
+ 2025-09-23 17:21:28,932 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0131 | Val rms_score: 0.6657
222
+ 2025-09-23 17:21:38,980 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0132 | Val rms_score: 0.6668
223
+ 2025-09-23 17:21:50,063 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0123 | Val rms_score: 0.6763
224
+ 2025-09-23 17:22:00,823 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0135 | Val rms_score: 0.6709
225
+ 2025-09-23 17:22:11,254 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0133 | Val rms_score: 0.6748
226
+ 2025-09-23 17:22:21,989 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0127 | Val rms_score: 0.6722
227
+ 2025-09-23 17:22:33,864 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0133 | Val rms_score: 0.6740
228
+ 2025-09-23 17:22:44,936 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0124 | Val rms_score: 0.6731
229
+ 2025-09-23 17:22:56,068 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0126 | Val rms_score: 0.6688
230
+ 2025-09-23 17:23:05,283 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0125 | Val rms_score: 0.6641
231
+ 2025-09-23 17:23:16,310 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0127 | Val rms_score: 0.6762
232
+ 2025-09-23 17:23:17,119 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6673
233
+ 2025-09-23 17:23:17,496 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Starting triplicate run 3 for dataset lipo at 2025-09-23_17-23-17
234
+ 2025-09-23 17:23:28,334 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 1/100 | Train Loss: 0.3969 | Val rms_score: 0.7874
235
+ 2025-09-23 17:23:28,334 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 105
236
+ 2025-09-23 17:23:28,865 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 1 with val rms_score: 0.7874
237
+ 2025-09-23 17:23:39,816 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 2/100 | Train Loss: 0.3250 | Val rms_score: 0.7317
238
+ 2025-09-23 17:23:39,987 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 210
239
+ 2025-09-23 17:23:40,526 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 2 with val rms_score: 0.7317
240
+ 2025-09-23 17:23:51,149 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 3/100 | Train Loss: 0.2229 | Val rms_score: 0.6903
241
+ 2025-09-23 17:23:51,331 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 315
242
+ 2025-09-23 17:23:51,863 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 3 with val rms_score: 0.6903
243
+ 2025-09-23 17:24:02,283 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 4/100 | Train Loss: 0.1578 | Val rms_score: 0.6852
244
+ 2025-09-23 17:24:02,492 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 420
245
+ 2025-09-23 17:24:03,028 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 4 with val rms_score: 0.6852
246
+ 2025-09-23 17:24:13,532 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 5/100 | Train Loss: 0.1381 | Val rms_score: 0.7189
247
+ 2025-09-23 17:24:24,662 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 6/100 | Train Loss: 0.1276 | Val rms_score: 0.6701
248
+ 2025-09-23 17:24:25,170 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 630
249
+ 2025-09-23 17:24:25,705 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 6 with val rms_score: 0.6701
250
+ 2025-09-23 17:24:35,634 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 7/100 | Train Loss: 0.1196 | Val rms_score: 0.7227
251
+ 2025-09-23 17:24:46,687 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 8/100 | Train Loss: 0.0816 | Val rms_score: 0.6632
252
+ 2025-09-23 17:24:46,836 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 840
253
+ 2025-09-23 17:24:47,413 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 8 with val rms_score: 0.6632
254
+ 2025-09-23 17:24:58,635 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 9/100 | Train Loss: 0.0743 | Val rms_score: 0.6677
255
+ 2025-09-23 17:25:10,695 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 10/100 | Train Loss: 0.0694 | Val rms_score: 0.6647
256
+ 2025-09-23 17:25:21,245 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 11/100 | Train Loss: 0.0631 | Val rms_score: 0.6721
257
+ 2025-09-23 17:25:32,100 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 12/100 | Train Loss: 0.0633 | Val rms_score: 0.6880
258
+ 2025-09-23 17:25:42,456 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 13/100 | Train Loss: 0.0534 | Val rms_score: 0.6741
259
+ 2025-09-23 17:25:53,262 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 14/100 | Train Loss: 0.0525 | Val rms_score: 0.6716
260
+ 2025-09-23 17:26:02,591 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 15/100 | Train Loss: 0.0462 | Val rms_score: 0.6665
261
+ 2025-09-23 17:26:13,306 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 16/100 | Train Loss: 0.0437 | Val rms_score: 0.6710
262
+ 2025-09-23 17:26:24,655 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 17/100 | Train Loss: 0.0414 | Val rms_score: 0.6699
263
+ 2025-09-23 17:26:35,690 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 18/100 | Train Loss: 0.0391 | Val rms_score: 0.6808
264
+ 2025-09-23 17:26:46,510 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 19/100 | Train Loss: 0.0363 | Val rms_score: 0.6816
265
+ 2025-09-23 17:26:57,943 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 20/100 | Train Loss: 0.0345 | Val rms_score: 0.6725
266
+ 2025-09-23 17:27:08,396 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 21/100 | Train Loss: 0.0314 | Val rms_score: 0.6815
267
+ 2025-09-23 17:27:19,857 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 22/100 | Train Loss: 0.0355 | Val rms_score: 0.6729
268
+ 2025-09-23 17:27:29,409 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 23/100 | Train Loss: 0.0315 | Val rms_score: 0.6737
269
+ 2025-09-23 17:27:39,913 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 24/100 | Train Loss: 0.0352 | Val rms_score: 0.6725
270
+ 2025-09-23 17:27:50,424 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 25/100 | Train Loss: 0.0267 | Val rms_score: 0.6719
271
+ 2025-09-23 17:28:01,638 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 26/100 | Train Loss: 0.0305 | Val rms_score: 0.6716
272
+ 2025-09-23 17:28:13,063 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 27/100 | Train Loss: 0.0281 | Val rms_score: 0.6584
273
+ 2025-09-23 17:28:13,214 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Global step of best model: 2835
274
+ 2025-09-23 17:28:13,762 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Best model saved at epoch 27 with val rms_score: 0.6584
275
+ 2025-09-23 17:28:24,659 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 28/100 | Train Loss: 0.0275 | Val rms_score: 0.6724
276
+ 2025-09-23 17:28:35,931 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 29/100 | Train Loss: 0.0266 | Val rms_score: 0.6685
277
+ 2025-09-23 17:28:46,927 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 30/100 | Train Loss: 0.0272 | Val rms_score: 0.6651
278
+ 2025-09-23 17:28:56,804 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 31/100 | Train Loss: 0.0256 | Val rms_score: 0.6797
279
+ 2025-09-23 17:29:08,133 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 32/100 | Train Loss: 0.0255 | Val rms_score: 0.6714
280
+ 2025-09-23 17:29:19,016 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 33/100 | Train Loss: 0.0243 | Val rms_score: 0.6692
281
+ 2025-09-23 17:29:29,994 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 34/100 | Train Loss: 0.0235 | Val rms_score: 0.6630
282
+ 2025-09-23 17:29:40,939 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 35/100 | Train Loss: 0.0226 | Val rms_score: 0.6666
283
+ 2025-09-23 17:29:51,372 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 36/100 | Train Loss: 0.0221 | Val rms_score: 0.6729
284
+ 2025-09-23 17:30:01,872 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 37/100 | Train Loss: 0.0222 | Val rms_score: 0.6625
285
+ 2025-09-23 17:30:12,957 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 38/100 | Train Loss: 0.0216 | Val rms_score: 0.6680
286
+ 2025-09-23 17:30:23,993 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 39/100 | Train Loss: 0.0212 | Val rms_score: 0.6629
287
+ 2025-09-23 17:30:34,338 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 40/100 | Train Loss: 0.0203 | Val rms_score: 0.6683
288
+ 2025-09-23 17:30:45,546 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 41/100 | Train Loss: 0.0239 | Val rms_score: 0.6652
289
+ 2025-09-23 17:30:56,394 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 42/100 | Train Loss: 0.0241 | Val rms_score: 0.6684
290
+ 2025-09-23 17:31:06,888 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 43/100 | Train Loss: 0.0195 | Val rms_score: 0.6628
291
+ 2025-09-23 17:31:17,860 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 44/100 | Train Loss: 0.0186 | Val rms_score: 0.6690
292
+ 2025-09-23 17:31:28,346 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 45/100 | Train Loss: 0.0205 | Val rms_score: 0.6623
293
+ 2025-09-23 17:31:39,222 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 46/100 | Train Loss: 0.0186 | Val rms_score: 0.6689
294
+ 2025-09-23 17:31:50,326 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 47/100 | Train Loss: 0.0198 | Val rms_score: 0.6628
295
+ 2025-09-23 17:32:01,229 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 48/100 | Train Loss: 0.0191 | Val rms_score: 0.6756
296
+ 2025-09-23 17:32:12,159 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 49/100 | Train Loss: 0.0177 | Val rms_score: 0.6660
297
+ 2025-09-23 17:32:23,055 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 50/100 | Train Loss: 0.0187 | Val rms_score: 0.6722
298
+ 2025-09-23 17:32:33,921 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 51/100 | Train Loss: 0.0182 | Val rms_score: 0.6688
299
+ 2025-09-23 17:32:45,367 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 52/100 | Train Loss: 0.0190 | Val rms_score: 0.6672
300
+ 2025-09-23 17:32:55,989 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 53/100 | Train Loss: 0.0179 | Val rms_score: 0.6683
301
+ 2025-09-23 17:33:06,668 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 54/100 | Train Loss: 0.0181 | Val rms_score: 0.6623
302
+ 2025-09-23 17:33:17,225 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 55/100 | Train Loss: 0.0180 | Val rms_score: 0.6725
303
+ 2025-09-23 17:33:26,383 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 56/100 | Train Loss: 0.0181 | Val rms_score: 0.6737
304
+ 2025-09-23 17:33:37,138 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 57/100 | Train Loss: 0.0169 | Val rms_score: 0.6758
305
+ 2025-09-23 17:33:48,666 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 58/100 | Train Loss: 0.0169 | Val rms_score: 0.6689
306
+ 2025-09-23 17:33:59,596 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 59/100 | Train Loss: 0.0169 | Val rms_score: 0.6693
307
+ 2025-09-23 17:34:10,653 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 60/100 | Train Loss: 0.0166 | Val rms_score: 0.6709
308
+ 2025-09-23 17:34:21,169 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 61/100 | Train Loss: 0.0152 | Val rms_score: 0.6716
309
+ 2025-09-23 17:34:32,646 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 62/100 | Train Loss: 0.0174 | Val rms_score: 0.6654
310
+ 2025-09-23 17:34:43,780 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 63/100 | Train Loss: 0.0148 | Val rms_score: 0.6683
311
+ 2025-09-23 17:34:53,684 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 64/100 | Train Loss: 0.0167 | Val rms_score: 0.6686
312
+ 2025-09-23 17:35:04,869 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 65/100 | Train Loss: 0.0163 | Val rms_score: 0.6692
313
+ 2025-09-23 17:35:15,892 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 66/100 | Train Loss: 0.0155 | Val rms_score: 0.6712
314
+ 2025-09-23 17:35:27,680 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 67/100 | Train Loss: 0.0151 | Val rms_score: 0.6723
315
+ 2025-09-23 17:35:38,499 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 68/100 | Train Loss: 0.0146 | Val rms_score: 0.6656
316
+ 2025-09-23 17:35:49,019 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 69/100 | Train Loss: 0.0157 | Val rms_score: 0.6666
317
+ 2025-09-23 17:36:00,066 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 70/100 | Train Loss: 0.0155 | Val rms_score: 0.6666
318
+ 2025-09-23 17:36:10,964 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 71/100 | Train Loss: 0.0160 | Val rms_score: 0.6685
319
+ 2025-09-23 17:36:21,308 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 72/100 | Train Loss: 0.0156 | Val rms_score: 0.6668
320
+ 2025-09-23 17:36:32,125 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 73/100 | Train Loss: 0.0159 | Val rms_score: 0.6736
321
+ 2025-09-23 17:36:42,930 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 74/100 | Train Loss: 0.0145 | Val rms_score: 0.6687
322
+ 2025-09-23 17:36:53,395 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 75/100 | Train Loss: 0.0154 | Val rms_score: 0.6644
323
+ 2025-09-23 17:37:03,893 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 76/100 | Train Loss: 0.0148 | Val rms_score: 0.6677
324
+ 2025-09-23 17:37:15,678 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 77/100 | Train Loss: 0.0146 | Val rms_score: 0.6678
325
+ 2025-09-23 17:37:26,847 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 78/100 | Train Loss: 0.0141 | Val rms_score: 0.6632
326
+ 2025-09-23 17:37:37,737 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 79/100 | Train Loss: 0.0134 | Val rms_score: 0.6684
327
+ 2025-09-23 17:37:48,329 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 80/100 | Train Loss: 0.0141 | Val rms_score: 0.6690
328
+ 2025-09-23 17:37:58,216 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 81/100 | Train Loss: 0.0133 | Val rms_score: 0.6703
329
+ 2025-09-23 17:38:09,720 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 82/100 | Train Loss: 0.0140 | Val rms_score: 0.6669
330
+ 2025-09-23 17:38:20,812 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 83/100 | Train Loss: 0.0144 | Val rms_score: 0.6682
331
+ 2025-09-23 17:38:31,785 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 84/100 | Train Loss: 0.0154 | Val rms_score: 0.6668
332
+ 2025-09-23 17:38:41,826 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 85/100 | Train Loss: 0.0142 | Val rms_score: 0.6706
333
+ 2025-09-23 17:38:53,283 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 86/100 | Train Loss: 0.0161 | Val rms_score: 0.6702
334
+ 2025-09-23 17:39:04,554 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 87/100 | Train Loss: 0.0131 | Val rms_score: 0.6677
335
+ 2025-09-23 17:39:15,652 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 88/100 | Train Loss: 0.0139 | Val rms_score: 0.6687
336
+ 2025-09-23 17:39:24,888 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 89/100 | Train Loss: 0.0135 | Val rms_score: 0.6668
337
+ 2025-09-23 17:39:35,795 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 90/100 | Train Loss: 0.0145 | Val rms_score: 0.6670
338
+ 2025-09-23 17:39:46,754 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 91/100 | Train Loss: 0.0138 | Val rms_score: 0.6654
339
+ 2025-09-23 17:39:58,274 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 92/100 | Train Loss: 0.0145 | Val rms_score: 0.6697
340
+ 2025-09-23 17:40:08,767 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 93/100 | Train Loss: 0.0136 | Val rms_score: 0.6698
341
+ 2025-09-23 17:40:19,795 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 94/100 | Train Loss: 0.0135 | Val rms_score: 0.6672
342
+ 2025-09-23 17:40:30,611 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 95/100 | Train Loss: 0.0123 | Val rms_score: 0.6689
343
+ 2025-09-23 17:40:41,996 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 96/100 | Train Loss: 0.0128 | Val rms_score: 0.6696
344
+ 2025-09-23 17:40:51,724 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 97/100 | Train Loss: 0.0124 | Val rms_score: 0.6660
345
+ 2025-09-23 17:41:02,555 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 98/100 | Train Loss: 0.0133 | Val rms_score: 0.6732
346
+ 2025-09-23 17:41:13,693 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 99/100 | Train Loss: 0.0132 | Val rms_score: 0.6655
347
+ 2025-09-23 17:41:24,594 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Epoch 100/100 | Train Loss: 0.0133 | Val rms_score: 0.6676
348
+ 2025-09-23 17:41:25,421 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Test rms_score: 0.6640
349
+ 2025-09-23 17:41:25,802 - logs_modchembert_lipo_epochs100_batch_size32 - INFO - Final Triplicate Test Results — Avg rms_score: 0.6708, Std Dev: 0.0074
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:8656f01d9edb002cf882df1e554a0a10e57065c97d4c08a1abfe34bad98da87f
3
+ size 460409308
modeling_modchembert.py ADDED
@@ -0,0 +1,554 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Copyright 2025 Emmanuel Cortes, All Rights Reserved.
2
+ #
3
+ # Copyright 2024 Answer.AI, LightOn, and contributors, and the HuggingFace Inc. team. All rights reserved.
4
+ #
5
+ #
6
+ # Licensed under the Apache License, Version 2.0 (the "License");
7
+ # you may not use this file except in compliance with the License.
8
+ # You may obtain a copy of the License at
9
+ #
10
+ # http://www.apache.org/licenses/LICENSE-2.0
11
+ #
12
+ # Unless required by applicable law or agreed to in writing, software
13
+ # distributed under the License is distributed on an "AS IS" BASIS,
14
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15
+ # See the License for the specific language governing permissions and
16
+ # limitations under the License.
17
+
18
+ # This file is adapted from the transformers library.
19
+ # Modifications include:
20
+ # - Additional classifier_pooling options for ModChemBertForSequenceClassification
21
+ # - sum_mean, sum_sum, mean_sum, mean_mean: from ChemLM (utilizes all hidden states)
22
+ # - max_cls, cls_mha, max_seq_mha: from MaxPoolBERT (utilizes last k hidden states)
23
+ # - max_seq_mean: a merge between sum_mean and max_cls (utilizes last k hidden states)
24
+ # - Addition of ModChemBertPoolingAttention for cls_mha and max_seq_mha pooling options
25
+
26
+ import copy
27
+ import math
28
+ import typing
29
+ from contextlib import nullcontext
30
+
31
+ import torch
32
+ import torch.nn as nn
33
+ from torch.nn import BCEWithLogitsLoss, CrossEntropyLoss, MSELoss
34
+ from transformers.modeling_attn_mask_utils import _prepare_4d_attention_mask
35
+ from transformers.modeling_outputs import MaskedLMOutput, SequenceClassifierOutput
36
+ from transformers.models.modernbert.modeling_modernbert import (
37
+ MODERNBERT_ATTENTION_FUNCTION,
38
+ ModernBertModel,
39
+ ModernBertPredictionHead,
40
+ ModernBertPreTrainedModel,
41
+ ModernBertRotaryEmbedding,
42
+ _pad_modernbert_output,
43
+ _unpad_modernbert_input,
44
+ )
45
+ from transformers.utils import logging
46
+
47
+ from .configuration_modchembert import ModChemBertConfig
48
+
49
+ logger = logging.get_logger(__name__)
50
+
51
+
52
+ class InitWeightsMixin:
53
+ def _init_weights(self, module: nn.Module):
54
+ super()._init_weights(module) # type: ignore
55
+
56
+ cutoff_factor = self.config.initializer_cutoff_factor # type: ignore
57
+ if cutoff_factor is None:
58
+ cutoff_factor = 3
59
+
60
+ def init_weight(module: nn.Module, std: float):
61
+ if isinstance(module, nn.Linear):
62
+ nn.init.trunc_normal_(
63
+ module.weight,
64
+ mean=0.0,
65
+ std=std,
66
+ a=-cutoff_factor * std,
67
+ b=cutoff_factor * std,
68
+ )
69
+ if module.bias is not None:
70
+ nn.init.zeros_(module.bias)
71
+
72
+ stds = {
73
+ "in": self.config.initializer_range, # type: ignore
74
+ "out": self.config.initializer_range / math.sqrt(2.0 * self.config.num_hidden_layers), # type: ignore
75
+ "final_out": self.config.hidden_size**-0.5, # type: ignore
76
+ }
77
+
78
+ if isinstance(module, ModChemBertForMaskedLM):
79
+ init_weight(module.decoder, stds["out"])
80
+ elif isinstance(module, ModChemBertForSequenceClassification):
81
+ init_weight(module.classifier, stds["final_out"])
82
+ elif isinstance(module, ModChemBertPoolingAttention):
83
+ init_weight(module.Wq, stds["in"])
84
+ init_weight(module.Wk, stds["in"])
85
+ init_weight(module.Wv, stds["in"])
86
+ init_weight(module.Wo, stds["out"])
87
+
88
+
89
class ModChemBertPoolingAttention(nn.Module):
    """Performs multi-headed self attention on a batch of sequences.

    Used as a pooling layer for the ``cls_mha`` and ``max_seq_mha`` classifier
    pooling modes: the query comes from a (pooled) CLS token while keys/values
    come from the full sequence. Attention itself is delegated to ModernBert's
    SDPA attention function with global (non-sliding-window) attention.
    """

    def __init__(self, config: ModChemBertConfig):
        super().__init__()
        # Private config copy so the pooling-specific overrides below do not
        # leak into the shared model config.
        self.config = copy.deepcopy(config)
        # Override num_attention_heads to use classifier_pooling_num_attention_heads
        self.config.num_attention_heads = config.classifier_pooling_num_attention_heads
        # Override attention_dropout to use classifier_pooling_attention_dropout
        self.config.attention_dropout = config.classifier_pooling_attention_dropout

        if config.hidden_size % config.num_attention_heads != 0:
            raise ValueError(
                f"The hidden size ({config.hidden_size}) is not a multiple of the number of attention heads "
                f"({config.num_attention_heads})"
            )

        # NOTE(review): the attributes below read from the ORIGINAL `config`, not
        # the overridden `self.config`, so `classifier_pooling_num_attention_heads`
        # and `classifier_pooling_attention_dropout` only take effect where
        # `self.config` itself is consulted (e.g. by the attention function or the
        # rotary embedding). Confirm this asymmetry is intentional.
        self.attention_dropout = config.attention_dropout
        self.num_heads = config.num_attention_heads
        self.head_dim = config.hidden_size // config.num_attention_heads
        self.all_head_size = self.head_dim * self.num_heads
        self.Wq = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)
        self.Wk = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)
        self.Wv = nn.Linear(config.hidden_size, self.all_head_size, bias=config.attention_bias)

        # Use global attention
        self.local_attention = (-1, -1)
        rope_theta = config.global_rope_theta
        # sdpa path from original ModernBert implementation
        config_copy = copy.deepcopy(config)
        config_copy.rope_theta = rope_theta
        self.rotary_emb = ModernBertRotaryEmbedding(config=config_copy)

        self.Wo = nn.Linear(config.hidden_size, config.hidden_size, bias=config.attention_bias)
        self.out_drop = nn.Dropout(config.attention_dropout) if config.attention_dropout > 0.0 else nn.Identity()
        self.pruned_heads = set()

    def forward(
        self,
        q: torch.Tensor,
        kv: torch.Tensor,
        attention_mask: torch.Tensor | None = None,
        **kwargs,
    ) -> torch.Tensor:
        """Cross-attend `q` over `kv`.

        Args:
            q: Query tensor of shape (batch, seq_len, hidden).
            kv: Key/value tensor of shape (batch, seq_len, hidden).
            attention_mask: Optional (batch, seq_len) mask of valid tokens;
                defaults to all-ones when omitted.

        Returns:
            Attended hidden states of shape (batch, seq_len, hidden).
        """
        bs, seq_len = kv.shape[:2]
        q_proj: torch.Tensor = self.Wq(q)
        k_proj: torch.Tensor = self.Wk(kv)
        v_proj: torch.Tensor = self.Wv(kv)
        # Pack q/k/v into the combined layout ModernBert's attention fn expects.
        qkv = torch.stack(
            (
                q_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
                k_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
                v_proj.reshape(bs, seq_len, self.num_heads, self.head_dim),
            ),
            dim=2,
        )  # (bs, seq_len, 3, num_heads, head_dim)

        device = kv.device
        if attention_mask is None:
            attention_mask = torch.ones((bs, seq_len), device=device, dtype=torch.bool)
        position_ids = torch.arange(seq_len, device=device).unsqueeze(0).long()

        attn_outputs = MODERNBERT_ATTENTION_FUNCTION["sdpa"](
            self,
            qkv=qkv,
            attention_mask=_prepare_4d_attention_mask(attention_mask, kv.dtype),
            sliding_window_mask=None,  # not needed when using global attention
            position_ids=position_ids,
            local_attention=self.local_attention,
            bs=bs,
            dim=self.all_head_size,
            **kwargs,
        )
        hidden_states = attn_outputs[0]
        hidden_states = self.out_drop(self.Wo(hidden_states))

        return hidden_states
166
+
167
+
168
class ModChemBertForMaskedLM(InitWeightsMixin, ModernBertPreTrainedModel):
    """ModernBert-based masked-language model for chemistry (SMILES) sequences.

    Wraps ``ModernBertModel`` with a prediction head and a vocab-sized decoder.
    Supports ModernBert's flash-attention-2 unpadded path and sparse prediction
    (computing logits only for masked positions).
    """

    config_class = ModChemBertConfig
    # Decoder weights are tied to the input embeddings.
    _tied_weights_keys = ["decoder.weight"]

    def __init__(self, config: ModChemBertConfig):
        super().__init__(config)
        self.config = config
        self.model = ModernBertModel(config)
        self.head = ModernBertPredictionHead(config)
        self.decoder = nn.Linear(config.hidden_size, config.vocab_size, bias=config.decoder_bias)

        # When sparse_prediction is set, loss/logits are computed only on masked tokens.
        self.sparse_prediction = self.config.sparse_prediction
        self.sparse_pred_ignore_index = self.config.sparse_pred_ignore_index

        # Initialize weights and apply final processing
        self.post_init()

    def get_output_embeddings(self):
        return self.decoder

    def set_output_embeddings(self, new_embeddings: nn.Linear):
        self.decoder = new_embeddings

    @torch.compile(dynamic=True)
    def compiled_head(self, output: torch.Tensor) -> torch.Tensor:
        # torch.compile'd head+decoder; used when config.reference_compile is set.
        return self.decoder(self.head(output))

    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        sliding_window_mask: torch.Tensor | None = None,
        position_ids: torch.Tensor | None = None,
        inputs_embeds: torch.Tensor | None = None,
        labels: torch.Tensor | None = None,
        indices: torch.Tensor | None = None,
        cu_seqlens: torch.Tensor | None = None,
        max_seqlen: int | None = None,
        batch_size: int | None = None,
        seq_len: int | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        return_dict: bool | None = None,
        **kwargs,
    ) -> tuple[torch.Tensor] | tuple[torch.Tensor, typing.Any] | MaskedLMOutput:
        r"""
        sliding_window_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding or far-away tokens. In ModernBert, only every few layers
            perform global attention, while the rest perform local attention. This mask is used to avoid attending to
            far-away tokens in the local attention layers when not using Flash Attention.
        indices (`torch.Tensor` of shape `(total_unpadded_tokens,)`, *optional*):
            Indices of the non-padding tokens in the input sequence. Used for unpadding the output.
        cu_seqlens (`torch.Tensor` of shape `(batch + 1,)`, *optional*):
            Cumulative sequence lengths of the input sequences. Used to index the unpadded tensors.
        max_seqlen (`int`, *optional*):
            Maximum sequence length in the batch excluding padding tokens. Used to unpad input_ids & pad output tensors.
        batch_size (`int`, *optional*):
            Batch size of the input sequences. Used to pad the output tensors.
        seq_len (`int`, *optional*):
            Sequence length of the input sequences including padding tokens. Used to pad the output tensors.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        self._maybe_set_compile()

        # Flash-attention-2 operates on unpadded (flattened) token streams; build
        # the unpadded inputs here unless the caller already supplied them.
        if self.config._attn_implementation == "flash_attention_2":  # noqa: SIM102
            if indices is None and cu_seqlens is None and max_seqlen is None:
                if batch_size is None and seq_len is None:
                    if inputs_embeds is not None:
                        batch_size, seq_len = inputs_embeds.shape[:2]
                    else:
                        batch_size, seq_len = input_ids.shape[:2]  # type: ignore
                device = input_ids.device if input_ids is not None else inputs_embeds.device  # type: ignore

                if attention_mask is None:
                    attention_mask = torch.ones((batch_size, seq_len), device=device, dtype=torch.bool)  # type: ignore

                if inputs_embeds is None:
                    # no_grad: unpadding is pure index bookkeeping and needs no autograd graph.
                    with torch.no_grad():
                        input_ids, indices, cu_seqlens, max_seqlen, position_ids, labels = _unpad_modernbert_input(
                            inputs=input_ids,  # type: ignore
                            attention_mask=attention_mask,  # type: ignore
                            position_ids=position_ids,
                            labels=labels,
                        )
                else:
                    inputs_embeds, indices, cu_seqlens, max_seqlen, position_ids, labels = _unpad_modernbert_input(
                        inputs=inputs_embeds,
                        attention_mask=attention_mask,  # type: ignore
                        position_ids=position_ids,
                        labels=labels,
                    )

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            sliding_window_mask=sliding_window_mask,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            indices=indices,
            cu_seqlens=cu_seqlens,
            max_seqlen=max_seqlen,
            batch_size=batch_size,
            seq_len=seq_len,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        last_hidden_state = outputs[0]

        if self.sparse_prediction and labels is not None:
            # flatten labels and output first
            labels = labels.view(-1)
            last_hidden_state = last_hidden_state.view(labels.shape[0], -1)

            # then filter out the non-masked tokens
            mask_tokens = labels != self.sparse_pred_ignore_index
            last_hidden_state = last_hidden_state[mask_tokens]
            labels = labels[mask_tokens]

        logits = (
            self.compiled_head(last_hidden_state)
            if self.config.reference_compile
            else self.decoder(self.head(last_hidden_state))
        )

        loss = None
        if labels is not None:
            loss = self.loss_function(logits, labels, vocab_size=self.config.vocab_size, **kwargs)

        if self.config._attn_implementation == "flash_attention_2":
            # Re-pad logits to (batch, seq_len, vocab); skip grad tracking unless
            # the config explicitly asks for repadded logits with grad.
            with nullcontext() if self.config.repad_logits_with_grad or labels is None else torch.no_grad():
                logits = _pad_modernbert_output(inputs=logits, indices=indices, batch=batch_size, seqlen=seq_len)  # type: ignore

        if not return_dict:
            output = (logits,)
            return ((loss,) + output) if loss is not None else output

        return MaskedLMOutput(
            loss=loss,
            logits=typing.cast(torch.FloatTensor, logits),
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
311
+
312
+
313
class ModChemBertForSequenceClassification(InitWeightsMixin, ModernBertPreTrainedModel):
    """ModernBert-based sequence classifier/regressor with configurable pooling.

    Pooling strategy is selected by ``config.classifier_pooling`` (see
    ``_pool_modchembert_output``); the ``cls_mha`` / ``max_seq_mha`` modes use a
    dedicated ``ModChemBertPoolingAttention`` layer.
    """

    config_class = ModChemBertConfig

    def __init__(self, config: ModChemBertConfig):
        super().__init__(config)
        self.num_labels = config.num_labels
        self.config = config

        self.model = ModernBertModel(config)
        if self.config.classifier_pooling in {"cls_mha", "max_seq_mha"}:
            self.pooling_attn = ModChemBertPoolingAttention(config=self.config)
        else:
            self.pooling_attn = None
        self.head = ModernBertPredictionHead(config)
        self.drop = torch.nn.Dropout(config.classifier_dropout)
        self.classifier = nn.Linear(config.hidden_size, config.num_labels)

        # Initialize weights and apply final processing
        self.post_init()

    def forward(
        self,
        input_ids: torch.LongTensor | None = None,
        attention_mask: torch.Tensor | None = None,
        sliding_window_mask: torch.Tensor | None = None,
        position_ids: torch.Tensor | None = None,
        inputs_embeds: torch.Tensor | None = None,
        labels: torch.Tensor | None = None,
        indices: torch.Tensor | None = None,
        cu_seqlens: torch.Tensor | None = None,
        max_seqlen: int | None = None,
        batch_size: int | None = None,
        seq_len: int | None = None,
        output_attentions: bool | None = None,
        output_hidden_states: bool | None = None,
        return_dict: bool | None = None,
        **kwargs,
    ) -> tuple[torch.Tensor] | tuple[torch.Tensor, typing.Any] | SequenceClassifierOutput:
        r"""
        sliding_window_mask (`torch.Tensor` of shape `(batch_size, sequence_length)`, *optional*):
            Mask to avoid performing attention on padding or far-away tokens. In ModernBert, only every few layers
            perform global attention, while the rest perform local attention. This mask is used to avoid attending to
            far-away tokens in the local attention layers when not using Flash Attention.
        labels (`torch.LongTensor` of shape `(batch_size,)`, *optional*):
            Labels for computing the sequence classification/regression loss. Indices should be in `[0, ...,
            config.num_labels - 1]`. If `config.num_labels == 1` a regression loss is computed (Mean-Square loss), If
            `config.num_labels > 1` a classification loss is computed (Cross-Entropy).
        indices (`torch.Tensor` of shape `(total_unpadded_tokens,)`, *optional*):
            Indices of the non-padding tokens in the input sequence. Used for unpadding the output.
        cu_seqlens (`torch.Tensor` of shape `(batch + 1,)`, *optional*):
            Cumulative sequence lengths of the input sequences. Used to index the unpadded tensors.
        max_seqlen (`int`, *optional*):
            Maximum sequence length in the batch excluding padding tokens. Used to unpad input_ids & pad output tensors.
        batch_size (`int`, *optional*):
            Batch size of the input sequences. Used to pad the output tensors.
        seq_len (`int`, *optional*):
            Sequence length of the input sequences including padding tokens. Used to pad the output tensors.
        """
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict
        self._maybe_set_compile()

        if input_ids is not None:
            self.warn_if_padding_and_no_attention_mask(input_ids, attention_mask)

        if batch_size is None and seq_len is None:
            if inputs_embeds is not None:
                batch_size, seq_len = inputs_embeds.shape[:2]
            else:
                batch_size, seq_len = input_ids.shape[:2]  # type: ignore
        device = input_ids.device if input_ids is not None else inputs_embeds.device  # type: ignore

        if attention_mask is None:
            attention_mask = torch.ones((batch_size, seq_len), device=device, dtype=torch.bool)  # type: ignore

        # Ensure output_hidden_states is True in case pooling mode requires all hidden states
        # (unconditionally overrides the caller-supplied value).
        output_hidden_states = True

        outputs = self.model(
            input_ids=input_ids,
            attention_mask=attention_mask,
            sliding_window_mask=sliding_window_mask,
            position_ids=position_ids,
            inputs_embeds=inputs_embeds,
            indices=indices,
            cu_seqlens=cu_seqlens,
            max_seqlen=max_seqlen,
            batch_size=batch_size,
            seq_len=seq_len,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
        )
        last_hidden_state = outputs[0]
        hidden_states = outputs[1]

        # Collapse the sequence dimension according to config.classifier_pooling.
        last_hidden_state = _pool_modchembert_output(
            self,
            last_hidden_state,
            hidden_states,
            typing.cast(torch.Tensor, attention_mask),
        )
        pooled_output = self.head(last_hidden_state)
        pooled_output = self.drop(pooled_output)
        logits = self.classifier(pooled_output)

        loss = None
        if labels is not None:
            # Infer the problem type once (HF convention) when not set explicitly.
            if self.config.problem_type is None:
                if self.num_labels == 1:
                    self.config.problem_type = "regression"
                elif self.num_labels > 1 and (labels.dtype == torch.long or labels.dtype == torch.int):
                    self.config.problem_type = "single_label_classification"
                else:
                    self.config.problem_type = "multi_label_classification"

            if self.config.problem_type == "regression":
                loss_fct = MSELoss()
                if self.num_labels == 1:
                    loss = loss_fct(logits.squeeze(), labels.squeeze())
                else:
                    loss = loss_fct(logits, labels)
            elif self.config.problem_type == "single_label_classification":
                loss_fct = CrossEntropyLoss()
                loss = loss_fct(logits.view(-1, self.num_labels), labels.view(-1))
            elif self.config.problem_type == "multi_label_classification":
                loss_fct = BCEWithLogitsLoss()
                loss = loss_fct(logits, labels)

        if not return_dict:
            output = (logits,)
            return ((loss,) + output) if loss is not None else output

        return SequenceClassifierOutput(
            loss=loss,
            logits=logits,
            hidden_states=outputs.hidden_states,
            attentions=outputs.attentions,
        )
451
+
452
+
453
def _pool_modchembert_output(
    module: ModChemBertForSequenceClassification,
    last_hidden_state: torch.Tensor,
    hidden_states: list[torch.Tensor],
    attention_mask: torch.Tensor,
):
    """Collapse per-token hidden states into one vector per sequence.

    The strategy is chosen by ``config.classifier_pooling``:

    - ``cls``: CLS-token slice of the last hidden state.
    - ``mean``: attention-masked mean over the sequence.
    - ``max_cls``: element-wise max over the last k layers, then CLS slice.
    - ``cls_mha``: MHA pooling with the CLS token as query over the sequence.
    - ``max_seq_mha``: max over last k layers, then MHA pooling with pooled CLS query.
    - ``max_seq_mean``: max over last k layers, then mean over the sequence.
    - ``sum_mean`` / ``sum_sum`` / ``mean_sum`` / ``mean_mean``: reduce across
      all layers (sum or mean) and then across the sequence (mean or sum).
      (The all-layer mean variant follows ChemLM, https://doi.org/10.1038/s42004-025-01484-4.)

    Args:
        module: Model holding the config and, for the *_mha modes, ``pooling_attn``.
        last_hidden_state: (batch, seq_len, hidden) final-layer states.
        hidden_states: Per-layer states, each (batch, seq_len, hidden).
        attention_mask: (batch, seq_len) valid-token mask.

    Returns:
        torch.Tensor: (batch, hidden) pooled representation. An unrecognized
        pooling mode returns ``last_hidden_state`` unchanged.
    """
    config = typing.cast(ModChemBertConfig, module.config)
    mode = config.classifier_pooling

    def _max_over_last_k() -> torch.Tensor:
        # Element-wise max across the last k layers -> (batch, seq_len, hidden).
        stacked = torch.stack(hidden_states[-config.classifier_pooling_last_k :], dim=1)
        return torch.max(stacked, dim=1).values

    if mode == "cls":
        return last_hidden_state[:, 0]

    if mode == "mean":
        token_mask = attention_mask.unsqueeze(-1)
        return (last_hidden_state * token_mask).sum(dim=1) / attention_mask.sum(dim=1, keepdim=True)

    if mode == "max_cls":
        return _max_over_last_k()[:, 0, :]

    if mode == "cls_mha":
        # Query: CLS token broadcast along the sequence; keys/values: full sequence.
        query = last_hidden_state[:, 0, :].unsqueeze(1).expand(-1, last_hidden_state.shape[1], -1)
        attended: torch.Tensor = module.pooling_attn(  # type: ignore
            q=query, kv=last_hidden_state, attention_mask=attention_mask
        )
        return attended.mean(dim=1)

    if mode == "max_seq_mha":
        pooled = _max_over_last_k()
        # Query: pooled CLS token broadcast along the sequence; keys/values: pooled sequence.
        query = pooled[:, 0, :].unsqueeze(1).expand(-1, pooled.shape[1], -1)
        attended: torch.Tensor = module.pooling_attn(  # type: ignore
            q=query, kv=pooled, attention_mask=attention_mask
        )
        return attended.mean(dim=1)

    if mode == "max_seq_mean":
        return _max_over_last_k().mean(dim=1)

    if mode in {"sum_mean", "sum_sum", "mean_sum", "mean_mean"}:
        layer_op, seq_op = mode.split("_")
        all_layers = torch.stack(hidden_states)
        reduced = torch.sum(all_layers, dim=0) if layer_op == "sum" else torch.mean(all_layers, dim=0)
        return torch.sum(reduced, dim=1) if seq_op == "sum" else torch.mean(reduced, dim=1)

    # Unknown mode: fall through with the input unchanged (matches original behavior).
    return last_hidden_state
549
+
550
+
551
+ __all__ = [
552
+ "ModChemBertForMaskedLM",
553
+ "ModChemBertForSequenceClassification",
554
+ ]
special_tokens_map.json ADDED
@@ -0,0 +1,37 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "cls_token": {
3
+ "content": "[CLS]",
4
+ "lstrip": false,
5
+ "normalized": false,
6
+ "rstrip": false,
7
+ "single_word": false
8
+ },
9
+ "mask_token": {
10
+ "content": "[MASK]",
11
+ "lstrip": false,
12
+ "normalized": false,
13
+ "rstrip": false,
14
+ "single_word": false
15
+ },
16
+ "pad_token": {
17
+ "content": "[PAD]",
18
+ "lstrip": false,
19
+ "normalized": false,
20
+ "rstrip": false,
21
+ "single_word": false
22
+ },
23
+ "sep_token": {
24
+ "content": "[SEP]",
25
+ "lstrip": false,
26
+ "normalized": false,
27
+ "rstrip": false,
28
+ "single_word": false
29
+ },
30
+ "unk_token": {
31
+ "content": "[UNK]",
32
+ "lstrip": false,
33
+ "normalized": false,
34
+ "rstrip": false,
35
+ "single_word": false
36
+ }
37
+ }
tokenizer.json ADDED
@@ -0,0 +1,2554 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "1.0",
3
+ "truncation": {
4
+ "direction": "Right",
5
+ "max_length": 256,
6
+ "strategy": "LongestFirst",
7
+ "stride": 0
8
+ },
9
+ "padding": {
10
+ "strategy": "BatchLongest",
11
+ "direction": "Right",
12
+ "pad_to_multiple_of": 8,
13
+ "pad_id": 2,
14
+ "pad_type_id": 0,
15
+ "pad_token": "[PAD]"
16
+ },
17
+ "added_tokens": [
18
+ {
19
+ "id": 0,
20
+ "content": "[CLS]",
21
+ "single_word": false,
22
+ "lstrip": false,
23
+ "rstrip": false,
24
+ "normalized": false,
25
+ "special": true
26
+ },
27
+ {
28
+ "id": 1,
29
+ "content": "[SEP]",
30
+ "single_word": false,
31
+ "lstrip": false,
32
+ "rstrip": false,
33
+ "normalized": false,
34
+ "special": true
35
+ },
36
+ {
37
+ "id": 2,
38
+ "content": "[PAD]",
39
+ "single_word": false,
40
+ "lstrip": false,
41
+ "rstrip": false,
42
+ "normalized": false,
43
+ "special": true
44
+ },
45
+ {
46
+ "id": 3,
47
+ "content": "[MASK]",
48
+ "single_word": false,
49
+ "lstrip": false,
50
+ "rstrip": false,
51
+ "normalized": false,
52
+ "special": true
53
+ },
54
+ {
55
+ "id": 2361,
56
+ "content": "[UNK]",
57
+ "single_word": false,
58
+ "lstrip": false,
59
+ "rstrip": false,
60
+ "normalized": false,
61
+ "special": true
62
+ }
63
+ ],
64
+ "normalizer": null,
65
+ "pre_tokenizer": {
66
+ "type": "ByteLevel",
67
+ "add_prefix_space": false,
68
+ "trim_offsets": true,
69
+ "use_regex": true
70
+ },
71
+ "post_processor": {
72
+ "type": "TemplateProcessing",
73
+ "single": [
74
+ {
75
+ "SpecialToken": {
76
+ "id": "[CLS]",
77
+ "type_id": 0
78
+ }
79
+ },
80
+ {
81
+ "Sequence": {
82
+ "id": "A",
83
+ "type_id": 0
84
+ }
85
+ },
86
+ {
87
+ "SpecialToken": {
88
+ "id": "[SEP]",
89
+ "type_id": 0
90
+ }
91
+ }
92
+ ],
93
+ "pair": [
94
+ {
95
+ "SpecialToken": {
96
+ "id": "[CLS]",
97
+ "type_id": 0
98
+ }
99
+ },
100
+ {
101
+ "Sequence": {
102
+ "id": "A",
103
+ "type_id": 0
104
+ }
105
+ },
106
+ {
107
+ "SpecialToken": {
108
+ "id": "[SEP]",
109
+ "type_id": 0
110
+ }
111
+ },
112
+ {
113
+ "Sequence": {
114
+ "id": "B",
115
+ "type_id": 0
116
+ }
117
+ },
118
+ {
119
+ "SpecialToken": {
120
+ "id": "[SEP]",
121
+ "type_id": 0
122
+ }
123
+ }
124
+ ],
125
+ "special_tokens": {
126
+ "[CLS]": {
127
+ "id": "[CLS]",
128
+ "ids": [
129
+ 0
130
+ ],
131
+ "tokens": [
132
+ "[CLS]"
133
+ ]
134
+ },
135
+ "[MASK]": {
136
+ "id": "[MASK]",
137
+ "ids": [
138
+ 3
139
+ ],
140
+ "tokens": [
141
+ "[MASK]"
142
+ ]
143
+ },
144
+ "[PAD]": {
145
+ "id": "[PAD]",
146
+ "ids": [
147
+ 2
148
+ ],
149
+ "tokens": [
150
+ "[PAD]"
151
+ ]
152
+ },
153
+ "[SEP]": {
154
+ "id": "[SEP]",
155
+ "ids": [
156
+ 1
157
+ ],
158
+ "tokens": [
159
+ "[SEP]"
160
+ ]
161
+ },
162
+ "[UNK]": {
163
+ "id": "[UNK]",
164
+ "ids": [
165
+ 2361
166
+ ],
167
+ "tokens": [
168
+ "[UNK]"
169
+ ]
170
+ }
171
+ }
172
+ },
173
+ "decoder": {
174
+ "type": "ByteLevel",
175
+ "add_prefix_space": false,
176
+ "trim_offsets": true,
177
+ "use_regex": true
178
+ },
179
+ "model": {
180
+ "type": "BPE",
181
+ "dropout": null,
182
+ "unk_token": "[UNK]",
183
+ "continuing_subword_prefix": null,
184
+ "end_of_word_suffix": null,
185
+ "fuse_unk": false,
186
+ "byte_fallback": false,
187
+ "ignore_merges": false,
188
+ "vocab": {
189
+ "[CLS]": 0,
190
+ "[SEP]": 1,
191
+ "[PAD]": 2,
192
+ "[MASK]": 3,
193
+ "C": 4,
194
+ "c": 5,
195
+ "(": 6,
196
+ ")": 7,
197
+ "1": 8,
198
+ "O": 9,
199
+ "N": 10,
200
+ "2": 11,
201
+ "=": 12,
202
+ "n": 13,
203
+ "3": 14,
204
+ "[C@H]": 15,
205
+ "[C@@H]": 16,
206
+ "F": 17,
207
+ "S": 18,
208
+ "4": 19,
209
+ "Cl": 20,
210
+ "-": 21,
211
+ "o": 22,
212
+ "s": 23,
213
+ "[nH]": 24,
214
+ "#": 25,
215
+ "/": 26,
216
+ "Br": 27,
217
+ "[C@]": 28,
218
+ "[C@@]": 29,
219
+ "[N+]": 30,
220
+ "[O-]": 31,
221
+ "5": 32,
222
+ "\\": 33,
223
+ ".": 34,
224
+ "I": 35,
225
+ "6": 36,
226
+ "[S@]": 37,
227
+ "[S@@]": 38,
228
+ "P": 39,
229
+ "[N-]": 40,
230
+ "[Si]": 41,
231
+ "7": 42,
232
+ "[n+]": 43,
233
+ "[2H]": 44,
234
+ "8": 45,
235
+ "[NH+]": 46,
236
+ "B": 47,
237
+ "9": 48,
238
+ "[C-]": 49,
239
+ "[Na+]": 50,
240
+ "[Cl-]": 51,
241
+ "[c-]": 52,
242
+ "[CH]": 53,
243
+ "%10": 54,
244
+ "[NH2+]": 55,
245
+ "[P+]": 56,
246
+ "[B]": 57,
247
+ "[I-]": 58,
248
+ "%11": 59,
249
+ "[CH2-]": 60,
250
+ "[O+]": 61,
251
+ "[NH3+]": 62,
252
+ "[C]": 63,
253
+ "[Br-]": 64,
254
+ "[IH2]": 65,
255
+ "[S-]": 66,
256
+ "[cH-]": 67,
257
+ "%12": 68,
258
+ "[nH+]": 69,
259
+ "[B-]": 70,
260
+ "[K+]": 71,
261
+ "[Sn]": 72,
262
+ "[Se]": 73,
263
+ "[CH-]": 74,
264
+ "[HH]": 75,
265
+ "[Y]": 76,
266
+ "[n-]": 77,
267
+ "[CH3-]": 78,
268
+ "[SiH]": 79,
269
+ "[S+]": 80,
270
+ "%13": 81,
271
+ "[SiH2]": 82,
272
+ "[Li+]": 83,
273
+ "[NH-]": 84,
274
+ "%14": 85,
275
+ "[Na]": 86,
276
+ "[CH2]": 87,
277
+ "[O-2]": 88,
278
+ "[U+2]": 89,
279
+ "[W]": 90,
280
+ "[Al]": 91,
281
+ "[P@]": 92,
282
+ "[Fe+2]": 93,
283
+ "[PH+]": 94,
284
+ "%15": 95,
285
+ "[Cl+3]": 96,
286
+ "[Zn+2]": 97,
287
+ "[Ir]": 98,
288
+ "[Mg+2]": 99,
289
+ "[Pt+2]": 100,
290
+ "[OH2+]": 101,
291
+ "[As]": 102,
292
+ "[Fe]": 103,
293
+ "[OH+]": 104,
294
+ "[Zr+2]": 105,
295
+ "[3H]": 106,
296
+ "[Ge]": 107,
297
+ "[SiH3]": 108,
298
+ "[OH-]": 109,
299
+ "[NH4+]": 110,
300
+ "[Cu+2]": 111,
301
+ "[P@@]": 112,
302
+ "p": 113,
303
+ "[Pt]": 114,
304
+ "%16": 115,
305
+ "[Ca+2]": 116,
306
+ "[Zr]": 117,
307
+ "[F-]": 118,
308
+ "[C+]": 119,
309
+ "[Ti]": 120,
310
+ "[P-]": 121,
311
+ "[V]": 122,
312
+ "[se]": 123,
313
+ "[U]": 124,
314
+ "[O]": 125,
315
+ "[Ni+2]": 126,
316
+ "[Zn]": 127,
317
+ "[Co]": 128,
318
+ "[Ni]": 129,
319
+ "[Pd+2]": 130,
320
+ "[Cu]": 131,
321
+ "%17": 132,
322
+ "[Cu+]": 133,
323
+ "[Te]": 134,
324
+ "[H+]": 135,
325
+ "[CH+]": 136,
326
+ "[Li]": 137,
327
+ "[Pd]": 138,
328
+ "[Mo]": 139,
329
+ "[Ru+2]": 140,
330
+ "[o+]": 141,
331
+ "[Re]": 142,
332
+ "[SH+]": 143,
333
+ "%18": 144,
334
+ "[Ac]": 145,
335
+ "[Cr]": 146,
336
+ "[NH2-]": 147,
337
+ "[K]": 148,
338
+ "[13CH2]": 149,
339
+ "[c]": 150,
340
+ "[Zr+4]": 151,
341
+ "[Tl]": 152,
342
+ "[13C]": 153,
343
+ "[Mn]": 154,
344
+ "[N@+]": 155,
345
+ "[Hg]": 156,
346
+ "[Rh]": 157,
347
+ "[Ti+4]": 158,
348
+ "[Sb]": 159,
349
+ "[Co+2]": 160,
350
+ "[Ag+]": 161,
351
+ "[Ru]": 162,
352
+ "%19": 163,
353
+ "[N@@+]": 164,
354
+ "[Ti+2]": 165,
355
+ "[Al+3]": 166,
356
+ "[Pb]": 167,
357
+ "[I+]": 168,
358
+ "[18F]": 169,
359
+ "[s+]": 170,
360
+ "[Rb+]": 171,
361
+ "[Ba+2]": 172,
362
+ "[H-]": 173,
363
+ "[Fe+3]": 174,
364
+ "[Ir+3]": 175,
365
+ "[13cH]": 176,
366
+ "%20": 177,
367
+ "[AlH2]": 178,
368
+ "[Au+]": 179,
369
+ "[13c]": 180,
370
+ "[SH2+]": 181,
371
+ "[Sn+2]": 182,
372
+ "[Mn+2]": 183,
373
+ "[Si-]": 184,
374
+ "[Ag]": 185,
375
+ "[N]": 186,
376
+ "[Bi]": 187,
377
+ "%21": 188,
378
+ "[In]": 189,
379
+ "[CH2+]": 190,
380
+ "[Y+3]": 191,
381
+ "[Ga]": 192,
382
+ "%22": 193,
383
+ "[Co+3]": 194,
384
+ "[Au]": 195,
385
+ "[13CH3]": 196,
386
+ "[Mg]": 197,
387
+ "[Cs+]": 198,
388
+ "[W+2]": 199,
389
+ "[Hf]": 200,
390
+ "[Zn+]": 201,
391
+ "[Se-]": 202,
392
+ "[S-2]": 203,
393
+ "[Ca]": 204,
394
+ "[pH]": 205,
395
+ "[ClH+]": 206,
396
+ "[Ti+3]": 207,
397
+ "%23": 208,
398
+ "[Ru+]": 209,
399
+ "[SH-]": 210,
400
+ "[13CH]": 211,
401
+ "[IH+]": 212,
402
+ "[Hf+4]": 213,
403
+ "[Rf]": 214,
404
+ "[OH3+]": 215,
405
+ "%24": 216,
406
+ "[Pt+4]": 217,
407
+ "[Zr+3]": 218,
408
+ "[PH3+]": 219,
409
+ "[Sr+2]": 220,
410
+ "[Cd+2]": 221,
411
+ "[Cd]": 222,
412
+ "%25": 223,
413
+ "[Os]": 224,
414
+ "[BH-]": 225,
415
+ "[Sn+4]": 226,
416
+ "[Cr+3]": 227,
417
+ "[Ru+3]": 228,
418
+ "[PH2+]": 229,
419
+ "[Rh+2]": 230,
420
+ "[V+2]": 231,
421
+ "%26": 232,
422
+ "[Gd+3]": 233,
423
+ "[Pb+2]": 234,
424
+ "[PH]": 235,
425
+ "[Hg+]": 236,
426
+ "[Mo+2]": 237,
427
+ "[AlH]": 238,
428
+ "[Sn+]": 239,
429
+ "%27": 240,
430
+ "[Pd+]": 241,
431
+ "b": 242,
432
+ "[Rh+3]": 243,
433
+ "[Hg+2]": 244,
434
+ "[15NH]": 245,
435
+ "[14C]": 246,
436
+ "%28": 247,
437
+ "[Mn+3]": 248,
438
+ "[Si+]": 249,
439
+ "[SeH]": 250,
440
+ "[13C@H]": 251,
441
+ "[NH]": 252,
442
+ "[Ga+3]": 253,
443
+ "[SiH-]": 254,
444
+ "[13C@@H]": 255,
445
+ "[Ce]": 256,
446
+ "[Au+3]": 257,
447
+ "[Bi+3]": 258,
448
+ "[15N]": 259,
449
+ "%29": 260,
450
+ "[BH3-]": 261,
451
+ "[14cH]": 262,
452
+ "[Ti+]": 263,
453
+ "[Gd]": 264,
454
+ "[cH+]": 265,
455
+ "[Cr+2]": 266,
456
+ "[Sb-]": 267,
457
+ "%30": 268,
458
+ "[Be+2]": 269,
459
+ "[Al+]": 270,
460
+ "[te]": 271,
461
+ "[11CH3]": 272,
462
+ "[Sm]": 273,
463
+ "[Pr]": 274,
464
+ "[La]": 275,
465
+ "%31": 276,
466
+ "[Al-]": 277,
467
+ "[Ta]": 278,
468
+ "[125I]": 279,
469
+ "[BH2-]": 280,
470
+ "[Nb]": 281,
471
+ "[Si@]": 282,
472
+ "%32": 283,
473
+ "[14c]": 284,
474
+ "[Sb+3]": 285,
475
+ "[Ba]": 286,
476
+ "%33": 287,
477
+ "[Os+2]": 288,
478
+ "[Si@@]": 289,
479
+ "[La+3]": 290,
480
+ "[15n]": 291,
481
+ "[15NH2]": 292,
482
+ "[Nd+3]": 293,
483
+ "%34": 294,
484
+ "[14CH2]": 295,
485
+ "[18O]": 296,
486
+ "[Nd]": 297,
487
+ "[GeH]": 298,
488
+ "[Ni+3]": 299,
489
+ "[Eu]": 300,
490
+ "[Dy+3]": 301,
491
+ "[Sc]": 302,
492
+ "%36": 303,
493
+ "[Se-2]": 304,
494
+ "[As+]": 305,
495
+ "%35": 306,
496
+ "[AsH]": 307,
497
+ "[Tb]": 308,
498
+ "[Sb+5]": 309,
499
+ "[Se+]": 310,
500
+ "[Ce+3]": 311,
501
+ "[c+]": 312,
502
+ "[In+3]": 313,
503
+ "[SnH]": 314,
504
+ "[Mo+4]": 315,
505
+ "%37": 316,
506
+ "[V+4]": 317,
507
+ "[Eu+3]": 318,
508
+ "[Hf+2]": 319,
509
+ "%38": 320,
510
+ "[Pt+]": 321,
511
+ "[p+]": 322,
512
+ "[123I]": 323,
513
+ "[Tl+]": 324,
514
+ "[Sm+3]": 325,
515
+ "%39": 326,
516
+ "[Yb+3]": 327,
517
+ "%40": 328,
518
+ "[Yb]": 329,
519
+ "[Os+]": 330,
520
+ "%41": 331,
521
+ "[10B]": 332,
522
+ "[Sc+3]": 333,
523
+ "[Al+2]": 334,
524
+ "%42": 335,
525
+ "[Sr]": 336,
526
+ "[Tb+3]": 337,
527
+ "[Po]": 338,
528
+ "[Tc]": 339,
529
+ "[PH-]": 340,
530
+ "[AlH3]": 341,
531
+ "[Ar]": 342,
532
+ "[U+4]": 343,
533
+ "[SnH2]": 344,
534
+ "[Cl+2]": 345,
535
+ "[si]": 346,
536
+ "[Fe+]": 347,
537
+ "[14CH3]": 348,
538
+ "[U+3]": 349,
539
+ "[Cl+]": 350,
540
+ "%43": 351,
541
+ "[GeH2]": 352,
542
+ "%44": 353,
543
+ "[Er+3]": 354,
544
+ "[Mo+3]": 355,
545
+ "[I+2]": 356,
546
+ "[Fe+4]": 357,
547
+ "[99Tc]": 358,
548
+ "%45": 359,
549
+ "[11C]": 360,
550
+ "%46": 361,
551
+ "[SnH3]": 362,
552
+ "[S]": 363,
553
+ "[Te+]": 364,
554
+ "[Er]": 365,
555
+ "[Lu+3]": 366,
556
+ "[11B]": 367,
557
+ "%47": 368,
558
+ "%48": 369,
559
+ "[P]": 370,
560
+ "[Tm]": 371,
561
+ "[Th]": 372,
562
+ "[Dy]": 373,
563
+ "[Pr+3]": 374,
564
+ "[Ta+5]": 375,
565
+ "[Nb+5]": 376,
566
+ "[Rb]": 377,
567
+ "[GeH3]": 378,
568
+ "[Br+2]": 379,
569
+ "%49": 380,
570
+ "[131I]": 381,
571
+ "[Fm]": 382,
572
+ "[Cs]": 383,
573
+ "[BH4-]": 384,
574
+ "[Lu]": 385,
575
+ "[15nH]": 386,
576
+ "%50": 387,
577
+ "[Ru+6]": 388,
578
+ "[b-]": 389,
579
+ "[Ho]": 390,
580
+ "[Th+4]": 391,
581
+ "[Ru+4]": 392,
582
+ "%52": 393,
583
+ "[14CH]": 394,
584
+ "%51": 395,
585
+ "[Cr+6]": 396,
586
+ "[18OH]": 397,
587
+ "[Ho+3]": 398,
588
+ "[Ce+4]": 399,
589
+ "[Bi+2]": 400,
590
+ "[Co+]": 401,
591
+ "%53": 402,
592
+ "[Yb+2]": 403,
593
+ "[Fe+6]": 404,
594
+ "[Be]": 405,
595
+ "%54": 406,
596
+ "[SH3+]": 407,
597
+ "[Np]": 408,
598
+ "[As-]": 409,
599
+ "%55": 410,
600
+ "[14C@@H]": 411,
601
+ "[Ir+2]": 412,
602
+ "[GaH3]": 413,
603
+ "[p-]": 414,
604
+ "[GeH4]": 415,
605
+ "[Sn+3]": 416,
606
+ "[Os+4]": 417,
607
+ "%56": 418,
608
+ "[14C@H]": 419,
609
+ "[sH+]": 420,
610
+ "[19F]": 421,
611
+ "[Eu+2]": 422,
612
+ "[TlH]": 423,
613
+ "%57": 424,
614
+ "[Cr+4]": 425,
615
+ "%58": 426,
616
+ "[B@@-]": 427,
617
+ "[SiH+]": 428,
618
+ "[At]": 429,
619
+ "[Am]": 430,
620
+ "[Fe+5]": 431,
621
+ "[AsH2]": 432,
622
+ "[Si+4]": 433,
623
+ "[B@-]": 434,
624
+ "[Pu]": 435,
625
+ "[SbH]": 436,
626
+ "[P-2]": 437,
627
+ "[Tm+3]": 438,
628
+ "*": 439,
629
+ "%59": 440,
630
+ "[se+]": 441,
631
+ "%60": 442,
632
+ "[oH+]": 443,
633
+ "[1H]": 444,
634
+ "[15N+]": 445,
635
+ "[124I]": 446,
636
+ "[S@@+]": 447,
637
+ "[P-3]": 448,
638
+ "[H]": 449,
639
+ "[IH2+]": 450,
640
+ "[TeH]": 451,
641
+ "[Xe]": 452,
642
+ "[PH4+]": 453,
643
+ "[Cr+]": 454,
644
+ "[Cm]": 455,
645
+ "[I+3]": 456,
646
+ "%61": 457,
647
+ "[Nb+2]": 458,
648
+ "[Ru+5]": 459,
649
+ "%62": 460,
650
+ "[Ta+2]": 461,
651
+ "[Tc+4]": 462,
652
+ "[CH3+]": 463,
653
+ "[Pm]": 464,
654
+ "[Si@H]": 465,
655
+ "[No]": 466,
656
+ "%63": 467,
657
+ "[Cr+5]": 468,
658
+ "[Th+2]": 469,
659
+ "[Zn-2]": 470,
660
+ "[13C@]": 471,
661
+ "[Lr]": 472,
662
+ "%64": 473,
663
+ "[99Tc+3]": 474,
664
+ "%65": 475,
665
+ "[13C@@]": 476,
666
+ "%66": 477,
667
+ "[Fe-]": 478,
668
+ "[17O]": 479,
669
+ "[siH]": 480,
670
+ "[Sb+]": 481,
671
+ "[OH]": 482,
672
+ "[IH]": 483,
673
+ "[11CH2]": 484,
674
+ "[Cf]": 485,
675
+ "[SiH2+]": 486,
676
+ "[Gd+2]": 487,
677
+ "[In+]": 488,
678
+ "[Si@@H]": 489,
679
+ "[Mn+]": 490,
680
+ "[99Tc+4]": 491,
681
+ "[Ga-]": 492,
682
+ "%67": 493,
683
+ "[S@+]": 494,
684
+ "[Ge+4]": 495,
685
+ "[Tl+3]": 496,
686
+ "[16OH]": 497,
687
+ "%68": 498,
688
+ "[2H-]": 499,
689
+ "[Ra]": 500,
690
+ "[si-]": 501,
691
+ "[NiH2]": 502,
692
+ "[P@@H]": 503,
693
+ "[Rh+]": 504,
694
+ "[12C]": 505,
695
+ "[35S]": 506,
696
+ "[32P]": 507,
697
+ "[SiH2-]": 508,
698
+ "[AlH2+]": 509,
699
+ "[16O]": 510,
700
+ "%69": 511,
701
+ "[BiH]": 512,
702
+ "[BiH2]": 513,
703
+ "[Zn-]": 514,
704
+ "[BH]": 515,
705
+ "[Tc+3]": 516,
706
+ "[Ir+]": 517,
707
+ "[Ni+]": 518,
708
+ "%70": 519,
709
+ "[InH2]": 520,
710
+ "[InH]": 521,
711
+ "[Nb+3]": 522,
712
+ "[PbH]": 523,
713
+ "[Bi+]": 524,
714
+ "%71": 525,
715
+ "[As+3]": 526,
716
+ "%72": 527,
717
+ "[18O-]": 528,
718
+ "[68Ga+3]": 529,
719
+ "%73": 530,
720
+ "[Pa]": 531,
721
+ "[76Br]": 532,
722
+ "[Tc+5]": 533,
723
+ "[pH+]": 534,
724
+ "[64Cu+2]": 535,
725
+ "[Ru+8]": 536,
726
+ "%74": 537,
727
+ "[PH2-]": 538,
728
+ "[Si+2]": 539,
729
+ "[17OH]": 540,
730
+ "[RuH]": 541,
731
+ "[111In+3]": 542,
732
+ "[AlH+]": 543,
733
+ "%75": 544,
734
+ "%76": 545,
735
+ "[W+]": 546,
736
+ "[SbH2]": 547,
737
+ "[PoH]": 548,
738
+ "[Ru-]": 549,
739
+ "[XeH]": 550,
740
+ "[Tc+2]": 551,
741
+ "[13C-]": 552,
742
+ "[Br+]": 553,
743
+ "[Pt-2]": 554,
744
+ "[Es]": 555,
745
+ "[Cu-]": 556,
746
+ "[Mg+]": 557,
747
+ "[3HH]": 558,
748
+ "[P@H]": 559,
749
+ "[ClH2+]": 560,
750
+ "%77": 561,
751
+ "[SH]": 562,
752
+ "[Au-]": 563,
753
+ "[2HH]": 564,
754
+ "%78": 565,
755
+ "[Sn-]": 566,
756
+ "[11CH]": 567,
757
+ "[PdH2]": 568,
758
+ "0": 569,
759
+ "[Os+6]": 570,
760
+ "%79": 571,
761
+ "[Mo+]": 572,
762
+ "%80": 573,
763
+ "[al]": 574,
764
+ "[PbH2]": 575,
765
+ "[64Cu]": 576,
766
+ "[Cl]": 577,
767
+ "[12CH3]": 578,
768
+ "%81": 579,
769
+ "[Tc+7]": 580,
770
+ "[11c]": 581,
771
+ "%82": 582,
772
+ "[Li-]": 583,
773
+ "[99Tc+5]": 584,
774
+ "[He]": 585,
775
+ "[12c]": 586,
776
+ "[Kr]": 587,
777
+ "[RuH+2]": 588,
778
+ "[35Cl]": 589,
779
+ "[Pd-2]": 590,
780
+ "[GaH2]": 591,
781
+ "[4H]": 592,
782
+ "[Sg]": 593,
783
+ "[Cu-2]": 594,
784
+ "[Br+3]": 595,
785
+ "%83": 596,
786
+ "[37Cl]": 597,
787
+ "[211At]": 598,
788
+ "[IrH+2]": 599,
789
+ "[Mt]": 600,
790
+ "[Ir-2]": 601,
791
+ "[In-]": 602,
792
+ "[12cH]": 603,
793
+ "[12CH2]": 604,
794
+ "[RuH2]": 605,
795
+ "[99Tc+7]": 606,
796
+ "%84": 607,
797
+ "[15n+]": 608,
798
+ "[ClH2+2]": 609,
799
+ "[16N]": 610,
800
+ "[111In]": 611,
801
+ "[Tc+]": 612,
802
+ "[Ru-2]": 613,
803
+ "[12CH]": 614,
804
+ "[si+]": 615,
805
+ "[Tc+6]": 616,
806
+ "%85": 617,
807
+ "%86": 618,
808
+ "[90Y]": 619,
809
+ "[Pd-]": 620,
810
+ "[188Re]": 621,
811
+ "[RuH+]": 622,
812
+ "[NiH]": 623,
813
+ "[SiH3-]": 624,
814
+ "[14n]": 625,
815
+ "[CH3]": 626,
816
+ "[14N]": 627,
817
+ "[10BH2]": 628,
818
+ "%88": 629,
819
+ "%89": 630,
820
+ "%90": 631,
821
+ "[34S]": 632,
822
+ "[77Br]": 633,
823
+ "[GaH]": 634,
824
+ "[Br]": 635,
825
+ "[Ge@]": 636,
826
+ "[B@@H-]": 637,
827
+ "[CuH]": 638,
828
+ "[SiH4]": 639,
829
+ "[3H-]": 640,
830
+ "%87": 641,
831
+ "%91": 642,
832
+ "%92": 643,
833
+ "[67Cu]": 644,
834
+ "[I]": 645,
835
+ "[177Lu]": 646,
836
+ "[ReH]": 647,
837
+ "[67Ga+3]": 648,
838
+ "[Db]": 649,
839
+ "[177Lu+3]": 650,
840
+ "[AlH2-]": 651,
841
+ "[Si+3]": 652,
842
+ "[Ti-2]": 653,
843
+ "[RuH+3]": 654,
844
+ "[al+]": 655,
845
+ "[68Ga]": 656,
846
+ "[2H+]": 657,
847
+ "[B@H-]": 658,
848
+ "[WH2]": 659,
849
+ "[OsH]": 660,
850
+ "[Ir-3]": 661,
851
+ "[AlH-]": 662,
852
+ "[Bk]": 663,
853
+ "[75Se]": 664,
854
+ "[14C@]": 665,
855
+ "[Pt-]": 666,
856
+ "[N@@H+]": 667,
857
+ "[Nb-]": 668,
858
+ "[13NH2]": 669,
859
+ "%93": 670,
860
+ "[186Re]": 671,
861
+ "[Tb+4]": 672,
862
+ "[PtH]": 673,
863
+ "[IrH2]": 674,
864
+ "[Hg-2]": 675,
865
+ "[AlH3-]": 676,
866
+ "[PdH+]": 677,
867
+ "[Md]": 678,
868
+ "[RhH+2]": 679,
869
+ "[11cH]": 680,
870
+ "[Co-2]": 681,
871
+ "[15N-]": 682,
872
+ "[ZrH2]": 683,
873
+ "%94": 684,
874
+ "[Hg-]": 685,
875
+ "[127I]": 686,
876
+ "[AsH2+]": 687,
877
+ "[MoH2]": 688,
878
+ "[Te+4]": 689,
879
+ "[14C@@]": 690,
880
+ "[As+5]": 691,
881
+ "[SnH+3]": 692,
882
+ "[Ge@@]": 693,
883
+ "[6Li+]": 694,
884
+ "[WH]": 695,
885
+ "[Ne]": 696,
886
+ "[14NH2]": 697,
887
+ "[14NH]": 698,
888
+ "[12C@@H]": 699,
889
+ "[Os+7]": 700,
890
+ "[RhH]": 701,
891
+ "[Al-3]": 702,
892
+ "[SnH+]": 703,
893
+ "[15NH3+]": 704,
894
+ "[Zr+]": 705,
895
+ "[197Hg+]": 706,
896
+ "%95": 707,
897
+ "%96": 708,
898
+ "[90Y+3]": 709,
899
+ "[Os-2]": 710,
900
+ "[98Tc+5]": 711,
901
+ "[15NH3]": 712,
902
+ "[bH-]": 713,
903
+ "[33P]": 714,
904
+ "[Zr-2]": 715,
905
+ "[15O]": 716,
906
+ "[Rh-]": 717,
907
+ "[PbH3]": 718,
908
+ "[PH2]": 719,
909
+ "[Ni-]": 720,
910
+ "[CuH+]": 721,
911
+ "%97": 722,
912
+ "%98": 723,
913
+ "%99": 724,
914
+ "[Os+5]": 725,
915
+ "[PtH+]": 726,
916
+ "[ReH4]": 727,
917
+ "[16NH]": 728,
918
+ "[82Br]": 729,
919
+ "[W-]": 730,
920
+ "[18F-]": 731,
921
+ "[15NH4+]": 732,
922
+ "[Se+4]": 733,
923
+ "[SeH-]": 734,
924
+ "[67Cu+2]": 735,
925
+ "[12C@H]": 736,
926
+ "[AsH3]": 737,
927
+ "[HgH]": 738,
928
+ "[10B-]": 739,
929
+ "[99Tc+6]": 740,
930
+ "[117Sn+4]": 741,
931
+ "[Te@]": 742,
932
+ "[P@+]": 743,
933
+ "[35SH]": 744,
934
+ "[SeH+]": 745,
935
+ "[Ni-2]": 746,
936
+ "[Al-2]": 747,
937
+ "[TeH2]": 748,
938
+ "[Bh]": 749,
939
+ "[99Tc+2]": 750,
940
+ "[Os+8]": 751,
941
+ "[PH-2]": 752,
942
+ "[7Li+]": 753,
943
+ "[14nH]": 754,
944
+ "[AlH+2]": 755,
945
+ "[18FH]": 756,
946
+ "[SnH4]": 757,
947
+ "[18O-2]": 758,
948
+ "[IrH]": 759,
949
+ "[13N]": 760,
950
+ "[Te@@]": 761,
951
+ "[Rh-3]": 762,
952
+ "[15NH+]": 763,
953
+ "[AsH3+]": 764,
954
+ "[SeH2]": 765,
955
+ "[AsH+]": 766,
956
+ "[CoH2]": 767,
957
+ "[16NH2]": 768,
958
+ "[AsH-]": 769,
959
+ "[203Hg+]": 770,
960
+ "[P@@+]": 771,
961
+ "[166Ho+3]": 772,
962
+ "[60Co+3]": 773,
963
+ "[13CH2-]": 774,
964
+ "[SeH2+]": 775,
965
+ "[75Br]": 776,
966
+ "[TlH2]": 777,
967
+ "[80Br]": 778,
968
+ "[siH+]": 779,
969
+ "[Ca+]": 780,
970
+ "[153Sm+3]": 781,
971
+ "[PdH]": 782,
972
+ "[225Ac]": 783,
973
+ "[13CH3-]": 784,
974
+ "[AlH4-]": 785,
975
+ "[FeH]": 786,
976
+ "[13CH-]": 787,
977
+ "[14C-]": 788,
978
+ "[11C-]": 789,
979
+ "[153Sm]": 790,
980
+ "[Re-]": 791,
981
+ "[te+]": 792,
982
+ "[13CH4]": 793,
983
+ "[ClH+2]": 794,
984
+ "[8CH2]": 795,
985
+ "[99Mo]": 796,
986
+ "[ClH3+3]": 797,
987
+ "[SbH3]": 798,
988
+ "[25Mg+2]": 799,
989
+ "[16N+]": 800,
990
+ "[SnH2+]": 801,
991
+ "[11C@H]": 802,
992
+ "[122I]": 803,
993
+ "[Re-2]": 804,
994
+ "[RuH2+2]": 805,
995
+ "[ZrH]": 806,
996
+ "[Bi-]": 807,
997
+ "[Pr+]": 808,
998
+ "[Rn]": 809,
999
+ "[Fr]": 810,
1000
+ "[36Cl]": 811,
1001
+ "[18o]": 812,
1002
+ "[YH]": 813,
1003
+ "[79Br]": 814,
1004
+ "[121I]": 815,
1005
+ "[113In+3]": 816,
1006
+ "[TaH]": 817,
1007
+ "[RhH2]": 818,
1008
+ "[Ta-]": 819,
1009
+ "[67Ga]": 820,
1010
+ "[ZnH+]": 821,
1011
+ "[SnH2-]": 822,
1012
+ "[OsH2]": 823,
1013
+ "[16F]": 824,
1014
+ "[FeH2]": 825,
1015
+ "[14O]": 826,
1016
+ "[PbH2+2]": 827,
1017
+ "[BH2]": 828,
1018
+ "[6H]": 829,
1019
+ "[125Te]": 830,
1020
+ "[197Hg]": 831,
1021
+ "[TaH2]": 832,
1022
+ "[TaH3]": 833,
1023
+ "[76As]": 834,
1024
+ "[Nb-2]": 835,
1025
+ "[14N+]": 836,
1026
+ "[125I-]": 837,
1027
+ "[33S]": 838,
1028
+ "[IH2+2]": 839,
1029
+ "[NH2]": 840,
1030
+ "[PtH2]": 841,
1031
+ "[MnH]": 842,
1032
+ "[19C]": 843,
1033
+ "[17F]": 844,
1034
+ "[1H-]": 845,
1035
+ "[SnH4+2]": 846,
1036
+ "[Mn-2]": 847,
1037
+ "[15NH2+]": 848,
1038
+ "[TiH2]": 849,
1039
+ "[ReH7]": 850,
1040
+ "[Cd-2]": 851,
1041
+ "[Fe-3]": 852,
1042
+ "[SH2]": 853,
1043
+ "[17O-]": 854,
1044
+ "[siH-]": 855,
1045
+ "[CoH+]": 856,
1046
+ "[VH]": 857,
1047
+ "[10BH]": 858,
1048
+ "[Ru-3]": 859,
1049
+ "[13O]": 860,
1050
+ "[5H]": 861,
1051
+ "[15n-]": 862,
1052
+ "[153Gd]": 863,
1053
+ "[12C@]": 864,
1054
+ "[11CH3-]": 865,
1055
+ "[IrH3]": 866,
1056
+ "[RuH3]": 867,
1057
+ "[74Se]": 868,
1058
+ "[Se@]": 869,
1059
+ "[Hf+]": 870,
1060
+ "[77Se]": 871,
1061
+ "[166Ho]": 872,
1062
+ "[59Fe+2]": 873,
1063
+ "[203Hg]": 874,
1064
+ "[18OH-]": 875,
1065
+ "[8CH]": 876,
1066
+ "[12C@@]": 877,
1067
+ "[11CH4]": 878,
1068
+ "[15C]": 879,
1069
+ "[249Cf]": 880,
1070
+ "[PbH4]": 881,
1071
+ "[64Zn]": 882,
1072
+ "[99Tc+]": 883,
1073
+ "[14c-]": 884,
1074
+ "[149Pm]": 885,
1075
+ "[IrH4]": 886,
1076
+ "[Se@@]": 887,
1077
+ "[13OH]": 888,
1078
+ "[14CH3-]": 889,
1079
+ "[28Si]": 890,
1080
+ "[Rh-2]": 891,
1081
+ "[Fe-2]": 892,
1082
+ "[131I-]": 893,
1083
+ "[51Cr]": 894,
1084
+ "[62Cu+2]": 895,
1085
+ "[81Br]": 896,
1086
+ "[121Sb]": 897,
1087
+ "[7Li]": 898,
1088
+ "[89Zr+4]": 899,
1089
+ "[SbH3+]": 900,
1090
+ "[11C@@H]": 901,
1091
+ "[98Tc]": 902,
1092
+ "[59Fe+3]": 903,
1093
+ "[BiH2+]": 904,
1094
+ "[SbH+]": 905,
1095
+ "[TiH]": 906,
1096
+ "[14NH3]": 907,
1097
+ "[15OH]": 908,
1098
+ "[119Sn]": 909,
1099
+ "[201Hg]": 910,
1100
+ "[MnH+]": 911,
1101
+ "[201Tl]": 912,
1102
+ "[51Cr+3]": 913,
1103
+ "[123I-]": 914,
1104
+ "[MoH]": 915,
1105
+ "[AlH6-3]": 916,
1106
+ "[MnH2]": 917,
1107
+ "[WH3]": 918,
1108
+ "[213Bi+3]": 919,
1109
+ "[SnH2+2]": 920,
1110
+ "[123IH]": 921,
1111
+ "[13CH+]": 922,
1112
+ "[Zr-]": 923,
1113
+ "[74As]": 924,
1114
+ "[13C+]": 925,
1115
+ "[32P+]": 926,
1116
+ "[KrH]": 927,
1117
+ "[SiH+2]": 928,
1118
+ "[ClH3+2]": 929,
1119
+ "[13NH]": 930,
1120
+ "[9CH2]": 931,
1121
+ "[ZrH2+2]": 932,
1122
+ "[87Sr+2]": 933,
1123
+ "[35s]": 934,
1124
+ "[239Pu]": 935,
1125
+ "[198Au]": 936,
1126
+ "[241Am]": 937,
1127
+ "[203Hg+2]": 938,
1128
+ "[V+]": 939,
1129
+ "[YH2]": 940,
1130
+ "[195Pt]": 941,
1131
+ "[203Pb]": 942,
1132
+ "[RuH4]": 943,
1133
+ "[ThH2]": 944,
1134
+ "[AuH]": 945,
1135
+ "[66Ga+3]": 946,
1136
+ "[11B-]": 947,
1137
+ "[F]": 948,
1138
+ "[24Na+]": 949,
1139
+ "[85Sr+2]": 950,
1140
+ "[201Tl+]": 951,
1141
+ "[14CH4]": 952,
1142
+ "[32S]": 953,
1143
+ "[TeH2+]": 954,
1144
+ "[ClH2+3]": 955,
1145
+ "[AgH]": 956,
1146
+ "[Ge@H]": 957,
1147
+ "[44Ca+2]": 958,
1148
+ "[Os-]": 959,
1149
+ "[31P]": 960,
1150
+ "[15nH+]": 961,
1151
+ "[SbH4]": 962,
1152
+ "[TiH+]": 963,
1153
+ "[Ba+]": 964,
1154
+ "[57Co+2]": 965,
1155
+ "[Ta+]": 966,
1156
+ "[125IH]": 967,
1157
+ "[77As]": 968,
1158
+ "[129I]": 969,
1159
+ "[Fe-4]": 970,
1160
+ "[Ta-2]": 971,
1161
+ "[19O]": 972,
1162
+ "[12O]": 973,
1163
+ "[BiH3]": 974,
1164
+ "[237Np]": 975,
1165
+ "[252Cf]": 976,
1166
+ "[86Y]": 977,
1167
+ "[Cr-2]": 978,
1168
+ "[89Y]": 979,
1169
+ "[195Pt+2]": 980,
1170
+ "[si+2]": 981,
1171
+ "[58Fe+2]": 982,
1172
+ "[Hs]": 983,
1173
+ "[S@@H]": 984,
1174
+ "[8CH4]": 985,
1175
+ "[164Dy+3]": 986,
1176
+ "[47Ca+2]": 987,
1177
+ "[57Co]": 988,
1178
+ "[NbH2]": 989,
1179
+ "[ReH2]": 990,
1180
+ "[ZnH2]": 991,
1181
+ "[CrH2]": 992,
1182
+ "[17NH]": 993,
1183
+ "[ZrH3]": 994,
1184
+ "[RhH3]": 995,
1185
+ "[12C-]": 996,
1186
+ "[18O+]": 997,
1187
+ "[Bi-2]": 998,
1188
+ "[ClH4+3]": 999,
1189
+ "[Ni-3]": 1000,
1190
+ "[Ag-]": 1001,
1191
+ "[111In-]": 1002,
1192
+ "[Mo-2]": 1003,
1193
+ "[55Fe+3]": 1004,
1194
+ "[204Hg+]": 1005,
1195
+ "[35Cl-]": 1006,
1196
+ "[211Pb]": 1007,
1197
+ "[75Ge]": 1008,
1198
+ "[8B]": 1009,
1199
+ "[TeH3]": 1010,
1200
+ "[SnH3+]": 1011,
1201
+ "[Zr-3]": 1012,
1202
+ "[28F]": 1013,
1203
+ "[249Bk]": 1014,
1204
+ "[169Yb]": 1015,
1205
+ "[34SH]": 1016,
1206
+ "[6Li]": 1017,
1207
+ "[94Tc]": 1018,
1208
+ "[197Au]": 1019,
1209
+ "[195Pt+4]": 1020,
1210
+ "[169Yb+3]": 1021,
1211
+ "[32Cl]": 1022,
1212
+ "[82Se]": 1023,
1213
+ "[159Gd+3]": 1024,
1214
+ "[213Bi]": 1025,
1215
+ "[CoH+2]": 1026,
1216
+ "[36S]": 1027,
1217
+ "[35P]": 1028,
1218
+ "[Ru-4]": 1029,
1219
+ "[Cr-3]": 1030,
1220
+ "[60Co]": 1031,
1221
+ "[1H+]": 1032,
1222
+ "[18CH2]": 1033,
1223
+ "[Cd-]": 1034,
1224
+ "[152Sm+3]": 1035,
1225
+ "[106Ru]": 1036,
1226
+ "[238Pu]": 1037,
1227
+ "[220Rn]": 1038,
1228
+ "[45Ca+2]": 1039,
1229
+ "[89Sr+2]": 1040,
1230
+ "[239Np]": 1041,
1231
+ "[90Sr+2]": 1042,
1232
+ "[137Cs+]": 1043,
1233
+ "[165Dy]": 1044,
1234
+ "[68GaH3]": 1045,
1235
+ "[65Zn+2]": 1046,
1236
+ "[89Zr]": 1047,
1237
+ "[BiH2+2]": 1048,
1238
+ "[62Cu]": 1049,
1239
+ "[165Dy+3]": 1050,
1240
+ "[238U]": 1051,
1241
+ "[105Rh+3]": 1052,
1242
+ "[70Zn]": 1053,
1243
+ "[12B]": 1054,
1244
+ "[12OH]": 1055,
1245
+ "[18CH]": 1056,
1246
+ "[17CH]": 1057,
1247
+ "[42K]": 1058,
1248
+ "[76Br-]": 1059,
1249
+ "[71As]": 1060,
1250
+ "[NbH3]": 1061,
1251
+ "[ReH3]": 1062,
1252
+ "[OsH-]": 1063,
1253
+ "[WH4]": 1064,
1254
+ "[MoH3]": 1065,
1255
+ "[OsH4]": 1066,
1256
+ "[RuH6]": 1067,
1257
+ "[PtH3]": 1068,
1258
+ "[CuH2]": 1069,
1259
+ "[CoH3]": 1070,
1260
+ "[TiH4]": 1071,
1261
+ "[64Zn+2]": 1072,
1262
+ "[Si-2]": 1073,
1263
+ "[79BrH]": 1074,
1264
+ "[14CH2-]": 1075,
1265
+ "[PtH2+2]": 1076,
1266
+ "[Os-3]": 1077,
1267
+ "[29Si]": 1078,
1268
+ "[Ti-]": 1079,
1269
+ "[Se+6]": 1080,
1270
+ "[22Na+]": 1081,
1271
+ "[42K+]": 1082,
1272
+ "[131Cs+]": 1083,
1273
+ "[86Rb+]": 1084,
1274
+ "[134Cs+]": 1085,
1275
+ "[209Po]": 1086,
1276
+ "[208Po]": 1087,
1277
+ "[81Rb+]": 1088,
1278
+ "[203Tl+]": 1089,
1279
+ "[Zr-4]": 1090,
1280
+ "[148Sm]": 1091,
1281
+ "[147Sm]": 1092,
1282
+ "[37Cl-]": 1093,
1283
+ "[12CH4]": 1094,
1284
+ "[Ge@@H]": 1095,
1285
+ "[63Cu]": 1096,
1286
+ "[13CH2+]": 1097,
1287
+ "[AsH2-]": 1098,
1288
+ "[CeH]": 1099,
1289
+ "[SnH-]": 1100,
1290
+ "[UH]": 1101,
1291
+ "[9c]": 1102,
1292
+ "[21CH3]": 1103,
1293
+ "[TeH+]": 1104,
1294
+ "[57Co+3]": 1105,
1295
+ "[8BH2]": 1106,
1296
+ "[12BH2]": 1107,
1297
+ "[19BH2]": 1108,
1298
+ "[9BH2]": 1109,
1299
+ "[YbH2]": 1110,
1300
+ "[CrH+2]": 1111,
1301
+ "[208Bi]": 1112,
1302
+ "[152Gd]": 1113,
1303
+ "[61Cu]": 1114,
1304
+ "[115In]": 1115,
1305
+ "[60Co+2]": 1116,
1306
+ "[13NH2-]": 1117,
1307
+ "[120I]": 1118,
1308
+ "[18OH2]": 1119,
1309
+ "[75SeH]": 1120,
1310
+ "[SbH2+]": 1121,
1311
+ "[144Ce]": 1122,
1312
+ "[16n]": 1123,
1313
+ "[113In]": 1124,
1314
+ "[22nH]": 1125,
1315
+ "[129I-]": 1126,
1316
+ "[InH3]": 1127,
1317
+ "[32PH3]": 1128,
1318
+ "[234U]": 1129,
1319
+ "[235U]": 1130,
1320
+ "[59Fe]": 1131,
1321
+ "[82Rb+]": 1132,
1322
+ "[65Zn]": 1133,
1323
+ "[244Cm]": 1134,
1324
+ "[147Pm]": 1135,
1325
+ "[91Y]": 1136,
1326
+ "[237Pu]": 1137,
1327
+ "[231Pa]": 1138,
1328
+ "[253Cf]": 1139,
1329
+ "[127Te]": 1140,
1330
+ "[187Re]": 1141,
1331
+ "[236Np]": 1142,
1332
+ "[235Np]": 1143,
1333
+ "[72Zn]": 1144,
1334
+ "[253Es]": 1145,
1335
+ "[159Dy]": 1146,
1336
+ "[62Zn]": 1147,
1337
+ "[101Tc]": 1148,
1338
+ "[149Tb]": 1149,
1339
+ "[124I-]": 1150,
1340
+ "[SeH3+]": 1151,
1341
+ "[210Pb]": 1152,
1342
+ "[40K]": 1153,
1343
+ "[210Po]": 1154,
1344
+ "[214Pb]": 1155,
1345
+ "[218Po]": 1156,
1346
+ "[214Po]": 1157,
1347
+ "[7Be]": 1158,
1348
+ "[212Pb]": 1159,
1349
+ "[205Pb]": 1160,
1350
+ "[209Pb]": 1161,
1351
+ "[123Te]": 1162,
1352
+ "[202Pb]": 1163,
1353
+ "[72As]": 1164,
1354
+ "[201Pb]": 1165,
1355
+ "[70As]": 1166,
1356
+ "[73Ge]": 1167,
1357
+ "[200Pb]": 1168,
1358
+ "[198Pb]": 1169,
1359
+ "[66Ga]": 1170,
1360
+ "[73Se]": 1171,
1361
+ "[195Pb]": 1172,
1362
+ "[199Pb]": 1173,
1363
+ "[144Ce+3]": 1174,
1364
+ "[235U+2]": 1175,
1365
+ "[90Tc]": 1176,
1366
+ "[114In+3]": 1177,
1367
+ "[128I]": 1178,
1368
+ "[100Tc+]": 1179,
1369
+ "[82Br-]": 1180,
1370
+ "[191Pt+2]": 1181,
1371
+ "[191Pt+4]": 1182,
1372
+ "[193Pt+4]": 1183,
1373
+ "[31PH3]": 1184,
1374
+ "[125I+2]": 1185,
1375
+ "[131I+2]": 1186,
1376
+ "[125Te+4]": 1187,
1377
+ "[82Sr+2]": 1188,
1378
+ "[149Sm]": 1189,
1379
+ "[81BrH]": 1190,
1380
+ "[129Xe]": 1191,
1381
+ "[193Pt+2]": 1192,
1382
+ "[123I+2]": 1193,
1383
+ "[Cr-]": 1194,
1384
+ "[Co-]": 1195,
1385
+ "[227Th+4]": 1196,
1386
+ "[249Cf+3]": 1197,
1387
+ "[252Cf+3]": 1198,
1388
+ "[187Os]": 1199,
1389
+ "[16O-]": 1200,
1390
+ "[17O+]": 1201,
1391
+ "[16OH-]": 1202,
1392
+ "[98Tc+7]": 1203,
1393
+ "[58Co+2]": 1204,
1394
+ "[69Ga+3]": 1205,
1395
+ "[57Fe+2]": 1206,
1396
+ "[43K+]": 1207,
1397
+ "[16C]": 1208,
1398
+ "[52Fe+3]": 1209,
1399
+ "[SeH5]": 1210,
1400
+ "[194Pb]": 1211,
1401
+ "[196Pb]": 1212,
1402
+ "[197Pb]": 1213,
1403
+ "[213Pb]": 1214,
1404
+ "[9B]": 1215,
1405
+ "[19B]": 1216,
1406
+ "[11CH-]": 1217,
1407
+ "[9CH]": 1218,
1408
+ "[20OH]": 1219,
1409
+ "[25OH]": 1220,
1410
+ "[8cH]": 1221,
1411
+ "[TiH+3]": 1222,
1412
+ "[SnH6+3]": 1223,
1413
+ "[N@H+]": 1224,
1414
+ "[52Mn+2]": 1225,
1415
+ "[64Ga]": 1226,
1416
+ "[13B]": 1227,
1417
+ "[216Bi]": 1228,
1418
+ "[117Sn+2]": 1229,
1419
+ "[232Th]": 1230,
1420
+ "[SnH+2]": 1231,
1421
+ "[BiH5]": 1232,
1422
+ "[77Kr]": 1233,
1423
+ "[103Cd]": 1234,
1424
+ "[62Ni]": 1235,
1425
+ "[LaH3]": 1236,
1426
+ "[SmH3]": 1237,
1427
+ "[EuH3]": 1238,
1428
+ "[MoH5]": 1239,
1429
+ "[64Ni]": 1240,
1430
+ "[66Zn]": 1241,
1431
+ "[68Zn]": 1242,
1432
+ "[186W]": 1243,
1433
+ "[FeH4]": 1244,
1434
+ "[MoH4]": 1245,
1435
+ "[HgH2]": 1246,
1436
+ "[15NH2-]": 1247,
1437
+ "[UH2]": 1248,
1438
+ "[204Hg]": 1249,
1439
+ "[GaH4-]": 1250,
1440
+ "[ThH4]": 1251,
1441
+ "[WH6]": 1252,
1442
+ "[PtH4]": 1253,
1443
+ "[VH2]": 1254,
1444
+ "[UH3]": 1255,
1445
+ "[FeH3]": 1256,
1446
+ "[RuH5]": 1257,
1447
+ "[BiH4]": 1258,
1448
+ "[80Br-]": 1259,
1449
+ "[CeH3]": 1260,
1450
+ "[37ClH]": 1261,
1451
+ "[157Gd+3]": 1262,
1452
+ "[205Tl]": 1263,
1453
+ "[203Tl]": 1264,
1454
+ "[62Cu+]": 1265,
1455
+ "[64Cu+]": 1266,
1456
+ "[61Cu+]": 1267,
1457
+ "[37SH2]": 1268,
1458
+ "[30Si]": 1269,
1459
+ "[28Al]": 1270,
1460
+ "[19OH2]": 1271,
1461
+ "[8He]": 1272,
1462
+ "[6He]": 1273,
1463
+ "[153Pm]": 1274,
1464
+ "[209Bi]": 1275,
1465
+ "[66Zn+2]": 1276,
1466
+ "[10CH4]": 1277,
1467
+ "[191Ir]": 1278,
1468
+ "[66Cu]": 1279,
1469
+ "[16O+]": 1280,
1470
+ "[25O]": 1281,
1471
+ "[10c]": 1282,
1472
+ "[Co-3]": 1283,
1473
+ "[Sn@@]": 1284,
1474
+ "[17OH-]": 1285,
1475
+ "[206Po]": 1286,
1476
+ "[204Po]": 1287,
1477
+ "[202Po]": 1288,
1478
+ "[201Po]": 1289,
1479
+ "[200Po]": 1290,
1480
+ "[199Po]": 1291,
1481
+ "[198Po]": 1292,
1482
+ "[197Po]": 1293,
1483
+ "[196Po]": 1294,
1484
+ "[195Po]": 1295,
1485
+ "[194Po]": 1296,
1486
+ "[193Po]": 1297,
1487
+ "[192Po]": 1298,
1488
+ "[191Po]": 1299,
1489
+ "[190Po]": 1300,
1490
+ "[217Po]": 1301,
1491
+ "[BiH4-]": 1302,
1492
+ "[TeH4]": 1303,
1493
+ "[222Ra]": 1304,
1494
+ "[62Ga]": 1305,
1495
+ "[39Ar]": 1306,
1496
+ "[144Sm]": 1307,
1497
+ "[58Fe]": 1308,
1498
+ "[153Eu]": 1309,
1499
+ "[85Rb]": 1310,
1500
+ "[171Yb]": 1311,
1501
+ "[172Yb]": 1312,
1502
+ "[114Cd]": 1313,
1503
+ "[51Fe]": 1314,
1504
+ "[142Ce]": 1315,
1505
+ "[207Tl]": 1316,
1506
+ "[92Mo]": 1317,
1507
+ "[115Sn]": 1318,
1508
+ "[140Ce]": 1319,
1509
+ "[202Hg]": 1320,
1510
+ "[180W]": 1321,
1511
+ "[182W]": 1322,
1512
+ "[183W]": 1323,
1513
+ "[184W]": 1324,
1514
+ "[96Mo]": 1325,
1515
+ "[47Ti]": 1326,
1516
+ "[111Cd]": 1327,
1517
+ "[143Nd]": 1328,
1518
+ "[145Nd]": 1329,
1519
+ "[126Te]": 1330,
1520
+ "[128Te]": 1331,
1521
+ "[130Te]": 1332,
1522
+ "[185Re]": 1333,
1523
+ "[97Mo]": 1334,
1524
+ "[98Mo]": 1335,
1525
+ "[183Re]": 1336,
1526
+ "[52V]": 1337,
1527
+ "[80Se]": 1338,
1528
+ "[87Kr]": 1339,
1529
+ "[137Xe]": 1340,
1530
+ "[196Au]": 1341,
1531
+ "[146Ce]": 1342,
1532
+ "[88Kr]": 1343,
1533
+ "[51Ti]": 1344,
1534
+ "[138Xe]": 1345,
1535
+ "[112Cd]": 1346,
1536
+ "[116Sn]": 1347,
1537
+ "[120Sn]": 1348,
1538
+ "[28SiH3]": 1349,
1539
+ "[35S-]": 1350,
1540
+ "[15NH-]": 1351,
1541
+ "[13CH3+]": 1352,
1542
+ "[34S+]": 1353,
1543
+ "[34s]": 1354,
1544
+ "[SiH4-]": 1355,
1545
+ "[100Tc+5]": 1356,
1546
+ "[NiH2+2]": 1357,
1547
+ "[239Th]": 1358,
1548
+ "[186Lu]": 1359,
1549
+ "[AuH3]": 1360,
1550
+ "[I@@-]": 1361,
1551
+ "[XeH2]": 1362,
1552
+ "[B+]": 1363,
1553
+ "[16CH2]": 1364,
1554
+ "[8C]": 1365,
1555
+ "[TaH5]": 1366,
1556
+ "[FeH4-]": 1367,
1557
+ "[19C@H]": 1368,
1558
+ "[10NH]": 1369,
1559
+ "[FeH6-3]": 1370,
1560
+ "[22CH]": 1371,
1561
+ "[25N]": 1372,
1562
+ "[25N+]": 1373,
1563
+ "[25N-]": 1374,
1564
+ "[21CH2]": 1375,
1565
+ "[18cH]": 1376,
1566
+ "[113I]": 1377,
1567
+ "[ScH3]": 1378,
1568
+ "[30PH3]": 1379,
1569
+ "[43Ca+2]": 1380,
1570
+ "[41Ca+2]": 1381,
1571
+ "[106Cd]": 1382,
1572
+ "[122Sn]": 1383,
1573
+ "[18CH3]": 1384,
1574
+ "[58Co+3]": 1385,
1575
+ "[98Tc+4]": 1386,
1576
+ "[70Ge]": 1387,
1577
+ "[76Ge]": 1388,
1578
+ "[108Cd]": 1389,
1579
+ "[116Cd]": 1390,
1580
+ "[130Xe]": 1391,
1581
+ "[94Mo]": 1392,
1582
+ "[124Sn]": 1393,
1583
+ "[186Os]": 1394,
1584
+ "[188Os]": 1395,
1585
+ "[190Os]": 1396,
1586
+ "[192Os]": 1397,
1587
+ "[106Pd]": 1398,
1588
+ "[110Pd]": 1399,
1589
+ "[120Te]": 1400,
1590
+ "[132Ba]": 1401,
1591
+ "[134Ba]": 1402,
1592
+ "[136Ba]": 1403,
1593
+ "[136Ce]": 1404,
1594
+ "[138Ce]": 1405,
1595
+ "[156Dy]": 1406,
1596
+ "[158Dy]": 1407,
1597
+ "[160Dy]": 1408,
1598
+ "[163Dy]": 1409,
1599
+ "[162Er]": 1410,
1600
+ "[164Er]": 1411,
1601
+ "[167Er]": 1412,
1602
+ "[176Hf]": 1413,
1603
+ "[26Mg]": 1414,
1604
+ "[144Nd]": 1415,
1605
+ "[150Nd]": 1416,
1606
+ "[41K]": 1417,
1607
+ "[46Ti]": 1418,
1608
+ "[48Ti]": 1419,
1609
+ "[49Ti]": 1420,
1610
+ "[50Ti]": 1421,
1611
+ "[170Yb]": 1422,
1612
+ "[173Yb]": 1423,
1613
+ "[91Zr]": 1424,
1614
+ "[92Zr]": 1425,
1615
+ "[96Zr]": 1426,
1616
+ "[34S-]": 1427,
1617
+ "[CuH2-]": 1428,
1618
+ "[38Cl]": 1429,
1619
+ "[25Mg]": 1430,
1620
+ "[51V]": 1431,
1621
+ "[93Nb]": 1432,
1622
+ "[95Mo]": 1433,
1623
+ "[45Sc]": 1434,
1624
+ "[123Sb]": 1435,
1625
+ "[139La]": 1436,
1626
+ "[9Be]": 1437,
1627
+ "[99Y+3]": 1438,
1628
+ "[99Y]": 1439,
1629
+ "[156Ho]": 1440,
1630
+ "[67Zn]": 1441,
1631
+ "[144Ce+4]": 1442,
1632
+ "[210Tl]": 1443,
1633
+ "[42Ca]": 1444,
1634
+ "[54Fe]": 1445,
1635
+ "[193Ir]": 1446,
1636
+ "[92Nb]": 1447,
1637
+ "[141Cs]": 1448,
1638
+ "[52Cr]": 1449,
1639
+ "[35ClH]": 1450,
1640
+ "[46Ca]": 1451,
1641
+ "[139Cs]": 1452,
1642
+ "[65Cu]": 1453,
1643
+ "[71Ga]": 1454,
1644
+ "[60Ni]": 1455,
1645
+ "[16NH3]": 1456,
1646
+ "[148Nd]": 1457,
1647
+ "[72Ge]": 1458,
1648
+ "[161Dy]": 1459,
1649
+ "[49Ca]": 1460,
1650
+ "[43Ca]": 1461,
1651
+ "[8Be]": 1462,
1652
+ "[48Ca]": 1463,
1653
+ "[44Ca]": 1464,
1654
+ "[120Xe]": 1465,
1655
+ "[80Rb]": 1466,
1656
+ "[215At]": 1467,
1657
+ "[180Re]": 1468,
1658
+ "[146Sm]": 1469,
1659
+ "[19Ne]": 1470,
1660
+ "[74Kr]": 1471,
1661
+ "[134La]": 1472,
1662
+ "[76Kr]": 1473,
1663
+ "[219Fr]": 1474,
1664
+ "[121Xe]": 1475,
1665
+ "[220Fr]": 1476,
1666
+ "[216At]": 1477,
1667
+ "[223Ac]": 1478,
1668
+ "[218At]": 1479,
1669
+ "[37Ar]": 1480,
1670
+ "[135I]": 1481,
1671
+ "[110Cd]": 1482,
1672
+ "[94Tc+7]": 1483,
1673
+ "[86Y+3]": 1484,
1674
+ "[135I-]": 1485,
1675
+ "[15O-2]": 1486,
1676
+ "[151Eu+3]": 1487,
1677
+ "[161Tb+3]": 1488,
1678
+ "[197Hg+2]": 1489,
1679
+ "[109Cd+2]": 1490,
1680
+ "[191Os+4]": 1491,
1681
+ "[170Tm+3]": 1492,
1682
+ "[205Bi+3]": 1493,
1683
+ "[233U+4]": 1494,
1684
+ "[126Sb+3]": 1495,
1685
+ "[127Sb+3]": 1496,
1686
+ "[132Cs+]": 1497,
1687
+ "[136Eu+3]": 1498,
1688
+ "[136Eu]": 1499,
1689
+ "[125Sn+4]": 1500,
1690
+ "[175Yb+3]": 1501,
1691
+ "[100Mo]": 1502,
1692
+ "[22Ne]": 1503,
1693
+ "[13c-]": 1504,
1694
+ "[13NH4+]": 1505,
1695
+ "[17C]": 1506,
1696
+ "[9C]": 1507,
1697
+ "[31S]": 1508,
1698
+ "[31SH]": 1509,
1699
+ "[133I]": 1510,
1700
+ "[126I]": 1511,
1701
+ "[36SH]": 1512,
1702
+ "[30S]": 1513,
1703
+ "[32SH]": 1514,
1704
+ "[19CH2]": 1515,
1705
+ "[19c]": 1516,
1706
+ "[18c]": 1517,
1707
+ "[15F]": 1518,
1708
+ "[10C]": 1519,
1709
+ "[RuH-]": 1520,
1710
+ "[62Zn+2]": 1521,
1711
+ "[32ClH]": 1522,
1712
+ "[33ClH]": 1523,
1713
+ "[78BrH]": 1524,
1714
+ "[12Li+]": 1525,
1715
+ "[12Li]": 1526,
1716
+ "[233Ra]": 1527,
1717
+ "[68Ge+4]": 1528,
1718
+ "[44Sc+3]": 1529,
1719
+ "[91Y+3]": 1530,
1720
+ "[106Ru+3]": 1531,
1721
+ "[PoH2]": 1532,
1722
+ "[AtH]": 1533,
1723
+ "[55Fe]": 1534,
1724
+ "[233U]": 1535,
1725
+ "[210PoH2]": 1536,
1726
+ "[230Th]": 1537,
1727
+ "[228Th]": 1538,
1728
+ "[222Rn]": 1539,
1729
+ "[35SH2]": 1540,
1730
+ "[227Th]": 1541,
1731
+ "[192Ir]": 1542,
1732
+ "[133Xe]": 1543,
1733
+ "[81Kr]": 1544,
1734
+ "[95Zr]": 1545,
1735
+ "[240Pu]": 1546,
1736
+ "[54Mn]": 1547,
1737
+ "[103Ru]": 1548,
1738
+ "[95Nb]": 1549,
1739
+ "[109Cd]": 1550,
1740
+ "[141Ce]": 1551,
1741
+ "[85Kr]": 1552,
1742
+ "[110Ag]": 1553,
1743
+ "[58Co]": 1554,
1744
+ "[241Pu]": 1555,
1745
+ "[234Th]": 1556,
1746
+ "[140La]": 1557,
1747
+ "[63Ni]": 1558,
1748
+ "[152Eu]": 1559,
1749
+ "[132IH]": 1560,
1750
+ "[226Rn]": 1561,
1751
+ "[154Eu]": 1562,
1752
+ "[36ClH]": 1563,
1753
+ "[228Ac]": 1564,
1754
+ "[155Eu]": 1565,
1755
+ "[106Rh]": 1566,
1756
+ "[243Am]": 1567,
1757
+ "[227Ac]": 1568,
1758
+ "[243Cm]": 1569,
1759
+ "[236U]": 1570,
1760
+ "[144Pr]": 1571,
1761
+ "[232U]": 1572,
1762
+ "[32SH2]": 1573,
1763
+ "[88Y]": 1574,
1764
+ "[82BrH]": 1575,
1765
+ "[135IH]": 1576,
1766
+ "[242Cm]": 1577,
1767
+ "[115Cd]": 1578,
1768
+ "[242Pu]": 1579,
1769
+ "[46Sc]": 1580,
1770
+ "[56Mn]": 1581,
1771
+ "[234Pa]": 1582,
1772
+ "[41Ar]": 1583,
1773
+ "[147Nd]": 1584,
1774
+ "[187W]": 1585,
1775
+ "[151Sm]": 1586,
1776
+ "[59Ni]": 1587,
1777
+ "[233Pa]": 1588,
1778
+ "[52Mn]": 1589,
1779
+ "[94Nb]": 1590,
1780
+ "[219Rn]": 1591,
1781
+ "[236Pu]": 1592,
1782
+ "[13NH3]": 1593,
1783
+ "[93Zr]": 1594,
1784
+ "[51Cr+6]": 1595,
1785
+ "[TlH3]": 1596,
1786
+ "[123Xe]": 1597,
1787
+ "[160Tb]": 1598,
1788
+ "[170Tm]": 1599,
1789
+ "[182Ta]": 1600,
1790
+ "[175Yb]": 1601,
1791
+ "[93Mo]": 1602,
1792
+ "[143Ce]": 1603,
1793
+ "[191Os]": 1604,
1794
+ "[126IH]": 1605,
1795
+ "[48V]": 1606,
1796
+ "[113Cd]": 1607,
1797
+ "[47Sc]": 1608,
1798
+ "[181Hf]": 1609,
1799
+ "[185W]": 1610,
1800
+ "[143Pr]": 1611,
1801
+ "[191Pt]": 1612,
1802
+ "[181W]": 1613,
1803
+ "[33PH3]": 1614,
1804
+ "[97Ru]": 1615,
1805
+ "[97Tc]": 1616,
1806
+ "[111Ag]": 1617,
1807
+ "[169Er]": 1618,
1808
+ "[107Pd]": 1619,
1809
+ "[103Ru+2]": 1620,
1810
+ "[34SH2]": 1621,
1811
+ "[137Ce]": 1622,
1812
+ "[242Am]": 1623,
1813
+ "[117SnH2]": 1624,
1814
+ "[57Ni]": 1625,
1815
+ "[239U]": 1626,
1816
+ "[60Cu]": 1627,
1817
+ "[250Cf]": 1628,
1818
+ "[193Au]": 1629,
1819
+ "[69Zn]": 1630,
1820
+ "[55Co]": 1631,
1821
+ "[139Ce]": 1632,
1822
+ "[127Xe]": 1633,
1823
+ "[159Gd]": 1634,
1824
+ "[56Co]": 1635,
1825
+ "[177Hf]": 1636,
1826
+ "[244Pu]": 1637,
1827
+ "[38ClH]": 1638,
1828
+ "[142Pr]": 1639,
1829
+ "[199Hg]": 1640,
1830
+ "[179Hf]": 1641,
1831
+ "[178Hf]": 1642,
1832
+ "[237U]": 1643,
1833
+ "[156Eu]": 1644,
1834
+ "[157Eu]": 1645,
1835
+ "[105Ru]": 1646,
1836
+ "[171Tm]": 1647,
1837
+ "[199Au]": 1648,
1838
+ "[155Sm]": 1649,
1839
+ "[80BrH]": 1650,
1840
+ "[108Ag]": 1651,
1841
+ "[128IH]": 1652,
1842
+ "[48Sc]": 1653,
1843
+ "[45Ti]": 1654,
1844
+ "[176Lu]": 1655,
1845
+ "[121SnH2]": 1656,
1846
+ "[148Pm]": 1657,
1847
+ "[57Fe]": 1658,
1848
+ "[10BH3]": 1659,
1849
+ "[96Tc]": 1660,
1850
+ "[133IH]": 1661,
1851
+ "[143Pm]": 1662,
1852
+ "[105Rh]": 1663,
1853
+ "[130IH]": 1664,
1854
+ "[134IH]": 1665,
1855
+ "[131IH]": 1666,
1856
+ "[71Zn]": 1667,
1857
+ "[105Ag]": 1668,
1858
+ "[97Zr]": 1669,
1859
+ "[235Pu]": 1670,
1860
+ "[231Th]": 1671,
1861
+ "[109Pd]": 1672,
1862
+ "[93Y]": 1673,
1863
+ "[190Ir]": 1674,
1864
+ "[135Xe]": 1675,
1865
+ "[53Mn]": 1676,
1866
+ "[134Ce]": 1677,
1867
+ "[234Np]": 1678,
1868
+ "[240Am]": 1679,
1869
+ "[246Cf]": 1680,
1870
+ "[240Cm]": 1681,
1871
+ "[241Cm]": 1682,
1872
+ "[226Th]": 1683,
1873
+ "[39ClH]": 1684,
1874
+ "[229Th]": 1685,
1875
+ "[245Cm]": 1686,
1876
+ "[240U]": 1687,
1877
+ "[240Np]": 1688,
1878
+ "[249Cm]": 1689,
1879
+ "[243Pu]": 1690,
1880
+ "[145Pm]": 1691,
1881
+ "[199Pt]": 1692,
1882
+ "[246Bk]": 1693,
1883
+ "[193Pt]": 1694,
1884
+ "[230U]": 1695,
1885
+ "[250Cm]": 1696,
1886
+ "[44Ti]": 1697,
1887
+ "[175Hf]": 1698,
1888
+ "[254Fm]": 1699,
1889
+ "[255Fm]": 1700,
1890
+ "[257Fm]": 1701,
1891
+ "[92Y]": 1702,
1892
+ "[188Ir]": 1703,
1893
+ "[171Lu]": 1704,
1894
+ "[257Md]": 1705,
1895
+ "[247Bk]": 1706,
1896
+ "[121IH]": 1707,
1897
+ "[250Bk]": 1708,
1898
+ "[179Lu]": 1709,
1899
+ "[224Ac]": 1710,
1900
+ "[195Hg]": 1711,
1901
+ "[244Am]": 1712,
1902
+ "[246Pu]": 1713,
1903
+ "[194Au]": 1714,
1904
+ "[252Fm]": 1715,
1905
+ "[173Hf]": 1716,
1906
+ "[246Cm]": 1717,
1907
+ "[135Ce]": 1718,
1908
+ "[49Cr]": 1719,
1909
+ "[248Cf]": 1720,
1910
+ "[247Cm]": 1721,
1911
+ "[248Cm]": 1722,
1912
+ "[174Ta]": 1723,
1913
+ "[176Ta]": 1724,
1914
+ "[154Tb]": 1725,
1915
+ "[172Ta]": 1726,
1916
+ "[177Ta]": 1727,
1917
+ "[175Ta]": 1728,
1918
+ "[180Ta]": 1729,
1919
+ "[158Tb]": 1730,
1920
+ "[115Ag]": 1731,
1921
+ "[189Os]": 1732,
1922
+ "[251Cf]": 1733,
1923
+ "[145Pr]": 1734,
1924
+ "[147Pr]": 1735,
1925
+ "[76BrH]": 1736,
1926
+ "[102Rh]": 1737,
1927
+ "[238Np]": 1738,
1928
+ "[185Os]": 1739,
1929
+ "[246Am]": 1740,
1930
+ "[233Np]": 1741,
1931
+ "[166Dy]": 1742,
1932
+ "[254Es]": 1743,
1933
+ "[244Cf]": 1744,
1934
+ "[193Os]": 1745,
1935
+ "[245Am]": 1746,
1936
+ "[245Bk]": 1747,
1937
+ "[239Am]": 1748,
1938
+ "[238Am]": 1749,
1939
+ "[97Nb]": 1750,
1940
+ "[245Pu]": 1751,
1941
+ "[254Cf]": 1752,
1942
+ "[188W]": 1753,
1943
+ "[250Es]": 1754,
1944
+ "[251Es]": 1755,
1945
+ "[237Am]": 1756,
1946
+ "[182Hf]": 1757,
1947
+ "[258Md]": 1758,
1948
+ "[232Np]": 1759,
1949
+ "[238Cm]": 1760,
1950
+ "[60Fe]": 1761,
1951
+ "[109Pd+2]": 1762,
1952
+ "[234Pu]": 1763,
1953
+ "[141Ce+3]": 1764,
1954
+ "[136Nd]": 1765,
1955
+ "[136Pr]": 1766,
1956
+ "[173Ta]": 1767,
1957
+ "[110Ru]": 1768,
1958
+ "[147Tb]": 1769,
1959
+ "[253Fm]": 1770,
1960
+ "[139Nd]": 1771,
1961
+ "[178Re]": 1772,
1962
+ "[177Re]": 1773,
1963
+ "[200Au]": 1774,
1964
+ "[182Re]": 1775,
1965
+ "[156Tb]": 1776,
1966
+ "[155Tb]": 1777,
1967
+ "[157Tb]": 1778,
1968
+ "[161Tb]": 1779,
1969
+ "[161Ho]": 1780,
1970
+ "[167Tm]": 1781,
1971
+ "[173Lu]": 1782,
1972
+ "[179Ta]": 1783,
1973
+ "[171Er]": 1784,
1974
+ "[44Sc]": 1785,
1975
+ "[49Sc]": 1786,
1976
+ "[49V]": 1787,
1977
+ "[51Mn]": 1788,
1978
+ "[90Nb]": 1789,
1979
+ "[88Nb]": 1790,
1980
+ "[88Zr]": 1791,
1981
+ "[36SH2]": 1792,
1982
+ "[174Yb]": 1793,
1983
+ "[178Lu]": 1794,
1984
+ "[179W]": 1795,
1985
+ "[83BrH]": 1796,
1986
+ "[107Cd]": 1797,
1987
+ "[75BrH]": 1798,
1988
+ "[62Co]": 1799,
1989
+ "[48Cr]": 1800,
1990
+ "[63Zn]": 1801,
1991
+ "[102Ag]": 1802,
1992
+ "[154Sm]": 1803,
1993
+ "[168Er]": 1804,
1994
+ "[65Ni]": 1805,
1995
+ "[137La]": 1806,
1996
+ "[187Ir]": 1807,
1997
+ "[144Pm]": 1808,
1998
+ "[146Pm]": 1809,
1999
+ "[160Gd]": 1810,
2000
+ "[166Yb]": 1811,
2001
+ "[162Dy]": 1812,
2002
+ "[47V]": 1813,
2003
+ "[141Nd]": 1814,
2004
+ "[141Sm]": 1815,
2005
+ "[166Er]": 1816,
2006
+ "[150Sm]": 1817,
2007
+ "[146Eu]": 1818,
2008
+ "[149Eu]": 1819,
2009
+ "[174Lu]": 1820,
2010
+ "[17NH3]": 1821,
2011
+ "[102Ru]": 1822,
2012
+ "[170Hf]": 1823,
2013
+ "[188Pt]": 1824,
2014
+ "[61Ni]": 1825,
2015
+ "[56Ni]": 1826,
2016
+ "[149Gd]": 1827,
2017
+ "[151Gd]": 1828,
2018
+ "[141Pm]": 1829,
2019
+ "[147Gd]": 1830,
2020
+ "[146Gd]": 1831,
2021
+ "[161Er]": 1832,
2022
+ "[103Ag]": 1833,
2023
+ "[145Eu]": 1834,
2024
+ "[153Tb]": 1835,
2025
+ "[155Dy]": 1836,
2026
+ "[184Re]": 1837,
2027
+ "[180Os]": 1838,
2028
+ "[182Os]": 1839,
2029
+ "[186Pt]": 1840,
2030
+ "[181Os]": 1841,
2031
+ "[181Re]": 1842,
2032
+ "[151Tb]": 1843,
2033
+ "[178Ta]": 1844,
2034
+ "[178W]": 1845,
2035
+ "[189Pt]": 1846,
2036
+ "[194Hg]": 1847,
2037
+ "[145Sm]": 1848,
2038
+ "[150Tb]": 1849,
2039
+ "[132La]": 1850,
2040
+ "[158Gd]": 1851,
2041
+ "[104Ag]": 1852,
2042
+ "[193Hg]": 1853,
2043
+ "[94Ru]": 1854,
2044
+ "[137Pr]": 1855,
2045
+ "[155Ho]": 1856,
2046
+ "[117Cd]": 1857,
2047
+ "[99Ru]": 1858,
2048
+ "[146Nd]": 1859,
2049
+ "[218Rn]": 1860,
2050
+ "[95Y]": 1861,
2051
+ "[79Kr]": 1862,
2052
+ "[120IH]": 1863,
2053
+ "[138Pr]": 1864,
2054
+ "[100Pd]": 1865,
2055
+ "[166Tm]": 1866,
2056
+ "[90Mo]": 1867,
2057
+ "[151Nd]": 1868,
2058
+ "[231U]": 1869,
2059
+ "[138Nd]": 1870,
2060
+ "[89Nb]": 1871,
2061
+ "[98Nb]": 1872,
2062
+ "[162Ho]": 1873,
2063
+ "[142Sm]": 1874,
2064
+ "[186Ta]": 1875,
2065
+ "[104Tc]": 1876,
2066
+ "[184Ta]": 1877,
2067
+ "[185Ta]": 1878,
2068
+ "[170Er]": 1879,
2069
+ "[107Rh]": 1880,
2070
+ "[131La]": 1881,
2071
+ "[169Lu]": 1882,
2072
+ "[74BrH]": 1883,
2073
+ "[150Pm]": 1884,
2074
+ "[172Tm]": 1885,
2075
+ "[197Pt]": 1886,
2076
+ "[230Pu]": 1887,
2077
+ "[170Lu]": 1888,
2078
+ "[86Zr]": 1889,
2079
+ "[176W]": 1890,
2080
+ "[177W]": 1891,
2081
+ "[101Pd]": 1892,
2082
+ "[105Pd]": 1893,
2083
+ "[108Pd]": 1894,
2084
+ "[149Nd]": 1895,
2085
+ "[164Ho]": 1896,
2086
+ "[159Ho]": 1897,
2087
+ "[167Ho]": 1898,
2088
+ "[176Yb]": 1899,
2089
+ "[156Sm]": 1900,
2090
+ "[77BrH]": 1901,
2091
+ "[189Re]": 1902,
2092
+ "[99Rh]": 1903,
2093
+ "[100Rh]": 1904,
2094
+ "[151Pm]": 1905,
2095
+ "[232Pa]": 1906,
2096
+ "[228Pa]": 1907,
2097
+ "[230Pa]": 1908,
2098
+ "[66Ni]": 1909,
2099
+ "[194Os]": 1910,
2100
+ "[135La]": 1911,
2101
+ "[138La]": 1912,
2102
+ "[141La]": 1913,
2103
+ "[142La]": 1914,
2104
+ "[195Ir]": 1915,
2105
+ "[96Nb]": 1916,
2106
+ "[157Ho]": 1917,
2107
+ "[183Hf]": 1918,
2108
+ "[162Tm]": 1919,
2109
+ "[172Er]": 1920,
2110
+ "[148Eu]": 1921,
2111
+ "[150Eu]": 1922,
2112
+ "[15CH4]": 1923,
2113
+ "[89Kr]": 1924,
2114
+ "[143La]": 1925,
2115
+ "[58Ni]": 1926,
2116
+ "[61Co]": 1927,
2117
+ "[158Eu]": 1928,
2118
+ "[165Er]": 1929,
2119
+ "[167Yb]": 1930,
2120
+ "[173Tm]": 1931,
2121
+ "[175Tm]": 1932,
2122
+ "[172Hf]": 1933,
2123
+ "[172Lu]": 1934,
2124
+ "[93Tc]": 1935,
2125
+ "[177Yb]": 1936,
2126
+ "[124IH]": 1937,
2127
+ "[194Ir]": 1938,
2128
+ "[147Eu]": 1939,
2129
+ "[101Mo]": 1940,
2130
+ "[180Hf]": 1941,
2131
+ "[189Ir]": 1942,
2132
+ "[87Y]": 1943,
2133
+ "[43Sc]": 1944,
2134
+ "[195Au]": 1945,
2135
+ "[112Ag]": 1946,
2136
+ "[84BrH]": 1947,
2137
+ "[106Ag]": 1948,
2138
+ "[109Ag]": 1949,
2139
+ "[101Rh]": 1950,
2140
+ "[162Yb]": 1951,
2141
+ "[228Rn]": 1952,
2142
+ "[139Pr]": 1953,
2143
+ "[94Y]": 1954,
2144
+ "[201Au]": 1955,
2145
+ "[40PH3]": 1956,
2146
+ "[110Ag+]": 1957,
2147
+ "[104Cd]": 1958,
2148
+ "[133Ba+2]": 1959,
2149
+ "[226Ac]": 1960,
2150
+ "[145Gd]": 1961,
2151
+ "[186Ir]": 1962,
2152
+ "[184Ir]": 1963,
2153
+ "[224Rn]": 1964,
2154
+ "[185Ir]": 1965,
2155
+ "[182Ir]": 1966,
2156
+ "[184Hf]": 1967,
2157
+ "[200Pt]": 1968,
2158
+ "[227Pa]": 1969,
2159
+ "[178Yb]": 1970,
2160
+ "[72Br-]": 1971,
2161
+ "[72BrH]": 1972,
2162
+ "[248Am]": 1973,
2163
+ "[238Th]": 1974,
2164
+ "[161Gd]": 1975,
2165
+ "[35S-2]": 1976,
2166
+ "[107Ag]": 1977,
2167
+ "[FeH6-4]": 1978,
2168
+ "[89Sr]": 1979,
2169
+ "[SnH3-]": 1980,
2170
+ "[SeH3]": 1981,
2171
+ "[TeH3+]": 1982,
2172
+ "[SbH4+]": 1983,
2173
+ "[AsH4+]": 1984,
2174
+ "[4He]": 1985,
2175
+ "[AsH3-]": 1986,
2176
+ "[1HH]": 1987,
2177
+ "[3H+]": 1988,
2178
+ "[82Rb]": 1989,
2179
+ "[85Sr]": 1990,
2180
+ "[90Sr]": 1991,
2181
+ "[137Cs]": 1992,
2182
+ "[133Ba]": 1993,
2183
+ "[131Cs]": 1994,
2184
+ "[SbH5]": 1995,
2185
+ "[224Ra]": 1996,
2186
+ "[22Na]": 1997,
2187
+ "[210Bi]": 1998,
2188
+ "[214Bi]": 1999,
2189
+ "[228Ra]": 2000,
2190
+ "[127Sb]": 2001,
2191
+ "[136Cs]": 2002,
2192
+ "[125Sb]": 2003,
2193
+ "[134Cs]": 2004,
2194
+ "[140Ba]": 2005,
2195
+ "[45Ca]": 2006,
2196
+ "[206Pb]": 2007,
2197
+ "[207Pb]": 2008,
2198
+ "[24Na]": 2009,
2199
+ "[86Rb]": 2010,
2200
+ "[212Bi]": 2011,
2201
+ "[208Pb]": 2012,
2202
+ "[124Sb]": 2013,
2203
+ "[204Pb]": 2014,
2204
+ "[44K]": 2015,
2205
+ "[129Te]": 2016,
2206
+ "[113Sn]": 2017,
2207
+ "[204Tl]": 2018,
2208
+ "[87Sr]": 2019,
2209
+ "[208Tl]": 2020,
2210
+ "[87Rb]": 2021,
2211
+ "[47Ca]": 2022,
2212
+ "[135Cs]": 2023,
2213
+ "[216Po]": 2024,
2214
+ "[137Ba]": 2025,
2215
+ "[207Bi]": 2026,
2216
+ "[212Po]": 2027,
2217
+ "[79Se]": 2028,
2218
+ "[223Ra]": 2029,
2219
+ "[86Sr]": 2030,
2220
+ "[122Sb]": 2031,
2221
+ "[26Al]": 2032,
2222
+ "[32Si]": 2033,
2223
+ "[126Sn]": 2034,
2224
+ "[225Ra]": 2035,
2225
+ "[114In]": 2036,
2226
+ "[72Ga]": 2037,
2227
+ "[132Te]": 2038,
2228
+ "[10Be]": 2039,
2229
+ "[125Sn]": 2040,
2230
+ "[73As]": 2041,
2231
+ "[206Bi]": 2042,
2232
+ "[117Sn]": 2043,
2233
+ "[40Ca]": 2044,
2234
+ "[41Ca]": 2045,
2235
+ "[89Rb]": 2046,
2236
+ "[116In]": 2047,
2237
+ "[129Sb]": 2048,
2238
+ "[91Sr]": 2049,
2239
+ "[71Ge]": 2050,
2240
+ "[139Ba]": 2051,
2241
+ "[69Ga]": 2052,
2242
+ "[120Sb]": 2053,
2243
+ "[121Sn]": 2054,
2244
+ "[123Sn]": 2055,
2245
+ "[131Te]": 2056,
2246
+ "[77Ge]": 2057,
2247
+ "[135Ba]": 2058,
2248
+ "[82Sr]": 2059,
2249
+ "[43K]": 2060,
2250
+ "[131Ba]": 2061,
2251
+ "[92Sr]": 2062,
2252
+ "[88Rb]": 2063,
2253
+ "[129Cs]": 2064,
2254
+ "[144Cs]": 2065,
2255
+ "[127Cs]": 2066,
2256
+ "[200Tl]": 2067,
2257
+ "[202Tl]": 2068,
2258
+ "[141Ba]": 2069,
2259
+ "[117Sb]": 2070,
2260
+ "[116Sb]": 2071,
2261
+ "[78As]": 2072,
2262
+ "[131Sb]": 2073,
2263
+ "[126Sb]": 2074,
2264
+ "[128Sb]": 2075,
2265
+ "[130Sb]": 2076,
2266
+ "[67Ge]": 2077,
2267
+ "[68Ge]": 2078,
2268
+ "[78Ge]": 2079,
2269
+ "[66Ge]": 2080,
2270
+ "[223Fr]": 2081,
2271
+ "[132Cs]": 2082,
2272
+ "[125Cs]": 2083,
2273
+ "[138Cs]": 2084,
2274
+ "[133Te]": 2085,
2275
+ "[84Rb]": 2086,
2276
+ "[83Rb]": 2087,
2277
+ "[81Rb]": 2088,
2278
+ "[142Ba]": 2089,
2279
+ "[200Bi]": 2090,
2280
+ "[115Sb]": 2091,
2281
+ "[194Tl]": 2092,
2282
+ "[70Se]": 2093,
2283
+ "[112In]": 2094,
2284
+ "[118Sb]": 2095,
2285
+ "[70Ga]": 2096,
2286
+ "[27Mg]": 2097,
2287
+ "[202Bi]": 2098,
2288
+ "[83Se]": 2099,
2289
+ "[9Li]": 2100,
2290
+ "[69As]": 2101,
2291
+ "[79Rb]": 2102,
2292
+ "[81Sr]": 2103,
2293
+ "[83Sr]": 2104,
2294
+ "[78Se]": 2105,
2295
+ "[109In]": 2106,
2296
+ "[29Al]": 2107,
2297
+ "[118Sn]": 2108,
2298
+ "[117In]": 2109,
2299
+ "[119Sb]": 2110,
2300
+ "[114Sn]": 2111,
2301
+ "[138Ba]": 2112,
2302
+ "[69Ge]": 2113,
2303
+ "[73Ga]": 2114,
2304
+ "[74Ge]": 2115,
2305
+ "[206Tl]": 2116,
2306
+ "[199Tl]": 2117,
2307
+ "[130Cs]": 2118,
2308
+ "[28Mg]": 2119,
2309
+ "[116Te]": 2120,
2310
+ "[112Sn]": 2121,
2311
+ "[126Ba]": 2122,
2312
+ "[211Bi]": 2123,
2313
+ "[81Se]": 2124,
2314
+ "[127Sn]": 2125,
2315
+ "[143Cs]": 2126,
2316
+ "[134Te]": 2127,
2317
+ "[80Sr]": 2128,
2318
+ "[45K]": 2129,
2319
+ "[215Po]": 2130,
2320
+ "[207Po]": 2131,
2321
+ "[111Sn]": 2132,
2322
+ "[211Po]": 2133,
2323
+ "[128Ba]": 2134,
2324
+ "[198Tl]": 2135,
2325
+ "[227Ra]": 2136,
2326
+ "[213Po]": 2137,
2327
+ "[220Ra]": 2138,
2328
+ "[128Sn]": 2139,
2329
+ "[203Po]": 2140,
2330
+ "[205Po]": 2141,
2331
+ "[65Ga]": 2142,
2332
+ "[197Tl]": 2143,
2333
+ "[88Sr]": 2144,
2334
+ "[110In]": 2145,
2335
+ "[31Si]": 2146,
2336
+ "[201Bi]": 2147,
2337
+ "[121Te]": 2148,
2338
+ "[205Bi]": 2149,
2339
+ "[203Bi]": 2150,
2340
+ "[195Tl]": 2151,
2341
+ "[209Tl]": 2152,
2342
+ "[110Sn]": 2153,
2343
+ "[222Fr]": 2154,
2344
+ "[207At]": 2155,
2345
+ "[119In]": 2156,
2346
+ "[As@]": 2157,
2347
+ "[129IH]": 2158,
2348
+ "[157Dy]": 2159,
2349
+ "[111IH]": 2160,
2350
+ "[230Ra]": 2161,
2351
+ "[144Pr+3]": 2162,
2352
+ "[SiH3+]": 2163,
2353
+ "[3He]": 2164,
2354
+ "[AsH5]": 2165,
2355
+ "[72Se]": 2166,
2356
+ "[95Tc]": 2167,
2357
+ "[103Pd]": 2168,
2358
+ "[121Sn+2]": 2169,
2359
+ "[211Rn]": 2170,
2360
+ "[38SH2]": 2171,
2361
+ "[127IH]": 2172,
2362
+ "[74Br-]": 2173,
2363
+ "[133I-]": 2174,
2364
+ "[100Tc+4]": 2175,
2365
+ "[100Tc]": 2176,
2366
+ "[36Cl-]": 2177,
2367
+ "[89Y+3]": 2178,
2368
+ "[104Rh]": 2179,
2369
+ "[152Sm]": 2180,
2370
+ "[226Ra]": 2181,
2371
+ "[19FH]": 2182,
2372
+ "[104Pd]": 2183,
2373
+ "[148Gd]": 2184,
2374
+ "[157Lu]": 2185,
2375
+ "[33SH2]": 2186,
2376
+ "[121I-]": 2187,
2377
+ "[17FH]": 2188,
2378
+ "[71Se]": 2189,
2379
+ "[157Sm]": 2190,
2380
+ "[148Tb]": 2191,
2381
+ "[164Dy]": 2192,
2382
+ "[15OH2]": 2193,
2383
+ "[15O+]": 2194,
2384
+ "[39K]": 2195,
2385
+ "[40Ar]": 2196,
2386
+ "[50Cr+3]": 2197,
2387
+ "[50Cr]": 2198,
2388
+ "[52Ti]": 2199,
2389
+ "[103Pd+2]": 2200,
2390
+ "[130Ba]": 2201,
2391
+ "[142Pm]": 2202,
2392
+ "[153Gd+3]": 2203,
2393
+ "[151Eu]": 2204,
2394
+ "[103Rh]": 2205,
2395
+ "[124Xe]": 2206,
2396
+ "[152Tb]": 2207,
2397
+ "[17OH2]": 2208,
2398
+ "[20Ne]": 2209,
2399
+ "[52Fe]": 2210,
2400
+ "[94Zr+4]": 2211,
2401
+ "[94Zr]": 2212,
2402
+ "[149Pr]": 2213,
2403
+ "[16OH2]": 2214,
2404
+ "[53Cr+6]": 2215,
2405
+ "[53Cr]": 2216,
2406
+ "[81Br-]": 2217,
2407
+ "[112Pd]": 2218,
2408
+ "[125Xe]": 2219,
2409
+ "[155Gd]": 2220,
2410
+ "[157Gd]": 2221,
2411
+ "[168Yb]": 2222,
2412
+ "[184Os]": 2223,
2413
+ "[166Tb]": 2224,
2414
+ "[221Fr]": 2225,
2415
+ "[212Ra]": 2226,
2416
+ "[75Br-]": 2227,
2417
+ "[79Br-]": 2228,
2418
+ "[113Ag]": 2229,
2419
+ "[23Na]": 2230,
2420
+ "[34Cl-]": 2231,
2421
+ "[34ClH]": 2232,
2422
+ "[38Cl-]": 2233,
2423
+ "[56Fe]": 2234,
2424
+ "[68Cu]": 2235,
2425
+ "[77Br-]": 2236,
2426
+ "[90Zr+4]": 2237,
2427
+ "[90Zr]": 2238,
2428
+ "[102Pd]": 2239,
2429
+ "[154Eu+3]": 2240,
2430
+ "[57Mn]": 2241,
2431
+ "[165Tm]": 2242,
2432
+ "[152Dy]": 2243,
2433
+ "[217At]": 2244,
2434
+ "[77se]": 2245,
2435
+ "[13cH-]": 2246,
2436
+ "[122Te]": 2247,
2437
+ "[156Gd]": 2248,
2438
+ "[124Te]": 2249,
2439
+ "[53Ni]": 2250,
2440
+ "[131Xe]": 2251,
2441
+ "[174Hf+4]": 2252,
2442
+ "[174Hf]": 2253,
2443
+ "[76Se]": 2254,
2444
+ "[168Tm]": 2255,
2445
+ "[167Dy]": 2256,
2446
+ "[154Gd]": 2257,
2447
+ "[95Ru]": 2258,
2448
+ "[210At]": 2259,
2449
+ "[85Br]": 2260,
2450
+ "[59Co]": 2261,
2451
+ "[122Xe]": 2262,
2452
+ "[27Al]": 2263,
2453
+ "[54Cr]": 2264,
2454
+ "[198Hg]": 2265,
2455
+ "[85Rb+]": 2266,
2456
+ "[214Tl]": 2267,
2457
+ "[229Rn]": 2268,
2458
+ "[218Pb]": 2269,
2459
+ "[218Bi]": 2270,
2460
+ "[167Tm+3]": 2271,
2461
+ "[18o+]": 2272,
2462
+ "[P@@H+]": 2273,
2463
+ "[P@H+]": 2274,
2464
+ "[13N+]": 2275,
2465
+ "[212Pb+2]": 2276,
2466
+ "[217Bi]": 2277,
2467
+ "[249Cf+2]": 2278,
2468
+ "[18OH3+]": 2279,
2469
+ "[90Sr-]": 2280,
2470
+ "[Cf+3]": 2281,
2471
+ "[200Hg]": 2282,
2472
+ "[86Tc]": 2283,
2473
+ "[141Pr+3]": 2284,
2474
+ "[141Pr]": 2285,
2475
+ "[16nH]": 2286,
2476
+ "[14NH4+]": 2287,
2477
+ "[132Xe]": 2288,
2478
+ "[83Kr]": 2289,
2479
+ "[70Zn+2]": 2290,
2480
+ "[137Ba+2]": 2291,
2481
+ "[36Ar]": 2292,
2482
+ "[38Ar]": 2293,
2483
+ "[21Ne]": 2294,
2484
+ "[126Xe]": 2295,
2485
+ "[136Xe]": 2296,
2486
+ "[128Xe]": 2297,
2487
+ "[134Xe]": 2298,
2488
+ "[84Kr]": 2299,
2489
+ "[86Kr]": 2300,
2490
+ "[78Kr]": 2301,
2491
+ "[80Kr]": 2302,
2492
+ "[82Kr]": 2303,
2493
+ "[67Zn+2]": 2304,
2494
+ "[65Cu+2]": 2305,
2495
+ "[110Te]": 2306,
2496
+ "[58Fe+3]": 2307,
2497
+ "[142Nd]": 2308,
2498
+ "[38K]": 2309,
2499
+ "[198Au+3]": 2310,
2500
+ "[122IH]": 2311,
2501
+ "[38PH3]": 2312,
2502
+ "[130I-]": 2313,
2503
+ "[40K+]": 2314,
2504
+ "[38K+]": 2315,
2505
+ "[28Mg+2]": 2316,
2506
+ "[208Tl+]": 2317,
2507
+ "[13OH2]": 2318,
2508
+ "[198Bi]": 2319,
2509
+ "[192Bi]": 2320,
2510
+ "[194Bi]": 2321,
2511
+ "[196Bi]": 2322,
2512
+ "[132I-]": 2323,
2513
+ "[83Sr+2]": 2324,
2514
+ "[169Er+3]": 2325,
2515
+ "[122I-]": 2326,
2516
+ "[120I-]": 2327,
2517
+ "[92Sr+2]": 2328,
2518
+ "[126I-]": 2329,
2519
+ "[24Mg]": 2330,
2520
+ "[84Sr]": 2331,
2521
+ "[118Pd+2]": 2332,
2522
+ "[118Pd]": 2333,
2523
+ "[AsH4]": 2334,
2524
+ "[127I-]": 2335,
2525
+ "[9C-]": 2336,
2526
+ "[11CH3+]": 2337,
2527
+ "[17B]": 2338,
2528
+ "[7B]": 2339,
2529
+ "[4HH]": 2340,
2530
+ "[18C-]": 2341,
2531
+ "[22CH3-]": 2342,
2532
+ "[22CH4]": 2343,
2533
+ "[17C-]": 2344,
2534
+ "[15CH3]": 2345,
2535
+ "[16CH3]": 2346,
2536
+ "[11NH3]": 2347,
2537
+ "[21NH3]": 2348,
2538
+ "[11N-]": 2349,
2539
+ "[11NH]": 2350,
2540
+ "[16CH]": 2351,
2541
+ "[17CH2]": 2352,
2542
+ "[99Ru+2]": 2353,
2543
+ "[181Ta+2]": 2354,
2544
+ "[181Ta]": 2355,
2545
+ "[20CH]": 2356,
2546
+ "[32PH2]": 2357,
2547
+ "[55Fe+2]": 2358,
2548
+ "[SH3]": 2359,
2549
+ "[S@H]": 2360,
2550
+ "[UNK]": 2361
2551
+ },
2552
+ "merges": []
2553
+ }
2554
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,53 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "0": {
4
+ "content": "[CLS]",
5
+ "lstrip": false,
6
+ "normalized": false,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ },
11
+ "1": {
12
+ "content": "[SEP]",
13
+ "lstrip": false,
14
+ "normalized": false,
15
+ "rstrip": false,
16
+ "single_word": false,
17
+ "special": true
18
+ },
19
+ "2": {
20
+ "content": "[PAD]",
21
+ "lstrip": false,
22
+ "normalized": false,
23
+ "rstrip": false,
24
+ "single_word": false,
25
+ "special": true
26
+ },
27
+ "3": {
28
+ "content": "[MASK]",
29
+ "lstrip": false,
30
+ "normalized": false,
31
+ "rstrip": false,
32
+ "single_word": false,
33
+ "special": true
34
+ },
35
+ "2361": {
36
+ "content": "[UNK]",
37
+ "lstrip": false,
38
+ "normalized": false,
39
+ "rstrip": false,
40
+ "single_word": false,
41
+ "special": true
42
+ }
43
+ },
44
+ "clean_up_tokenization_spaces": false,
45
+ "cls_token": "[CLS]",
46
+ "extra_special_tokens": {},
47
+ "mask_token": "[MASK]",
48
+ "model_max_length": 256,
49
+ "pad_token": "[PAD]",
50
+ "sep_token": "[SEP]",
51
+ "tokenizer_class": "PreTrainedTokenizerFast",
52
+ "unk_token": "[UNK]"
53
+ }