Delete pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/config.json +0 -38
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/model.safetensors +0 -3
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/optimizer.pt +0 -3
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/rng_state.pth +0 -3
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/scheduler.pt +0 -3
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/trainer_state.json +0 -567
- pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/training_args.bin +0 -3
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/config.json
DELETED
@@ -1,38 +0,0 @@
-{
-  "_name_or_path": "roberta-large-mnli",
-  "_num_labels": 3,
-  "architectures": [
-    "RobertaForMaskedLM"
-  ],
-  "attention_probs_dropout_prob": 0.1,
-  "bos_token_id": 0,
-  "classifier_dropout": null,
-  "eos_token_id": 2,
-  "hidden_act": "gelu",
-  "hidden_dropout_prob": 0.1,
-  "hidden_size": 1024,
-  "id2label": {
-    "0": "CONTRADICTION",
-    "1": "NEUTRAL",
-    "2": "ENTAILMENT"
-  },
-  "initializer_range": 0.02,
-  "intermediate_size": 4096,
-  "label2id": {
-    "CONTRADICTION": 0,
-    "ENTAILMENT": 2,
-    "NEUTRAL": 1
-  },
-  "layer_norm_eps": 1e-05,
-  "max_position_embeddings": 514,
-  "model_type": "roberta",
-  "num_attention_heads": 16,
-  "num_hidden_layers": 24,
-  "pad_token_id": 1,
-  "position_embedding_type": "absolute",
-  "torch_dtype": "float32",
-  "transformers_version": "4.49.0",
-  "type_vocab_size": 1,
-  "use_cache": true,
-  "vocab_size": 50265
-}
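The deleted config above is the stock roberta-large-mnli configuration, still carrying the MLM head ("architectures": ["RobertaForMaskedLM"]), which is consistent with PET-style training through the masked-LM head rather than a classification head. As a minimal sketch (the local path is hypothetical, not part of this commit), a config like this can be inspected with transformers:

```python
# Minimal sketch: load the checkpoint's config.json with transformers.
# "checkpoint-742" is a hypothetical local copy of the deleted directory.
from transformers import AutoConfig

config = AutoConfig.from_pretrained("checkpoint-742")
print(config.model_type)   # "roberta"
print(config.id2label)     # {0: 'CONTRADICTION', 1: 'NEUTRAL', 2: 'ENTAILMENT'}
```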
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/model.safetensors
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:0b08c1eba1219d1f37d89d4e14c6dc0adf662e053a1ceb6a4176a9eaca04e825
-size 1421696540
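The three lines above are a Git LFS pointer, not the weights themselves: the repo tracks only the sha256 and byte size, and LFS fetches the ~1.4 GB safetensors blob on checkout. A minimal sketch for verifying a local copy against the pointer (the path is hypothetical):

```python
# Minimal sketch: verify a local model.safetensors against the LFS pointer.
# Only the expected oid and size come from the diff; the path is hypothetical.
import hashlib
import os

path = "checkpoint-742/model.safetensors"
expected_oid = "0b08c1eba1219d1f37d89d4e14c6dc0adf662e053a1ceb6a4176a9eaca04e825"
expected_size = 1421696540

digest = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)

assert os.path.getsize(path) == expected_size
assert digest.hexdigest() == expected_oid
```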
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/optimizer.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d1a8ff3e1f8189a005a6f946827138203b25981cb45232b8c057071899f824c6
-size 2843626160
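A side note, inferred from the two sizes rather than stated anywhere in the repo: the optimizer state is almost exactly twice the size of the fp32 weights, which is what AdamW's two moment tensors per parameter would predict:

```python
# 2,843,626,160 / 1,421,696,540 ≈ 2.0002 — consistent with AdamW keeping
# two fp32 moment tensors per parameter, plus a little metadata.
print(2_843_626_160 / 1_421_696_540)  # ≈ 2.0002
```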
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/rng_state.pth
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:281c36caf51fbd63c7dd963f257385d59119adc28e964456b6e13c349a56717c
-size 14244
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/scheduler.pt
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:d1a1b4b54695e12078e54ca4904d7351c3707eb227b68911ee2e1b413feca070
-size 1064
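optimizer.pt, scheduler.pt, and rng_state.pth (together with trainer_state.json below) are the files transformers.Trainer writes so a run can be resumed deterministically via trainer.train(resume_from_checkpoint=...). As a sketch, the run configuration implied by the directory name would look roughly like this; every value is inferred from the bs4/gas4/lr3e-05/ep2 suffix or from trainer_state.json below, not read from the repo:

```python
# Hypothetical reconstruction of the TrainingArguments behind this run;
# all values are inferred from the directory name or trainer_state.json.
from transformers import TrainingArguments

args = TrainingArguments(
    output_dir="pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2",
    per_device_train_batch_size=4,   # bs4
    gradient_accumulation_steps=4,   # gas4 -> effective batch size 16
    learning_rate=3e-5,              # lr3e-05
    num_train_epochs=2,              # ep2
    logging_steps=10,                # matches the log_history granularity
    save_steps=100,                  # matches trainer_state.json
    eval_steps=500,                  # matches trainer_state.json
)
```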
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/trainer_state.json
DELETED
@@ -1,567 +0,0 @@
-{
-  "best_metric": null,
-  "best_model_checkpoint": null,
-  "epoch": 2.0,
-  "eval_steps": 500,
-  "global_step": 742,
-  "is_hyper_param_search": false,
-  "is_local_process_zero": true,
-  "is_world_process_zero": true,
-  "log_history": [
-    {
-      "epoch": 0.026954177897574125,
-      "grad_norm": 29.47423553466797,
-      "learning_rate": 2.959568733153639e-05,
-      "loss": 4.3393,
-      "step": 10
-    },
-    {
-      "epoch": 0.05390835579514825,
-      "grad_norm": 31.8802433013916,
-      "learning_rate": 2.9191374663072775e-05,
-      "loss": 0.9828,
-      "step": 20
-    },
-    {
-      "epoch": 0.08086253369272237,
-      "grad_norm": 22.71839714050293,
-      "learning_rate": 2.8787061994609164e-05,
-      "loss": 0.7336,
-      "step": 30
-    },
-    {
-      "epoch": 0.1078167115902965,
-      "grad_norm": 65.7586669921875,
-      "learning_rate": 2.8382749326145553e-05,
-      "loss": 0.6222,
-      "step": 40
-    },
-    {
-      "epoch": 0.1347708894878706,
-      "grad_norm": 32.97853469848633,
-      "learning_rate": 2.7978436657681942e-05,
-      "loss": 0.633,
-      "step": 50
-    },
-    {
-      "epoch": 0.16172506738544473,
-      "grad_norm": 18.638259887695312,
-      "learning_rate": 2.757412398921833e-05,
-      "loss": 0.517,
-      "step": 60
-    },
-    {
-      "epoch": 0.18867924528301888,
-      "grad_norm": 31.97262954711914,
-      "learning_rate": 2.716981132075472e-05,
-      "loss": 0.3794,
-      "step": 70
-    },
-    {
-      "epoch": 0.215633423180593,
-      "grad_norm": 39.317928314208984,
-      "learning_rate": 2.6765498652291105e-05,
-      "loss": 0.4368,
-      "step": 80
-    },
-    {
-      "epoch": 0.24258760107816713,
-      "grad_norm": 25.218809127807617,
-      "learning_rate": 2.6361185983827494e-05,
-      "loss": 0.4537,
-      "step": 90
-    },
-    {
-      "epoch": 0.2695417789757412,
-      "grad_norm": 19.011188507080078,
-      "learning_rate": 2.5956873315363883e-05,
-      "loss": 0.3985,
-      "step": 100
-    },
-    {
-      "epoch": 0.29649595687331537,
-      "grad_norm": 12.845932006835938,
-      "learning_rate": 2.555256064690027e-05,
-      "loss": 0.3786,
-      "step": 110
-    },
-    {
-      "epoch": 0.32345013477088946,
-      "grad_norm": 29.667890548706055,
-      "learning_rate": 2.5148247978436658e-05,
-      "loss": 0.4118,
-      "step": 120
-    },
-    {
-      "epoch": 0.3504043126684636,
-      "grad_norm": 23.784292221069336,
-      "learning_rate": 2.4743935309973046e-05,
-      "loss": 0.5691,
-      "step": 130
-    },
-    {
-      "epoch": 0.37735849056603776,
-      "grad_norm": 10.289161682128906,
-      "learning_rate": 2.4339622641509435e-05,
-      "loss": 0.4434,
-      "step": 140
-    },
-    {
-      "epoch": 0.40431266846361186,
-      "grad_norm": 19.481176376342773,
-      "learning_rate": 2.3935309973045824e-05,
-      "loss": 0.4913,
-      "step": 150
-    },
-    {
-      "epoch": 0.431266846361186,
-      "grad_norm": 23.314889907836914,
-      "learning_rate": 2.353099730458221e-05,
-      "loss": 0.3104,
-      "step": 160
-    },
-    {
-      "epoch": 0.4582210242587601,
-      "grad_norm": 16.9091796875,
-      "learning_rate": 2.31266846361186e-05,
-      "loss": 0.2901,
-      "step": 170
-    },
-    {
-      "epoch": 0.48517520215633425,
-      "grad_norm": 15.09512710571289,
-      "learning_rate": 2.2722371967654988e-05,
-      "loss": 0.2728,
-      "step": 180
-    },
-    {
-      "epoch": 0.5121293800539084,
-      "grad_norm": 43.94502258300781,
-      "learning_rate": 2.2318059299191373e-05,
-      "loss": 0.51,
-      "step": 190
-    },
-    {
-      "epoch": 0.5390835579514824,
-      "grad_norm": 12.847454071044922,
-      "learning_rate": 2.1913746630727762e-05,
-      "loss": 0.4558,
-      "step": 200
-    },
-    {
-      "epoch": 0.5660377358490566,
-      "grad_norm": 31.876087188720703,
-      "learning_rate": 2.150943396226415e-05,
-      "loss": 0.4355,
-      "step": 210
-    },
-    {
-      "epoch": 0.5929919137466307,
-      "grad_norm": 11.343082427978516,
-      "learning_rate": 2.110512129380054e-05,
-      "loss": 0.4043,
-      "step": 220
-    },
-    {
-      "epoch": 0.6199460916442049,
-      "grad_norm": 22.41591453552246,
-      "learning_rate": 2.070080862533693e-05,
-      "loss": 0.3301,
-      "step": 230
-    },
-    {
-      "epoch": 0.6469002695417789,
-      "grad_norm": 23.612829208374023,
-      "learning_rate": 2.0296495956873318e-05,
-      "loss": 0.2875,
-      "step": 240
-    },
-    {
-      "epoch": 0.6738544474393531,
-      "grad_norm": 9.502119064331055,
-      "learning_rate": 1.9892183288409703e-05,
-      "loss": 0.3458,
-      "step": 250
-    },
-    {
-      "epoch": 0.7008086253369272,
-      "grad_norm": 33.75297927856445,
-      "learning_rate": 1.9487870619946092e-05,
-      "loss": 0.3215,
-      "step": 260
-    },
-    {
-      "epoch": 0.7277628032345014,
-      "grad_norm": 31.682952880859375,
-      "learning_rate": 1.908355795148248e-05,
-      "loss": 0.3333,
-      "step": 270
-    },
-    {
-      "epoch": 0.7547169811320755,
-      "grad_norm": 16.26555633544922,
-      "learning_rate": 1.8679245283018867e-05,
-      "loss": 0.3504,
-      "step": 280
-    },
-    {
-      "epoch": 0.7816711590296496,
-      "grad_norm": 22.693450927734375,
-      "learning_rate": 1.8274932614555256e-05,
-      "loss": 0.4523,
-      "step": 290
-    },
-    {
-      "epoch": 0.8086253369272237,
-      "grad_norm": 17.097888946533203,
-      "learning_rate": 1.7870619946091645e-05,
-      "loss": 0.3542,
-      "step": 300
-    },
-    {
-      "epoch": 0.8355795148247979,
-      "grad_norm": 13.334282875061035,
-      "learning_rate": 1.7466307277628033e-05,
-      "loss": 0.2878,
-      "step": 310
-    },
-    {
-      "epoch": 0.862533692722372,
-      "grad_norm": 9.898679733276367,
-      "learning_rate": 1.7061994609164422e-05,
-      "loss": 0.3661,
-      "step": 320
-    },
-    {
-      "epoch": 0.889487870619946,
-      "grad_norm": 10.888357162475586,
-      "learning_rate": 1.665768194070081e-05,
-      "loss": 0.322,
-      "step": 330
-    },
-    {
-      "epoch": 0.9164420485175202,
-      "grad_norm": 35.20107650756836,
-      "learning_rate": 1.6253369272237197e-05,
-      "loss": 0.3405,
-      "step": 340
-    },
-    {
-      "epoch": 0.9433962264150944,
-      "grad_norm": 4.650808334350586,
-      "learning_rate": 1.5849056603773586e-05,
-      "loss": 0.3893,
-      "step": 350
-    },
-    {
-      "epoch": 0.9703504043126685,
-      "grad_norm": 24.92270851135254,
-      "learning_rate": 1.544474393530997e-05,
-      "loss": 0.3561,
-      "step": 360
-    },
-    {
-      "epoch": 0.9973045822102425,
-      "grad_norm": 14.140254020690918,
-      "learning_rate": 1.504043126684636e-05,
-      "loss": 0.2356,
-      "step": 370
-    },
-    {
-      "epoch": 1.0,
-      "eval_loss": 0.2660793364048004,
-      "eval_runtime": 7.2402,
-      "eval_samples_per_second": 204.829,
-      "eval_steps_per_second": 51.242,
-      "step": 371
-    },
-    {
-      "epoch": 1.0242587601078168,
-      "grad_norm": 10.333748817443848,
-      "learning_rate": 1.463611859838275e-05,
-      "loss": 0.1811,
-      "step": 380
-    },
-    {
-      "epoch": 1.0512129380053907,
-      "grad_norm": 20.75710105895996,
-      "learning_rate": 1.4231805929919138e-05,
-      "loss": 0.2177,
-      "step": 390
-    },
-    {
-      "epoch": 1.0781671159029649,
-      "grad_norm": 22.326885223388672,
-      "learning_rate": 1.3827493261455525e-05,
-      "loss": 0.3734,
-      "step": 400
-    },
-    {
-      "epoch": 1.105121293800539,
-      "grad_norm": 8.389409065246582,
-      "learning_rate": 1.3423180592991914e-05,
-      "loss": 0.2553,
-      "step": 410
-    },
-    {
-      "epoch": 1.1320754716981132,
-      "grad_norm": 19.33669090270996,
-      "learning_rate": 1.3018867924528303e-05,
-      "loss": 0.2771,
-      "step": 420
-    },
-    {
-      "epoch": 1.1590296495956873,
-      "grad_norm": 14.596505165100098,
-      "learning_rate": 1.261455525606469e-05,
-      "loss": 0.2798,
-      "step": 430
-    },
-    {
-      "epoch": 1.1859838274932615,
-      "grad_norm": 32.35258102416992,
-      "learning_rate": 1.2210242587601077e-05,
-      "loss": 0.3396,
-      "step": 440
-    },
-    {
-      "epoch": 1.2129380053908356,
-      "grad_norm": 12.335786819458008,
-      "learning_rate": 1.1805929919137466e-05,
-      "loss": 0.1821,
-      "step": 450
-    },
-    {
-      "epoch": 1.2398921832884098,
-      "grad_norm": 8.339882850646973,
-      "learning_rate": 1.1401617250673855e-05,
-      "loss": 0.2896,
-      "step": 460
-    },
-    {
-      "epoch": 1.266846361185984,
-      "grad_norm": 17.156047821044922,
-      "learning_rate": 1.0997304582210243e-05,
-      "loss": 0.1827,
-      "step": 470
-    },
-    {
-      "epoch": 1.2938005390835579,
-      "grad_norm": 8.71430778503418,
-      "learning_rate": 1.0592991913746631e-05,
-      "loss": 0.3261,
-      "step": 480
-    },
-    {
-      "epoch": 1.320754716981132,
-      "grad_norm": 33.503440856933594,
-      "learning_rate": 1.0188679245283019e-05,
-      "loss": 0.2977,
-      "step": 490
-    },
-    {
-      "epoch": 1.3477088948787062,
-      "grad_norm": 13.175012588500977,
-      "learning_rate": 9.784366576819408e-06,
-      "loss": 0.2304,
-      "step": 500
-    },
-    {
-      "epoch": 1.3746630727762803,
-      "grad_norm": 6.546019554138184,
-      "learning_rate": 9.380053908355796e-06,
-      "loss": 0.1473,
-      "step": 510
-    },
-    {
-      "epoch": 1.4016172506738545,
-      "grad_norm": 24.98724365234375,
-      "learning_rate": 8.975741239892184e-06,
-      "loss": 0.3648,
-      "step": 520
-    },
-    {
-      "epoch": 1.4285714285714286,
-      "grad_norm": 6.695165157318115,
-      "learning_rate": 8.571428571428571e-06,
-      "loss": 0.2416,
-      "step": 530
-    },
-    {
-      "epoch": 1.4555256064690028,
-      "grad_norm": 8.321410179138184,
-      "learning_rate": 8.16711590296496e-06,
-      "loss": 0.2227,
-      "step": 540
-    },
-    {
-      "epoch": 1.482479784366577,
-      "grad_norm": 3.5790646076202393,
-      "learning_rate": 7.762803234501349e-06,
-      "loss": 0.2386,
-      "step": 550
-    },
-    {
-      "epoch": 1.509433962264151,
-      "grad_norm": 7.105038642883301,
-      "learning_rate": 7.358490566037736e-06,
-      "loss": 0.1779,
-      "step": 560
-    },
-    {
-      "epoch": 1.536388140161725,
-      "grad_norm": 48.24789047241211,
-      "learning_rate": 6.954177897574124e-06,
-      "loss": 0.2599,
-      "step": 570
-    },
-    {
-      "epoch": 1.5633423180592994,
-      "grad_norm": 7.495761871337891,
-      "learning_rate": 6.549865229110512e-06,
-      "loss": 0.1809,
-      "step": 580
-    },
-    {
-      "epoch": 1.5902964959568733,
-      "grad_norm": 17.49237823486328,
-      "learning_rate": 6.1455525606469e-06,
-      "loss": 0.1797,
-      "step": 590
-    },
-    {
-      "epoch": 1.6172506738544474,
-      "grad_norm": 13.955606460571289,
-      "learning_rate": 5.741239892183289e-06,
-      "loss": 0.2784,
-      "step": 600
-    },
-    {
-      "epoch": 1.6442048517520216,
-      "grad_norm": 27.44432258605957,
-      "learning_rate": 5.336927223719676e-06,
-      "loss": 0.2583,
-      "step": 610
-    },
-    {
-      "epoch": 1.6711590296495957,
-      "grad_norm": 29.639062881469727,
-      "learning_rate": 4.932614555256065e-06,
-      "loss": 0.2687,
-      "step": 620
-    },
-    {
-      "epoch": 1.6981132075471699,
-      "grad_norm": 10.949997901916504,
-      "learning_rate": 4.5283018867924524e-06,
-      "loss": 0.1988,
-      "step": 630
-    },
-    {
-      "epoch": 1.7250673854447438,
-      "grad_norm": 15.069132804870605,
-      "learning_rate": 4.123989218328841e-06,
-      "loss": 0.1382,
-      "step": 640
-    },
-    {
-      "epoch": 1.7520215633423182,
-      "grad_norm": 17.37251091003418,
-      "learning_rate": 3.719676549865229e-06,
-      "loss": 0.199,
-      "step": 650
-    },
-    {
-      "epoch": 1.778975741239892,
-      "grad_norm": 13.217763900756836,
-      "learning_rate": 3.315363881401617e-06,
-      "loss": 0.1404,
-      "step": 660
-    },
-    {
-      "epoch": 1.8059299191374663,
-      "grad_norm": 26.328725814819336,
-      "learning_rate": 2.9110512129380056e-06,
-      "loss": 0.1185,
-      "step": 670
-    },
-    {
-      "epoch": 1.8328840970350404,
-      "grad_norm": 11.576745986938477,
-      "learning_rate": 2.5067385444743936e-06,
-      "loss": 0.2272,
-      "step": 680
-    },
-    {
-      "epoch": 1.8598382749326146,
-      "grad_norm": 1.781421422958374,
-      "learning_rate": 2.1024258760107817e-06,
-      "loss": 0.1137,
-      "step": 690
-    },
-    {
-      "epoch": 1.8867924528301887,
-      "grad_norm": 23.61797523498535,
-      "learning_rate": 1.69811320754717e-06,
-      "loss": 0.2826,
-      "step": 700
-    },
-    {
-      "epoch": 1.9137466307277629,
-      "grad_norm": 0.7516375184059143,
-      "learning_rate": 1.293800539083558e-06,
-      "loss": 0.2227,
-      "step": 710
-    },
-    {
-      "epoch": 1.940700808625337,
-      "grad_norm": 6.095058441162109,
-      "learning_rate": 8.894878706199461e-07,
-      "loss": 0.258,
-      "step": 720
-    },
-    {
-      "epoch": 1.967654986522911,
-      "grad_norm": 8.179100036621094,
-      "learning_rate": 4.851752021563343e-07,
-      "loss": 0.1808,
-      "step": 730
-    },
-    {
-      "epoch": 1.9946091644204853,
-      "grad_norm": 11.866944313049316,
-      "learning_rate": 8.086253369272238e-08,
-      "loss": 0.1688,
-      "step": 740
-    },
-    {
-      "epoch": 2.0,
-      "eval_loss": 0.2651370167732239,
-      "eval_runtime": 7.4873,
-      "eval_samples_per_second": 198.07,
-      "eval_steps_per_second": 49.551,
-      "step": 742
-    }
-  ],
-  "logging_steps": 10,
-  "max_steps": 742,
-  "num_input_tokens_seen": 0,
-  "num_train_epochs": 2,
-  "save_steps": 100,
-  "stateful_callbacks": {
-    "TrainerControl": {
-      "args": {
-        "should_epoch_stop": false,
-        "should_evaluate": false,
-        "should_log": false,
-        "should_save": true,
-        "should_training_stop": true
-      },
-      "attributes": {}
-    }
-  },
-  "total_flos": 962338508269596.0,
-  "train_batch_size": 4,
-  "trial_name": null,
-  "trial_params": null
-}
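Two consistency checks on the log above (derived from the logged numbers, not stated anywhere in the repo): each epoch is 371 optimizer steps (742 / 2, matching the eval entry at step 371), and the logged learning rates follow a plain linear decay from 3e-05 to 0 over 742 steps with no warmup:

```python
# Sanity check: the logged learning rates match linear decay with no warmup,
# i.e. lr(step) = 3e-05 * (max_steps - step) / max_steps with max_steps = 742.
def lr(step: int, peak: float = 3e-05, max_steps: int = 742) -> float:
    return peak * (max_steps - step) / max_steps

print(lr(10))   # ~2.95956873e-05 (logged: 2.959568733153639e-05)
print(lr(370))  # ~1.50404313e-05 (logged: 1.504043126684636e-05)
print(lr(740))  # ~8.08625337e-08 (logged: 8.086253369272238e-08)
```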
pet-leadership-model-roberta-large-mnli_bs4_gas4_lr3e-05_ep2/checkpoint-742/training_args.bin
DELETED
@@ -1,3 +0,0 @@
-version https://git-lfs.github.com/spec/v1
-oid sha256:11d07387ba23496b1ded4dce787e89460237ef6e9414ab716d72f50b57f5c57e
-size 5432
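Unlike the other .pt files, training_args.bin is a pickled TrainingArguments object rather than tensor data. A minimal sketch for inspecting it (the local path is hypothetical, and unpickling requires trusting the file's source):

```python
# Minimal sketch: inspect training_args.bin with torch.load.
# weights_only=False is needed on recent PyTorch because this is a pickled
# object, not a tensor file — only do this for files you trust.
import torch

args = torch.load("checkpoint-742/training_args.bin", weights_only=False)
print(args.learning_rate)                # expected: 3e-05
print(args.per_device_train_batch_size)  # expected: 4
```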