End of training
- README.md +206 -0
- config.json +46 -0
- generation_config.json +6 -0
- merges.txt +0 -0
- model.safetensors +3 -0
- runs/Jun27_18-14-59_viridian/events.out.tfevents.1719512102.viridian.2678733.13 +3 -0
- special_tokens_map.json +24 -0
- tokenizer.json +0 -0
- tokenizer_config.json +20 -0
- training_args.bin +3 -0
- vocab.json +0 -0
README.md
ADDED
@@ -0,0 +1,206 @@
---
license: apache-2.0
base_model: distilgpt2
tags:
- generated_from_trainer
model-index:
- name: StatementOfWork_Generator_Omega_BS_512
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# StatementOfWork_Generator_Omega_BS_512

This model is a fine-tuned version of [distilgpt2](https://huggingface.co/distilgpt2) on an unknown dataset.
It achieves the following results on the evaluation set:
- Loss: 0.7165

## Model description

More information needed

## Intended uses & limitations

More information needed
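
As a quick usage sketch (not part of the original card), the model can be loaded for text generation through the `transformers` pipeline. The model id below is an assumption based on this repository's name; substitute the actual hub id or a local checkpoint path.

```python
# Minimal inference sketch; the model id is an assumption based on this
# repository's name. Replace it with the real hub id or a local path.
from transformers import pipeline

generator = pipeline(
    "text-generation",
    model="StatementOfWork_Generator_Omega_BS_512",
)

prompt = "Statement of Work\n\nProject Scope:"
result = generator(prompt, max_new_tokens=100, do_sample=True, top_p=0.95)
print(result[0]["generated_text"])
```

The card does not document the prompt format used during fine-tuning, so the prompt above is purely illustrative.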

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 2e-05
- train_batch_size: 50
- eval_batch_size: 50
- seed: 42
- optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
- lr_scheduler_type: linear
- num_epochs: 150
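
The sketch below shows roughly how these settings map onto `TrainingArguments` and `Trainer`. It is a reconstruction, not the original training script: the dataset is a dummy placeholder (the statement-of-work data is not included in this repository), and the 512-token block size is only a guess taken from the `_BS_512` suffix in the model name.

```python
# Approximate reconstruction of the training setup from the hyperparameters
# listed above. The dataset is a placeholder; the real data is not public.
from datasets import Dataset
from transformers import (
    AutoModelForCausalLM,
    AutoTokenizer,
    DataCollatorForLanguageModeling,
    Trainer,
    TrainingArguments,
)

tokenizer = AutoTokenizer.from_pretrained("distilgpt2")
tokenizer.pad_token = tokenizer.eos_token  # GPT-2 has no dedicated pad token
model = AutoModelForCausalLM.from_pretrained("distilgpt2")

def tokenize(batch):
    # max_length=512 is an assumption based on the "_BS_512" model name suffix
    return tokenizer(batch["text"], truncation=True, max_length=512)

# Dummy corpus purely for illustration; replace with the real SOW dataset.
raw = Dataset.from_dict({"text": ["Statement of Work: example scope ..."] * 8})
train_ds = raw.map(tokenize, batched=True, remove_columns=["text"])
eval_ds = train_ds  # placeholder; use a held-out split in practice

args = TrainingArguments(
    output_dir="StatementOfWork_Generator_Omega_BS_512",
    learning_rate=2e-5,
    per_device_train_batch_size=50,
    per_device_eval_batch_size=50,
    num_train_epochs=150,
    seed=42,
    lr_scheduler_type="linear",
    evaluation_strategy="epoch",
)

trainer = Trainer(
    model=model,
    args=args,
    train_dataset=train_ds,
    eval_dataset=eval_ds,
    data_collator=DataCollatorForLanguageModeling(tokenizer, mlm=False),
)
trainer.train()
```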

### Training results

| Training Loss | Epoch | Step | Validation Loss |
|:-------------:|:-----:|:----:|:---------------:|
| No log | 1.0 | 4 | 0.8137 |
| No log | 2.0 | 8 | 0.7930 |
| No log | 3.0 | 12 | 0.7915 |
| No log | 4.0 | 16 | 0.7846 |
| No log | 5.0 | 20 | 0.7785 |
| No log | 6.0 | 24 | 0.7751 |
| No log | 7.0 | 28 | 0.7724 |
| No log | 8.0 | 32 | 0.7687 |
| No log | 9.0 | 36 | 0.7654 |
| No log | 10.0 | 40 | 0.7630 |
| No log | 11.0 | 44 | 0.7615 |
| No log | 12.0 | 48 | 0.7604 |
| No log | 13.0 | 52 | 0.7595 |
| No log | 14.0 | 56 | 0.7572 |
| No log | 15.0 | 60 | 0.7559 |
| No log | 16.0 | 64 | 0.7549 |
| No log | 17.0 | 68 | 0.7541 |
| No log | 18.0 | 72 | 0.7541 |
| No log | 19.0 | 76 | 0.7522 |
| No log | 20.0 | 80 | 0.7496 |
| No log | 21.0 | 84 | 0.7485 |
| No log | 22.0 | 88 | 0.7492 |
| No log | 23.0 | 92 | 0.7481 |
| No log | 24.0 | 96 | 0.7469 |
| No log | 25.0 | 100 | 0.7447 |
| No log | 26.0 | 104 | 0.7436 |
| No log | 27.0 | 108 | 0.7435 |
| No log | 28.0 | 112 | 0.7431 |
| No log | 29.0 | 116 | 0.7426 |
| No log | 30.0 | 120 | 0.7418 |
| No log | 31.0 | 124 | 0.7414 |
| No log | 32.0 | 128 | 0.7409 |
| No log | 33.0 | 132 | 0.7393 |
| No log | 34.0 | 136 | 0.7393 |
| No log | 35.0 | 140 | 0.7392 |
| No log | 36.0 | 144 | 0.7368 |
| No log | 37.0 | 148 | 0.7361 |
| No log | 38.0 | 152 | 0.7366 |
| No log | 39.0 | 156 | 0.7367 |
| No log | 40.0 | 160 | 0.7349 |
| No log | 41.0 | 164 | 0.7344 |
| No log | 42.0 | 168 | 0.7341 |
| No log | 43.0 | 172 | 0.7328 |
| No log | 44.0 | 176 | 0.7317 |
| No log | 45.0 | 180 | 0.7314 |
| No log | 46.0 | 184 | 0.7325 |
| No log | 47.0 | 188 | 0.7325 |
| No log | 48.0 | 192 | 0.7311 |
| No log | 49.0 | 196 | 0.7301 |
| No log | 50.0 | 200 | 0.7306 |
| No log | 51.0 | 204 | 0.7305 |
| No log | 52.0 | 208 | 0.7301 |
| No log | 53.0 | 212 | 0.7286 |
| No log | 54.0 | 216 | 0.7266 |
| No log | 55.0 | 220 | 0.7275 |
| No log | 56.0 | 224 | 0.7292 |
| No log | 57.0 | 228 | 0.7288 |
| No log | 58.0 | 232 | 0.7275 |
| No log | 59.0 | 236 | 0.7263 |
| No log | 60.0 | 240 | 0.7265 |
| No log | 61.0 | 244 | 0.7269 |
| No log | 62.0 | 248 | 0.7271 |
| No log | 63.0 | 252 | 0.7262 |
| No log | 64.0 | 256 | 0.7252 |
| No log | 65.0 | 260 | 0.7262 |
| No log | 66.0 | 264 | 0.7251 |
| No log | 67.0 | 268 | 0.7243 |
| No log | 68.0 | 272 | 0.7241 |
| No log | 69.0 | 276 | 0.7248 |
| No log | 70.0 | 280 | 0.7238 |
| No log | 71.0 | 284 | 0.7227 |
| No log | 72.0 | 288 | 0.7234 |
| No log | 73.0 | 292 | 0.7233 |
| No log | 74.0 | 296 | 0.7222 |
| No log | 75.0 | 300 | 0.7213 |
| No log | 76.0 | 304 | 0.7223 |
| No log | 77.0 | 308 | 0.7230 |
| No log | 78.0 | 312 | 0.7226 |
| No log | 79.0 | 316 | 0.7222 |
| No log | 80.0 | 320 | 0.7220 |
| No log | 81.0 | 324 | 0.7222 |
| No log | 82.0 | 328 | 0.7220 |
| No log | 83.0 | 332 | 0.7209 |
| No log | 84.0 | 336 | 0.7202 |
| No log | 85.0 | 340 | 0.7208 |
| No log | 86.0 | 344 | 0.7218 |
| No log | 87.0 | 348 | 0.7214 |
| No log | 88.0 | 352 | 0.7195 |
| No log | 89.0 | 356 | 0.7190 |
| No log | 90.0 | 360 | 0.7199 |
| No log | 91.0 | 364 | 0.7213 |
| No log | 92.0 | 368 | 0.7207 |
| No log | 93.0 | 372 | 0.7198 |
| No log | 94.0 | 376 | 0.7198 |
| No log | 95.0 | 380 | 0.7200 |
| No log | 96.0 | 384 | 0.7199 |
| No log | 97.0 | 388 | 0.7198 |
| No log | 98.0 | 392 | 0.7198 |
| No log | 99.0 | 396 | 0.7194 |
| No log | 100.0 | 400 | 0.7192 |
| No log | 101.0 | 404 | 0.7187 |
| No log | 102.0 | 408 | 0.7181 |
| No log | 103.0 | 412 | 0.7186 |
| No log | 104.0 | 416 | 0.7192 |
| No log | 105.0 | 420 | 0.7184 |
| No log | 106.0 | 424 | 0.7178 |
| No log | 107.0 | 428 | 0.7181 |
| No log | 108.0 | 432 | 0.7188 |
| No log | 109.0 | 436 | 0.7192 |
| No log | 110.0 | 440 | 0.7189 |
| No log | 111.0 | 444 | 0.7186 |
| No log | 112.0 | 448 | 0.7183 |
| No log | 113.0 | 452 | 0.7181 |
| No log | 114.0 | 456 | 0.7184 |
| No log | 115.0 | 460 | 0.7187 |
| No log | 116.0 | 464 | 0.7186 |
| No log | 117.0 | 468 | 0.7180 |
| No log | 118.0 | 472 | 0.7178 |
| No log | 119.0 | 476 | 0.7176 |
| No log | 120.0 | 480 | 0.7175 |
| No log | 121.0 | 484 | 0.7171 |
| No log | 122.0 | 488 | 0.7170 |
| No log | 123.0 | 492 | 0.7172 |
| No log | 124.0 | 496 | 0.7173 |
| 0.2218 | 125.0 | 500 | 0.7174 |
| 0.2218 | 126.0 | 504 | 0.7173 |
| 0.2218 | 127.0 | 508 | 0.7174 |
| 0.2218 | 128.0 | 512 | 0.7174 |
| 0.2218 | 129.0 | 516 | 0.7174 |
| 0.2218 | 130.0 | 520 | 0.7171 |
| 0.2218 | 131.0 | 524 | 0.7167 |
| 0.2218 | 132.0 | 528 | 0.7166 |
| 0.2218 | 133.0 | 532 | 0.7169 |
| 0.2218 | 134.0 | 536 | 0.7171 |
| 0.2218 | 135.0 | 540 | 0.7172 |
| 0.2218 | 136.0 | 544 | 0.7171 |
| 0.2218 | 137.0 | 548 | 0.7168 |
| 0.2218 | 138.0 | 552 | 0.7166 |
| 0.2218 | 139.0 | 556 | 0.7165 |
| 0.2218 | 140.0 | 560 | 0.7165 |
| 0.2218 | 141.0 | 564 | 0.7165 |
| 0.2218 | 142.0 | 568 | 0.7165 |
| 0.2218 | 143.0 | 572 | 0.7166 |
| 0.2218 | 144.0 | 576 | 0.7166 |
| 0.2218 | 145.0 | 580 | 0.7166 |
| 0.2218 | 146.0 | 584 | 0.7166 |
| 0.2218 | 147.0 | 588 | 0.7166 |
| 0.2218 | 148.0 | 592 | 0.7166 |
| 0.2218 | 149.0 | 596 | 0.7165 |
| 0.2218 | 150.0 | 600 | 0.7165 |

### Framework versions

- Transformers 4.38.2
- Pytorch 2.2.1+cu121
- Datasets 2.18.0
- Tokenizers 0.15.2
config.json
ADDED
@@ -0,0 +1,46 @@
{
  "_name_or_path": "distilgpt2",
  "_num_labels": 1,
  "activation_function": "gelu_new",
  "architectures": [
    "GPT2LMHeadModel"
  ],
  "attn_pdrop": 0.1,
  "bos_token_id": 50256,
  "embd_pdrop": 0.1,
  "eos_token_id": 50256,
  "id2label": {
    "0": "LABEL_0"
  },
  "initializer_range": 0.02,
  "label2id": {
    "LABEL_0": 0
  },
  "layer_norm_epsilon": 1e-05,
  "model_type": "gpt2",
  "n_ctx": 1024,
  "n_embd": 768,
  "n_head": 12,
  "n_inner": null,
  "n_layer": 6,
  "n_positions": 1024,
  "reorder_and_upcast_attn": false,
  "resid_pdrop": 0.1,
  "scale_attn_by_inverse_layer_idx": false,
  "scale_attn_weights": true,
  "summary_activation": null,
  "summary_first_dropout": 0.1,
  "summary_proj_to_labels": true,
  "summary_type": "cls_index",
  "summary_use_proj": true,
  "task_specific_params": {
    "text-generation": {
      "do_sample": true,
      "max_length": 50
    }
  },
  "torch_dtype": "float32",
  "transformers_version": "4.38.2",
  "use_cache": true,
  "vocab_size": 50257
}
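
The config above simply pins distilgpt2's architecture (6 layers, 12 attention heads, 768-dimensional embeddings, 50257-token vocabulary) and keeps its default text-generation settings (sampling, 50-token max length). A minimal sanity check, assuming the files are available under the repository name used below:

```python
# Inspect the shipped config; the path / hub id is an assumption.
from transformers import GPT2Config

config = GPT2Config.from_pretrained("StatementOfWork_Generator_Omega_BS_512")
print(config.n_layer, config.n_head, config.n_embd)    # 6 12 768
print(config.task_specific_params["text-generation"])  # {'do_sample': True, 'max_length': 50}
```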
generation_config.json
ADDED
@@ -0,0 +1,6 @@
{
  "_from_model_config": true,
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "transformers_version": "4.38.2"
}
merges.txt
ADDED
The diff for this file is too large to render.
model.safetensors
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:fe5cfda23c2f7a4512fef5736f4f6825e7b3d9dd6085adc324a3c3403cd8d4d2
size 327657928
runs/Jun27_18-14-59_viridian/events.out.tfevents.1719512102.viridian.2678733.13
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:90a1071cc19440fb0812673cf89b7df5fd73529fe6a5e7656bac7bceedbd3d1e
size 46127
special_tokens_map.json
ADDED
@@ -0,0 +1,24 @@
{
  "bos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": "<|endoftext|>",
  "unk_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": true,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render.
tokenizer_config.json
ADDED
@@ -0,0 +1,20 @@
{
  "add_prefix_space": false,
  "added_tokens_decoder": {
    "50256": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": true,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|endoftext|>",
  "clean_up_tokenization_spaces": true,
  "eos_token": "<|endoftext|>",
  "model_max_length": 1024,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "GPT2Tokenizer",
  "unk_token": "<|endoftext|>"
}
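
Note that the padding token is mapped onto `<|endoftext|>`, since GPT-2 ships without a dedicated pad token; a minimal check, again assuming the repository name below as the path:

```python
# Verify the tokenizer settings shipped in this commit; the path is an assumption.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("StatementOfWork_Generator_Omega_BS_512")
assert tok.pad_token == tok.eos_token == "<|endoftext|>"
print(tok.model_max_length)  # 1024
```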
training_args.bin
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:b856d6c72d29239eb1952dbd09b6fe9f9f995b23019b422b65b1214ab21012d9
size 5048
vocab.json
ADDED
The diff for this file is too large to render.