Model save
- README.md +179 -0
- generation_config.json +7 -0
- special_tokens_map.json +23 -0
- tokenizer.json +0 -0
- tokenizer_config.json +115 -0
- trainer_state.json +0 -0
README.md
ADDED
@@ -0,0 +1,179 @@
---
library_name: transformers
tags:
- generated_from_trainer
model-index:
- name: smallm_70_rope
  results: []
---

<!-- This model card has been generated automatically according to the information the Trainer had access to. You
should probably proofread and complete it, then remove this comment. -->

# smallm_70_rope

This model is a fine-tuned version of [](https://huggingface.co/) on the None dataset.
It achieves the following results on the evaluation set:
- Loss: 2.8819
- Num Input Tokens Seen: 15728640000

## Model description

More information needed

## Intended uses & limitations

More information needed

## Training and evaluation data

More information needed

## Training procedure

### Training hyperparameters

The following hyperparameters were used during training:
- learning_rate: 0.001
- train_batch_size: 64
- eval_batch_size: 4
- seed: 42
- gradient_accumulation_steps: 4
- total_train_batch_size: 256
- optimizer: OptimizerNames.ADAMW_APEX_FUSED with betas=(0.9, 0.999), epsilon=1e-08, and no additional optimizer arguments
- lr_scheduler_type: warmup_stable_decay
- lr_scheduler_warmup_steps: 500
- training_steps: 60000

### Training results

| Training Loss | Epoch | Step | Validation Loss | Input Tokens Seen |
|:-------------:|:------:|:-----:|:---------------:|:-----------------:|
| 5.3179 | 0.0034 | 500 | 5.1793 | 131072000 |
| 4.208 | 0.0067 | 1000 | 4.1088 | 262144000 |
| 3.8864 | 0.0101 | 1500 | 3.8075 | 393216000 |
| 3.7289 | 0.0135 | 2000 | 3.6546 | 524288000 |
| 3.6424 | 0.0168 | 2500 | 3.5607 | 655360000 |
| 3.5846 | 0.0202 | 3000 | 3.5029 | 786432000 |
| 3.528 | 0.0235 | 3500 | 3.4473 | 917504000 |
| 3.4784 | 0.0269 | 4000 | 3.4037 | 1048576000 |
| 3.4509 | 0.0303 | 4500 | 3.3683 | 1179648000 |
| 3.4252 | 0.0336 | 5000 | 3.3413 | 1310720000 |
| 3.4036 | 0.0370 | 5500 | 3.3187 | 1441792000 |
| 3.3953 | 0.0404 | 6000 | 3.2934 | 1572864000 |
| 3.3625 | 0.0437 | 6500 | 3.2745 | 1703936000 |
| 3.3387 | 0.0471 | 7000 | 3.2563 | 1835008000 |
| 3.3459 | 0.0504 | 7500 | 3.2415 | 1966080000 |
| 3.3143 | 0.0538 | 8000 | 3.2275 | 2097152000 |
| 3.2975 | 0.0572 | 8500 | 3.2149 | 2228224000 |
| 3.2817 | 0.0605 | 9000 | 3.2016 | 2359296000 |
| 3.2876 | 0.0639 | 9500 | 3.1907 | 2490368000 |
| 3.2632 | 0.0673 | 10000 | 3.1775 | 2621440000 |
| 3.2577 | 0.0706 | 10500 | 3.1682 | 2752512000 |
| 3.2427 | 0.0740 | 11000 | 3.1592 | 2883584000 |
| 3.2421 | 0.0774 | 11500 | 3.1493 | 3014656000 |
| 3.2393 | 0.0807 | 12000 | 3.1432 | 3145728000 |
| 3.2386 | 0.0841 | 12500 | 3.1355 | 3276800000 |
| 3.2158 | 0.0874 | 13000 | 3.1287 | 3407872000 |
| 3.2117 | 0.0908 | 13500 | 3.1214 | 3538944000 |
| 3.2057 | 0.0942 | 14000 | 3.1152 | 3670016000 |
| 3.2121 | 0.0975 | 14500 | 3.1071 | 3801088000 |
| 3.2015 | 0.1009 | 15000 | 3.1015 | 3932160000 |
| 3.1925 | 0.1043 | 15500 | 3.0996 | 4063232000 |
| 3.1796 | 0.1076 | 16000 | 3.0902 | 4194304000 |
| 3.211 | 0.1110 | 16500 | 3.0987 | 4325376000 |
| 3.1778 | 0.1144 | 17000 | 3.0843 | 4456448000 |
| 3.1717 | 0.1177 | 17500 | 3.0752 | 4587520000 |
| 3.1597 | 0.1211 | 18000 | 3.0699 | 4718592000 |
| 3.183 | 0.1244 | 18500 | 3.0884 | 4849664000 |
| 3.1541 | 0.1278 | 19000 | 3.0668 | 4980736000 |
| 3.1499 | 0.1312 | 19500 | 3.0654 | 5111808000 |
| 3.1499 | 0.1345 | 20000 | 3.0563 | 5242880000 |
| 3.1462 | 0.1379 | 20500 | 3.0525 | 5373952000 |
| 3.15 | 0.1413 | 21000 | 3.0538 | 5505024000 |
| 3.1544 | 0.1446 | 21500 | 3.0516 | 5636096000 |
| 3.1475 | 0.1480 | 22000 | 3.0482 | 5767168000 |
| 3.1364 | 0.1513 | 22500 | 3.0421 | 5898240000 |
| 3.1564 | 0.1547 | 23000 | 3.0723 | 6029312000 |
| 3.1312 | 0.1581 | 23500 | 3.0458 | 6160384000 |
| 3.132 | 0.1614 | 24000 | 3.0352 | 6291456000 |
| 3.1358 | 0.1648 | 24500 | 3.0328 | 6422528000 |
| 3.1231 | 0.1682 | 25000 | 3.0353 | 6553600000 |
| 3.1248 | 0.1715 | 25500 | 3.0260 | 6684672000 |
| 3.118 | 0.1749 | 26000 | 3.0195 | 6815744000 |
| 3.1308 | 0.1783 | 26500 | 3.0297 | 6946816000 |
| 3.1286 | 0.1816 | 27000 | 3.0181 | 7077888000 |
| 3.1231 | 0.1850 | 27500 | 3.0236 | 7208960000 |
| 3.1399 | 0.1883 | 28000 | 3.0280 | 7340032000 |
| 3.1113 | 0.1917 | 28500 | 3.0133 | 7471104000 |
| 3.1287 | 0.1951 | 29000 | 3.0184 | 7602176000 |
| 3.108 | 0.1984 | 29500 | 3.0065 | 7733248000 |
| 3.1074 | 0.2018 | 30000 | 3.0053 | 7864320000 |
| 3.1155 | 0.2052 | 30500 | 3.0058 | 7995392000 |
| 3.0952 | 0.2085 | 31000 | 3.0034 | 8126464000 |
| 3.1095 | 0.2119 | 31500 | 3.0025 | 8257536000 |
| 3.1201 | 0.2152 | 32000 | 2.9990 | 8388608000 |
| 3.0979 | 0.2186 | 32500 | 2.9993 | 8519680000 |
| 3.1079 | 0.2220 | 33000 | 2.9947 | 8650752000 |
| 3.0888 | 0.2253 | 33500 | 2.9899 | 8781824000 |
| 3.1028 | 0.2287 | 34000 | 2.9927 | 8912896000 |
| 3.1182 | 0.2321 | 34500 | 3.0027 | 9043968000 |
| 3.0831 | 0.2354 | 35000 | 2.9875 | 9175040000 |
| 3.1019 | 0.2388 | 35500 | 2.9896 | 9306112000 |
| 3.0993 | 0.2422 | 36000 | 2.9876 | 9437184000 |
| 3.0801 | 0.2455 | 36500 | 2.9815 | 9568256000 |
| 3.0913 | 0.2489 | 37000 | 2.9841 | 9699328000 |
| 3.1105 | 0.2522 | 37500 | 2.9955 | 9830400000 |
| 3.0926 | 0.2556 | 38000 | 2.9854 | 9961472000 |
| 3.0802 | 0.2590 | 38500 | 2.9803 | 10092544000 |
| 3.0881 | 0.2623 | 39000 | 2.9857 | 10223616000 |
| 3.083 | 0.2657 | 39500 | 2.9809 | 10354688000 |
| 3.0904 | 0.2691 | 40000 | 2.9785 | 10485760000 |
| 3.0857 | 0.2724 | 40500 | 2.9742 | 10616832000 |
| 3.0675 | 0.2758 | 41000 | 2.9688 | 10747904000 |
| 3.0733 | 0.2791 | 41500 | 2.9694 | 10878976000 |
| 3.0685 | 0.2825 | 42000 | 2.9689 | 11010048000 |
| 3.0798 | 0.2859 | 42500 | 2.9728 | 11141120000 |
| 3.071 | 0.2892 | 43000 | 2.9696 | 11272192000 |
| 3.0664 | 0.2926 | 43500 | 2.9677 | 11403264000 |
| 3.0844 | 0.2960 | 44000 | 2.9880 | 11534336000 |
| 3.0591 | 0.2993 | 44500 | 2.9622 | 11665408000 |
| 3.0603 | 0.3027 | 45000 | 2.9669 | 11796480000 |
| 3.0714 | 0.3061 | 45500 | 2.9655 | 11927552000 |
| 3.0602 | 0.3094 | 46000 | 2.9600 | 12058624000 |
| 3.067 | 0.3128 | 46500 | 2.9571 | 12189696000 |
| 3.0676 | 0.3161 | 47000 | 2.9561 | 12320768000 |
| 3.0544 | 0.3195 | 47500 | 2.9534 | 12451840000 |
| 3.0489 | 0.3229 | 48000 | 2.9548 | 12582912000 |
| 3.072 | 0.3262 | 48500 | 2.9678 | 12713984000 |
| 3.0473 | 0.3296 | 49000 | 2.9521 | 12845056000 |
| 3.0573 | 0.3330 | 49500 | 2.9763 | 12976128000 |
| 3.0805 | 0.3363 | 50000 | 2.9581 | 13107200000 |
| 3.073 | 0.3397 | 50500 | 2.9553 | 13238272000 |
| 3.054 | 0.3431 | 51000 | 2.9483 | 13369344000 |
| 3.049 | 0.3464 | 51500 | 2.9457 | 13500416000 |
| 3.0509 | 0.3498 | 52000 | 2.9477 | 13631488000 |
| 3.0478 | 0.3531 | 52500 | 2.9460 | 13762560000 |
| 3.044 | 0.3565 | 53000 | 2.9570 | 13893632000 |
| 3.0444 | 0.3599 | 53500 | 2.9434 | 14024704000 |
| 3.071 | 0.3632 | 54000 | 2.9484 | 14155776000 |
| 3.0523 | 0.3666 | 54500 | 2.9419 | 14286848000 |
| 3.0524 | 0.3700 | 55000 | 2.9469 | 14417920000 |
| 3.0432 | 0.3733 | 55500 | 2.9362 | 14548992000 |
| 3.0364 | 0.3767 | 56000 | 2.9314 | 14680064000 |
| 3.0241 | 0.3800 | 56500 | 2.9202 | 14811136000 |
| 3.0101 | 0.3834 | 57000 | 2.9125 | 14942208000 |
| 3.0115 | 0.3868 | 57500 | 2.9029 | 15073280000 |
| 2.9931 | 0.3901 | 58000 | 2.8951 | 15204352000 |
| 2.9876 | 0.3935 | 58500 | 2.8888 | 15335424000 |
| 2.9856 | 0.3969 | 59000 | 2.8846 | 15466496000 |
| 2.9824 | 0.4002 | 59500 | 2.8822 | 15597568000 |
| 2.9789 | 0.4036 | 60000 | 2.8819 | 15728640000 |

### Framework versions

- Transformers 4.50.3
- Pytorch 2.6.0+cu126
- Datasets 3.5.0
- Tokenizers 0.21.1
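For orientation, here is a minimal sketch of how the hyperparameters listed in this card might map onto `transformers` `TrainingArguments`. Only the values recorded above come from the card; the output path and the evaluation cadence are assumptions inferred from the results table, and this is not the author's actual training script.

```python
# Hypothetical reconstruction of the training setup described in the card above.
# Values mirror the "Training hyperparameters" list; output_dir is a placeholder.
from transformers import TrainingArguments

training_args = TrainingArguments(
    output_dir="smallm_70_rope",        # placeholder, not recorded in the card
    learning_rate=1e-3,
    per_device_train_batch_size=64,     # train_batch_size: 64
    per_device_eval_batch_size=4,       # eval_batch_size: 4
    seed=42,
    gradient_accumulation_steps=4,      # 64 x 4 = 256 total train batch size
    optim="adamw_apex_fused",           # OptimizerNames.ADAMW_APEX_FUSED
    adam_beta1=0.9,
    adam_beta2=0.999,
    adam_epsilon=1e-8,
    lr_scheduler_type="warmup_stable_decay",
    warmup_steps=500,                   # lr_scheduler_warmup_steps: 500
    max_steps=60000,                    # training_steps: 60000
    eval_strategy="steps",
    eval_steps=500,                     # assumed from the 500-step eval cadence in the table
)
```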
generation_config.json
ADDED
@@ -0,0 +1,7 @@
{
  "_from_model_config": true,
  "bos_token_id": 1,
  "eos_token_id": 0,
  "pad_token_id": 0,
  "transformers_version": "4.50.3"
}
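As an aside for readers, these defaults are what `GenerationConfig` picks up at load time; a tiny illustrative check (the repo id below is a placeholder assumption, not confirmed by this commit):

```python
# Illustrative only; "user/smallm_70_rope" is a placeholder repo id.
from transformers import GenerationConfig

gen_cfg = GenerationConfig.from_pretrained("user/smallm_70_rope")
print(gen_cfg.bos_token_id)  # 1
print(gen_cfg.eos_token_id)  # 0 -- the same id is reused as pad_token_id
```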
special_tokens_map.json
ADDED
@@ -0,0 +1,23 @@
{
  "bos_token": {
    "content": "<|beginoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "eos_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  },
  "pad_token": {
    "content": "<|endoftext|>",
    "lstrip": false,
    "normalized": false,
    "rstrip": false,
    "single_word": false
  }
}
tokenizer.json
ADDED
The diff for this file is too large to render.
See raw diff
tokenizer_config.json
ADDED
@@ -0,0 +1,115 @@
{
  "added_tokens_decoder": {
    "0": {
      "content": "<|endoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "1": {
      "content": "<|beginoftext|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "2": {
      "content": "<|reserved_token_1|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "3": {
      "content": "<|reserved_token_2|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "4": {
      "content": "<|reserved_token_3|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "5": {
      "content": "<|reserved_token_4|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "6": {
      "content": "<|reserved_token_5|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "7": {
      "content": "<|reserved_token_6|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "8": {
      "content": "<|reserved_token_7|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "9": {
      "content": "<|reserved_token_8|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "10": {
      "content": "<|reserved_token_9|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "11": {
      "content": "<|reserved_token_10|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    },
    "12": {
      "content": "<|reserved_token_11|>",
      "lstrip": false,
      "normalized": false,
      "rstrip": false,
      "single_word": false,
      "special": true
    }
  },
  "bos_token": "<|beginoftext|>",
  "clean_up_tokenization_spaces": false,
  "eos_token": "<|endoftext|>",
  "extra_special_tokens": {},
  "model_max_length": 1000000000000000019884624838656,
  "pad_token": "<|endoftext|>",
  "tokenizer_class": "PreTrainedTokenizer"
}
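Taken together with special_tokens_map.json above, a loaded tokenizer should expose the special-token layout defined here. A short illustrative sanity check (the repo id is again a placeholder assumption):

```python
# Illustrative sanity check of the special-token setup; repo id is a placeholder.
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("user/smallm_70_rope")
assert tok.bos_token == "<|beginoftext|>" and tok.bos_token_id == 1
assert tok.eos_token == "<|endoftext|>" and tok.eos_token_id == 0
assert tok.pad_token_id == tok.eos_token_id  # pad reuses <|endoftext|> (id 0)
# ids 2-12 are <|reserved_token_1|> through <|reserved_token_11|>, kept free for later use
```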
trainer_state.json
ADDED
The diff for this file is too large to render.
See raw diff