ArcMa committed
Commit · 223ee14
1 Parent(s): e6eff16

files

Browse files:
- config.json +19 -3
- pytorch_model.bin +2 -2
config.json CHANGED

@@ -1,4 +1,5 @@
 {
+  "_name_or_path": "tomofi/trocr-captcha",
   "architectures": [
     "VisionEncoderDecoderModel"
   ],
@@ -26,6 +27,7 @@
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
     "eos_token_id": 2,
+    "exponential_decay_length_penalty": null,
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
@@ -65,6 +67,7 @@
     "sep_token_id": null,
     "task_specific_params": null,
     "temperature": 1.0,
+    "tf_legacy_loss": false,
     "tie_encoder_decoder": false,
     "tie_word_embeddings": false,
     "tokenizer_class": null,
@@ -72,12 +75,15 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.21.1",
+    "typical_p": 1.0,
     "use_bfloat16": false,
     "use_cache": false,
     "use_learned_position_embeddings": true,
     "vocab_size": 64044
   },
+  "decoder_start_token_id": 0,
+  "early_stopping": true,
   "encoder": {
     "_name_or_path": "",
     "add_cross_attention": false,
@@ -92,7 +98,9 @@
     "do_sample": false,
     "early_stopping": false,
     "encoder_no_repeat_ngram_size": 0,
+    "encoder_stride": 16,
     "eos_token_id": null,
+    "exponential_decay_length_penalty": null,
     "finetuning_task": null,
     "forced_bos_token_id": null,
     "forced_eos_token_id": null,
@@ -140,6 +148,7 @@
     "sep_token_id": null,
     "task_specific_params": null,
     "temperature": 1.0,
+    "tf_legacy_loss": false,
     "tie_encoder_decoder": false,
     "tie_word_embeddings": true,
     "tokenizer_class": null,
@@ -147,13 +156,20 @@
     "top_p": 1.0,
     "torch_dtype": null,
     "torchscript": false,
-    "transformers_version": "4.
+    "transformers_version": "4.21.1",
+    "typical_p": 1.0,
     "use_bfloat16": false
   },
   "eos_token_id": 2,
   "is_encoder_decoder": true,
+  "length_penalty": 2.0,
+  "max_length": 16,
   "model_type": "vision-encoder-decoder",
+  "no_repeat_ngram_size": 3,
+  "num_beams": 4,
+  "pad_token_id": 1,
   "tie_word_embeddings": false,
   "torch_dtype": "float32",
-  "transformers_version": null
+  "transformers_version": null,
+  "vocab_size": 64044
 }
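The keys this commit adds at the top level of the config ("decoder_start_token_id": 0, "early_stopping": true, "length_penalty": 2.0, "max_length": 16, "no_repeat_ngram_size": 3, "num_beams": 4, "pad_token_id": 1) act as generation defaults: transformers reads them from the model config, so model.generate() runs 4-beam search capped at 16 tokens with no explicit arguments. A minimal usage sketch under two assumptions: the repo id "tomofi/trocr-captcha" is copied from "_name_or_path" above (the checkpoint may actually live under a different namespace), and "captcha.png" is a placeholder input image.

```python
from PIL import Image
from transformers import TrOCRProcessor, VisionEncoderDecoderModel

# Assumed repo id, copied from "_name_or_path" in this config;
# adjust if the checkpoint lives under a different namespace.
repo_id = "tomofi/trocr-captcha"

processor = TrOCRProcessor.from_pretrained(repo_id)
model = VisionEncoderDecoderModel.from_pretrained(repo_id)

# Placeholder image path; any RGB captcha crop works here.
image = Image.open("captcha.png").convert("RGB")
pixel_values = processor(images=image, return_tensors="pt").pixel_values

# No generation kwargs needed: num_beams=4, max_length=16,
# length_penalty=2.0, no_repeat_ngram_size=3 and early_stopping=True
# come from the config defaults added in this commit.
generated_ids = model.generate(pixel_values)
print(processor.batch_decode(generated_ids, skip_special_tokens=True)[0])
```

The "transformers_version": "4.21.1" stamps also suggest running this checkpoint with transformers 4.21.1 or later, since keys such as "tf_legacy_loss" and "typical_p" only appear in configs written by newer releases.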
pytorch_model.bin CHANGED

@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:a3f92d14db633471beea1a1e7eeaa911c84966d841c0a5879a6ae4cf222795bc
+size 246509357
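pytorch_model.bin is tracked with Git LFS, so the diff above only swaps the pointer file: the new weights are identified by their sha256 oid and byte size. A small sketch for checking that a downloaded copy of the resolved file (path assumed to be ./pytorch_model.bin) matches the updated pointer:

```python
import hashlib
import os

# Values from the updated LFS pointer in this commit.
EXPECTED_SHA256 = "a3f92d14db633471beea1a1e7eeaa911c84966d841c0a5879a6ae4cf222795bc"
EXPECTED_SIZE = 246509357

# Assumed download location of the resolved (non-pointer) file.
path = "pytorch_model.bin"

# Hash in 1 MiB chunks so the 246 MB file is never fully in memory.
sha = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha.update(chunk)

assert os.path.getsize(path) == EXPECTED_SIZE, "size mismatch"
assert sha.hexdigest() == EXPECTED_SHA256, "sha256 mismatch"
print("pytorch_model.bin matches the LFS pointer in this commit")
```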