Update configuration_gpt2vision.py
Browse files
configuration_gpt2vision.py
CHANGED
|
@@ -136,7 +136,7 @@ class GPT2Config(PretrainedConfig):
|
|
| 136 |
|
| 137 |
def __init__(
|
| 138 |
self,
|
| 139 |
-
vocab_size=50257,
|
| 140 |
n_positions=1024,
|
| 141 |
n_embd=768,
|
| 142 |
n_layer=12,
|
|
@@ -157,8 +157,6 @@ class GPT2Config(PretrainedConfig):
|
|
| 157 |
use_cache=True,
|
| 158 |
bos_token_id=50256,
|
| 159 |
eos_token_id=50256,
|
| 160 |
-
|
| 161 |
-
pad_token_id=50257,
|
| 162 |
scale_attn_by_inverse_layer_idx=False,
|
| 163 |
reorder_and_upcast_attn=False,
|
| 164 |
**kwargs,
|
|
@@ -168,7 +166,6 @@ class GPT2Config(PretrainedConfig):
|
|
| 168 |
self.n_embd = n_embd
|
| 169 |
self.n_layer = n_layer
|
| 170 |
self.n_head = n_head
|
| 171 |
-
self.pad_token_id = pad_token_id
|
| 172 |
self.n_inner = n_inner
|
| 173 |
self.activation_function = activation_function
|
| 174 |
self.resid_pdrop = resid_pdrop
|
|
|
|
| 136 |
|
| 137 |
def __init__(
|
| 138 |
self,
|
| 139 |
+
vocab_size=50258,
|
| 140 |
n_positions=1024,
|
| 141 |
n_embd=768,
|
| 142 |
n_layer=12,
|
|
|
|
| 157 |
use_cache=True,
|
| 158 |
bos_token_id=50256,
|
| 159 |
eos_token_id=50256,
|
|
|
|
|
|
|
| 160 |
scale_attn_by_inverse_layer_idx=False,
|
| 161 |
reorder_and_upcast_attn=False,
|
| 162 |
**kwargs,
|
|
|
|
| 166 |
self.n_embd = n_embd
|
| 167 |
self.n_layer = n_layer
|
| 168 |
self.n_head = n_head
|
|
|
|
| 169 |
self.n_inner = n_inner
|
| 170 |
self.activation_function = activation_function
|
| 171 |
self.resid_pdrop = resid_pdrop
|