Compatibility with v5
#5
by RaushanTurganbay HF Staff - opened
- modeling_intern_vit.py +3 -1
modeling_intern_vit.py
CHANGED
|
@@ -3,6 +3,7 @@
|
|
| 3 |
# Copyright (c) 2023 OpenGVLab
|
| 4 |
# Licensed under The MIT License [see LICENSE for details]
|
| 5 |
# --------------------------------------------------------
|
|
|
|
| 6 |
from typing import Optional, Tuple, Union
|
| 7 |
|
| 8 |
import torch
|
|
@@ -243,7 +244,7 @@ class InternVisionEncoder(nn.Module):
|
|
| 243 |
super().__init__()
|
| 244 |
self.config = config
|
| 245 |
# stochastic depth decay rule
|
| 246 |
-
dpr = [x.item() for x in torch.linspace(0, config.drop_path_rate, config.num_hidden_layers)]
|
| 247 |
self.layers = nn.ModuleList([
|
| 248 |
InternVisionEncoderLayer(config, dpr[idx]) for idx in range(config.num_hidden_layers)])
|
| 249 |
self.gradient_checkpointing = True
|
|
@@ -306,6 +307,7 @@ class InternVisionModel(PreTrainedModel):
|
|
| 306 |
|
| 307 |
self.embeddings = InternVisionEmbeddings(config)
|
| 308 |
self.encoder = InternVisionEncoder(config)
|
|
|
|
| 309 |
|
| 310 |
def resize_pos_embeddings(self, old_size, new_size, patch_size):
|
| 311 |
pos_emb = self.embeddings.position_embedding
|
|
|
|
| 3 |
# Copyright (c) 2023 OpenGVLab
|
| 4 |
# Licensed under The MIT License [see LICENSE for details]
|
| 5 |
# --------------------------------------------------------
|
| 6 |
+
import numpy as np
|
| 7 |
from typing import Optional, Tuple, Union
|
| 8 |
|
| 9 |
import torch
|
|
|
|
| 244 |
super().__init__()
|
| 245 |
self.config = config
|
| 246 |
# stochastic depth decay rule
|
| 247 |
+
dpr = [x for x in np.linspace(0, config.drop_path_rate, config.num_hidden_layers)]
|
| 248 |
self.layers = nn.ModuleList([
|
| 249 |
InternVisionEncoderLayer(config, dpr[idx]) for idx in range(config.num_hidden_layers)])
|
| 250 |
self.gradient_checkpointing = True
|
|
|
|
| 307 |
|
| 308 |
self.embeddings = InternVisionEmbeddings(config)
|
| 309 |
self.encoder = InternVisionEncoder(config)
|
| 310 |
+
self.post_init()
|
| 311 |
|
| 312 |
def resize_pos_embeddings(self, old_size, new_size, patch_size):
|
| 313 |
pos_emb = self.embeddings.position_embedding
|