[import] fixed
Browse files- configuration_internvl_chat.py +1 -1
- modeling_intern_vit.py +1 -1
- modeling_internvl_chat.py +3 -3
configuration_internvl_chat.py
CHANGED
|
@@ -10,7 +10,7 @@ from transformers import AutoConfig, LlamaConfig, Qwen2Config
|
|
| 10 |
from transformers.configuration_utils import PretrainedConfig
|
| 11 |
from transformers.utils import logging
|
| 12 |
|
| 13 |
- from .configuration_intern_vit import InternVisionConfig
|
| 14 |
|
| 15 |
logger = logging.get_logger(__name__)
|
| 16 |
|
|
|
|
| 10 |
from transformers.configuration_utils import PretrainedConfig
|
| 11 |
from transformers.utils import logging
|
| 12 |
|
| 13 |
+ from configuration_intern_vit import InternVisionConfig
|
| 14 |
|
| 15 |
logger = logging.get_logger(__name__)
|
| 16 |
|
modeling_intern_vit.py
CHANGED
|
@@ -18,7 +18,7 @@ from transformers.modeling_outputs import (BaseModelOutput,
|
|
| 18 |
from transformers.modeling_utils import PreTrainedModel
|
| 19 |
from transformers.utils import logging
|
| 20 |
|
| 21 |
- from .configuration_intern_vit import InternVisionConfig
|
| 22 |
|
| 23 |
try:
|
| 24 |
from flash_attn.bert_padding import pad_input, unpad_input
|
|
|
|
| 18 |
from transformers.modeling_utils import PreTrainedModel
|
| 19 |
from transformers.utils import logging
|
| 20 |
|
| 21 |
+ from configuration_intern_vit import InternVisionConfig
|
| 22 |
|
| 23 |
try:
|
| 24 |
from flash_attn.bert_padding import pad_input, unpad_input
|
modeling_internvl_chat.py
CHANGED
|
@@ -17,9 +17,9 @@ from transformers.modeling_outputs import CausalLMOutputWithPast
|
|
| 17 |
from transformers.modeling_utils import PreTrainedModel
|
| 18 |
from transformers.utils import ModelOutput, logging
|
| 19 |
|
| 20 |
- from .configuration_internvl_chat import InternVLChatConfig
|
| 21 |
- from .conversation import get_conv_template
|
| 22 |
- from .modeling_intern_vit import InternVisionModel, has_flash_attn
|
| 23 |
|
| 24 |
logger = logging.get_logger(__name__)
|
| 25 |
|
|
|
|
| 17 |
from transformers.modeling_utils import PreTrainedModel
|
| 18 |
from transformers.utils import ModelOutput, logging
|
| 19 |
|
| 20 |
+ from configuration_internvl_chat import InternVLChatConfig
|
| 21 |
+ from conversation import get_conv_template
|
| 22 |
+ from modeling_intern_vit import InternVisionModel, has_flash_attn
|
| 23 |
|
| 24 |
logger = logging.get_logger(__name__)
|
| 25 |
|