Update modeling_neollm.py
Browse files — modeling_neollm.py (+1, −1)
modeling_neollm.py
CHANGED
@@ -32,7 +32,7 @@ from transformers.utils.import_utils import (
     is_causal_conv1d_available,
     is_flash_linear_attention_available,
 )
-from configuration_neollm import NeoLLMConfig
+from .configuration_neollm import NeoLLMConfig


 if is_causal_conv1d_available():