Commit: Update modeling_lumees.py

File changed: modeling_lumees.py (+2 −2)

@@ -67,11 +67,11 @@ except ImportError as error:
             "Current `flash-attention` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`."
         )

-_CHECKPOINT_FOR_DOC = "…"   (old value truncated in this capture)
+_CHECKPOINT_FOR_DOC = "lumees/Lumees-3.8B-Reasoning"
 _CONFIG_FOR_DOC = "LumeesConfig"

 LUMEES_PRETRAINED_MODEL_ARCHIVE_LIST = [
-    "…",   (old value truncated in this capture)
+    "lumees/Lumees-3.8B-Reasoning",
 ]

Summary: this commit updates the documentation checkpoint constant and the
pretrained-model archive list to reference the "lumees/Lumees-3.8B-Reasoning"
checkpoint. The removed lines' original string values were cut off when this
page was captured, so they are shown as "…" above.