hasankursun committed on
Commit
42f223e
·
verified ·
1 Parent(s): abd58d0

Update modeling_lumees.py

Browse files
Files changed (1) hide show
  1. modeling_lumees.py +2 -2
modeling_lumees.py CHANGED
@@ -67,11 +67,11 @@ except ImportError as error:
67
  "Current `flash-attention` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`."
68
  )
69
 
70
- _CHECKPOINT_FOR_DOC = "your-org/Lumees-mini-instruct"
71
  _CONFIG_FOR_DOC = "LumeesConfig"
72
 
73
  LUMEES_PRETRAINED_MODEL_ARCHIVE_LIST = [
74
- "your-org/Lumees-mini-instruct",
75
  ]
76
 
77
 
 
67
  "Current `flash-attention` does not support `window_size`. Either upgrade or use `attn_implementation='eager'`."
68
  )
69
 
70
+ _CHECKPOINT_FOR_DOC = "lumees/Lumees-3.8B-Reasoning"
71
  _CONFIG_FOR_DOC = "LumeesConfig"
72
 
73
  LUMEES_PRETRAINED_MODEL_ARCHIVE_LIST = [
74
+ "lumees/Lumees-3.8B-Reasoning",
75
  ]
76
 
77