set "use_flash_attn" to false
#25 by crumeike - opened

config.json CHANGED (+2 -2)
@@ -17,7 +17,7 @@
   "architectures": [
     "InternLM2ForCausalLM"
   ],
-  "attn_implementation": "
+  "attn_implementation": "eager",
   "auto_map": {
     "AutoConfig": "configuration_internlm2.InternLM2Config",
     "AutoModel": "modeling_internlm2.InternLM2ForCausalLM",
@@ -138,6 +138,6 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.37.2",
   "use_bfloat16": true,
-  "use_flash_attn":
+  "use_flash_attn": false
 }
 }
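For anyone who cannot wait for the merge, a minimal sketch of applying the same two overrides at load time instead of editing config.json. The repo id below is a placeholder, and whether the repo's remote modeling code honors both fields at runtime is an assumption based on this diff, not something the PR confirms:

from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo id, used only for illustration.
repo_id = "internlm/internlm2-chat-7b"

# Mirror the two fields this PR changes in config.json.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
config.use_flash_attn = False
config.attn_implementation = "eager"

model = AutoModelForCausalLM.from_pretrained(
    repo_id,
    config=config,
    trust_remote_code=True,
)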