ccclemenfff committed
Commit 723daaf · 1 Parent(s): e65359a

fix: disable flash attention 2 for CLIPVisionModel

Files changed (2)
  1. handler.py +0 -1
  2. videollama2/model/encoder.py +1 -1
handler.py CHANGED
@@ -3,7 +3,6 @@ import base64
 import tempfile
 import os
 import sys
-os.environ["FLASH_ATTENTION_2_ENABLED"] = "false"
 
 
 # Ensure the videollama2 module can be imported (the model code must live in the same directory or be installed)
videollama2/model/encoder.py CHANGED
@@ -1,5 +1,5 @@
 import os
-
+os.environ["TRANSFORMERS_NO_FLASH_ATTN_2"] = "1"
 import torch
 import torch.nn as nn
 
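
For context, the change above depends on the environment variable being set before transformers is imported by the encoder. A minimal sketch of a more explicit alternative, assuming a recent transformers release (>= 4.36) and a placeholder CLIP checkpoint name not taken from this repository, selects the attention backend directly at load time instead:

# Sketch (not part of this commit): choose the attention backend explicitly
# when loading the vision tower, rather than via environment variables.
from transformers import CLIPVisionModel

vision_tower = CLIPVisionModel.from_pretrained(
    "openai/clip-vit-large-patch14-336",  # placeholder checkpoint name
    attn_implementation="eager",          # force the eager path, bypassing flash attention 2
)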