Recag committed on
Commit
c895f0b
·
1 Parent(s): 14a2047

Update model.py

Browse files
Files changed (1) hide show
  1. model.py +2 -2
model.py CHANGED
@@ -36,8 +36,8 @@ if is_flash_attn_2_available():
36
  # This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
37
  # It means that the function will not be traced through and simply appear as a node in the graph.
38
  if is_torch_fx_available():
39
- if not is_torch_greater_or_equal_than_1_13:
40
- import torch.fx
41
 
42
  _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
43
 
 
36
  # This makes `_prepare_4d_causal_attention_mask` a leaf function in the FX graph.
37
  # It means that the function will not be traced through and simply appear as a node in the graph.
38
  if is_torch_fx_available():
39
+
40
+ import torch.fx
41
 
42
  _prepare_4d_causal_attention_mask = torch.fx.wrap(_prepare_4d_causal_attention_mask)
43