John Ho committed on
Commit
d81f6c9
·
1 Parent(s): 354d431

trying to debug issue with F.scaled_dot_product_attention

Browse files
Files changed (1) hide show
  1. app.py +7 -0
app.py CHANGED
@@ -5,6 +5,13 @@ from tqdm import tqdm
5
  from samv2_handler import load_sam_image_model, run_sam_im_inference
6
  from PIL import Image
7
  from typing import Union
 
 
 
 
 
 
 
8
 
9
 
10
  def download_checkpoints():
 
5
  from samv2_handler import load_sam_image_model, run_sam_im_inference
6
  from PIL import Image
7
  from typing import Union
8
import os
import subprocess
import sys

# Install flash-attn at startup (HF Spaces-style workaround).
# FLASH_ATTENTION_SKIP_CUDA_BUILD=TRUE makes flash-attn skip compiling CUDA
# kernels at install time (no CUDA toolchain needed on the build host).
#
# BUG FIX: the original passed env={"FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
# which REPLACES the entire environment of the child process — PATH, HOME, etc.
# are all dropped, so `pip` is typically not found or resolves to the wrong
# interpreter. Merge the flag into the current environment instead, and invoke
# pip via the running interpreter (`sys.executable -m pip`) so the package is
# installed into the same environment this script runs in.
subprocess.run(
    f"{sys.executable} -m pip install flash-attn --no-build-isolation",
    env={**os.environ, "FLASH_ATTENTION_SKIP_CUDA_BUILD": "TRUE"},
    shell=True,
)
15
 
16
 
17
  def download_checkpoints():