OuyBin committed on
Commit
2780bb2
·
verified ·
1 Parent(s): ea77612

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +2 -2
app.py CHANGED
@@ -37,7 +37,7 @@ COT_TEMPLATE = \
37
  model = SAMR1ForConditionalGeneration_qwen2p5.from_pretrained(
38
  MODEL_PATH,
39
  torch_dtype=torch.bfloat16,
40
- attn_implementation="flash_attention_2",
41
  ignore_mismatched_sizes=True,
42
  ).to(DEVICE)
43
  processor = AutoProcessor.from_pretrained(MODEL_PATH)
@@ -65,7 +65,7 @@ def preprocess(image_path: str, instruction: str):
65
  pixel_mean = torch.Tensor([123.675, 116.28, 103.53]).view(-1, 1, 1)
66
  pixel_std = torch.Tensor([58.395, 57.12, 57.375]).view(-1, 1, 1)
67
 
68
- question_template = COT_TEMPLATE
69
  system_template = QWEN2_SYS
70
 
71
  image = Image.open(image_path).convert(mode="RGB")
 
37
  model = SAMR1ForConditionalGeneration_qwen2p5.from_pretrained(
38
  MODEL_PATH,
39
  torch_dtype=torch.bfloat16,
40
+ attn_implementation="eager",
41
  ignore_mismatched_sizes=True,
42
  ).to(DEVICE)
43
  processor = AutoProcessor.from_pretrained(MODEL_PATH)
 
65
  pixel_mean = torch.Tensor([123.675, 116.28, 103.53]).view(-1, 1, 1)
66
  pixel_std = torch.Tensor([58.395, 57.12, 57.375]).view(-1, 1, 1)
67
 
68
+ question_template = DEFAULT_TEMPLATE
69
  system_template = QWEN2_SYS
70
 
71
  image = Image.open(image_path).convert(mode="RGB")