prithivMLmods committed on
Commit
b696989
·
verified ·
1 Parent(s): f3894a5

update app

Browse files
Files changed (1) hide show
  1. app.py +0 -3
app.py CHANGED
@@ -84,7 +84,6 @@ logger.info(f"Loading model 1: {MODEL_ID_1}")
84
  processor_1 = AutoProcessor.from_pretrained(MODEL_ID_1, trust_remote_code=True)
85
  model_1 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
86
  MODEL_ID_1,
87
- attn_implementation="flash_attention_2",
88
  trust_remote_code=True,
89
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
90
  ).to(device).eval()
@@ -96,7 +95,6 @@ logger.info(f"Loading model 2: {MODEL_ID_2}")
96
  processor_2 = AutoProcessor.from_pretrained(MODEL_ID_2, trust_remote_code=True)
97
  model_2 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
98
  MODEL_ID_2,
99
- attn_implementation="flash_attention_2",
100
  trust_remote_code=True,
101
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
102
  ).to(device).eval()
@@ -108,7 +106,6 @@ logger.info(f"Loading model 3: {MODEL_ID_3}")
108
  processor_3 = AutoProcessor.from_pretrained(MODEL_ID_3, trust_remote_code=True)
109
  model_3 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
110
  MODEL_ID_3,
111
- attn_implementation="flash_attention_2",
112
  trust_remote_code=True,
113
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
114
  ).to(device).eval()
 
84
  processor_1 = AutoProcessor.from_pretrained(MODEL_ID_1, trust_remote_code=True)
85
  model_1 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
86
  MODEL_ID_1,
 
87
  trust_remote_code=True,
88
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
89
  ).to(device).eval()
 
95
  processor_2 = AutoProcessor.from_pretrained(MODEL_ID_2, trust_remote_code=True)
96
  model_2 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
97
  MODEL_ID_2,
 
98
  trust_remote_code=True,
99
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
100
  ).to(device).eval()
 
106
  processor_3 = AutoProcessor.from_pretrained(MODEL_ID_3, trust_remote_code=True)
107
  model_3 = Qwen2_5_VLForConditionalGeneration.from_pretrained(
108
  MODEL_ID_3,
 
109
  trust_remote_code=True,
110
  torch_dtype=torch.float16 if device == "cuda" else torch.float32
111
  ).to(device).eval()