prithivMLmods committed on
Commit
6b0450c
·
verified ·
1 Parent(s): 8f4c471

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +1 -12
app.py CHANGED
@@ -1,6 +1,5 @@
1
  import os
2
  import sys
3
- import subprocess
4
  from threading import Thread
5
  from typing import Iterable
6
  from huggingface_hub import snapshot_download
@@ -21,14 +20,6 @@ from gradio.themes.utils import colors, fonts, sizes
21
 
22
  # --- Theme and CSS Definition ---
23
 
24
- # Attempt to install flash-attn
25
- try:
26
- subprocess.run('pip install flash-attn --no-build-isolation', env={'FLASH_ATTENTION_SKIP_CUDA_BUILD': "TRUE"}, check=True, shell=True)
27
- except subprocess.CalledProcessError as e:
28
- print(f"Error installing flash-attn: {e}")
29
- print("Continuing without flash-attn.")
30
-
31
-
32
  colors.steel_blue = colors.Color(
33
  name="steel_blue",
34
  c50="#EBF3F8",
@@ -158,14 +149,12 @@ model_m = Qwen2_5_VLForConditionalGeneration.from_pretrained(
158
  torch_dtype=torch.float16
159
  ).to(device).eval()
160
 
161
- import flash_attn_2_cuda as flash_attn_gpu
162
-
163
  # Load Dots.OCR from the local, patched directory
164
  MODEL_PATH_D = model_path_d_local
165
  processor_d = AutoProcessor.from_pretrained(MODEL_PATH_D, trust_remote_code=True)
166
  model_d = AutoModelForCausalLM.from_pretrained(
167
  MODEL_PATH_D,
168
- attn_implementation="flash_attention_2",
169
  torch_dtype=torch.bfloat16,
170
  device_map="auto",
171
  trust_remote_code=True
 
1
  import os
2
  import sys
 
3
  from threading import Thread
4
  from typing import Iterable
5
  from huggingface_hub import snapshot_download
 
20
 
21
  # --- Theme and CSS Definition ---
22
 
 
 
 
 
 
 
 
 
23
  colors.steel_blue = colors.Color(
24
  name="steel_blue",
25
  c50="#EBF3F8",
 
149
  torch_dtype=torch.float16
150
  ).to(device).eval()
151
 
 
 
152
  # Load Dots.OCR from the local, patched directory
153
  MODEL_PATH_D = model_path_d_local
154
  processor_d = AutoProcessor.from_pretrained(MODEL_PATH_D, trust_remote_code=True)
155
  model_d = AutoModelForCausalLM.from_pretrained(
156
  MODEL_PATH_D,
157
+ attn_implementation="eager",
158
  torch_dtype=torch.bfloat16,
159
  device_map="auto",
160
  trust_remote_code=True