Bapt120 committed on
Commit
8392fde
·
verified ·
1 Parent(s): 9235b22

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +6 -2
app.py CHANGED
@@ -2,8 +2,13 @@
2
  import subprocess
3
  import sys
4
 
5
- # Install flash-attn for GPU only
 
 
 
6
  import torch
 
 
7
  if torch.cuda.is_available():
8
  print("CUDA detected - installing flash-attn for optimal GPU performance...")
9
  subprocess.run(
@@ -13,7 +18,6 @@ if torch.cuda.is_available():
13
  )
14
 
15
  import gradio as gr
16
- import spaces
17
  from PIL import Image
18
  from io import BytesIO
19
  import pypdfium2 as pdfium
 
2
  import subprocess
3
  import sys
4
 
5
+ # CRITICAL: Import spaces FIRST before any CUDA initialization
6
+ import spaces
7
+
8
+ # Now we can import torch and other packages
9
  import torch
10
+
11
+ # Install flash-attn for GPU only (after spaces import)
12
  if torch.cuda.is_available():
13
  print("CUDA detected - installing flash-attn for optimal GPU performance...")
14
  subprocess.run(
 
18
  )
19
 
20
  import gradio as gr
 
21
  from PIL import Image
22
  from io import BytesIO
23
  import pypdfium2 as pdfium