EGYADMIN committed
Commit ef80b0e · verified · 1 Parent(s): b76db2a

Apply patch BEFORE transformers import to fix is_torch_fx_available error
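The fix relies on Python's module caching: because `sys.modules` keeps the first-loaded copy of `transformers.utils.import_utils`, setting the attribute on that module before any `from transformers import ...` runs should guarantee the symbol exists by the time dependent code (including the model's `trust_remote_code` files) looks it up. A minimal sketch of that patch-before-import pattern, assuming a hypothetical `ensure_attr` helper that is not part of this repo (only the module path and symbol name come from the diff below):

```python
# Sketch of the patch-before-import pattern used in this commit.
# Assumption: `ensure_attr` is a hypothetical helper, not in app.py;
# the module/attribute names are taken from the diff below.
import importlib

def ensure_attr(module_name: str, attr: str, fallback):
    """Import the module and add `attr` only if this version lacks it."""
    mod = importlib.import_module(module_name)
    if not hasattr(mod, attr):
        setattr(mod, attr, fallback)
    return mod

# Must run before `from transformers import ...` anywhere in the process.
ensure_attr("transformers.utils.import_utils",
            "is_torch_fx_available",
            lambda: False)  # conservative default: report torch.fx as unavailable

from transformers import AutoTokenizer, AutoModelForCausalLM  # noqa: E402
```

Unlike the committed hunk, which overwrites the attribute unconditionally, the `hasattr` guard leaves a real implementation intact on transformers versions that still ship one. Note also that the hunk adds `import sys` and `import importlib` without using either in the lines shown.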

Files changed (1)
  1. app.py +11 -9
app.py CHANGED
```diff
@@ -1,15 +1,17 @@
 import gradio as gr
 import torch
-from transformers import AutoTokenizer, AutoModelForCausalLM
 import os
-# Patch for missing is_torch_fx_available function
-try:
-    from transformers.utils import is_torch_fx_available
-except ImportError:
-    print("Patching missing is_torch_fx_available function...")
-    import transformers.utils
-    transformers.utils.is_torch_fx_available = lambda: False
 
+# CRITICAL: Patch must be applied BEFORE importing transformers
+import sys
+import importlib
+
+# Monkey-patch transformers.utils.import_utils to add missing function
+import transformers.utils.import_utils
+transformers.utils.import_utils.is_torch_fx_available = lambda: False
+print("Patched is_torch_fx_available function before transformers import")
+
+from transformers import AutoTokenizer, AutoModelForCausalLM
 
 print("Starting model loading...")
 print(f"CUDA available: {torch.cuda.is_available()}")
@@ -26,7 +28,7 @@ tokenizer = AutoTokenizer.from_pretrained(model_name, trust_remote_code=True)
 # Load model with automatic device mapping for multi-GPU support
 model = AutoModelForCausalLM.from_pretrained(
     model_name,
-    torch_dtype=torch.float16,
+    dtype=torch.float16,
     device_map="auto",
     trust_remote_code=True
 )
```
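The second hunk also switches the precision argument from `torch_dtype=` to `dtype=`: recent transformers releases accept `dtype=` in `from_pretrained` and deprecate the older `torch_dtype=` spelling, while older releases only recognize `torch_dtype=`. A version-tolerant sketch, where the 4.56.0 cutover and the `model_name` placeholder are assumptions not stated in the commit:

```python
# Hedged sketch: pick the kwarg name based on the installed transformers.
# Assumption: the dtype= spelling landed around transformers 4.56; verify
# against the release notes for the version actually pinned in this Space.
import torch
import transformers
from packaging import version

# Placeholder: the real model id is set earlier in app.py, not in this diff.
model_name = "org/model-id"

if version.parse(transformers.__version__) >= version.parse("4.56.0"):
    precision_kwargs = {"dtype": torch.float16}        # newer spelling (as in this commit)
else:
    precision_kwargs = {"torch_dtype": torch.float16}  # older spelling

model = transformers.AutoModelForCausalLM.from_pretrained(
    model_name,
    device_map="auto",       # shard layers across whatever GPUs are visible
    trust_remote_code=True,
    **precision_kwargs,
)
```

Either spelling loads the weights in float16; `device_map="auto"` then places the layers across available devices, which is what the "multi-GPU support" comment in app.py refers to.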