EGYADMIN committed on
Commit
b76db2a
·
verified ·
1 Parent(s): 2345889

Fix import error by patching is_torch_fx_available function

Browse files
Files changed (1) hide show
  1. app.py +8 -0
app.py CHANGED
@@ -2,6 +2,14 @@ import gradio as gr
2
  import torch
3
  from transformers import AutoTokenizer, AutoModelForCausalLM
4
  import os
 
 
 
 
 
 
 
 
5
 
6
  print("Starting model loading...")
7
  print(f"CUDA available: {torch.cuda.is_available()}")
 
2
  import torch
3
  from transformers import AutoTokenizer, AutoModelForCausalLM
4
  import os
5
# Compatibility shim: some transformers builds no longer export
# is_torch_fx_available; downstream imports expect it to exist.
try:
    from transformers.utils import is_torch_fx_available  # noqa: F401
except ImportError:
    print("Patching missing is_torch_fx_available function...")
    import transformers.utils

    def _is_torch_fx_available_stub():
        # Report FX tracing as unavailable so callers skip that code path.
        return False

    transformers.utils.is_torch_fx_available = _is_torch_fx_available_stub
13
 
14
  print("Starting model loading...")
15
  print(f"CUDA available: {torch.cuda.is_available()}")