Shilpaj committed on
Commit
6ac6d43
·
verified ·
1 Parent(s): 179bc9a

Fix: Import modules within the function

Browse files
Files changed (1) hide show
  1. utils.py +3 -3
utils.py CHANGED
@@ -9,8 +9,6 @@ import torch
9
  import gc
10
  import os
11
  from PIL import Image, ImageDraw, ImageFont
12
- from diffusers import StableDiffusionPipeline
13
- from transformers import CLIPTokenizer, CLIPTextModel
14
 
15
  # Disable HF transfer to avoid download issues
16
  os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "0"
@@ -22,7 +20,9 @@ def load_models(device="cuda"):
22
  :param device: (str) Device to load models on ('cuda', 'mps', or 'cpu')
23
  :return: (tuple) (vae, tokenizer, text_encoder, unet, scheduler, pipe)
24
  """
25
- from diffusers import AutoencoderKL, LMSDiscreteScheduler, UNet2DConditionModel
 
 
26
 
27
  # Set device
28
  if device == "cuda" and not torch.cuda.is_available():
 
9
  import gc
10
  import os
11
  from PIL import Image, ImageDraw, ImageFont
 
 
12
 
13
  # Disable HF transfer to avoid download issues
14
  os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "0"
 
20
  :param device: (str) Device to load models on ('cuda', 'mps', or 'cpu')
21
  :return: (tuple) (vae, tokenizer, text_encoder, unet, scheduler, pipe)
22
  """
23
+ # Import here to avoid the cached_download issue
24
+ from diffusers import AutoencoderKL, LMSDiscreteScheduler, UNet2DConditionModel, StableDiffusionPipeline
25
+ from transformers import CLIPTokenizer, CLIPTextModel
26
 
27
  # Set device
28
  if device == "cuda" and not torch.cuda.is_available():