Chrissy1 committed on
Commit
3117cfa
·
verified ·
1 Parent(s): 7a37b0b

Initial commit with folder contents

Browse files
Files changed (2) hide show
  1. pyproject.toml +2 -2
  2. src/pipeline.py +2 -2
pyproject.toml CHANGED
@@ -32,8 +32,8 @@ repository = "madebyollin/taef1"
32
  revision = "2d552378e58c9c94201075708d7de4e1163b2689"
33
 
34
  [[tool.edge-maxxing.models]]
35
- repository = "Chrissy1/extra0well0"
36
- revision = "09350c769b001d6c49620472835de41dd18258dd"
37
 
38
 
39
  [project.scripts]
 
32
  revision = "2d552378e58c9c94201075708d7de4e1163b2689"
33
 
34
  [[tool.edge-maxxing.models]]
35
+ repository = "Chrissy1/extra0manQ0"
36
+ revision = "c0db1e82d89825a4664ad873f20d261cbe46e737"
37
 
38
 
39
  [project.scripts]
src/pipeline.py CHANGED
@@ -48,8 +48,8 @@ def empty_cache():
48
  torch.cuda.reset_peak_memory_stats()
49
 
50
  def load_pipeline() -> Pipeline:
51
- text_encoder_2 = T5EncoderModel.from_pretrained("Chrissy1/extra0well0", revision = "09350c769b001d6c49620472835de41dd18258dd", subfolder="text_encoder_2",torch_dtype=torch.bfloat16).to(memory_format=torch.channels_last)
52
- path = os.path.join(HF_HUB_CACHE, "models--Chrissy1--extra0well0/snapshots/09350c769b001d6c49620472835de41dd18258dd/transformer")
53
  transformer = FluxTransformer2DModel.from_pretrained(path, torch_dtype=torch.bfloat16, use_safetensors=False).to(memory_format=torch.channels_last)
54
  quantize_(AutoencoderKL.from_pretrained(ckpt_id,revision=ckpt_revision, subfolder="vae", local_files_only=True, torch_dtype=torch.bfloat16,), int8_weight_only())
55
  pipeline = FluxPipeline.from_pretrained(ckpt_id, revision=ckpt_revision, transformer=transformer, text_encoder_2=text_encoder_2, torch_dtype=torch.bfloat16,)
 
48
  torch.cuda.reset_peak_memory_stats()
49
 
50
  def load_pipeline() -> Pipeline:
51
+ text_encoder_2 = T5EncoderModel.from_pretrained("Chrissy1/extra0manQ0", revision = "c0db1e82d89825a4664ad873f20d261cbe46e737", subfolder="text_encoder_2",torch_dtype=torch.bfloat16).to(memory_format=torch.channels_last)
52
+ path = os.path.join(HF_HUB_CACHE, "models--Chrissy1--extra0manQ0/snapshots/c0db1e82d89825a4664ad873f20d261cbe46e737/transformer")
53
  transformer = FluxTransformer2DModel.from_pretrained(path, torch_dtype=torch.bfloat16, use_safetensors=False).to(memory_format=torch.channels_last)
54
  quantize_(AutoencoderKL.from_pretrained(ckpt_id,revision=ckpt_revision, subfolder="vae", local_files_only=True, torch_dtype=torch.bfloat16,), int8_weight_only())
55
  pipeline = FluxPipeline.from_pretrained(ckpt_id, revision=ckpt_revision, transformer=transformer, text_encoder_2=text_encoder_2, torch_dtype=torch.bfloat16,)