CraftedHue committed on
Commit
eb5e58e
·
verified ·
1 Parent(s): 55255bf

Initial commit with folder contents

Browse files
Files changed (2) hide show
  1. pyproject.toml +6 -6
  2. src/pipeline.py +3 -3
pyproject.toml CHANGED
@@ -27,16 +27,16 @@ revision = "741f7c3ce8b383c54771c7003378a50191e9efe9"
27
  exclude = ["transformer"]
28
 
29
  [[tool.edge-maxxing.models]]
30
- repository = "CraftedHue/extra0usio0"
31
- revision = "5b04ff5b00d170334f508db412d9cc9e7803617b"
32
 
33
  [[tool.edge-maxxing.models]]
34
- repository = "CraftedHue/extra1usio1"
35
- revision = "73a7cf1e0a20350fca758bba7f7be81b1ac4f52b"
36
 
37
  [[tool.edge-maxxing.models]]
38
- repository = "CraftedHue/extra2usio2"
39
- revision = "d3aed408719722124428e041f74c3e09540a6890"
40
 
41
 
42
  [project.scripts]
 
27
  exclude = ["transformer"]
28
 
29
  [[tool.edge-maxxing.models]]
30
+ repository = "CraftedHue/extra0Forg0"
31
+ revision = "5bce637251277b7c6423d01c5cace7778bbf2e9f"
32
 
33
  [[tool.edge-maxxing.models]]
34
+ repository = "CraftedHue/extra1Forg1"
35
+ revision = "312290178758e8f4147b8c188faf8fe021a6b58e"
36
 
37
  [[tool.edge-maxxing.models]]
38
+ repository = "CraftedHue/extra2Forg2"
39
+ revision = "1394f2a464629a736898fd44ea31b9b98cdee3d2"
40
 
41
 
42
  [project.scripts]
src/pipeline.py CHANGED
@@ -591,12 +591,12 @@ def load_pipeline() -> Pipeline:
591
  dtype, device = torch.bfloat16, "cuda"
592
 
593
  text_encoder_2 = T5EncoderModel.from_pretrained(
594
- "CraftedHue/extra1usio1", revision = "73a7cf1e0a20350fca758bba7f7be81b1ac4f52b", torch_dtype=torch.bfloat16
595
  ).to(memory_format=torch.channels_last)
596
 
597
- vae = AutoencoderTiny.from_pretrained("CraftedHue/extra2usio2", revision="d3aed408719722124428e041f74c3e09540a6890", torch_dtype=dtype)
598
 
599
- path = os.path.join(HF_HUB_CACHE, "models--CraftedHue--extra0usio0/snapshots/5b04ff5b00d170334f508db412d9cc9e7803617b")
600
  generator = torch.Generator(device=device)
601
  model = FluxTransformer2DModel.from_pretrained(path, torch_dtype=dtype, use_safetensors=False, generator= generator).to(memory_format=torch.channels_last)
602
  torch.backends.cudnn.benchmark = True
 
591
  dtype, device = torch.bfloat16, "cuda"
592
 
593
  text_encoder_2 = T5EncoderModel.from_pretrained(
594
+ "CraftedHue/extra1Forg1", revision = "312290178758e8f4147b8c188faf8fe021a6b58e", torch_dtype=torch.bfloat16
595
  ).to(memory_format=torch.channels_last)
596
 
597
+ vae = AutoencoderTiny.from_pretrained("CraftedHue/extra2Forg2", revision="1394f2a464629a736898fd44ea31b9b98cdee3d2", torch_dtype=dtype)
598
 
599
+ path = os.path.join(HF_HUB_CACHE, "models--CraftedHue--extra0Forg0/snapshots/5bce637251277b7c6423d01c5cace7778bbf2e9f")
600
  generator = torch.Generator(device=device)
601
  model = FluxTransformer2DModel.from_pretrained(path, torch_dtype=dtype, use_safetensors=False, generator= generator).to(memory_format=torch.channels_last)
602
  torch.backends.cudnn.benchmark = True