Upload folder using huggingface_hub
Browse files
- pyproject.toml +2 -3
- src/pipeline.py +7 -12
- uv.lock +0 -17
pyproject.toml
CHANGED
|
@@ -18,12 +18,11 @@ dependencies = [
|
|
| 18 |
"edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
|
| 19 |
"torchao>=0.6.1",
|
| 20 |
"ipython>=8.29.0",
|
| 21 |
-
"setuptools >= 75.0"
|
| 22 |
-
"xformers>=0.0.28.post3",
|
| 23 |
]
|
| 24 |
|
| 25 |
[tool.edge-maxxing]
|
| 26 |
-
models = ["jokerbit/flux.1-schnell"]
|
| 27 |
|
| 28 |
[project.scripts]
|
| 29 |
start_inference = "main:main"
|
|
|
|
| 18 |
"edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
|
| 19 |
"torchao>=0.6.1",
|
| 20 |
"ipython>=8.29.0",
|
| 21 |
+
"setuptools >= 75.0"
|
|
|
|
| 22 |
]
|
| 23 |
|
| 24 |
[tool.edge-maxxing]
|
| 25 |
+
models = ["jokerbit/flux.1-schnell-city96"]
|
| 26 |
|
| 27 |
[project.scripts]
|
| 28 |
start_inference = "main:main"
|
src/pipeline.py
CHANGED
|
@@ -13,7 +13,7 @@ from time import perf_counter
|
|
| 13 |
|
| 14 |
HOME = os.environ["HOME"]
|
| 15 |
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:False,garbage_collection_threshold:0.01"
|
| 16 |
-
FLUX_CHECKPOINT = "jokerbit/flux.1-schnell"
|
| 17 |
torch.backends.cudnn.benchmark = True
|
| 18 |
torch.backends.cuda.matmul.allow_tf32 = True
|
| 19 |
torch.cuda.set_per_process_memory_fraction(0.99)
|
|
@@ -30,27 +30,22 @@ def empty_cache():
|
|
| 30 |
|
| 31 |
|
| 32 |
def load_pipeline() -> FluxPipeline:
|
| 33 |
-
empty_cache()
|
| 34 |
-
|
| 35 |
-
text_encoder_2 = T5EncoderModel.from_pretrained(
    "city96/t5-v1_1-xxl-encoder-bf16", torch_dtype=DTYPE
|
| 36 |
-
)
|
| 37 |
-
pipe = FluxPipeline.from_pretrained(FLUX_CHECKPOINT,
|
| 38 |
-
text_encoder_2=text_encoder_2,
|
| 39 |
torch_dtype=DTYPE)
|
| 40 |
pipe.text_encoder.to(memory_format=torch.channels_last)
|
| 41 |
pipe.text_encoder_2.to(memory_format=torch.channels_last)
|
| 42 |
-
pipe.transformer.to(memory_format=torch.channels_last)
|
|
|
|
| 43 |
pipe.vae.to(memory_format=torch.channels_last)
|
| 44 |
pipe.vae = torch.compile(pipe.vae, mode="reduce-overhead")
|
| 45 |
-
pipe.
|
| 46 |
-
pipe.vae.enable_slicing()
|
| 47 |
-
pipe._exclude_from_cpu_offload = ["text_encoder"]
|
| 48 |
pipe.enable_sequential_cpu_offload()
|
| 49 |
|
| 50 |
prompt = 'martyr, semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
|
| 51 |
|
| 52 |
|
| 53 |
-
for _ in range(
|
| 54 |
empty_cache()
|
| 55 |
pipe(prompt, guidance_scale=0., max_sequence_length=256, num_inference_steps=4)
|
| 56 |
empty_cache()
|
|
|
|
| 13 |
|
| 14 |
HOME = os.environ["HOME"]
|
| 15 |
os.environ["PYTORCH_CUDA_ALLOC_CONF"] = "expandable_segments:False,garbage_collection_threshold:0.01"
|
| 16 |
+
FLUX_CHECKPOINT = "jokerbit/flux.1-schnell-city96"
|
| 17 |
torch.backends.cudnn.benchmark = True
|
| 18 |
torch.backends.cuda.matmul.allow_tf32 = True
|
| 19 |
torch.cuda.set_per_process_memory_fraction(0.99)
|
|
|
|
| 30 |
|
| 31 |
|
| 32 |
def load_pipeline() -> FluxPipeline:
|
| 33 |
+
empty_cache()
|
| 34 |
+
pipe = FluxPipeline.from_pretrained(FLUX_CHECKPOINT,
|
|
|
|
|
|
|
|
|
|
|
|
|
| 35 |
torch_dtype=DTYPE)
|
| 36 |
pipe.text_encoder.to(memory_format=torch.channels_last)
|
| 37 |
pipe.text_encoder_2.to(memory_format=torch.channels_last)
|
| 38 |
+
pipe.transformer.to(memory_format=torch.channels_last)
|
| 39 |
+
|
| 40 |
pipe.vae.to(memory_format=torch.channels_last)
|
| 41 |
pipe.vae = torch.compile(pipe.vae, mode="reduce-overhead")
|
| 42 |
+
pipe._exclude_from_cpu_offload = ["vae"]
|
|
|
|
|
|
|
| 43 |
pipe.enable_sequential_cpu_offload()
|
| 44 |
|
| 45 |
prompt = 'martyr, semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
|
| 46 |
|
| 47 |
|
| 48 |
+
for _ in range(2):
|
| 49 |
empty_cache()
|
| 50 |
pipe(prompt, guidance_scale=0., max_sequence_length=256, num_inference_steps=4)
|
| 51 |
empty_cache()
|
uv.lock
CHANGED
|
@@ -200,7 +200,6 @@ dependencies = [
|
|
| 200 |
{ name = "torch" },
|
| 201 |
{ name = "torchao" },
|
| 202 |
{ name = "transformers" },
|
| 203 |
-
{ name = "xformers" },
|
| 204 |
]
|
| 205 |
|
| 206 |
[package.metadata]
|
|
@@ -216,7 +215,6 @@ requires-dist = [
|
|
| 216 |
{ name = "torch", specifier = "==2.5.1" },
|
| 217 |
{ name = "torchao", specifier = ">=0.6.1" },
|
| 218 |
{ name = "transformers", specifier = "==4.46.2" },
|
| 219 |
-
{ name = "xformers", specifier = ">=0.0.28.post3" },
|
| 220 |
]
|
| 221 |
|
| 222 |
[[package]]
|
|
@@ -1192,21 +1190,6 @@ wheels = [
|
|
| 1192 |
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
|
| 1193 |
]
|
| 1194 |
|
| 1195 |
-
[[package]]
|
| 1196 |
-
name = "xformers"
|
| 1197 |
-
version = "0.0.28.post3"
|
| 1198 |
-
source = { registry = "https://pypi.org/simple" }
|
| 1199 |
-
dependencies = [
|
| 1200 |
-
{ name = "numpy" },
|
| 1201 |
-
{ name = "torch" },
|
| 1202 |
-
]
|
| 1203 |
-
sdist = { url = "https://files.pythonhosted.org/packages/00/d8/7301b2044e29b384b6ec009ed37002f4df48906a2a772654e8386fa3b730/xformers-0.0.28.post3.tar.gz", hash = "sha256:c7a2392c874dfd8f38b73e14492baf048a4f50f77ddf522bfcf6ebf5ee84d567", size = 7758532 }
|
| 1204 |
-
wheels = [
|
| 1205 |
-
{ url = "https://files.pythonhosted.org/packages/02/56/d1a86f2c4a5a80e1f4926eef1ba69d6eb77ae823d36da7860984ca0b3421/xformers-0.0.28.post3-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:648483325366fb3c6a42246f99646101d3cfd678725b3ffc50a4708a222ae973", size = 16715821 },
|
| 1206 |
-
{ url = "https://files.pythonhosted.org/packages/9c/4b/00b1749055a083b238e93a950291d09085a91d5cb677b2ebcdb28337f45e/xformers-0.0.28.post3-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:825563e129f6b22885c2a91e464ff617382f4d78876cc92d96ff618e875aaee3", size = 16715691 },
|
| 1207 |
-
{ url = "https://files.pythonhosted.org/packages/01/ba/048171c15dfd4f9bff63aaf6e93586ea1ea3e14cc66cd2cea59a50fc2047/xformers-0.0.28.post3-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:c550f72bb4e55b67bd847e9272b7f41d27ac82b6b99f35a710a1292f2f218a3a", size = 16714914 },
|
| 1208 |
-
]
|
| 1209 |
-
|
| 1210 |
[[package]]
|
| 1211 |
name = "zipp"
|
| 1212 |
version = "3.20.2"
|
|
|
|
| 200 |
{ name = "torch" },
|
| 201 |
{ name = "torchao" },
|
| 202 |
{ name = "transformers" },
|
|
|
|
| 203 |
]
|
| 204 |
|
| 205 |
[package.metadata]
|
|
|
|
| 215 |
{ name = "torch", specifier = "==2.5.1" },
|
| 216 |
{ name = "torchao", specifier = ">=0.6.1" },
|
| 217 |
{ name = "transformers", specifier = "==4.46.2" },
|
|
|
|
| 218 |
]
|
| 219 |
|
| 220 |
[[package]]
|
|
|
|
| 1190 |
{ url = "https://files.pythonhosted.org/packages/fd/84/fd2ba7aafacbad3c4201d395674fc6348826569da3c0937e75505ead3528/wcwidth-0.2.13-py2.py3-none-any.whl", hash = "sha256:3da69048e4540d84af32131829ff948f1e022c1c6bdb8d6102117aac784f6859", size = 34166 },
|
| 1191 |
]
|
| 1192 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1193 |
[[package]]
|
| 1194 |
name = "zipp"
|
| 1195 |
version = "3.20.2"
|