Act committed: Initial commit with folder contents

Files changed:
- .gitattributes +0 -1
- pyproject.toml +7 -16
- src/main.py +3 -12
- src/pipeline.py +78 -79
- uv.lock +36 -32
.gitattributes CHANGED
@@ -33,4 +33,3 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
 *.zip filter=lfs diff=lfs merge=lfs -text
 *.zst filter=lfs diff=lfs merge=lfs -text
 *tfevents* filter=lfs diff=lfs merge=lfs -text
-RobertML.png filter=lfs diff=lfs merge=lfs -text
pyproject.toml CHANGED
@@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "flux-schnell-edge-inference"
-description = "An edge-maxxing model submission
+description = "An edge-maxxing model submission for the 4090 Flux contest"
 requires-python = ">=3.10,<3.13"
 version = "8"
 dependencies = [
@@ -15,29 +15,20 @@ dependencies = [
     "torch==2.5.1",
     "protobuf==5.28.3",
     "sentencepiece==0.2.0",
+    "torchao==0.6.1",
+    "hf_transfer==0.1.8",
     "edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
-    "gitpython>=3.1.43",
-    "torchao>=0.6.1",
 ]
 
 [[tool.edge-maxxing.models]]
-repository = "
-revision = "
-exclude = ["transformer"]
+repository = "jokerbit/flux.1-schnell-Robert-int8wo"
+revision = "5ef0012f11a863e5111ec56540302a023bc8587b"
 
-[[tool.edge-maxxing.models]]
-repository = "proact/PRO_FLUX_0load"
-revision = "22f140d15b1b2f86794cda5ffb7bc52e7b917965"
-
-[[tool.edge-maxxing.models]]
-repository = "city96/t5-v1_1-xxl-encoder-bf16"
-revision = "1b9c856aadb864af93c1dcdc226c2774fa67bc86"
 
 [[tool.edge-maxxing.models]]
-repository = "
-revision = "
+repository = "madebyollin/taef1"
+revision = "2d552378e58c9c94201075708d7de4e1163b2689"
 
 
 [project.scripts]
 start_inference = "main:main"
-
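Side note on the new `hf_transfer` dependency: huggingface_hub only uses the Rust downloader when the `HF_HUB_ENABLE_HF_TRANSFER` environment variable is set before downloads start. The snippet below is a minimal illustrative sketch of how the pinned checkpoint from `[[tool.edge-maxxing.models]]` could be pre-fetched with it; it is not part of this commit.

```python
import os

# hf_transfer only takes effect when this is set before any hub download.
os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

from huggingface_hub import snapshot_download

# Pre-fetch the pinned model declared in pyproject.toml.
snapshot_download(
    "jokerbit/flux.1-schnell-Robert-int8wo",
    revision="5ef0012f11a863e5111ec56540302a023bc8587b",
)
```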
src/main.py CHANGED
@@ -1,30 +1,21 @@
-import atexit
 from io import BytesIO
 from multiprocessing.connection import Listener
 from os import chmod, remove
 from os.path import abspath, exists
 from pathlib import Path
-from git import Repo
-import torch
 
 from PIL.JpegImagePlugin import JpegImageFile
 from pipelines.models import TextToImageRequest
-
+import torch
 from pipeline import load_pipeline, infer
 
 SOCKET = abspath(Path(__file__).parent.parent / "inferences.sock")
 
 
-def at_exit():
-    torch.cuda.empty_cache()
-
-
 def main():
-    atexit.register(at_exit)
-
     print(f"Loading pipeline")
     pipeline = load_pipeline()
-
+    generator = torch.Generator(pipeline.device)
     print(f"Pipeline loaded! , creating socket at '{SOCKET}'")
 
     if exists(SOCKET):
@@ -45,7 +36,7 @@ def main():
 
         return
 
-        image = infer(request, pipeline)
+        image = infer(request, pipeline, generator.manual_seed(request.seed))
 
         data = BytesIO()
         image.save(data, format=JpegImageFile.format)
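The seeding change above can be read in isolation: one `torch.Generator` is created on the pipeline's device when the server starts and reseeded with `request.seed` for every request, so identical seeds reproduce identical images. A minimal standalone sketch of that pattern (assumes a CUDA device is available; not part of the commit):

```python
import torch

def make_seeded_generator(device: str, seed: int) -> torch.Generator:
    # manual_seed() returns the generator itself, so it can be reseeded
    # and passed inline, exactly as main() now does per request.
    return torch.Generator(device).manual_seed(seed)

gen_a = make_seeded_generator("cuda", 42)
gen_b = make_seeded_generator("cuda", 42)
# Same seed on the same device -> identical random streams.
assert torch.equal(torch.randn(4, generator=gen_a, device="cuda"),
                   torch.randn(4, generator=gen_b, device="cuda"))
```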
src/pipeline.py CHANGED
@@ -1,93 +1,92 @@
-
-
-from
+import gc
+import os
+from typing import TypeAlias
 
-from transformers import T5EncoderModel, T5TokenizerFast, CLIPTokenizer, CLIPTextModel
 import torch
-import torch._dynamo
-import gc
-from PIL import Image as img
 from PIL.Image import Image
+from diffusers import FluxPipeline, FluxTransformer2DModel, AutoencoderKL, AutoencoderTiny
+from huggingface_hub.constants import HF_HUB_CACHE
 from pipelines.models import TextToImageRequest
 from torch import Generator
-import time
-from diffusers import FluxTransformer2DModel, DiffusionPipeline
 from torchao.quantization import quantize_, int8_weight_only
-
-
-Pipeline = None
-torch.backends.cudnn.benchmark = True
-torch.backends.cuda.matmul.allow_tf32 = True
-torch.cuda.set_per_process_memory_fraction(0.95)
-
-ckpt_id = "black-forest-labs/FLUX.1-schnell"
-def empty_cache():
-    gc.collect()
-    torch.cuda.empty_cache()
-    torch.cuda.reset_max_memory_allocated()
-    torch.cuda.reset_peak_memory_stats()
-
-
-def load_pipeline() -> Pipeline:
-    empty_cache()
-
-    dtype, device = torch.bfloat16, "cuda"
-
-    try:
-        vae = AutoencoderTiny.from_pretrained(
-            "/home/sandbox/.cache/huggingface/hub/models--proact--PRO_FLUX_1load/snapshots/91741cd838292c87e50d14d1b5d14335e180a961", torch_dtype=dtype)
-    except:
-        vae = AutoencoderTiny.from_pretrained("proact/PRO_FLUX_1load", torch_dtype=dtype)
-
+from transformers import T5EncoderModel, CLIPTextModel, logging
 
-    ############ Text Encoder ############
-    text_encoder = CLIPTextModel.from_pretrained(
-        ckpt_id, subfolder="text_encoder", torch_dtype=torch.bfloat16
-    )
-    ############ Text Encoder 2 ############
-    text_encoder_2 = T5EncoderModel.from_pretrained(
-        "city96/t5-v1_1-xxl-encoder-bf16", torch_dtype=torch.bfloat16
-    )
 
-
-
+Pipeline: TypeAlias = FluxPipeline
+torch.backends.cudnn.benchmark = True
+torch.backends.cudnn.benchmark = True
+torch._inductor.config.conv_1x1_as_mm = True
+torch._inductor.config.coordinate_descent_tuning = True
+torch._inductor.config.epilogue_fusion = False
+torch._inductor.config.coordinate_descent_check_all_directions = True
+os.environ['PYTORCH_CUDA_ALLOC_CONF']="expandable_segments:True"
+
+CHECKPOINT = "jokerbit/flux.1-schnell-Robert-int8wo"
+REVISION = "5ef0012f11a863e5111ec56540302a023bc8587b"
+
+TinyVAE = "madebyollin/taef1"
+TinyVAE_REV = "2d552378e58c9c94201075708d7de4e1163b2689"
+
+
+def load_pipeline() -> Pipeline:
+    path = os.path.join(HF_HUB_CACHE, "models--jokerbit--flux.1-schnell-Robert-int8wo/snapshots/5ef0012f11a863e5111ec56540302a023bc8587b/transformer")
+    transformer = FluxTransformer2DModel.from_pretrained(
+        path,
+        use_safetensors=False,
+        local_files_only=True,
+        torch_dtype=torch.bfloat16)
+    vae = AutoencoderTiny.from_pretrained(
+        TinyVAE,
+        revision=TinyVAE_REV,
+        local_files_only=True,
+        torch_dtype=torch.bfloat16)
+    pipeline = FluxPipeline.from_pretrained(
+        CHECKPOINT,
+        revision=REVISION,
+        transformer=transformer,
+        vae=vae,
+        local_files_only=True,
+        torch_dtype=torch.bfloat16,
     )
 
-
-    pipeline
-
-
-
-        text_encoder_2=text_encoder_2,
-        torch_dtype=dtype,
-        vae=vae
-    ).to(device)
-
-    pipeline.vae.enable_tiling()
-    pipeline.vae.enable_slicing()
-
-
-    empty_cache()
-    pipeline(prompt="untenibleness, gynecocracy, overcapitalization, demiplate, shockable", width=1024, height=1024, guidance_scale=0.0, num_inference_steps=4, max_sequence_length=256)
+    pipeline.transformer.to(memory_format=torch.channels_last)
+    pipeline.vae.to(memory_format=torch.channels_last)
+    # quantize_(pipeline.vae, int8_weight_only())
+    pipeline.vae = torch.compile(pipeline.vae, mode="reduce-overhead", fullgraph=True)
+    pipeline.to("cuda")
 
-
+    for _ in range(2):
+        pipeline("cat", num_inference_steps=4)
 
+    return pipeline
 
 @torch.inference_mode()
-def infer(request: TextToImageRequest, pipeline: Pipeline) -> Image:
+def infer(request: TextToImageRequest, pipeline: Pipeline, generator: torch.Generator) -> Image:
+
+    return pipeline(
+        request.prompt,
+        generator=generator,
+        guidance_scale=0.0,
+        num_inference_steps=4,
+        max_sequence_length=256,
+        height=request.height,
+        width=request.width,
+    ).images[0]
+
+
+if __name__ == "__main__":
+    from time import perf_counter
+    PROMPT = 'martyr, semiconformity, peregrination, quip, twineless, emotionless, tawa, depickle'
+    request = TextToImageRequest(prompt=PROMPT,
+                                 height=None,
+                                 width=None,
+                                 seed=666)
+    start_time = perf_counter()
+    pipe_ = load_pipeline()
+    stop_time = perf_counter()
+    print(f"Pipeline is loaded in {stop_time - start_time}s")
+    for _ in range(4):
+        start_time = perf_counter()
+        infer(request, pipe_)
+        stop_time = perf_counter()
+        print(f"Request in {stop_time - start_time}s")
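For context on the new loader: the transformer snapshot already contains int8 weight-only weights and is loaded with `use_safetensors=False`, presumably because torchao's quantized tensor subclasses are pickled rather than stored as safetensors. Below is a hedged sketch of how such a checkpoint can be produced offline with the same torchao API this file imports; it is illustrative only, not the script actually used to build `jokerbit/flux.1-schnell-Robert-int8wo`, and the output directory name is made up.

```python
import torch
from diffusers import FluxTransformer2DModel
from torchao.quantization import quantize_, int8_weight_only

# Load the original bf16 transformer from the base FLUX.1-schnell repository.
transformer = FluxTransformer2DModel.from_pretrained(
    "black-forest-labs/FLUX.1-schnell",
    subfolder="transformer",
    torch_dtype=torch.bfloat16,
)

# Replace linear weights in place with int8 weight-only quantized tensors.
quantize_(transformer, int8_weight_only())

# Quantized tensor subclasses are saved in pickle format (not safetensors),
# which is why the pipeline later loads them with use_safetensors=False.
transformer.save_pretrained("flux-schnell-int8wo-transformer", safe_serialization=False)
```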
uv.lock CHANGED
@@ -147,13 +147,13 @@ wheels = [
 
 [[package]]
 name = "flux-schnell-edge-inference"
-version = "
+version = "8"
 source = { editable = "." }
 dependencies = [
     { name = "accelerate" },
     { name = "diffusers" },
     { name = "edge-maxxing-pipelines" },
-    { name = "
+    { name = "hf-transfer" },
     { name = "omegaconf" },
     { name = "protobuf" },
     { name = "sentencepiece" },
@@ -167,12 +167,12 @@ requires-dist = [
     { name = "accelerate", specifier = "==1.1.0" },
     { name = "diffusers", specifier = "==0.31.0" },
     { name = "edge-maxxing-pipelines", git = "https://github.com/womboai/edge-maxxing?subdirectory=pipelines&rev=7c760ac54f6052803dadb3ade8ebfc9679a94589#7c760ac54f6052803dadb3ade8ebfc9679a94589" },
-    { name = "
+    { name = "hf-transfer", specifier = "==0.1.8" },
     { name = "omegaconf", specifier = "==2.3.0" },
     { name = "protobuf", specifier = "==5.28.3" },
     { name = "sentencepiece", specifier = "==0.2.0" },
     { name = "torch", specifier = "==2.5.1" },
-    { name = "torchao", specifier = "
+    { name = "torchao", specifier = "==0.6.1" },
     { name = "transformers", specifier = "==4.46.2" },
 ]
 
@@ -186,27 +186,40 @@ wheels = [
 ]
 
 [[package]]
-name = "
-version = "
+name = "hf-transfer"
+version = "0.1.8"
 source = { registry = "https://pypi.org/simple" }
-
-    { name = "smmap" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/19/0d/bbb5b5ee188dec84647a4664f3e11b06ade2bde568dbd489d9d64adef8ed/gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b", size = 394469 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/fd/5b/8f0c4a5bb9fd491c277c21eff7ccae71b47d43c4446c9d0c6cff2fe8c2c4/gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4", size = 62721 },
-]
-
-[[package]]
-name = "gitpython"
-version = "3.1.43"
-source = { registry = "https://pypi.org/simple" }
-dependencies = [
-    { name = "gitdb" },
-]
-sdist = { url = "https://files.pythonhosted.org/packages/b6/a1/106fd9fa2dd989b6fb36e5893961f82992cf676381707253e0bf93eb1662/GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c", size = 214149 }
+sdist = { url = "https://files.pythonhosted.org/packages/d3/0e/ba51e31148f0a9bc8d44878086535c2dc6d9a8dce321250e9bcdd3c110ea/hf_transfer-0.1.8.tar.gz", hash = "sha256:26d229468152e7a3ec12664cac86b8c2800695fd85f9c9a96677a775cc04f0b3", size = 23595 }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/
+    { url = "https://files.pythonhosted.org/packages/4f/eb/469e68c4259c4f4ad8e00967ad2f72ff1ba5e2712b4e1093e3e03c5cbc3d/hf_transfer-0.1.8-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:70858f9e94286738ed300484a45beb5cfee6a7ddac4c5886f9c6fce7823ac5ab", size = 1422386 },
+    { url = "https://files.pythonhosted.org/packages/bd/3d/5e8966b47aa86cd50f2017c76c2634aa09a437224567f379bc28d6580d7c/hf_transfer-0.1.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:38adc73f0a8526319d90f7cc5dc2d5e4bb66f487a513d94b98aa6725be732e4a", size = 1406027 },
+    { url = "https://files.pythonhosted.org/packages/61/e0/fd5f849ed7b2bf9b2bb008f3df3ee5a8773ca98362302833708cce26c337/hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:44d2f0c08198d8d899fe9d66e86aee2dd844bd7ce33888f261373fcec81d2a54", size = 3781136 },
+    { url = "https://files.pythonhosted.org/packages/d5/e9/fad10fb8b04c91cb8775b850f2bc578a1fb6168e2ab2b04ebb8525466159/hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1de2a4ef36f9e60b3d3bec00193c0aafd75771709f2ca51b9b162373f5af3d32", size = 3099910 },
+    { url = "https://files.pythonhosted.org/packages/8c/ae/8a608949a87280ed14f0f5e0adbeccab54a7ea3d3aabdf77ec38544dd44f/hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e319269e3606a5ff2979296841766649ac73598a4a8eee2a968f86c8071fea5a", size = 3589277 },
+    { url = "https://files.pythonhosted.org/packages/81/ca/855ea35c9f997b500acd1baf6d6920ead00a0b7a8fccdcac74fe7e4f66d9/hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0f6026cf3be6a53ea42f92172f60c1c0675baaa9073f865e671b661dde5fd157", size = 3409983 },
+    { url = "https://files.pythonhosted.org/packages/5e/89/863f333b49603cc8d3c8862a428cc8fbaa9388ac8f076e9fa5ef3e729c3c/hf_transfer-0.1.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f865c33ada5bd3650c2b46e59979f2d7755c3f517f8d0facc78576a0c7d26406", size = 3562732 },
+    { url = "https://files.pythonhosted.org/packages/95/93/8137b83bd4ca6b1b4dab36e42af8c19d62c98ff8837306429547a92cbde0/hf_transfer-0.1.8-cp310-none-win32.whl", hash = "sha256:2054730e8d8ed21917c64be7199e06424b2bd08df1c43a72766afaed7992f2d3", size = 1129924 },
+    { url = "https://files.pythonhosted.org/packages/da/36/7583964f7cb0671071488f358dd388a8ef21f3a9bfe2e3596dac199010fc/hf_transfer-0.1.8-cp310-none-win_amd64.whl", hash = "sha256:2b4f1a9446ba31170b5b1eca4e916504d18378a6b5fe959896bdac8a736a5ecb", size = 1209808 },
+    { url = "https://files.pythonhosted.org/packages/72/94/d1c3d383536051f61a5d1d50bbc848a5c165d67d94bde0286ea343d5e00a/hf_transfer-0.1.8-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:e27c15fcc5869ad7e52bbc0bdec6106b288d1c463f8d2da92f28615a3b181361", size = 1422132 },
+    { url = "https://files.pythonhosted.org/packages/a0/a0/d10411151752499381052dbaf99fcbaefa8aaa3b5912b0535eea92d4699c/hf_transfer-0.1.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:871a0032d011ebc6409a73a8406b98b84ff2cd3ed7d9e1af8cdf4d660b9fab9b", size = 1405922 },
+    { url = "https://files.pythonhosted.org/packages/85/df/70543e805988b8a1085830e7f5ca290cc7a72c869b4ac2be1a4b619435aa/hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:686fa756e1e0214bb6327d33c66732c52274d94a8460beb50604ad988b391cf6", size = 3780881 },
+    { url = "https://files.pythonhosted.org/packages/93/c9/6920e63df88b2acaa3a4b0b616edca476ef8525d38d6f71437c0c9992b5d/hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:36a03b1b2911b0cf15b1b9d971a34b32dadcc4f2fd979aaff5979d6ce4017c34", size = 3099659 },
+    { url = "https://files.pythonhosted.org/packages/7d/b0/f2a85771491de8f887e71ba8769d9fa15c53cadf4c0959954735f5f6e71b/hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:079db90c81f41f4cf3227dfaaa855a9b8e9aef45bc7c2be29ce7232cd83ff881", size = 3588878 },
+    { url = "https://files.pythonhosted.org/packages/d8/36/cf7bd093988bdb530abbbfddd4cac80e3ccee4d80454af24fc0913bf2033/hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac08a4524127fdd14c234d4bcbe49d1c498acf5335c781714823179bcc8dc039", size = 3409342 },
+    { url = "https://files.pythonhosted.org/packages/30/61/b38643f305e1f0f76c8894cec38d5d39d0d6265a75cc9de0a94917ddff3d/hf_transfer-0.1.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:837432e73cb17274a6782b6216e8ce058aa325a475dc44a5a6a753d48b86d18a", size = 3562382 },
+    { url = "https://files.pythonhosted.org/packages/cd/66/723bc1eeca445a1ce5cf72026f45f8a7ae656a1e47fce026cca92e31dbd5/hf_transfer-0.1.8-cp311-none-win32.whl", hash = "sha256:b180f9823dde35aba9bc0f1d0c04ac8a873baebd3732a7ffe4f11940abc7df0d", size = 1129916 },
+    { url = "https://files.pythonhosted.org/packages/dd/7e/139527d276416bdeb08546cdcbd6f3e02326f3a6a6c2f00c71300a709e71/hf_transfer-0.1.8-cp311-none-win_amd64.whl", hash = "sha256:37907d2135cebcf8b6d419bb575148d89c224f16b69357f027bd29d0e85c6529", size = 1209794 },
+    { url = "https://files.pythonhosted.org/packages/5b/d6/54c9ea16c782cb79cdae78500c0a4bc7474236f94537ee954771e6e86c8c/hf_transfer-0.1.8-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:baf948f4f493949309cbe60529620b9b0aef854a22b6e526753364acc57c09b6", size = 1424195 },
+    { url = "https://files.pythonhosted.org/packages/63/57/09e2aa7fa63bc640d9c3fda2cc724744b46227d239bb4ae9bf33efc338c2/hf_transfer-0.1.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0bce5c8bdefa478c5d5eaa646cc4ce1df5cfe764d98572ad0c6b8773e98d49f6", size = 1408105 },
+    { url = "https://files.pythonhosted.org/packages/19/72/f247f9632410d8b9655332b2007924557c293094ea91648336f49403afe7/hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54d6f8a1a86128d651a3799e1267c343d60f81f2c565d7c5416eb8e674e4cf0e", size = 3782066 },
+    { url = "https://files.pythonhosted.org/packages/d0/cf/8eccb6fcff8eedd79334ffaf65c44109e8bece1ecc232c1036de697d51fa/hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f79fd1b0c2ed93efb4c5f684118d7a762ecdd218e170df8208c4e13d3dcd4959", size = 3103992 },
+    { url = "https://files.pythonhosted.org/packages/23/e8/f5d4ef6febc9ece1099e1f8de64f05f4d9f5b62461c4e54aac324a94d1ab/hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:414df35692670683bf5623498ef9d88a8df5d77e9516515da6e2b34d1054c11f", size = 3590083 },
+    { url = "https://files.pythonhosted.org/packages/aa/de/cd8b36ecfd1c40119f307cb0dfd4ca5cd437beb8c92219d52a4253e0059a/hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3c9798d5f951f66b96d40a7a53910260cb5874fda56cf5944dddb7c571f37ec3", size = 3406261 },
+    { url = "https://files.pythonhosted.org/packages/37/7f/914b684779dae9d2db4cdb6efa50426da7411754d820b8ddc9c10eef5042/hf_transfer-0.1.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:060c661691f85a61392e57579c80eb64b5ee277434e81fb582f605c1c8ff05d5", size = 3560705 },
+    { url = "https://files.pythonhosted.org/packages/de/17/e9ff11be0ab52d113091462f65fa280bd5c04c80e5b1dadb7f8de9645848/hf_transfer-0.1.8-cp312-none-win32.whl", hash = "sha256:f7840e32379820c3e1571a480238e05ea043e970c99d2e999578004a2eb17788", size = 1130448 },
+    { url = "https://files.pythonhosted.org/packages/58/60/04c18bbeb46cc2dc6fd237323c03f2e4c700bca122f28567dbb344ff5bab/hf_transfer-0.1.8-cp312-none-win_amd64.whl", hash = "sha256:9a3204ec423cc5e659872e8179f8704ad9ce2abb1e6a991f8838aedf1dc07830", size = 1206317 },
+    { url = "https://files.pythonhosted.org/packages/ae/e1/647dbd310042c11638ef330060777084f3394a82adc8274624b0f0601198/hf_transfer-0.1.8-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:928ff036c3e98e10dcfbdb4fcdfc4592d37a5cc8e365a7ba8dfd4337e849d675", size = 3591149 },
+    { url = "https://files.pythonhosted.org/packages/13/c4/aaf060b26e720a7b4cb90d7f02dc18a56b18894cbd72fb610f75b11fb9dc/hf_transfer-0.1.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d49ba3ce67035f460ae1924fe2feafec155cb535eec7f31ed5109c19064cd294", size = 3564510 },
 ]
 
 [[package]]
@@ -842,15 +855,6 @@ wheels = [
     { url = "https://files.pythonhosted.org/packages/90/12/282ee9bce8b58130cb762fbc9beabd531549952cac11fc56add11dcb7ea0/setuptools-75.3.0-py3-none-any.whl", hash = "sha256:f2504966861356aa38616760c0f66568e535562374995367b4e69c7143cf6bcd", size = 1251070 },
 ]
 
-[[package]]
-name = "smmap"
-version = "5.0.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/88/04/b5bf6d21dc4041000ccba7eb17dd3055feb237e7ffc2c20d3fae3af62baa/smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62", size = 22291 }
-wheels = [
-    { url = "https://files.pythonhosted.org/packages/a7/a5/10f97f73544edcdef54409f1d839f6049a0d79df68adbc1ceb24d1aaca42/smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da", size = 24282 },
-]
-
 [[package]]
 name = "sympy"
 version = "1.13.1"