[build-system]
requires = ["setuptools >= 75.0"]
build-backend = "setuptools.build_meta"

[project]
name = "flux-schnell-edge-inference"
description = "An edge-maxxing model submission by RobertML for the 4090 Flux contest"
requires-python = ">=3.10,<3.13"
version = "8"
dependencies = [
"diffusers==0.32.2",
"transformers==4.46.2",
"accelerate==1.1.0",
"omegaconf==2.3.0",
"torch==2.6",
"protobuf==5.28.3",
"sentencepiece==0.2.0",
"edge-maxxing-pipelines @ git+https://github.com/womboai/edge-maxxing@7c760ac54f6052803dadb3ade8ebfc9679a94589#subdirectory=pipelines",
"gitpython>=3.1.43",
"hf_transfer==0.1.8",
"torchao==0.6.1",
"setuptools>=75.3.0",
]

[[tool.edge-maxxing.models]]
repository = "manbeast3b/Flux.1.Schnell-full-quant1"
revision = "e7ddf488a4ea8a3cba05db5b8d06e7e0feb826a2"

[[tool.edge-maxxing.models]]
repository = "manbeast3b/flux.1-schnell-full1"
revision = "cb1b599b0d712b9aab2c4df3ad27b050a27ec146"

[[tool.edge-maxxing.models]]
repository = "manbeast3b/Flux.1.la_schnella_transformer_aot"
revision = "56fa3ac58c770179f25f2453500a5160f1423b6c"

[[tool.edge-maxxing.models]]
repository = "sayakpaul/flux.1-dev-int8-aot-compiled"
revision = "3b4f77e9752dd278c432870d101b958c902af2c9"

[project.scripts]
start_inference = "main:main"