Commit: Update src/pipeline.py

Changed file: src/pipeline.py (+4 -2)
```diff
@@ -1127,8 +1127,10 @@ torch.backends.cuda.matmul.allow_tf32 = True
 torch.backends.cudnn.enabled = True
 torch.backends.cudnn.benchmark = True

-ckpt_id = "black-forest-labs/FLUX.1-schnell"
-ckpt_revision = "741f7c3ce8b383c54771c7003378a50191e9efe9"
+# ckpt_id = "black-forest-labs/FLUX.1-schnell"
+# ckpt_revision = "741f7c3ce8b383c54771c7003378a50191e9efe9"
+ckpt_id = "RobertML/FLUX.1-schnell-qf8"
+ckpt_revision = "f360ee74b68f38c0b8abd873d0d5800509ed62a2"
 def empty_cache():
     gc.collect()
     torch.cuda.empty_cache()
```