Masaaki Kawata committed on
Commit ·
703765f
1
Parent(s): 92fbf8b
Update parallax.py
Browse files- parallax.py +1 -1
parallax.py
CHANGED
|
@@ -34,7 +34,7 @@ LAMA_TRAIN_CFG = OmegaConf.load('big-lama/config.yaml')
|
|
| 34 |
LAMA_TRAIN_CFG['training_model']['predict_only'] = True
|
| 35 |
LAMA = load_checkpoint(LAMA_TRAIN_CFG, 'big-lama/models/best.ckpt', strict=False, map_location='cpu')
|
| 36 |
LAMA = LAMA.to('cuda' if torch.cuda.is_available() else 'cpu').eval()
|
| 37 |
-
FLUX_KONTEXT_INPAINT_PIPELINE = FluxKontextInpaintPipeline.from_pretrained('black-forest-labs/FLUX.1-Kontext-dev',
|
| 38 |
FLUX_KONTEXT_INPAINT_PIPELINE.load_lora_weights('alimama-creative/FLUX.1-Turbo-Alpha')
|
| 39 |
FLUX_KONTEXT_INPAINT_PIPELINE.fuse_lora()
|
| 40 |
optimize_pipeline_(FLUX_KONTEXT_INPAINT_PIPELINE, image=Image.new('RGB', (1024, 1024)), mask_image=Image.new('L', (512, 512)), prompt='prompt')
|
|
|
|
| 34 |
LAMA_TRAIN_CFG['training_model']['predict_only'] = True
|
| 35 |
LAMA = load_checkpoint(LAMA_TRAIN_CFG, 'big-lama/models/best.ckpt', strict=False, map_location='cpu')
|
| 36 |
LAMA = LAMA.to('cuda' if torch.cuda.is_available() else 'cpu').eval()
|
| 37 |
+
FLUX_KONTEXT_INPAINT_PIPELINE = FluxKontextInpaintPipeline.from_pretrained('black-forest-labs/FLUX.1-Kontext-dev', torch_dtype=torch.bfloat16, token=os.environ['HF_TOKEN']).to('cuda' if torch.cuda.is_available() else 'cpu')
|
| 38 |
FLUX_KONTEXT_INPAINT_PIPELINE.load_lora_weights('alimama-creative/FLUX.1-Turbo-Alpha')
|
| 39 |
FLUX_KONTEXT_INPAINT_PIPELINE.fuse_lora()
|
| 40 |
optimize_pipeline_(FLUX_KONTEXT_INPAINT_PIPELINE, image=Image.new('RGB', (1024, 1024)), mask_image=Image.new('L', (512, 512)), prompt='prompt')
|