Spaces:
Running
on
Zero
Running
on
Zero
Plat
committed on
Commit
·
48c25a2
1
Parent(s):
ad998c0
works
Browse files- app.py +1 -1
- requirements.txt +1 -1
app.py
CHANGED
|
@@ -41,6 +41,7 @@ model = load_ip_adapter_model(
|
|
| 41 |
config_path=adapter_config_path,
|
| 42 |
adapter_path=adapter_model_path,
|
| 43 |
)
|
|
|
|
| 44 |
|
| 45 |
|
| 46 |
@spaces.GPU
|
|
@@ -64,7 +65,6 @@ def on_generate(
|
|
| 64 |
seed = random.randint(0, 2147483647)
|
| 65 |
|
| 66 |
with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
|
| 67 |
-
model.to("cuda:0")
|
| 68 |
images = model.generate(
|
| 69 |
prompt=[prompt] * num_images, # batch size 4
|
| 70 |
negative_prompt=negative_prompt,
|
|
|
|
| 41 |
config_path=adapter_config_path,
|
| 42 |
adapter_path=adapter_model_path,
|
| 43 |
)
|
| 44 |
+
model.to("cuda:0")
|
| 45 |
|
| 46 |
|
| 47 |
@spaces.GPU
|
|
|
|
| 65 |
seed = random.randint(0, 2147483647)
|
| 66 |
|
| 67 |
with torch.inference_mode(), torch.autocast("cuda", dtype=torch.bfloat16):
|
|
|
|
| 68 |
images = model.generate(
|
| 69 |
prompt=[prompt] * num_images, # batch size 4
|
| 70 |
negative_prompt=negative_prompt,
|
requirements.txt
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
git+https://github.com/p1atdev/vision-ft
|
| 2 |
accelerate
|
| 3 |
diffusers
|
| 4 |
transformers
|
|
|
|
| 1 |
+
git+https://github.com/p1atdev/vision-ft@8edad06d
|
| 2 |
accelerate
|
| 3 |
diffusers
|
| 4 |
transformers
|