pva22
committed on
Commit
·
231b559
1
Parent(s):
dc08a66
add lora_scale param
Browse files
- app.py +3 -3
- methods.py +5 -5
app.py
CHANGED
|
@@ -107,8 +107,8 @@ with gr.Blocks() as demo:
|
|
| 107 |
value=7,
|
| 108 |
)
|
| 109 |
|
| 110 |
-
|
| 111 |
-
label="
|
| 112 |
minimum=0,
|
| 113 |
maximum=1,
|
| 114 |
step=0.01,
|
|
@@ -166,7 +166,7 @@ with gr.Blocks() as demo:
|
|
| 166 |
use_advanced_ip,
|
| 167 |
ip_adapter_scale,
|
| 168 |
image_upload_ip,
|
| 169 |
-
|
| 170 |
],
|
| 171 |
outputs=[result, seed],
|
| 172 |
)
|
|
|
|
| 107 |
value=7,
|
| 108 |
)
|
| 109 |
|
| 110 |
+
lora_scale = gr.Slider(
|
| 111 |
+
label="lora_scale",
|
| 112 |
minimum=0,
|
| 113 |
maximum=1,
|
| 114 |
step=0.01,
|
|
|
|
| 166 |
use_advanced_ip,
|
| 167 |
ip_adapter_scale,
|
| 168 |
image_upload_ip,
|
| 169 |
+
lora_scale
|
| 170 |
],
|
| 171 |
outputs=[result, seed],
|
| 172 |
)
|
methods.py
CHANGED
|
@@ -131,7 +131,7 @@ def infer(
|
|
| 131 |
use_advanced_ip=False,
|
| 132 |
ip_adapter_scale=None,
|
| 133 |
image_upload_ip=None,
|
| 134 |
-
|
| 135 |
|
| 136 |
model_lora_id=model_lora_default,
|
| 137 |
progress=gr.Progress(track_tqdm=True),
|
|
@@ -154,7 +154,7 @@ def infer(
|
|
| 154 |
|
| 155 |
image = pipe(prompt,
|
| 156 |
num_inference_steps=num_inference_steps,
|
| 157 |
-
|
| 158 |
guidance_scale=guidance_scale,
|
| 159 |
negative_prompt=negative_prompt,
|
| 160 |
width=width,
|
|
@@ -175,7 +175,7 @@ def infer(
|
|
| 175 |
image = pipe(prompt,
|
| 176 |
edges,
|
| 177 |
num_inference_steps = num_inference_steps,
|
| 178 |
-
|
| 179 |
controlnet_conditioning_scale=control_strength,
|
| 180 |
negative_prompt=negative_prompt,
|
| 181 |
generator=generator).images[0]
|
|
@@ -191,7 +191,7 @@ def infer(
|
|
| 191 |
image = pipe(
|
| 192 |
prompt,
|
| 193 |
ip_adapter_image=image_upload_ip,
|
| 194 |
-
|
| 195 |
num_inference_steps=num_inference_steps,
|
| 196 |
guidance_scale=guidance_scale,
|
| 197 |
generator=generator).images[0]
|
|
@@ -213,7 +213,7 @@ def infer(
|
|
| 213 |
image = pipe(prompt,
|
| 214 |
edges,
|
| 215 |
ip_adapter_image=image_upload_ip,
|
| 216 |
-
|
| 217 |
num_inference_steps=num_inference_steps,
|
| 218 |
guidance_scale=guidance_scale,
|
| 219 |
controlnet_conditioning_scale=control_strength,
|
|
|
|
| 131 |
use_advanced_ip=False,
|
| 132 |
ip_adapter_scale=None,
|
| 133 |
image_upload_ip=None,
|
| 134 |
+
lora_scale=0.95,
|
| 135 |
|
| 136 |
model_lora_id=model_lora_default,
|
| 137 |
progress=gr.Progress(track_tqdm=True),
|
|
|
|
| 154 |
|
| 155 |
image = pipe(prompt,
|
| 156 |
num_inference_steps=num_inference_steps,
|
| 157 |
+
lora_scale=lora_scale,
|
| 158 |
guidance_scale=guidance_scale,
|
| 159 |
negative_prompt=negative_prompt,
|
| 160 |
width=width,
|
|
|
|
| 175 |
image = pipe(prompt,
|
| 176 |
edges,
|
| 177 |
num_inference_steps = num_inference_steps,
|
| 178 |
+
lora_scale=lora_scale,
|
| 179 |
controlnet_conditioning_scale=control_strength,
|
| 180 |
negative_prompt=negative_prompt,
|
| 181 |
generator=generator).images[0]
|
|
|
|
| 191 |
image = pipe(
|
| 192 |
prompt,
|
| 193 |
ip_adapter_image=image_upload_ip,
|
| 194 |
+
lora_scale=lora_scale,
|
| 195 |
num_inference_steps=num_inference_steps,
|
| 196 |
guidance_scale=guidance_scale,
|
| 197 |
generator=generator).images[0]
|
|
|
|
| 213 |
image = pipe(prompt,
|
| 214 |
edges,
|
| 215 |
ip_adapter_image=image_upload_ip,
|
| 216 |
+
lora_scale=lora_scale,
|
| 217 |
num_inference_steps=num_inference_steps,
|
| 218 |
guidance_scale=guidance_scale,
|
| 219 |
controlnet_conditioning_scale=control_strength,
|