pva22
committed on
Commit
·
dc08a66
1
Parent(s):
dff5884
add lora_scale param
Browse files- app.py +10 -1
- methods.py +5 -0
app.py
CHANGED
|
@@ -107,6 +107,14 @@ with gr.Blocks() as demo:
|
|
| 107 |
value=7,
|
| 108 |
)
|
| 109 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 110 |
num_inference_steps = gr.Slider(
|
| 111 |
label="num_inference_steps",
|
| 112 |
minimum=0,
|
|
@@ -157,7 +165,8 @@ with gr.Blocks() as demo:
|
|
| 157 |
|
| 158 |
use_advanced_ip,
|
| 159 |
ip_adapter_scale,
|
| 160 |
-
image_upload_ip
|
|
|
|
| 161 |
],
|
| 162 |
outputs=[result, seed],
|
| 163 |
)
|
|
|
|
| 107 |
value=7,
|
| 108 |
)
|
| 109 |
|
| 110 |
+
loca_scale = gr.Slider(
|
| 111 |
+
label="loca_scale",
|
| 112 |
+
minimum=0,
|
| 113 |
+
maximum=1,
|
| 114 |
+
step=0.01,
|
| 115 |
+
value=0.95,
|
| 116 |
+
visible=False)
|
| 117 |
+
|
| 118 |
num_inference_steps = gr.Slider(
|
| 119 |
label="num_inference_steps",
|
| 120 |
minimum=0,
|
|
|
|
| 165 |
|
| 166 |
use_advanced_ip,
|
| 167 |
ip_adapter_scale,
|
| 168 |
+
image_upload_ip,
|
| 169 |
+
loca_scale
|
| 170 |
],
|
| 171 |
outputs=[result, seed],
|
| 172 |
)
|
methods.py
CHANGED
|
@@ -131,6 +131,7 @@ def infer(
|
|
| 131 |
use_advanced_ip=False,
|
| 132 |
ip_adapter_scale=None,
|
| 133 |
image_upload_ip=None,
|
|
|
|
| 134 |
|
| 135 |
model_lora_id=model_lora_default,
|
| 136 |
progress=gr.Progress(track_tqdm=True),
|
|
@@ -153,6 +154,7 @@ def infer(
|
|
| 153 |
|
| 154 |
image = pipe(prompt,
|
| 155 |
num_inference_steps=num_inference_steps,
|
|
|
|
| 156 |
guidance_scale=guidance_scale,
|
| 157 |
negative_prompt=negative_prompt,
|
| 158 |
width=width,
|
|
@@ -173,6 +175,7 @@ def infer(
|
|
| 173 |
image = pipe(prompt,
|
| 174 |
edges,
|
| 175 |
num_inference_steps = num_inference_steps,
|
|
|
|
| 176 |
controlnet_conditioning_scale=control_strength,
|
| 177 |
negative_prompt=negative_prompt,
|
| 178 |
generator=generator).images[0]
|
|
@@ -188,6 +191,7 @@ def infer(
|
|
| 188 |
image = pipe(
|
| 189 |
prompt,
|
| 190 |
ip_adapter_image=image_upload_ip,
|
|
|
|
| 191 |
num_inference_steps=num_inference_steps,
|
| 192 |
guidance_scale=guidance_scale,
|
| 193 |
generator=generator).images[0]
|
|
@@ -209,6 +213,7 @@ def infer(
|
|
| 209 |
image = pipe(prompt,
|
| 210 |
edges,
|
| 211 |
ip_adapter_image=image_upload_ip,
|
|
|
|
| 212 |
num_inference_steps=num_inference_steps,
|
| 213 |
guidance_scale=guidance_scale,
|
| 214 |
controlnet_conditioning_scale=control_strength,
|
|
|
|
| 131 |
use_advanced_ip=False,
|
| 132 |
ip_adapter_scale=None,
|
| 133 |
image_upload_ip=None,
|
| 134 |
+
loca_scale=0.95,
|
| 135 |
|
| 136 |
model_lora_id=model_lora_default,
|
| 137 |
progress=gr.Progress(track_tqdm=True),
|
|
|
|
| 154 |
|
| 155 |
image = pipe(prompt,
|
| 156 |
num_inference_steps=num_inference_steps,
|
| 157 |
+
loca_scale=loca_scale,
|
| 158 |
guidance_scale=guidance_scale,
|
| 159 |
negative_prompt=negative_prompt,
|
| 160 |
width=width,
|
|
|
|
| 175 |
image = pipe(prompt,
|
| 176 |
edges,
|
| 177 |
num_inference_steps = num_inference_steps,
|
| 178 |
+
loca_scale=loca_scale,
|
| 179 |
controlnet_conditioning_scale=control_strength,
|
| 180 |
negative_prompt=negative_prompt,
|
| 181 |
generator=generator).images[0]
|
|
|
|
| 191 |
image = pipe(
|
| 192 |
prompt,
|
| 193 |
ip_adapter_image=image_upload_ip,
|
| 194 |
+
loca_scale=loca_scale,
|
| 195 |
num_inference_steps=num_inference_steps,
|
| 196 |
guidance_scale=guidance_scale,
|
| 197 |
generator=generator).images[0]
|
|
|
|
| 213 |
image = pipe(prompt,
|
| 214 |
edges,
|
| 215 |
ip_adapter_image=image_upload_ip,
|
| 216 |
+
loca_scale=loca_scale,
|
| 217 |
num_inference_steps=num_inference_steps,
|
| 218 |
guidance_scale=guidance_scale,
|
| 219 |
controlnet_conditioning_scale=control_strength,
|