yeq6x committed on
Commit
b5e87d9
·
1 Parent(s): adee6df

Update alpha blending weight to 0.75 in app.py and implement drag-and-drop functionality for image input.

Browse files
Files changed (1) hide show
  1. app.py +50 -5
app.py CHANGED
@@ -152,14 +152,13 @@ def infer(
152
  num_images_per_prompt=1,
153
  ).images
154
 
155
- # Alpha blend: input (0.2) + generated (0.8)
156
  if result_images and pil_image is not None:
157
  generated_image = result_images[0]
158
  # Resize input image to match generated image size if different
159
  if pil_image.size != generated_image.size:
160
  pil_image = pil_image.resize(generated_image.size, Image.Resampling.LANCZOS)
161
- # Blend: 0.2 * input + 0.8 * generated
162
- blended_image = Image.blend(pil_image, generated_image, alpha=0.8)
163
  return blended_image, seed
164
 
165
  # Return first result image and seed
@@ -192,7 +191,53 @@ with gr.Blocks(css=css) as demo:
192
  input_image = gr.Image(label="Input Image",
193
  show_label=False,
194
  type="pil",
195
- interactive=True)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
196
 
197
  with gr.Column(scale=1):
198
  gr.Markdown("### 📤 Result")
@@ -280,4 +325,4 @@ with gr.Blocks(css=css) as demo:
280
  )
281
 
282
  if __name__ == "__main__":
283
- demo.launch(mcp_server=True)
 
152
  num_images_per_prompt=1,
153
  ).images
154
 
155
+ # Alpha blend (0.75)
156
  if result_images and pil_image is not None:
157
  generated_image = result_images[0]
158
  # Resize input image to match generated image size if different
159
  if pil_image.size != generated_image.size:
160
  pil_image = pil_image.resize(generated_image.size, Image.Resampling.LANCZOS)
161
+ blended_image = Image.blend(pil_image, generated_image, alpha=0.75)
 
162
  return blended_image, seed
163
 
164
  # Return first result image and seed
 
191
  input_image = gr.Image(label="Input Image",
192
  show_label=False,
193
  type="pil",
194
+ interactive=True,
195
+ elem_id="input-image")
196
+
197
+ gr.HTML("""
198
+ <script>
199
+ (function () {
200
+ function bindDrop() {
201
+ var root = document.getElementById("input-image");
202
+ if (!root || root.dataset.dropBound === "1") return;
203
+
204
+ var input = root.querySelector('input[type="file"]');
205
+ if (!input) return;
206
+
207
+ function prevent(e) {
208
+ e.preventDefault();
209
+ e.stopPropagation();
210
+ }
211
+
212
+ function onDrop(e) {
213
+ prevent(e);
214
+ var files = e.dataTransfer && e.dataTransfer.files;
215
+ if (!files || files.length === 0) return;
216
+
217
+ var dt = new DataTransfer();
218
+ dt.items.add(files[0]);
219
+ input.files = dt.files;
220
+ input.dispatchEvent(new Event("change", { bubbles: true }));
221
+ }
222
+
223
+ root.addEventListener("dragenter", prevent, true);
224
+ root.addEventListener("dragover", prevent, true);
225
+ root.addEventListener("drop", onDrop, true);
226
+ root.dataset.dropBound = "1";
227
+ }
228
+
229
+ var observer = new MutationObserver(function () {
230
+ bindDrop();
231
+ });
232
+ observer.observe(document.body, { childList: true, subtree: true });
233
+
234
+ window.addEventListener("load", function () {
235
+ bindDrop();
236
+ });
237
+ setTimeout(bindDrop, 1000);
238
+ })();
239
+ </script>
240
+ """)
241
 
242
  with gr.Column(scale=1):
243
  gr.Markdown("### 📤 Result")
 
325
  )
326
 
327
  if __name__ == "__main__":
328
+ demo.launch(mcp_server=True)