Update app.py
Browse files
app.py
CHANGED
|
@@ -12,6 +12,19 @@ from PIL import Image
|
|
| 12 |
import numpy as np
|
| 13 |
import gradio as gr
|
| 14 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 15 |
def create_key(seed=0):
    """Build a JAX pseudo-random number generator key.

    Args:
        seed: Integer seed for the generator (defaults to 0).

    Returns:
        A jax.random.PRNGKey derived from ``seed``.
    """
    key = jax.random.PRNGKey(seed)
    return key
|
| 17 |
|
|
@@ -210,6 +223,16 @@ def create_demo(process, max_images=12, default_num_images=4):
|
|
| 210 |
show_label=False,
|
| 211 |
elem_id='gallery').style(grid=2,
|
| 212 |
height='auto')
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
inputs = [
|
| 214 |
input_image,
|
| 215 |
prompt,
|
|
|
|
| 12 |
import numpy as np
|
| 13 |
import gradio as gr
|
| 14 |
|
| 15 |
+
description = """
|
| 16 |
+
Our project uses a diffusion model to change the texture of our robotic arm simulation.
|
| 17 |
+
|
| 18 |
+
To do so, we first capture our simulated images. We then process these images to obtain Canny edge maps. Finally, we generate brand-new images by using ControlNet.
|
| 19 |
+
|
| 20 |
+
Therefore, we are able to change our simulation texture while keeping the image composition intact.
|
| 21 |
+
|
| 22 |
+
"""
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
|
| 28 |
def create_key(seed=0):
    """Return a JAX PRNG key seeded with the given integer.

    Args:
        seed: Seed value for the random stream; 0 by default.

    Returns:
        The jax.random.PRNGKey for ``seed``.
    """
    return jax.random.PRNGKey(seed)
|
| 30 |
|
|
|
|
| 223 |
show_label=False,
|
| 224 |
elem_id='gallery').style(grid=2,
|
| 225 |
height='auto')
|
| 226 |
+
|
| 227 |
+
with gr.Row():
|
| 228 |
+
gr.Markdown(description)
|
| 229 |
+
|
| 230 |
+
|
| 231 |
+
|
| 232 |
+
|
| 233 |
+
|
| 234 |
+
|
| 235 |
+
|
| 236 |
inputs = [
|
| 237 |
input_image,
|
| 238 |
prompt,
|