CSB261 committed on
Commit
6e1b9a1
·
verified ·
1 Parent(s): 36cdba8

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +55 -21
app.py CHANGED
@@ -2,6 +2,7 @@ import gradio as gr
2
  import cv2
3
  import numpy as np
4
  from PIL import Image
 
5
 
6
  def process_image(image,
7
  convert_bw,
@@ -11,6 +12,12 @@ def process_image(image,
11
  brightness,
12
  contrast,
13
  saturation):
 
 
 
 
 
 
14
  # Convert PIL Image to OpenCV format
15
  img = np.array(image)
16
  img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
@@ -57,10 +64,27 @@ def process_image(image,
57
  transformed_image = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
58
  transformed_image = Image.fromarray(transformed_image)
59
 
60
- return transformed_image
 
 
 
 
 
 
 
 
 
61
 
62
- def compare_images(original, transformed):
63
- return transformed
 
 
 
 
 
 
 
 
64
 
65
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค ๊ตฌ์„ฑ
66
  def create_interface():
@@ -68,7 +92,7 @@ def create_interface():
68
  gr.Markdown("## ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜")
69
 
70
  with gr.Row():
71
- with gr.Column():
72
  input_image = gr.Image(type="pil", label="์›๋ณธ ์ด๋ฏธ์ง€ ์—…๋กœ๋“œ")
73
  convert_bw = gr.Checkbox(label="ํ‘๋ฐฑ์œผ๋กœ ๋ณ€ํ™˜", value=True)
74
  denoise = gr.Checkbox(label="๋…ธ์ด์ฆˆ ์ œ๊ฑฐ", value=False)
@@ -78,35 +102,45 @@ def create_interface():
78
  contrast = gr.Slider(label="๋Œ€๋น„ ์กฐ์ •", minimum=0.5, maximum=1.5, step=0.1, value=1.0)
79
  saturation = gr.Slider(label="์ฑ„๋„ ์กฐ์ •", minimum=0.0, maximum=2.0, step=0.1, value=1.0)
80
  submit = gr.Button("๋ณ€ํ™˜ํ•˜๊ธฐ")
 
 
 
 
81
 
82
- with gr.Column():
83
- output_image = gr.Image(type="pil", label="๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€")
84
- download = gr.Button("JPG๋กœ ๋‹ค์šด๋กœ๋“œ")
85
 
 
86
  submit.click(
87
  fn=process_image,
88
  inputs=[input_image, convert_bw, denoise, sharpen, gamma, brightness, contrast, saturation],
89
- outputs=output_image
90
  )
91
 
92
- download.click(
93
- fn=lambda img: img.save("transformed_image.jpg"),
94
- inputs=output_image,
95
- outputs=None
 
96
  )
97
 
98
- gr.Markdown("""
99
- ### ์ด๋ฏธ์ง€ ๋น„๊ต
100
- ์Šฌ๋ผ์ด๋”๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์›๋ณธ ์ด๋ฏธ์ง€์™€ ๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€๋ฅผ ๋น„๊ตํ•  ์ˆ˜ ์žˆ์Šต๋‹ˆ๋‹ค.
101
- """)
102
 
103
  with gr.Row():
104
- original_display = gr.Image(type="pil", label="์›๋ณธ ์ด๋ฏธ์ง€", interactive=False)
105
- transformed_display = gr.Image(type="pil", label="๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€", interactive=False)
 
 
 
 
 
 
 
 
106
 
107
- input_image.change(lambda img: (img, process_image(img, convert_bw.value, denoise.value, sharpen.value, gamma.value, brightness.value, contrast.value, saturation.value)),
108
- inputs=input_image,
109
- outputs=[original_display, transformed_display])
110
 
111
  return demo
112
 
 
2
  import cv2
3
  import numpy as np
4
  from PIL import Image
5
+ from io import BytesIO
6
 
7
  def process_image(image,
8
  convert_bw,
 
12
  brightness,
13
  contrast,
14
  saturation):
15
+ """
16
+ ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ํ•จ์ˆ˜
17
+ """
18
+ # ์›๋ณธ ์ด๋ฏธ์ง€ ๋ณต์‚ฌ
19
+ original_image = image.copy()
20
+
21
  # Convert PIL Image to OpenCV format
22
  img = np.array(image)
23
  img = cv2.cvtColor(img, cv2.COLOR_RGB2BGR)
 
64
  transformed_image = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
65
  transformed_image = Image.fromarray(transformed_image)
66
 
67
+ return transformed_image, original_image, transformed_image
68
+
69
def blend_images(original, transformed, alpha):
    """
    Alpha-blend the original and transformed PIL images.

    Produces Image.blend(original, transformed, alpha): a weighted mix in
    which alpha=0.0 yields the original and alpha=1.0 the transformed image.

    Parameters:
        original: PIL.Image.Image or None — the untouched input image.
        transformed: PIL.Image.Image or None — the processed image.
        alpha: float in [0.0, 1.0] — blend weight toward `transformed`.

    Returns:
        The blended PIL image, or None when either input is missing.
    """
    if original is not None and transformed is not None:
        return Image.blend(original, transformed, alpha)
    return None
77
 
78
def download_image(transformed):
    """
    Save the transformed image as a JPEG file and return its path.

    This function is wired as the handler of a gr.DownloadButton, whose
    output value must be a file path (str) — returning an in-memory
    BytesIO (as the previous implementation did) is not accepted by
    Gradio and breaks the download. Writing to a named temporary file
    and returning its path makes the button serve the JPEG correctly.

    Parameters:
        transformed: PIL.Image.Image or None — the processed image to export.

    Returns:
        str path to the saved .jpg file, or None when no image is available.
    """
    if transformed is None:
        return None

    # Local imports keep this helper self-contained.
    import os
    import tempfile

    # JPEG cannot encode alpha or palette modes; normalize to RGB first.
    # NOTE(review): process_image appears to emit RGB already (cv2 BGR2RGB
    # round-trip) — this is a safeguard, confirm against the pipeline.
    if getattr(transformed, "mode", "RGB") != "RGB":
        transformed = transformed.convert("RGB")

    # mkstemp creates the file securely; close the fd so PIL can reopen it.
    fd, path = tempfile.mkstemp(suffix=".jpg")
    os.close(fd)
    transformed.save(path, format="JPEG")
    return path
88
 
89
  # Gradio ์ธํ„ฐํŽ˜์ด์Šค ๊ตฌ์„ฑ
90
  def create_interface():
 
92
  gr.Markdown("## ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ์• ํ”Œ๋ฆฌ์ผ€์ด์…˜")
93
 
94
  with gr.Row():
95
+ with gr.Column(scale=1):
96
  input_image = gr.Image(type="pil", label="์›๋ณธ ์ด๋ฏธ์ง€ ์—…๋กœ๋“œ")
97
  convert_bw = gr.Checkbox(label="ํ‘๋ฐฑ์œผ๋กœ ๋ณ€ํ™˜", value=True)
98
  denoise = gr.Checkbox(label="๋…ธ์ด์ฆˆ ์ œ๊ฑฐ", value=False)
 
102
  contrast = gr.Slider(label="๋Œ€๋น„ ์กฐ์ •", minimum=0.5, maximum=1.5, step=0.1, value=1.0)
103
  saturation = gr.Slider(label="์ฑ„๋„ ์กฐ์ •", minimum=0.0, maximum=2.0, step=0.1, value=1.0)
104
  submit = gr.Button("๋ณ€ํ™˜ํ•˜๊ธฐ")
105
+
106
+ # State๋ฅผ ์‚ฌ์šฉํ•˜์—ฌ ์›๋ณธ ๋ฐ ๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€ ์ €์žฅ
107
+ original_state = gr.State()
108
+ transformed_state = gr.State()
109
 
110
+ with gr.Column(scale=1):
111
+ transformed_image = gr.Image(type="pil", label="๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€")
112
+ download_btn = gr.DownloadButton("JPG๋กœ ๋‹ค์šด๋กœ๋“œ", file_name="transformed_image.jpg")
113
 
114
+ # ์ด๋ฏธ์ง€ ์ฒ˜๋ฆฌ ๊ฒฐ๊ณผ๋ฅผ State์— ์ €์žฅํ•˜๊ณ  ๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€ ํ‘œ์‹œ
115
  submit.click(
116
  fn=process_image,
117
  inputs=[input_image, convert_bw, denoise, sharpen, gamma, brightness, contrast, saturation],
118
+ outputs=[transformed_image, original_state, transformed_state]
119
  )
120
 
121
+ # ๋‹ค์šด๋กœ๋“œ ๋ฒ„ํŠผ์— ๋ณ€ํ™˜๋œ ์ด๋ฏธ์ง€ ์—ฐ๊ฒฐ
122
+ download_btn.click(
123
+ fn=download_image,
124
+ inputs=transformed_state,
125
+ outputs=download_btn
126
  )
127
 
128
+ gr.Markdown("### ์ด๋ฏธ์ง€ ๋น„๊ต")
 
 
 
129
 
130
  with gr.Row():
131
+ with gr.Column(scale=1):
132
+ alpha_slider = gr.Slider(label="๋น„๊ต ์Šฌ๋ผ์ด๋”", minimum=0.0, maximum=1.0, step=0.01, value=0.5)
133
+ blended_image = gr.Image(type="pil", label="ํ˜ผํ•ฉ๋œ ์ด๋ฏธ์ง€")
134
+
135
+ # ์Šฌ๋ผ์ด๋” ์กฐ์ž‘ ์‹œ ์ด๋ฏธ์ง€ ํ˜ผํ•ฉ
136
+ alpha_slider.change(
137
+ fn=blend_images,
138
+ inputs=[original_state, transformed_state, alpha_slider],
139
+ outputs=blended_image
140
+ )
141
 
142
+ # ์ดˆ๊ธฐ์—๋Š” ํ˜ผํ•ฉ๋œ ์ด๋ฏธ์ง€๊ฐ€ ํ‘œ์‹œ๋˜์ง€ ์•Š๋„๋ก ์„ค์ •
143
+ blended_image.style(height=300)
 
144
 
145
  return demo
146