djl234 committed on
Commit
7f70460
·
verified ·
1 Parent(s): c5b00e1

Update app.py

Browse files

[fix] block is NoneType

Files changed (1) hide show
  1. app.py +37 -50
app.py CHANGED
@@ -23,45 +23,6 @@ from model_video import build_model
23
  import numpy as np
24
  import collections
25
 
26
- def crf_refine(img, annos):
27
- print(img.shape,annos.shape)
28
- def _sigmoid(x):
29
- return 1 / (1 + np.exp(-x))
30
-
31
- assert img.dtype == np.uint8
32
- assert annos.dtype == np.uint8
33
- assert img.shape[:2] == annos.shape
34
-
35
- # img and annos should be np array with data type uint8
36
-
37
- EPSILON = 1e-8
38
-
39
- M = 2 # salient or not
40
- tau = 1.05
41
- # Setup the CRF model
42
- d = dcrf.DenseCRF2D(img.shape[1], img.shape[0], M)
43
-
44
- anno_norm = annos / 255.
45
-
46
- n_energy = -np.log((1.0 - anno_norm + EPSILON)) / (tau * _sigmoid(1 - anno_norm))
47
- p_energy = -np.log(anno_norm + EPSILON) / (tau * _sigmoid(anno_norm))
48
-
49
- U = np.zeros((M, img.shape[0] * img.shape[1]), dtype='float32')
50
- U[0, :] = n_energy.flatten()
51
- U[1, :] = p_energy.flatten()
52
-
53
- d.setUnaryEnergy(U)
54
-
55
- d.addPairwiseGaussian(sxy=3, compat=3)
56
- d.addPairwiseBilateral(sxy=60, srgb=5, rgbim=img, compat=5)
57
-
58
- # Do the inference
59
- infer = np.array(d.inference(1)).astype('float32')
60
- res = infer[1, :]
61
-
62
- res = res * 255
63
- res = res.reshape(img.shape[:2])
64
- return res.astype('uint8')
65
  def show_coord(evt: gr.SelectData):
66
  return f"{evt.index[0]},{evt.index[1]}"
67
 
@@ -154,11 +115,6 @@ def test(gpu_id, net, img_list, group_size, img_size):
154
 
155
  img_lst=[(torch.rand(352,352,3)*255).numpy().astype(np.uint8) for i in range(5)]
156
 
157
-
158
-
159
-
160
-
161
-
162
  #simply test
163
  res=test('cpu',net,img_lst,5,224)
164
  '''for i in range(5):
@@ -219,17 +175,48 @@ def create_mode1_interface():
219
  )
220
 
221
  with gr.Blocks(title="交互式图像组分割系统") as demo:
222
- #gr.Markdown("## mode choice")
223
- mode = gr.Radio(["多图协同分割", "点提示交互分割"], value="多图协同分割", label="运行模式")
 
 
224
 
225
- mode1 = create_mode1_interface()
226
- mode2 = create_mode2_interface()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
227
 
 
228
  mode.change(
229
  lambda x: (gr.update(visible=x=="多图协同分割"), gr.update(visible=x=="点提示交互分割")),
230
  inputs=mode,
231
- outputs=[mode1, mode2]
232
  )
233
 
234
- demo.launch()
235
  demo.launch(debug=True)
 
23
  import numpy as np
24
  import collections
25
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
26
  def show_coord(evt: gr.SelectData):
27
  return f"{evt.index[0]},{evt.index[1]}"
28
 
 
115
 
116
  img_lst=[(torch.rand(352,352,3)*255).numpy().astype(np.uint8) for i in range(5)]
117
 
 
 
 
 
 
118
  #simply test
119
  res=test('cpu',net,img_lst,5,224)
120
  '''for i in range(5):
 
175
  )
176
 
177
  with gr.Blocks(title="交互式图像组分割系统") as demo:
178
+ # 模式选择器
179
+ mode = gr.Radio(["多图协同分割", "点提示交互分割"],
180
+ value="多图协同分割",
181
+ label="运行模式")
182
 
183
+ # 使用Tab容器替代独立Blocks
184
+ with gr.Tabs() as mode_container:
185
+ with gr.Tab("多图模式", id=0) as tab1:
186
+ # 模式1界面组件
187
+ with gr.Row():
188
+ inputs = [gr.Image(type="numpy", label=f"图像{i+1}") for i in range(5)]
189
+ process_btn = gr.Button("开始处理")
190
+ output_img = gr.Image(label="处理结果")
191
+
192
+ process_btn.click(
193
+ sepia,
194
+ inputs=inputs,
195
+ outputs=output_img
196
+ )
197
+
198
+ with gr.Tab("点选模式", id=1) as tab2:
199
+ # 模式2界面组件
200
+ img_input = gr.Image(type="numpy", label="点击上传图片并选择点")
201
+ coord_store = gr.Textbox(visible=False)
202
+ mask_btn = gr.Button("生成分割掩码")
203
+ mask_output = gr.Image(label="分割结果")
204
+
205
+ @img_input.select(inputs=[], outputs=coord_store)
206
+ def store_coordinate(evt: gr.SelectData):
207
+ return f"{evt.index[0]},{evt.index[1]}"
208
+
209
+ mask_btn.click(
210
+ generate_mask,
211
+ inputs=[img_input, coord_store],
212
+ outputs=mask_output
213
+ )
214
 
215
+ # 动态显示控制
216
  mode.change(
217
  lambda x: (gr.update(visible=x=="多图协同分割"), gr.update(visible=x=="点提示交互分割")),
218
  inputs=mode,
219
+ outputs=[tab1, tab2]
220
  )
221
 
 
222
  demo.launch(debug=True)