muhammadhamza-stack committed on
Commit
b8015e9
Β·
1 Parent(s): 812f506

refine the gradio app

Browse files
.gitignore ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ venv
2
+ gradio_cached_examples
Dockerfile ADDED
@@ -0,0 +1,25 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.9-slim

# Don't write .pyc files; force unbuffered stdout/stderr so container logs
# stream in real time.
ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

WORKDIR /app

# System libraries required by OpenCV (libgl1, libglib2.0-0) plus git/curl
# used by ultralytics/model fetching. --no-install-recommends avoids pulling
# recommended packages and keeps the image small.
RUN apt-get update && apt-get install -y --no-install-recommends \
    libgl1 \
    libglib2.0-0 \
    git \
    curl \
    && rm -rf /var/lib/apt/lists/*

# Copy and install Python dependencies first so this layer stays cached
# while application code changes.
COPY requirements.txt .

RUN pip install --no-cache-dir --upgrade pip \
    && pip install --no-cache-dir -r requirements.txt

COPY . .

# Gradio serves on this port (app.py launches with server_port=7860).
EXPOSE 7860

CMD ["python", "app.py"]
README.md CHANGED
@@ -3,7 +3,7 @@ title: Cell Segmentation FZJ INM1 BDA
3
  emoji: 🐠
4
  colorFrom: green
5
  colorTo: red
6
- sdk: gradio
7
  sdk_version: 5.33.2
8
  app_file: app.py
9
  pinned: false
 
3
  emoji: 🐠
4
  colorFrom: green
5
  colorTo: red
6
+ sdk: docker
7
  sdk_version: 5.33.2
8
  app_file: app.py
9
  pinned: false
app.py CHANGED
@@ -5,6 +5,39 @@ import numpy as np
5
  import matplotlib.pyplot as plt
6
  from celldetection import fetch_model, to_tensor
7
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
8
  # βœ… Load the model
9
  device = 'cpu'
10
  model = fetch_model('ginoro_CpnResNeXt101UNet-fbe875f1a3e5ce2c').to(device).eval()
@@ -35,17 +68,131 @@ def segment(image):
35
 
36
  # βœ… Example images list
37
  examples = [
38
- ["1.png"],
39
- ["2.png"],
40
- ["3.png"]
41
  ]
42
 
43
  # βœ… Launch the Gradio interface
44
- gr.Interface(
45
- fn=segment,
46
- inputs=gr.Image(type="numpy"),
47
- outputs="image",
48
- title="Cell Segmentation Demo (FZJ-INM1)",
49
- description="Upload a microscopy image to see side-by-side segmentation.",
50
- examples=examples
51
- ).launch()
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
5
  import matplotlib.pyplot as plt
6
  from celldetection import fetch_model, to_tensor
7
 
8
+
9
# --- Client-facing documentation strings, rendered as Markdown in the UI ---

USAGE_GUIDELINES = """
## 1. Clear Setup and Run Instructions (Quick Start)
This application uses the advanced GINORO segmentation model, pre-trained for identifying cell nuclei in microscopy images.

1. **Preparation:** Ensure your image is a clear microscopy slide image, preferably showing distinct cell nuclei.
2. **Upload:** Click the 'Input Microscopy Image' box and upload your image (drag and drop, or click to select).
3. **Run:** Click the **"Run Segmentation"** button. If using an example, clicking the thumbnail will load and run the segmentation automatically.
4. **Review:** The result panel will display two images side-by-side: the Original (Left) and the Segmented result (Right).
"""

INPUT_EXPLANATION = """
## 2. Expected Inputs

| Input Field | Purpose | Requirement |
| :--- | :--- | :--- |
| **Input Microscopy Image** | The high-resolution image containing the cells you wish to analyze. | Must be an image file (PNG, JPG, TIF). Optimal results are achieved with clear, well-focused images typical of fluorescence microscopy (e.g., DAPI staining for nuclei). |

"""

OUTPUT_EXPLANATION = """
## 3. Expected Outputs (Side-by-Side Segmentation)

The output is a single image combining the original input and the segmented result for easy comparison.

* **Left Side (Original):** The unmodified input image.
* **Right Side (Segmented):** The same image with outlines (contours) drawn over the detected cellular structures.
* **Contour Color:** The detected cell nuclei are outlined in **Blue**.

"""

# Load the pretrained segmentation model once at import time.
# NOTE(review): runs on CPU only; inference will be slow for large images.
device = 'cpu'
model = fetch_model('ginoro_CpnResNeXt101UNet-fbe875f1a3e5ce2c').to(device).eval()
 
68
 
69
# Example images shipped with the repo; shown as clickable thumbnails below.
examples = [
    ["./sample_data/1.png"],
    ["./sample_data/2.png"],
    ["./sample_data/3.png"]
]

# Build the Gradio interface. Blocks (rather than gr.Interface) gives explicit
# control over layout: documentation accordion on top, then the workflow.
with gr.Blocks(title="Cell Segmentation Demo (FZJ-INM1)") as demo:

    gr.Markdown(
        """
        # Cell Segmentation Demo (FZJ-INM1)
        **Purpose:** Automatically identify and outline cell nuclei in microscopy images using a specialized neural network.
        """
    )

    # 1. Collapsible usage documentation (collapsed by default).
    with gr.Accordion(" Tips & Guidelines ", open=False):
        gr.Markdown(USAGE_GUIDELINES)
        gr.Markdown("---")
        gr.Markdown(INPUT_EXPLANATION)
        gr.Markdown("---")
        gr.Markdown(OUTPUT_EXPLANATION)

    gr.Markdown("---")

    # Main workflow components: upload -> run -> inspect side-by-side output.
    gr.Markdown("## Step 1: Upload an Image")
    input_image = gr.Image(type="numpy", label="Input Microscopy Image")

    gr.Markdown("## Step 2: Click button")
    run_button = gr.Button("Run Segmentation", variant="primary")

    gr.Markdown("## Output")
    output_image = gr.Image(label="Output: Original (Left) vs. Segmented (Right)")

    # Wire the button to the segmentation function defined above.
    run_button.click(
        fn=segment,
        inputs=input_image,
        outputs=output_image
    )

    gr.Markdown("---")
    gr.Markdown("## Examples ")

    # 2. Examples section: explicit fn/inputs/outputs so clicking a thumbnail
    # both loads the image and runs the segmentation.
    gr.Examples(
        examples=examples,
        inputs=[input_image],
        outputs=output_image,
        fn=segment,
        label="Click on an image thumbnail below to load and run a sample segmentation.",
    )

# Launch only when executed as a script (e.g. the Docker CMD "python app.py"),
# not as a side effect of importing this module. Bind to 0.0.0.0 so the server
# is reachable from outside the container on the exposed port.
if __name__ == "__main__":
    demo.launch(server_name="0.0.0.0", server_port=7860)
142
+
143
+
144
+
145
+
146
+
147
+
148
+ # import gradio as gr
149
+ # import cv2
150
+ # import torch
151
+ # import numpy as np
152
+ # import matplotlib.pyplot as plt
153
+ # from celldetection import fetch_model, to_tensor
154
+
155
+ # # βœ… Load the model
156
+ # device = 'cpu'
157
+ # model = fetch_model('ginoro_CpnResNeXt101UNet-fbe875f1a3e5ce2c').to(device).eval()
158
+
159
+ # # βœ… Inference function
160
+ # def segment(image):
161
+ # img_rgb = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0
162
+ # x = to_tensor(img_rgb, transpose=True, device=device, dtype=torch.float32)[None]
163
+
164
+ # with torch.no_grad():
165
+ # output = model(x)
166
+
167
+ # contours = output['contours'][0]
168
+ # original = (img_rgb * 255).astype(np.uint8).copy()
169
+ # segmented = original.copy()
170
+
171
+ # for contour in contours:
172
+ # contour = np.array(contour.cpu(), dtype=np.int32)
173
+ # cv2.drawContours(segmented, [contour], -1, (255, 0, 0), 2)
174
+
175
+ # h, w, c = original.shape
176
+ # gap = 60
177
+ # canvas = np.zeros((h, w * 2 + gap, c), dtype=np.uint8)
178
+ # canvas[:, :w, :] = original
179
+ # canvas[:, w + gap:, :] = segmented
180
+
181
+ # return cv2.cvtColor(canvas, cv2.COLOR_RGB2BGR)
182
+
183
+ # # βœ… Example images list
184
+ # examples = [
185
+ # ["1.png"],
186
+ # ["2.png"],
187
+ # ["3.png"]
188
+ # ]
189
+
190
+ # # βœ… Launch the Gradio interface
191
+ # gr.Interface(
192
+ # fn=segment,
193
+ # inputs=gr.Image(type="numpy"),
194
+ # outputs="image",
195
+ # title="Cell Segmentation Demo (FZJ-INM1)",
196
+ # description="Upload a microscopy image to see side-by-side segmentation.",
197
+ # examples=examples
198
+ # ).launch()
1.png β†’ flagged/image/0258f8ed5ac723a71bb25be9a65d2efb4089573e/tmpubuvjgdc.png RENAMED
File without changes
flagged/log.csv ADDED
@@ -0,0 +1,2 @@
 
 
 
1
+ image,output,flag,username,timestamp
2
+ /Users/waqas/Documents/mlbench-workingspace/hikmat/cell-segmentation-FZJ-INM1-BDA/flagged/image/0258f8ed5ac723a71bb25be9a65d2efb4089573e/tmpubuvjgdc.png,/Users/waqas/Documents/mlbench-workingspace/hikmat/cell-segmentation-FZJ-INM1-BDA/flagged/output/c9a65caf91e2f56068a0133a03aca90edec7f722/tmpa9w_87zc.png,,,2026-01-16 12:04:26.300121
flagged/output/c9a65caf91e2f56068a0133a03aca90edec7f722/tmpa9w_87zc.png ADDED

Git LFS Details

  • SHA256: 947cfbd96b93c4d59f4e5289430010b4c500ede253e377ed1ac5b2b264f48fa5
  • Pointer size: 132 Bytes
  • Size of remote file: 1.41 MB
requirements.txt CHANGED
@@ -1,6 +1,7 @@
1
- gradio
2
  opencv-python
3
  celldetection
4
  torch
5
- numpy
6
  matplotlib
 
 
 
 
 
1
  opencv-python
2
  celldetection
3
  torch
 
4
  matplotlib
5
+ numpy<2
6
+ gradio==3.50.2
7
+ gradio-client==0.6.1
sample_data/1.png ADDED

Git LFS Details

  • SHA256: 36363b121e02626cf6c7d6c8c0bae326fe07ac00e8f2e3bc5b3e65e191857300
  • Pointer size: 132 Bytes
  • Size of remote file: 1.25 MB
2.png β†’ sample_data/2.png RENAMED
File without changes
3.png β†’ sample_data/3.png RENAMED
File without changes