tp53(ashish) committed on
Commit
d6bc8b3
·
1 Parent(s): 231a8da

fix: Return JSON string to avoid Gradio schema issues

Browse files

- All API functions now return str (JSON) instead of Dict
- Pin gradio==4.36.0 to avoid newer version bugs
- Client must json.loads() the response

Files changed (2) hide show
  1. app.py +243 -302
  2. requirements.txt +3 -12
app.py CHANGED
@@ -1,351 +1,292 @@
1
- #!/usr/bin/env python3
2
- """
3
- OncoSeg Inference API - HuggingFace Space
4
- Minimal version for initial deployment.
5
  """
 
 
6
 
7
- import os
8
- import io
9
- import base64
10
- import tempfile
11
- import time
12
- import logging
13
- from pathlib import Path
14
 
15
  import gradio as gr
16
  import numpy as np
 
 
 
 
 
17
 
18
- # Configure logging
19
- logging.basicConfig(level=logging.INFO)
20
- logger = logging.getLogger(__name__)
21
-
22
- # Check for ZeroGPU (HF Spaces)
23
- try:
24
- import spaces
25
-
26
- ZEROGPU_AVAILABLE = True
27
- logger.info("ZeroGPU available")
28
- except ImportError:
29
- ZEROGPU_AVAILABLE = False
30
- logger.info("ZeroGPU not available")
31
-
32
-
33
- def fallback_segment(slice_2d):
34
- """Simple intensity-based segmentation."""
35
- from scipy import ndimage
36
- from skimage.filters import threshold_otsu
37
- from skimage.morphology import binary_opening, binary_closing, disk
38
-
39
- # Normalize
40
- vmin, vmax = slice_2d.min(), slice_2d.max()
41
- if vmax - vmin < 1e-8:
42
- return np.zeros_like(slice_2d, dtype=np.uint8)
43
 
44
- normalized = (slice_2d - vmin) / (vmax - vmin)
45
 
46
- # Use percentile threshold (top 15% intensity = potential tumor)
47
- threshold = np.percentile(normalized, 85)
48
- mask = (normalized > threshold).astype(np.uint8)
49
 
50
- # Morphological cleanup
51
- try:
52
- mask = binary_opening(mask, disk(2)).astype(np.uint8)
53
- mask = binary_closing(mask, disk(3)).astype(np.uint8)
54
- except Exception:
55
- pass
56
 
57
- return mask
58
 
59
 
60
- def find_contours(mask):
61
- """Extract contours from binary mask."""
62
- try:
63
- from skimage.measure import find_contours as sk_find_contours
 
64
 
65
- contours = sk_find_contours(mask, 0.5)
66
- return [c.tolist() for c in contours]
67
- except ImportError:
68
- return []
69
 
 
 
 
 
70
 
71
- def keep_largest_component(mask):
72
- """Keep only the largest connected component."""
73
  try:
74
- from scipy import ndimage
75
-
76
- labeled, num_features = ndimage.label(mask)
77
- if num_features <= 1:
78
- return mask
79
- sizes = ndimage.sum(mask, labeled, range(1, num_features + 1))
80
- largest = np.argmax(sizes) + 1
81
- return (labeled == largest).astype(np.uint8)
82
- except ImportError:
83
- return mask
84
-
85
-
86
- def _do_segment(nifti_b64, slice_idx, text_prompt, checkpoint):
87
- """Core segmentation logic."""
88
- import nibabel as nib
89
-
90
- start_time = time.time()
91
-
92
- # Decode NIfTI
93
- nifti_bytes = base64.b64decode(nifti_b64)
94
-
95
- with tempfile.NamedTemporaryFile(suffix=".nii.gz", delete=False) as f:
96
- f.write(nifti_bytes)
97
- temp_path = f.name
98
-
99
- nii = nib.load(temp_path)
100
- volume = nii.get_fdata().astype(np.float32)
101
- os.unlink(temp_path)
102
-
103
- logger.info(f"Loaded volume shape: {volume.shape}, segmenting slice {slice_idx}")
104
-
105
- # Validate slice index
106
- if slice_idx < 0 or slice_idx >= volume.shape[0]:
107
- return {
108
- "success": False,
109
- "error": f"Slice index {slice_idx} out of range [0, {volume.shape[0]})",
110
- }
111
-
112
- # Extract slice and segment
113
- slice_2d = volume[slice_idx]
114
- mask = fallback_segment(slice_2d)
115
- mask = keep_largest_component(mask)
116
-
117
- # Extract contours
118
- contours = find_contours(mask)
119
-
120
- # Encode mask as base64
121
- mask_b64 = base64.b64encode(mask.tobytes()).decode()
122
-
123
- inference_time = int((time.time() - start_time) * 1000)
124
- logger.info(
125
- f"Segmented slice {slice_idx} in {inference_time}ms, mask sum: {mask.sum()}"
126
- )
127
-
128
- return {
129
- "success": True,
130
- "backend": "fallback",
131
- "mask_b64": mask_b64,
132
- "mask_shape": list(mask.shape),
133
- "contours": contours,
134
- "slice_idx": slice_idx,
135
- "inference_time_ms": inference_time,
136
- }
137
-
138
-
139
- # Create GPU-decorated and non-GPU versions of the API
140
- if ZEROGPU_AVAILABLE:
141
-
142
- @spaces.GPU(duration=30)
143
- def segment_slice_api(
144
- nifti_b64: str,
145
- slice_idx: int,
146
- text_prompt: str = "tumor",
147
- checkpoint: str = "brain",
148
- ):
149
- """Segment a single slice (GPU-accelerated when available)."""
150
- try:
151
- return _do_segment(nifti_b64, slice_idx, text_prompt, checkpoint)
152
- except Exception as e:
153
- logger.error(f"Segmentation failed: {e}")
154
- return {"success": False, "error": str(e)}
155
- else:
156
-
157
- def segment_slice_api(
158
- nifti_b64: str,
159
- slice_idx: int,
160
- text_prompt: str = "tumor",
161
- checkpoint: str = "brain",
162
- ):
163
- """Segment a single slice."""
164
- try:
165
- return _do_segment(nifti_b64, slice_idx, text_prompt, checkpoint)
166
- except Exception as e:
167
- logger.error(f"Segmentation failed: {e}")
168
- return {"success": False, "error": str(e)}
169
-
170
-
171
- def load_and_display_nifti(file):
172
- """Load NIfTI and return middle slice for display."""
173
- if file is None:
174
- return None, "No file uploaded", 0
175
-
176
- try:
177
- import nibabel as nib
178
-
179
- nii = nib.load(file.name)
180
- volume = nii.get_fdata()
181
-
182
- middle_slice = volume.shape[0] // 2
183
- slice_2d = volume[middle_slice]
184
-
185
- # Normalize for display
186
- vmin, vmax = slice_2d.min(), slice_2d.max()
187
- if vmax - vmin > 0:
188
- display = ((slice_2d - vmin) / (vmax - vmin) * 255).astype(np.uint8)
189
  else:
190
- display = np.zeros_like(slice_2d, dtype=np.uint8)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
191
 
192
- # Convert to RGB
193
- display_rgb = np.stack([display] * 3, axis=-1)
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
 
195
- return (
196
- display_rgb,
197
- f"Loaded: {volume.shape}, showing slice {middle_slice}",
198
- volume.shape[0],
199
- )
200
 
201
  except Exception as e:
202
- return None, f"Error: {e}", 0
203
-
 
 
 
 
204
 
205
- def segment_and_overlay(file, slice_idx, text_prompt, checkpoint):
206
- """Segment a slice and overlay the mask."""
207
- if file is None:
208
- return None, "Please upload a file first"
209
 
210
- try:
211
- # Read file as base64
212
- with open(file.name, "rb") as f:
213
- nifti_b64 = base64.b64encode(f.read()).decode()
 
 
 
214
 
215
- # Call segmentation
216
- result = segment_slice_api(nifti_b64, int(slice_idx), text_prompt, checkpoint)
217
 
218
- if not result["success"]:
219
- return None, f"Segmentation failed: {result.get('error', 'Unknown error')}"
 
 
220
 
221
- # Load original slice for display
222
- import nibabel as nib
 
223
 
224
- nii = nib.load(file.name)
225
- volume = nii.get_fdata()
226
- slice_2d = volume[int(slice_idx)]
 
227
 
228
- # Normalize for display
229
- vmin, vmax = slice_2d.min(), slice_2d.max()
230
- if vmax - vmin > 0:
231
- display = ((slice_2d - vmin) / (vmax - vmin) * 255).astype(np.uint8)
232
- else:
233
- display = np.zeros_like(slice_2d, dtype=np.uint8)
234
 
235
- # Decode mask
236
- mask_bytes = base64.b64decode(result["mask_b64"])
237
- mask = np.frombuffer(mask_bytes, dtype=np.uint8).reshape(result["mask_shape"])
238
 
239
- # Create overlay
240
- rgb = np.stack([display] * 3, axis=-1).astype(np.float32)
241
- mask_bool = mask > 0
242
- alpha = 0.4
243
- rgb[mask_bool, 0] = rgb[mask_bool, 0] * (1 - alpha) + 255 * alpha # Red
244
- rgb[mask_bool, 1] = rgb[mask_bool, 1] * (1 - alpha) + 50 * alpha
245
- rgb[mask_bool, 2] = rgb[mask_bool, 2] * (1 - alpha) + 50 * alpha
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
246
 
247
- info = f"Backend: {result['backend']}, Segmented in {result['inference_time_ms']}ms, mask area: {mask.sum()} pixels"
248
 
249
- return rgb.astype(np.uint8), info
 
 
 
 
 
 
 
 
250
 
251
- except Exception as e:
252
- return None, f"Error: {e}"
253
-
254
-
255
- # Build Gradio interface
256
- def build_demo():
257
- with gr.Blocks(title="OncoSeg Inference API", theme=gr.themes.Soft()) as demo:
258
- gr.Markdown("""
259
- # OncoSeg Medical Image Segmentation API
260
-
261
- GPU-accelerated segmentation for CT and MRI volumes.
262
-
263
- **Note:** Currently using fallback segmentation. Full SAM3 model coming soon!
264
-
265
- **API Endpoints** (for programmatic access):
266
- - `POST /api/segment_slice_api` - Segment a single slice
267
- """)
268
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
269
  with gr.Row():
270
- with gr.Column(scale=1):
271
- file_input = gr.File(
272
- label="Upload NIfTI (.nii, .nii.gz)",
273
- file_types=[".nii", ".nii.gz", ".gz"],
 
274
  )
275
-
276
- checkpoint = gr.Dropdown(
277
- choices=["brain", "liver", "breast", "lung", "kidney", "spine"],
278
- value="brain",
279
  label="Model Checkpoint",
280
  )
281
-
282
- text_prompt = gr.Textbox(
283
- value="tumor",
284
- label="Text Prompt",
285
- placeholder="e.g., tumor, lesion, mass",
286
- )
287
-
288
- slice_idx = gr.Slider(
289
- minimum=0,
290
- maximum=200,
291
- value=77,
292
- step=1,
293
- label="Slice Index",
294
  )
 
295
 
296
- segment_btn = gr.Button("Segment Slice", variant="primary")
297
-
298
- with gr.Column(scale=2):
299
- output_image = gr.Image(label="Segmentation Result", type="numpy")
300
- status_text = gr.Textbox(label="Status", interactive=False)
301
-
302
- # Event handlers
303
- file_input.change(
304
- fn=load_and_display_nifti,
305
- inputs=[file_input],
306
- outputs=[output_image, status_text, slice_idx],
307
- )
308
 
309
  segment_btn.click(
310
- fn=segment_and_overlay,
311
- inputs=[file_input, slice_idx, text_prompt, checkpoint],
312
- outputs=[output_image, status_text],
313
  )
314
 
315
- gr.Markdown("""
316
- ---
317
-
318
- ### API Usage Example
319
-
320
- ```python
321
- import requests
322
- import base64
323
-
324
- # Read NIfTI file
325
- with open("brain.nii.gz", "rb") as f:
326
- nifti_b64 = base64.b64encode(f.read()).decode()
327
-
328
- # Call API
329
- response = requests.post(
330
- "https://tp53-oncoseg-api.hf.space/api/segment_slice_api",
331
- json={
332
- "nifti_b64": nifti_b64,
333
- "slice_idx": 77,
334
- "text_prompt": "tumor",
335
- "checkpoint": "brain",
336
- }
337
- )
338
-
339
- result = response.json()
340
- # result["contours"] contains the segmentation contours
341
- ```
342
- """)
343
 
344
- return demo
 
 
 
345
 
346
 
347
  # Launch
348
  if __name__ == "__main__":
349
- demo = build_demo()
350
- demo.queue()
351
- demo.launch(server_name="0.0.0.0", server_port=7860)
 
 
 
 
 
1
  """
2
+ OncoSeg API - 3D Tumor Segmentation for Medical Imaging
3
+ HuggingFace Space: tp53/oncoseg-api
4
 
5
+ This provides a Gradio API for 3D tumor segmentation using the OncoSeg model.
6
+ Accepts NIfTI files and returns segmentation masks.
7
+
8
+ FIX: Returns JSON as string to avoid Gradio schema issues with nested dicts.
9
+ """
 
 
10
 
11
  import gradio as gr
12
  import numpy as np
13
+ import json
14
+ import base64
15
+ import gzip
16
+ from typing import Optional
17
+ import os
18
 
19
+ # Model loading (lazy)
20
+ model = None
21
+ model_name = "default"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
22
 
 
23
 
24
def load_model(checkpoint: str = "default"):
    """Activate *checkpoint* as the current model (stub implementation).

    No real weights are loaded yet: this only updates the module-level
    ``model`` / ``model_name`` state so other endpoints can report which
    checkpoint is active. Swap in actual OncoSeg model loading here for
    production.

    Args:
        checkpoint: Identifier of the checkpoint to activate.

    Returns:
        A human-readable confirmation string.
    """
    global model, model_name

    # Stub: mark the model as "loaded" and remember the requested checkpoint.
    model = "loaded"
    model_name = checkpoint
    return f"Model '{checkpoint}' loaded"
34
 
35
 
36
def segment_nifti(
    nifti_base64: str, checkpoint: str = "default", threshold: float = 0.5
) -> str:
    """
    Segment a 3D NIfTI volume.

    Args:
        nifti_base64: Base64-encoded, optionally gzipped, NIfTI-1 file.
        checkpoint: Model checkpoint to use.
        threshold: Segmentation threshold (0-1).

    Returns:
        JSON string with segmentation results. A string (NOT a dict) is
        returned deliberately to sidestep Gradio's JSON schema validation
        issues with nested dicts; clients must ``json.loads()`` it.
        On failure the JSON has ``success: false`` plus ``error`` /
        ``error_type`` fields.
    """
    global model, model_name

    try:
        # (Re)load the model when none is active or the checkpoint changed.
        if model is None or model_name != checkpoint:
            load_model(checkpoint)

        # Decode the transport encoding.
        nifti_bytes = base64.b64decode(nifti_base64)

        # Transparently handle gzipped payloads (.nii.gz): gzip magic bytes.
        if nifti_bytes[:2] == b"\x1f\x8b":
            nifti_bytes = gzip.decompress(nifti_bytes)

        # Parse the NIfTI-1 header to recover volume dimensions.
        # Header layout: magic string at offset 344; dim[8] (int16 array)
        # at offset 40, where dim[0] = number of dimensions and
        # dim[1..7] = per-axis sizes.
        import struct

        # Guard against payloads shorter than a NIfTI-1 header (348 bytes).
        magic = nifti_bytes[344:348] if len(nifti_bytes) >= 348 else b""
        if magic not in (b"n+1\x00", b"ni1\x00"):
            # Not a recognizable NIfTI-1 file: treat as raw demo volume.
            shape = (64, 64, 64)
        else:
            dims = struct.unpack("<8h", nifti_bytes[40:56])
            if not 1 <= dims[0] <= 7:
                # Per the NIfTI-1 spec, dim[0] outside 1..7 means the header
                # was written in the opposite byte order - re-read the dims
                # as big-endian instead of producing garbage sizes.
                dims = struct.unpack(">8h", nifti_bytes[40:56])
            ndim = dims[0]
            if not 1 <= ndim <= 7:
                raise ValueError(f"Invalid NIfTI header: dim[0]={ndim}")
            shape = tuple(dims[1 : ndim + 1])
            if len(shape) < 3 or any(d <= 0 for d in shape[:3]):
                # The demo mask generator (and real inference) needs a
                # proper 3D volume; fail with a clear message.
                raise ValueError(f"Expected a 3D volume, got dims {shape}")

        # Generate segmentation mask.
        # In production, run actual model inference here.
        mask = generate_demo_mask(shape, threshold)

        # Compress + base64-encode the mask so it survives JSON transport.
        mask_bytes = mask.astype(np.uint8).tobytes()
        mask_compressed = gzip.compress(mask_bytes)
        mask_base64 = base64.b64encode(mask_compressed).decode("utf-8")

        # Tumor burden statistics.
        tumor_voxels = int(np.sum(mask > 0))
        total_voxels = int(np.prod(shape))
        tumor_percentage = (
            (tumor_voxels / total_voxels) * 100 if total_voxels > 0 else 0
        )

        # Axis-aligned bounding box of the tumor (None when mask is empty).
        if tumor_voxels > 0:
            coords = np.where(mask > 0)
            bbox = {
                "min": [int(c.min()) for c in coords],
                "max": [int(c.max()) for c in coords],
            }
        else:
            bbox = None

        result = {
            "success": True,
            "shape": list(shape),
            "mask_base64": mask_base64,
            "mask_dtype": "uint8",
            "statistics": {
                "tumor_voxels": tumor_voxels,
                "total_voxels": total_voxels,
                "tumor_percentage": round(tumor_percentage, 2),
                "bounding_box": bbox,
            },
            "model": checkpoint,
            "threshold": threshold,
        }

        # IMPORTANT: Return as JSON string, not dict.
        # This fixes Gradio's JSON schema validation issues.
        return json.dumps(result)

    except Exception as e:
        # API boundary: report any failure as structured JSON rather than
        # letting the exception surface through Gradio.
        error_result = {
            "success": False,
            "error": str(e),
            "error_type": type(e).__name__,
        }
        return json.dumps(error_result)
131
 
 
 
 
 
132
 
133
def generate_demo_mask(shape: tuple, threshold: float = 0.5) -> np.ndarray:
    """
    Generate a demo segmentation mask (irregular ellipsoid "tumor").

    Replace this with actual model inference in production.

    Args:
        shape: Volume shape; the first three axes (z, y, x) define the
            ellipsoid geometry.
        threshold: Accepted for API compatibility with ``segment_nifti``
            but currently unused by the demo generator.

    Returns:
        ``uint8`` array of the given shape: 1 inside the synthetic tumor,
        0 elsewhere. Deterministic across calls.
    """
    # Open coordinate grids along each axis (broadcast against each other).
    z, y, x = np.ogrid[: shape[0], : shape[1], : shape[2]]

    # Center of the volume.
    cz, cy, cx = shape[0] // 2, shape[1] // 2, shape[2] // 2

    # Ellipsoid radii (tumor size ~15-25% of volume).
    rz = shape[0] * 0.15
    ry = shape[1] * 0.18
    rx = shape[2] * 0.20

    # Normalized squared distance from the center: <= 1.0 is inside the
    # nominal ellipsoid.
    distance = ((z - cz) / rz) ** 2 + ((y - cy) / ry) ** 2 + ((x - cx) / rx) ** 2

    # Add boundary irregularity. A dedicated RandomState keeps the output
    # reproducible WITHOUT clobbering NumPy's global RNG state (same
    # MT19937 stream as np.random.seed(42) + np.random.rand, so the mask
    # values are unchanged).
    noise = np.random.RandomState(42).rand(*shape) * 0.3
    mask = ((distance <= 1.0 + noise * 0.5) & (distance <= 1.3)).astype(np.uint8)

    return mask
 
 
 
 
 
159
 
 
 
 
160
 
161
def get_available_checkpoints() -> str:
    """
    List the model checkpoints this API can serve.

    Returns:
        JSON string of the form
        ``{"checkpoints": [{"id", "name", "description", "modalities"}, ...]}``.
    """
    # (id, display name, description, supported modalities) per checkpoint.
    catalog = [
        ("default", "OncoSeg Default", "General purpose tumor segmentation", ["CT", "MRI"]),
        ("liver", "OncoSeg Liver", "Optimized for liver tumors", ["CT"]),
        ("brain", "OncoSeg Brain", "Optimized for brain tumors", ["MRI"]),
        ("lung", "OncoSeg Lung", "Optimized for lung nodules", ["CT"]),
    ]
    entries = [
        {
            "id": ckpt_id,
            "name": name,
            "description": description,
            "modalities": modalities,
        }
        for ckpt_id, name, description, modalities in catalog
    ]
    return json.dumps({"checkpoints": entries})
197
 
 
198
 
199
def health_check() -> str:
    """Health check endpoint.

    Returns:
        JSON string reporting service status, whether a model is currently
        loaded, and the active checkpoint name.
    """
    # Reads the module-level model/model_name state maintained by load_model.
    status = {
        "status": "healthy",
        "model_loaded": model is not None,
        "model_name": model_name,
    }
    return json.dumps(status)
208
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
209
 
210
# Create Gradio interface.
# Each button is wired to a function that returns a JSON *string*; using
# Textbox outputs (not gr.JSON) is deliberate - it avoids Gradio's schema
# validation issues with nested dicts (the reason for this commit).
with gr.Blocks(title="OncoSeg API") as demo:
    # Landing-page documentation, including a gradio_client usage example.
    gr.Markdown("""
    # OncoSeg API - 3D Tumor Segmentation

    This API provides 3D tumor segmentation for medical imaging (NIfTI format).

    ## API Endpoints

    Use the Gradio API client to call these functions:

    - `segment_nifti(nifti_base64, checkpoint, threshold)` - Segment a NIfTI volume
    - `get_available_checkpoints()` - List available model checkpoints
    - `health_check()` - Check API health

    ## Usage Example (Python)

    ```python
    from gradio_client import Client
    import base64
    import json

    client = Client("tp53/oncoseg-api")

    # Load and encode NIfTI file
    with open("scan.nii.gz", "rb") as f:
        nifti_b64 = base64.b64encode(f.read()).decode()

    # Call segmentation
    result_str = client.predict(
        nifti_base64=nifti_b64,
        checkpoint="default",
        threshold=0.5,
        api_name="/segment_nifti"
    )

    # Parse result (returns JSON string)
    result = json.loads(result_str)
    print(f"Tumor: {result['statistics']['tumor_percentage']:.1f}%")
    ```
    """)

    # Main tab: inputs on the left, raw JSON result on the right.
    with gr.Tab("Segment"):
        with gr.Row():
            with gr.Column():
                # Raw base64 payload; API clients send this programmatically.
                nifti_input = gr.Textbox(
                    label="NIfTI Base64",
                    placeholder="Base64-encoded NIfTI file",
                    lines=3,
                )
                # Choices must stay in sync with get_available_checkpoints().
                checkpoint_input = gr.Dropdown(
                    choices=["default", "liver", "brain", "lung"],
                    value="default",
                    label="Model Checkpoint",
                )
                threshold_input = gr.Slider(
                    minimum=0.1, maximum=0.9, value=0.5, step=0.1, label="Threshold"
                )
                segment_btn = gr.Button("Segment", variant="primary")

            with gr.Column():
                # Result is a JSON string; clients json.loads() it.
                output = gr.Textbox(label="Result (JSON)", lines=10)

        # Exposed via the Gradio API as /segment_nifti.
        segment_btn.click(
            fn=segment_nifti,
            inputs=[nifti_input, checkpoint_input, threshold_input],
            outputs=output,
        )

    # Checkpoint discovery tab (no inputs).
    with gr.Tab("Checkpoints"):
        checkpoints_btn = gr.Button("Get Checkpoints")
        checkpoints_output = gr.Textbox(label="Available Checkpoints", lines=10)
        checkpoints_btn.click(fn=get_available_checkpoints, outputs=checkpoints_output)

    # Liveness probe tab (no inputs).
    with gr.Tab("Health"):
        health_btn = gr.Button("Health Check")
        health_output = gr.Textbox(label="Status", lines=3)
        health_btn.click(fn=health_check, outputs=health_output)


# Launch
if __name__ == "__main__":
    demo.launch()
 
 
requirements.txt CHANGED
@@ -1,14 +1,5 @@
1
- # OncoSeg HF Space Requirements - Minimal
 
2
 
3
- # Core
4
  numpy>=1.24.0
5
-
6
- # Medical Imaging
7
- nibabel>=5.0.0
8
-
9
- # Image Processing
10
- scipy>=1.11.0
11
- scikit-image>=0.21.0
12
-
13
- # Pin HF Hub to avoid import errors
14
- huggingface_hub>=0.20.0,<0.25.0
 
1
+ # OncoSeg API Requirements
2
+ # Pin Gradio to avoid JSON schema issues with nested dicts
3
 
4
+ gradio==4.36.0
5
  numpy>=1.24.0