File size: 13,013 Bytes
b69e9e7
 
9d5f53d
b69e9e7
 
 
 
 
 
 
 
cc1addd
596cd10
 
 
 
 
 
 
 
 
b69e9e7
 
 
 
 
9d5f53d
b69e9e7
 
9d5f53d
ac00bb1
 
9d5f53d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ac00bb1
 
9d5f53d
 
 
 
 
 
 
 
 
 
 
ac00bb1
9d5f53d
 
ac00bb1
9d5f53d
 
 
 
 
 
 
 
 
 
 
 
ac00bb1
 
9d5f53d
 
 
 
 
 
ac00bb1
 
9d5f53d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
ac00bb1
 
 
cc1addd
 
 
 
 
 
 
596cd10
cc1addd
 
 
 
 
9d5f53d
 
b69e9e7
cc1addd
9d5f53d
cc1addd
 
9d5f53d
 
b69e9e7
cc1addd
 
9d5f53d
b69e9e7
9d5f53d
 
 
ac00bb1
9d5f53d
 
ac00bb1
9d5f53d
 
 
 
 
ac00bb1
9d5f53d
 
b69e9e7
9d5f53d
 
 
b69e9e7
9d5f53d
 
 
 
 
 
 
 
b69e9e7
9d5f53d
 
cc1addd
9d5f53d
 
 
b69e9e7
cc1addd
 
 
b69e9e7
9d5f53d
 
 
 
 
 
 
 
 
 
cc1addd
 
 
 
 
 
 
 
 
9d5f53d
cc1addd
 
 
 
 
9d5f53d
cc1addd
9d5f53d
b69e9e7
9d5f53d
 
 
c6ab96d
9d5f53d
b69e9e7
9d5f53d
 
 
 
 
 
596cd10
 
 
9d5f53d
 
 
 
596cd10
9d5f53d
 
596cd10
 
 
cc1addd
9d5f53d
 
 
 
 
 
 
cc1addd
596cd10
9d5f53d
 
 
 
 
596cd10
9d5f53d
 
 
 
 
596cd10
9d5f53d
002b4c1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
cc1addd
0c8c84a
002b4c1
0c8c84a
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
002b4c1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
360e533
 
 
 
 
002b4c1
 
360e533
 
 
 
 
 
 
9d5f53d
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
#!/usr/bin/env python3
"""
Anton Microscopy Analysis - Clean UI
"""

import streamlit as st
import os
import sys
from pathlib import Path
import numpy as np
from PIL import Image
import random
import logging
import traceback

# Application-wide logging: timestamped INFO-level records on the root
# handler, plus a module-level logger for this script's own events.
_LOG_FORMAT = '%(asctime)s - %(levelname)s - %(message)s'
logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT)
logger = logging.getLogger(__name__)

# Browser-tab title/icon and a centered content column.
# NOTE: st.set_page_config must be the first Streamlit call in the script.
st.set_page_config(
    page_title="Anton Microscopy Analysis", 
    page_icon="πŸ”¬", 
    layout="centered"
)

# Global CSS injected via a raw <style> tag (Streamlit exposes no first-class
# hook for these selectors). Hides Streamlit chrome, styles the headers,
# upload card, primary button, and result panels, and caps content width.
st.markdown("""
<style>
    /* Hide Streamlit branding */
    #MainMenu {visibility: hidden;}
    footer {visibility: hidden;}
    header {visibility: hidden;}
    
    /* Clean typography */
    .main-header {
        font-size: 2.5rem;
        font-weight: 300;
        color: #1f2937;
        text-align: center;
        margin-bottom: 3rem;
        letter-spacing: -0.02em;
    }
    
    .section-header {
        font-size: 1.1rem;
        font-weight: 500;
        color: #374151;
        margin-bottom: 1rem;
    }
    
    /* Clean upload area */
    .upload-section {
        background: #f8fafc;
        border: 2px dashed #e2e8f0;
        border-radius: 12px;
        padding: 2rem;
        text-align: center;
        margin-bottom: 2rem;
    }
    
    /* Primary button styling */
    .stButton > button {
        background: linear-gradient(135deg, #667eea 0%, #764ba2 100%);
        color: white;
        border: none;
        border-radius: 8px;
        padding: 0.75rem 2rem;
        font-weight: 500;
        font-size: 1rem;
        width: 100%;
        transition: all 0.2s ease;
        box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
    }
    
    .stButton > button:hover {
        transform: translateY(-1px);
        box-shadow: 0 8px 15px -3px rgba(0, 0, 0, 0.1);
    }
    
    /* Sample selection */
    .sample-selector {
        background: white;
        border-radius: 8px;
        padding: 1rem;
        box-shadow: 0 1px 3px rgba(0, 0, 0, 0.1);
        margin-bottom: 1rem;
    }
    
    /* Results styling */
    .results-container {
        background: white;
        border-radius: 12px;
        padding: 1.5rem;
        box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
        margin-top: 2rem;
    }
    
    /* Image display */
    .image-container {
        border-radius: 8px;
        overflow: hidden;
        box-shadow: 0 4px 6px -1px rgba(0, 0, 0, 0.1);
    }
    
    /* Clean spacing */
    .block-container {
        padding-top: 2rem;
        max-width: 1000px;
    }
</style>
""", unsafe_allow_html=True)

# Make this script's own directory importable so the local `anton` package
# resolves regardless of the working directory Streamlit was launched from.
sys.path.append(str(Path(__file__).parent))

# Guarded import of the Anton pipeline: on failure the flag/message are set
# so the UI can render a readable error (below) instead of a raw traceback.
try:
    from anton.core.pipeline import AnalysisPipeline
    from anton.utils.image_io import ImageLoader
    # CMPO mapping removed for simplicity
    anton_available = True
except ImportError as e:
    anton_available = False
    import_error = str(e)  # surfaced to the user further down

# Page title (styled by the injected .main-header CSS class).
st.markdown('<h1 class="main-header">πŸ”¬ Anton Microscopy Analysis</h1>', unsafe_allow_html=True)

# Hard stop if the anton package failed to import — nothing below can work.
if not anton_available:
    st.error(f"System Error: {import_error}")
    st.stop()

# Image input section
st.markdown('<div class="section-header">Select Image</div>', unsafe_allow_html=True)

# Discover bundled demo images (relative to the process working directory).
# Extension matching is case-insensitive: the previous glob("*.BMP") missed
# lowercase ".bmp" files on case-sensitive filesystems such as Linux.
sample_images_path = Path("sample_images")
if sample_images_path.exists():
    sample_images = sorted(
        p for p in sample_images_path.iterdir()
        if p.is_file() and p.suffix.lower() == ".bmp"
    )
else:
    sample_images = []

# Remember the chosen sample across Streamlit reruns; default to the first
# sample when any exist, otherwise None.
if 'selected_sample_image' not in st.session_state:
    st.session_state.selected_sample_image = sample_images[0].name if sample_images else None

# Image source selection: random-sample shortcut on the left, a toggle
# between bundled samples and user upload on the right.
col1, col2 = st.columns(2)

with col1:
    if sample_images:
        # Picking a random sample updates session state and forces a rerun
        # so the selectbox below reflects the new choice.
        if st.button("🎲 Random Sample", use_container_width=True):
            st.session_state.selected_sample_image = random.choice(sample_images).name
            st.rerun()

with col2:
    # Defaults to samples when any were found on disk.
    use_sample = st.toggle("Use Sample Images", value=bool(sample_images))

# Resolve the image to analyze. `current_image` holds decoded pixels for the
# preview; `image_to_analyze` holds the on-disk path handed to the pipeline.
current_image = None
image_to_analyze = None

if use_sample and sample_images:
    # Sample image selection — build the name list once (previously computed
    # three times inside the selectbox call).
    sample_names = [img.name for img in sample_images]
    if st.session_state.selected_sample_image in sample_names:
        default_index = sample_names.index(st.session_state.selected_sample_image)
    else:
        default_index = 0
    selected_image = st.selectbox(
        "Choose sample image:",
        sample_names,
        index=default_index,
        label_visibility="collapsed"
    )
    
    if selected_image != st.session_state.selected_sample_image:
        st.session_state.selected_sample_image = selected_image
    
    # Load sample image from disk
    sample_image_path = sample_images_path / selected_image
    if sample_image_path.exists():
        try:
            loader = ImageLoader()
            current_image = loader.load(str(sample_image_path))
            image_to_analyze = str(sample_image_path)
        except Exception as e:
            st.error(f"Error loading image: {e}")
else:
    # File upload. 'tif' accepted alongside 'tiff' — both extensions are
    # common for microscopy exports.
    uploaded_file = st.file_uploader(
        "Upload microscopy image",
        type=['png', 'jpg', 'jpeg', 'tif', 'tiff', 'bmp'],
        label_visibility="collapsed"
    )
    
    if uploaded_file:
        try:
            import tempfile
            # Persist the upload to a real file: ImageLoader works on paths.
            # delete=False so the path survives the `with`; removed later in
            # the post-analysis cleanup step.
            with tempfile.NamedTemporaryFile(delete=False, suffix=f"_{uploaded_file.name}") as tmp_file:
                tmp_file.write(uploaded_file.getbuffer())
                temp_path = tmp_file.name
            
            try:
                loader = ImageLoader()
                current_image = loader.load(temp_path)
            except Exception:
                # Narrowed from a bare `except:` (which also swallowed
                # SystemExit/KeyboardInterrupt). Fallback: PIL decode,
                # collapsing RGB to single-channel grayscale by mean.
                pil_img = Image.open(temp_path)
                current_image = np.array(pil_img)
                if len(current_image.shape) == 3 and current_image.shape[2] == 3:
                    current_image = np.mean(current_image, axis=2).astype(np.uint8)
            
            image_to_analyze = temp_path
        except Exception as e:
            st.error(f"Error loading image: {e}")

# Preview the selected/uploaded image and offer the analysis trigger.
if current_image is not None:
    st.markdown('<div class="image-container">', unsafe_allow_html=True)
    st.image(current_image, use_container_width=True)
    st.markdown('</div>', unsafe_allow_html=True)
    
    # Analysis button
    st.markdown("---")
    analyze_btn = st.button("πŸš€ Analyze Image", type="primary", use_container_width=True)
    
    # Run analysis on click (Streamlit reruns the script; the button returns
    # True only on the run triggered by the press).
    if analyze_btn:
        logger.info("Analysis button pressed")
        st.info("Starting analysis...")
        
        # Choose a VLM backend from whichever API key is present in the
        # environment — Gemini first, then Claude, else the offline mock.
        if os.getenv('GOOGLE_API_KEY'):
            vlm_provider = "gemini"
            logger.info("Using Gemini VLM provider")
        elif os.getenv('ANTHROPIC_API_KEY'):
            vlm_provider = "claude"
            logger.info("Using Claude VLM provider")
        else:
            vlm_provider = "mock"
            logger.info("Using mock VLM provider (no API keys found)")
        
        # Build the biological context passed to the pipeline.
        # NOTE(review): the sample set appears to be an FKHR-GFP nuclear
        # translocation assay in U2OS cells — these context keys are
        # presumably consumed by the pipeline's VLM prompts; confirm against
        # AnalysisPipeline before changing them.
        if use_sample and sample_images:
            biological_context = {
                "experiment_type": "protein_translocation",
                "cell_line": "U2OS_osteosarcoma", 
                "protein": "FKHR-GFP",
                "readout": "nuclear_vs_cytoplasmic_localization"
            }
            logger.info("Using sample image context: protein translocation")
        else:
            # Uploaded images get a generic context.
            biological_context = {
                "experiment_type": "general_microscopy",
                "readout": "cellular_morphology_and_phenotypes"
            }
            logger.info("Using general microscopy context")
        
        config = {
            "vlm_provider": vlm_provider,
            "biological_context": biological_context
        }
        logger.info(f"Pipeline config: {config}")
        
        # Run analysis with progressive display: results stream in per stage.
        logger.info(f"Creating pipeline with image: {image_to_analyze}")
        pipeline = AnalysisPipeline(config)
        logger.info("Pipeline created, starting progressive analysis...")
        
        # Results panel wrapper (styled by the .results-container CSS class).
        st.markdown('<div class="results-container">', unsafe_allow_html=True)
        st.markdown("### Analysis Results")
        
        # Per-stage placeholders: one for the result body, one for the
        # transient status line. Keyed by the stage ids used in callbacks.
        stage_containers = {}
        stage_statuses = {}
        
        # (display name, stage key) pairs — order defines on-page order.
        stages = [
            ("Global Analysis", "stage_1_global"),
            ("Object Detection", "stage_2_objects"), 
            ("Feature Analysis", "stage_3_features"),
            ("Population Analysis", "stage_4_population")
        ]
        
        # Pre-create empty placeholders so stages render in a fixed order
        # regardless of completion order.
        for stage_name, stage_key in stages:
            stage_containers[stage_key] = st.empty()
            stage_statuses[stage_key] = st.empty()
        
        def update_stage_display(stage_key, status, data):
            """Refresh the status badge and result panel for one pipeline stage.

            Args:
                stage_key: key from `stages` identifying which panel to update.
                status: "running" or "completed"; any other value is ignored.
                data: progress text while running, or the stage result payload
                    (usually a dict) once completed.
            """
            stage_name = next(name for name, key in stages if key == stage_key)
            panel_title = f"πŸ“‹ {stage_name}"

            if status == "running":
                stage_statuses[stage_key].info(f"⏳ {stage_name}: {data}")
                stage_containers[stage_key].expander(panel_title, expanded=False).write("Processing...")
                return
            if status != "completed":
                return

            stage_statuses[stage_key].success(f"βœ… {stage_name}: Completed")

            if not isinstance(data, dict):
                # Non-dict payloads are shown verbatim.
                stage_containers[stage_key].expander(panel_title, expanded=True).write(str(data))
                return

            logger.info(f"Stage {stage_key} data keys: {list(data.keys())}")

            # Stage results use different keys depending on the stage; take
            # the first non-empty of the known scalar fields.
            content = (
                data.get('description')
                or data.get('segmentation_guidance')
                or data.get('population_summary')
            )
            if not content:
                # Stage 3 feature_descriptions is a list
                feature_descs = data.get('feature_descriptions', [])
                logger.info(f"Stage 3 feature_descriptions: {feature_descs}")
                if feature_descs and isinstance(feature_descs, list):
                    content = '\n'.join(str(desc) for desc in feature_descs)
                else:
                    content = 'Analysis completed'

            logger.info(f"Stage {stage_key} final content: {content[:100]}...")

            # Drop markdown code fences if the content arrived wrapped in them.
            if content and content.startswith('```'):
                content = '\n'.join(
                    line for line in content.split('\n')
                    if not line.strip().startswith('```')
                )

            # Display in an expandable container, truncated to 1000 chars.
            with stage_containers[stage_key].expander(panel_title, expanded=True):
                st.write(content[:1000] + "..." if len(content) > 1000 else content)
        
        try:
            # Create callback to handle stage updates
            def stage_callback(stage, status, data):
                # Map stage names: stage_1 -> stage_1_global
                stage_mapping = {
                    "stage_1": "stage_1_global",
                    "stage_2": "stage_2_objects",
                    "stage_3": "stage_3_features",
                    "stage_4": "stage_4_population"
                }
                mapped_stage = stage_mapping.get(stage, stage)
                update_stage_display(mapped_stage, status, data)
            
            # Run progressive analysis
            results = pipeline.run_pipeline_progressive(
                image_to_analyze, 
                callback=stage_callback
            )
            logger.info(f"Progressive analysis completed. Results keys: {list(results.keys()) if results else 'None'}")
            
            # Clear status messages after completion
            for stage_key in stage_statuses:
                stage_statuses[stage_key].empty()
            
            logger.info("Progressive analysis results displayed successfully")
            
        except Exception as e:
            # Clear any running status messages on error
            for stage_key in stage_statuses:
                stage_statuses[stage_key].empty()
            error_msg = f"Analysis failed: {str(e)}"
            logger.error(error_msg)
            logger.error(f"Full traceback: {traceback.format_exc()}")
            st.error(error_msg)
            st.code(traceback.format_exc())
        
        st.markdown('</div>', unsafe_allow_html=True)
        
        # Cleanup
        if 'temp_path' in locals() and os.path.exists(temp_path):
            try:
                os.remove(temp_path)
            except:
                pass
else:
    st.markdown('<div class="upload-section">', unsafe_allow_html=True)
    st.markdown("**Upload an image or select a sample to begin analysis**")
    st.markdown('</div>', unsafe_allow_html=True)