File size: 4,739 Bytes
86f402d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
"""
Analysis Service - Wraps MedGemmaAgent for API use
"""

from pathlib import Path
from dataclasses import asdict
from typing import Optional, Generator

from models.medgemma_agent import MedGemmaAgent
from data.case_store import get_case_store


class AnalysisService:
    """Singleton service wrapping MedGemmaAgent for API use.

    Obtain the shared instance via get_analysis_service(); the class-level
    _instance attribute holds it. All analysis entry points are generators
    that stream text chunks so callers can forward them to HTTP responses.
    """

    # Shared singleton, managed by get_analysis_service().
    _instance = None

    def __init__(self):
        # verbose=True so agent progress shows up in server logs.
        self.agent = MedGemmaAgent(verbose=True)
        self.store = get_case_store()
        self._loaded = False  # ML models are loaded lazily on first use

    def _ensure_loaded(self):
        """Load the ML models on first use (expensive; done exactly once)."""
        if not self._loaded:
            self.agent.load_model()
            self._loaded = True

    def analyze(
        self,
        patient_id: str,
        lesion_id: str,
        image_id: str,
        question: Optional[str] = None
    ) -> Generator[str, None, None]:
        """Run analysis on an image, yielding streaming text chunks.

        Streams the agent's output, then persists the diagnosis (and MONET
        features, if available) on the image record and advances its stage
        to ``awaiting_confirmation``.
        """
        self._ensure_loaded()

        image = self.store.get_image(patient_id, lesion_id, image_id)
        if not image or not image.image_path:
            yield "[ERROR]No image uploaded[/ERROR]"
            return

        # Mark the image as in-flight before streaming begins.
        self.store.update_image(patient_id, lesion_id, image_id, stage="analyzing")

        # Reset agent state so a previous case's context can't leak in.
        self.agent.reset_state()

        # Stream the analysis, forwarding the optional clinician question.
        for chunk in self.agent.analyze_image_stream(image.image_path, question=question or ""):
            yield chunk

        # Persist the diagnosis after the stream completes.
        if self.agent.last_diagnosis:
            # Guard against an empty predictions list: indexing [0] blindly
            # would raise IndexError after chunks were already streamed.
            predictions = self.agent.last_diagnosis.get("predictions") or []
            if predictions:
                top = predictions[0]
                analysis_data = {
                    "diagnosis": top["class"],
                    "full_name": top["full_name"],
                    "confidence": top["probability"],
                    "all_predictions": predictions
                }

                # Attach MONET concept features when the agent produced them.
                if self.agent.last_monet_result:
                    analysis_data["monet_features"] = self.agent.last_monet_result.get("features", {})

                self.store.update_image(
                    patient_id, lesion_id, image_id,
                    stage="awaiting_confirmation",
                    analysis=analysis_data
                )

    def confirm(
        self,
        patient_id: str,
        lesion_id: str,
        image_id: str,
        confirmed: bool,
        feedback: Optional[str] = None
    ) -> Generator[str, None, None]:
        """Confirm/reject the diagnosis and stream management guidance.

        After streaming completes the image stage is advanced to
        ``complete``.
        """
        # Guard against a cold process: guidance generation needs the model.
        self._ensure_loaded()

        for chunk in self.agent.generate_management_guidance(confirmed, feedback):
            yield chunk

        self.store.update_image(patient_id, lesion_id, image_id, stage="complete")

    def chat_followup(
        self,
        patient_id: str,
        lesion_id: str,
        message: str
    ) -> Generator[str, None, None]:
        """Handle a follow-up chat message, streaming the agent's reply.

        Both the user message and the fully-assembled assistant response are
        persisted to the lesion's chat history.
        """
        # Save user message before generating, so history is ordered correctly.
        self.store.add_chat_message(patient_id, lesion_id, "user", message)

        # Accumulate chunks in a list and join once — avoids the quadratic
        # cost of repeated string concatenation on long responses.
        parts = []
        for chunk in self.agent.chat_followup(message):
            parts.append(chunk)
            yield chunk

        self.store.add_chat_message(patient_id, lesion_id, "assistant", "".join(parts))

    def get_chat_history(self, patient_id: str, lesion_id: str):
        """Return the lesion's chat history as a list of plain dicts."""
        messages = self.store.get_chat_history(patient_id, lesion_id)
        return [asdict(m) for m in messages]

    def compare_images(
        self,
        patient_id: str,
        lesion_id: str,
        previous_image_path: str,
        current_image_path: str,
        current_image_id: str
    ) -> Generator[str, None, None]:
        """Compare two images of the same lesion, streaming the assessment.

        The current image record is updated with a comparison summary once
        streaming finishes.
        """
        self._ensure_loaded()

        # Stream the comparison output.
        for chunk in self.agent.compare_followup_images(previous_image_path, current_image_path):
            yield chunk

        # TODO(review): the agent does not currently expose a structured
        # comparison verdict, so we default to STABLE. Replace with the real
        # status once the agent surfaces one.
        comparison_data = {
            "status": "STABLE",
            "summary": "Comparison complete"
        }

        self.store.update_image(
            patient_id, lesion_id, current_image_id,
            comparison=comparison_data
        )


def get_analysis_service() -> AnalysisService:
    """Return the process-wide AnalysisService, creating it on first use."""
    service = AnalysisService._instance
    if service is None:
        service = AnalysisService()
        AnalysisService._instance = service
    return service