File size: 10,728 Bytes
c75526e
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
#!/usr/bin/env python3
"""
Test suite for the OpenProblems Spatial Transcriptomics MCP Server.
"""

import asyncio
import json
import pytest
from unittest.mock import AsyncMock, MagicMock, patch

# Import the server components
import sys
from pathlib import Path
sys.path.append(str(Path(__file__).parent.parent / "src"))

from mcp_server.main import (
    handle_list_resources,
    handle_read_resource,
    handle_list_tools,
    handle_call_tool,
)


class TestMCPServer:
    """Test cases for the MCP server functionality.

    Every handler under test is async, so each test is marked with
    ``pytest.mark.asyncio``. Tools that would shell out (Nextflow, Viash,
    Docker) or touch the filesystem are exercised against mocks, so the
    suite never executes external processes or reads real files.
    """

    @pytest.mark.asyncio
    async def test_list_resources(self):
        """Test that resources are properly listed."""
        resources = await handle_list_resources()

        # The server exposes exactly five resources; verify each expected
        # URI is among them.
        assert len(resources) == 5
        resource_uris = [r.uri for r in resources]

        expected_uris = [
            "server://status",
            "documentation://nextflow",
            "documentation://viash",
            "documentation://docker",
            "templates://spatial-workflows"
        ]

        for uri in expected_uris:
            assert uri in resource_uris

    @pytest.mark.asyncio
    async def test_read_server_status_resource(self):
        """Test reading the server status resource."""
        # Resource content is a JSON string; decode before asserting.
        status_content = await handle_read_resource("server://status")
        status_data = json.loads(status_content)

        assert status_data["server_name"] == "OpenProblems Spatial Transcriptomics MCP"
        assert status_data["version"] == "0.1.0"
        assert status_data["status"] == "running"
        assert "capabilities" in status_data
        assert status_data["capabilities"]["nextflow_execution"] is True

    @pytest.mark.asyncio
    async def test_read_documentation_resources(self):
        """Test reading documentation resources.

        Covers all three documentation URIs (Nextflow, Viash, Docker) and
        spot-checks a couple of top-level keys in each JSON payload.
        """
        # Test Nextflow documentation
        nextflow_docs = await handle_read_resource("documentation://nextflow")
        nextflow_data = json.loads(nextflow_docs)
        assert "best_practices" in nextflow_data
        assert "dsl_version" in nextflow_data["best_practices"]

        # Test Viash documentation
        viash_docs = await handle_read_resource("documentation://viash")
        viash_data = json.loads(viash_docs)
        assert "component_structure" in viash_data
        assert "best_practices" in viash_data

        # Test Docker documentation
        docker_docs = await handle_read_resource("documentation://docker")
        docker_data = json.loads(docker_docs)
        assert "dockerfile_optimization" in docker_data
        assert "bioinformatics_specific" in docker_data

    @pytest.mark.asyncio
    async def test_read_templates_resource(self):
        """Test reading pipeline templates resource."""
        templates_content = await handle_read_resource("templates://spatial-workflows")
        templates_data = json.loads(templates_content)

        expected_templates = [
            "basic_preprocessing",
            "spatially_variable_genes",
            "label_transfer"
        ]

        # Every template entry must carry the full descriptive schema.
        for template in expected_templates:
            assert template in templates_data
            assert "name" in templates_data[template]
            assert "description" in templates_data[template]
            assert "inputs" in templates_data[template]
            assert "outputs" in templates_data[template]

    @pytest.mark.asyncio
    async def test_invalid_resource_uri(self):
        """Test handling of invalid resource URIs."""
        # Unknown URIs must raise ValueError with a recognizable message.
        with pytest.raises(ValueError, match="Unknown resource URI"):
            await handle_read_resource("invalid://resource")

    @pytest.mark.asyncio
    async def test_list_tools(self):
        """Test that tools are properly listed."""
        tools = await handle_list_tools()

        expected_tools = [
            "echo_test",
            "list_available_tools",
            "run_nextflow_workflow",
            "run_viash_component",
            "build_docker_image",
            "analyze_nextflow_log"
        ]

        tool_names = [t.name for t in tools]

        for tool_name in expected_tools:
            assert tool_name in tool_names

        # Check that tools have proper schemas: every tool must declare a
        # JSON-schema object for its input.
        for tool in tools:
            assert hasattr(tool, 'inputSchema')
            assert 'type' in tool.inputSchema
            assert tool.inputSchema['type'] == 'object'

    @pytest.mark.asyncio
    async def test_echo_test_tool(self):
        """Test the echo test tool."""
        result = await handle_call_tool("echo_test", {"message": "Hello MCP!"})

        # Tool results are a single text content item echoing the message.
        assert len(result) == 1
        assert result[0].type == "text"
        assert result[0].text == "Echo: Hello MCP!"

    @pytest.mark.asyncio
    async def test_list_available_tools_tool(self):
        """Test the list available tools tool."""
        result = await handle_call_tool("list_available_tools", {})

        assert len(result) == 1
        assert result[0].type == "text"

        tools_data = json.loads(result[0].text)
        assert isinstance(tools_data, list)
        assert len(tools_data) >= 6  # We have at least 6 tools

        # Check structure of tool entries
        for tool in tools_data:
            assert "name" in tool
            assert "description" in tool
            assert "required_params" in tool

    @pytest.mark.asyncio
    async def test_invalid_tool_name(self):
        """Test handling of invalid tool names."""
        with pytest.raises(ValueError, match="Unknown tool"):
            await handle_call_tool("invalid_tool", {})

    @pytest.mark.asyncio
    @patch('mcp_server.main.subprocess.run')
    async def test_nextflow_workflow_execution(self, mock_subprocess):
        """Test Nextflow workflow execution tool.

        subprocess.run is patched so no actual Nextflow process is spawned;
        the mock simulates a clean (exit code 0) run.
        """
        # Mock successful subprocess execution
        mock_result = MagicMock()
        mock_result.returncode = 0
        mock_result.stdout = "Nextflow execution completed successfully"
        mock_result.stderr = ""
        mock_subprocess.return_value = mock_result

        arguments = {
            "workflow_name": "main.nf",
            "github_repo_url": "https://github.com/openproblems-bio/test-workflow",
            "profile": "docker",
            "params": {"input": "test.h5ad", "output": "results/"}
        }

        result = await handle_call_tool("run_nextflow_workflow", arguments)

        assert len(result) == 1
        assert result[0].type == "text"

        execution_data = json.loads(result[0].text)
        assert execution_data["status"] == "completed"
        assert execution_data["exit_code"] == 0

    @pytest.mark.asyncio
    @patch('mcp_server.main.subprocess.run')
    async def test_viash_component_execution(self, mock_subprocess):
        """Test Viash component execution tool.

        subprocess.run is patched to simulate a successful component run.
        """
        # Mock successful subprocess execution
        mock_result = MagicMock()
        mock_result.returncode = 0
        mock_result.stdout = "Viash component executed successfully"
        mock_result.stderr = ""
        mock_subprocess.return_value = mock_result

        arguments = {
            "component_name": "test_component",
            "component_config_path": "config.vsh.yaml",
            "engine": "docker",
            "args": {"input": "test.h5ad", "output": "result.h5ad"}
        }

        result = await handle_call_tool("run_viash_component", arguments)

        assert len(result) == 1
        assert result[0].type == "text"

        execution_data = json.loads(result[0].text)
        assert execution_data["status"] == "completed"
        assert execution_data["exit_code"] == 0
        assert execution_data["component"] == "test_component"

    @pytest.mark.asyncio
    @patch('mcp_server.main.subprocess.run')
    async def test_docker_image_build(self, mock_subprocess):
        """Test Docker image building tool.

        subprocess.run is patched so no real `docker build` is executed.
        """
        # Mock successful subprocess execution
        mock_result = MagicMock()
        mock_result.returncode = 0
        mock_result.stdout = "Successfully built docker image"
        mock_result.stderr = ""
        mock_subprocess.return_value = mock_result

        arguments = {
            "dockerfile_path": "Dockerfile",
            "image_tag": "openproblems/test:latest",
            "context_path": "."
        }

        result = await handle_call_tool("build_docker_image", arguments)

        assert len(result) == 1
        assert result[0].type == "text"

        build_data = json.loads(result[0].text)
        assert build_data["status"] == "completed"
        assert build_data["exit_code"] == 0
        assert build_data["image_tag"] == "openproblems/test:latest"

    @pytest.mark.asyncio
    @patch('mcp_server.main.Path')
    async def test_nextflow_log_analysis(self, mock_path):
        """Test Nextflow log analysis tool.

        Feeds a synthetic failed-run log (OOM kill, exit status 137)
        through the analyzer and verifies both the failure status and the
        out-of-memory diagnosis are reported.
        """
        # Mock log file content
        mock_log_content = """
        N E X T F L O W  ~  version 23.04.0
        Launching `main.nf` [abc123] DSL2 - revision: def456

        executor >  local (4)
        [12/abc123] process > PROCESS_1 [100%] 2 of 2 ✓
        [34/def456] process > PROCESS_2 [100%] 2 of 2, failed: 1, retries: 1 ✗

        ERROR ~ Error executing process > 'PROCESS_2'

        Caused by:
          Process `PROCESS_2` terminated with an error exit status (137)

        Command executed:
          python script.py --input data.h5ad --output result.h5ad

        Command exit status:
          137

        Execution failed
        """

        # Mock file operations: the server resolves the path via Path(),
        # so patch it to report an existing, non-empty file.
        mock_log_path = MagicMock()
        mock_log_path.exists.return_value = True
        mock_log_path.stat.return_value.st_size = len(mock_log_content)
        mock_path.return_value = mock_log_path

        # Mock file reading with the stdlib mock_open helper, imported
        # directly rather than via the module-level wrapper defined later
        # in this file (avoids a fragile forward reference).
        from unittest.mock import mock_open

        with patch('builtins.open', mock_open(read_data=mock_log_content)):
            arguments = {"log_file_path": "/path/to/.nextflow.log"}
            result = await handle_call_tool("analyze_nextflow_log", arguments)

            assert len(result) == 1
            assert result[0].type == "text"

            analysis_data = json.loads(result[0].text)
            assert "issues_found" in analysis_data
            assert "execution_status" in analysis_data
            assert analysis_data["execution_status"] == "failed"

            # Check that OOM error was detected
            issues = analysis_data["issues_found"]
            oom_issue = next((issue for issue in issues if "exit status 137" in issue["pattern"]), None)
            assert oom_issue is not None
            assert "Out of memory" in oom_issue["issue"]


def mock_open(read_data):
    """Return a mock suitable for patching ``builtins.open`` in tests.

    Thin wrapper around :func:`unittest.mock.mock_open`: the mock it
    returns produces file handles whose ``read()`` yields *read_data*.
    """
    from unittest.mock import mock_open as _stdlib_mock_open

    return _stdlib_mock_open(read_data=read_data)


# Allow running this module directly (python test_mcp_server.py) as an
# alternative to invoking the pytest CLI.
if __name__ == "__main__":
    pytest.main([__file__])