File size: 4,293 Bytes
e067c2d
47bba68
e067c2d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47bba68
e067c2d
 
 
 
 
 
 
 
47bba68
e067c2d
 
 
 
 
 
 
47bba68
e067c2d
 
 
 
 
 
 
47bba68
 
e067c2d
 
 
 
 
 
 
 
 
47bba68
 
 
 
 
 
 
e067c2d
 
 
 
 
 
 
 
47bba68
 
 
e067c2d
 
 
47bba68
e067c2d
 
 
47bba68
e067c2d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
47bba68
e067c2d
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
"""Performance metrics collection service."""

import time
import psutil
from contextlib import contextmanager
from dataclasses import dataclass
from typing import Generator, Callable, Any, Tuple
from ..models.state import SearchMetrics


class MetricsCollector:
    """Collects runtime, memory, and CPU metrics during search execution.

    Lifecycle: call ``start()``, optionally ``sample()`` periodically
    mid-run, then ``stop()``; afterwards read the ``runtime_ms`` /
    ``memory_kb`` / ``cpu_percent`` properties or export everything
    with ``to_metrics()``.

    NOTE: the former ``@dataclass`` decorator was removed. The class has a
    hand-written ``__init__`` and no dataclass fields, so the decorator
    contributed nothing useful — it only generated a field-less
    ``__eq__``/``__repr__`` that made *all* instances compare equal.
    """

    def __init__(self) -> None:
        self.start_time: float = 0
        self.end_time: float = 0
        self.start_memory: int = 0   # RSS bytes recorded by start()
        self.end_memory: int = 0     # RSS bytes recorded by stop()
        self.peak_memory: int = 0    # largest RSS observed so far
        self.memory_samples: list[int] = []
        self.cpu_samples: list[float] = []
        # Handle to the current process; all RSS/CPU readings come from it.
        self._process = psutil.Process()

    def start(self) -> None:
        """Open the measurement window: reset samples, record baselines."""
        self.start_time = time.perf_counter()
        self.start_memory = self._process.memory_info().rss
        self.peak_memory = self.start_memory
        self.memory_samples = [self.start_memory]
        self.cpu_samples = []
        # Prime psutil's CPU counter: the first cpu_percent() call after
        # process-handle creation reports 0.0, so take and discard one
        # reading now; later calls measure the interval since this one.
        self._process.cpu_percent()

    def sample(self) -> None:
        """Record one memory + CPU sample; call periodically mid-run."""
        current_memory = self._process.memory_info().rss
        self.memory_samples.append(current_memory)
        self.peak_memory = max(self.peak_memory, current_memory)
        self.cpu_samples.append(self._process.cpu_percent())

    def stop(self) -> None:
        """Close the measurement window and take a final sample."""
        self.end_time = time.perf_counter()
        self.end_memory = self._process.memory_info().rss
        self.memory_samples.append(self.end_memory)
        self.peak_memory = max(self.peak_memory, self.end_memory)
        # Final CPU sample covers the interval since the previous reading.
        self.cpu_samples.append(self._process.cpu_percent())

    @property
    def runtime_ms(self) -> float:
        """Elapsed time between start() and stop(), in milliseconds."""
        return (self.end_time - self.start_time) * 1000

    @property
    def memory_kb(self) -> float:
        """Peak memory growth over the start() baseline, in KB.

        May be negative if memory was released during the run (clamped
        to zero by ``to_metrics``).
        """
        if len(self.memory_samples) > 1:
            # Use max sample minus start for more accurate peak measurement
            max_sample = max(self.memory_samples)
            return (max_sample - self.start_memory) / 1024
        # Fallback when no samples beyond the baseline were recorded.
        return (self.peak_memory - self.start_memory) / 1024

    @property
    def cpu_percent(self) -> float:
        """Average of the collected CPU samples (0.0 if none were taken)."""
        if not self.cpu_samples:
            return 0.0
        return sum(self.cpu_samples) / len(self.cpu_samples)

    def to_metrics(
        self, nodes_expanded: int, path_cost: float, path_length: int
    ) -> "SearchMetrics":
        """Bundle the collected measurements into a SearchMetrics record.

        Args:
            nodes_expanded: Number of nodes the search expanded.
            path_cost: Total cost of the found path.
            path_length: Number of steps in the found path.

        Returns:
            A populated SearchMetrics instance.
        """
        return SearchMetrics(
            runtime_ms=self.runtime_ms,
            memory_kb=max(0, self.memory_kb),  # clamp: freed memory can make growth negative
            cpu_percent=self.cpu_percent,
            nodes_expanded=nodes_expanded,
            path_cost=path_cost,
            path_length=path_length,
        )


@contextmanager
def measure_performance() -> Generator[MetricsCollector, None, None]:
    """Context manager that wraps a code region in metrics collection.

    Yields a started :class:`MetricsCollector`; the collector is stopped
    when the block exits, even on exception.

    Example:
        with measure_performance() as metrics:
            result = search.solve(strategy)
        print(f"Runtime: {metrics.runtime_ms}ms")
    """
    mc = MetricsCollector()
    mc.start()
    try:
        yield mc
    finally:
        # Always close the measurement window, even if the body raised.
        mc.stop()


def run_with_metrics(
    func: Callable[..., Any], *args, **kwargs
) -> Tuple[Any, MetricsCollector]:
    """Execute ``func`` while collecting performance metrics around it.

    Args:
        func: Callable to execute.
        *args: Positional arguments forwarded to ``func``.
        **kwargs: Keyword arguments forwarded to ``func``.

    Returns:
        A ``(result, collector)`` pair, where ``collector`` holds the
        measurements taken around the call.
    """
    mc = MetricsCollector()
    mc.start()
    try:
        outcome = func(*args, **kwargs)
    finally:
        # Stop timing even when func raises; the exception still propagates.
        mc.stop()
    return outcome, mc


def format_metrics(metrics: "SearchMetrics") -> str:
    """Format a SearchMetrics record as a single human-readable line.

    Bug fix: the previous version read ``metrics.memory_mb``, but this
    module constructs SearchMetrics with a ``memory_kb`` field (see
    ``MetricsCollector.to_metrics``), so the MB value is now derived from
    the KB field. (If SearchMetrics separately defines a ``memory_mb``
    property, confirm it matches ``memory_kb / 1024`` — TODO verify
    against the model.)
    """
    return (
        f"Runtime: {metrics.runtime_ms:.2f}ms | "
        f"Memory: {metrics.memory_kb / 1024:.2f}MB | "
        f"CPU: {metrics.cpu_percent:.1f}% | "
        f"Nodes: {metrics.nodes_expanded} | "
        f"Cost: {metrics.path_cost} | "
        f"Path Length: {metrics.path_length}"
    )