File size: 6,808 Bytes
5e4510c
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
"""
Async utilities for OpenEvolve
"""

import asyncio
import functools
import logging
import time
from typing import Any, Callable, Dict, List, Optional, TypeVar, Union

logger = logging.getLogger(__name__)

T = TypeVar("T")


def run_in_executor(f: Callable) -> Callable:
    """
    Decorator to run a synchronous function in the default executor

    The decorated function becomes a coroutine function; awaiting it runs
    the original synchronous callable on the event loop's default executor
    (a thread pool), so it does not block the loop.

    Args:
        f: Synchronous function to decorate

    Returns:
        Coroutine function that runs ``f`` in an executor
    """

    @functools.wraps(f)
    async def wrapper(*args: Any, **kwargs: Any) -> Any:
        # We are inside a coroutine, so get_running_loop() is always valid;
        # get_event_loop() is deprecated for this use since Python 3.10.
        loop = asyncio.get_running_loop()
        # partial() carries both positional and keyword args, since
        # run_in_executor() itself only accepts positional arguments.
        return await loop.run_in_executor(None, functools.partial(f, *args, **kwargs))

    return wrapper


async def run_with_timeout(
    coro: Callable, timeout: float, *args: Any, timeout_error_value: Any = None, **kwargs: Any
) -> Any:
    """
    Await a coroutine function, substituting a fallback value on timeout

    Args:
        coro: Coroutine function to invoke
        timeout: Timeout in seconds
        *args: Positional arguments forwarded to ``coro``
        timeout_error_value: Value returned on timeout
            (default: {"error": 0.0, "timeout": True})
        **kwargs: Keyword arguments forwarded to ``coro``

    Returns:
        The coroutine's result, or ``timeout_error_value`` if it timed out
    """
    fallback = timeout_error_value
    if fallback is None:
        fallback = {"error": 0.0, "timeout": True}

    try:
        result = await asyncio.wait_for(coro(*args, **kwargs), timeout=timeout)
    except asyncio.TimeoutError:
        logger.warning(f"Operation timed out after {timeout}s")
        return fallback
    return result


async def run_sync_with_timeout(
    func: Callable, timeout: float, *args: Any, timeout_error_value: Any = None, **kwargs: Any
) -> Any:
    """
    Run a synchronous function in an executor with a timeout

    Note: a thread cannot be interrupted, so on timeout the function keeps
    running in the background while ``timeout_error_value`` is returned.

    Args:
        func: Synchronous function to run
        timeout: Timeout in seconds
        *args: Arguments to pass to the function
        timeout_error_value: Value to return on timeout (default: {"error": 0.0, "timeout": True})
        **kwargs: Keyword arguments to pass to the function

    Returns:
        Result of the function or timeout_error_value on timeout
    """
    if timeout_error_value is None:
        timeout_error_value = {"error": 0.0, "timeout": True}

    # get_running_loop() is correct inside a coroutine; get_event_loop()
    # is deprecated for this use since Python 3.10.
    loop = asyncio.get_running_loop()
    task = loop.run_in_executor(None, functools.partial(func, *args, **kwargs))
    try:
        return await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        logger.warning(f"Sync operation timed out after {timeout}s")
        return timeout_error_value


async def gather_with_concurrency(
    n: int, *tasks: asyncio.Future, return_exceptions: bool = False
) -> List[Any]:
    """
    Await the given awaitables with at most ``n`` in flight at once

    Args:
        n: Maximum number of awaitables allowed to run concurrently
        *tasks: Awaitables to run
        return_exceptions: If True, exceptions are returned in the result
            list instead of being raised

    Returns:
        Results in the same order as the inputs
    """
    gate = asyncio.Semaphore(n)

    async def bounded(awaitable: asyncio.Future) -> Any:
        # Each awaitable must acquire a slot before it is actually awaited.
        async with gate:
            return await awaitable

    wrapped = [bounded(item) for item in tasks]
    return await asyncio.gather(*wrapped, return_exceptions=return_exceptions)


async def retry_async(
    coro: Callable,
    *args: Any,
    retries: int = 3,
    delay: float = 1.0,
    backoff: float = 2.0,
    exceptions: Union[type, tuple] = Exception,
    **kwargs: Any,
) -> Any:
    """
    Retry an async function with exponential backoff

    Args:
        coro: Coroutine function to retry
        *args: Arguments to pass to the coroutine
        retries: Maximum number of retries (total attempts = retries + 1)
        delay: Initial delay between retries (seconds)
        backoff: Multiplier for delay between retries
        exceptions: Exception class (or tuple of classes) to catch
        **kwargs: Keyword arguments to pass to the coroutine

    Returns:
        Result of the coroutine

    Raises:
        The last exception caught if all retries fail
    """
    last_exception = None
    current_delay = delay

    for i in range(retries + 1):
        try:
            return await coro(*args, **kwargs)
        except exceptions as e:
            last_exception = e
            if i < retries:
                logger.warning(
                    f"Retry {i+1}/{retries} failed with {type(e).__name__}: {str(e)}. "
                    f"Retrying in {current_delay:.2f}s..."
                )
                await asyncio.sleep(current_delay)
                current_delay *= backoff
            else:
                logger.error(
                    f"All {retries+1} attempts failed. Last error: {type(e).__name__}: {str(e)}"
                )

    if last_exception:
        raise last_exception

    # Only reachable if retries < 0 (the loop body never ran).
    return None


class TaskPool:
    """
    A simple pool that tracks asyncio tasks and bounds their concurrency

    A shared semaphore gates entry into `run`; it is created lazily so it
    binds to whichever event loop is running when the pool is first used.
    """

    def __init__(self, max_concurrency: int = 10):
        self.max_concurrency = max_concurrency
        self._semaphore: Optional[asyncio.Semaphore] = None
        self.tasks: List[asyncio.Task] = []

    @property
    def semaphore(self) -> asyncio.Semaphore:
        """Create the semaphore on first access so it attaches to the active loop"""
        if self._semaphore is None:
            self._semaphore = asyncio.Semaphore(self.max_concurrency)
        return self._semaphore

    async def run(self, coro: Callable, *args: Any, **kwargs: Any) -> Any:
        """
        Await ``coro(*args, **kwargs)`` once a concurrency slot is free

        Args:
            coro: Coroutine function to run
            *args: Positional arguments forwarded to ``coro``
            **kwargs: Keyword arguments forwarded to ``coro``

        Returns:
            Result of the coroutine
        """
        async with self.semaphore:
            return await coro(*args, **kwargs)

    def create_task(self, coro: Callable, *args: Any, **kwargs: Any) -> asyncio.Task:
        """
        Schedule ``coro`` through the pool and track the resulting task

        Args:
            coro: Coroutine function to run
            *args: Positional arguments forwarded to ``coro``
            **kwargs: Keyword arguments forwarded to ``coro``

        Returns:
            The tracked asyncio.Task
        """
        task = asyncio.create_task(self.run(coro, *args, **kwargs))
        self.tasks.append(task)
        # Drop the task from the tracking list as soon as it finishes.
        task.add_done_callback(self.tasks.remove)
        return task

    async def wait_all(self) -> None:
        """Wait for all tracked tasks to complete"""
        if not self.tasks:
            return
        await asyncio.gather(*self.tasks)

    async def cancel_all(self) -> None:
        """Cancel all tracked tasks and wait for the cancellations to settle"""
        for pending in self.tasks:
            pending.cancel()

        if self.tasks:
            await asyncio.gather(*self.tasks, return_exceptions=True)