Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Predict the next line after this snippet: <|code_start|>from __future__ import annotations
@reentrant
@attrs.define(eq=False)
class LocalEventBroker(BaseEventBroker):
_executor: ThreadPoolExecutor = attrs.field(init=False)
_exit_stack: ExitStack = attrs.field(init=False)
_subscriptions_lock: Lock = attrs.field(init=False, factory=Lock)
<|code_end|>
using the current file's imports:
from asyncio import iscoroutinefunction
from concurrent.futures import ThreadPoolExecutor
from contextlib import ExitStack
from threading import Lock
from typing import Any, Callable, Iterable
from ..abc import Subscription
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and any relevant context from other files:
# Path: src/apscheduler/abc.py
# class Subscription(metaclass=ABCMeta):
# """
# Represents a subscription with an event source.
#
# If used as a context manager, unsubscribes on exit.
# """
#
# def __enter__(self) -> Subscription:
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# self.unsubscribe()
#
# @abstractmethod
# def unsubscribe(self) -> None:
# """
# Cancel this subscription.
#
# Does nothing if the subscription has already been cancelled.
# """
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | def __enter__(self): |
Next line prediction: <|code_start|>@attrs.define(eq=False)
class LocalEventBroker(BaseEventBroker):
_executor: ThreadPoolExecutor = attrs.field(init=False)
_exit_stack: ExitStack = attrs.field(init=False)
_subscriptions_lock: Lock = attrs.field(init=False, factory=Lock)
def __enter__(self):
self._exit_stack = ExitStack()
self._executor = self._exit_stack.enter_context(ThreadPoolExecutor(1))
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._exit_stack.__exit__(exc_type, exc_val, exc_tb)
del self._executor
def subscribe(self, callback: Callable[[Event], Any],
event_types: Iterable[type[Event]] | None = None, *,
one_shot: bool = False) -> Subscription:
if iscoroutinefunction(callback):
raise ValueError('Coroutine functions are not supported as callbacks on a synchronous '
'event source')
with self._subscriptions_lock:
return super().subscribe(callback, event_types, one_shot=one_shot)
def unsubscribe(self, token: object) -> None:
with self._subscriptions_lock:
super().unsubscribe(token)
def publish(self, event: Event) -> None:
<|code_end|>
. Use current file imports:
(from asyncio import iscoroutinefunction
from concurrent.futures import ThreadPoolExecutor
from contextlib import ExitStack
from threading import Lock
from typing import Any, Callable, Iterable
from ..abc import Subscription
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs)
and context including class names, function names, or small code snippets from other files:
# Path: src/apscheduler/abc.py
# class Subscription(metaclass=ABCMeta):
# """
# Represents a subscription with an event source.
#
# If used as a context manager, unsubscribes on exit.
# """
#
# def __enter__(self) -> Subscription:
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# self.unsubscribe()
#
# @abstractmethod
# def unsubscribe(self) -> None:
# """
# Cancel this subscription.
#
# Does nothing if the subscription has already been cancelled.
# """
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | self.publish_local(event) |
Given snippet: <|code_start|>from __future__ import annotations
@reentrant
@attrs.define(eq=False)
class LocalEventBroker(BaseEventBroker):
_executor: ThreadPoolExecutor = attrs.field(init=False)
_exit_stack: ExitStack = attrs.field(init=False)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from asyncio import iscoroutinefunction
from concurrent.futures import ThreadPoolExecutor
from contextlib import ExitStack
from threading import Lock
from typing import Any, Callable, Iterable
from ..abc import Subscription
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and context:
# Path: src/apscheduler/abc.py
# class Subscription(metaclass=ABCMeta):
# """
# Represents a subscription with an event source.
#
# If used as a context manager, unsubscribes on exit.
# """
#
# def __enter__(self) -> Subscription:
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# self.unsubscribe()
#
# @abstractmethod
# def unsubscribe(self) -> None:
# """
# Cancel this subscription.
#
# Does nothing if the subscription has already been cancelled.
# """
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
which might include code, classes, or functions. Output only the next line. | _subscriptions_lock: Lock = attrs.field(init=False, factory=Lock) |
Given the code snippet: <|code_start|>@attrs.define(eq=False)
class LocalEventBroker(BaseEventBroker):
_executor: ThreadPoolExecutor = attrs.field(init=False)
_exit_stack: ExitStack = attrs.field(init=False)
_subscriptions_lock: Lock = attrs.field(init=False, factory=Lock)
def __enter__(self):
self._exit_stack = ExitStack()
self._executor = self._exit_stack.enter_context(ThreadPoolExecutor(1))
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self._exit_stack.__exit__(exc_type, exc_val, exc_tb)
del self._executor
def subscribe(self, callback: Callable[[Event], Any],
event_types: Iterable[type[Event]] | None = None, *,
one_shot: bool = False) -> Subscription:
if iscoroutinefunction(callback):
raise ValueError('Coroutine functions are not supported as callbacks on a synchronous '
'event source')
with self._subscriptions_lock:
return super().subscribe(callback, event_types, one_shot=one_shot)
def unsubscribe(self, token: object) -> None:
with self._subscriptions_lock:
super().unsubscribe(token)
def publish(self, event: Event) -> None:
<|code_end|>
, generate the next line using the imports in this file:
from asyncio import iscoroutinefunction
from concurrent.futures import ThreadPoolExecutor
from contextlib import ExitStack
from threading import Lock
from typing import Any, Callable, Iterable
from ..abc import Subscription
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and context (functions, classes, or occasionally code) from other files:
# Path: src/apscheduler/abc.py
# class Subscription(metaclass=ABCMeta):
# """
# Represents a subscription with an event source.
#
# If used as a context manager, unsubscribes on exit.
# """
#
# def __enter__(self) -> Subscription:
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# self.unsubscribe()
#
# @abstractmethod
# def unsubscribe(self) -> None:
# """
# Cancel this subscription.
#
# Does nothing if the subscription has already been cancelled.
# """
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | self.publish_local(event) |
Here is a snippet: <|code_start|>from __future__ import annotations
if sys.version_info >= (3, 9):
else:
def marshal_object(obj) -> tuple[str, Any]:
return f'{obj.__class__.__module__}:{obj.__class__.__qualname__}', obj.__getstate__()
def unmarshal_object(ref: str, state):
cls = callable_from_ref(ref)
instance = cls.__new__(cls)
instance.__setstate__(state)
return instance
@overload
def marshal_date(value: None) -> None:
...
@overload
def marshal_date(value: date) -> str:
<|code_end|>
. Write the next line using the current file imports:
import sys
from datetime import date, datetime, tzinfo
from functools import partial
from typing import Any, Callable, overload
from .exceptions import DeserializationError, SerializationError
from zoneinfo import ZoneInfo
from backports.zoneinfo import ZoneInfo
and context from other files:
# Path: src/apscheduler/exceptions.py
# class DeserializationError(Exception):
# """Raised when a serializer fails to deserialize the given object."""
#
# class SerializationError(Exception):
# """Raised when a serializer fails to serialize the given object."""
, which may include functions, classes, or code. Output only the next line. | ... |
Predict the next line after this snippet: <|code_start|>from __future__ import annotations
@reentrant
@attrs.define(eq=False)
class LocalAsyncEventBroker(AsyncEventBroker, BaseEventBroker):
_task_group: TaskGroup = attrs.field(init=False)
_exit_stack: AsyncExitStack = attrs.field(init=False)
async def __aenter__(self) -> LocalAsyncEventBroker:
<|code_end|>
using the current file's imports:
from asyncio import iscoroutine
from contextlib import AsyncExitStack
from typing import Any, Callable
from anyio import create_task_group
from anyio.abc import TaskGroup
from ..abc import AsyncEventBroker
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and any relevant context from other files:
# Path: src/apscheduler/abc.py
# class AsyncEventBroker(EventSource):
# """
# Asynchronous version of :class:`EventBroker`.
#
# Can be used as an asynchronous context manager.
# """
#
# async def __aenter__(self):
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# pass
#
# @abstractmethod
# async def publish(self, event: Event) -> None:
# """Publish an event."""
#
# @abstractmethod
# async def publish_local(self, event: Event) -> None:
# """Publish an event, but only to local subscribers."""
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | self._exit_stack = AsyncExitStack() |
Using the snippet: <|code_start|>from __future__ import annotations
@reentrant
@attrs.define(eq=False)
class LocalAsyncEventBroker(AsyncEventBroker, BaseEventBroker):
_task_group: TaskGroup = attrs.field(init=False)
_exit_stack: AsyncExitStack = attrs.field(init=False)
async def __aenter__(self) -> LocalAsyncEventBroker:
<|code_end|>
, determine the next line of code. You have imports:
from asyncio import iscoroutine
from contextlib import AsyncExitStack
from typing import Any, Callable
from anyio import create_task_group
from anyio.abc import TaskGroup
from ..abc import AsyncEventBroker
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and context (class names, function names, or code) available:
# Path: src/apscheduler/abc.py
# class AsyncEventBroker(EventSource):
# """
# Asynchronous version of :class:`EventBroker`.
#
# Can be used as an asynchronous context manager.
# """
#
# async def __aenter__(self):
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# pass
#
# @abstractmethod
# async def publish(self, event: Event) -> None:
# """Publish an event."""
#
# @abstractmethod
# async def publish_local(self, event: Event) -> None:
# """Publish an event, but only to local subscribers."""
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | self._exit_stack = AsyncExitStack() |
Given snippet: <|code_start|> self._task_group = create_task_group()
await self._exit_stack.enter_async_context(self._task_group)
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)
del self._task_group
async def publish(self, event: Event) -> None:
await self.publish_local(event)
async def publish_local(self, event: Event) -> None:
event_type = type(event)
one_shot_tokens: list[object] = []
for _token, subscription in self._subscriptions.items():
if subscription.event_types is None or event_type in subscription.event_types:
self._task_group.start_soon(self._deliver_event, subscription.callback, event)
if subscription.one_shot:
one_shot_tokens.append(subscription.token)
for token in one_shot_tokens:
super().unsubscribe(token)
async def _deliver_event(self, func: Callable[[Event], Any], event: Event) -> None:
try:
retval = func(event)
if iscoroutine(retval):
await retval
except BaseException:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from asyncio import iscoroutine
from contextlib import AsyncExitStack
from typing import Any, Callable
from anyio import create_task_group
from anyio.abc import TaskGroup
from ..abc import AsyncEventBroker
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and context:
# Path: src/apscheduler/abc.py
# class AsyncEventBroker(EventSource):
# """
# Asynchronous version of :class:`EventBroker`.
#
# Can be used as an asynchronous context manager.
# """
#
# async def __aenter__(self):
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# pass
#
# @abstractmethod
# async def publish(self, event: Event) -> None:
# """Publish an event."""
#
# @abstractmethod
# async def publish_local(self, event: Event) -> None:
# """Publish an event, but only to local subscribers."""
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
which might include code, classes, or functions. Output only the next line. | self._logger.exception('Error delivering %s event', event.__class__.__name__) |
Given the code snippet: <|code_start|>from __future__ import annotations
@reentrant
@attrs.define(eq=False)
class LocalAsyncEventBroker(AsyncEventBroker, BaseEventBroker):
_task_group: TaskGroup = attrs.field(init=False)
_exit_stack: AsyncExitStack = attrs.field(init=False)
async def __aenter__(self) -> LocalAsyncEventBroker:
self._exit_stack = AsyncExitStack()
self._task_group = create_task_group()
await self._exit_stack.enter_async_context(self._task_group)
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
await self._exit_stack.__aexit__(exc_type, exc_val, exc_tb)
del self._task_group
async def publish(self, event: Event) -> None:
await self.publish_local(event)
async def publish_local(self, event: Event) -> None:
event_type = type(event)
<|code_end|>
, generate the next line using the imports in this file:
from asyncio import iscoroutine
from contextlib import AsyncExitStack
from typing import Any, Callable
from anyio import create_task_group
from anyio.abc import TaskGroup
from ..abc import AsyncEventBroker
from ..events import Event
from ..util import reentrant
from .base import BaseEventBroker
import attrs
and context (functions, classes, or occasionally code) from other files:
# Path: src/apscheduler/abc.py
# class AsyncEventBroker(EventSource):
# """
# Asynchronous version of :class:`EventBroker`.
#
# Can be used as an asynchronous context manager.
# """
#
# async def __aenter__(self):
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# pass
#
# @abstractmethod
# async def publish(self, event: Event) -> None:
# """Publish an event."""
#
# @abstractmethod
# async def publish_local(self, event: Event) -> None:
# """Publish an event, but only to local subscribers."""
#
# Path: src/apscheduler/events.py
# class Event:
# timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
# converter=as_aware_datetime)
#
# Path: src/apscheduler/util.py
# def reentrant(cls: type[T]) -> type[T]:
# """
# Modifies a class so that its ``__enter__`` / ``__exit__`` (or ``__aenter__`` / ``__aexit__``)
# methods track the number of times it has been entered and exited and only actually invoke
# the ``__enter__()`` method on the first entry and ``__exit__()`` on the last exit.
#
# """
#
# def __enter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# previous_enter(self)
#
# return self
#
# def __exit__(self, exc_type, exc_val, exc_tb) -> None:
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# return previous_exit(self, exc_type, exc_val, exc_tb)
#
# async def __aenter__(self: T) -> T:
# loans[self] += 1
# if loans[self] == 1:
# await previous_aenter(self)
#
# return self
#
# async def __aexit__(self, exc_type, exc_val, exc_tb):
# assert loans[self]
# loans[self] -= 1
# if loans[self] == 0:
# del loans[self]
# return await previous_aexit(self, exc_type, exc_val, exc_tb)
#
# loans: dict[T, int] = defaultdict(lambda: 0)
# previous_enter: Callable = getattr(cls, '__enter__', None)
# previous_exit: Callable = getattr(cls, '__exit__', None)
# previous_aenter: Callable = getattr(cls, '__aenter__', None)
# previous_aexit: Callable = getattr(cls, '__aexit__', None)
# if previous_enter and previous_exit:
# cls.__enter__ = __enter__
# cls.__exit__ = __exit__
# elif previous_aenter and previous_aexit:
# cls.__aenter__ = __aenter__
# cls.__aexit__ = __aexit__
#
# return cls
#
# Path: src/apscheduler/eventbrokers/base.py
# class BaseEventBroker(EventBroker):
# _logger: Logger = attrs.field(init=False)
# _subscriptions: dict[object, LocalSubscription] = attrs.field(init=False, factory=dict)
#
# def __attrs_post_init__(self) -> None:
# self._logger = getLogger(self.__class__.__module__)
#
# def subscribe(self, callback: Callable[[Event], Any],
# event_types: Iterable[type[Event]] | None = None, *,
# one_shot: bool = False) -> Subscription:
# types = set(event_types) if event_types else None
# token = object()
# subscription = LocalSubscription(callback, types, one_shot, token, self)
# self._subscriptions[token] = subscription
# return subscription
#
# def unsubscribe(self, token: object) -> None:
# self._subscriptions.pop(token, None)
. Output only the next line. | one_shot_tokens: list[object] = [] |
Using the snippet: <|code_start|>class TaskUpdated(DataStoreEvent):
task_id: str
@attrs.define(kw_only=True, frozen=True)
class TaskRemoved(DataStoreEvent):
task_id: str
@attrs.define(kw_only=True, frozen=True)
class ScheduleAdded(DataStoreEvent):
schedule_id: str
next_fire_time: datetime | None = attrs.field(converter=optional(as_aware_datetime))
@attrs.define(kw_only=True, frozen=True)
class ScheduleUpdated(DataStoreEvent):
schedule_id: str
next_fire_time: datetime | None = attrs.field(converter=optional(as_aware_datetime))
@attrs.define(kw_only=True, frozen=True)
class ScheduleRemoved(DataStoreEvent):
schedule_id: str
@attrs.define(kw_only=True, frozen=True)
class JobAdded(DataStoreEvent):
job_id: UUID = attrs.field(converter=as_uuid)
task_id: str
<|code_end|>
, determine the next line of code. You have imports:
from datetime import datetime, timezone
from functools import partial
from uuid import UUID
from attrs.converters import optional
from .converters import as_aware_datetime, as_uuid
from .enums import JobOutcome
import attrs
and context (class names, function names, or code) available:
# Path: src/apscheduler/converters.py
# def as_aware_datetime(value: datetime | str) -> datetime:
# """Convert the value from a string to a timezone aware datetime."""
# if isinstance(value, str):
# # fromisoformat() does not handle the "Z" suffix
# if value.upper().endswith('Z'):
# value = value[:-1] + '+00:00'
#
# value = datetime.fromisoformat(value)
#
# return value
#
# def as_uuid(value: UUID | str) -> UUID:
# """Convert a string-formatted UUID to a UUID instance."""
# if isinstance(value, str):
# return UUID(value)
#
# return value
#
# Path: src/apscheduler/enums.py
# class JobOutcome(Enum):
# success = auto()
# error = auto()
# missed_start_deadline = auto()
# cancelled = auto()
# expired = auto()
. Output only the next line. | schedule_id: str | None |
Next line prediction: <|code_start|>class SchedulerEvent(Event):
pass
@attrs.define(kw_only=True, frozen=True)
class SchedulerStarted(SchedulerEvent):
pass
@attrs.define(kw_only=True, frozen=True)
class SchedulerStopped(SchedulerEvent):
exception: BaseException | None = None
#
# Worker events
#
@attrs.define(kw_only=True, frozen=True)
class WorkerEvent(Event):
pass
@attrs.define(kw_only=True, frozen=True)
class WorkerStarted(WorkerEvent):
pass
@attrs.define(kw_only=True, frozen=True)
class WorkerStopped(WorkerEvent):
<|code_end|>
. Use current file imports:
(from datetime import datetime, timezone
from functools import partial
from uuid import UUID
from attrs.converters import optional
from .converters import as_aware_datetime, as_uuid
from .enums import JobOutcome
import attrs)
and context including class names, function names, or small code snippets from other files:
# Path: src/apscheduler/converters.py
# def as_aware_datetime(value: datetime | str) -> datetime:
# """Convert the value from a string to a timezone aware datetime."""
# if isinstance(value, str):
# # fromisoformat() does not handle the "Z" suffix
# if value.upper().endswith('Z'):
# value = value[:-1] + '+00:00'
#
# value = datetime.fromisoformat(value)
#
# return value
#
# def as_uuid(value: UUID | str) -> UUID:
# """Convert a string-formatted UUID to a UUID instance."""
# if isinstance(value, str):
# return UUID(value)
#
# return value
#
# Path: src/apscheduler/enums.py
# class JobOutcome(Enum):
# success = auto()
# error = auto()
# missed_start_deadline = auto()
# cancelled = auto()
# expired = auto()
. Output only the next line. | exception: BaseException | None = None |
Predict the next line after this snippet: <|code_start|>from __future__ import annotations
@attrs.define(kw_only=True, frozen=True)
class Event:
timestamp: datetime = attrs.field(factory=partial(datetime.now, timezone.utc),
converter=as_aware_datetime)
#
# Data store events
#
@attrs.define(kw_only=True, frozen=True)
<|code_end|>
using the current file's imports:
from datetime import datetime, timezone
from functools import partial
from uuid import UUID
from attrs.converters import optional
from .converters import as_aware_datetime, as_uuid
from .enums import JobOutcome
import attrs
and any relevant context from other files:
# Path: src/apscheduler/converters.py
# def as_aware_datetime(value: datetime | str) -> datetime:
# """Convert the value from a string to a timezone aware datetime."""
# if isinstance(value, str):
# # fromisoformat() does not handle the "Z" suffix
# if value.upper().endswith('Z'):
# value = value[:-1] + '+00:00'
#
# value = datetime.fromisoformat(value)
#
# return value
#
# def as_uuid(value: UUID | str) -> UUID:
# """Convert a string-formatted UUID to a UUID instance."""
# if isinstance(value, str):
# return UUID(value)
#
# return value
#
# Path: src/apscheduler/enums.py
# class JobOutcome(Enum):
# success = auto()
# error = auto()
# missed_start_deadline = auto()
# cancelled = auto()
# expired = auto()
. Output only the next line. | class DataStoreEvent(Event): |
Predict the next line for this snippet: <|code_start|>
@attrs.define(kw_only=True, eq=False)
class CBORSerializer(Serializer):
type_tag: int = 4664
dump_options: dict[str, Any] = attrs.field(factory=dict)
load_options: dict[str, Any] = attrs.field(factory=dict)
def __attrs_post_init__(self):
self.dump_options.setdefault('default', self._default_hook)
self.load_options.setdefault('tag_hook', self._tag_hook)
def _default_hook(self, encoder, value):
if hasattr(value, '__getstate__'):
marshalled = marshal_object(value)
encoder.encode(CBORTag(self.type_tag, marshalled))
else:
raise CBOREncodeTypeError(f'cannot serialize type {value.__class__.__name__}')
def _tag_hook(self, decoder, tag: CBORTag, shareable_index: int = None):
if tag.tag == self.type_tag:
cls_ref, state = tag.value
return unmarshal_object(cls_ref, state)
def serialize(self, obj) -> bytes:
return dumps(obj, **self.dump_options)
def deserialize(self, serialized: bytes):
<|code_end|>
with the help of current file imports:
from typing import Any
from cbor2 import CBOREncodeTypeError, CBORTag, dumps, loads
from ..abc import Serializer
from ..marshalling import marshal_object, unmarshal_object
import attrs
and context from other files:
# Path: src/apscheduler/abc.py
# class Serializer(metaclass=ABCMeta):
# __slots__ = ()
#
# @abstractmethod
# def serialize(self, obj) -> bytes:
# pass
#
# def serialize_to_unicode(self, obj) -> str:
# return b64encode(self.serialize(obj)).decode('ascii')
#
# @abstractmethod
# def deserialize(self, serialized: bytes):
# pass
#
# def deserialize_from_unicode(self, serialized: str):
# return self.deserialize(b64decode(serialized))
#
# Path: src/apscheduler/marshalling.py
# def marshal_object(obj) -> tuple[str, Any]:
# return f'{obj.__class__.__module__}:{obj.__class__.__qualname__}', obj.__getstate__()
#
# def unmarshal_object(ref: str, state):
# cls = callable_from_ref(ref)
# instance = cls.__new__(cls)
# instance.__setstate__(state)
# return instance
, which may contain function names, class names, or code. Output only the next line. | return loads(serialized, **self.load_options) |
Using the snippet: <|code_start|>
@attrs.define(kw_only=True, eq=False)
class CBORSerializer(Serializer):
type_tag: int = 4664
dump_options: dict[str, Any] = attrs.field(factory=dict)
load_options: dict[str, Any] = attrs.field(factory=dict)
def __attrs_post_init__(self):
self.dump_options.setdefault('default', self._default_hook)
self.load_options.setdefault('tag_hook', self._tag_hook)
def _default_hook(self, encoder, value):
if hasattr(value, '__getstate__'):
marshalled = marshal_object(value)
encoder.encode(CBORTag(self.type_tag, marshalled))
else:
raise CBOREncodeTypeError(f'cannot serialize type {value.__class__.__name__}')
def _tag_hook(self, decoder, tag: CBORTag, shareable_index: int = None):
if tag.tag == self.type_tag:
cls_ref, state = tag.value
return unmarshal_object(cls_ref, state)
def serialize(self, obj) -> bytes:
return dumps(obj, **self.dump_options)
<|code_end|>
, determine the next line of code. You have imports:
from typing import Any
from cbor2 import CBOREncodeTypeError, CBORTag, dumps, loads
from ..abc import Serializer
from ..marshalling import marshal_object, unmarshal_object
import attrs
and context (class names, function names, or code) available:
# Path: src/apscheduler/abc.py
# class Serializer(metaclass=ABCMeta):
# __slots__ = ()
#
# @abstractmethod
# def serialize(self, obj) -> bytes:
# pass
#
# def serialize_to_unicode(self, obj) -> str:
# return b64encode(self.serialize(obj)).decode('ascii')
#
# @abstractmethod
# def deserialize(self, serialized: bytes):
# pass
#
# def deserialize_from_unicode(self, serialized: str):
# return self.deserialize(b64decode(serialized))
#
# Path: src/apscheduler/marshalling.py
# def marshal_object(obj) -> tuple[str, Any]:
# return f'{obj.__class__.__module__}:{obj.__class__.__qualname__}', obj.__getstate__()
#
# def unmarshal_object(ref: str, state):
# cls = callable_from_ref(ref)
# instance = cls.__new__(cls)
# instance.__setstate__(state)
# return instance
. Output only the next line. | def deserialize(self, serialized: bytes): |
Here is a snippet: <|code_start|>
@attrs.define(kw_only=True, eq=False)
class CBORSerializer(Serializer):
type_tag: int = 4664
dump_options: dict[str, Any] = attrs.field(factory=dict)
load_options: dict[str, Any] = attrs.field(factory=dict)
def __attrs_post_init__(self):
self.dump_options.setdefault('default', self._default_hook)
self.load_options.setdefault('tag_hook', self._tag_hook)
def _default_hook(self, encoder, value):
if hasattr(value, '__getstate__'):
marshalled = marshal_object(value)
encoder.encode(CBORTag(self.type_tag, marshalled))
else:
raise CBOREncodeTypeError(f'cannot serialize type {value.__class__.__name__}')
def _tag_hook(self, decoder, tag: CBORTag, shareable_index: int = None):
if tag.tag == self.type_tag:
cls_ref, state = tag.value
return unmarshal_object(cls_ref, state)
def serialize(self, obj) -> bytes:
return dumps(obj, **self.dump_options)
def deserialize(self, serialized: bytes):
<|code_end|>
. Write the next line using the current file imports:
from typing import Any
from cbor2 import CBOREncodeTypeError, CBORTag, dumps, loads
from ..abc import Serializer
from ..marshalling import marshal_object, unmarshal_object
import attrs
and context from other files:
# Path: src/apscheduler/abc.py
# class Serializer(metaclass=ABCMeta):
# __slots__ = ()
#
# @abstractmethod
# def serialize(self, obj) -> bytes:
# pass
#
# def serialize_to_unicode(self, obj) -> str:
# return b64encode(self.serialize(obj)).decode('ascii')
#
# @abstractmethod
# def deserialize(self, serialized: bytes):
# pass
#
# def deserialize_from_unicode(self, serialized: str):
# return self.deserialize(b64decode(serialized))
#
# Path: src/apscheduler/marshalling.py
# def marshal_object(obj) -> tuple[str, Any]:
# return f'{obj.__class__.__module__}:{obj.__class__.__qualname__}', obj.__getstate__()
#
# def unmarshal_object(ref: str, state):
# cls = callable_from_ref(ref)
# instance = cls.__new__(cls)
# instance.__setstate__(state)
# return instance
, which may include functions, classes, or code. Output only the next line. | return loads(serialized, **self.load_options) |
Predict the next line after this snippet: <|code_start|>from __future__ import annotations
@attrs.define(kw_only=True, eq=False)
class PickleSerializer(Serializer):
<|code_end|>
using the current file's imports:
from pickle import dumps, loads
from ..abc import Serializer
import attrs
and any relevant context from other files:
# Path: src/apscheduler/abc.py
# class Serializer(metaclass=ABCMeta):
# __slots__ = ()
#
# @abstractmethod
# def serialize(self, obj) -> bytes:
# pass
#
# def serialize_to_unicode(self, obj) -> str:
# return b64encode(self.serialize(obj)).decode('ascii')
#
# @abstractmethod
# def deserialize(self, serialized: bytes):
# pass
#
# def deserialize_from_unicode(self, serialized: str):
# return self.deserialize(b64decode(serialized))
. Output only the next line. | protocol: int = 4 |
Predict the next line for this snippet: <|code_start|>from __future__ import annotations
if sys.version_info >= (3, 9):
else:
<|code_end|>
with the help of current file imports:
import sys
import attrs
from datetime import date, datetime, timedelta, timezone, tzinfo
from typing import Any
from attrs import Attribute
from tzlocal import get_localzone
from .abc import Trigger
from .exceptions import DeserializationError
from zoneinfo import ZoneInfo
from backports.zoneinfo import ZoneInfo
and context from other files:
# Path: src/apscheduler/abc.py
# class Trigger(Iterator[datetime], metaclass=ABCMeta):
# """Abstract base class that defines the interface that every trigger must implement."""
#
# __slots__ = ()
#
# @abstractmethod
# def next(self) -> datetime | None:
# """
# Return the next datetime to fire on.
#
# If no such datetime can be calculated, ``None`` is returned.
# :raises apscheduler.exceptions.MaxIterationsReached:
# """
#
# @abstractmethod
# def __getstate__(self):
# """Return the (JSON compatible) serializable state of the trigger."""
#
# @abstractmethod
# def __setstate__(self, state):
# """Initialize an empty instance from an existing state."""
#
# def __iter__(self):
# return self
#
# def __next__(self) -> datetime:
# dateval = self.next()
# if dateval is None:
# raise StopIteration
# else:
# return dateval
#
# Path: src/apscheduler/exceptions.py
# class DeserializationError(Exception):
# """Raised when a serializer fails to deserialize the given object."""
, which may contain function names, class names, or code. Output only the next line. | def as_int(value) -> int | None: |
Given snippet: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np
and context:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
which might include code, classes, or functions. Output only the next line. | self.f_tree_mutation_log = f_tree_mutation_log |
Continue the code snippet: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
self.f_tree_mutation_log = f_tree_mutation_log
<|code_end|>
. Use current file imports:
from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np
and context (classes, functions, or code) from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
. Output only the next line. | self.f_model_log = f_model_log |
Given the following code snippet before the placeholder: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
self.f_tree_mutation_log = f_tree_mutation_log
self.f_model_log = f_model_log
self.f_in_sample_prediction_log = f_in_sample_prediction_log
def __getitem__(self, item: str):
if item == "Tree":
return self.f_tree_mutation_log
if item == "Model":
<|code_end|>
, predict the next line using imports from the current file:
from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np
and context including class names, function names, and sometimes code from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
. Output only the next line. | return self.f_model_log |
Given snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
which might include code, classes, or functions. Output only the next line. | self.assertNotIn(proposal.updated_node, self.tree.nodes) |
Next line prediction: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
<|code_end|>
. Use current file imports:
(import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode)
and context including class names, function names, or small code snippets from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | class TestGrowTreeMutationProposer(unittest.TestCase): |
Predict the next line for this snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
<|code_end|>
with the help of current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
, which may contain function names, class names, or code. Output only the next line. | self.assertIn(proposal.existing_node, self.tree.nodes) |
Continue the code snippet: <|code_start|> self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
class TestGrowTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": np.random.normal(size=1000)}), np.array(np.random.normal(size=1000)))
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_grow_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
<|code_end|>
. Use current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context (classes, functions, or code) from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | def test_types(self): |
Predict the next line after this snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
<|code_end|>
using the current file's imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and any relevant context from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | class TestGrowTreeMutationProposer(unittest.TestCase): |
Next line prediction: <|code_start|> def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
class TestGrowTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": np.random.normal(size=1000)}), np.array(np.random.normal(size=1000)))
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_grow_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_grow_mutation(self.tree)
self.assertIsInstance(proposal.updated_node, DecisionNode)
self.assertIsInstance(proposal.updated_node.left_child, LeafNode)
self.assertIsInstance(proposal.updated_node.right_child, LeafNode)
self.assertIsInstance(proposal.existing_node, LeafNode)
<|code_end|>
. Use current file imports:
(import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode)
and context including class names, function names, or small code snippets from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | if __name__ == '__main__': |
Predict the next line for this snippet: <|code_start|>
class SigmaSampler(Sampler):
def step(self, model: Model, sigma: Sigma) -> float:
sample_value = self.sample(model, sigma)
sigma.set_value(sample_value)
return sample_value
@staticmethod
def sample(model: Model, sigma: Sigma) -> float:
<|code_end|>
with the help of current file imports:
import numpy as np
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.sigma import Sigma
and context from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/sigma.py
# class Sigma:
# """
# A representation of the sigma term in the model.
# Specifically, this is the sigma of y itself, i.e. the sigma in
# y ~ Normal(sum_of_trees, sigma)
#
# The default prior is an inverse gamma distribution on the variance
# The parametrization is slightly different to the numpy gamma version, with the scale parameter inverted
#
# Parameters
# ----------
# alpha - the shape of the prior
# beta - the scale of the prior
# scaling_factor - the range of the original distribution
# needed to rescale the variance into the original scale rather than on (-0.5, 0.5)
#
# """
#
# def __init__(self, alpha: float, beta: float, scaling_factor: float):
# self.alpha = alpha
# self.beta = beta
# self._current_value = 1.0
# self.scaling_factor = scaling_factor
#
# def set_value(self, value: float) -> None:
# self._current_value = value
#
# def current_value(self) -> float:
# return self._current_value
#
# def current_unnormalized_value(self) -> float:
# return self.current_value() * self.scaling_factor
, which may contain function names, class names, or code. Output only the next line. | posterior_alpha = sigma.alpha + (model.data.X.n_obsv / 2.) |
Given the code snippet: <|code_start|>
class SigmaSampler(Sampler):
def step(self, model: Model, sigma: Sigma) -> float:
sample_value = self.sample(model, sigma)
sigma.set_value(sample_value)
return sample_value
@staticmethod
def sample(model: Model, sigma: Sigma) -> float:
posterior_alpha = sigma.alpha + (model.data.X.n_obsv / 2.)
posterior_beta = sigma.beta + (0.5 * (np.sum(np.square(model.residuals()))))
draw = np.power(np.random.gamma(posterior_alpha, 1./posterior_beta), -0.5)
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.sigma import Sigma
and context (functions, classes, or occasionally code) from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/sigma.py
# class Sigma:
# """
# A representation of the sigma term in the model.
# Specifically, this is the sigma of y itself, i.e. the sigma in
# y ~ Normal(sum_of_trees, sigma)
#
# The default prior is an inverse gamma distribution on the variance
# The parametrization is slightly different to the numpy gamma version, with the scale parameter inverted
#
# Parameters
# ----------
# alpha - the shape of the prior
# beta - the scale of the prior
# scaling_factor - the range of the original distribution
# needed to rescale the variance into the original scale rather than on (-0.5, 0.5)
#
# """
#
# def __init__(self, alpha: float, beta: float, scaling_factor: float):
# self.alpha = alpha
# self.beta = beta
# self._current_value = 1.0
# self.scaling_factor = scaling_factor
#
# def set_value(self, value: float) -> None:
# self._current_value = value
#
# def current_value(self) -> float:
# return self._current_value
#
# def current_unnormalized_value(self) -> float:
# return self.current_value() * self.scaling_factor
. Output only the next line. | return draw |
Using the snippet: <|code_start|>
def run(alpha, beta, n_trees):
x = np.random.normal(0, 1, size=3000)
X = pd.DataFrame(x)
y = np.random.normal(0, 0.1, size=3000) + 2 * x + np.sin(x)
plt.scatter(x, y)
<|code_end|>
, determine the next line of code. You have imports:
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from bartpy.extensions.baseestimator import ResidualBART
from datetime import datetime as dt
and context (class names, function names, or code) available:
# Path: bartpy/extensions/baseestimator.py
# class ResidualBART(SklearnModel):
#
# def __init__(self,
# base_estimator: RegressorMixin = None,
# **kwargs):
#
# if base_estimator is not None:
# self.base_estimator = clone(base_estimator)
# else:
# base_estimator = LinearRegression()
# self.base_estimator = base_estimator
# super().__init__(**kwargs)
#
# def fit(self, X: np.ndarray, y: np.ndarray) -> 'ResidualBART':
# self.base_estimator.fit(X, y)
# SklearnModel.fit(self, X, y - self.base_estimator.predict(X))
# return self
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is None:
# X = self.data.X
# sm_prediction = self.base_estimator.predict(X)
# bart_prediction = SklearnModel.predict(self, X)
# return sm_prediction + bart_prediction
. Output only the next line. | plt.show() |
Next line prediction: <|code_start|> n_trees=50):
warnings.simplefilter("error", UserWarning)
x = np.linspace(0, 5, size)
X = pd.DataFrame(x)
y = np.random.normal(0, 0.1, size=size) + np.sin(x)
model = ResidualBART(
n_samples=100,
n_burn=50,
n_trees=n_trees,
alpha=alpha,
beta=beta,
n_jobs=1,
n_chains=1)
X_train, X_test, y_train, y_test = train_test_split(X,
y,
test_size=0.33,
random_state=42,
shuffle=True)
model.fit(X_train, y_train)
y_pred = model.predict(X_test)
plt.scatter(y_test, y_pred)
plt.show()
rmse = np.sqrt(np.sum(np.square(y_test - y_pred)))
<|code_end|>
. Use current file imports:
(import pandas as pd
import numpy as np
import warnings
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from bartpy.extensions.baseestimator import ResidualBART)
and context including class names, function names, or small code snippets from other files:
# Path: bartpy/extensions/baseestimator.py
# class ResidualBART(SklearnModel):
#
# def __init__(self,
# base_estimator: RegressorMixin = None,
# **kwargs):
#
# if base_estimator is not None:
# self.base_estimator = clone(base_estimator)
# else:
# base_estimator = LinearRegression()
# self.base_estimator = base_estimator
# super().__init__(**kwargs)
#
# def fit(self, X: np.ndarray, y: np.ndarray) -> 'ResidualBART':
# self.base_estimator.fit(X, y)
# SklearnModel.fit(self, X, y - self.base_estimator.predict(X))
# return self
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is None:
# X = self.data.X
# sm_prediction = self.base_estimator.predict(X)
# bart_prediction = SklearnModel.predict(self, X)
# return sm_prediction + bart_prediction
. Output only the next line. | print(rmse) |
Given snippet: <|code_start|>
def test_most_recent_split(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
class TestCombinedCondition(unittest.TestCase):
def setUp(self):
self.X = np.array([1, 2, 4, 6, 3, 5]).reshape(6, 1)
def test_single_condition(self):
condition = SplitCondition(0, 3, gt)
combined_condition = CombinedCondition([0], [condition])
self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, True, False, True])
def test_multiple_conditions(self):
conditions = [
SplitCondition(0, 2, gt),
SplitCondition(0, 5, le)
]
combined_condition = CombinedCondition([0], conditions)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and context:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
which might include code, classes, or functions. Output only the next line. | self.assertEqual(combined_condition.variables[0].min_value, 2) |
Predict the next line for this snippet: <|code_start|> updated_split = split + first_left_condition + second_right_condition
conditioned_data = updated_split.data
self.assertListEqual([2, 3], list(conditioned_data.X.get_column(0)))
def test_most_recent_split(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
class TestCombinedCondition(unittest.TestCase):
def setUp(self):
self.X = np.array([1, 2, 4, 6, 3, 5]).reshape(6, 1)
def test_single_condition(self):
condition = SplitCondition(0, 3, gt)
combined_condition = CombinedCondition([0], [condition])
self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, True, False, True])
def test_multiple_conditions(self):
conditions = [
SplitCondition(0, 2, gt),
SplitCondition(0, 5, le)
<|code_end|>
with the help of current file imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and context from other files:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
, which may contain function names, class names, or code. Output only the next line. | ] |
Next line prediction: <|code_start|>
class TestSplit(unittest.TestCase):
def test_null_split_returns_all_values(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}).values, np.array([1, 2]))
split = Split(data)
conditioned_data = split.data
self.assertListEqual(list(data.X.get_column(0)), list(conditioned_data.X.get_column(0)))
def test_single_condition_data(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}).values, np.array([1, 2]))
left_condition, right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
left_split, right_split = Split(data) + left_condition, Split(data) + right_condition
self.assertListEqual([1], list(left_split.data.X.get_column(0)))
self.assertListEqual([2], list(right_split.data.X.get_column(0)))
def test_combined_condition_data(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
conditioned_data = updated_split.data
<|code_end|>
. Use current file imports:
(from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np)
and context including class names, function names, or small code snippets from other files:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
. Output only the next line. | self.assertListEqual([2, 3], list(conditioned_data.X.get_column(0))) |
Given the following code snippet before the placeholder: <|code_start|>
Chain = Mapping[str, Union[List[Any], np.ndarray]]
class ModelSampler(Sampler):
def __init__(self,
schedule: SampleSchedule,
trace_logger_class: Type[TraceLogger]=TraceLogger):
<|code_end|>
, predict the next line using imports from the current file:
from collections import defaultdict
from typing import List, Mapping, Union, Any, Type
from tqdm import tqdm
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.samplers.schedule import SampleSchedule
from bartpy.trace import TraceLogger
import numpy as np
and context including class names, function names, and sometimes code from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/samplers/schedule.py
# class SampleSchedule:
# """
# The SampleSchedule class is responsible for handling the ordering of sampling within a Gibbs step
# It is useful to encapsulate this logic if we wish to expand the model
#
# Parameters
# ----------
# tree_sampler: TreeMutationSampler
# How to sample tree mutation space
# leaf_sampler: LeafNodeSampler
# How to sample leaf node predictions
# sigma_sampler: SigmaSampler
# How to sample sigma values
# """
#
# def __init__(self,
# tree_sampler: TreeMutationSampler,
# leaf_sampler: LeafNodeSampler,
# sigma_sampler: SigmaSampler):
# self.leaf_sampler = leaf_sampler
# self.sigma_sampler = sigma_sampler
# self.tree_sampler = tree_sampler
#
# def steps(self, model: Model) -> Generator[Tuple[Text, Callable[[], float]], None, None]:
# """
# Create a generator of the steps that need to be called to complete a full Gibbs sample
#
# Parameters
# ----------
# model: Model
# The model being sampled
#
# Returns
# -------
# Generator[Callable[[Model], Sampler], None, None]
# A generator a function to be called
# """
# for tree in model.refreshed_trees():
# yield "Tree", lambda: self.tree_sampler.step(model, tree)
#
# for leaf_node in tree.leaf_nodes:
# yield "Node", lambda: self.leaf_sampler.step(model, leaf_node)
# yield "Node", lambda: self.sigma_sampler.step(model, model.sigma)
#
# Path: bartpy/trace.py
# class TraceLogger():
#
# def __init__(self,
# f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
# f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
# f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
# self.f_tree_mutation_log = f_tree_mutation_log
# self.f_model_log = f_model_log
# self.f_in_sample_prediction_log = f_in_sample_prediction_log
#
# def __getitem__(self, item: str):
# if item == "Tree":
# return self.f_tree_mutation_log
# if item == "Model":
# return self.f_model_log
# if item == "In Sample Prediction":
# return self.f_in_sample_prediction_log
# if item in ["Node", "Sigma"]:
# return lambda x: None
# else:
# raise KeyError("No method for key {}".format(item))
. Output only the next line. | self.schedule = schedule |
Given the following code snippet before the placeholder: <|code_start|> org='edX_course_org',
course='edX_course_course',
run='edX_course_run',
key_version=1
)
self.edx_course_key = self.edx_course.course_key()
self.edx_usage_id = 'edx_content_usage_id'
self.content_title = 'content item title'
self.edx_url_base = 'https://edx.example.com'
settings.EDX_URL_BASE = self.edx_url_base
self.canvas_course_id = 256
self.canvas_module_id = 123
self.canvas_user_id = 'value of the LTI user_id field'
self.canvas_api_token = 'Token to access the Canvas API'
self.canvas_api_authorization = CanvasApiAuthorization(
lti_user_id=self.canvas_user_id,
canvas_api_token=self.canvas_api_token,
)
self.canvas_external_tool_id = 12
self.module_list = ['Module 1', 'Module 2']
self.canvas_course = dict(
id=234,
)
self.authorization_mock = self.setup_patch(
'edx2canvas.models.CanvasApiAuthorization.objects.get',
self.canvas_api_authorization
)
self.setup_patch(
<|code_end|>
, predict the next line using imports from the current file:
from django.conf import settings
from unittest import TestCase
from mock import patch, MagicMock
from edx2canvas.models import CanvasApiAuthorization, EdxCourse
and context including class names, function names, and sometimes code from other files:
# Path: edx2canvas/models.py
# class CanvasApiAuthorization(models.Model):
# lti_user_id = models.CharField(max_length=255, unique=True, db_index=True)
# canvas_api_token = models.CharField(max_length=255)
#
# def __unicode__(self):
# return "user: {}, token: {}".format(self.lti_user_id, self.canvas_api_token)
#
# class EdxCourse(models.Model):
# title = models.CharField(max_length=255)
# org = models.CharField(max_length=128)
# course = models.CharField(max_length=32)
# run = models.CharField(max_length=32)
# key_version = models.IntegerField()
#
# def course_key(self):
# if self.key_version == 0:
# return "{}/{}/{}".format(self.org, self.course, self.run)
# if self.key_version == 1:
# return "course-v1:{}+{}+{}".format(self.org, self.course, self.run)
# raise NotImplementedError()
. Output only the next line. | 'edx2canvas.models.EdxCourse.objects.get', |
Predict the next line for this snippet: <|code_start|> self.edx_course_key = self.edx_course.course_key()
self.edx_usage_id = 'edx_content_usage_id'
self.content_title = 'content item title'
self.edx_url_base = 'https://edx.example.com'
settings.EDX_URL_BASE = self.edx_url_base
self.canvas_course_id = 256
self.canvas_module_id = 123
self.canvas_user_id = 'value of the LTI user_id field'
self.canvas_api_token = 'Token to access the Canvas API'
self.canvas_api_authorization = CanvasApiAuthorization(
lti_user_id=self.canvas_user_id,
canvas_api_token=self.canvas_api_token,
)
self.canvas_external_tool_id = 12
self.module_list = ['Module 1', 'Module 2']
self.canvas_course = dict(
id=234,
)
self.authorization_mock = self.setup_patch(
'edx2canvas.models.CanvasApiAuthorization.objects.get',
self.canvas_api_authorization
)
self.setup_patch(
'edx2canvas.models.EdxCourse.objects.get',
self.edx_course
)
self.setup_patch(
'edx2canvas.models.EdxCourse.objects.all',
<|code_end|>
with the help of current file imports:
from django.conf import settings
from unittest import TestCase
from mock import patch, MagicMock
from edx2canvas.models import CanvasApiAuthorization, EdxCourse
and context from other files:
# Path: edx2canvas/models.py
# class CanvasApiAuthorization(models.Model):
# lti_user_id = models.CharField(max_length=255, unique=True, db_index=True)
# canvas_api_token = models.CharField(max_length=255)
#
# def __unicode__(self):
# return "user: {}, token: {}".format(self.lti_user_id, self.canvas_api_token)
#
# class EdxCourse(models.Model):
# title = models.CharField(max_length=255)
# org = models.CharField(max_length=128)
# course = models.CharField(max_length=32)
# run = models.CharField(max_length=32)
# key_version = models.IntegerField()
#
# def course_key(self):
# if self.key_version == 0:
# return "{}/{}/{}".format(self.org, self.course, self.run)
# if self.key_version == 1:
# return "course-v1:{}+{}+{}".format(self.org, self.course, self.run)
# raise NotImplementedError()
, which may contain function names, class names, or code. Output only the next line. | [self.edx_course] |
Given snippet: <|code_start|>
class TestEdxCourseModel(TestCase):
def setUp(self):
super(TestEdxCourseModel, self).setUp()
self.course = EdxCourse(
title='title',
org='org',
course='course',
run='run',
)
def test_v0_course_key(self):
self.course.key_version = 0
self.assertEqual(self.course.course_key(), 'org/course/run')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from unittest import TestCase
from edx2canvas.models import EdxCourse
and context:
# Path: edx2canvas/models.py
# class EdxCourse(models.Model):
# title = models.CharField(max_length=255)
# org = models.CharField(max_length=128)
# course = models.CharField(max_length=32)
# run = models.CharField(max_length=32)
# key_version = models.IntegerField()
#
# def course_key(self):
# if self.key_version == 0:
# return "{}/{}/{}".format(self.org, self.course, self.run)
# if self.key_version == 1:
# return "course-v1:{}+{}+{}".format(self.org, self.course, self.run)
# raise NotImplementedError()
which might include code, classes, or functions. Output only the next line. | def test_v1_course_key(self): |
Given the code snippet: <|code_start|>
class BaseMorrisChart(BaseChart):
def get_data(self):
header = self.header
data = super(BaseMorrisChart, self).get_data()
data_only = data[1:]
rows = []
for row in data_only:
rows.append(dict(zip(header, row)))
return rows
def get_category_key(self):
return self.header[0]
<|code_end|>
, generate the next line using the imports in this file:
from .base import BaseChart
from ..utils import JSONEncoderForHTML
import json
and context (functions, classes, or occasionally code) from other files:
# Path: graphos/renderers/base.py
# class BaseChart(object):
#
# def __init__(self, data_source, html_id=None,
# width=None, height=None,
# options=None, encoder=GraphosEncoder,
# *args, **kwargs):
# """
# : param data_source: :type graphos.sources.base.BaseDataSource subclass instance.
# : param html_id: :type string: Id of the div where you would like chart to be rendered
# : param width: :type integer: Width of the chart div
# : param height: :type integer: Height of the chart div
# """
# self.data_source = data_source
# self.html_id = html_id or get_random_string()
# self.height = height or DEFAULT_HEIGHT
# self.width = width or DEFAULT_WIDTH
# # options could be an object, a list, a dictionary or a nested object or probably anything.
# # Different renderers have different structure for options.
# # Its responsibility of the renderer to read self.options in correct way and to use it in get_js_template.
# self.options = options or {}
# self.header = data_source.get_header()
# self.encoder = encoder
# self.context_data = kwargs
#
# def get_data(self):
# return self.data_source.get_data()
#
# def get_data_json(self):
# return json.dumps(self.get_data(), cls=JSONEncoderForHTML)
#
# def get_options(self):
# options = self.options
# if not 'title' in options:
# options['title'] = "Chart"
# return options
#
# def get_options_json(self):
# return json.dumps(self.get_options(), cls=JSONEncoderForHTML)
#
# def get_template(self):
# return 'graphos/as_html.html'
#
# def get_html_template(self):
# raise GraphosException("Not Implemented")
#
# def get_js_template(self):
# raise GraphosException("Not Implemented")
#
# def get_html_id(self):
# return self.html_id
#
# def get_context_data(self):
# return self.context_data
#
# def as_html(self):
# context = {
# 'html': self.render_html(),
# 'js': self.render_js(),
# }
# return render_to_string(self.get_template(), context)
#
# def render_html(self):
# context = {"chart": self}
# return render_to_string(self.get_html_template(), context)
#
# def render_js(self):
# context = {"chart": self}
# return render_to_string(self.get_js_template(), context)
#
# def zip_list(self, *args):
# rv = zip(*args)
# if sys.version_info < (3,0):
# return rv
# return list(rv)
#
# Path: graphos/utils.py
# class JSONEncoderForHTML(json.JSONEncoder):
# """An encoder that produces JSON safe to embed in HTML.
# To embed JSON content in, say, a script tag on a web page, the
# characters &, < and > should be escaped. They cannot be escaped
# with the usual entities (e.g. &) because they are not expanded
# within <script> tags.
# """
# def default(self, obj):
# # Taken from https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py
# # For Date Time string spec, see ECMA 262
# # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
# if isinstance(obj, Promise):
# return force_text(obj)
# elif isinstance(obj, datetime.datetime):
# representation = obj.isoformat()
# if representation.endswith('+00:00'):
# representation = representation[:-6] + 'Z'
# return representation
# elif isinstance(obj, datetime.date):
# return obj.isoformat()
# elif isinstance(obj, datetime.time):
# if timezone and timezone.is_aware(obj):
# raise ValueError("JSON can't represent timezone-aware times.")
# representation = obj.isoformat()
# if obj.microsecond:
# representation = representation[:12]
# return representation
# elif isinstance(obj, decimal.Decimal):
# # Serializers will coerce decimals to strings by default.
# return float(obj)
# elif isinstance(obj, uuid.UUID):
# return six.text_type(obj)
# elif isinstance(obj, QuerySet):
# return tuple(obj)
# elif isinstance(obj, six.binary_type):
# # Best-effort for binary blobs. See #4187.
# return obj.decode('utf-8')
# elif hasattr(obj, 'tolist'):
# # Numpy arrays and array scalars.
# return obj.tolist()
# elif hasattr(obj, '__getitem__'):
# try:
# return dict(obj)
# except:
# pass
# elif hasattr(obj, '__iter__'):
# return tuple(item for item in obj)
# return super(JSONEncoderForHTML, self).default(obj)
#
#
# def encode(self, o):
# # Override JSONEncoder.encode because it has hacks for
# # performance that make things more complicated.
# chunks = self.iterencode(o, True)
# if self.ensure_ascii:
# return ''.join(chunks)
# else:
# return u''.join(chunks)
#
# def iterencode(self, o, _one_shot=False):
# try:
# chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
# except TypeError:
# # for python 2.6 compatibility
# chunks = super(JSONEncoderForHTML, self).iterencode(o)
# for chunk in chunks:
# chunk = chunk.replace('&', '&')
# chunk = chunk.replace('<', '<')
# chunk = chunk.replace('>', '>')
# yield chunk
. Output only the next line. | def get_y_keys(self): |
Next line prediction: <|code_start|>
class BaseMorrisChart(BaseChart):
def get_data(self):
header = self.header
data = super(BaseMorrisChart, self).get_data()
data_only = data[1:]
<|code_end|>
. Use current file imports:
(from .base import BaseChart
from ..utils import JSONEncoderForHTML
import json)
and context including class names, function names, or small code snippets from other files:
# Path: graphos/renderers/base.py
# class BaseChart(object):
#
# def __init__(self, data_source, html_id=None,
# width=None, height=None,
# options=None, encoder=GraphosEncoder,
# *args, **kwargs):
# """
# : param data_source: :type graphos.sources.base.BaseDataSource subclass instance.
# : param html_id: :type string: Id of the div where you would like chart to be rendered
# : param width: :type integer: Width of the chart div
# : param height: :type integer: Height of the chart div
# """
# self.data_source = data_source
# self.html_id = html_id or get_random_string()
# self.height = height or DEFAULT_HEIGHT
# self.width = width or DEFAULT_WIDTH
# # options could be an object, a list, a dictionary or a nested object or probably anything.
# # Different renderers have different structure for options.
# # Its responsibility of the renderer to read self.options in correct way and to use it in get_js_template.
# self.options = options or {}
# self.header = data_source.get_header()
# self.encoder = encoder
# self.context_data = kwargs
#
# def get_data(self):
# return self.data_source.get_data()
#
# def get_data_json(self):
# return json.dumps(self.get_data(), cls=JSONEncoderForHTML)
#
# def get_options(self):
# options = self.options
# if not 'title' in options:
# options['title'] = "Chart"
# return options
#
# def get_options_json(self):
# return json.dumps(self.get_options(), cls=JSONEncoderForHTML)
#
# def get_template(self):
# return 'graphos/as_html.html'
#
# def get_html_template(self):
# raise GraphosException("Not Implemented")
#
# def get_js_template(self):
# raise GraphosException("Not Implemented")
#
# def get_html_id(self):
# return self.html_id
#
# def get_context_data(self):
# return self.context_data
#
# def as_html(self):
# context = {
# 'html': self.render_html(),
# 'js': self.render_js(),
# }
# return render_to_string(self.get_template(), context)
#
# def render_html(self):
# context = {"chart": self}
# return render_to_string(self.get_html_template(), context)
#
# def render_js(self):
# context = {"chart": self}
# return render_to_string(self.get_js_template(), context)
#
# def zip_list(self, *args):
# rv = zip(*args)
# if sys.version_info < (3,0):
# return rv
# return list(rv)
#
# Path: graphos/utils.py
# class JSONEncoderForHTML(json.JSONEncoder):
# """An encoder that produces JSON safe to embed in HTML.
# To embed JSON content in, say, a script tag on a web page, the
# characters &, < and > should be escaped. They cannot be escaped
# with the usual entities (e.g. &) because they are not expanded
# within <script> tags.
# """
# def default(self, obj):
# # Taken from https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py
# # For Date Time string spec, see ECMA 262
# # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
# if isinstance(obj, Promise):
# return force_text(obj)
# elif isinstance(obj, datetime.datetime):
# representation = obj.isoformat()
# if representation.endswith('+00:00'):
# representation = representation[:-6] + 'Z'
# return representation
# elif isinstance(obj, datetime.date):
# return obj.isoformat()
# elif isinstance(obj, datetime.time):
# if timezone and timezone.is_aware(obj):
# raise ValueError("JSON can't represent timezone-aware times.")
# representation = obj.isoformat()
# if obj.microsecond:
# representation = representation[:12]
# return representation
# elif isinstance(obj, decimal.Decimal):
# # Serializers will coerce decimals to strings by default.
# return float(obj)
# elif isinstance(obj, uuid.UUID):
# return six.text_type(obj)
# elif isinstance(obj, QuerySet):
# return tuple(obj)
# elif isinstance(obj, six.binary_type):
# # Best-effort for binary blobs. See #4187.
# return obj.decode('utf-8')
# elif hasattr(obj, 'tolist'):
# # Numpy arrays and array scalars.
# return obj.tolist()
# elif hasattr(obj, '__getitem__'):
# try:
# return dict(obj)
# except:
# pass
# elif hasattr(obj, '__iter__'):
# return tuple(item for item in obj)
# return super(JSONEncoderForHTML, self).default(obj)
#
#
# def encode(self, o):
# # Override JSONEncoder.encode because it has hacks for
# # performance that make things more complicated.
# chunks = self.iterencode(o, True)
# if self.ensure_ascii:
# return ''.join(chunks)
# else:
# return u''.join(chunks)
#
# def iterencode(self, o, _one_shot=False):
# try:
# chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
# except TypeError:
# # for python 2.6 compatibility
# chunks = super(JSONEncoderForHTML, self).iterencode(o)
# for chunk in chunks:
# chunk = chunk.replace('&', '&')
# chunk = chunk.replace('<', '<')
# chunk = chunk.replace('>', '>')
# yield chunk
. Output only the next line. | rows = [] |
Next line prediction: <|code_start|>
class BaseChart(object):
def __init__(self, data_source, html_id=None,
width=None, height=None,
options=None, encoder=GraphosEncoder,
<|code_end|>
. Use current file imports:
(import json
import sys
from django.template.loader import render_to_string
from ..exceptions import GraphosException
from ..utils import DEFAULT_HEIGHT, DEFAULT_WIDTH, get_random_string, JSONEncoderForHTML
from ..encoders import GraphosEncoder)
and context including class names, function names, or small code snippets from other files:
# Path: graphos/utils.py
# DEFAULT_HEIGHT = 400
#
# DEFAULT_WIDTH = 800
#
# def get_random_string():
# random_letter = lambda: random.choice(string.ascii_letters)
# random_string = "".join([random_letter()
# for el in range(10)])
# return random_string
#
# class JSONEncoderForHTML(json.JSONEncoder):
# """An encoder that produces JSON safe to embed in HTML.
# To embed JSON content in, say, a script tag on a web page, the
# characters &, < and > should be escaped. They cannot be escaped
# with the usual entities (e.g. &) because they are not expanded
# within <script> tags.
# """
# def default(self, obj):
# # Taken from https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py
# # For Date Time string spec, see ECMA 262
# # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
# if isinstance(obj, Promise):
# return force_text(obj)
# elif isinstance(obj, datetime.datetime):
# representation = obj.isoformat()
# if representation.endswith('+00:00'):
# representation = representation[:-6] + 'Z'
# return representation
# elif isinstance(obj, datetime.date):
# return obj.isoformat()
# elif isinstance(obj, datetime.time):
# if timezone and timezone.is_aware(obj):
# raise ValueError("JSON can't represent timezone-aware times.")
# representation = obj.isoformat()
# if obj.microsecond:
# representation = representation[:12]
# return representation
# elif isinstance(obj, decimal.Decimal):
# # Serializers will coerce decimals to strings by default.
# return float(obj)
# elif isinstance(obj, uuid.UUID):
# return six.text_type(obj)
# elif isinstance(obj, QuerySet):
# return tuple(obj)
# elif isinstance(obj, six.binary_type):
# # Best-effort for binary blobs. See #4187.
# return obj.decode('utf-8')
# elif hasattr(obj, 'tolist'):
# # Numpy arrays and array scalars.
# return obj.tolist()
# elif hasattr(obj, '__getitem__'):
# try:
# return dict(obj)
# except:
# pass
# elif hasattr(obj, '__iter__'):
# return tuple(item for item in obj)
# return super(JSONEncoderForHTML, self).default(obj)
#
#
# def encode(self, o):
# # Override JSONEncoder.encode because it has hacks for
# # performance that make things more complicated.
# chunks = self.iterencode(o, True)
# if self.ensure_ascii:
# return ''.join(chunks)
# else:
# return u''.join(chunks)
#
# def iterencode(self, o, _one_shot=False):
# try:
# chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
# except TypeError:
# # for python 2.6 compatibility
# chunks = super(JSONEncoderForHTML, self).iterencode(o)
# for chunk in chunks:
# chunk = chunk.replace('&', '\u0026')
# chunk = chunk.replace('<', '\u003c')
# chunk = chunk.replace('>', '\u003e')
# yield chunk
#
# Path: graphos/encoders.py
# class GraphosEncoder(DjangoJSONEncoder):
# pass
. Output only the next line. | *args, **kwargs): |
Continue the code snippet: <|code_start|>
class BaseChart(object):
def __init__(self, data_source, html_id=None,
width=None, height=None,
options=None, encoder=GraphosEncoder,
<|code_end|>
. Use current file imports:
import json
import sys
from django.template.loader import render_to_string
from ..exceptions import GraphosException
from ..utils import DEFAULT_HEIGHT, DEFAULT_WIDTH, get_random_string, JSONEncoderForHTML
from ..encoders import GraphosEncoder
and context (classes, functions, or code) from other files:
# Path: graphos/utils.py
# DEFAULT_HEIGHT = 400
#
# DEFAULT_WIDTH = 800
#
# def get_random_string():
# random_letter = lambda: random.choice(string.ascii_letters)
# random_string = "".join([random_letter()
# for el in range(10)])
# return random_string
#
# class JSONEncoderForHTML(json.JSONEncoder):
# """An encoder that produces JSON safe to embed in HTML.
# To embed JSON content in, say, a script tag on a web page, the
# characters &, < and > should be escaped. They cannot be escaped
# with the usual entities (e.g. &) because they are not expanded
# within <script> tags.
# """
# def default(self, obj):
# # Taken from https://github.com/tomchristie/django-rest-framework/blob/master/rest_framework/utils/encoders.py
# # For Date Time string spec, see ECMA 262
# # http://ecma-international.org/ecma-262/5.1/#sec-15.9.1.15
# if isinstance(obj, Promise):
# return force_text(obj)
# elif isinstance(obj, datetime.datetime):
# representation = obj.isoformat()
# if representation.endswith('+00:00'):
# representation = representation[:-6] + 'Z'
# return representation
# elif isinstance(obj, datetime.date):
# return obj.isoformat()
# elif isinstance(obj, datetime.time):
# if timezone and timezone.is_aware(obj):
# raise ValueError("JSON can't represent timezone-aware times.")
# representation = obj.isoformat()
# if obj.microsecond:
# representation = representation[:12]
# return representation
# elif isinstance(obj, decimal.Decimal):
# # Serializers will coerce decimals to strings by default.
# return float(obj)
# elif isinstance(obj, uuid.UUID):
# return six.text_type(obj)
# elif isinstance(obj, QuerySet):
# return tuple(obj)
# elif isinstance(obj, six.binary_type):
# # Best-effort for binary blobs. See #4187.
# return obj.decode('utf-8')
# elif hasattr(obj, 'tolist'):
# # Numpy arrays and array scalars.
# return obj.tolist()
# elif hasattr(obj, '__getitem__'):
# try:
# return dict(obj)
# except:
# pass
# elif hasattr(obj, '__iter__'):
# return tuple(item for item in obj)
# return super(JSONEncoderForHTML, self).default(obj)
#
#
# def encode(self, o):
# # Override JSONEncoder.encode because it has hacks for
# # performance that make things more complicated.
# chunks = self.iterencode(o, True)
# if self.ensure_ascii:
# return ''.join(chunks)
# else:
# return u''.join(chunks)
#
# def iterencode(self, o, _one_shot=False):
# try:
# chunks = super(JSONEncoderForHTML, self).iterencode(o, _one_shot)
# except TypeError:
# # for python 2.6 compatibility
# chunks = super(JSONEncoderForHTML, self).iterencode(o)
# for chunk in chunks:
# chunk = chunk.replace('&', '\u0026')
# chunk = chunk.replace('<', '\u003c')
# chunk = chunk.replace('>', '\u003e')
# yield chunk
#
# Path: graphos/encoders.py
# class GraphosEncoder(DjangoJSONEncoder):
# pass
. Output only the next line. | *args, **kwargs): |
Predict the next line after this snippet: <|code_start|>
class BaseGChart(BaseChart):
def get_html_template(self):
return "graphos/gchart/html.html"
class LineChart(BaseGChart):
<|code_end|>
using the current file's imports:
from .base import BaseChart
and any relevant context from other files:
# Path: graphos/renderers/base.py
# class BaseChart(object):
#
# def __init__(self, data_source, html_id=None,
# width=None, height=None,
# options=None, encoder=GraphosEncoder,
# *args, **kwargs):
# """
# : param data_source: :type graphos.sources.base.BaseDataSource subclass instance.
# : param html_id: :type string: Id of the div where you would like chart to be rendered
# : param width: :type integer: Width of the chart div
# : param height: :type integer: Height of the chart div
# """
# self.data_source = data_source
# self.html_id = html_id or get_random_string()
# self.height = height or DEFAULT_HEIGHT
# self.width = width or DEFAULT_WIDTH
# # options could be an object, a list, a dictionary or a nested object or probably anything.
# # Different renderers have different structure for options.
# # Its responsibility of the renderer to read self.options in correct way and to use it in get_js_template.
# self.options = options or {}
# self.header = data_source.get_header()
# self.encoder = encoder
# self.context_data = kwargs
#
# def get_data(self):
# return self.data_source.get_data()
#
# def get_data_json(self):
# return json.dumps(self.get_data(), cls=JSONEncoderForHTML)
#
# def get_options(self):
# options = self.options
# if not 'title' in options:
# options['title'] = "Chart"
# return options
#
# def get_options_json(self):
# return json.dumps(self.get_options(), cls=JSONEncoderForHTML)
#
# def get_template(self):
# return 'graphos/as_html.html'
#
# def get_html_template(self):
# raise GraphosException("Not Implemented")
#
# def get_js_template(self):
# raise GraphosException("Not Implemented")
#
# def get_html_id(self):
# return self.html_id
#
# def get_context_data(self):
# return self.context_data
#
# def as_html(self):
# context = {
# 'html': self.render_html(),
# 'js': self.render_js(),
# }
# return render_to_string(self.get_template(), context)
#
# def render_html(self):
# context = {"chart": self}
# return render_to_string(self.get_html_template(), context)
#
# def render_js(self):
# context = {"chart": self}
# return render_to_string(self.get_js_template(), context)
#
# def zip_list(self, *args):
# rv = zip(*args)
# if sys.version_info < (3,0):
# return rv
# return list(rv)
. Output only the next line. | def get_js_template(self): |
Continue the code snippet: <|code_start|>
class BaseYuiChart(BaseChart):
def get_data(self):
data = super(BaseYuiChart, self).get_data()
header = self.header
data_only = data[1:]
rows = []
for row in data_only:
rows.append(dict(zip(header, row)))
return rows
def get_category_key(self):
return self.data_source.get_header()[0]
def get_html_template(self):
return "graphos/yui/html.html"
class LineChart(BaseYuiChart):
def get_js_template(self):
return "graphos/yui/line_chart.html"
<|code_end|>
. Use current file imports:
from .base import BaseChart
and context (classes, functions, or code) from other files:
# Path: graphos/renderers/base.py
# class BaseChart(object):
#
# def __init__(self, data_source, html_id=None,
# width=None, height=None,
# options=None, encoder=GraphosEncoder,
# *args, **kwargs):
# """
# : param data_source: :type graphos.sources.base.BaseDataSource subclass instance.
# : param html_id: :type string: Id of the div where you would like chart to be rendered
# : param width: :type integer: Width of the chart div
# : param height: :type integer: Height of the chart div
# """
# self.data_source = data_source
# self.html_id = html_id or get_random_string()
# self.height = height or DEFAULT_HEIGHT
# self.width = width or DEFAULT_WIDTH
# # options could be an object, a list, a dictionary or a nested object or probably anything.
# # Different renderers have different structure for options.
# # Its responsibility of the renderer to read self.options in correct way and to use it in get_js_template.
# self.options = options or {}
# self.header = data_source.get_header()
# self.encoder = encoder
# self.context_data = kwargs
#
# def get_data(self):
# return self.data_source.get_data()
#
# def get_data_json(self):
# return json.dumps(self.get_data(), cls=JSONEncoderForHTML)
#
# def get_options(self):
# options = self.options
# if not 'title' in options:
# options['title'] = "Chart"
# return options
#
# def get_options_json(self):
# return json.dumps(self.get_options(), cls=JSONEncoderForHTML)
#
# def get_template(self):
# return 'graphos/as_html.html'
#
# def get_html_template(self):
# raise GraphosException("Not Implemented")
#
# def get_js_template(self):
# raise GraphosException("Not Implemented")
#
# def get_html_id(self):
# return self.html_id
#
# def get_context_data(self):
# return self.context_data
#
# def as_html(self):
# context = {
# 'html': self.render_html(),
# 'js': self.render_js(),
# }
# return render_to_string(self.get_template(), context)
#
# def render_html(self):
# context = {"chart": self}
# return render_to_string(self.get_html_template(), context)
#
# def render_js(self):
# context = {"chart": self}
# return render_to_string(self.get_js_template(), context)
#
# def zip_list(self, *args):
# rv = zip(*args)
# if sys.version_info < (3,0):
# return rv
# return list(rv)
. Output only the next line. | def get_chart_type(self): |
Predict the next line after this snippet: <|code_start|>
def get_serieses(self):
data_only = self.get_data()[1:]
serieses = []
for i in range(0, len(self.header)):
current_column = [float(el[i]) for el in data_only]
serieses.append(current_column)
return serieses
def render_js(self):
return ""
class LineChart(BaseMatplotlibChart):
def get_image(self):
fig = plt.figure()
ax = fig.add_subplot(111)
serieses = self.get_serieses()
for i in range(1, len(serieses)):
ax.plot(serieses[0], serieses[i])
ax.xaxis.set_major_formatter(FormatStrFormatter('%d'))
out = StringIO()
plt.savefig(out)
out.seek(0)
return "data:image/png;base64,%s" % base64.encodestring(out.read())
class BarChart(BaseMatplotlibChart):
<|code_end|>
using the current file's imports:
from .base import BaseChart
from matplotlib.ticker import FormatStrFormatter
from StringIO import StringIO
from io import BytesIO as StringIO
import matplotlib
import matplotlib.pyplot as plt
import base64
and any relevant context from other files:
# Path: graphos/renderers/base.py
# class BaseChart(object):
#
# def __init__(self, data_source, html_id=None,
# width=None, height=None,
# options=None, encoder=GraphosEncoder,
# *args, **kwargs):
# """
# : param data_source: :type graphos.sources.base.BaseDataSource subclass instance.
# : param html_id: :type string: Id of the div where you would like chart to be rendered
# : param width: :type integer: Width of the chart div
# : param height: :type integer: Height of the chart div
# """
# self.data_source = data_source
# self.html_id = html_id or get_random_string()
# self.height = height or DEFAULT_HEIGHT
# self.width = width or DEFAULT_WIDTH
# # options could be an object, a list, a dictionary or a nested object or probably anything.
# # Different renderers have different structure for options.
# # Its responsibility of the renderer to read self.options in correct way and to use it in get_js_template.
# self.options = options or {}
# self.header = data_source.get_header()
# self.encoder = encoder
# self.context_data = kwargs
#
# def get_data(self):
# return self.data_source.get_data()
#
# def get_data_json(self):
# return json.dumps(self.get_data(), cls=JSONEncoderForHTML)
#
# def get_options(self):
# options = self.options
# if not 'title' in options:
# options['title'] = "Chart"
# return options
#
# def get_options_json(self):
# return json.dumps(self.get_options(), cls=JSONEncoderForHTML)
#
# def get_template(self):
# return 'graphos/as_html.html'
#
# def get_html_template(self):
# raise GraphosException("Not Implemented")
#
# def get_js_template(self):
# raise GraphosException("Not Implemented")
#
# def get_html_id(self):
# return self.html_id
#
# def get_context_data(self):
# return self.context_data
#
# def as_html(self):
# context = {
# 'html': self.render_html(),
# 'js': self.render_js(),
# }
# return render_to_string(self.get_template(), context)
#
# def render_html(self):
# context = {"chart": self}
# return render_to_string(self.get_html_template(), context)
#
# def render_js(self):
# context = {"chart": self}
# return render_to_string(self.get_js_template(), context)
#
# def zip_list(self, *args):
# rv = zip(*args)
# if sys.version_info < (3,0):
# return rv
# return list(rv)
. Output only the next line. | def get_image(self): |
Predict the next line after this snippet: <|code_start|> and `False` otherwise. `CCS_aligned_alignment`
and `CCS_aligned_target` give the
:py:mod:`dms_tools2.minimap2.Alignment` (or `None`)
and the target (or empty string).
"""
if isinstance(ccslist, collections.Iterable):
col_list = [ccs.df.columns for ccs in ccslist]
assert all([col_list[0].equals(col) for col in col_list]),\
"the CCS.df's in `ccslist` don't have same columns"
df = pandas.concat([ccs.df for ccs in ccslist])
else:
df = ccslist.df
# internal function:
def _align_CCS_both_orientations(df, mapper):
"""Try align CCS both ways, adds columns.
`CCS_aligned`, `CCS_aligned_alignment`, and
`CCS_aligned_target`."""
df_bi = (df.pipe(dms_tools2.pacbio.alignSeqs,
mapper=mapper,
query_col='CCS',
aligned_col='CCS_for_aligned')
.assign(CCS_rev=lambda x: x.CCS.map(
dms_tools2.utils.reverseComplement))
.pipe(dms_tools2.pacbio.alignSeqs,
mapper=mapper,
query_col='CCS_rev',
aligned_col='CCS_rev_aligned')
)
return (df.assign(CCS_aligned=df_bi.CCS_for_aligned |
<|code_end|>
using the current file's imports:
import os
import gzip
import re
import io
import math
import subprocess
import collections
import tempfile
import numbers
import regex
import numpy
import pandas
import pysam
import Bio.SeqFeature
import dms_tools2
import matplotlib.pyplot as plt
import doctest
from dms_tools2.plot import COLOR_BLIND_PALETTE
from dms_tools2.plot import COLOR_BLIND_PALETTE_GRAY
from plotnine import *
and any relevant context from other files:
# Path: dms_tools2/plot.py
# COLOR_BLIND_PALETTE = ["#000000", "#E69F00", "#56B4E9", "#009E73",
# "#F0E442", "#0072B2", "#D55E00", "#CC79A7"]
#
# Path: dms_tools2/plot.py
# COLOR_BLIND_PALETTE_GRAY = ["#999999", "#E69F00", "#56B4E9", "#009E73",
# "#F0E442", "#0072B2", "#D55E00", "#CC79A7"]
. Output only the next line. | df_bi.CCS_rev_aligned) |
Using the snippet: <|code_start|># load modules that are not referenced and we're very
# lazy in this file. As a workaround let's load all
# modules when we're in windows and we are not frozen
# so we should reference all modules when py2exe is
# inspecting us.
#
if sys.platform == 'win32' and not hasattr(sys, 'frozen'):
#import neubot.net.CA # posix only
#import neubot.server # requires PyGeoIP
def run(argv):
# /usr/bin/neubot module ...
del argv[0]
module = argv[0]
if module == "help":
sys.stdout.write("Neubot help -- prints available commands\n")
commands = " ".join(sorted(MODULES.keys()))
lines = textwrap.wrap(commands, 60)
sys.stdout.write("Commands: " + lines[0] + "\n")
for s in lines[1:]:
sys.stdout.write(" " + s + "\n")
sys.stdout.write("Try `neubot CMD --help` for more help on CMD.\n")
sys.exit(0)
utils_modules.modprobe(None, "load_subcommand", MODULES)
<|code_end|>
, determine the next line of code. You have imports:
import sys
import textwrap
import logging
import neubot.agent
import neubot.api.client
import neubot.database.main
import neubot.bittorrent
import neubot.http.client
import neubot.http.server
import neubot.privacy
import neubot.speedtest.client
import neubot.speedtest.client
import neubot.speedtest.server
import neubot.net.stream
from neubot import utils_modules
and context (class names, function names, or code) available:
# Path: neubot/utils_modules.py
# def modprobe(filter, context, message):
. Output only the next line. | if not module in MODULES: |
Given the code snippet: <|code_start|> result['privacy_can_share'] = 0
connection.execute(query, result)
connection.execute("DROP TABLE results;")
connection.execute("""UPDATE config SET value='2.0'
WHERE name='version';""")
connection.commit()
# add uuid to database
def migrate_from__v1_0__to__v1_1(connection):
cursor = connection.cursor()
cursor.execute("SELECT value FROM config WHERE name='version';")
ver = cursor.fetchone()[0]
if ver == "1.0":
logging.info("* Migrating database from version 1.0 to 1.1")
cursor.execute("ALTER TABLE results ADD uuid TEXT;")
cursor.execute("INSERT INTO config VALUES('uuid', :ident);",
{"ident": str(uuid.uuid4())})
cursor.execute("""UPDATE config SET value='1.1'
WHERE name='version';""")
connection.commit()
cursor.close()
MIGRATORS = [
migrate_from__v1_0__to__v1_1,
migrate_from__v1_1__to__v2_0,
migrate_from__v2_0__to__v2_1,
migrate_from__v2_1__to__v3_0,
migrate_from__v3_0__to__v4_0,
migrate_from__v4_0__to__v4_1,
migrate_from__v4_1__to__v4_2,
<|code_end|>
, generate the next line using the imports in this file:
import logging
import sqlite3
import sys
import uuid
from neubot.marshal import unmarshal_object
from neubot.database import _table_utils
and context (functions, classes, or occasionally code) from other files:
# Path: neubot/database/_table_utils.py
# SIMPLE_TYPES = {
# types.StringType : "TEXT",
# types.UnicodeType : "TEXT",
# types.IntType : "INTEGER",
# types.FloatType : "REAL",
# }
# def __check(value):
# def make_create_table(table, template):
# def make_insert_into(table, template):
# def do_insert_into(connection, query, dictobj, template,
# commit=True, override_timestamp=True):
# def make_select(table, template, **kwargs):
# def rename_column_query(table1, template1, table2, template2):
# def rename_column_ntemplate(template, mapping, broken=False):
# def rename_column(connection, table, template, mapping, broken=False):
. Output only the next line. | ] |
Using the snippet: <|code_start|> ('bittorrent.listen', False, 'Run in server mode'),
('bittorrent.negotiate', True, 'Enable negotiate client/server'),
('bittorrent.negotiate.port', 8080, 'Negotiate port'),
('bittorrent.my_id', '', 'Set local PeerId ("" = auto)'),
('bittorrent.numpieces', NUMPIECES, 'Num of pieces in bitfield'),
('bittorrent.piece_len', PIECE_LEN, 'Length of each piece'),
('bittorrent.port', 6881, 'Port to listen/connect to (0 = auto)'),
('bittorrent.watchdog', WATCHDOG, 'Maximum test run-time in seconds'),
)
CONFIG.register_defaults_helper(PROPERTIES)
def register_descriptions():
''' Registers the description of bittorrent variables '''
CONFIG.register_descriptions_helper(PROPERTIES)
def _random_bytes(num):
''' Generates a random string of @num bytes '''
return ''.join([chr(random.randint(32, 126)) for _ in range(num)])
def finalize_conf(conf):
''' Finalize configuration and guess the proper value of all
the undefined variables '''
if not conf['bittorrent.my_id']:
conf['bittorrent.my_id'] = _random_bytes(20)
if not conf['bittorrent.infohash']:
conf['bittorrent.infohash'] = _random_bytes(20)
<|code_end|>
, determine the next line of code. You have imports:
import random
from neubot.net.poller import WATCHDOG
from neubot.config import CONFIG
from neubot.bittorrent import estimate
and context (class names, function names, or code) available:
# Path: neubot/net/poller.py
#
# Path: neubot/config.py
# CONFIG = Config()
. Output only the next line. | if not conf['bittorrent.bytes.down']: |
Given the code snippet: <|code_start|> ('bittorrent.watchdog', WATCHDOG, 'Maximum test run-time in seconds'),
)
CONFIG.register_defaults_helper(PROPERTIES)
def register_descriptions():
''' Registers the description of bittorrent variables '''
CONFIG.register_descriptions_helper(PROPERTIES)
def _random_bytes(num):
''' Generates a random string of @num bytes '''
return ''.join([chr(random.randint(32, 126)) for _ in range(num)])
def finalize_conf(conf):
''' Finalize configuration and guess the proper value of all
the undefined variables '''
if not conf['bittorrent.my_id']:
conf['bittorrent.my_id'] = _random_bytes(20)
if not conf['bittorrent.infohash']:
conf['bittorrent.infohash'] = _random_bytes(20)
if not conf['bittorrent.bytes.down']:
conf['bittorrent.bytes.down'] = estimate.DOWNLOAD
if not conf['bittorrent.bytes.up']:
conf['bittorrent.bytes.up'] = estimate.UPLOAD
if not conf['bittorrent.address']:
if not conf['bittorrent.listen']:
<|code_end|>
, generate the next line using the imports in this file:
import random
from neubot.net.poller import WATCHDOG
from neubot.config import CONFIG
from neubot.bittorrent import estimate
and context (functions, classes, or occasionally code) from other files:
# Path: neubot/net/poller.py
#
# Path: neubot/config.py
# CONFIG = Config()
. Output only the next line. | conf['bittorrent.address'] = 'master.neubot.org master2.neubot.org' |
Using the snippet: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
class _TaskWrapper(object):
def __init__(self, weblab, func, unique):
self._func = func
self._unique = unique
self._name = func.__name__
if len(self._name) == len(create_token()):
<|code_end|>
, determine the next line of code. You have imports:
import sys
import time
import threading
import traceback
import six
import redis
from werkzeug.local import LocalProxy
from werkzeug.datastructures import ImmutableDict
from flask import g
from weblablib.utils import create_token, _current_session_id, _current_weblab
from weblablib.exc import AlreadyRunningError, TimeoutError
and context (class names, function names, or code) available:
# Path: weblablib/utils.py
# def create_token(size=None):
# if size is None:
# size = 32
# tok = os.urandom(size)
# safe_token = base64.urlsafe_b64encode(tok).strip().replace(b'=', b'').replace(b'-', b'_')
# safe_token = safe_token.decode('utf8')
# return safe_token
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# Path: weblablib/exc.py
# class AlreadyRunningError(WebLabError):
# """When creating a task (:meth:`WebLab.task`) with ``unique='global'`` or ``unique='user'``, the second thread/process attempting to run the same method will obtain this error"""
# pass
#
# class TimeoutError(WebLabError):
# """When joining (:meth:`WebLabTask.join`) a task with a timeout, this error may arise"""
# pass
. Output only the next line. | raise ValueError("The function '{}' has an invalid name: the number of characters " |
Using the snippet: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
class _TaskWrapper(object):
def __init__(self, weblab, func, unique):
self._func = func
self._unique = unique
self._name = func.__name__
if len(self._name) == len(create_token()):
raise ValueError("The function '{}' has an invalid name: the number of characters "
<|code_end|>
, determine the next line of code. You have imports:
import sys
import time
import threading
import traceback
import six
import redis
from werkzeug.local import LocalProxy
from werkzeug.datastructures import ImmutableDict
from flask import g
from weblablib.utils import create_token, _current_session_id, _current_weblab
from weblablib.exc import AlreadyRunningError, TimeoutError
and context (class names, function names, or code) available:
# Path: weblablib/utils.py
# def create_token(size=None):
# if size is None:
# size = 32
# tok = os.urandom(size)
# safe_token = base64.urlsafe_b64encode(tok).strip().replace(b'=', b'').replace(b'-', b'_')
# safe_token = safe_token.decode('utf8')
# return safe_token
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# Path: weblablib/exc.py
# class AlreadyRunningError(WebLabError):
# """When creating a task (:meth:`WebLab.task`) with ``unique='global'`` or ``unique='user'``, the second thread/process attempting to run the same method will obtain this error"""
# pass
#
# class TimeoutError(WebLabError):
# """When joining (:meth:`WebLabTask.join`) a task with a timeout, this error may arise"""
# pass
. Output only the next line. | "must be higher or lower than this. Otherwise get_task(task_id) " |
Given the following code snippet before the placeholder: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
class _TaskWrapper(object):
def __init__(self, weblab, func, unique):
self._func = func
self._unique = unique
self._name = func.__name__
if len(self._name) == len(create_token()):
raise ValueError("The function '{}' has an invalid name: the number of characters "
"must be higher or lower than this. Otherwise get_task(task_id) "
"could potentially fail".format(func.__name__))
self._weblab = weblab
self._backend = weblab._backend
@property
<|code_end|>
, predict the next line using imports from the current file:
import sys
import time
import threading
import traceback
import six
import redis
from werkzeug.local import LocalProxy
from werkzeug.datastructures import ImmutableDict
from flask import g
from weblablib.utils import create_token, _current_session_id, _current_weblab
from weblablib.exc import AlreadyRunningError, TimeoutError
and context including class names, function names, and sometimes code from other files:
# Path: weblablib/utils.py
# def create_token(size=None):
# if size is None:
# size = 32
# tok = os.urandom(size)
# safe_token = base64.urlsafe_b64encode(tok).strip().replace(b'=', b'').replace(b'-', b'_')
# safe_token = safe_token.decode('utf8')
# return safe_token
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# Path: weblablib/exc.py
# class AlreadyRunningError(WebLabError):
# """When creating a task (:meth:`WebLab.task`) with ``unique='global'`` or ``unique='user'``, the second thread/process attempting to run the same method will obtain this error"""
# pass
#
# class TimeoutError(WebLabError):
# """When joining (:meth:`WebLabTask.join`) a task with a timeout, this error may arise"""
# pass
. Output only the next line. | def func(self): |
Predict the next line after this snippet: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
class _TaskWrapper(object):
def __init__(self, weblab, func, unique):
self._func = func
self._unique = unique
self._name = func.__name__
if len(self._name) == len(create_token()):
raise ValueError("The function '{}' has an invalid name: the number of characters "
"must be higher or lower than this. Otherwise get_task(task_id) "
<|code_end|>
using the current file's imports:
import sys
import time
import threading
import traceback
import six
import redis
from werkzeug.local import LocalProxy
from werkzeug.datastructures import ImmutableDict
from flask import g
from weblablib.utils import create_token, _current_session_id, _current_weblab
from weblablib.exc import AlreadyRunningError, TimeoutError
and any relevant context from other files:
# Path: weblablib/utils.py
# def create_token(size=None):
# if size is None:
# size = 32
# tok = os.urandom(size)
# safe_token = base64.urlsafe_b64encode(tok).strip().replace(b'=', b'').replace(b'-', b'_')
# safe_token = safe_token.decode('utf8')
# return safe_token
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# Path: weblablib/exc.py
# class AlreadyRunningError(WebLabError):
# """When creating a task (:meth:`WebLab.task`) with ``unique='global'`` or ``unique='user'``, the second thread/process attempting to run the same method will obtain this error"""
# pass
#
# class TimeoutError(WebLabError):
# """When joining (:meth:`WebLabTask.join`) a task with a timeout, this error may arise"""
# pass
. Output only the next line. | "could potentially fail".format(func.__name__)) |
Predict the next line for this snippet: <|code_start|> # We're outside a task
self.assertFalse(weblablib.current_task_stopping)
self.weblab.join_tasks(self.current_task, timeout=0.01, stop=True)
# But the counter is still zero
self.assertEquals(self.counter, 0)
global started
started = False
def wait_for_thread():
global started
started = True
task1.join(timeout=3)
background_thread = threading.Thread(target=wait_for_thread)
background_thread.daemon = True
background_thread.start()
initial = time.time()
while not started:
time.sleep(0.05)
if time.time() - initial > 2:
self.fail("Error waiting for thread to start")
# Run the tasks
self.weblab.run_tasks()
background_thread.join(timeout=5)
<|code_end|>
with the help of current file imports:
import os
import sys
import json
import time
import base64
import datetime
import requests
import threading
import six
import flask.cli as flask_cli
import weblablib
import unittest
import weblablib.views as weblablib_views
from StringIO import StringIO
from io import StringIO
from flask import Flask, url_for, render_template_string, g, session
from weblablib.tasks import _TaskRunner
from click.testing import CliRunner
from flask_socketio import SocketIO, SocketIOTestClient
and context from other files:
# Path: weblablib/tasks.py
# class _TaskRunner(threading.Thread):
#
# _instances = []
# _STEPS_WAITING = 20
#
# def __init__(self, number, weblab, app):
# super(_TaskRunner, self).__init__()
# self.name = 'weblab-task-runner-{}'.format(number)
# self.daemon = True
# self.app = app
# self.weblab = weblab
# self._stopping = False
#
# def stop(self):
# self._stopping = True
#
# def run(self):
# _TaskRunner._instances.append(self)
#
# while not self._stopping:
# try:
# with self.app.app_context():
# self.weblab.run_tasks()
# except redis.ConnectionError:
# # In the case of a redis ConnectionError, let's wait a bit more to see if
# # this happens again. It can be that we are just restarting the server
# # and Redis died before, or a Redis upgrade or so.
# traceback.print_exc()
# time.sleep(5)
# except Exception:
# traceback.print_exc()
# continue
#
# for _ in six.moves.range(_TaskRunner._STEPS_WAITING):
# time.sleep(0.05)
# if self._stopping:
# break
, which may contain function names, class names, or code. Output only the next line. | self.assertFalse(background_thread.isAlive()) |
Continue the code snippet: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
def status_time(session_id):
weblab = _current_weblab()
backend = weblab._backend
user = backend.get_user(session_id)
if isinstance(user, ExpiredUser) and user.disposing_resources:
return 2 # Try again in 2 seconds
if user.is_anonymous or not isinstance(user, CurrentUser):
<|code_end|>
. Use current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context (classes, functions, or code) from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
. Output only the next line. | return -1 |
Predict the next line after this snippet: <|code_start|> backend = _current_backend()
if session_id:
if weblab_user.active:
# If there was no data in the beginning
# OR there was data in the beginning and now it is different,
# only then modify the current session
if not hasattr(g, '_initial_data') or g._initial_data != json.dumps(weblab_user.data):
backend.update_data(session_id, weblab_user.data)
return response
def dispose_user(session_id, waiting):
backend = _current_backend()
user = backend.get_user(session_id)
if user.is_anonymous:
raise NotFoundError()
if isinstance(user, CurrentUser):
current_expired_user = user.to_expired_user()
deleted = backend.delete_user(session_id, current_expired_user)
if deleted:
try:
weblab = _current_weblab()
weblab._set_session_id(session_id)
if weblab._on_dispose:
_set_weblab_user_cache(user)
try:
<|code_end|>
using the current file's imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and any relevant context from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
. Output only the next line. | weblab._on_dispose() |
Given the code snippet: <|code_start|>
def status_time(session_id):
weblab = _current_weblab()
backend = weblab._backend
user = backend.get_user(session_id)
if isinstance(user, ExpiredUser) and user.disposing_resources:
return 2 # Try again in 2 seconds
if user.is_anonymous or not isinstance(user, CurrentUser):
return -1
if user.exited:
return -1
if weblab.timeout and weblab.timeout > 0:
# If timeout is set to -1, it will never timeout (unless user exited)
if user.time_without_polling >= weblab.timeout:
return -1
if user.time_left <= 0:
return -1
return min(weblab.poll_interval, int(user.time_left))
def store_initial_weblab_user_data():
session_id = _current_session_id()
if session_id:
backend = _current_backend()
<|code_end|>
, generate the next line using the imports in this file:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context (functions, classes, or occasionally code) from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
. Output only the next line. | current_user = backend.get_user(session_id) |
Given snippet: <|code_start|> # Nothing is triggered in Redis. For this reason, after each request
# we check that the data has changed or not.
#
session_id = _current_session_id()
backend = _current_backend()
if session_id:
if weblab_user.active:
# If there was no data in the beginning
# OR there was data in the beginning and now it is different,
# only then modify the current session
if not hasattr(g, '_initial_data') or g._initial_data != json.dumps(weblab_user.data):
backend.update_data(session_id, weblab_user.data)
return response
def dispose_user(session_id, waiting):
backend = _current_backend()
user = backend.get_user(session_id)
if user.is_anonymous:
raise NotFoundError()
if isinstance(user, CurrentUser):
current_expired_user = user.to_expired_user()
deleted = backend.delete_user(session_id, current_expired_user)
if deleted:
try:
weblab = _current_weblab()
weblab._set_session_id(session_id)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
which might include code, classes, or functions. Output only the next line. | if weblab._on_dispose: |
Here is a snippet: <|code_start|> # If there was no data in the beginning
# OR there was data in the beginning and now it is different,
# only then modify the current session
if not hasattr(g, '_initial_data') or g._initial_data != json.dumps(weblab_user.data):
backend.update_data(session_id, weblab_user.data)
return response
def dispose_user(session_id, waiting):
backend = _current_backend()
user = backend.get_user(session_id)
if user.is_anonymous:
raise NotFoundError()
if isinstance(user, CurrentUser):
current_expired_user = user.to_expired_user()
deleted = backend.delete_user(session_id, current_expired_user)
if deleted:
try:
weblab = _current_weblab()
weblab._set_session_id(session_id)
if weblab._on_dispose:
_set_weblab_user_cache(user)
try:
weblab._on_dispose()
except Exception:
traceback.print_exc()
<|code_end|>
. Write the next line using the current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
, which may include functions, classes, or code. Output only the next line. | update_weblab_user_data(response=None) |
Here is a snippet: <|code_start|># Copyright 2017 onwards LabsLand Experimentia S.L.
# This software is licensed under the GNU AGPL v3:
# GNU Affero General Public License version 3 (see the file LICENSE)
# Read in the documentation about the license
from __future__ import unicode_literals, print_function, division
def status_time(session_id):
weblab = _current_weblab()
backend = weblab._backend
user = backend.get_user(session_id)
if isinstance(user, ExpiredUser) and user.disposing_resources:
return 2 # Try again in 2 seconds
if user.is_anonymous or not isinstance(user, CurrentUser):
return -1
<|code_end|>
. Write the next line using the current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
, which may include functions, classes, or code. Output only the next line. | if user.exited: |
Continue the code snippet: <|code_start|>
def dispose_user(session_id, waiting):
backend = _current_backend()
user = backend.get_user(session_id)
if user.is_anonymous:
raise NotFoundError()
if isinstance(user, CurrentUser):
current_expired_user = user.to_expired_user()
deleted = backend.delete_user(session_id, current_expired_user)
if deleted:
try:
weblab = _current_weblab()
weblab._set_session_id(session_id)
if weblab._on_dispose:
_set_weblab_user_cache(user)
try:
weblab._on_dispose()
except Exception:
traceback.print_exc()
update_weblab_user_data(response=None)
finally:
backend.finished_dispose(session_id)
unfinished_tasks = backend.get_unfinished_tasks(session_id)
for task_id in unfinished_tasks:
unfinished_task = weblab.get_task(task_id)
<|code_end|>
. Use current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context (classes, functions, or code) from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
. Output only the next line. | if unfinished_task: |
Predict the next line for this snippet: <|code_start|> if session_id:
backend = _current_backend()
current_user = backend.get_user(session_id)
if current_user.active:
g._initial_data = json.dumps(current_user.data)
def update_weblab_user_data(response):
# If a developer does:
#
# weblab_user.data["foo"] = "bar"
#
# Nothing is triggered in Redis. For this reason, after each request
# we check that the data has changed or not.
#
session_id = _current_session_id()
backend = _current_backend()
if session_id:
if weblab_user.active:
# If there was no data in the beginning
# OR there was data in the beginning and now it is different,
# only then modify the current session
if not hasattr(g, '_initial_data') or g._initial_data != json.dumps(weblab_user.data):
backend.update_data(session_id, weblab_user.data)
return response
def dispose_user(session_id, waiting):
backend = _current_backend()
user = backend.get_user(session_id)
<|code_end|>
with the help of current file imports:
import time
import json
import traceback
from flask import g
from weblablib.exc import NotFoundError
from weblablib.utils import _current_weblab, _current_backend, _current_session_id
from weblablib.users import ExpiredUser, CurrentUser, weblab_user, _set_weblab_user_cache
and context from other files:
# Path: weblablib/exc.py
# class NotFoundError(WebLabError, KeyError):
# pass
#
# Path: weblablib/utils.py
# def _current_weblab():
# if 'weblab' not in current_app.extensions:
# raise WebLabNotInitializedError("App not initialized with weblab.init_app()")
# return current_app.extensions['weblab']
#
# def _current_backend():
# return _current_weblab()._backend
#
# def _current_session_id():
# return _current_weblab()._session_id()
#
# Path: weblablib/users.py
# def get_weblab_user(cached=True):
# def _set_weblab_user_cache(user):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def _method(self, *args, **kwargs): # pylint: disable=unused-argument, no-self-use
# def active(self):
# def is_anonymous(self):
# def locale(self):
# def data(self):
# def __str__(self):
# def __init__(self, session_id, back, last_poll, max_date, username, username_unique,
# exited, data, locale, full_name, experiment_name, category_name, experiment_id,
# request_client_data, request_server_data, start_date):
# def experiment_name(self):
# def category_name(self):
# def experiment_id(self):
# def full_name(self):
# def locale(self):
# def back(self):
# def last_poll(self):
# def session_id(self):
# def max_date(self):
# def username(self):
# def username_unique(self):
# def exited(self):
# def request_client_data(self):
# def request_server_data(self):
# def start_date(self):
# def add_action(self, session_id, action):
# def store_action(self, session_id, action_id, action): # pylint: disable=no-self-use
# def clean_actions(self, session_id): # pylint: disable=no-self-use
# def __init__(self, user, data, previous_hash=None):
# def initial_hash(self):
# def _get_hash(self, data):
# def store(self):
# def store_if_modified(self):
# def is_modified(self):
# def retrieve(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, data):
# def update_data(self, new_data=_OBJECT):
# def time_without_polling(self):
# def time_left(self):
# def to_expired_user(self):
# def user(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# def __init__(self, *args, **kwargs):
# def data(self):
# def data(self, value):
# def update_data(self, value=None):
# def time_left(self):
# def active(self):
# def is_anonymous(self):
# def __str__(self):
# class WebLabUser(object):
# class AnonymousDataImmutableDict(dict):
# class AnonymousUser(WebLabUser):
# class _CurrentOrExpiredUser(WebLabUser): # pylint: disable=abstract-method
# class DataHolder(dict):
# class CurrentUser(_CurrentOrExpiredUser):
# class ExpiredUser(_CurrentOrExpiredUser):
# _OBJECT = object()
, which may contain function names, class names, or code. Output only the next line. | if user.is_anonymous: |
Using the snippet: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
self.f_tree_mutation_log = f_tree_mutation_log
self.f_model_log = f_model_log
self.f_in_sample_prediction_log = f_in_sample_prediction_log
def __getitem__(self, item: str):
<|code_end|>
, determine the next line of code. You have imports:
from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np
and context (class names, function names, or code) available:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
. Output only the next line. | if item == "Tree": |
Next line prediction: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
self.f_tree_mutation_log = f_tree_mutation_log
self.f_model_log = f_model_log
self.f_in_sample_prediction_log = f_in_sample_prediction_log
def __getitem__(self, item: str):
if item == "Tree":
return self.f_tree_mutation_log
if item == "Model":
return self.f_model_log
<|code_end|>
. Use current file imports:
(from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np)
and context including class names, function names, or small code snippets from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
. Output only the next line. | if item == "In Sample Prediction": |
Based on the snippet: <|code_start|>
class TraceLogger():
def __init__(self,
f_tree_mutation_log: Callable[[TreeMutation], Any]=lambda x: x is not None,
f_model_log: Callable[[Model], Any]=lambda x: deep_copy_model(x),
f_in_sample_prediction_log: Callable[[np.ndarray], Any]=lambda x: x):
<|code_end|>
, predict the immediate next line with the help of imports:
from typing import Any, Callable
from bartpy.model import Model, deep_copy_model
from bartpy.mutation import TreeMutation
import numpy as np
and context (classes, functions, sometimes code) from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# def deep_copy_model(model: Model) -> Model:
# copied_model = Model(None, deepcopy(model.sigma), [deep_copy_tree(tree) for tree in model.trees])
# return copied_model
#
# Path: bartpy/mutation.py
# class TreeMutation(object):
# """
# An encapsulation of a change to be made to the tree.
# Constructed of three components
# - the node to be changed
# - what it should be changed to
# - a string name of the kind of change (normally grow or prune)
# """
#
# def __init__(self, kind: str, existing_node: TreeNode, updated_node: TreeNode):
# self.kind = kind
# self.existing_node = existing_node
# self.updated_node = updated_node
#
# def __str__(self):
# return "{} - {} => {}".format(self.kind, self.existing_node, self.updated_node)
. Output only the next line. | self.f_tree_mutation_log = f_tree_mutation_log |
Given the code snippet: <|code_start|> self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
class TestGrowTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": np.random.normal(size=1000)}), np.array(np.random.normal(size=1000)))
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_grow_mutation(self.tree)
<|code_end|>
, generate the next line using the imports in this file:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context (functions, classes, or occasionally code) from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | self.assertIn(proposal.existing_node, self.tree.nodes) |
Predict the next line after this snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
<|code_end|>
using the current file's imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and any relevant context from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | self.assertIn(proposal.existing_node, self.tree.nodes) |
Here is a snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
<|code_end|>
. Write the next line using the current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
, which may include functions, classes, or code. Output only the next line. | class TestGrowTreeMutationProposer(unittest.TestCase): |
Based on the snippet: <|code_start|> self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
class TestGrowTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": np.random.normal(size=1000)}), np.array(np.random.normal(size=1000)))
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_grow_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
<|code_end|>
, predict the immediate next line with the help of imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context (classes, functions, sometimes code) from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
. Output only the next line. | self.assertNotIn(proposal.updated_node, self.tree.nodes) |
Given snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
which might include code, classes, or functions. Output only the next line. | def test_proposal_isnt_mutating(self): |
Predict the next line for this snippet: <|code_start|> self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
self.assertNotIn(proposal.updated_node, self.tree.nodes)
def test_types(self):
proposal = uniformly_sample_prune_mutation(self.tree)
self.assertIsInstance(proposal.existing_node, DecisionNode)
self.assertIsInstance(proposal.updated_node, LeafNode)
class TestGrowTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": np.random.normal(size=1000)}), np.array(np.random.normal(size=1000)))
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
def test_proposal_isnt_mutating(self):
proposal = uniformly_sample_grow_mutation(self.tree)
self.assertIn(proposal.existing_node, self.tree.nodes)
<|code_end|>
with the help of current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context from other files:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
, which may contain function names, class names, or code. Output only the next line. | self.assertNotIn(proposal.updated_node, self.tree.nodes) |
Given snippet: <|code_start|>
class TestPruneTreeMutationProposer(unittest.TestCase):
def setUp(self):
self.data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}), np.array([1, 2]), normalize=False)
self.d = LeafNode(Split(self.data))
self.e = LeafNode(Split(self.data))
self.c = DecisionNode(Split(self.data), self.d, self.e)
self.b = LeafNode(Split(self.data))
self.a = DecisionNode(Split(self.data), self.b, self.c)
self.tree = Tree([self.a, self.b, self.c, self.d, self.e])
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import unittest
import pandas as pd
import numpy as np
from bartpy.data import make_bartpy_data
from bartpy.samplers.unconstrainedtree.proposer import uniformly_sample_grow_mutation, uniformly_sample_prune_mutation
from bartpy.split import Split
from bartpy.tree import LeafNode, Tree, DecisionNode
and context:
# Path: bartpy/data.py
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/samplers/unconstrainedtree/proposer.py
# def uniformly_sample_grow_mutation(tree: Tree) -> TreeMutation:
# node = random_splittable_leaf_node(tree)
# updated_node = sample_split_node(node)
# return GrowMutation(node, updated_node)
#
# def uniformly_sample_prune_mutation(tree: Tree) -> TreeMutation:
# node = random_prunable_decision_node(tree)
# updated_node = LeafNode(node.split, depth=node.depth)
# return PruneMutation(node, updated_node)
#
# Path: bartpy/split.py
# class Split:
# """
# The Split class represents the conditioned data at any point in the decision tree
# It contains the logic for:
#
# - Maintaining a record of which rows of the covariate matrix are in the split
# - Being able to easily access a `Data` object with the relevant rows
# - Applying `SplitConditions` to further break up the data
# """
#
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# self._data = data
# if combined_condition is None:
# combined_condition = CombinedCondition(self._data.X.variables, [])
# self._combined_condition = combined_condition
#
# @property
# def data(self):
# return self._data
#
# def combined_condition(self):
# return self._combined_condition
#
# def condition(self, X: np.ndarray=None) -> np.array:
# if X is None:
# return ~self._data.mask
# else:
# return self.out_of_sample_condition(X)
#
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# return self._combined_condition.condition(X)
#
# def out_of_sample_conditioner(self) -> CombinedCondition:
# return self._combined_condition
#
# def __add__(self, other: SplitCondition):
#
# return Split(self._data + other,
# self._combined_condition + other)
#
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
# return self._combined_condition.most_recent_split_condition()
#
# Path: bartpy/tree.py
# class Tree:
# def __init__(self, nodes: List[TreeNode]):
# def nodes(self) -> List[TreeNode]:
# def leaf_nodes(self) -> List[LeafNode]:
# def splittable_leaf_nodes(self) -> List[LeafNode]:
# def decision_nodes(self) -> List[DecisionNode]:
# def prunable_decision_nodes(self) -> List[DecisionNode]:
# def update_y(self, y: np.ndarray) -> None:
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# def _out_of_sample_predict(self, X) -> np.ndarray:
# def remove_node(self, node: TreeNode) -> None:
# def add_node(self, node: TreeNode) -> None:
# def mutate(tree: Tree, mutation: TreeMutation) -> None:
# def deep_copy_tree(tree: Tree):
which might include code, classes, or functions. Output only the next line. | def test_proposal_isnt_mutating(self): |
Given snippet: <|code_start|>
class SigmaSampler(Sampler):
def step(self, model: Model, sigma: Sigma) -> float:
sample_value = self.sample(model, sigma)
sigma.set_value(sample_value)
return sample_value
@staticmethod
def sample(model: Model, sigma: Sigma) -> float:
posterior_alpha = sigma.alpha + (model.data.X.n_obsv / 2.)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import numpy as np
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.sigma import Sigma
and context:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/sigma.py
# class Sigma:
# """
# A representation of the sigma term in the model.
# Specifically, this is the sigma of y itself, i.e. the sigma in
# y ~ Normal(sum_of_trees, sigma)
#
# The default prior is an inverse gamma distribution on the variance
# The parametrization is slightly different to the numpy gamma version, with the scale parameter inverted
#
# Parameters
# ----------
# alpha - the shape of the prior
# beta - the scale of the prior
# scaling_factor - the range of the original distribution
# needed to rescale the variance into the original scale rather than on (-0.5, 0.5)
#
# """
#
# def __init__(self, alpha: float, beta: float, scaling_factor: float):
# self.alpha = alpha
# self.beta = beta
# self._current_value = 1.0
# self.scaling_factor = scaling_factor
#
# def set_value(self, value: float) -> None:
# self._current_value = value
#
# def current_value(self) -> float:
# return self._current_value
#
# def current_unnormalized_value(self) -> float:
# return self.current_value() * self.scaling_factor
which might include code, classes, or functions. Output only the next line. | posterior_beta = sigma.beta + (0.5 * (np.sum(np.square(model.residuals())))) |
Given snippet: <|code_start|>
class SigmaSampler(Sampler):
def step(self, model: Model, sigma: Sigma) -> float:
sample_value = self.sample(model, sigma)
sigma.set_value(sample_value)
return sample_value
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import numpy as np
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.sigma import Sigma
and context:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/sigma.py
# class Sigma:
# """
# A representation of the sigma term in the model.
# Specifically, this is the sigma of y itself, i.e. the sigma in
# y ~ Normal(sum_of_trees, sigma)
#
# The default prior is an inverse gamma distribution on the variance
# The parametrization is slightly different to the numpy gamma version, with the scale parameter inverted
#
# Parameters
# ----------
# alpha - the shape of the prior
# beta - the scale of the prior
# scaling_factor - the range of the original distribution
# needed to rescale the variance into the original scale rather than on (-0.5, 0.5)
#
# """
#
# def __init__(self, alpha: float, beta: float, scaling_factor: float):
# self.alpha = alpha
# self.beta = beta
# self._current_value = 1.0
# self.scaling_factor = scaling_factor
#
# def set_value(self, value: float) -> None:
# self._current_value = value
#
# def current_value(self) -> float:
# return self._current_value
#
# def current_unnormalized_value(self) -> float:
# return self.current_value() * self.scaling_factor
which might include code, classes, or functions. Output only the next line. | @staticmethod |
Here is a snippet: <|code_start|>
class SigmaSampler(Sampler):
def step(self, model: Model, sigma: Sigma) -> float:
sample_value = self.sample(model, sigma)
sigma.set_value(sample_value)
return sample_value
@staticmethod
def sample(model: Model, sigma: Sigma) -> float:
posterior_alpha = sigma.alpha + (model.data.X.n_obsv / 2.)
posterior_beta = sigma.beta + (0.5 * (np.sum(np.square(model.residuals()))))
<|code_end|>
. Write the next line using the current file imports:
import numpy as np
from bartpy.model import Model
from bartpy.samplers.sampler import Sampler
from bartpy.sigma import Sigma
and context from other files:
# Path: bartpy/model.py
# class Model:
#
# def __init__(self,
# data: Optional[Data],
# sigma: Sigma,
# trees: Optional[List[Tree]]=None,
# n_trees: int=50,
# alpha: float=0.95,
# beta: float=2.,
# k: int=2.,
# initializer: Initializer=SklearnTreeInitializer()):
#
# self.data = deepcopy(data)
# self.alpha = float(alpha)
# self.beta = float(beta)
# self.k = k
# self._sigma = sigma
# self._prediction = None
# self._initializer = initializer
#
# if trees is None:
# self.n_trees = n_trees
# self._trees = self.initialize_trees()
# if self._initializer is not None:
# self._initializer.initialize_trees(self.refreshed_trees())
# else:
# self.n_trees = len(trees)
# self._trees = trees
#
# def initialize_trees(self) -> List[Tree]:
# trees = [Tree([LeafNode(Split(deepcopy(self.data)))]) for _ in range(self.n_trees)]
# for tree in trees:
# tree.update_y(tree.update_y(self.data.y.values / self.n_trees))
# return trees
#
# def residuals(self) -> np.ndarray:
# return self.data.y.values - self.predict()
#
# def unnormalized_residuals(self) -> np.ndarray:
# return self.data.y.unnormalized_y - self.data.y.unnormalize_y(self.predict())
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is not None:
# return self._out_of_sample_predict(X)
# return np.sum([tree.predict() for tree in self.trees], axis=0)
#
# def _out_of_sample_predict(self, X: np.ndarray) -> np.ndarray:
# if type(X) == pd.DataFrame:
# X: pd.DataFrame = X
# X = X.values
# return np.sum([tree.predict(X) for tree in self.trees], axis=0)
#
# @property
# def trees(self) -> List[Tree]:
# return self._trees
#
# def refreshed_trees(self) -> Generator[Tree, None, None]:
# if self._prediction is None:
# self._prediction = self.predict()
# for tree in self._trees:
# self._prediction -= tree.predict()
# tree.update_y(self.data.y.values - self._prediction)
# yield tree
# self._prediction += tree.predict()
#
# @property
# def sigma_m(self) -> float:
# return 0.5 / (self.k * np.power(self.n_trees, 0.5))
#
# @property
# def sigma(self) -> Sigma:
# return self._sigma
#
# Path: bartpy/samplers/sampler.py
# class Sampler(ABC):
#
# @abstractmethod
# def step(self, model: Model, tree: Tree) -> bool:
# raise NotImplementedError()
#
# Path: bartpy/sigma.py
# class Sigma:
# """
# A representation of the sigma term in the model.
# Specifically, this is the sigma of y itself, i.e. the sigma in
# y ~ Normal(sum_of_trees, sigma)
#
# The default prior is an inverse gamma distribution on the variance
# The parametrization is slightly different to the numpy gamma version, with the scale parameter inverted
#
# Parameters
# ----------
# alpha - the shape of the prior
# beta - the scale of the prior
# scaling_factor - the range of the original distribution
# needed to rescale the variance into the original scale rather than on (-0.5, 0.5)
#
# """
#
# def __init__(self, alpha: float, beta: float, scaling_factor: float):
# self.alpha = alpha
# self.beta = beta
# self._current_value = 1.0
# self.scaling_factor = scaling_factor
#
# def set_value(self, value: float) -> None:
# self._current_value = value
#
# def current_value(self) -> float:
# return self._current_value
#
# def current_unnormalized_value(self) -> float:
# return self.current_value() * self.scaling_factor
, which may include functions, classes, or code. Output only the next line. | draw = np.power(np.random.gamma(posterior_alpha, 1./posterior_beta), -0.5) |
Using the snippet: <|code_start|>
def run(alpha, beta, n_trees):
x = np.random.normal(0, 1, size=3000)
X = pd.DataFrame(x)
y = np.random.normal(0, 0.1, size=3000) + 2 * x + np.sin(x)
plt.scatter(x, y)
plt.show()
model = ResidualBART(n_samples=200, n_burn=50, n_trees=n_trees, alpha=alpha, beta=beta)
model.fit(X, y)
predictions = model.predict()
plt.scatter(x, y)
plt.scatter(x, predictions)
plt.show()
return model, x, y
<|code_end|>
, determine the next line of code. You have imports:
import pandas as pd
import numpy as np
from matplotlib import pyplot as plt
from bartpy.extensions.baseestimator import ResidualBART
from datetime import datetime as dt
and context (class names, function names, or code) available:
# Path: bartpy/extensions/baseestimator.py
# class ResidualBART(SklearnModel):
#
# def __init__(self,
# base_estimator: RegressorMixin = None,
# **kwargs):
#
# if base_estimator is not None:
# self.base_estimator = clone(base_estimator)
# else:
# base_estimator = LinearRegression()
# self.base_estimator = base_estimator
# super().__init__(**kwargs)
#
# def fit(self, X: np.ndarray, y: np.ndarray) -> 'ResidualBART':
# self.base_estimator.fit(X, y)
# SklearnModel.fit(self, X, y - self.base_estimator.predict(X))
# return self
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is None:
# X = self.data.X
# sm_prediction = self.base_estimator.predict(X)
# bart_prediction = SklearnModel.predict(self, X)
# return sm_prediction + bart_prediction
. Output only the next line. | if __name__ == "__main__": |
Continue the code snippet: <|code_start|>
def run(size=100,
alpha=0.95,
beta=2.0,
n_trees=50):
warnings.simplefilter("error", UserWarning)
x = np.linspace(0, 5, size)
X = pd.DataFrame(x)
y = np.random.normal(0, 0.1, size=size) + np.sin(x)
model = ResidualBART(
n_samples=100,
n_burn=50,
n_trees=n_trees,
alpha=alpha,
beta=beta,
n_jobs=1,
n_chains=1)
<|code_end|>
. Use current file imports:
import pandas as pd
import numpy as np
import warnings
from matplotlib import pyplot as plt
from sklearn.model_selection import train_test_split
from bartpy.extensions.baseestimator import ResidualBART
and context (classes, functions, or code) from other files:
# Path: bartpy/extensions/baseestimator.py
# class ResidualBART(SklearnModel):
#
# def __init__(self,
# base_estimator: RegressorMixin = None,
# **kwargs):
#
# if base_estimator is not None:
# self.base_estimator = clone(base_estimator)
# else:
# base_estimator = LinearRegression()
# self.base_estimator = base_estimator
# super().__init__(**kwargs)
#
# def fit(self, X: np.ndarray, y: np.ndarray) -> 'ResidualBART':
# self.base_estimator.fit(X, y)
# SklearnModel.fit(self, X, y - self.base_estimator.predict(X))
# return self
#
# def predict(self, X: np.ndarray=None) -> np.ndarray:
# if X is None:
# X = self.data.X
# sm_prediction = self.base_estimator.predict(X)
# bart_prediction = SklearnModel.predict(self, X)
# return sm_prediction + bart_prediction
. Output only the next line. | X_train, X_test, y_train, y_test = train_test_split(X, |
Using the snippet: <|code_start|> def test_single_condition_data(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2]}).values, np.array([1, 2]))
left_condition, right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
left_split, right_split = Split(data) + left_condition, Split(data) + right_condition
self.assertListEqual([1], list(left_split.data.X.get_column(0)))
self.assertListEqual([2], list(right_split.data.X.get_column(0)))
def test_combined_condition_data(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
conditioned_data = updated_split.data
self.assertListEqual([2, 3], list(conditioned_data.X.get_column(0)))
def test_most_recent_split(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
<|code_end|>
, determine the next line of code. You have imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and context (class names, function names, or code) available:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
. Output only the next line. | class TestCombinedCondition(unittest.TestCase): |
Predict the next line after this snippet: <|code_start|> data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
class TestCombinedCondition(unittest.TestCase):
def setUp(self):
self.X = np.array([1, 2, 4, 6, 3, 5]).reshape(6, 1)
def test_single_condition(self):
condition = SplitCondition(0, 3, gt)
combined_condition = CombinedCondition([0], [condition])
self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, True, False, True])
def test_multiple_conditions(self):
conditions = [
SplitCondition(0, 2, gt),
SplitCondition(0, 5, le)
]
combined_condition = CombinedCondition([0], conditions)
self.assertEqual(combined_condition.variables[0].min_value, 2)
self.assertEqual(combined_condition.variables[0].max_value, 5)
<|code_end|>
using the current file's imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and any relevant context from other files:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
. Output only the next line. | self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, False, True, True]) |
Continue the code snippet: <|code_start|> def test_most_recent_split(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
class TestCombinedCondition(unittest.TestCase):
def setUp(self):
self.X = np.array([1, 2, 4, 6, 3, 5]).reshape(6, 1)
def test_single_condition(self):
condition = SplitCondition(0, 3, gt)
combined_condition = CombinedCondition([0], [condition])
self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, True, False, True])
def test_multiple_conditions(self):
conditions = [
SplitCondition(0, 2, gt),
SplitCondition(0, 5, le)
]
combined_condition = CombinedCondition([0], conditions)
self.assertEqual(combined_condition.variables[0].min_value, 2)
<|code_end|>
. Use current file imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and context (classes, functions, or code) from other files:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
. Output only the next line. | self.assertEqual(combined_condition.variables[0].max_value, 5) |
Predict the next line for this snippet: <|code_start|>
def test_most_recent_split(self):
data = make_bartpy_data(pd.DataFrame({"a": [1, 2, 3, 4]}).values, np.array([1, 2, 1, 1]))
first_left_condition, first_right_condition = SplitCondition(0, 3, le), SplitCondition(0, 3, gt)
second_left_condition, second_right_condition = SplitCondition(0, 1, le), SplitCondition(0, 1, gt)
split = Split(data)
updated_split = split + first_left_condition + second_right_condition
self.assertEqual((split + first_left_condition).most_recent_split_condition(), first_left_condition)
self.assertEqual(updated_split.most_recent_split_condition(), second_right_condition)
class TestCombinedCondition(unittest.TestCase):
def setUp(self):
self.X = np.array([1, 2, 4, 6, 3, 5]).reshape(6, 1)
def test_single_condition(self):
condition = SplitCondition(0, 3, gt)
combined_condition = CombinedCondition([0], [condition])
self.assertListEqual(list(combined_condition.condition(self.X)), [False, False, True, True, False, True])
def test_multiple_conditions(self):
conditions = [
SplitCondition(0, 2, gt),
SplitCondition(0, 5, le)
]
combined_condition = CombinedCondition([0], conditions)
<|code_end|>
with the help of current file imports:
from operator import le, gt
from bartpy.data import Data, make_bartpy_data
from bartpy.split import SplitCondition, Split, CombinedCondition
import unittest
import pandas as pd
import numpy as np
and context from other files:
# Path: bartpy/data.py
# class Data(object):
# """
# Encapsulates the data within a split of feature space.
# Primarily used to cache computations on the data for better performance
#
# Parameters
# ----------
# X: np.ndarray
# The subset of the covariate matrix that falls into the split
# y: np.ndarray
# The subset of the target array that falls into the split
# normalize: bool
# Whether to map the target into -0.5, 0.5
# cache: bool
# Whether to cache common values.
# You really only want to turn this off if you're not going to the resulting object for anything (e.g. when testing)
# """
#
# def __init__(self,
# X: np.ndarray,
# y: np.ndarray,
# mask: Optional[np.ndarray]=None,
# normalize: bool=False,
# unique_columns: List[int]=None,
# splittable_variables: Optional[List[Optional[bool]]]=None,
# y_sum: float=None,
# n_obsv: int=None):
#
# if mask is None:
# mask = np.zeros_like(y).astype(bool)
# self._mask: np.ndarray = mask
#
# if n_obsv is None:
# n_obsv = (~self.mask).astype(int).sum()
#
# self._n_obsv = n_obsv
#
# self._X = CovariateMatrix(X, mask, n_obsv, unique_columns, splittable_variables)
# self._y = Target(y, mask, n_obsv, normalize, y_sum)
#
# @property
# def y(self) -> Target:
# return self._y
#
# @property
# def X(self) -> CovariateMatrix:
# return self._X
#
# @property
# def mask(self) -> np.ndarray:
# return self._mask
#
# def update_y(self, y: np.ndarray) -> None:
# self._y.update_y(y)
#
# def __add__(self, other: SplitCondition) -> 'Data':
# updated_mask = self.X.update_mask(other)
#
# return Data(self.X.values,
# self.y.values,
# updated_mask,
# normalize=False,
# unique_columns=self._X._unique_columns,
# splittable_variables=self._X._splittable_variables,
# y_sum=other.carry_y_sum,
# n_obsv=other.carry_n_obsv)
#
# def make_bartpy_data(X: Union[np.ndarray, pd.DataFrame],
# y: np.ndarray,
# normalize: bool=True) -> 'Data':
# X = format_covariate_matrix(X)
# y = y.astype(float)
# return Data(X, y, normalize=normalize)
#
# Path: bartpy/split.py
# class Split:
# def __init__(self, data: Data, combined_condition: Optional[CombinedCondition]=None):
# def data(self):
# def combined_condition(self):
# def condition(self, X: np.ndarray=None) -> np.array:
# def out_of_sample_condition(self, X: np.ndarray) -> np.ndarray:
# def out_of_sample_conditioner(self) -> CombinedCondition:
# def __add__(self, other: SplitCondition):
# def most_recent_split_condition(self) -> Optional[SplitCondition]:
, which may contain function names, class names, or code. Output only the next line. | self.assertEqual(combined_condition.variables[0].min_value, 2) |
Continue the code snippet: <|code_start|> def __init__(self, raw, user):
"""Initialize comment from raw JSON dict and user"""
super(Comment, self).__init__(raw, user)
self.comment = raw["comment"]
self.comment_id = raw["commentID"]
try:
self.gmt = raw["gmt"]
except KeyError:
self.gmt = None
try:
self.back_id = raw["backID"]
self.overlay_id = raw["overlayID"]
except KeyError:
self.back_id = None
self.overlay_id = None
try:
self.text_style = raw["textStyle"]
except KeyError:
self.text_style = None
def __str__(self):
"""Return comment as string"""
return "%s (%d)" % (helper.emoji_remove(self.comment), self.likes)
def update(self):
"""Update properties from Yik Yak. Return True if successful, False if
unsuccessful"""
comment = self.user.get_comment(self.comment_id, self.message_id)
if comment is not None:
self = comment
<|code_end|>
. Use current file imports:
from yaklient import helper
from yaklient.objects.message import Message
and context (classes, functions, or code) from other files:
# Path: yaklient/helper.py
# def backslash_remove(text):
# def emoji_remove(text):
# def generate_id(dashes=True, upper=False):
# def hash_msg(key, msg):
# def __init__(self, error_string, response):
# def __str__(self):
# class ParsingResponseError(Exception):
#
# Path: yaklient/objects/message.py
# class Message(object):
# """An abstract class for a postable object on Yik Yak (Comment or Yak)"""
# def __init__(self, raw, user):
# """Initialize message from raw JSON dict and user"""
# self.delivery_id = raw["deliveryID"]
# self.liked = raw["liked"]
# self.likes = raw["numberOfLikes"]
# self.message_id = helper.backslash_remove(raw["messageID"])
# self.poster_id = raw["posterID"]
# self.time = raw["time"]
# self.user = user
# try:
# self.reyaked = raw["reyaked"]
# except KeyError:
# self.reyaked = None
#
# @abstractmethod
# def __str__(self):
# """Return message as string"""
# pass
#
# def delete(self):
# """Delete message from Yik Yak. Return True if successful, False if
# unsuccessful"""
# return self.user.delete(self)
#
# def downvote(self):
# """Downvote the message. Return True if successful, False if
# unsuccessful"""
# if self.user.downvote(self):
# self.likes -= 1
# return True
# else:
# return False
#
# def get_comments(self):
# """Get comments on the message"""
# return self.user.get_comments(self)
#
# def post_comment(self, comment):
# """Post a comment on the message. Return True if successful, False if
# unsuccessful"""
# return self.user.post_comment(comment, self.message_id)
#
# def report(self):
# """Report a message to Yik Yak"""
# self.user.report(self)
#
# @abstractmethod
# def update(self):
# """Update properties from Yik Yak"""
# pass
#
# def upvote(self):
# """Upvote the message. Return True if successful, False if
# unsuccessful"""
# if self.user.upvote(self):
# self.likes += 1
# return True
# else:
# return False
. Output only the next line. | return True |
Predict the next line after this snippet: <|code_start|> def __init__(self, raw, user):
"""Initialize comment from raw JSON dict and user"""
super(Comment, self).__init__(raw, user)
self.comment = raw["comment"]
self.comment_id = raw["commentID"]
try:
self.gmt = raw["gmt"]
except KeyError:
self.gmt = None
try:
self.back_id = raw["backID"]
self.overlay_id = raw["overlayID"]
except KeyError:
self.back_id = None
self.overlay_id = None
try:
self.text_style = raw["textStyle"]
except KeyError:
self.text_style = None
def __str__(self):
"""Return comment as string"""
return "%s (%d)" % (helper.emoji_remove(self.comment), self.likes)
def update(self):
"""Update properties from Yik Yak. Return True if successful, False if
unsuccessful"""
comment = self.user.get_comment(self.comment_id, self.message_id)
if comment is not None:
self = comment
<|code_end|>
using the current file's imports:
from yaklient import helper
from yaklient.objects.message import Message
and any relevant context from other files:
# Path: yaklient/helper.py
# def backslash_remove(text):
# def emoji_remove(text):
# def generate_id(dashes=True, upper=False):
# def hash_msg(key, msg):
# def __init__(self, error_string, response):
# def __str__(self):
# class ParsingResponseError(Exception):
#
# Path: yaklient/objects/message.py
# class Message(object):
# """An abstract class for a postable object on Yik Yak (Comment or Yak)"""
# def __init__(self, raw, user):
# """Initialize message from raw JSON dict and user"""
# self.delivery_id = raw["deliveryID"]
# self.liked = raw["liked"]
# self.likes = raw["numberOfLikes"]
# self.message_id = helper.backslash_remove(raw["messageID"])
# self.poster_id = raw["posterID"]
# self.time = raw["time"]
# self.user = user
# try:
# self.reyaked = raw["reyaked"]
# except KeyError:
# self.reyaked = None
#
# @abstractmethod
# def __str__(self):
# """Return message as string"""
# pass
#
# def delete(self):
# """Delete message from Yik Yak. Return True if successful, False if
# unsuccessful"""
# return self.user.delete(self)
#
# def downvote(self):
# """Downvote the message. Return True if successful, False if
# unsuccessful"""
# if self.user.downvote(self):
# self.likes -= 1
# return True
# else:
# return False
#
# def get_comments(self):
# """Get comments on the message"""
# return self.user.get_comments(self)
#
# def post_comment(self, comment):
# """Post a comment on the message. Return True if successful, False if
# unsuccessful"""
# return self.user.post_comment(comment, self.message_id)
#
# def report(self):
# """Report a message to Yik Yak"""
# self.user.report(self)
#
# @abstractmethod
# def update(self):
# """Update properties from Yik Yak"""
# pass
#
# def upvote(self):
# """Upvote the message. Return True if successful, False if
# unsuccessful"""
# if self.user.upvote(self):
# self.likes += 1
# return True
# else:
# return False
. Output only the next line. | return True |
Given snippet: <|code_start|>
# Session for requests
SESSION = Session()
REQUEST = SESSION.request
def _create_installation(iid):
"""Send a request to create an installation (ID: iid). Return the object
ID associated with it"""
data = {
"deviceType": "android",
"appVersion": settings.YIKYAK_VERSION,
"parseVersion": settings.PARSE_VERSION,
"appName": "Yik Yak",
"timeZone": tzname[0],
"installationId": iid,
"appIdentifier": "com.yik.yak"
}
return _send("create", data, iid)
def _save_user(user_id, iid, object_id):
"""Send a request to add user_id to the installation with (ID: iid,
object_id"""
# User ID is surrounded by a 'c' on either side
user_id = 'c' + user_id + 'c'
data = {
"channels": {"objects": [user_id], "__op": "AddUnique"},
"objectId": object_id
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
from requests import Session
from requests_oauthlib import OAuth1
from time import tzname
from urlparse import urljoin
from yaklient.helper import generate_id, ParsingResponseError
from yaklient import settings
and context:
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# class ParsingResponseError(Exception):
# """An error parsing a request"""
# def __init__(self, error_string, response):
# """Initialize the error message"""
# self.msg = "%s\nResponse:\n%s" % (error_string, response.text)
# super(ParsingResponseError, self).__init__(self.msg)
# self.response = response
#
# def __str__(self):
# """Return the error message"""
# return self.msg
#
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
which might include code, classes, or functions. Output only the next line. | } |
Predict the next line for this snippet: <|code_start|> "timeZone": tzname[0],
"installationId": iid,
"appIdentifier": "com.yik.yak"
}
return _send("create", data, iid)
def _save_user(user_id, iid, object_id):
"""Send a request to add user_id to the installation with (ID: iid,
object_id"""
# User ID is surrounded by a 'c' on either side
user_id = 'c' + user_id + 'c'
data = {
"channels": {"objects": [user_id], "__op": "AddUnique"},
"objectId": object_id
}
return _send("update", data, iid)
def _send(method, data, iid):
"""Send data associated with an installation (ID: iid) to Yik Yak's Parse
service using specified method. Return the response"""
url = urljoin(settings.PARSE_ENDPOINT, method)
data = {
"classname": "_Installation",
"data": data,
"osVersion": settings.ANDROID_VERSION,
"appBuildVersion": settings.PARSE_BUILD,
"appDisplayVersion": settings.YIKYAK_VERSION,
"v": settings.PARSE_VERSION_LETTER + settings.PARSE_VERSION,
<|code_end|>
with the help of current file imports:
import json
from requests import Session
from requests_oauthlib import OAuth1
from time import tzname
from urlparse import urljoin
from yaklient.helper import generate_id, ParsingResponseError
from yaklient import settings
and context from other files:
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# class ParsingResponseError(Exception):
# """An error parsing a request"""
# def __init__(self, error_string, response):
# """Initialize the error message"""
# self.msg = "%s\nResponse:\n%s" % (error_string, response.text)
# super(ParsingResponseError, self).__init__(self.msg)
# self.response = response
#
# def __str__(self):
# """Return the error message"""
# return self.msg
#
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
, which may contain function names, class names, or code. Output only the next line. | "iid": iid, |
Here is a snippet: <|code_start|> "data": data,
"osVersion": settings.ANDROID_VERSION,
"appBuildVersion": settings.PARSE_BUILD,
"appDisplayVersion": settings.YIKYAK_VERSION,
"v": settings.PARSE_VERSION_LETTER + settings.PARSE_VERSION,
"iid": iid,
"uuid": generate_id()
}
json_data = json.dumps(data)
auth = OAuth1(settings.PARSE_APPID, settings.PARSE_CLIENTKEY)
user_agent = "Parse Android SDK %s (com.yik.yak/%s) API Level %s"
user_agent %= (settings.PARSE_VERSION, settings.PARSE_BUILD,
settings.PARSE_API_LEVEL)
headers = {"Accept-Encoding": "gzip", "User-Agent": user_agent}
return REQUEST("POST", url, data=json_data, auth=auth, headers=headers)
def register_user(user_id):
"""Register a user with Yik Yak's Parse service. Create a new installation
and add user_id to it. Return installation ID and object ID"""
# Installation ID
iid = generate_id()
# Create installation and check for errors
response = _create_installation(iid)
try:
object_id = response.json()["result"]["data"]["objectId"]
except (KeyError, ValueError):
raise ParsingResponseError("Error creating installation", response)
# Save user and check for errors and consistency
response = _save_user(user_id, iid, object_id)
<|code_end|>
. Write the next line using the current file imports:
import json
from requests import Session
from requests_oauthlib import OAuth1
from time import tzname
from urlparse import urljoin
from yaklient.helper import generate_id, ParsingResponseError
from yaklient import settings
and context from other files:
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# class ParsingResponseError(Exception):
# """An error parsing a request"""
# def __init__(self, error_string, response):
# """Initialize the error message"""
# self.msg = "%s\nResponse:\n%s" % (error_string, response.text)
# super(ParsingResponseError, self).__init__(self.msg)
# self.response = response
#
# def __str__(self):
# """Return the error message"""
# return self.msg
#
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
, which may include functions, classes, or code. Output only the next line. | try: |
Predict the next line after this snippet: <|code_start|> ("long", user.location.longitude),
("userID", user.user_id),
("userLat", user.location.latitude),
("userLong", user.location.longitude)]
return _send("GET", settings.YIKYAK_ENDPOINT, "getMyTops", params)
def get_area_tops(user):
"""Return raw response data for top Yaks in area"""
params = [("lat", user.location.latitude),
("long", user.location.longitude),
("token", get_token()),
("userID", user.user_id),
("userLat", user.location.latitude),
("userLong", user.location.longitude)]
return _send("GET", settings.YIKYAK_ENDPOINT, "getAreaTops", params)
def get_comments(user, message_id, basecamp=0):
"""Return raw response data for all comments on a message (ID: message_id)
using user (optionally at basecamp)"""
if basecamp:
location = user.basecamp_location
else:
location = user.location
params = [("accuracy", user.location.accuracy),
("bc", int(basecamp)),
("lat", location.latitude),
("long", location.longitude),
("messageID", message_id),
<|code_end|>
using the current file's imports:
from requests import Session
from time import time
from urllib import urlencode, unquote
from urlparse import urljoin
from yaklient import settings
from yaklient.config import get_token, get_user_agent
from yaklient.helper import generate_id, hash_msg
and any relevant context from other files:
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
#
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# def hash_msg(key, msg):
# """Return SHA1 hash from key and msg"""
# return b64encode(hmac.new(key, msg, sha1).digest())
. Output only the next line. | ("token", get_token()), |
Using the snippet: <|code_start|>
def log_event(user, event_type):
"""Return raw response data from logging an app event of type event_type
using user"""
params = [("accuracy", user.location.accuracy),
("token", get_token()),
("userID", user.user_id),
("userLat", user.location.latitude),
("userLong", user.location.longitude)]
data = [("eventType", event_type),
("lat", user.location.latitude),
("long", user.location.longitude)]
return _send("POST", settings.YIKYAK_ENDPOINT, "logEvent", params, data)
def send_message(user, message, handle=None, btp=0, basecamp=0):
"""Return raw response data from sending a message with an optional handle
using user (optionally at basecamp and optionally with parameter
bypassedThreatPopup as btp)"""
if basecamp:
location = user.basecamp_location
else:
location = user.location
params = [("bc", int(basecamp)),
("token", get_token()),
("userID", user.user_id)]
data = [("bypassedThreatPopup", int(btp)),
("lat", location.latitude),
("long", location.longitude),
<|code_end|>
, determine the next line of code. You have imports:
from requests import Session
from time import time
from urllib import urlencode, unquote
from urlparse import urljoin
from yaklient import settings
from yaklient.config import get_token, get_user_agent
from yaklient.helper import generate_id, hash_msg
and context (class names, function names, or code) available:
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
#
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# def hash_msg(key, msg):
# """Return SHA1 hash from key and msg"""
# return b64encode(hmac.new(key, msg, sha1).digest())
. Output only the next line. | ("message", message)] |
Based on the snippet: <|code_start|> if basecamp and location is None:
location = user.basecamp_location
params = [("accuracy", user.location.accuracy),
("bc", int(basecamp)),
("lat", location.latitude),
("long", location.longitude),
("token", get_token()),
("userID", user.user_id),
("userLat", user.location.latitude),
("userLong", user.location.longitude)]
return _send("GET", settings.YIKYAK_ENDPOINT, "getMessages", params)
def get_peek_messages(user, peek_id):
"""Return raw response data for messages at peek location (ID: peek_id)
using user"""
params = [("accuracy", user.location.accuracy),
("lat", user.location.latitude),
("long", user.location.longitude),
("peekID", peek_id),
("token", get_token()),
("userID", user.user_id),
("userLat", user.location.latitude),
("userLong", user.location.longitude)]
return _send("GET", settings.YIKYAK_ENDPOINT, "getPeekMessages", params)
def hot(user, location, basecamp=0):
"""Return raw response data for top messages in location/basecamp using
user"""
<|code_end|>
, predict the immediate next line with the help of imports:
from requests import Session
from time import time
from urllib import urlencode, unquote
from urlparse import urljoin
from yaklient import settings
from yaklient.config import get_token, get_user_agent
from yaklient.helper import generate_id, hash_msg
and context (classes, functions, sometimes code) from other files:
# Path: yaklient/settings.py
# YIKYAK_APIKEY = "EF64523D2BD1FA21F18F5BC654DFC41B"
# YIKYAK_ENDPOINT = "https://us-central-api.yikyakapi.net/api/"
# YIKYAK_VERSION = "2.7.3"
# YIKYAK_VERSION_LETTER = "e"
# PARSE_APPID = "wMkdjBI4ircsNcRn8mXnBkgH0dwOcrkexrdMY3vY"
# PARSE_CLIENTKEY = "GbNFwvFgoUu1wYuwIexNImy8bnSlNhqssG7gd53Y"
# PARSE_ENDPOINT = "https://api.parse.com/2/"
# PARSE_VERSION = "1.7.1"
# PARSE_VERSION_LETTER = "a"
# PARSE_BUILD = "59"
# PARSE_API_LEVEL = "22"
# NOTIFY_ENDPOINT = "https://notify.yikyakapi.net/api/"
# BASECAMP_ENDPOINT = "https://bc.yikyakapi.net/api/"
# AWS_ACCESS_KEY = "AKIAJFD2ANADKEMPW52A"
# AWS_BUCKET = "photos-upload-yy"
# AWS_SECRET_KEY = None
# AWS_UPLOAD_ENDPOINT = "http://signedup.yikyakapi.net/upload"
# ALLOWED_SITES_URL = "http://lv.yikyakapi.net/getSites"
# CONFIG_URL = "https://d3436qb9f9xu23.cloudfront.net/yikyak-config-android.json"
# VM_TYPE = "Dalvik"
# VM_VERSION = "1.6.0"
# ANDROID_VERSION = "4.0.5"
# DEVICE = "google_sdk"
# BUILD = "MR1"
# VM_VERSIONS = ["1.7.0", "1.8.0", "1.8.1"]
# ANDROID_VERSIONS = ["4.0.4", "4.5", "4.3", "5.1"]
# BUILD_STRING_LENGTHS = [3, 4, 5]
# DEVICES = ["Nexus 4", "Nexus 5", "HTC One_M8", "SM-N900V", "XT1080",
# "SM-G900V", "SCH-I545", "Android SDK built for x86"]
# RANDOMIZE_USER_AGENT = True
# RANDOMIZE_ENDPOINT = False
# LOCATIONIZE_ENDPOINT = False
# LOG_USERIDS = True
# NO_YAKS_MESSAGE_ID = "Y/b3c6c56b0305f2bc794e40b504f7150f"
# TOO_CLOSE_TO_SCHOOL_MESSAGE_ID = "Y/1687dcbe8ca5a308d46c44343a4c69eb"
# CONTACT_US_REASONS = ["My Basecamp location is wrong.",
# "I'm not near a high school but it says I am! Help!",
# "I want my college to be a Peek location!",
# "I have a really cool idea for the app.",
# "Yik Yak isn't working properly on my phone.",
# "Someone posted something and I want it taken down.",
# "My Yakarma has been reset.", "I forgot my pin code.",
# "Other"]
#
# Path: yaklient/helper.py
# def generate_id(dashes=True, upper=False):
# """Return ID with or without dashes as either uppercase or lowercase"""
# uuid = uuid4() if dashes else uuid4().get_hex()
# return str(uuid).upper() if upper else str(uuid)
#
# def hash_msg(key, msg):
# """Return SHA1 hash from key and msg"""
# return b64encode(hmac.new(key, msg, sha1).digest())
. Output only the next line. | if basecamp: |
Predict the next line for this snippet: <|code_start|>class Message(object):
"""An abstract class for a postable object on Yik Yak (Comment or Yak)"""
def __init__(self, raw, user):
"""Initialize message from raw JSON dict and user"""
self.delivery_id = raw["deliveryID"]
self.liked = raw["liked"]
self.likes = raw["numberOfLikes"]
self.message_id = helper.backslash_remove(raw["messageID"])
self.poster_id = raw["posterID"]
self.time = raw["time"]
self.user = user
try:
self.reyaked = raw["reyaked"]
except KeyError:
self.reyaked = None
@abstractmethod
def __str__(self):
"""Return message as string"""
pass
def delete(self):
"""Delete message from Yik Yak. Return True if successful, False if
unsuccessful"""
return self.user.delete(self)
def downvote(self):
"""Downvote the message. Return True if successful, False if
unsuccessful"""
if self.user.downvote(self):
<|code_end|>
with the help of current file imports:
from abc import abstractmethod
from yaklient import helper
and context from other files:
# Path: yaklient/helper.py
# def backslash_remove(text):
# def emoji_remove(text):
# def generate_id(dashes=True, upper=False):
# def hash_msg(key, msg):
# def __init__(self, error_string, response):
# def __str__(self):
# class ParsingResponseError(Exception):
, which may contain function names, class names, or code. Output only the next line. | self.likes -= 1 |
Here is a snippet: <|code_start|>
class Uptime:
def __init__(self, manager):
self.client = manager.client
<|code_end|>
. Write the next line using the current file imports:
import os
import psutil
import datetime
from time import time
from dasbit.helper import timesince
and context from other files:
# Path: dasbit/helper.py
# def timesince(date, suffix=' ago'):
# chunks = (
# (60 * 60 * 24 * 365, lambda n: 'year' if n is 1 else 'years'),
# (60 * 60 * 24 * 30, lambda n: 'month' if n is 1 else 'months'),
# (60 * 60 * 24 * 7, lambda n: 'week' if n is 1 else 'weeks'),
# (60 * 60 * 24, lambda n: 'day' if n is 1 else 'days'),
# (60 * 60, lambda n: 'hour' if n is 1 else 'hours'),
# (60, lambda n: 'minute' if n is 1 else 'minutes')
# )
#
# now = datetime.datetime.utcnow()
# delta = now - date
# since = delta.days * 24 * 60 * 60 + delta.seconds
#
# if since <= 0:
# return '0 minutes' + suffix
#
# for i, (seconds, name) in enumerate(chunks):
# count = since // seconds
#
# if count != 0:
# break
#
# s = '%d %s' % (count, name(count))
#
# if i + 1 < len(chunks):
# seconds2, name2 = chunks[i + 1]
# count2 = (since - (seconds * count)) // seconds2
#
# if count2 != 0:
# s += ' and %d %s' % (count2, name2(count2))
#
# s += suffix
#
# return s
, which may include functions, classes, or code. Output only the next line. | manager.registerCommand('uptime', 'uptime', 'uptime', None, self.getUptime) |
Using the snippet: <|code_start|> # Convert headers dictionary to
# twisted raw headers format.
headers = kwargs.get('headers')
if headers:
if isinstance(headers, dict):
h = Headers({})
for k, v in headers.iteritems():
if isinstance(v, str):
h.addRawHeader(k, v)
else:
h.setRawHeaders(k, v)
headers = h
else:
headers = Headers({})
# Here we choose a right producer
# based on the parameters passed in.
bodyProducer = None
data = kwargs.get('data')
files = kwargs.get('files')
if files:
# If the files keyword is present we will issue a
# multipart/form-data request as it suits better for cases
# with files and/or large objects.
files = list(_convert_files(files))
boundary = uuid.uuid4()
headers.setRawHeaders(
'content-type', [
'multipart/form-data; boundary=%s' % (boundary,)])
<|code_end|>
, determine the next line of code. You have imports:
import mimetypes
import uuid
from io import BytesIO
from StringIO import StringIO
from os import path
from urlparse import urlparse, urlunparse
from urllib import urlencode
from twisted.internet.interfaces import IProtocol
from twisted.internet.defer import Deferred
from twisted.python.components import proxyForInterface
from twisted.web.http_headers import Headers
from twisted.web.iweb import IBodyProducer, IResponse
from twisted.web.client import (
FileBodyProducer,
RedirectAgent,
ContentDecoderAgent,
GzipDecoder
)
from twisted.python.components import registerAdapter
from treq._utils import default_reactor
from treq.auth import add_auth
from treq import multipart
from treq.response import _Response
and context (class names, function names, or code) available:
# Path: treq/response.py
# class _Response(proxyForInterface(IResponse)):
# def content(self):
# return content(self.original)
#
# def json(self, *args, **kwargs):
# return json_content(self.original, *args, **kwargs)
#
# def text(self, *args, **kwargs):
# return text_content(self.original, *args, **kwargs)
#
# def history(self):
# if not hasattr(self, "previousResponse"):
# raise NotImplementedError(
# "Twisted < 13.1.0 does not support response history.")
#
# response = self
# history = []
#
# while response.previousResponse is not None:
# history.append(_Response(response.previousResponse))
# response = response.previousResponse
#
# history.reverse()
# return history
. Output only the next line. | if data: |
Next line prediction: <|code_start|> # make test deterministic
transactions = transactions.order_by("-created")
self.assertEqual(len(transactions), 1)
self.assertEqual(transactions[0].title, 'this_month')
def test_this_months_transactions_list(self):
with mock.patch('books.services.timezone') as mock_now:
mock_now.now.return_value = datetime(2015, 4, 23, tzinfo=pytz.utc)
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
response = c.get(reverse('transaction_list'))
self.assertEqual(200, response.status_code)
qs = response.context['transactions']
self.assertSequenceEqual(qs, [self.this_month])
def test_last_months_transactions(self):
with mock.patch('books.services.timezone') as mock_now:
mock_now.now.return_value = datetime(2015, 4, 23, tzinfo=pytz.utc)
transactions = services.get_last_months_transactions(self.user)
# make test deterministic
transactions = transactions.order_by("-created")
self.assertEqual(len(transactions), 2)
self.assertEqual(transactions[0].title, 'this_month')
<|code_end|>
. Use current file imports:
(from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz)
and context including class names, function names, or small code snippets from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
. Output only the next line. | self.assertEqual(transactions[1].title, 'last_month') |
Here is a snippet: <|code_start|> self.assertEqual(transactions[2].title, 'this_year')
def test_this_years_transactions_list(self):
with mock.patch('books.services.timezone') as mock_now:
mock_now.now.return_value = datetime(2015, 4, 23, tzinfo=pytz.utc)
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
response = c.get(reverse('transaction_list_filter'),
{'filter': 'this_year'},
follow=True)
self.assertRedirects(response, reverse('transaction_list'))
qs = response.context['transactions']
self.assertSequenceEqual(
qs,
[self.this_month, self.last_month, self.this_year]
)
def test_all_time_transactions_list(self):
with mock.patch('books.services.timezone') as mock_now:
mock_now.now.return_value = datetime(2015, 4, 23, tzinfo=pytz.utc)
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
response = c.get(reverse('transaction_list_filter'),
<|code_end|>
. Write the next line using the current file imports:
from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz
and context from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
, which may include functions, classes, or code. Output only the next line. | {'filter': 'all_time'}, |
Continue the code snippet: <|code_start|>
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
response = c.get(reverse('transaction_list_filter'),
{'filter': 'this_year'},
follow=True)
self.assertRedirects(response, reverse('transaction_list'))
qs = response.context['transactions']
self.assertSequenceEqual(
qs,
[self.this_month, self.last_month, self.this_year]
)
def test_all_time_transactions_list(self):
with mock.patch('books.services.timezone') as mock_now:
mock_now.now.return_value = datetime(2015, 4, 23, tzinfo=pytz.utc)
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
response = c.get(reverse('transaction_list_filter'),
{'filter': 'all_time'},
follow=True)
self.assertRedirects(response, reverse('transaction_list'))
qs = response.context['transactions']
<|code_end|>
. Use current file imports:
from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz
and context (classes, functions, or code) from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
. Output only the next line. | self.assertSequenceEqual( |
Here is a snippet: <|code_start|>
class TransactionTests(TestCase):
def setUp(self):
self.user = UserFactory()
def test_create_model(self):
self.assertEqual(0, Transaction.objects.count())
TransactionFactory(title='first')
self.assertEqual(1, Transaction.objects.count())
self.assertEqual(str(Transaction.objects.latest('id')), 'first')
<|code_end|>
. Write the next line using the current file imports:
from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz
and context from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
, which may include functions, classes, or code. Output only the next line. | def test_transaction_create_get(self): |
Here is a snippet: <|code_start|> self.assertTrue(logged_in)
self.assertEqual(0, DebtLoan.objects.count())
response = c.post(reverse('debt_loan_create'),
{'with_who': 'FooBar inc.',
'title': 'forty-two',
'amount': 42,
'category': DebtLoan.LOAN},
follow=True)
self.assertRedirects(response, reverse('debt_loan_list'))
self.assertEqual(1, DebtLoan.objects.count())
self.assertEqual('forty-two', DebtLoan.objects.latest('id').title)
self.assertEqual(42, DebtLoan.objects.latest('id').amount)
def test_debt_loan_update_get(self):
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
t = DebtLoanFactory(title='first',
amount=1,
category=DebtLoan.LOAN)
self.assertEqual(1, DebtLoan.objects.count())
response = c.get(reverse('debt_loan_update', args=[t.id]))
self.assertEqual(200, response.status_code)
def test_debt_loan_update_post(self):
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
<|code_end|>
. Write the next line using the current file imports:
from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz
and context from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
, which may include functions, classes, or code. Output only the next line. | self.assertTrue(logged_in) |
Next line prediction: <|code_start|> response = c.get(reverse('debt_loan_update', args=[t.id]))
self.assertEqual(200, response.status_code)
def test_debt_loan_update_post(self):
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
t = DebtLoanFactory(title='first',
amount=1,
category=DebtLoan.LOAN)
self.assertEqual(1, DebtLoan.objects.count())
response = c.post(reverse('debt_loan_update', args=[t.id]),
{'with_who': 'FooBar inc.',
'title': 'forty-two',
'amount': 42,
'category': DebtLoan.LOAN},
follow=True)
self.assertRedirects(response, reverse('debt_loan_list'))
self.assertEqual(1, DebtLoan.objects.count())
self.assertEqual('forty-two', DebtLoan.objects.latest('id').title)
self.assertEqual(42, DebtLoan.objects.latest('id').amount)
def test_debt_loan_delete(self):
c = Client()
logged_in = c.login(username=self.user.username, password='secret')
self.assertTrue(logged_in)
t = DebtLoanFactory(title='first',
<|code_end|>
. Use current file imports:
(from datetime import datetime
from unittest import mock
from django.core.urlresolvers import reverse
from django.test import Client
from django.test import TestCase
from books.factories import DebtLoanFactory
from books.factories import TransactionFactory
from books.factories import UserFactory
from books.models import DebtLoan
from books.models import Transaction
from books import services
import pytz)
and context including class names, function names, or small code snippets from other files:
# Path: books/factories.py
# class DebtLoanFactory(factory.DjangoModelFactory):
# with_who = "ACME co."
# title = factory.Sequence(lambda n: 'debt_loan_%d' % n)
# amount = 42
# category = models.DebtLoan.DEBT
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.DebtLoan
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# title = factory.Sequence(lambda n: 'transaction_%d' % n)
# amount = 10
# category = models.Transaction.EXPENSE
# user = factory.SubFactory(UserFactory)
#
# class Meta:
# model = models.Transaction
#
# Path: books/factories.py
# class TransactionFactory(factory.DjangoModelFactory):
# class Meta:
# class DebtLoanFactory(factory.DjangoModelFactory):
# class Meta:
#
# Path: books/models.py
# class DebtLoan(models.Model):
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
#
# with_who = fields.CharField(max_length=255)
# title = fields.CharField(max_length=255, null=True, blank=True)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.PositiveSmallIntegerField(choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# if self.title:
# return "{}: {}".format(self.with_who, self.title)
# else:
# return "{}".format(self.with_who)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/models.py
# class Transaction(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
#
# title = fields.CharField(max_length=255)
# amount = fields.DecimalField(max_digits=10, decimal_places=2)
# category = fields.CharField(max_length=3, choices=CATEGORY_CHOICES)
# created = fields.DateTimeField(default=timezone.now, editable=False)
# modified = fields.DateTimeField(default=timezone.now)
# active = fields.BooleanField(default=True)
# user = models.ForeignKey(User, on_delete=models.CASCADE)
#
# def __str__(self):
# return "{}".format(self.title)
#
# def deactivate(self):
# if self.active:
# self.active = False
# self.save()
#
# Path: books/services.py
# def get_months_transactions(user):
# def get_last_months_transactions(user):
# def get_this_years_transactions(user):
. Output only the next line. | amount=1, |
Predict the next line for this snippet: <|code_start|>
urlpatterns = [
url(r'^transactions/$', views.transaction_list, name='transaction_list'),
url(r'^create/$', views.transaction_create, name='transaction_create'),
url(r'^delete/(?P<pk>\d+)/$', views.transaction_delete,
name='transaction_delete'),
url(r'^update/(?P<pk>\d+)/$', views.transaction_update,
name='transaction_update'),
url(r'^filter/$', views.transaction_list_filter,
name='transaction_list_filter'),
url(r'^debts-loans/$', views.debt_loan_list, name='debt_loan_list'),
url(r'^debts-loans/create/$', views.debt_loan_create,
name='debt_loan_create'),
url(r'^debts-loans/delete/(?P<pk>\d+)/$', views.debt_loan_delete,
<|code_end|>
with the help of current file imports:
from django.conf.urls import url
from books import views
and context from other files:
# Path: books/views.py
# def transaction_list(request):
# def transaction_list_filter(request):
# def transaction_create(request):
# def transaction_delete(request, pk):
# def transaction_update(request, pk):
# def debt_loan_list(request):
# def debt_loan_create(request):
# def debt_loan_delete(request, pk):
# def debt_loan_update(request, pk):
, which may contain function names, class names, or code. Output only the next line. | name='debt_loan_delete'), |
Given the following code snippet before the placeholder: <|code_start|>
class TransactionForm(forms.ModelForm):
class Meta:
model = models.Transaction
<|code_end|>
, predict the next line using imports from the current file:
from django import forms
from books import models
and context including class names, function names, and sometimes code from other files:
# Path: books/models.py
# class Transaction(models.Model):
# class DebtLoan(models.Model):
# EXPENSE = 'exp'
# INCOME = 'inc'
# CATEGORY_CHOICES = (
# (EXPENSE, 'expense'),
# (INCOME, 'income'),
# )
# DEBT = 0
# LOAN = 1
# CATEGORY_CHOICES = (
# (DEBT, 'debt'),
# (LOAN, 'loan'),
# )
# def __str__(self):
# def deactivate(self):
# def __str__(self):
# def deactivate(self):
. Output only the next line. | fields = ['title', 'amount', 'category'] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.