Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Given snippet: <|code_start|> the client identifier and redirection URI.
**state**
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
:param uri: The full redirect URL back to the client.
:param state: The state parameter from the authorization request.
For example, the authorization server redirects the user-agent by
sending the following HTTP response:
.. code-block:: http
HTTP/1.1 302 Found
Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA
&state=xyz
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
query = urlparse.urlparse(uri).query
params = dict(urlparse.parse_qsl(query))
if not 'code' in params:
raise MissingCodeError("Missing code parameter in response.")
if state and params.get('state', None) != state:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
which might include code, classes, or functions. Output only the next line. | raise MismatchingStateError() |
Continue the code snippet: <|code_start|> once, the authorization server MUST deny the request and SHOULD
revoke (when possible) all tokens previously issued based on
that authorization code. The authorization code is bound to
the client identifier and redirection URI.
**state**
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
:param uri: The full redirect URL back to the client.
:param state: The state parameter from the authorization request.
For example, the authorization server redirects the user-agent by
sending the following HTTP response:
.. code-block:: http
HTTP/1.1 302 Found
Location: https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA
&state=xyz
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
query = urlparse.urlparse(uri).query
params = dict(urlparse.parse_qsl(query))
if not 'code' in params:
<|code_end|>
. Use current file imports:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context (classes, functions, or code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
. Output only the next line. | raise MissingCodeError("Missing code parameter in response.") |
Given the code snippet: <|code_start|> "code" and "token".
:param client_id: The client identifier as described in `Section 2.2`_.
:param redirect_uri: The client provided URI to redirect back to after
authorization as described in `Section 3.1.2`_.
:param scope: The scope of the access request as described by
`Section 3.3`_.
:param state: An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent
back to the client. The parameter SHOULD be used for
preventing cross-site request forgery as described in
`Section 10.12`_.
:param kwargs: Extra arguments to embed in the grant/authorization URL.
An example of an authorization code grant authorization URL:
.. code-block:: http
GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
&redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
Host: server.example.com
.. _`W3C.REC-html401-19991224`: http://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224
.. _`Section 2.2`: http://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
.. _`section 10.12`: http://tools.ietf.org/html/rfc6749#section-10.12
"""
if not is_secure_transport(uri):
<|code_end|>
, generate the next line using the imports in this file:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context (functions, classes, or occasionally code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
. Output only the next line. | raise InsecureTransportError() |
Given snippet: <|code_start|> state between the request and callback. The authorization
server includes this value when redirecting the user-agent
back to the client. The parameter SHOULD be used for
preventing cross-site request forgery as described in
`Section 10.12`_.
:param kwargs: Extra arguments to embed in the grant/authorization URL.
An example of an authorization code grant authorization URL:
.. code-block:: http
GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
&redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
Host: server.example.com
.. _`W3C.REC-html401-19991224`: http://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224
.. _`Section 2.2`: http://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
.. _`section 10.12`: http://tools.ietf.org/html/rfc6749#section-10.12
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
params = [(('response_type', response_type)),
(('client_id', client_id))]
if redirect_uri:
params.append(('redirect_uri', redirect_uri))
if scope:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
which might include code, classes, or functions. Output only the next line. | params.append(('scope', list_to_scope(scope))) |
Continue the code snippet: <|code_start|> expire in one hour from the time the response was generated.
If omitted, the authorization server SHOULD provide the
expiration time via other means or document the default value.
**scope**
OPTIONAL, if identical to the scope requested by the client,
otherwise REQUIRED. The scope of the access token as described
by Section 3.3.
**state**
REQUIRED if the "state" parameter was present in the client
authorization request. The exact value received from the
client.
Similar to the authorization code response, but with a full token provided
in the URL fragment:
.. code-block:: http
HTTP/1.1 302 Found
Location: http://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA
&state=xyz&token_type=example&expires_in=3600
"""
if not is_secure_transport(uri):
raise InsecureTransportError()
fragment = urlparse.urlparse(uri).fragment
params = dict(urlparse.parse_qsl(fragment, keep_blank_values=True))
if 'scope' in params:
<|code_end|>
. Use current file imports:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context (classes, functions, or code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
. Output only the next line. | params['scope'] = scope_to_list(params['scope']) |
Given the code snippet: <|code_start|> :param response_type: To indicate which OAuth 2 grant/flow is required,
"code" and "token".
:param client_id: The client identifier as described in `Section 2.2`_.
:param redirect_uri: The client provided URI to redirect back to after
authorization as described in `Section 3.1.2`_.
:param scope: The scope of the access request as described by
`Section 3.3`_.
:param state: An opaque value used by the client to maintain
state between the request and callback. The authorization
server includes this value when redirecting the user-agent
back to the client. The parameter SHOULD be used for
preventing cross-site request forgery as described in
`Section 10.12`_.
:param kwargs: Extra arguments to embed in the grant/authorization URL.
An example of an authorization code grant authorization URL:
.. code-block:: http
GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
&redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
Host: server.example.com
.. _`W3C.REC-html401-19991224`: http://tools.ietf.org/html/rfc6749#ref-W3C.REC-html401-19991224
.. _`Section 2.2`: http://tools.ietf.org/html/rfc6749#section-2.2
.. _`Section 3.1.2`: http://tools.ietf.org/html/rfc6749#section-3.1.2
.. _`Section 3.3`: http://tools.ietf.org/html/rfc6749#section-3.3
.. _`section 10.12`: http://tools.ietf.org/html/rfc6749#section-10.12
"""
<|code_end|>
, generate the next line using the imports in this file:
import json
import time
import urlparse
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list, is_secure_transport
and context (functions, classes, or occasionally code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# def raise_from_error(error, params=None):
# import inspect
# import sys
# kwargs = {
# 'description': params.get('error_description'),
# 'uri': params.get('error_uri'),
# 'state': params.get('state')
# }
# for _, cls in inspect.getmembers(sys.modules[__name__], inspect.isclass):
# if cls.error == error:
# raise cls(**kwargs)
#
# class MissingTokenError(OAuth2Error):
# error = 'missing_token'
#
# class MissingTokenTypeError(OAuth2Error):
# error = 'missing_token_type'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class MismatchingStateError(OAuth2Error):
# error = 'mismatching_state'
# description = 'CSRF Warning! State not equal in request and response.'
#
# class MissingCodeError(OAuth2Error):
# error = 'missing_code'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class InsecureTransportError(OAuth2Error):
# error = 'insecure_transport'
# description = 'OAuth 2 MUST utilize https.'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/utils.py
# def list_to_scope(scope):
# """Convert a list of scopes to a space separated string."""
# if isinstance(scope, unicode_type) or scope is None:
# return scope
# elif isinstance(scope, list):
# return " ".join([unicode_type(s) for s in scope])
# else:
# raise ValueError("Invalid scope, must be string or list.")
#
# def scope_to_list(scope):
# """Convert a space separated string to a list of scopes."""
# if isinstance(scope, list):
# return [unicode_type(s) for s in scope]
# elif scope is None:
# return None
# else:
# return scope.split(" ")
#
# def is_secure_transport(uri):
# """Check if the uri is over ssl."""
# if os.environ.get('OAUTHLIB_INSECURE_TRANSPORT'):
# return True
# return uri.lower().startswith('https://')
. Output only the next line. | if not is_secure_transport(uri): |
Continue the code snippet: <|code_start|>
class BaseEndpoint(object):
def __init__(self):
self._available = True
self._catch_errors = False
@property
def available(self):
return self._available
@available.setter
def available(self, available):
self._available = available
@property
def catch_errors(self):
return self._catch_errors
@catch_errors.setter
def catch_errors(self, catch_errors):
self._catch_errors = catch_errors
def catch_errors_and_unavailability(f):
@functools.wraps(f)
def wrapper(endpoint, uri, *args, **kwargs):
if not endpoint.available:
<|code_end|>
. Use current file imports:
import functools
from oauthlib.common import log
from ..errors import TemporarilyUnavailableError, ServerError
from ..errors import FatalClientError, OAuth2Error
and context (classes, functions, or code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class TemporarilyUnavailableError(OAuth2Error):
# """The authorization server is currently unable to handle the request
# due to a temporary overloading or maintenance of the server.
# (This error code is needed because a 503 Service Unavailable HTTP
# status code cannot be returned to the client via a HTTP redirect.)
# """
# error = 'temporarily_unavailable'
#
# class ServerError(OAuth2Error):
# """The authorization server encountered an unexpected condition that
# prevented it from fulfilling the request. (This error code is needed
# because a 500 Internal Server Error HTTP status code cannot be returned
# to the client via a HTTP redirect.)
# """
# error = 'server_error'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class FatalClientError(OAuth2Error):
# """Errors during authorization where user should not be redirected back.
#
# If the request fails due to a missing, invalid, or mismatching
# redirection URI, or if the client identifier is missing or invalid,
# the authorization server SHOULD inform the resource owner of the
# error and MUST NOT automatically redirect the user-agent to the
# invalid redirection URI.
#
# Instead the user should be informed of the error by the provider itself.
# """
# pass
#
# class OAuth2Error(Exception):
# error = None
# status_code = 400
#
# def __init__(self, description=None, uri=None, state=None, status_code=None,
# request=None):
# """
# description: A human-readable ASCII [USASCII] text providing
# additional information, used to assist the client
# developer in understanding the error that occurred.
# Values for the "error_description" parameter MUST NOT
# include characters outside the set
# x20-21 / x23-5B / x5D-7E.
#
# uri: A URI identifying a human-readable web page with information
# about the error, used to provide the client developer with
# additional information about the error. Values for the
# "error_uri" parameter MUST conform to the URI- Reference
# syntax, and thus MUST NOT include characters outside the set
# x21 / x23-5B / x5D-7E.
#
# state: A CSRF protection value received from the client.
#
# request: Oauthlib Request object
# """
# self.description = description
# self.uri = uri
# self.state = state
#
# if status_code:
# self.status_code = status_code
#
# if request:
# self.redirect_uri = request.redirect_uri
# self.client_id = request.client_id
# self.scopes = request.scopes
# self.response_type = request.response_type
# self.grant_type = request.grant_type
#
# def in_uri(self, uri):
# return add_params_to_uri(uri, self.twotuples)
#
# @property
# def twotuples(self):
# error = [('error', self.error)]
# if self.description:
# error.append(('error_description', self.description))
# if self.uri:
# error.append(('error_uri', self.uri))
# if self.state:
# error.append(('state', self.state))
# return error
#
# @property
# def urlencoded(self):
# return urlencode(self.twotuples)
#
# @property
# def json(self):
# return json.dumps(dict(self.twotuples))
. Output only the next line. | e = TemporarilyUnavailableError() |
Given snippet: <|code_start|>
@available.setter
def available(self, available):
self._available = available
@property
def catch_errors(self):
return self._catch_errors
@catch_errors.setter
def catch_errors(self, catch_errors):
self._catch_errors = catch_errors
def catch_errors_and_unavailability(f):
@functools.wraps(f)
def wrapper(endpoint, uri, *args, **kwargs):
if not endpoint.available:
e = TemporarilyUnavailableError()
log.info('Endpoint unavailable, ignoring request %s.' % uri)
return {}, e.json, 503
if endpoint.catch_errors:
try:
return f(endpoint, uri, *args, **kwargs)
except OAuth2Error:
raise
except FatalClientError:
raise
except Exception as e:
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import functools
from oauthlib.common import log
from ..errors import TemporarilyUnavailableError, ServerError
from ..errors import FatalClientError, OAuth2Error
and context:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class TemporarilyUnavailableError(OAuth2Error):
# """The authorization server is currently unable to handle the request
# due to a temporary overloading or maintenance of the server.
# (This error code is needed because a 503 Service Unavailable HTTP
# status code cannot be returned to the client via a HTTP redirect.)
# """
# error = 'temporarily_unavailable'
#
# class ServerError(OAuth2Error):
# """The authorization server encountered an unexpected condition that
# prevented it from fulfilling the request. (This error code is needed
# because a 500 Internal Server Error HTTP status code cannot be returned
# to the client via a HTTP redirect.)
# """
# error = 'server_error'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class FatalClientError(OAuth2Error):
# """Errors during authorization where user should not be redirected back.
#
# If the request fails due to a missing, invalid, or mismatching
# redirection URI, or if the client identifier is missing or invalid,
# the authorization server SHOULD inform the resource owner of the
# error and MUST NOT automatically redirect the user-agent to the
# invalid redirection URI.
#
# Instead the user should be informed of the error by the provider itself.
# """
# pass
#
# class OAuth2Error(Exception):
# error = None
# status_code = 400
#
# def __init__(self, description=None, uri=None, state=None, status_code=None,
# request=None):
# """
# description: A human-readable ASCII [USASCII] text providing
# additional information, used to assist the client
# developer in understanding the error that occurred.
# Values for the "error_description" parameter MUST NOT
# include characters outside the set
# x20-21 / x23-5B / x5D-7E.
#
# uri: A URI identifying a human-readable web page with information
# about the error, used to provide the client developer with
# additional information about the error. Values for the
# "error_uri" parameter MUST conform to the URI- Reference
# syntax, and thus MUST NOT include characters outside the set
# x21 / x23-5B / x5D-7E.
#
# state: A CSRF protection value received from the client.
#
# request: Oauthlib Request object
# """
# self.description = description
# self.uri = uri
# self.state = state
#
# if status_code:
# self.status_code = status_code
#
# if request:
# self.redirect_uri = request.redirect_uri
# self.client_id = request.client_id
# self.scopes = request.scopes
# self.response_type = request.response_type
# self.grant_type = request.grant_type
#
# def in_uri(self, uri):
# return add_params_to_uri(uri, self.twotuples)
#
# @property
# def twotuples(self):
# error = [('error', self.error)]
# if self.description:
# error.append(('error_description', self.description))
# if self.uri:
# error.append(('error_uri', self.uri))
# if self.state:
# error.append(('state', self.state))
# return error
#
# @property
# def urlencoded(self):
# return urlencode(self.twotuples)
#
# @property
# def json(self):
# return json.dumps(dict(self.twotuples))
which might include code, classes, or functions. Output only the next line. | error = ServerError() |
Given snippet: <|code_start|> @property
def available(self):
return self._available
@available.setter
def available(self, available):
self._available = available
@property
def catch_errors(self):
return self._catch_errors
@catch_errors.setter
def catch_errors(self, catch_errors):
self._catch_errors = catch_errors
def catch_errors_and_unavailability(f):
@functools.wraps(f)
def wrapper(endpoint, uri, *args, **kwargs):
if not endpoint.available:
e = TemporarilyUnavailableError()
log.info('Endpoint unavailable, ignoring request %s.' % uri)
return {}, e.json, 503
if endpoint.catch_errors:
try:
return f(endpoint, uri, *args, **kwargs)
except OAuth2Error:
raise
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import functools
from oauthlib.common import log
from ..errors import TemporarilyUnavailableError, ServerError
from ..errors import FatalClientError, OAuth2Error
and context:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class TemporarilyUnavailableError(OAuth2Error):
# """The authorization server is currently unable to handle the request
# due to a temporary overloading or maintenance of the server.
# (This error code is needed because a 503 Service Unavailable HTTP
# status code cannot be returned to the client via a HTTP redirect.)
# """
# error = 'temporarily_unavailable'
#
# class ServerError(OAuth2Error):
# """The authorization server encountered an unexpected condition that
# prevented it from fulfilling the request. (This error code is needed
# because a 500 Internal Server Error HTTP status code cannot be returned
# to the client via a HTTP redirect.)
# """
# error = 'server_error'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class FatalClientError(OAuth2Error):
# """Errors during authorization where user should not be redirected back.
#
# If the request fails due to a missing, invalid, or mismatching
# redirection URI, or if the client identifier is missing or invalid,
# the authorization server SHOULD inform the resource owner of the
# error and MUST NOT automatically redirect the user-agent to the
# invalid redirection URI.
#
# Instead the user should be informed of the error by the provider itself.
# """
# pass
#
# class OAuth2Error(Exception):
# error = None
# status_code = 400
#
# def __init__(self, description=None, uri=None, state=None, status_code=None,
# request=None):
# """
# description: A human-readable ASCII [USASCII] text providing
# additional information, used to assist the client
# developer in understanding the error that occurred.
# Values for the "error_description" parameter MUST NOT
# include characters outside the set
# x20-21 / x23-5B / x5D-7E.
#
# uri: A URI identifying a human-readable web page with information
# about the error, used to provide the client developer with
# additional information about the error. Values for the
# "error_uri" parameter MUST conform to the URI- Reference
# syntax, and thus MUST NOT include characters outside the set
# x21 / x23-5B / x5D-7E.
#
# state: A CSRF protection value received from the client.
#
# request: Oauthlib Request object
# """
# self.description = description
# self.uri = uri
# self.state = state
#
# if status_code:
# self.status_code = status_code
#
# if request:
# self.redirect_uri = request.redirect_uri
# self.client_id = request.client_id
# self.scopes = request.scopes
# self.response_type = request.response_type
# self.grant_type = request.grant_type
#
# def in_uri(self, uri):
# return add_params_to_uri(uri, self.twotuples)
#
# @property
# def twotuples(self):
# error = [('error', self.error)]
# if self.description:
# error.append(('error_description', self.description))
# if self.uri:
# error.append(('error_uri', self.uri))
# if self.state:
# error.append(('state', self.state))
# return error
#
# @property
# def urlencoded(self):
# return urlencode(self.twotuples)
#
# @property
# def json(self):
# return json.dumps(dict(self.twotuples))
which might include code, classes, or functions. Output only the next line. | except FatalClientError: |
Next line prediction: <|code_start|> self._catch_errors = False
@property
def available(self):
return self._available
@available.setter
def available(self, available):
self._available = available
@property
def catch_errors(self):
return self._catch_errors
@catch_errors.setter
def catch_errors(self, catch_errors):
self._catch_errors = catch_errors
def catch_errors_and_unavailability(f):
@functools.wraps(f)
def wrapper(endpoint, uri, *args, **kwargs):
if not endpoint.available:
e = TemporarilyUnavailableError()
log.info('Endpoint unavailable, ignoring request %s.' % uri)
return {}, e.json, 503
if endpoint.catch_errors:
try:
return f(endpoint, uri, *args, **kwargs)
<|code_end|>
. Use current file imports:
(import functools
from oauthlib.common import log
from ..errors import TemporarilyUnavailableError, ServerError
from ..errors import FatalClientError, OAuth2Error)
and context including class names, function names, or small code snippets from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class TemporarilyUnavailableError(OAuth2Error):
# """The authorization server is currently unable to handle the request
# due to a temporary overloading or maintenance of the server.
# (This error code is needed because a 503 Service Unavailable HTTP
# status code cannot be returned to the client via a HTTP redirect.)
# """
# error = 'temporarily_unavailable'
#
# class ServerError(OAuth2Error):
# """The authorization server encountered an unexpected condition that
# prevented it from fulfilling the request. (This error code is needed
# because a 500 Internal Server Error HTTP status code cannot be returned
# to the client via a HTTP redirect.)
# """
# error = 'server_error'
#
# Path: twitter-winner/oauthlib/oauth2/rfc6749/errors.py
# class FatalClientError(OAuth2Error):
# """Errors during authorization where user should not be redirected back.
#
# If the request fails due to a missing, invalid, or mismatching
# redirection URI, or if the client identifier is missing or invalid,
# the authorization server SHOULD inform the resource owner of the
# error and MUST NOT automatically redirect the user-agent to the
# invalid redirection URI.
#
# Instead the user should be informed of the error by the provider itself.
# """
# pass
#
# class OAuth2Error(Exception):
# error = None
# status_code = 400
#
# def __init__(self, description=None, uri=None, state=None, status_code=None,
# request=None):
# """
# description: A human-readable ASCII [USASCII] text providing
# additional information, used to assist the client
# developer in understanding the error that occurred.
# Values for the "error_description" parameter MUST NOT
# include characters outside the set
# x20-21 / x23-5B / x5D-7E.
#
# uri: A URI identifying a human-readable web page with information
# about the error, used to provide the client developer with
# additional information about the error. Values for the
# "error_uri" parameter MUST conform to the URI- Reference
# syntax, and thus MUST NOT include characters outside the set
# x21 / x23-5B / x5D-7E.
#
# state: A CSRF protection value received from the client.
#
# request: Oauthlib Request object
# """
# self.description = description
# self.uri = uri
# self.state = state
#
# if status_code:
# self.status_code = status_code
#
# if request:
# self.redirect_uri = request.redirect_uri
# self.client_id = request.client_id
# self.scopes = request.scopes
# self.response_type = request.response_type
# self.grant_type = request.grant_type
#
# def in_uri(self, uri):
# return add_params_to_uri(uri, self.twotuples)
#
# @property
# def twotuples(self):
# error = [('error', self.error)]
# if self.description:
# error.append(('error_description', self.description))
# if self.uri:
# error.append(('error_uri', self.uri))
# if self.state:
# error.append(('state', self.state))
# return error
#
# @property
# def urlencoded(self):
# return urlencode(self.twotuples)
#
# @property
# def json(self):
# return json.dumps(dict(self.twotuples))
. Output only the next line. | except OAuth2Error: |
Next line prediction: <|code_start|># -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
<|code_end|>
. Use current file imports:
(from oauthlib.common import Request, log
from .base import BaseEndpoint, catch_errors_and_unavailability)
and context including class names, function names, or small code snippets from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/endpoints/base.py
# class BaseEndpoint(object):
# def __init__(self):
# self._available = True
# self._catch_errors = False
#
# @property
# def available(self):
# return self._available
#
# @available.setter
# def available(self, available):
# self._available = available
#
# @property
# def catch_errors(self):
# return self._catch_errors
#
# @catch_errors.setter
# def catch_errors(self, catch_errors):
# self._catch_errors = catch_errors
#
# def catch_errors_and_unavailability(f):
# @functools.wraps(f)
# def wrapper(endpoint, uri, *args, **kwargs):
# if not endpoint.available:
# e = TemporarilyUnavailableError()
# log.info('Endpoint unavailable, ignoring request %s.' % uri)
# return {}, e.json, 503
#
# if endpoint.catch_errors:
# try:
# return f(endpoint, uri, *args, **kwargs)
# except OAuth2Error:
# raise
# except FatalClientError:
# raise
# except Exception as e:
# error = ServerError()
# log.warning('Exception caught while processing request, %s.' % e)
# return {}, error.json, 500
# else:
# return f(endpoint, uri, *args, **kwargs)
# return wrapper
. Output only the next line. | class TokenEndpoint(BaseEndpoint): |
Using the snippet: <|code_start|> unrecognized request parameters. Request and response parameters
MUST NOT be included more than once::
# Delegated to each grant type.
.. _`Appendix B`: http://tools.ietf.org/html/rfc6749#appendix-B
"""
def __init__(self, default_grant_type, default_token_type, grant_types):
BaseEndpoint.__init__(self)
self._grant_types = grant_types
self._default_token_type = default_token_type
self._default_grant_type = default_grant_type
@property
def grant_types(self):
return self._grant_types
@property
def default_grant_type(self):
return self._default_grant_type
@property
def default_grant_type_handler(self):
return self.grant_types.get(self.default_grant_type)
@property
def default_token_type(self):
return self._default_token_type
<|code_end|>
, determine the next line of code. You have imports:
from oauthlib.common import Request, log
from .base import BaseEndpoint, catch_errors_and_unavailability
and context (class names, function names, or code) available:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/endpoints/base.py
# class BaseEndpoint(object):
# def __init__(self):
# self._available = True
# self._catch_errors = False
#
# @property
# def available(self):
# return self._available
#
# @available.setter
# def available(self, available):
# self._available = available
#
# @property
# def catch_errors(self):
# return self._catch_errors
#
# @catch_errors.setter
# def catch_errors(self, catch_errors):
# self._catch_errors = catch_errors
#
# def catch_errors_and_unavailability(f):
# @functools.wraps(f)
# def wrapper(endpoint, uri, *args, **kwargs):
# if not endpoint.available:
# e = TemporarilyUnavailableError()
# log.info('Endpoint unavailable, ignoring request %s.' % uri)
# return {}, e.json, 503
#
# if endpoint.catch_errors:
# try:
# return f(endpoint, uri, *args, **kwargs)
# except OAuth2Error:
# raise
# except FatalClientError:
# raise
# except Exception as e:
# error = ServerError()
# log.warning('Exception caught while processing request, %s.' % e)
# return {}, error.json, 500
# else:
# return f(endpoint, uri, *args, **kwargs)
# return wrapper
. Output only the next line. | @catch_errors_and_unavailability |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
<|code_end|>
, determine the next line of code. You have imports:
from oauthlib.common import Request, log
from .base import BaseEndpoint, catch_errors_and_unavailability
and context (class names, function names, or code) available:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/endpoints/base.py
# class BaseEndpoint(object):
# def __init__(self):
# self._available = True
# self._catch_errors = False
#
# @property
# def available(self):
# return self._available
#
# @available.setter
# def available(self, available):
# self._available = available
#
# @property
# def catch_errors(self):
# return self._catch_errors
#
# @catch_errors.setter
# def catch_errors(self, catch_errors):
# self._catch_errors = catch_errors
#
# def catch_errors_and_unavailability(f):
# @functools.wraps(f)
# def wrapper(endpoint, uri, *args, **kwargs):
# if not endpoint.available:
# e = TemporarilyUnavailableError()
# log.info('Endpoint unavailable, ignoring request %s.' % uri)
# return {}, e.json, 503
#
# if endpoint.catch_errors:
# try:
# return f(endpoint, uri, *args, **kwargs)
# except OAuth2Error:
# raise
# except FatalClientError:
# raise
# except Exception as e:
# error = ServerError()
# log.warning('Exception caught while processing request, %s.' % e)
# return {}, error.json, 500
# else:
# return f(endpoint, uri, *args, **kwargs)
# return wrapper
. Output only the next line. | class ResourceEndpoint(BaseEndpoint): |
Continue the code snippet: <|code_start|> # For most cases, returning a 403 should suffice.
The method in which the client utilizes the access token to
authenticate with the resource server depends on the type of access
token issued by the authorization server. Typically, it involves
using the HTTP "Authorization" request header field [RFC2617] with an
authentication scheme defined by the specification of the access
token type used, such as [RFC6750]::
# Access tokens may also be provided in query and body
https://example.com/protected?access_token=kjfch2345sdf # Query
access_token=sdf23409df # Body
"""
def __init__(self, default_token, token_types):
BaseEndpoint.__init__(self)
self._tokens = token_types
self._default_token = default_token
@property
def default_token(self):
return self._default_token
@property
def default_token_type_handler(self):
return self.tokens.get(self.default_token)
@property
def tokens(self):
return self._tokens
<|code_end|>
. Use current file imports:
from oauthlib.common import Request, log
from .base import BaseEndpoint, catch_errors_and_unavailability
and context (classes, functions, or code) from other files:
# Path: twitter-winner/oauthlib/oauth2/rfc6749/endpoints/base.py
# class BaseEndpoint(object):
# def __init__(self):
# self._available = True
# self._catch_errors = False
#
# @property
# def available(self):
# return self._available
#
# @available.setter
# def available(self, available):
# self._available = available
#
# @property
# def catch_errors(self):
# return self._catch_errors
#
# @catch_errors.setter
# def catch_errors(self, catch_errors):
# self._catch_errors = catch_errors
#
# def catch_errors_and_unavailability(f):
# @functools.wraps(f)
# def wrapper(endpoint, uri, *args, **kwargs):
# if not endpoint.available:
# e = TemporarilyUnavailableError()
# log.info('Endpoint unavailable, ignoring request %s.' % uri)
# return {}, e.json, 503
#
# if endpoint.catch_errors:
# try:
# return f(endpoint, uri, *args, **kwargs)
# except OAuth2Error:
# raise
# except FatalClientError:
# raise
# except Exception as e:
# error = ServerError()
# log.warning('Exception caught while processing request, %s.' % e)
# return {}, error.json, 500
# else:
# return f(endpoint, uri, *args, **kwargs)
# return wrapper
. Output only the next line. | @catch_errors_and_unavailability |
Based on the snippet: <|code_start|> '/\*[^*]*WHITELIST = (\{.*?\})\s*\*/', rawrules, flags=re.DOTALL)
return set(json.loads(m.group(1)) if m else [])
def get_rules(self):
rawrules = self._recursive_fetch(self.rules_url)
try:
if type(rawrules) is unicode:
return rawrules.encode('ascii', 'ignore')
except NameError:
pass # py3
return rawrules
def get_whitelist(self):
if not self.whitelist_url:
return set()
data = self._httpget(self.whitelist_url)
hashes = re.findall('[a-f0-9]{40}', data) # assume sha1 hex hash
return set(hashes)
def transform_rules(self, rawrules):
"""For specific rules providers, to mangle into mwscan compatible form"""
whitelist = set()
return rawrules, whitelist
def _get_cache_filename(self, url):
hash = hashlib.sha1(url.encode()).hexdigest()
cachefile = self.__class__.__name__.lower() + '.cache_' + hash
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import re
import json
import logging
import hashlib
import requests
import yara
import time
from requests.exceptions import RequestException
from mwscan import settings
and context (classes, functions, sometimes code) from other files:
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
. Output only the next line. | cachefile = os.path.join(settings.CACHEDIR, cachefile) |
Given snippet: <|code_start|>
try:
except ImportError:
class TestWebMalwareScanner(TestCase):
def _load_file_rules(self, path):
args = namedtuple('Args', 'rules')(rules=path)
return Files(args=args).get()
def setUp(self):
settings.CACHEDIR = '/tmp'
settings.LAST_RUN_FILE = '/tmp/last_run'
self.fixture_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures')
self.rules_path = os.path.join(self.fixture_path, 'rules.yar')
self.target_path = os.path.join(self.fixture_path, 'files')
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import os
import time
import mock
from unittest import TestCase
from mwscan import scan, settings
from mwscan.ruleset import Files
from collections import namedtuple
from unittest import mock
and context:
# Path: mwscan/scan.py
# def parse_args():
# def scanpath_to_runfile(path):
# def load_last_run_results(scanpath):
# def write_last_run_results(scanpath, malware):
# def find_targets(root_path, required_extensions=None,
# exclude_patterns=None, follow_symlinks=False):
# def scan_files(files, rules, whitelist, find_cb=None):
# def log_find(path, matches, sha1sum, excluded=None, show_sha1=False):
# def load_exclude_patterns(path):
# def main():
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
#
# Path: mwscan/ruleset.py
# class Files(RulesProvider):
#
# # initialize with Files(args)
#
# def get_rules(self):
# path = self._args.rules
# logging.info("Loading {0}".format(self._args.rules))
# with open(path, 'rb') as fh:
# return fh.read().decode('utf-8', 'ignore')
which might include code, classes, or functions. Output only the next line. | self.state_file = scan.scanpath_to_runfile(self.target_path) |
Given the following code snippet before the placeholder: <|code_start|>
try:
except ImportError:
class TestWebMalwareScanner(TestCase):
def _load_file_rules(self, path):
args = namedtuple('Args', 'rules')(rules=path)
return Files(args=args).get()
def setUp(self):
<|code_end|>
, predict the next line using imports from the current file:
import os
import time
import mock
from unittest import TestCase
from mwscan import scan, settings
from mwscan.ruleset import Files
from collections import namedtuple
from unittest import mock
and context including class names, function names, and sometimes code from other files:
# Path: mwscan/scan.py
# def parse_args():
# def scanpath_to_runfile(path):
# def load_last_run_results(scanpath):
# def write_last_run_results(scanpath, malware):
# def find_targets(root_path, required_extensions=None,
# exclude_patterns=None, follow_symlinks=False):
# def scan_files(files, rules, whitelist, find_cb=None):
# def log_find(path, matches, sha1sum, excluded=None, show_sha1=False):
# def load_exclude_patterns(path):
# def main():
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
#
# Path: mwscan/ruleset.py
# class Files(RulesProvider):
#
# # initialize with Files(args)
#
# def get_rules(self):
# path = self._args.rules
# logging.info("Loading {0}".format(self._args.rules))
# with open(path, 'rb') as fh:
# return fh.read().decode('utf-8', 'ignore')
. Output only the next line. | settings.CACHEDIR = '/tmp' |
Predict the next line for this snippet: <|code_start|>
try:
except ImportError:
class TestWebMalwareScanner(TestCase):
def _load_file_rules(self, path):
args = namedtuple('Args', 'rules')(rules=path)
<|code_end|>
with the help of current file imports:
import os
import time
import mock
from unittest import TestCase
from mwscan import scan, settings
from mwscan.ruleset import Files
from collections import namedtuple
from unittest import mock
and context from other files:
# Path: mwscan/scan.py
# def parse_args():
# def scanpath_to_runfile(path):
# def load_last_run_results(scanpath):
# def write_last_run_results(scanpath, malware):
# def find_targets(root_path, required_extensions=None,
# exclude_patterns=None, follow_symlinks=False):
# def scan_files(files, rules, whitelist, find_cb=None):
# def log_find(path, matches, sha1sum, excluded=None, show_sha1=False):
# def load_exclude_patterns(path):
# def main():
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
#
# Path: mwscan/ruleset.py
# class Files(RulesProvider):
#
# # initialize with Files(args)
#
# def get_rules(self):
# path = self._args.rules
# logging.info("Loading {0}".format(self._args.rules))
# with open(path, 'rb') as fh:
# return fh.read().decode('utf-8', 'ignore')
, which may contain function names, class names, or code. Output only the next line. | return Files(args=args).get() |
Continue the code snippet: <|code_start|>
try:
except ImportError:
# speed up repeated tests, depends on py2/3 pickle version
try:
requests_cache.install_cache('rulesets-{0}'.format(sys.version_info[0]), expire_after=3600 * 24)
except ImportError:
pass
settings.CACHEDIR = '/cachedir'
openmock = mwscan.ruleset.open = mock.mock_open()
class TestRuleset():
"""
Checks rule providers:
- Minimum number of parsed rules / whitelists
- Do rules compile with our Yara?
DRY: uses test generators (doesn't work with unittest.TestCase)
"""
def _check_provider(self, provider, expected_numrules, expected_numwhitelist):
<|code_end|>
. Use current file imports:
import os
import sys
import mwscan.ruleset
import mock
import requests_cache
from mwscan.ruleset import providers, RulesProvider
from mwscan import settings
from unittest import TestCase
from unittest import mock
and context (classes, functions, or code) from other files:
# Path: mwscan/ruleset.py
# def strip_last_url_path(url):
# def last_url_path(url):
# def __init__(self, **kwargs):
# def find_whitelist_in_rawrules(self, rawrules):
# def get_rules(self):
# def get_whitelist(self):
# def transform_rules(self, rawrules):
# def _get_cache_filename(self, url):
# def _get_cache_timestamp_content(self, cachefile):
# def _httpget(self, url):
# def get(self):
# def _recursive_fetch(self, url):
# def include(match):
# def get_rules(self):
# def transform_rules(self, rawrules):
# class RulesProvider:
# class Files(RulesProvider):
# class NBS(RulesProvider):
# class Magemojo(RulesProvider):
# class Magesec(RulesProvider):
# class Mwscan(RulesProvider):
# class MageHost(RulesProvider):
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
. Output only the next line. | provobj = providers[provider]() |
Predict the next line after this snippet: <|code_start|> # no other way to count unfortunately
got_numrules = len(list(iter(rules)))
got_numwhitelist = len(whitelist)
assert type(rules).__name__ == 'Rules', \
'wrong type: %s' % type(rules).__name__
assert got_numrules >= expected_numrules, \
'wrong num rules: %s' % got_numrules
assert got_numwhitelist >= expected_numwhitelist, \
'wrong num whitelists %s' % got_numwhitelist
def test_providers(self):
tests = (
# provider, min rules, min whitelisted
('nbs', 15, 1279),
('byte', 60, 20),
('magesec', 150, 100000),
# ('magemojo', 150, 65000),
)
for provider, numrules, numwhitelists in tests:
yield self._check_provider, provider, numrules, numwhitelists
class TestHttpGet(TestCase):
def setUp(self):
<|code_end|>
using the current file's imports:
import os
import sys
import mwscan.ruleset
import mock
import requests_cache
from mwscan.ruleset import providers, RulesProvider
from mwscan import settings
from unittest import TestCase
from unittest import mock
and any relevant context from other files:
# Path: mwscan/ruleset.py
# def strip_last_url_path(url):
# def last_url_path(url):
# def __init__(self, **kwargs):
# def find_whitelist_in_rawrules(self, rawrules):
# def get_rules(self):
# def get_whitelist(self):
# def transform_rules(self, rawrules):
# def _get_cache_filename(self, url):
# def _get_cache_timestamp_content(self, cachefile):
# def _httpget(self, url):
# def get(self):
# def _recursive_fetch(self, url):
# def include(match):
# def get_rules(self):
# def transform_rules(self, rawrules):
# class RulesProvider:
# class Files(RulesProvider):
# class NBS(RulesProvider):
# class Magemojo(RulesProvider):
# class Magesec(RulesProvider):
# class Mwscan(RulesProvider):
# class MageHost(RulesProvider):
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
. Output only the next line. | self.rp = RulesProvider() |
Given the following code snippet before the placeholder: <|code_start|>
try:
except ImportError:
# speed up repeated tests, depends on py2/3 pickle version
try:
requests_cache.install_cache('rulesets-{0}'.format(sys.version_info[0]), expire_after=3600 * 24)
except ImportError:
pass
<|code_end|>
, predict the next line using imports from the current file:
import os
import sys
import mwscan.ruleset
import mock
import requests_cache
from mwscan.ruleset import providers, RulesProvider
from mwscan import settings
from unittest import TestCase
from unittest import mock
and context including class names, function names, and sometimes code from other files:
# Path: mwscan/ruleset.py
# def strip_last_url_path(url):
# def last_url_path(url):
# def __init__(self, **kwargs):
# def find_whitelist_in_rawrules(self, rawrules):
# def get_rules(self):
# def get_whitelist(self):
# def transform_rules(self, rawrules):
# def _get_cache_filename(self, url):
# def _get_cache_timestamp_content(self, cachefile):
# def _httpget(self, url):
# def get(self):
# def _recursive_fetch(self, url):
# def include(match):
# def get_rules(self):
# def transform_rules(self, rawrules):
# class RulesProvider:
# class Files(RulesProvider):
# class NBS(RulesProvider):
# class Magemojo(RulesProvider):
# class Magesec(RulesProvider):
# class Mwscan(RulesProvider):
# class MageHost(RulesProvider):
#
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
. Output only the next line. | settings.CACHEDIR = '/cachedir' |
Continue the code snippet: <|code_start|> but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
try:
except ImportError:
psutil = None
def parse_args():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Scan webfolders for hidden malware.",
)
parser.add_argument('path', help='File or dir to scan.')
parser.add_argument('-n', '--newonly', help='Only show newly found infections since last run.', action='store_true')
parser.add_argument('-d', '--deep', action='store_true', help='Scan all files, not just code.')
parser.add_argument('-q', '--quiet', action='store_true', help='Supress output except for matches.')
parser.add_argument('-c', '--checksum', action='store_true', help='Show SHA1 checksum for matches.')
parser.add_argument('-r', '--rules', help='Yara rules file.')
parser.add_argument('-s', '--ruleset', choices=sorted(providers.keys()), default='mwscan', help='Download and use from upstream')
parser.add_argument('-w', '--whitelist', help='Use extra SHA1 whitelist file.')
parser.add_argument('-f', '--followsymlinks', action='store_true', help='Follow Symlinks.')
<|code_end|>
. Use current file imports:
import os
import re
import sys
import argparse
import logging
import hashlib
import psutil
from functools import partial
from mwscan import settings
from mwscan.ruleset import providers
and context (classes, functions, or code) from other files:
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
#
# Path: mwscan/ruleset.py
# def strip_last_url_path(url):
# def last_url_path(url):
# def __init__(self, **kwargs):
# def find_whitelist_in_rawrules(self, rawrules):
# def get_rules(self):
# def get_whitelist(self):
# def transform_rules(self, rawrules):
# def _get_cache_filename(self, url):
# def _get_cache_timestamp_content(self, cachefile):
# def _httpget(self, url):
# def get(self):
# def _recursive_fetch(self, url):
# def include(match):
# def get_rules(self):
# def transform_rules(self, rawrules):
# class RulesProvider:
# class Files(RulesProvider):
# class NBS(RulesProvider):
# class Magemojo(RulesProvider):
# class Magesec(RulesProvider):
# class Mwscan(RulesProvider):
# class MageHost(RulesProvider):
. Output only the next line. | parser.add_argument('--excludefile', help=argparse.SUPPRESS, default=settings.DEFAULT_EXCLUDEFILE) |
Continue the code snippet: <|code_start|> (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software Foundation,
Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""
try:
except ImportError:
psutil = None
def parse_args():
parser = argparse.ArgumentParser(
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
description="Scan webfolders for hidden malware.",
)
parser.add_argument('path', help='File or dir to scan.')
parser.add_argument('-n', '--newonly', help='Only show newly found infections since last run.', action='store_true')
parser.add_argument('-d', '--deep', action='store_true', help='Scan all files, not just code.')
parser.add_argument('-q', '--quiet', action='store_true', help='Supress output except for matches.')
parser.add_argument('-c', '--checksum', action='store_true', help='Show SHA1 checksum for matches.')
parser.add_argument('-r', '--rules', help='Yara rules file.')
<|code_end|>
. Use current file imports:
import os
import re
import sys
import argparse
import logging
import hashlib
import psutil
from functools import partial
from mwscan import settings
from mwscan.ruleset import providers
and context (classes, functions, or code) from other files:
# Path: mwscan/settings.py
# CACHEDIR = os.path.expanduser('~/.cache/mwscan')
# LAST_RUN_FILE = os.path.join(CACHEDIR, 'last_run')
# DEFAULT_EXCLUDEFILE = os.path.expanduser('~/.config/mwscan/excludes')
# CODE_EXT = ('php', 'phtml', 'js', 'jsx', 'html', 'php3', 'php4', 'php5', 'php7', 'sh', 'ini')
#
# Path: mwscan/ruleset.py
# def strip_last_url_path(url):
# def last_url_path(url):
# def __init__(self, **kwargs):
# def find_whitelist_in_rawrules(self, rawrules):
# def get_rules(self):
# def get_whitelist(self):
# def transform_rules(self, rawrules):
# def _get_cache_filename(self, url):
# def _get_cache_timestamp_content(self, cachefile):
# def _httpget(self, url):
# def get(self):
# def _recursive_fetch(self, url):
# def include(match):
# def get_rules(self):
# def transform_rules(self, rawrules):
# class RulesProvider:
# class Files(RulesProvider):
# class NBS(RulesProvider):
# class Magemojo(RulesProvider):
# class Magesec(RulesProvider):
# class Mwscan(RulesProvider):
# class MageHost(RulesProvider):
. Output only the next line. | parser.add_argument('-s', '--ruleset', choices=sorted(providers.keys()), default='mwscan', help='Download and use from upstream') |
Predict the next line for this snippet: <|code_start|>
# Register your models here.
class CompetitionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Meta:
<|code_end|>
with the help of current file imports:
from django.contrib import admin
from competition.models import Competition, Challenge, ChallengeFile
and context from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
, which may contain function names, class names, or code. Output only the next line. | model = Competition |
Based on the snippet: <|code_start|>
# Register your models here.
class CompetitionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Meta:
model = Competition
class ChallengeAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Meta:
<|code_end|>
, predict the immediate next line with the help of imports:
from django.contrib import admin
from competition.models import Competition, Challenge, ChallengeFile
and context (classes, functions, sometimes code) from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
. Output only the next line. | model = Challenge |
Given the code snippet: <|code_start|>
# Register your models here.
class CompetitionAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Meta:
model = Competition
class ChallengeAdmin(admin.ModelAdmin):
prepopulated_fields = {'slug': ('name',)}
class Meta:
model = Challenge
admin.site.register(Competition, CompetitionAdmin)
admin.site.register(Challenge, ChallengeAdmin)
<|code_end|>
, generate the next line using the imports in this file:
from django.contrib import admin
from competition.models import Competition, Challenge, ChallengeFile
and context (functions, classes, or occasionally code) from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
. Output only the next line. | admin.site.register(ChallengeFile) |
Next line prediction: <|code_start|>
def ctf_sidebar(request):
context = {
'sidebar': {
<|code_end|>
. Use current file imports:
(from competition.models import Competition)
and context including class names, function names, or small code snippets from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
. Output only the next line. | 'ctfs': Competition.objects.only('name', 'slug') |
Predict the next line after this snippet: <|code_start|>
holder = ButtonHolder(
Submit('submit', 'Submit'),
Reset('reset', 'Reset'),
css_class='text-right'
)
self.add_helper.layout = Layout(
Fieldset(
'Add a challenge',
'name',
'point_value',
'progress',
'num_progress'
),
holder
)
self.update_helper.layout = Layout(
Fieldset(
'Update a challenge',
'name',
'point_value',
'progress',
'num_progress'
),
holder
)
class Meta:
<|code_end|>
using the current file's imports:
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Submit, Reset, Layout, Fieldset, ButtonHolder, HTML
from django import forms
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm as PWChangeForm, \
SetPasswordForm
from django.core.urlresolvers import reverse
from competition.models import Challenge, Competition, ChallengeFile
and any relevant context from other files:
# Path: competition/models.py
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
. Output only the next line. | model = Challenge |
Here is a snippet: <|code_start|>
button_holder = ButtonHolder(
Submit('submit', 'Submit'),
Reset('reset', 'Reset'),
css_class='text-right'
)
self.add_helper.layout = Layout(
Fieldset(
'Add a competition',
'name',
'url',
'start_time',
'end_time'
),
button_holder
)
self.update_helper.layout = Layout(
Fieldset(
'Update competition',
'name',
'url',
'start_time',
'end_time'
),
button_holder
)
class Meta:
<|code_end|>
. Write the next line using the current file imports:
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Submit, Reset, Layout, Fieldset, ButtonHolder, HTML
from django import forms
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm as PWChangeForm, \
SetPasswordForm
from django.core.urlresolvers import reverse
from competition.models import Challenge, Competition, ChallengeFile
and context from other files:
# Path: competition/models.py
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
, which may include functions, classes, or code. Output only the next line. | model = Competition |
Given the code snippet: <|code_start|>
self.update_helper = FormHelper()
self.update_helper.form_id = 'update-file'
self.update_helper.form_method = 'post'
self.update_helper.form_action = ''
holder = ButtonHolder(
Submit('submit', 'Submit'),
Reset('reset', 'Reset'),
css_class='text-right'
)
self.add_helper.layout = Layout(
Fieldset(
'Add a file',
HTML('<p>Original filenames are preserved whenever possible.</p>'),
'file',
),
holder
)
self.update_helper.layout = Layout(
Fieldset(
'Update a file',
'file',
),
holder
)
class Meta:
<|code_end|>
, generate the next line using the imports in this file:
from crispy_forms.helper import FormHelper
from crispy_forms_foundation.layout import Submit, Reset, Layout, Fieldset, ButtonHolder, HTML
from django import forms
from django.contrib.auth.forms import UserCreationForm, AuthenticationForm, PasswordChangeForm as PWChangeForm, \
SetPasswordForm
from django.core.urlresolvers import reverse
from competition.models import Challenge, Competition, ChallengeFile
and context (functions, classes, or occasionally code) from other files:
# Path: competition/models.py
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class ChallengeFile(models.Model):
# file = models.FileField(upload_to='files/')
# ctime = models.DateTimeField(auto_created=True)
# mtime = models.DateTimeField(auto_now=True)
# challenge = models.ForeignKey(Challenge, related_name='files')
#
# def __unicode__(self):
# return self.file.name
#
# __str__ = __unicode__
#
# def filename(self):
# return os.path.basename(self.file.name)
. Output only the next line. | model = ChallengeFile |
Next line prediction: <|code_start|>
try:
# not required since it's included, but...
except ImportError:
@login_required
@require_GET
def chart_data(request, ctf_slug):
<|code_end|>
. Use current file imports:
(import json
import datetime as dt
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import resolve
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from django.db.models import Sum
from competition.models import Competition, Challenge
from tools.misc import JSONResponse
from pytz import UTC
from tools.misc import UTC)
and context including class names, function names, or small code snippets from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# Path: tools/misc.py
# class JSONResponse(HttpResponse):
# def __init__(self, *args, **kwargs):
# # Content-Type override
# if 'content_type' not in kwargs:
# kwargs['content_type'] = 'application/json'
# super(JSONResponse, self).__init__(*args, **kwargs)
. Output only the next line. | ctf = get_object_or_404(Competition.objects.prefetch_related('challenges'), slug=ctf_slug) |
Given the code snippet: <|code_start|>
try:
# not required since it's included, but...
except ImportError:
@login_required
@require_GET
def chart_data(request, ctf_slug):
ctf = get_object_or_404(Competition.objects.prefetch_related('challenges'), slug=ctf_slug)
challenges = ctf.challenges
# compute aggregate data
assert isinstance(ctf, Competition)
<|code_end|>
, generate the next line using the imports in this file:
import json
import datetime as dt
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import resolve
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from django.db.models import Sum
from competition.models import Competition, Challenge
from tools.misc import JSONResponse
from pytz import UTC
from tools.misc import UTC
and context (functions, classes, or occasionally code) from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# Path: tools/misc.py
# class JSONResponse(HttpResponse):
# def __init__(self, *args, **kwargs):
# # Content-Type override
# if 'content_type' not in kwargs:
# kwargs['content_type'] = 'application/json'
# super(JSONResponse, self).__init__(*args, **kwargs)
. Output only the next line. | solved_challenges = challenges.filter(progress=Challenge.SOLVED) |
Given the following code snippet before the placeholder: <|code_start|>
# Py2+3 unix time
if ctf.start_time is not None:
start_time = (ctf.start_time - dt.datetime(1970, 1, 1, tzinfo=UTC)).total_seconds()
else:
start_time = None
if ctf.end_time is not None:
end_time = (ctf.end_time - dt.datetime(1970, 1, 1, tzinfo=UTC)).total_seconds()
else:
end_time = None
users = {
'online': 4,
'total': 22
}
pv_sum = Sum('point_value')
points = {
'earned': solved_challenges.aggregate(pv_sum)['point_value__sum'] or 0.001,
'in_progress': in_progress_challenges.aggregate(pv_sum)['point_value__sum'] or 0.001,
'total': challenges.aggregate(pv_sum)['point_value__sum'] or 1
}
data = {
'challenges': challenges_data,
'start_time': start_time,
'end_time': end_time,
'users': users,
'points': points
}
<|code_end|>
, predict the next line using imports from the current file:
import json
import datetime as dt
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import resolve
from django.http import HttpResponse
from django.shortcuts import get_object_or_404
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_GET, require_POST
from django.db.models import Sum
from competition.models import Competition, Challenge
from tools.misc import JSONResponse
from pytz import UTC
from tools.misc import UTC
and context including class names, function names, and sometimes code from other files:
# Path: competition/models.py
# class Competition(models.Model):
# name = models.CharField('Name', max_length=255, unique=True)
# slug = models.SlugField(unique=True)
# url = models.URLField('Competition URL', blank=True)
# start_time = models.DateTimeField(blank=True, null=True)
# end_time = models.DateTimeField(blank=True, null=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_ctf', kwargs={'ctf_slug': self.slug})
#
# class Challenge(models.Model):
# NOT_STARTED = 0
# IN_PROGRESS = 1
# SOLVED = 2
#
# PROGRESS_CHOICES = (
# (NOT_STARTED, 'Not Started'),
# (IN_PROGRESS, 'In Progress'),
# (SOLVED, 'Solved')
# )
#
# name = models.CharField('Name', max_length=255)
# slug = models.SlugField()
# progress = models.PositiveSmallIntegerField(choices=PROGRESS_CHOICES)
# num_progress = models.FloatField('Progress %', default=0)
# point_value = models.FloatField(default=0)
#
# competition = models.ForeignKey(Competition, related_name='challenges')
# last_viewed = models.DateTimeField(auto_created=True)
#
# def __unicode__(self):
# return self.name
#
# __str__ = __unicode__
#
# def get_absolute_url(self):
# return reverse('view_challenge', kwargs={'ctf_slug': self.competition.slug, 'chall_slug': self.slug})
#
# def last_viewed_display(self):
# if self.last_viewed == 0:
# return 'Never'
# else:
# return self.last_viewed
#
# class Meta:
# unique_together = ('name', 'competition')
# ordering = ('progress',)
#
# Path: tools/misc.py
# class JSONResponse(HttpResponse):
# def __init__(self, *args, **kwargs):
# # Content-Type override
# if 'content_type' not in kwargs:
# kwargs['content_type'] = 'application/json'
# super(JSONResponse, self).__init__(*args, **kwargs)
. Output only the next line. | return JSONResponse(json.dumps(data)) |
Predict the next line for this snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Pytest configuration."""
from __future__ import absolute_import, print_function
@pytest.yield_fixture()
def app(request):
"""Flask application fixture."""
# Set temporary instance path for sqlite
instance_path = tempfile.mkdtemp()
app = Flask('testapp', instance_path=instance_path)
InvenioDB(app)
<|code_end|>
with the help of current file imports:
import os
import pytest
import shutil
import tempfile
from flask import Flask
from invenio_db import InvenioDB
from sqlalchemy_utils.functions import create_database, database_exists
from invenio_pidstore import InvenioPIDStore
from invenio_db import db as db_
and context from other files:
# Path: invenio_pidstore/ext.py
# class InvenioPIDStore(object):
# """Invenio-PIDStore extension."""
#
# def __init__(self, app=None,
# minters_entry_point_group='invenio_pidstore.minters',
# fetchers_entry_point_group='invenio_pidstore.fetchers'):
# """Extension initialization.
#
# :param minters_entry_point_group: The entrypoint for minters.
# (Default: `invenio_pidstore.minters`).
# :param fetchers_entry_point_group: The entrypoint for fetchers.
# (Default: `invenio_pidstore.fetchers`).
# """
# if app:
# self._state = self.init_app(
# app, minters_entry_point_group=minters_entry_point_group,
# fetchers_entry_point_group=fetchers_entry_point_group
# )
#
# def init_app(self, app, minters_entry_point_group=None,
# fetchers_entry_point_group=None):
# """Flask application initialization.
#
# Initialize:
#
# * The CLI commands.
#
# * Initialize the logger (Default: `app.debug`).
#
# * Initialize the default admin object link endpoint.
# (Default: `{"rec": "recordmetadata.details_view"}` if
# `invenio-records` is installed, otherwise `{}`).
#
# * Register the `pid_exists` template filter.
#
# * Initialize extension state.
#
# :param app: The Flask application
# :param minters_entry_point_group: The minters entry point group
# (Default: None).
# :param fetchers_entry_point_group: The fetchers entry point group
# (Default: None).
# :returns: PIDStore state application.
# """
# self.init_config(app)
# # Initialize CLI
# app.cli.add_command(cmd)
#
# # Initialize logger
# app.config.setdefault('PIDSTORE_APP_LOGGER_HANDLERS', app.debug)
# if app.config['PIDSTORE_APP_LOGGER_HANDLERS']:
# for handler in app.logger.handlers:
# logger.addHandler(handler)
#
# # Initialize admin object link endpoints.
# try:
# pkg_resources.get_distribution('invenio-records')
# app.config.setdefault('PIDSTORE_OBJECT_ENDPOINTS', dict(
# rec='recordmetadata.details_view',
# ))
# except pkg_resources.DistributionNotFound:
# app.config.setdefault('PIDSTORE_OBJECT_ENDPOINTS', {})
#
# # Register template filter
# app.jinja_env.filters['pid_exists'] = pid_exists
#
# # Initialize extension state.
# state = _PIDStoreState(
# app=app,
# minters_entry_point_group=minters_entry_point_group,
# fetchers_entry_point_group=fetchers_entry_point_group,
# )
# app.extensions['invenio-pidstore'] = state
# return state
#
# def init_config(self, app):
# """Initialize configuration."""
# for k in dir(config):
# if k.startswith('PIDSTORE_') and k not in (
# 'PIDSTORE_OBJECT_ENDPOINTS',
# 'PIDSTORE_APP_LOGGER_HANDLERS'):
# app.config.setdefault(k, getattr(config, k))
#
# def __getattr__(self, name):
# """Proxy to state object."""
# return getattr(self._state, name, None)
, which may contain function names, class names, or code. Output only the next line. | InvenioPIDStore(app) |
Predict the next line after this snippet: <|code_start|> $ ./app-teardown.sh
SPHINX-END
"""
from __future__ import absolute_import, print_function
# Create Flask application
app = Flask(__name__)
app.config.update(
DB_VERSIONING_USER_MODEL=None,
SECRET_KEY='test_key',
SECURITY_PASSWORD_HASH='pbkdf2_sha512',
SECURITY_PASSWORD_SALT="CHANGE_ME_ALSO",
SECURITY_PASSWORD_SCHEMES=[
'pbkdf2_sha512', 'sha512_crypt', 'invenio_aes_encrypted_email'
],
SQLALCHEMY_DATABASE_URI=os.environ.get(
'SQLALCHEMY_DATABASE_URI', 'sqlite:///test.db'
),
WTF_CSRF_ENABLED=False,
)
Babel(app)
Menu(app)
InvenioDB(app)
admin = InvenioAdmin(app)
<|code_end|>
using the current file's imports:
import os
from flask import Flask
from flask_babelex import Babel
from flask_menu import Menu
from invenio_access import InvenioAccess
from invenio_accounts import InvenioAccounts
from invenio_accounts.views import blueprint as accounts_blueprint
from invenio_admin import InvenioAdmin
from invenio_db import InvenioDB
from invenio_records import InvenioRecords
from invenio_pidstore import InvenioPIDStore
and any relevant context from other files:
# Path: invenio_pidstore/ext.py
# class InvenioPIDStore(object):
# """Invenio-PIDStore extension."""
#
# def __init__(self, app=None,
# minters_entry_point_group='invenio_pidstore.minters',
# fetchers_entry_point_group='invenio_pidstore.fetchers'):
# """Extension initialization.
#
# :param minters_entry_point_group: The entrypoint for minters.
# (Default: `invenio_pidstore.minters`).
# :param fetchers_entry_point_group: The entrypoint for fetchers.
# (Default: `invenio_pidstore.fetchers`).
# """
# if app:
# self._state = self.init_app(
# app, minters_entry_point_group=minters_entry_point_group,
# fetchers_entry_point_group=fetchers_entry_point_group
# )
#
# def init_app(self, app, minters_entry_point_group=None,
# fetchers_entry_point_group=None):
# """Flask application initialization.
#
# Initialize:
#
# * The CLI commands.
#
# * Initialize the logger (Default: `app.debug`).
#
# * Initialize the default admin object link endpoint.
# (Default: `{"rec": "recordmetadata.details_view"}` if
# `invenio-records` is installed, otherwise `{}`).
#
# * Register the `pid_exists` template filter.
#
# * Initialize extension state.
#
# :param app: The Flask application
# :param minters_entry_point_group: The minters entry point group
# (Default: None).
# :param fetchers_entry_point_group: The fetchers entry point group
# (Default: None).
# :returns: PIDStore state application.
# """
# self.init_config(app)
# # Initialize CLI
# app.cli.add_command(cmd)
#
# # Initialize logger
# app.config.setdefault('PIDSTORE_APP_LOGGER_HANDLERS', app.debug)
# if app.config['PIDSTORE_APP_LOGGER_HANDLERS']:
# for handler in app.logger.handlers:
# logger.addHandler(handler)
#
# # Initialize admin object link endpoints.
# try:
# pkg_resources.get_distribution('invenio-records')
# app.config.setdefault('PIDSTORE_OBJECT_ENDPOINTS', dict(
# rec='recordmetadata.details_view',
# ))
# except pkg_resources.DistributionNotFound:
# app.config.setdefault('PIDSTORE_OBJECT_ENDPOINTS', {})
#
# # Register template filter
# app.jinja_env.filters['pid_exists'] = pid_exists
#
# # Initialize extension state.
# state = _PIDStoreState(
# app=app,
# minters_entry_point_group=minters_entry_point_group,
# fetchers_entry_point_group=fetchers_entry_point_group,
# )
# app.extensions['invenio-pidstore'] = state
# return state
#
# def init_config(self, app):
# """Initialize configuration."""
# for k in dir(config):
# if k.startswith('PIDSTORE_') and k not in (
# 'PIDSTORE_OBJECT_ENDPOINTS',
# 'PIDSTORE_APP_LOGGER_HANDLERS'):
# app.config.setdefault(k, getattr(config, k))
#
# def __getattr__(self, name):
# """Proxy to state object."""
# return getattr(self._state, name, None)
. Output only the next line. | InvenioPIDStore(app) |
Predict the next line for this snippet: <|code_start|>#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""DataCite PID provider."""
from __future__ import absolute_import
class RecordIdProviderV2(BaseProvider):
"""Record identifier provider V2.
This is the recommended record id provider.
It generates a random alphanumeric string as opposed to an increasing
integer (:class:`invenio_pidstore.providers.recordid.RecordIdProvider`).
"""
pid_type = 'recid'
"""Type of persistent identifier."""
pid_provider = None
"""Provider name.
The provider name is not recorded in the PID since the provider does not
provide any additional features besides creation of record ids.
"""
<|code_end|>
with the help of current file imports:
import copy
from base32_lib import base32
from flask import current_app
from ..models import PIDStatus
from .base import BaseProvider
and context from other files:
# Path: invenio_pidstore/models.py
# class PIDStatus(Enum):
# """Constants for possible status of any given PID."""
#
# __order__ = 'NEW RESERVED REGISTERED REDIRECTED DELETED'
#
# NEW = 'N'
# """PID has *not* yet been registered with the service provider."""
#
# RESERVED = 'K'
# """PID reserved in the service provider but not yet fully registered."""
#
# REGISTERED = 'R'
# """PID has been registered with the service provider."""
#
# REDIRECTED = 'M'
# """PID has been redirected to another persistent identifier."""
#
# DELETED = 'D'
# """PID has been deleted/inactivated with the service provider.
#
# This should happen very rarely, and must be kept track of, as the PID
# should not be reused for something else.
# """
#
# def __init__(self, value):
# """Hack."""
#
# def __eq__(self, other):
# """Equality test."""
# return self.value == other
#
# def __str__(self):
# """Return its value."""
# return self.value
#
# @property
# def title(self):
# """Return human readable title."""
# return PID_STATUS_TITLES[self.name]
#
# Path: invenio_pidstore/providers/base.py
# class BaseProvider(object):
# """Abstract class for persistent identifier provider classes."""
#
# pid_type = None
# """Default persistent identifier type."""
#
# pid_provider = None
# """Persistent identifier provider name."""
#
# default_status = PIDStatus.NEW
# """Default status for newly created PIDs by this provider."""
#
# @classmethod
# def create(cls, pid_type=None, pid_value=None, object_type=None,
# object_uuid=None, status=None, **kwargs):
# """Create a new instance for the given type and pid.
#
# :param pid_type: Persistent identifier type. (Default: None).
# :param pid_value: Persistent identifier value. (Default: None).
# :param status: Current PID status.
# (Default: :attr:`invenio_pidstore.models.PIDStatus.NEW`)
# :param object_type: The object type is a string that identify its type.
# (Default: None).
# :param object_uuid: The object UUID. (Default: None).
# :returns: A :class:`invenio_pidstore.providers.base.BaseProvider`
# instance.
# """
# assert pid_value
# assert pid_type or cls.pid_type
#
# pid = PersistentIdentifier.create(
# pid_type or cls.pid_type,
# pid_value,
# pid_provider=cls.pid_provider,
# object_type=object_type,
# object_uuid=object_uuid,
# status=status or cls.default_status,
# )
# return cls(pid, **kwargs)
#
# @classmethod
# def get(cls, pid_value, pid_type=None, **kwargs):
# """Get a persistent identifier for this provider.
#
# :param pid_type: Persistent identifier type. (Default: configured
# :attr:`invenio_pidstore.providers.base.BaseProvider.pid_type`)
# :param pid_value: Persistent identifier value.
# :param kwargs: See
# :meth:`invenio_pidstore.providers.base.BaseProvider` required
# initialization properties.
# :returns: A :class:`invenio_pidstore.providers.base.BaseProvider`
# instance.
# """
# return cls(
# PersistentIdentifier.get(pid_type or cls.pid_type, pid_value,
# pid_provider=cls.pid_provider),
# **kwargs)
#
# def __init__(self, pid, **kwargs):
# """Initialize provider using persistent identifier.
#
# :param pid: A :class:`invenio_pidstore.models.PersistentIdentifier`
# instance.
# """
# self.pid = pid
# assert pid.pid_provider == self.pid_provider
#
# def reserve(self):
# """Reserve a persistent identifier.
#
# This might or might not be useful depending on the service of the
# provider.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.reserve`.
# """
# return self.pid.reserve()
#
# def register(self):
# """Register a persistent identifier.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.register`.
# """
# return self.pid.register()
#
# def update(self):
# """Update information about the persistent identifier."""
# pass
#
# def delete(self):
# """Delete a persistent identifier.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.delete`.
# """
# return self.pid.delete()
#
# def sync_status(self):
# """Synchronize PIDstatus with remote service provider."""
# pass
, which may contain function names, class names, or code. Output only the next line. | default_status_with_obj = PIDStatus.REGISTERED |
Based on the snippet: <|code_start|>def test_recid_fetcher(app, db):
"""Test legacy recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter(rec_uuid, data)
fetched_pid = recid_fetcher(rec_uuid, data)
assert minted_pid.pid_value == fetched_pid.pid_value
assert fetched_pid.pid_type == fetched_pid.provider.pid_type
assert fetched_pid.pid_type == 'recid'
def test_recid_fetcher_v2(app, db):
"""Test recommended recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter_v2(rec_uuid, data)
fetched_pid = recid_fetcher_v2(rec_uuid, data)
assert minted_pid.pid_value == fetched_pid.pid_value
assert minted_pid.pid_type == fetched_pid.pid_type
assert fetched_pid.pid_type == 'recid'
assert fetched_pid.pid_value == minted_pid.pid_value
def test_register_fetcher(app):
"""Test base provider."""
with app.app_context():
<|code_end|>
, predict the immediate next line with the help of imports:
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.fetchers import recid_fetcher, recid_fetcher_v2
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/fetchers.py
# def recid_fetcher(record_uuid, data):
# """Legacy way to fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProvider,
# pid_type=RecordIdProvider.pid_type,
# pid_value=str(data[pid_field]),
# )
#
# def recid_fetcher_v2(record_uuid, data):
# """Fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProviderV2,
# pid_type=RecordIdProviderV2.pid_type,
# pid_value=str(data[pid_field])
# )
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | current_pidstore.register_fetcher('anothername', recid_minter) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Fetcher tests."""
from __future__ import absolute_import, print_function
def test_recid_fetcher(app, db):
"""Test legacy recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter(rec_uuid, data)
<|code_end|>
, predict the immediate next line with the help of imports:
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.fetchers import recid_fetcher, recid_fetcher_v2
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/fetchers.py
# def recid_fetcher(record_uuid, data):
# """Legacy way to fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProvider,
# pid_type=RecordIdProvider.pid_type,
# pid_value=str(data[pid_field]),
# )
#
# def recid_fetcher_v2(record_uuid, data):
# """Fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProviderV2,
# pid_type=RecordIdProviderV2.pid_type,
# pid_value=str(data[pid_field])
# )
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | fetched_pid = recid_fetcher(rec_uuid, data) |
Using the snippet: <|code_start|>#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Fetcher tests."""
from __future__ import absolute_import, print_function
def test_recid_fetcher(app, db):
"""Test legacy recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter(rec_uuid, data)
fetched_pid = recid_fetcher(rec_uuid, data)
assert minted_pid.pid_value == fetched_pid.pid_value
assert fetched_pid.pid_type == fetched_pid.provider.pid_type
assert fetched_pid.pid_type == 'recid'
def test_recid_fetcher_v2(app, db):
"""Test recommended recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter_v2(rec_uuid, data)
<|code_end|>
, determine the next line of code. You have imports:
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.fetchers import recid_fetcher, recid_fetcher_v2
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (class names, function names, or code) available:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/fetchers.py
# def recid_fetcher(record_uuid, data):
# """Legacy way to fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProvider,
# pid_type=RecordIdProvider.pid_type,
# pid_value=str(data[pid_field]),
# )
#
# def recid_fetcher_v2(record_uuid, data):
# """Fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProviderV2,
# pid_type=RecordIdProviderV2.pid_type,
# pid_value=str(data[pid_field])
# )
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | fetched_pid = recid_fetcher_v2(rec_uuid, data) |
Using the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Fetcher tests."""
from __future__ import absolute_import, print_function
def test_recid_fetcher(app, db):
"""Test legacy recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
<|code_end|>
, determine the next line of code. You have imports:
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.fetchers import recid_fetcher, recid_fetcher_v2
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (class names, function names, or code) available:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/fetchers.py
# def recid_fetcher(record_uuid, data):
# """Legacy way to fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProvider,
# pid_type=RecordIdProvider.pid_type,
# pid_value=str(data[pid_field]),
# )
#
# def recid_fetcher_v2(record_uuid, data):
# """Fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProviderV2,
# pid_type=RecordIdProviderV2.pid_type,
# pid_value=str(data[pid_field])
# )
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | minted_pid = recid_minter(rec_uuid, data) |
Predict the next line after this snippet: <|code_start|># This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Fetcher tests."""
from __future__ import absolute_import, print_function
def test_recid_fetcher(app, db):
"""Test legacy recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
minted_pid = recid_minter(rec_uuid, data)
fetched_pid = recid_fetcher(rec_uuid, data)
assert minted_pid.pid_value == fetched_pid.pid_value
assert fetched_pid.pid_type == fetched_pid.provider.pid_type
assert fetched_pid.pid_type == 'recid'
def test_recid_fetcher_v2(app, db):
"""Test recommended recid fetcher."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
<|code_end|>
using the current file's imports:
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.fetchers import recid_fetcher, recid_fetcher_v2
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and any relevant context from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/fetchers.py
# def recid_fetcher(record_uuid, data):
# """Legacy way to fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProvider,
# pid_type=RecordIdProvider.pid_type,
# pid_value=str(data[pid_field]),
# )
#
# def recid_fetcher_v2(record_uuid, data):
# """Fetch a record's identifiers.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# return FetchedPID(
# provider=RecordIdProviderV2,
# pid_type=RecordIdProviderV2.pid_type,
# pid_value=str(data[pid_field])
# )
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | minted_pid = recid_minter_v2(rec_uuid, data) |
Given snippet: <|code_start|> return provider.pid
def recid_minter(record_uuid, data):
"""Mint record identifiers.
This is a minter specific for records.
With the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
the PID instance with `rec` as predefined `object_type`.
Procedure followed: (we will use `control_number` as value of
`PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#. If a `control_number` field is already there, a `AssertionError`
exception is raised.
#. The provider is initialized with the help of
:class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
It's called with default value 'rec' for `object_type` and `record_uuid`
variable for `object_uuid`.
#. The new `id_value` is stored inside `data` as `control_number` field.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
and context:
# Path: invenio_pidstore/providers/recordid.py
# class RecordIdProvider(BaseProvider):
# """Record identifier provider."""
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param kwargs: You specify the pid_value.
# """
# # Request next integer in recid sequence.
# assert 'pid_value' not in kwargs
# kwargs['pid_value'] = str(RecordIdentifier.next())
# kwargs.setdefault('status', cls.default_status)
# if object_type and object_uuid:
# kwargs['status'] = PIDStatus.REGISTERED
# return super(RecordIdProvider, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
#
# Path: invenio_pidstore/providers/recordid_v2.py
# class RecordIdProviderV2(BaseProvider):
# """Record identifier provider V2.
#
# This is the recommended record id provider.
#
# It generates a random alphanumeric string as opposed to an increasing
# integer (:class:`invenio_pidstore.providers.recordid.RecordIdProvider`).
# """
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status_with_obj = PIDStatus.REGISTERED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs with an object are by default reserved.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def generate_id(cls, options=None):
# """Generate record id."""
# passed_options = options or {}
# # WHY: A new dict needs to be created to prevent side-effects
# options = copy.deepcopy(current_app.config.get(
# 'PIDSTORE_RECORDID_OPTIONS', {}
# ))
# options.update(passed_options)
# length = options.get('length', 10)
# split_every = options.get('split_every', 0)
# checksum = options.get('checksum', True)
#
# return base32.generate(
# length=length,
# split_every=split_every,
# checksum=checksum
# )
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, options=None,
# **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# For more information about parameters,
# see :meth:`invenio_pidstore.providers.base.BaseProvider.create`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param options: ``dict`` with optional keys:
# ``"length"`` (integer), ``"split_every"`` (integer),
# ``"checksum"`` (boolean). (Default: None).
# :param kwargs: dict to hold generated pid_value and status. See
# :meth:`invenio_pidstore.providers.base.BaseProvider.create` extra
# parameters.
# :returns: A :class:`RecordIdProviderV2` instance.
# """
# assert 'pid_value' not in kwargs
#
# kwargs['pid_value'] = cls.generate_id(options)
# kwargs.setdefault('status', cls.default_status)
#
# if object_type and object_uuid:
# kwargs['status'] = cls.default_status_with_obj
#
# return super(RecordIdProviderV2, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
which might include code, classes, or functions. Output only the next line. | provider = RecordIdProvider.create( |
Continue the code snippet: <|code_start|>#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Persistent identifier minters."""
from __future__ import absolute_import, print_function
def recid_minter_v2(record_uuid, data):
"""Mint record identifiers with RecordIDProviderV2.
This minter is recommended to be used when creating records to get
PersistentIdentifier with ``object_type='rec'`` and the new random
alphanumeric `pid_value`.
Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
``data``. The minted ``pid_value`` will be stored in that field.
:param record_uuid: The object UUID of the record.
:param data: The record metadata.
:returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
assert pid_field not in data
<|code_end|>
. Use current file imports:
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
and context (classes, functions, or code) from other files:
# Path: invenio_pidstore/providers/recordid.py
# class RecordIdProvider(BaseProvider):
# """Record identifier provider."""
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param kwargs: You specify the pid_value.
# """
# # Request next integer in recid sequence.
# assert 'pid_value' not in kwargs
# kwargs['pid_value'] = str(RecordIdentifier.next())
# kwargs.setdefault('status', cls.default_status)
# if object_type and object_uuid:
# kwargs['status'] = PIDStatus.REGISTERED
# return super(RecordIdProvider, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
#
# Path: invenio_pidstore/providers/recordid_v2.py
# class RecordIdProviderV2(BaseProvider):
# """Record identifier provider V2.
#
# This is the recommended record id provider.
#
# It generates a random alphanumeric string as opposed to an increasing
# integer (:class:`invenio_pidstore.providers.recordid.RecordIdProvider`).
# """
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status_with_obj = PIDStatus.REGISTERED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs with an object are by default reserved.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def generate_id(cls, options=None):
# """Generate record id."""
# passed_options = options or {}
# # WHY: A new dict needs to be created to prevent side-effects
# options = copy.deepcopy(current_app.config.get(
# 'PIDSTORE_RECORDID_OPTIONS', {}
# ))
# options.update(passed_options)
# length = options.get('length', 10)
# split_every = options.get('split_every', 0)
# checksum = options.get('checksum', True)
#
# return base32.generate(
# length=length,
# split_every=split_every,
# checksum=checksum
# )
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, options=None,
# **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# For more information about parameters,
# see :meth:`invenio_pidstore.providers.base.BaseProvider.create`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param options: ``dict`` with optional keys:
# ``"length"`` (integer), ``"split_every"`` (integer),
# ``"checksum"`` (boolean). (Default: None).
# :param kwargs: dict to hold generated pid_value and status. See
# :meth:`invenio_pidstore.providers.base.BaseProvider.create` extra
# parameters.
# :returns: A :class:`RecordIdProviderV2` instance.
# """
# assert 'pid_value' not in kwargs
#
# kwargs['pid_value'] = cls.generate_id(options)
# kwargs.setdefault('status', cls.default_status)
#
# if object_type and object_uuid:
# kwargs['status'] = cls.default_status_with_obj
#
# return super(RecordIdProviderV2, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
. Output only the next line. | provider = RecordIdProviderV2.create( |
Based on the snippet: <|code_start|>
FetchedPID = namedtuple('FetchedPID', ['provider', 'pid_type', 'pid_value'])
"""A pid fetcher."""
def recid_fetcher_v2(record_uuid, data):
"""Fetch a record's identifiers.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
return FetchedPID(
provider=RecordIdProviderV2,
pid_type=RecordIdProviderV2.pid_type,
pid_value=str(data[pid_field])
)
def recid_fetcher(record_uuid, data):
"""Legacy way to fetch a record's identifiers.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
return FetchedPID(
<|code_end|>
, predict the immediate next line with the help of imports:
from collections import namedtuple
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/providers/recordid.py
# class RecordIdProvider(BaseProvider):
# """Record identifier provider."""
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param kwargs: You specify the pid_value.
# """
# # Request next integer in recid sequence.
# assert 'pid_value' not in kwargs
# kwargs['pid_value'] = str(RecordIdentifier.next())
# kwargs.setdefault('status', cls.default_status)
# if object_type and object_uuid:
# kwargs['status'] = PIDStatus.REGISTERED
# return super(RecordIdProvider, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
#
# Path: invenio_pidstore/providers/recordid_v2.py
# class RecordIdProviderV2(BaseProvider):
# """Record identifier provider V2.
#
# This is the recommended record id provider.
#
# It generates a random alphanumeric string as opposed to an increasing
# integer (:class:`invenio_pidstore.providers.recordid.RecordIdProvider`).
# """
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status_with_obj = PIDStatus.REGISTERED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs with an object are by default reserved.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def generate_id(cls, options=None):
# """Generate record id."""
# passed_options = options or {}
# # WHY: A new dict needs to be created to prevent side-effects
# options = copy.deepcopy(current_app.config.get(
# 'PIDSTORE_RECORDID_OPTIONS', {}
# ))
# options.update(passed_options)
# length = options.get('length', 10)
# split_every = options.get('split_every', 0)
# checksum = options.get('checksum', True)
#
# return base32.generate(
# length=length,
# split_every=split_every,
# checksum=checksum
# )
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, options=None,
# **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# For more information about parameters,
# see :meth:`invenio_pidstore.providers.base.BaseProvider.create`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param options: ``dict`` with optional keys:
# ``"length"`` (integer), ``"split_every"`` (integer),
# ``"checksum"`` (boolean). (Default: None).
# :param kwargs: dict to hold generated pid_value and status. See
# :meth:`invenio_pidstore.providers.base.BaseProvider.create` extra
# parameters.
# :returns: A :class:`RecordIdProviderV2` instance.
# """
# assert 'pid_value' not in kwargs
#
# kwargs['pid_value'] = cls.generate_id(options)
# kwargs.setdefault('status', cls.default_status)
#
# if object_type and object_uuid:
# kwargs['status'] = cls.default_status_with_obj
#
# return super(RecordIdProviderV2, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
. Output only the next line. | provider=RecordIdProvider, |
Given snippet: <|code_start|>
.. code-block:: python
def my_fetcher(record_uuid, data):
return FetchedPID(
provider=MyRecordIdProvider,
pid_type=MyRecordIdProvider.pid_type,
pid_value=extract_pid_value(data),
)
To see more about providers see :mod:`invenio_pidstore.providers`.
"""
from __future__ import absolute_import, print_function
FetchedPID = namedtuple('FetchedPID', ['provider', 'pid_type', 'pid_value'])
"""A pid fetcher."""
def recid_fetcher_v2(record_uuid, data):
"""Fetch a record's identifiers.
:param record_uuid: The record UUID.
:param data: The record metadata.
:returns: A :data:`invenio_pidstore.fetchers.FetchedPID` instance.
"""
pid_field = current_app.config['PIDSTORE_RECID_FIELD']
return FetchedPID(
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from collections import namedtuple
from flask import current_app
from .providers.recordid import RecordIdProvider
from .providers.recordid_v2 import RecordIdProviderV2
and context:
# Path: invenio_pidstore/providers/recordid.py
# class RecordIdProvider(BaseProvider):
# """Record identifier provider."""
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param kwargs: You specify the pid_value.
# """
# # Request next integer in recid sequence.
# assert 'pid_value' not in kwargs
# kwargs['pid_value'] = str(RecordIdentifier.next())
# kwargs.setdefault('status', cls.default_status)
# if object_type and object_uuid:
# kwargs['status'] = PIDStatus.REGISTERED
# return super(RecordIdProvider, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
#
# Path: invenio_pidstore/providers/recordid_v2.py
# class RecordIdProviderV2(BaseProvider):
# """Record identifier provider V2.
#
# This is the recommended record id provider.
#
# It generates a random alphanumeric string as opposed to an increasing
# integer (:class:`invenio_pidstore.providers.recordid.RecordIdProvider`).
# """
#
# pid_type = 'recid'
# """Type of persistent identifier."""
#
# pid_provider = None
# """Provider name.
#
# The provider name is not recorded in the PID since the provider does not
# provide any additional features besides creation of record ids.
# """
#
# default_status_with_obj = PIDStatus.REGISTERED
# """Record IDs are by default registered immediately.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`
# """
#
# default_status = PIDStatus.RESERVED
# """Record IDs with an object are by default reserved.
#
# Default: :attr:`invenio_pidstore.models.PIDStatus.RESERVED`
# """
#
# @classmethod
# def generate_id(cls, options=None):
# """Generate record id."""
# passed_options = options or {}
# # WHY: A new dict needs to be created to prevent side-effects
# options = copy.deepcopy(current_app.config.get(
# 'PIDSTORE_RECORDID_OPTIONS', {}
# ))
# options.update(passed_options)
# length = options.get('length', 10)
# split_every = options.get('split_every', 0)
# checksum = options.get('checksum', True)
#
# return base32.generate(
# length=length,
# split_every=split_every,
# checksum=checksum
# )
#
# @classmethod
# def create(cls, object_type=None, object_uuid=None, options=None,
# **kwargs):
# """Create a new record identifier.
#
# Note: if the object_type and object_uuid values are passed, then the
# PID status will be automatically setted to
# :attr:`invenio_pidstore.models.PIDStatus.REGISTERED`.
#
# For more information about parameters,
# see :meth:`invenio_pidstore.providers.base.BaseProvider.create`.
#
# :param object_type: The object type. (Default: None.)
# :param object_uuid: The object identifier. (Default: None).
# :param options: ``dict`` with optional keys:
# ``"length"`` (integer), ``"split_every"`` (integer),
# ``"checksum"`` (boolean). (Default: None).
# :param kwargs: dict to hold generated pid_value and status. See
# :meth:`invenio_pidstore.providers.base.BaseProvider.create` extra
# parameters.
# :returns: A :class:`RecordIdProviderV2` instance.
# """
# assert 'pid_value' not in kwargs
#
# kwargs['pid_value'] = cls.generate_id(options)
# kwargs.setdefault('status', cls.default_status)
#
# if object_type and object_uuid:
# kwargs['status'] = cls.default_status_with_obj
#
# return super(RecordIdProviderV2, cls).create(
# object_type=object_type, object_uuid=object_uuid, **kwargs)
which might include code, classes, or functions. Output only the next line. | provider=RecordIdProviderV2, |
Given the following code snippet before the placeholder: <|code_start|>
pid = recid_minter(rec_uuid, data)
assert pid
assert data[app.config['PIDSTORE_RECID_FIELD']] == pid.pid_value
assert pid.object_type == 'rec'
assert pid.object_uuid == rec_uuid
def test_recid_minter_v2(app, db):
"""Test recommended recid minter."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
recid_field = app.config['PIDSTORE_RECID_FIELD']
pid = recid_minter_v2(rec_uuid, data)
assert pid
assert data[recid_field] == pid.pid_value
assert pid.object_type == 'rec'
assert pid.object_uuid == rec_uuid
with pytest.raises(AssertionError):
recid_minter_v2(rec_uuid, {recid_field: '1'})
def test_register_minter(app):
"""Test base provider."""
with app.app_context():
<|code_end|>
, predict the next line using imports from the current file:
import pytest
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context including class names, function names, and sometimes code from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | current_pidstore.register_minter('anothername', recid_minter) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Minter tests."""
from __future__ import absolute_import, print_function
def test_recid_minter(app, db):
"""Test legacy recid minter."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
<|code_end|>
, predict the immediate next line with the help of imports:
import pytest
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | pid = recid_minter(rec_uuid, data) |
Given the code snippet: <|code_start|># under the terms of the MIT License; see LICENSE file for more details.
"""Minter tests."""
from __future__ import absolute_import, print_function
def test_recid_minter(app, db):
"""Test legacy recid minter."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
pid = recid_minter(rec_uuid, data)
assert pid
assert data[app.config['PIDSTORE_RECID_FIELD']] == pid.pid_value
assert pid.object_type == 'rec'
assert pid.object_uuid == rec_uuid
def test_recid_minter_v2(app, db):
"""Test recommended recid minter."""
with app.app_context():
rec_uuid = uuid.uuid4()
data = {}
recid_field = app.config['PIDSTORE_RECID_FIELD']
<|code_end|>
, generate the next line using the imports in this file:
import pytest
import uuid
from invenio_pidstore import current_pidstore
from invenio_pidstore.minters import recid_minter, recid_minter_v2
and context (functions, classes, or occasionally code) from other files:
# Path: invenio_pidstore/proxies.py
#
# Path: invenio_pidstore/minters.py
# def recid_minter(record_uuid, data):
# """Mint record identifiers.
#
# This is a minter specific for records.
# With the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`, it creates
# the PID instance with `rec` as predefined `object_type`.
#
# Procedure followed: (we will use `control_number` as value of
# `PIDSTORE_RECID_FIELD` for the simplicity of the documentation.)
#
# #. If a `control_number` field is already there, a `AssertionError`
# exception is raised.
#
# #. The provider is initialized with the help of
# :class:`invenio_pidstore.providers.recordid.RecordIdProvider`.
# It's called with default value 'rec' for `object_type` and `record_uuid`
# variable for `object_uuid`.
#
# #. The new `id_value` is stored inside `data` as `control_number` field.
#
# :param record_uuid: The record UUID.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProvider.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
#
# def recid_minter_v2(record_uuid, data):
# """Mint record identifiers with RecordIDProviderV2.
#
# This minter is recommended to be used when creating records to get
# PersistentIdentifier with ``object_type='rec'`` and the new random
# alphanumeric `pid_value`.
#
# Raises ``AssertionError`` if a ``PIDSTORE_RECID_FIELD`` entry is already in
# ``data``. The minted ``pid_value`` will be stored in that field.
#
# :param record_uuid: The object UUID of the record.
# :param data: The record metadata.
# :returns: A fresh `invenio_pidstore.models.PersistentIdentifier` instance.
# """
# pid_field = current_app.config['PIDSTORE_RECID_FIELD']
# assert pid_field not in data
# provider = RecordIdProviderV2.create(
# object_type='rec', object_uuid=record_uuid)
# data[pid_field] = provider.pid.pid_value
# return provider.pid
. Output only the next line. | pid = recid_minter_v2(rec_uuid, data) |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2019 CERN.
# Copyright (C) 2019 Northwestern University.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""DataCite PID provider."""
from __future__ import absolute_import
class DataCiteProvider(BaseProvider):
"""DOI provider using DataCite API."""
pid_type = 'doi'
"""Default persistent identifier type."""
pid_provider = 'datacite'
"""Persistent identifier provider name."""
<|code_end|>
, predict the immediate next line with the help of imports:
from datacite import DataCiteMDSClient
from datacite.errors import DataCiteError, DataCiteGoneError, \
DataCiteNoContentError, DataCiteNotFoundError, HttpError
from flask import current_app
from ..models import PIDStatus, logger
from .base import BaseProvider
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/models.py
# PID_STATUS_TITLES = {
# 'NEW': _('New'),
# 'RESERVED': _('Reserved'),
# 'REGISTERED': _('Registered'),
# 'REDIRECTED': _('Redirected'),
# 'DELETED': _('Deleted'),
# }
# NEW = 'N'
# RESERVED = 'K'
# REGISTERED = 'R'
# REDIRECTED = 'M'
# DELETED = 'D'
# class PIDStatus(Enum):
# class PersistentIdentifier(db.Model, Timestamp):
# class Redirect(db.Model, Timestamp):
# class RecordIdentifier(db.Model):
# def __init__(self, value):
# def __eq__(self, other):
# def __str__(self):
# def title(self):
# def create(cls, pid_type, pid_value, pid_provider=None,
# status=PIDStatus.NEW, object_type=None, object_uuid=None,):
# def get(cls, pid_type, pid_value, pid_provider=None):
# def get_by_object(cls, pid_type, object_type, object_uuid):
# def has_object(self):
# def get_assigned_object(self, object_type=None):
# def assign(self, object_type, object_uuid, overwrite=False):
# def unassign(self):
# def get_redirect(self):
# def redirect(self, pid):
# def reserve(self):
# def register(self):
# def delete(self):
# def sync_status(self, status):
# def is_redirected(self):
# def is_registered(self):
# def is_deleted(self):
# def is_new(self):
# def is_reserved(self):
# def __repr__(self):
# def next(cls):
# def max(cls):
# def _set_sequence(cls, val):
# def insert(cls, val):
#
# Path: invenio_pidstore/providers/base.py
# class BaseProvider(object):
# """Abstract class for persistent identifier provider classes."""
#
# pid_type = None
# """Default persistent identifier type."""
#
# pid_provider = None
# """Persistent identifier provider name."""
#
# default_status = PIDStatus.NEW
# """Default status for newly created PIDs by this provider."""
#
# @classmethod
# def create(cls, pid_type=None, pid_value=None, object_type=None,
# object_uuid=None, status=None, **kwargs):
# """Create a new instance for the given type and pid.
#
# :param pid_type: Persistent identifier type. (Default: None).
# :param pid_value: Persistent identifier value. (Default: None).
# :param status: Current PID status.
# (Default: :attr:`invenio_pidstore.models.PIDStatus.NEW`)
# :param object_type: The object type is a string that identify its type.
# (Default: None).
# :param object_uuid: The object UUID. (Default: None).
# :returns: A :class:`invenio_pidstore.providers.base.BaseProvider`
# instance.
# """
# assert pid_value
# assert pid_type or cls.pid_type
#
# pid = PersistentIdentifier.create(
# pid_type or cls.pid_type,
# pid_value,
# pid_provider=cls.pid_provider,
# object_type=object_type,
# object_uuid=object_uuid,
# status=status or cls.default_status,
# )
# return cls(pid, **kwargs)
#
# @classmethod
# def get(cls, pid_value, pid_type=None, **kwargs):
# """Get a persistent identifier for this provider.
#
# :param pid_type: Persistent identifier type. (Default: configured
# :attr:`invenio_pidstore.providers.base.BaseProvider.pid_type`)
# :param pid_value: Persistent identifier value.
# :param kwargs: See
# :meth:`invenio_pidstore.providers.base.BaseProvider` required
# initialization properties.
# :returns: A :class:`invenio_pidstore.providers.base.BaseProvider`
# instance.
# """
# return cls(
# PersistentIdentifier.get(pid_type or cls.pid_type, pid_value,
# pid_provider=cls.pid_provider),
# **kwargs)
#
# def __init__(self, pid, **kwargs):
# """Initialize provider using persistent identifier.
#
# :param pid: A :class:`invenio_pidstore.models.PersistentIdentifier`
# instance.
# """
# self.pid = pid
# assert pid.pid_provider == self.pid_provider
#
# def reserve(self):
# """Reserve a persistent identifier.
#
# This might or might not be useful depending on the service of the
# provider.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.reserve`.
# """
# return self.pid.reserve()
#
# def register(self):
# """Register a persistent identifier.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.register`.
# """
# return self.pid.register()
#
# def update(self):
# """Update information about the persistent identifier."""
# pass
#
# def delete(self):
# """Delete a persistent identifier.
#
# See: :meth:`invenio_pidstore.models.PersistentIdentifier.delete`.
# """
# return self.pid.delete()
#
# def sync_status(self):
# """Synchronize PIDstatus with remote service provider."""
# pass
. Output only the next line. | default_status = PIDStatus.NEW |
Based on the snippet: <|code_start|># -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2015-2018 CERN.
#
# Invenio is free software; you can redistribute it and/or modify it
# under the terms of the MIT License; see LICENSE file for more details.
"""Record identifier model tests."""
from __future__ import absolute_import, print_function
def test_record_identifier(app, db):
"""Test base provider."""
with app.app_context():
<|code_end|>
, predict the immediate next line with the help of imports:
from invenio_pidstore.models import RecordIdentifier
and context (classes, functions, sometimes code) from other files:
# Path: invenio_pidstore/models.py
# class RecordIdentifier(db.Model):
# """Sequence generator for integer record identifiers.
#
# The sole purpose of this model is to generate integer record identifiers in
# sequence using the underlying database's auto increment features in a
# transaction friendly manner. The feature is primarily provided to support
# legacy Invenio instances to continue their current record identifier
# scheme. For new instances we strong encourage to not use auto incrementing
# record identifiers, but instead use e.g. UUIDs as record identifiers.
# """
#
# __tablename__ = 'pidstore_recid'
#
# recid = db.Column(
# db.BigInteger().with_variant(db.Integer, "sqlite"),
# primary_key=True, autoincrement=True)
#
# @classmethod
# def next(cls):
# """Return next available record identifier."""
# try:
# with db.session.begin_nested():
# obj = cls()
# db.session.add(obj)
# except IntegrityError: # pragma: no cover
# with db.session.begin_nested():
# # Someone has likely modified the table without using the
# # models API. Let's fix the problem.
# cls._set_sequence(cls.max())
# obj = cls()
# db.session.add(obj)
# return obj.recid
#
# @classmethod
# def max(cls):
# """Get max record identifier."""
# max_recid = db.session.query(func.max(cls.recid)).scalar()
# return max_recid if max_recid else 0
#
# @classmethod
# def _set_sequence(cls, val):
# """Internal function to reset sequence to specific value.
#
# Note: this function is for PostgreSQL compatibility.
#
# :param val: The value to be set.
# """
# if db.engine.dialect.name == 'postgresql': # pragma: no cover
# db.session.execute(
# "SELECT setval(pg_get_serial_sequence("
# "'{0}', 'recid'), :newval)".format(
# cls.__tablename__), dict(newval=val))
#
# @classmethod
# def insert(cls, val):
# """Insert a record identifier.
#
# :param val: The `recid` column value to insert.
# """
# with db.session.begin_nested():
# obj = cls(recid=val)
# db.session.add(obj)
# cls._set_sequence(cls.max())
. Output only the next line. | assert RecordIdentifier.next() == 1 |
Next line prediction: <|code_start|>
def _tsmi(dataset):
out = (dataset.red + dataset.green) * 0.0001 / 2
return out.where(out>0, 0)
def tsm(dataset_in, clean_mask=None, no_data=0):
"""
Calculate Total Suspended Matter (TSM) for water.
Parameters
----------
dataset_in: xarray.Dataset
Dataset retrieved from the Data Cube.
Must have 'red' and 'green' data variables.
clean_mask: np.ndarray
A NumPy array with dtype boolean
True for values considered clean;
if no clean mask is supplied, all values will be considered clean
no_data: numeric
no data pixel value; default: -9999
Raises
------
ValueError
if dataset_in is an empty xarray.Dataset.
"""
assert 'red' in dataset_in and 'green' in dataset_in, "Red and Green bands are required for the TSM analysis."
# Default to masking nothing.
if clean_mask is None:
<|code_end|>
. Use current file imports:
(import gc
import numpy as np
import xarray as xr
import warnings
import scipy.ndimage.filters as conv
from xarray.ufuncs import sign as xr_sign
from . import dc_utilities as utilities
from .dc_utilities import create_default_clean_mask
from datetime import datetime)
and context including class names, function names, or small code snippets from other files:
# Path: data_cube_utilities/dc_utilities.py
# def create_default_clean_mask(dataset_in):
# """
# Creates a data mask that masks nothing.
#
# Parameters
# ----------
# dataset_in: xarray.Dataset
# dataset retrieved from the Data Cube.
#
# Raises
# ------
# ValueError
# if dataset_in is an empty xarray.Dataset.
# """
# data = None
# if isinstance(dataset_in, xr.Dataset):
# data_vars = list(dataset_in.data_vars)
# if len(data_vars) != 0:
# data = dataset_in[data_vars[0]].data
# elif isinstance(dataset_in, xr.DataArray):
# data = dataset_in.data
# clean_mask = None
# if isinstance(data, dask.array.core.Array):
# clean_mask = dask.array.ones_like(data, dtype='uint8')
# else:
# if data is None:
# clean_mask = np.ones(dataset_in.shape, dtype=np.bool)
# else:
# clean_mask = np.ones_like(data, dtype=np.bool)
# return clean_mask.astype(np.bool)
. Output only the next line. | clean_mask = create_default_clean_mask(dataset_in) |
Next line prediction: <|code_start|> dataset_in: xarray.Dataset
A dataset retrieved from the Data Cube; should contain:
coordinates: time, latitude, longitude
variables: variables to be mosaicked (e.g. red, green, and blue bands)
clean_mask: np.ndarray
An ndarray of the same shape as `dataset_in` - specifying which values to mask out.
If no clean mask is specified, then all values are kept during compositing.
no_data: int or float
The no data value.
dtype: str or numpy.dtype
A string denoting a Python datatype name (e.g. int, float) or a NumPy dtype (e.g.
np.int16, np.float32) to convert the data to.
var: str
The name of the data variable in `dataset_in` to use.
min_max: Whether to use the minimum or maximum times of `var` for the composite.
Returns
-------
dataset_out: xarray.Dataset
Composited data with the format:
coordinates: latitude, longitude
variables: same as dataset_in
"""
assert var is not None, \
"The parameter `var` must be set to the name of a data variable in `dataset_in`"
assert min_max is not None and min_max in ['min', 'max'], \
"The parameter `min_max` must be one of ['min', 'max']."
# Default to masking nothing.
if clean_mask is None:
<|code_end|>
. Use current file imports:
(import numpy as np
import xarray as xr
import dask
import hdmedians as hd
from functools import partial
from xarray.ufuncs import isnan as xr_nan
from collections import OrderedDict
from . import dc_utilities as utilities
from .dc_utilities import create_default_clean_mask)
and context including class names, function names, or small code snippets from other files:
# Path: data_cube_utilities/dc_utilities.py
# def create_default_clean_mask(dataset_in):
# """
# Creates a data mask that masks nothing.
#
# Parameters
# ----------
# dataset_in: xarray.Dataset
# dataset retrieved from the Data Cube.
#
# Raises
# ------
# ValueError
# if dataset_in is an empty xarray.Dataset.
# """
# data = None
# if isinstance(dataset_in, xr.Dataset):
# data_vars = list(dataset_in.data_vars)
# if len(data_vars) != 0:
# data = dataset_in[data_vars[0]].data
# elif isinstance(dataset_in, xr.DataArray):
# data = dataset_in.data
# clean_mask = None
# if isinstance(data, dask.array.core.Array):
# clean_mask = dask.array.ones_like(data, dtype='uint8')
# else:
# if data is None:
# clean_mask = np.ones(dataset_in.shape, dtype=np.bool)
# else:
# clean_mask = np.ones_like(data, dtype=np.bool)
# return clean_mask.astype(np.bool)
. Output only the next line. | clean_mask = create_default_clean_mask(dataset_in) |
Given the following code snippet before the placeholder: <|code_start|> """
return (ds.swir1 - ds.nir) / (ds.swir1 + ds.nir)
def DBSI(ds, normalize=True):
"""
Computes the Dry Bare-Soil Index as defined in the paper "Applying
Built-Up and Bare-Soil Indices from Landsat 8 to Cities in Dry Climates".
The formula is (SWIR1 - GREEN) / (SWIR1 + GREEN) - NDVI.
If `normalize == False`, returned values should be in the range [-2,2].
This is a spectral index for which high values often indicate bare soil and
low values often indicate urban areas.
Note that DBSI often performs better in arid and semi-arid environments than NDBI, since
it differentiates bare soil from urban areas better.
Parameters
----------
ds: xarray.Dataset
An `xarray.Dataset` that must contain
'swir1', 'green', 'nir', and 'red' `DataArrays`.
normalize: boolean
Whether to normalize to the range [-1,1] - the range of most common spectral indices.
Returns
-------
dbsi: xarray.DataArray
An `xarray.DataArray` with the same shape as `ds` - the same coordinates in
the same order.
"""
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
from .vegetation import NDVI
and context including class names, function names, and sometimes code from other files:
# Path: data_cube_utilities/vegetation.py
# def NDVI(ds):
# """
# Computes the Normalized Difference Vegetation Index for an `xarray.Dataset`.
# The formula is (NIR - RED) / (NIR + RED).
# Values should be in the range [-1,1] for valid LANDSAT data (nir and red are positive).
#
# Parameters
# ----------
# ds: xarray.Dataset
# An `xarray.Dataset` that must contain 'nir' and 'red' `DataArrays`.
#
# Returns
# -------
# ndvi: xarray.DataArray
# An `xarray.DataArray` with the same shape as `ds` - the same coordinates in
# the same order.
# """
# return (ds.nir - ds.red) / (ds.nir + ds.red)
. Output only the next line. | dbsi = (ds.swir1 - ds.green) / (ds.swir1 + ds.green) - NDVI(ds) |
Given the code snippet: <|code_start|># This import is only for
def EVI(*args, **kwargs):
"""
Instead of this function, please use the EVI() function in vegetation.py.
"""
return _EVI_orig(*args, **kwargs)
def EVI2(*args, **kwargs):
"""
Instead of this function, please use the EVI2() function in vegetation.py.
"""
return _EVI2_orig(*args, **kwargs)
def NDVI(*args, **kwargs):
"""
Instead of this function, please use the NDVI() function in vegetation.py.
"""
<|code_end|>
, generate the next line using the imports in this file:
import xarray as xr
import numpy as np
from .vegetation import EVI as _EVI_orig, EVI2 as _EVI2_orig, NDVI as _NDVI_orig
from .dc_water_classifier import wofs_classify
and context (functions, classes, or occasionally code) from other files:
# Path: data_cube_utilities/vegetation.py
# def EVI(ds, G=2.5, C1=6, C2=7.5, L=1, normalize=True):
# """
# Computes the 3-band Enhanced Vegetation Index for an `xarray.Dataset`.
# The formula is G * (NIR - RED) / (NIR + C1*RED - C2*BLUE + L).
# Usually, G = 2.5, C1 = 6, C2 = 7.5, and L = 1.
# For Landsat data, returned values should be in the range [-1,1] if `normalize == True`.
# If `normalize == False`, returned values should be in the range [-1,2.5].
#
# EVI is superior to NDVI in accuracy because it is less dependent on the solar
# incidence angle, atmospheric conditions (e.g. particles and clouds), shadows, and
# soil appearance.
#
# Parameters
# ----------
# ds: xarray.Dataset
# An `xarray.Dataset` that must contain 'nir', 'red', and 'blue' `DataArrays`.
# G, C1, C2, L: float
# G is the gain factor - a constant scaling factor.
# C1 and C2 pertain to aerosols in clouds.
# L adjusts for canopy background and soil appearance. It particularly pertains to
# the nir and red bands, which are transmitted non-linearly through a canopy.
# normalize: boolean
# Whether to normalize to the range [-1,1] - the range of most common spectral indices.
#
# Returns
# -------
# evi: xarray.DataArray
# An `xarray.DataArray` with the same shape as `ds` - the same coordinates in
# the same order.
# """
# evi = G * (ds.nir - ds.red) / (ds.nir + C1 * ds.red - C2 * ds.blue + L)
# # Clamp values to the range [-1,2.5].
# evi.values[evi.values < -1] = -1
# evi.values[2.5 < evi.values] = 2.5
# if normalize:
# # Scale values in the range [0,2.5] to the range [0,1].
# pos_vals_mask = 0 < evi.values
# evi.values[pos_vals_mask] = np.interp(evi.values[pos_vals_mask], (0, 2.5), (0, 1))
# return evi
#
# def EVI2(ds, G=2.5, C=2.4, L=1, normalize=True):
# """
# Computes the 2-band Enhanced Vegetation Index for an `xarray.Dataset`.
# The formula is G*((NIR-RED)/(NIR+C*Red+L)).
# Usually, G = 2.5, C = 2.4, and L = 1.
# For Landsat data, returned values should be in the range [-1,1] if `normalize == True`.
# If `normalize == False`, returned values should be in the range [-1,2.5].
#
# EVI2 does not require a blue band like EVI, which means less data is required to use it.
# Additionally, the blue band used in EVI can have a low signal-to-noise ratio
# in earth observation imagery. When atmospheric effects are insignificant (e.g. on clear days),
# EVI2 should closely match EVI.
#
# Parameters
# ----------
# ds: xarray.Dataset
# An `xarray.Dataset` that must contain 'nir', and 'red' `DataArrays`.
# G, C, L: float
# G is the gain factor - a constant scaling factor.
# C pertains to aerosols in clouds.
# L adjusts for canopy background and soil appearance. It particularly pertains to
# the nir and red bands, which are transmitted non-linearly through a canopy.
# normalize: boolean
# Whether to normalize to the range [-1,1] - the range of most common spectral indices.
#
# Returns
# -------
# evi: xarray.DataArray
# An `xarray.DataArray` with the same shape as `ds` - the same coordinates in
# the same order.
# """
# evi = G * (ds.nir - ds.red) / (ds.nir + C * ds.red + L)
# # Clamp values to the range [-1,2.5].
# evi.values[evi.values < -1] = -1
# evi.values[2.5 < evi.values] = 2.5
# if normalize:
# # Scale values in the range [0,2.5] to the range [0,1].
# pos_vals_mask = 0 < evi.values
# evi.values[pos_vals_mask] = np.interp(evi.values[pos_vals_mask], (0, 2.5), (0, 1))
# return evi
#
# def NDVI(ds):
# """
# Computes the Normalized Difference Vegetation Index for an `xarray.Dataset`.
# The formula is (NIR - RED) / (NIR + RED).
# Values should be in the range [-1,1] for valid LANDSAT data (nir and red are positive).
#
# Parameters
# ----------
# ds: xarray.Dataset
# An `xarray.Dataset` that must contain 'nir' and 'red' `DataArrays`.
#
# Returns
# -------
# ndvi: xarray.DataArray
# An `xarray.DataArray` with the same shape as `ds` - the same coordinates in
# the same order.
# """
# return (ds.nir - ds.red) / (ds.nir + ds.red)
. Output only the next line. | return _NDVI_orig(*args, **kwargs) |
Predict the next line after this snippet: <|code_start|>
def landsat_clean_mask_invalid(dataset, platform, collection, level):
"""
Masks out invalid data according to the LANDSAT
surface reflectance specifications. See this document:
https://landsat.usgs.gov/sites/default/files/documents/ledaps_product_guide.pdf pages 19-20.
Parameters
----------
dataset: xarray.Dataset
An `xarray.Dataset` containing bands such as 'red', 'green', or 'blue'.
platform: str
A string denoting the platform to be used. Can be
"LANDSAT_5", "LANDSAT_7", or "LANDSAT_8".
collection: string
The Landsat collection of the data.
Can be any of ['c1', 'c2'] for Collection 1 or 2, respectively.
level: string
The processing level of the Landsat data.
Currently only 'l2' (Level 2) is supported.
Returns
-------
valid_mask: xarray.DataArray
An `xarray.DataArray` with the same number and order of coordinates as in `dataset`.
The `True` values specify what pixels are valid.
"""
valid_mask = None
data_arr_names = [arr_name for arr_name in list(dataset.data_vars)
if arr_name not in ['pixel_qa', 'radsat_qa', 'cloud_qa']]
<|code_end|>
using the current file's imports:
import warnings
import numpy as np
import xarray as xr
from .dc_utilities import get_range
and any relevant context from other files:
# Path: data_cube_utilities/dc_utilities.py
# def get_range(platform, collection, level):
# """
# Obtain the "valid" value range for a given combination of platform,
# collection, level, and data variable (does vary by data variable for some products).
#
# Parameters
# ----------
# platform: str
# A string denoting the platform to be used. Can be
# "LANDSAT_5", "LANDSAT_7", or "LANDSAT_8".
# collection: string
# The Landsat collection of the data.
# Can be any of ['c1', 'c2'] for Collection 1 or 2, respectively.
# level: string
# The processing level of the Landsat data.
# Currently only 'l2' (Level 2) is supported.
#
# Returns
# -------
# range: dict or list or None
# A dict of 2-tuples (lists) denoting the range for each data variable with a recorded range.
# `None` otherwise.
# """
# range_dict = None
# if (platform, collection, level) in \
# [('LANDSAT_5', 'c1', 'l2'), ('LANDSAT_7', 'c1', 'l2'),
# ('LANDSAT_8', 'c1', 'l2')]:
# range_dict = {'red': [0, 10000], 'green': [0, 10000], 'blue': [0, 10000],
# 'nir': [0, 10000], 'swir1': [0, 10000], 'swir2': [0, 10000]}
# elif (platform, collection, level) in \
# [('LANDSAT_5', 'c2', 'l2'), ('LANDSAT_7', 'c2', 'l2'),
# ('LANDSAT_8', 'c2', 'l2')]:
# range_dict = {'red': [1, 65455], 'green': [1, 65455], 'blue': [1, 65455],
# 'nir': [1, 65455], 'swir1': [1, 65455], 'swir2': [1, 65455]}
# return range_dict
. Output only the next line. | rng = get_range(platform, collection, level) |
Based on the snippet: <|code_start|> classified[_tmp2] = 0 #Node 34
_tmp = r1 & ~r11
r18 = ndi_52 <= 0.34
classified[_tmp & ~r18] = 0 #Node 36
_tmp &= r18
r19 = band1 <= 249.5
classified[_tmp & ~r19] = 0 #Node 38
_tmp &= r19
r20 = ndi_43 <= 0.45
classified[_tmp & ~r20] = 0 #Node 40
_tmp &= r20
r21 = band3 <= 364.5
classified[_tmp & ~r21] = 0 #Node 42
_tmp &= r21
r22 = band1 <= 129.5
classified[_tmp & r22] = 1 #Node 44
classified[_tmp & ~r22] = 0 #Node 45
# Completed regression tree
return classified
# Default to masking nothing.
if clean_mask is None:
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import gc
import numpy as np
import xarray as xr
import dask
import datacube
import warnings
import argparse
import collections
from osgeo import gdal
from datetime import datetime
from . import dc_utilities as utilities
from .dc_utilities import create_default_clean_mask
and context (classes, functions, sometimes code) from other files:
# Path: data_cube_utilities/dc_utilities.py
# def create_default_clean_mask(dataset_in):
# """
# Creates a data mask that masks nothing.
#
# Parameters
# ----------
# dataset_in: xarray.Dataset
# dataset retrieved from the Data Cube.
#
# Raises
# ------
# ValueError
# if dataset_in is an empty xarray.Dataset.
# """
# data = None
# if isinstance(dataset_in, xr.Dataset):
# data_vars = list(dataset_in.data_vars)
# if len(data_vars) != 0:
# data = dataset_in[data_vars[0]].data
# elif isinstance(dataset_in, xr.DataArray):
# data = dataset_in.data
# clean_mask = None
# if isinstance(data, dask.array.core.Array):
# clean_mask = dask.array.ones_like(data, dtype='uint8')
# else:
# if data is None:
# clean_mask = np.ones(dataset_in.shape, dtype=np.bool)
# else:
# clean_mask = np.ones_like(data, dtype=np.bool)
# return clean_mask.astype(np.bool)
. Output only the next line. | clean_mask = create_default_clean_mask(dataset_in) |
Given the following code snippet before the placeholder: <|code_start|> self.times = [
datetime(1999, 5, 6),
datetime(2006, 1, 2),
datetime(2006, 1, 16),
datetime(2015, 12, 31),
datetime(2016, 1, 1),
]
self.latitudes = [1, 2]
self.longitudes = [1, 2]
self.sample_data = np.array([[[1, 1], [1, 1]],
[[2, 2], [2, 2]],
[[3, 3], [3, 3]],
[[0, 0], [0, 0]],
[[5, 5], [5, 5]]])
# yapf: enable
def tearDown(self):
pass
def test_create_cfmask_clean_mask(self):
dataset = xr.Dataset(
{
'cf_mask': (('time', 'latitude', 'longitude'), self.sample_data)
},
coords={'time': self.times,
'latitude': self.latitudes,
'longitude': self.longitudes})
<|code_end|>
, predict the next line using imports from the current file:
import unittest
import numpy as np
import xarray as xr
from datetime import datetime
from data_cube_utilities import dc_utilities
and context including class names, function names, and sometimes code from other files:
# Path: data_cube_utilities/dc_utilities.py
# def get_range(platform, collection, level):
# def convert_range(dataset, from_platform, from_collection, from_level,
# to_platform, to_collection, to_level):
# def convert_data_var(data_arr, data_var_name):
# def reverse_array_dict(dictionary):
# def list_prod(lst):
# def check_for_float(array):
# def create_cfmask_clean_mask(cfmask):
# def create_default_clean_mask(dataset_in):
# def get_spatial_ref(crs):
# def perform_timeseries_analysis(dataset_in, band_name, intermediate_product=None, no_data=-9999, operation="mean"):
# def clear_attrs(dataset):
# def create_bit_mask(data_array, valid_bits, no_data=-9999):
# def add_timestamp_data_to_xr(dataset):
# def write_geotiff_from_xr(tif_path, data, bands=None, no_data=-9999, crs="EPSG:4326",
# x_coord='longitude', y_coord='latitude'):
# def write_png_from_xr(png_path, dataset, bands, png_filled_path=None, fill_color='red', scale=None, low_res=False,
# no_data=-9999, crs="EPSG:4326"):
# def write_single_band_png_from_xr(png_path, dataset, band, color_scale=None, fill_color=None, interpolate=True,
# no_data=-9999, crs="EPSG:4326"):
# def _get_transform_from_xr(data, x_coord='longitude', y_coord='latitude'):
# def chunks(l, n):
# def ignore_warnings(func, *args, **kwargs):
# X = dataset[[data_var_name]].stack(row=('latitude', 'longitude', ...)).to_array().transpose('row', 'variable')
# X = X.where(~nan_mask_x, drop=True)
. Output only the next line. | cf_mask = dc_utilities.create_cfmask_clean_mask(dataset.cf_mask) |
Next line prediction: <|code_start|>
class TestChunker(unittest.TestCase):
def setUp(self):
self.negative_to_positive = (-1, 1)
self.positive_to_negative = (1, -1)
self.dates = [
datetime(2005, 1, 1), datetime(2006, 1, 1), datetime(2007, 5, 3), datetime(2014, 2, 1), datetime(2000, 1, 1)
]
def tearDown(self):
pass
def test_create_geographic_chunks(self):
with self.assertRaises(AssertionError):
<|code_end|>
. Use current file imports:
(import unittest
import xarray as xr
import numpy as np
from datetime import datetime
from data_cube_utilities import dc_chunker)
and context including class names, function names, or small code snippets from other files:
# Path: data_cube_utilities/dc_chunker.py
# def create_geographic_chunks(longitude=None, latitude=None, geographic_chunk_size=0.5):
# def create_square_geographic_chunks(longitude=None, latitude=None,
# geographic_chunk_size=0.05):
# def combine_geographic_chunks(chunks):
# def create_time_chunks(datetime_list, _reversed=False, time_chunk_size=10):
# def group_datetimes_by_year(datetime_list):
# def group_datetimes_by_month(datetime_list, months=[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]):
# def _chunk_iterable(_iterable, chunk_size):
# def generate_baseline(_iterable, window_length):
. Output only the next line. | dc_chunker.create_geographic_chunks(longitude=self.positive_to_negative, latitude=self.positive_to_negative) |
Here is a snippet: <|code_start|> Fits a polynomial of any positive integer degree to some data - x and y. Returns predicted interpolation values.
Parameters
----------
x: list-like
The x values of the data to fit to.
y: list-like
The y values of the data to fit to.
x_smooth: list-like
The exact x values to interpolate for. Supercedes `n_pts`.
n_pts: int
The number of evenly spaced points spanning the range of `x` to interpolate for.
degree: int
The degree of the polynomial to fit.
Returns
-------
x_smooth, y_smooth: numpy.ndarray
The smoothed x and y values of the curve fit.
"""
if x_smooth is None:
x_smooth_inds = np.linspace(0, len(x)-1, n_pts)
x_smooth = np.interp(x_smooth_inds, np.arange(len(x)), x)
y_smooth = np.array([np.array([coef * (x_val ** current_degree) for
coef, current_degree in zip(np.polyfit(x, y, degree),
range(degree, -1, -1))]).sum() for x_val in x_smooth])
return x_smooth, y_smooth
def fourier_fit(x, y, n_predict=0, x_smooth=None, n_pts=n_pts_smooth,
<|code_end|>
. Write the next line using the current file imports:
import numpy as np
from numpy import fft
from .plotter_utils_consts import n_pts_smooth, default_fourier_n_harm
from scipy.optimize import curve_fit
from .scale import np_scale
from scipy.interpolate import CubicSpline
from scipy.ndimage.filters import gaussian_filter1d
and context from other files:
# Path: data_cube_utilities/plotter_utils_consts.py
, which may include functions, classes, or code. Output only the next line. | n_harm=default_fourier_n_harm): |
Given snippet: <|code_start|># Copyright 2014 Rustici Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: serializable_base
:synopsis: A base object that provides the common initializer from :class:`tincan.Base`
as well as common serialization functionality
"""
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import json
import uuid
import datetime
import re
from tincan.base import Base
from tincan.version import Version
from tincan.conversions.iso8601 import jsonify_datetime, jsonify_timedelta
and context:
# Path: tincan/base.py
# class Base(object):
# _props = []
#
# def __init__(self, *args, **kwargs):
# """Initializes an object by checking the provided arguments
# against lists defined in the individual class. If required
# properties are defined, this method will set them to None by default.
# Optional properties will be ignored if they are not provided. The
# class may provide custom setters for properties, in which case those
# setters (see __setattr__ below).
#
# """
# if hasattr(self, '_props_req') and self._props_req:
# list(map(lambda k: setattr(self, k, None), self._props_req))
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for key, value in new_kwargs.items():
# setattr(self, key, value)
#
# def __setattr__(self, attr, value):
# """Makes sure that only allowed properties are set. This method will
# call the proper attribute setter as defined in the class to provide
# additional error checking
#
# :param attr: the attribute being set
# :type attr: str
# :param value: the value to set
#
# """
# if attr.startswith('_') and attr[1:] in self._props:
# super(Base, self).__setattr__(attr, value)
# elif attr not in self._props:
# raise AttributeError(
# f"Property '{attr}' cannot be set on a 'tincan.{self.__class__.__name__}' object."
# f"Allowed properties: {', '.join(self._props)}"
# )
# else:
# super(Base, self).__setattr__(attr, value)
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# Path: tincan/version.py
# class Version(object):
# supported = [
# '1.0.3',
# '1.0.2',
# '1.0.1',
# '1.0.0',
# ]
# latest = supported[0]
#
# Path: tincan/conversions/iso8601.py
# def jsonify_datetime(value):
# assert isinstance(value, datetime.datetime)
# return value.isoformat()
#
# def jsonify_timedelta(value):
# """Converts a `datetime.timedelta` to an ISO 8601 duration
# string for JSON-ification.
#
# :param value: something to convert
# :type value: datetime.timedelta
# :return: the value after conversion
# :rtype unicode
#
# """
#
# assert isinstance(value, datetime.timedelta)
#
# # split seconds to larger units
# seconds = value.total_seconds()
# minutes, seconds = divmod(seconds, 60)
# hours, minutes = divmod(minutes, 60)
# days, hours = divmod(hours, 24)
#
# days, hours, minutes = list(map(int, (days, hours, minutes)))
# seconds = round(seconds, 6)
#
# # build date
# date = ''
# if days:
# date = '%sD' % days
#
# # build time
# time = 'T'
#
# # hours
# bigger_exists = date or hours
# if bigger_exists:
# time += '{:02}H'.format(hours)
#
# # minutes
# bigger_exists = bigger_exists or minutes
# if bigger_exists:
# time += '{:02}M'.format(minutes)
#
# # seconds
# if seconds.is_integer():
# seconds = '{:02}'.format(int(seconds))
# else:
# # 9 chars long w/leading 0, 6 digits after decimal
# seconds = '%09.6f' % seconds
# # remove trailing zeros
# seconds = seconds.rstrip('0')
#
# time += '{}S'.format(seconds)
#
# return 'P' + date + time
which might include code, classes, or functions. Output only the next line. | class SerializableBase(Base): |
Given the following code snippet before the placeholder: <|code_start|> for uscore, camel in self._props_corrected.items():
if camel in new_kwargs:
new_kwargs[uscore[1:]] = new_kwargs[camel]
new_kwargs.pop(camel)
super(SerializableBase, self).__init__(**new_kwargs)
@classmethod
def from_json(cls, json_data):
"""Tries to convert a JSON representation to an object of the same
type as self
A class can provide a _fromJSON implementation in order to do specific
type checking or other custom implementation details. This method
will throw a ValueError for invalid JSON, a TypeError for
improperly constructed, but valid JSON, and any custom errors
that can be be propagated from class constructors.
:param json_data: The JSON string to convert
:type json_data: str | unicode
:raises: TypeError, ValueError, LanguageMapInitError
"""
data = json.loads(json_data)
result = cls(data)
if hasattr(result, "_from_json"):
result._from_json()
return result
<|code_end|>
, predict the next line using imports from the current file:
import json
import uuid
import datetime
import re
from tincan.base import Base
from tincan.version import Version
from tincan.conversions.iso8601 import jsonify_datetime, jsonify_timedelta
and context including class names, function names, and sometimes code from other files:
# Path: tincan/base.py
# class Base(object):
# _props = []
#
# def __init__(self, *args, **kwargs):
# """Initializes an object by checking the provided arguments
# against lists defined in the individual class. If required
# properties are defined, this method will set them to None by default.
# Optional properties will be ignored if they are not provided. The
# class may provide custom setters for properties, in which case those
# setters (see __setattr__ below).
#
# """
# if hasattr(self, '_props_req') and self._props_req:
# list(map(lambda k: setattr(self, k, None), self._props_req))
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for key, value in new_kwargs.items():
# setattr(self, key, value)
#
# def __setattr__(self, attr, value):
# """Makes sure that only allowed properties are set. This method will
# call the proper attribute setter as defined in the class to provide
# additional error checking
#
# :param attr: the attribute being set
# :type attr: str
# :param value: the value to set
#
# """
# if attr.startswith('_') and attr[1:] in self._props:
# super(Base, self).__setattr__(attr, value)
# elif attr not in self._props:
# raise AttributeError(
# f"Property '{attr}' cannot be set on a 'tincan.{self.__class__.__name__}' object."
# f"Allowed properties: {', '.join(self._props)}"
# )
# else:
# super(Base, self).__setattr__(attr, value)
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# Path: tincan/version.py
# class Version(object):
# supported = [
# '1.0.3',
# '1.0.2',
# '1.0.1',
# '1.0.0',
# ]
# latest = supported[0]
#
# Path: tincan/conversions/iso8601.py
# def jsonify_datetime(value):
# assert isinstance(value, datetime.datetime)
# return value.isoformat()
#
# def jsonify_timedelta(value):
# """Converts a `datetime.timedelta` to an ISO 8601 duration
# string for JSON-ification.
#
# :param value: something to convert
# :type value: datetime.timedelta
# :return: the value after conversion
# :rtype unicode
#
# """
#
# assert isinstance(value, datetime.timedelta)
#
# # split seconds to larger units
# seconds = value.total_seconds()
# minutes, seconds = divmod(seconds, 60)
# hours, minutes = divmod(minutes, 60)
# days, hours = divmod(hours, 24)
#
# days, hours, minutes = list(map(int, (days, hours, minutes)))
# seconds = round(seconds, 6)
#
# # build date
# date = ''
# if days:
# date = '%sD' % days
#
# # build time
# time = 'T'
#
# # hours
# bigger_exists = date or hours
# if bigger_exists:
# time += '{:02}H'.format(hours)
#
# # minutes
# bigger_exists = bigger_exists or minutes
# if bigger_exists:
# time += '{:02}M'.format(minutes)
#
# # seconds
# if seconds.is_integer():
# seconds = '{:02}'.format(int(seconds))
# else:
# # 9 chars long w/leading 0, 6 digits after decimal
# seconds = '%09.6f' % seconds
# # remove trailing zeros
# seconds = seconds.rstrip('0')
#
# time += '{}S'.format(seconds)
#
# return 'P' + date + time
. Output only the next line. | def to_json(self, version=Version.latest): |
Predict the next line for this snippet: <|code_start|> def as_version(self, version=Version.latest):
"""Returns a dict that has been modified based on versioning
in order to be represented in JSON properly
A class should overload as_version(self, version)
implementation in order to tailor a more specific representation
:param version: the relevant version. This allows for variance
between versions
:type version: str | unicode
"""
if not isinstance(self, list):
result = {}
for k, v in iter(self.items()) if isinstance(self, dict) else iter(vars(self).items()):
k = self._props_corrected.get(k, k)
if isinstance(v, SerializableBase):
result[k] = v.as_version(version)
elif isinstance(v, list):
result[k] = []
for val in v:
if isinstance(val, SerializableBase):
result[k].append(val.as_version(version))
else:
result[k].append(val)
elif isinstance(v, uuid.UUID):
result[k] = str(v)
elif isinstance(v, datetime.timedelta):
result[k] = jsonify_timedelta(v)
elif isinstance(v, datetime.datetime):
<|code_end|>
with the help of current file imports:
import json
import uuid
import datetime
import re
from tincan.base import Base
from tincan.version import Version
from tincan.conversions.iso8601 import jsonify_datetime, jsonify_timedelta
and context from other files:
# Path: tincan/base.py
# class Base(object):
# _props = []
#
# def __init__(self, *args, **kwargs):
# """Initializes an object by checking the provided arguments
# against lists defined in the individual class. If required
# properties are defined, this method will set them to None by default.
# Optional properties will be ignored if they are not provided. The
# class may provide custom setters for properties, in which case those
# setters will be used (see __setattr__ below).
#
# """
# if hasattr(self, '_props_req') and self._props_req:
# list(map(lambda k: setattr(self, k, None), self._props_req))
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for key, value in new_kwargs.items():
# setattr(self, key, value)
#
# def __setattr__(self, attr, value):
# """Makes sure that only allowed properties are set. This method will
# call the proper attribute setter as defined in the class to provide
# additional error checking
#
# :param attr: the attribute being set
# :type attr: str
# :param value: the value to set
#
# """
# if attr.startswith('_') and attr[1:] in self._props:
# super(Base, self).__setattr__(attr, value)
# elif attr not in self._props:
# raise AttributeError(
# f"Property '{attr}' cannot be set on a 'tincan.{self.__class__.__name__}' object."
# f"Allowed properties: {', '.join(self._props)}"
# )
# else:
# super(Base, self).__setattr__(attr, value)
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# Path: tincan/version.py
# class Version(object):
# supported = [
# '1.0.3',
# '1.0.2',
# '1.0.1',
# '1.0.0',
# ]
# latest = supported[0]
#
# Path: tincan/conversions/iso8601.py
# def jsonify_datetime(value):
# assert isinstance(value, datetime.datetime)
# return value.isoformat()
#
# def jsonify_timedelta(value):
# """Converts a `datetime.timedelta` to an ISO 8601 duration
# string for JSON-ification.
#
# :param value: something to convert
# :type value: datetime.timedelta
# :return: the value after conversion
# :rtype unicode
#
# """
#
# assert isinstance(value, datetime.timedelta)
#
# # split seconds to larger units
# seconds = value.total_seconds()
# minutes, seconds = divmod(seconds, 60)
# hours, minutes = divmod(minutes, 60)
# days, hours = divmod(hours, 24)
#
# days, hours, minutes = list(map(int, (days, hours, minutes)))
# seconds = round(seconds, 6)
#
# # build date
# date = ''
# if days:
# date = '%sD' % days
#
# # build time
# time = 'T'
#
# # hours
# bigger_exists = date or hours
# if bigger_exists:
# time += '{:02}H'.format(hours)
#
# # minutes
# bigger_exists = bigger_exists or minutes
# if bigger_exists:
# time += '{:02}M'.format(minutes)
#
# # seconds
# if seconds.is_integer():
# seconds = '{:02}'.format(int(seconds))
# else:
# # 9 chars long w/leading 0, 6 digits after decimal
# seconds = '%09.6f' % seconds
# # remove trailing zeros
# seconds = seconds.rstrip('0')
#
# time += '{}S'.format(seconds)
#
# return 'P' + date + time
, which may contain function names, class names, or code. Output only the next line. | result[k] = jsonify_datetime(v) |
Based on the snippet: <|code_start|> return json.dumps(self.as_version(version))
def as_version(self, version=Version.latest):
"""Returns a dict that has been modified based on versioning
in order to be represented in JSON properly
A class should overload as_version(self, version)
implementation in order to tailor a more specific representation
:param version: the relevant version. This allows for variance
between versions
:type version: str | unicode
"""
if not isinstance(self, list):
result = {}
for k, v in iter(self.items()) if isinstance(self, dict) else iter(vars(self).items()):
k = self._props_corrected.get(k, k)
if isinstance(v, SerializableBase):
result[k] = v.as_version(version)
elif isinstance(v, list):
result[k] = []
for val in v:
if isinstance(val, SerializableBase):
result[k].append(val.as_version(version))
else:
result[k].append(val)
elif isinstance(v, uuid.UUID):
result[k] = str(v)
elif isinstance(v, datetime.timedelta):
<|code_end|>
, predict the immediate next line with the help of imports:
import json
import uuid
import datetime
import re
from tincan.base import Base
from tincan.version import Version
from tincan.conversions.iso8601 import jsonify_datetime, jsonify_timedelta
and context (classes, functions, sometimes code) from other files:
# Path: tincan/base.py
# class Base(object):
# _props = []
#
# def __init__(self, *args, **kwargs):
# """Initializes an object by checking the provided arguments
# against lists defined in the individual class. If required
# properties are defined, this method will set them to None by default.
# Optional properties will be ignored if they are not provided. The
# class may provide custom setters for properties, in which case those
# setters (see __setattr__ below).
#
# """
# if hasattr(self, '_props_req') and self._props_req:
# list(map(lambda k: setattr(self, k, None), self._props_req))
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for key, value in new_kwargs.items():
# setattr(self, key, value)
#
# def __setattr__(self, attr, value):
# """Makes sure that only allowed properties are set. This method will
# call the proper attribute setter as defined in the class to provide
# additional error checking
#
# :param attr: the attribute being set
# :type attr: str
# :param value: the value to set
#
# """
# if attr.startswith('_') and attr[1:] in self._props:
# super(Base, self).__setattr__(attr, value)
# elif attr not in self._props:
# raise AttributeError(
# f"Property '{attr}' cannot be set on a 'tincan.{self.__class__.__name__}' object."
# f"Allowed properties: {', '.join(self._props)}"
# )
# else:
# super(Base, self).__setattr__(attr, value)
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# Path: tincan/version.py
# class Version(object):
# supported = [
# '1.0.3',
# '1.0.2',
# '1.0.1',
# '1.0.0',
# ]
# latest = supported[0]
#
# Path: tincan/conversions/iso8601.py
# def jsonify_datetime(value):
# assert isinstance(value, datetime.datetime)
# return value.isoformat()
#
# def jsonify_timedelta(value):
# """Converts a `datetime.timedelta` to an ISO 8601 duration
# string for JSON-ification.
#
# :param value: something to convert
# :type value: datetime.timedelta
# :return: the value after conversion
# :rtype unicode
#
# """
#
# assert isinstance(value, datetime.timedelta)
#
# # split seconds to larger units
# seconds = value.total_seconds()
# minutes, seconds = divmod(seconds, 60)
# hours, minutes = divmod(minutes, 60)
# days, hours = divmod(hours, 24)
#
# days, hours, minutes = list(map(int, (days, hours, minutes)))
# seconds = round(seconds, 6)
#
# # build date
# date = ''
# if days:
# date = '%sD' % days
#
# # build time
# time = 'T'
#
# # hours
# bigger_exists = date or hours
# if bigger_exists:
# time += '{:02}H'.format(hours)
#
# # minutes
# bigger_exists = bigger_exists or minutes
# if bigger_exists:
# time += '{:02}M'.format(minutes)
#
# # seconds
# if seconds.is_integer():
# seconds = '{:02}'.format(int(seconds))
# else:
# # 9 chars long w/leading 0, 6 digits after decimal
# seconds = '%09.6f' % seconds
# # remove trailing zeros
# seconds = seconds.rstrip('0')
#
# time += '{}S'.format(seconds)
#
# return 'P' + date + time
. Output only the next line. | result[k] = jsonify_timedelta(v) |
Predict the next line after this snippet: <|code_start|> def timestamp(self):
"""The Document timestamp.
:setter: Tries to convert to :class:`datetime.datetime`. If
no timezone is given, makes a naive `datetime.datetime`.
Strings will be parsed as ISO 8601 timestamps.
If a number is provided, it will be interpreted as a UNIX
timestamp, which by definition is UTC.
If a `dict` is provided, does `datetime.datetime(**value)`.
If a `tuple` or a `list` is provided, does
`datetime.datetime(*value)`. Uses the timezone in the tuple or
list if provided.
:setter type: :class:`datetime.datetime` | unicode | str | int | float | dict | tuple | None
:rtype: :class:`datetime.datetime`
"""
return self._timestamp
@timestamp.setter
def timestamp(self, value):
if value is None:
self._timestamp = value
return
try:
<|code_end|>
using the current file's imports:
import datetime
from tincan.base import Base
from tincan.conversions.iso8601 import make_datetime
and any relevant context from other files:
# Path: tincan/base.py
# class Base(object):
# _props = []
#
# def __init__(self, *args, **kwargs):
# """Initializes an object by checking the provided arguments
# against lists defined in the individual class. If required
# properties are defined, this method will set them to None by default.
# Optional properties will be ignored if they are not provided. The
# class may provide custom setters for properties, in which case those
# setters will be used (see __setattr__ below).
#
# """
# if hasattr(self, '_props_req') and self._props_req:
# list(map(lambda k: setattr(self, k, None), self._props_req))
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for key, value in new_kwargs.items():
# setattr(self, key, value)
#
# def __setattr__(self, attr, value):
# """Makes sure that only allowed properties are set. This method will
# call the proper attribute setter as defined in the class to provide
# additional error checking
#
# :param attr: the attribute being set
# :type attr: str
# :param value: the value to set
#
# """
# if attr.startswith('_') and attr[1:] in self._props:
# super(Base, self).__setattr__(attr, value)
# elif attr not in self._props:
# raise AttributeError(
# f"Property '{attr}' cannot be set on a 'tincan.{self.__class__.__name__}' object."
# f"Allowed properties: {', '.join(self._props)}"
# )
# else:
# super(Base, self).__setattr__(attr, value)
#
# def __eq__(self, other):
# return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
#
# Path: tincan/conversions/iso8601.py
# def make_datetime(value):
# """Tries to convert the given value to a :class:`datetime.datetime`. If
# no timezone is given, raises a ValueError.
#
# Strings will be parsed as ISO 8601 timestamps.
#
# If a number is provided, it will be interpreted as a UNIX
# timestamp, which by definition is UTC.
#
# If a `dict` is provided, does `datetime.datetime(**value)`.
#
# If a `tuple` or a `list` is provided, does
# `datetime.datetime(*value)`. Uses the timezone in the tuple or
# list if provided.
#
# :param value: something to convert
# :type value: str | unicode | float | int | :class:`datetime.datetime` | dict | list | tuple
# :return: the value after conversion
# :rtype: :class:`datetime.datetime`
# :raises: ValueError | TypeError
#
# """
# result = _make_datetime(value)
# if not result.tzinfo:
# raise ValueError(
# f"value was a timestamp, but no timezone was set! "
# f"Value was a '{value.__class__.__name__}' object: {repr(value)}"
# f"\n\n"
# f"Converted to naive 'datetime.datetime' object: {repr(result)}"
# )
#
# return result
. Output only the next line. | self._timestamp = make_datetime(value) |
Next line prediction: <|code_start|># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
if __name__ == '__main__':
sys.path.insert(0, dirname(dirname(dirname(abspath(__file__)))))
setup_tincan_path()
class DocumentTest(unittest.TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_init_empty(self):
<|code_end|>
. Use current file imports:
(import unittest
import datetime
import pytz
import sys
from os.path import dirname, abspath
from test.main import setup_tincan_path
from tincan import Document)
and context including class names, function names, or small code snippets from other files:
# Path: tincan/documents/document.py
# class Document(Base):
# """Document class can be instantiated from a dict, another :class:`tincan.Document`, or from kwargs
#
# :param id: The id of this document
# :type id: unicode
# :param content_type: The content type of the content of this document
# :type content_type: unicode
# :param content: The content of this document
# :type content: bytearray
# :param etag: The etag of this document
# :type etag: unicode
# :param timestamp: The timestamp of this document
# :type timestamp: :class:`datetime.datetime`
#
# """
# _props_req = [
# 'id',
# 'content',
# 'content_type',
# 'etag',
# 'timestamp',
# ]
#
# _props = []
#
# _props.extend(_props_req)
#
# def __init__(self, *args, **kwargs):
# self._id = None
# self._content = None
# self._content_type = None
# self._etag = None
# self._timestamp = None
#
# super(Document, self).__init__(*args, **kwargs)
#
# @property
# def id(self):
# """The Document id
#
# :setter: Tries to convert to unicode
# :setter type: str | unicode
# :rtype: unicode
#
# """
# return self._id
#
# @id.setter
# def id(self, value):
# if isinstance(value, (bytes, bytearray)) and value is not None:
# value = value.decode("utf-8")
# self._id = value
#
# @property
# def content_type(self):
# """The Document content type
#
# :setter: Tries to convert to unicode
# :setter type: str | unicode
# :rtype: unicode
#
# """
# return self._content_type
#
# @content_type.setter
# def content_type(self, value):
# if isinstance(value, (bytes, bytearray)) and value is not None:
# value = value.decode("utf-8")
# self._content_type = value
#
# @property
# def content(self):
# """The Document content
#
# :setter: Tries to convert to bytearray.
# :setter type: str | unicode | bytearray
# :rtype: bytearray
#
# """
# return self._content
#
# @content.setter
# def content(self, value):
# if not isinstance(value, bytearray) and value is not None:
# value = bytearray(value, "utf-8")
#
# self._content = value
#
# @property
# def etag(self):
# """The Document etag
#
# :setter: Tries to convert to unicode
# :setter type: str | unicode
# :rtype: unicode
#
# """
# return self._etag
#
# @etag.setter
# def etag(self, value):
# if isinstance(value, (bytes, bytearray)) and value is not None:
# value = value.decode("utf-8")
# self._etag = value
#
# @property
# def timestamp(self):
# """The Document timestamp.
#
# :setter: Tries to convert to :class:`datetime.datetime`. If
# no timezone is given, makes a naive `datetime.datetime`.
#
# Strings will be parsed as ISO 8601 timestamps.
#
# If a number is provided, it will be interpreted as a UNIX
# timestamp, which by definition is UTC.
#
# If a `dict` is provided, does `datetime.datetime(**value)`.
#
# If a `tuple` or a `list` is provided, does
# `datetime.datetime(*value)`. Uses the timezone in the tuple or
# list if provided.
#
# :setter type: :class:`datetime.datetime` | unicode | str | int | float | dict | tuple | None
# :rtype: :class:`datetime.datetime`
#
# """
# return self._timestamp
#
# @timestamp.setter
# def timestamp(self, value):
# if value is None:
# self._timestamp = value
# return
#
# try:
# self._timestamp = make_datetime(value)
# except TypeError as e:
# message = (
# f"Property 'timestamp' in a 'tincan.{self.__class__.__name__}' "
# f"object must be set with a "
# f"datetime.datetime, str, unicode, int, float, dict "
# f"or None.\n\n{repr(e)}"
# )
# raise TypeError(message) from e
. Output only the next line. | doc = Document() |
Using the snippet: <|code_start|># Copyright 2014 Rustici Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: statements_result
:synopsis: Statements result model class, returned by LRS calls to get
multiple statements.
"""
<|code_end|>
, determine the next line of code. You have imports:
from tincan.serializable_base import SerializableBase
from tincan.statement_list import StatementList
and context (class names, function names, or code) available:
# Path: tincan/serializable_base.py
# class SerializableBase(Base):
# _props_corrected = {
# '_more_info': 'moreInfo',
# '_interaction_type': 'interactionType',
# '_correct_responses_pattern': 'correctResponsesPattern',
# '_object_type': 'objectType',
# '_usage_type': 'usageType',
# '_content_type': 'contentType',
# '_fileurl': 'fileUrl',
# '_context_activities': 'contextActivities',
# '_home_page': 'homePage',
# }
#
# _UUID_REGEX = re.compile(
# r'^[a-f0-9]{8}-'
# r'[a-f0-9]{4}-'
# r'[1-5][a-f0-9]{3}-'
# r'[89ab][a-f0-9]{3}-'
# r'[a-f0-9]{12}$'
# )
#
# def __init__(self, *args, **kwargs):
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for uscore, camel in self._props_corrected.items():
# if camel in new_kwargs:
# new_kwargs[uscore[1:]] = new_kwargs[camel]
# new_kwargs.pop(camel)
#
# super(SerializableBase, self).__init__(**new_kwargs)
#
# @classmethod
# def from_json(cls, json_data):
# """Tries to convert a JSON representation to an object of the same
# type as self
#
# A class can provide a _fromJSON implementation in order to do specific
# type checking or other custom implementation details. This method
# will throw a ValueError for invalid JSON, a TypeError for
# improperly constructed, but valid JSON, and any custom errors
# that can be be propagated from class constructors.
#
# :param json_data: The JSON string to convert
# :type json_data: str | unicode
#
# :raises: TypeError, ValueError, LanguageMapInitError
# """
#
# data = json.loads(json_data)
# result = cls(data)
# if hasattr(result, "_from_json"):
# result._from_json()
# return result
#
# def to_json(self, version=Version.latest):
# """Tries to convert an object into a JSON representation and return
# the resulting string
#
# An Object can define how it is serialized by overriding the as_version()
# implementation. A caller may further define how the object is serialized
# by passing in a custom encoder. The default encoder will ignore
# properties of an object that are None at the time of serialization.
#
# :param version: The version to which the object must be serialized to.
# This will default to the latest version supported by the library.
# :type version: str | unicode
#
# """
# return json.dumps(self.as_version(version))
#
# def as_version(self, version=Version.latest):
# """Returns a dict that has been modified based on versioning
# in order to be represented in JSON properly
#
# A class should overload as_version(self, version)
# implementation in order to tailor a more specific representation
#
# :param version: the relevant version. This allows for variance
# between versions
# :type version: str | unicode
#
# """
# if not isinstance(self, list):
# result = {}
# for k, v in iter(self.items()) if isinstance(self, dict) else iter(vars(self).items()):
# k = self._props_corrected.get(k, k)
# if isinstance(v, SerializableBase):
# result[k] = v.as_version(version)
# elif isinstance(v, list):
# result[k] = []
# for val in v:
# if isinstance(val, SerializableBase):
# result[k].append(val.as_version(version))
# else:
# result[k].append(val)
# elif isinstance(v, uuid.UUID):
# result[k] = str(v)
# elif isinstance(v, datetime.timedelta):
# result[k] = jsonify_timedelta(v)
# elif isinstance(v, datetime.datetime):
# result[k] = jsonify_datetime(v)
# else:
# result[k] = v
# result = self._filter_none(result)
# else:
# result = []
# for v in self:
# if isinstance(v, SerializableBase):
# result.append(v.as_version(version))
# else:
# result.append(v)
# return result
#
# @staticmethod
# def _filter_none(obj):
# """Filters out attributes set to None prior to serialization, and
# returns a new object without those attributes. This saves
# the serializer from sending empty bytes over the network. This method also
# fixes the keys to look as expected by ignoring a leading '_' if it
# is present.
#
# :param obj: the dictionary representation of an object that may have
# None attributes
# :type obj: dict
#
# """
# result = {}
# for k, v in obj.items():
# if v is not None:
# if k.startswith('_'):
# k = k[1:]
# result[k] = v
# return result
#
# Path: tincan/statement_list.py
# class StatementList(TypedList):
# _cls = Statement
. Output only the next line. | class StatementsResult(SerializableBase): |
Predict the next line after this snippet: <|code_start|>
class StatementsResult(SerializableBase):
_props_req = [
'statements',
'more',
]
_props = []
_props.extend(_props_req)
def __init__(self, *args, **kwargs):
self._statements = None
self._more = None
super(StatementsResult, self).__init__(*args, **kwargs)
@property
def statements(self):
"""Statements for StatementsResult
:setter: Tries to convert each element to :class:`tincan.Statement`
:setter type: list of :class:`tincan.Statement`
:rtype: list of :class:`tincan.Statement`
"""
return self._statements
@statements.setter
def statements(self, value):
if value is None:
<|code_end|>
using the current file's imports:
from tincan.serializable_base import SerializableBase
from tincan.statement_list import StatementList
and any relevant context from other files:
# Path: tincan/serializable_base.py
# class SerializableBase(Base):
# _props_corrected = {
# '_more_info': 'moreInfo',
# '_interaction_type': 'interactionType',
# '_correct_responses_pattern': 'correctResponsesPattern',
# '_object_type': 'objectType',
# '_usage_type': 'usageType',
# '_content_type': 'contentType',
# '_fileurl': 'fileUrl',
# '_context_activities': 'contextActivities',
# '_home_page': 'homePage',
# }
#
# _UUID_REGEX = re.compile(
# r'^[a-f0-9]{8}-'
# r'[a-f0-9]{4}-'
# r'[1-5][a-f0-9]{3}-'
# r'[89ab][a-f0-9]{3}-'
# r'[a-f0-9]{12}$'
# )
#
# def __init__(self, *args, **kwargs):
#
# new_kwargs = {}
# for obj in args:
# new_kwargs.update(obj if isinstance(obj, dict) else vars(obj))
#
# new_kwargs.update(kwargs)
#
# for uscore, camel in self._props_corrected.items():
# if camel in new_kwargs:
# new_kwargs[uscore[1:]] = new_kwargs[camel]
# new_kwargs.pop(camel)
#
# super(SerializableBase, self).__init__(**new_kwargs)
#
# @classmethod
# def from_json(cls, json_data):
# """Tries to convert a JSON representation to an object of the same
# type as self
#
# A class can provide a _fromJSON implementation in order to do specific
# type checking or other custom implementation details. This method
# will throw a ValueError for invalid JSON, a TypeError for
# improperly constructed, but valid JSON, and any custom errors
# that can be propagated from class constructors.
#
# :param json_data: The JSON string to convert
# :type json_data: str | unicode
#
# :raises: TypeError, ValueError, LanguageMapInitError
# """
#
# data = json.loads(json_data)
# result = cls(data)
# if hasattr(result, "_from_json"):
# result._from_json()
# return result
#
# def to_json(self, version=Version.latest):
# """Tries to convert an object into a JSON representation and return
# the resulting string
#
# An Object can define how it is serialized by overriding the as_version()
# implementation. A caller may further define how the object is serialized
# by passing in a custom encoder. The default encoder will ignore
# properties of an object that are None at the time of serialization.
#
# :param version: The version to which the object must be serialized to.
# This will default to the latest version supported by the library.
# :type version: str | unicode
#
# """
# return json.dumps(self.as_version(version))
#
# def as_version(self, version=Version.latest):
# """Returns a dict that has been modified based on versioning
# in order to be represented in JSON properly
#
# A class should overload as_version(self, version)
# implementation in order to tailor a more specific representation
#
# :param version: the relevant version. This allows for variance
# between versions
# :type version: str | unicode
#
# """
# if not isinstance(self, list):
# result = {}
# for k, v in iter(self.items()) if isinstance(self, dict) else iter(vars(self).items()):
# k = self._props_corrected.get(k, k)
# if isinstance(v, SerializableBase):
# result[k] = v.as_version(version)
# elif isinstance(v, list):
# result[k] = []
# for val in v:
# if isinstance(val, SerializableBase):
# result[k].append(val.as_version(version))
# else:
# result[k].append(val)
# elif isinstance(v, uuid.UUID):
# result[k] = str(v)
# elif isinstance(v, datetime.timedelta):
# result[k] = jsonify_timedelta(v)
# elif isinstance(v, datetime.datetime):
# result[k] = jsonify_datetime(v)
# else:
# result[k] = v
# result = self._filter_none(result)
# else:
# result = []
# for v in self:
# if isinstance(v, SerializableBase):
# result.append(v.as_version(version))
# else:
# result.append(v)
# return result
#
# @staticmethod
# def _filter_none(obj):
# """Filters out attributes set to None prior to serialization, and
# returns a new object without those attributes. This saves
# the serializer from sending empty bytes over the network. This method also
# fixes the keys to look as expected by ignoring a leading '_' if it
# is present.
#
# :param obj: the dictionary representation of an object that may have
# None attributes
# :type obj: dict
#
# """
# result = {}
# for k, v in obj.items():
# if v is not None:
# if k.startswith('_'):
# k = k[1:]
# result[k] = v
# return result
#
# Path: tincan/statement_list.py
# class StatementList(TypedList):
# _cls = Statement
. Output only the next line. | self._statements = StatementList() |
Given snippet: <|code_start|># Copyright 2014 Rustici Software
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module:: interactioncomponentlist
:synopsis: A wrapper for list that is able to type check
"""
class InteractionComponentList(TypedList):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from tincan.interaction_component import InteractionComponent
from tincan.typed_list import TypedList
and context:
# Path: tincan/interaction_component.py
# class InteractionComponent(SerializableBase):
# _props_req = [
# 'id',
# ]
#
# _props = [
# 'description',
# ]
#
# _props.extend(_props_req)
#
# def __init__(self, *args, **kwargs):
# self._id = None
# self._description = None
#
# super(InteractionComponent, self).__init__(*args, **kwargs)
#
# @property
# def id(self):
# """Id for Agent
#
# :setter: Tries to convert to unicode
# :setter type: unicode
# :rtype: unicode
#
# """
# return self._id
#
# @id.setter
# def id(self, value):
# if value is not None:
# if value == '':
# raise ValueError("id cannot be set to an empty string or non-string type")
# self._id = None if value is None else str(value)
#
# @property
# def description(self):
# """Description for Agent
#
# :setter: Tries to convert to :class:`tincan.LanguageMap`
# :setter type: :class:`tincan.LanguageMap`
# :rtype: :class:`tincan.LanguageMap`
#
# """
# return self._description
#
# @description.setter
# def description(self, value):
# if value is not None and not isinstance(value, LanguageMap):
# value = LanguageMap(value)
# self._description = value
#
# @description.deleter
# def description(self):
# del self._description
#
# Path: tincan/typed_list.py
# class TypedList(list, SerializableBase):
# _cls = None
#
# def __init__(self, *args, **kwargs):
# self._check_cls()
# new_args = [self._make_cls(v) for v in list(*args, **kwargs)]
# super(TypedList, self).__init__(new_args)
#
# def __setitem__(self, ind, value):
# self._check_cls()
# value = self._make_cls(value)
# super(TypedList, self).__setitem__(ind, value)
#
# def _check_cls(self):
# """If self._cls is not set, raises ValueError.
#
# :raises: ValueError
# """
# if self._cls is None:
# raise ValueError("_cls has not been set")
#
# def _make_cls(self, value):
# """If value is not instance of self._cls, converts and returns
# it. Otherwise, returns value.
#
# :param value: the thing to make a self._cls from
# :rtype self._cls
# """
# if isinstance(value, self._cls):
# return value
# return self._cls(value)
#
# def append(self, value):
# self._check_cls()
# value = self._make_cls(value)
# super(TypedList, self).append(value)
#
# def extend(self, value):
# self._check_cls()
# new_args = [self._make_cls(v) for v in value]
# super(TypedList, self).extend(new_args)
#
# def insert(self, ind, value):
# self._check_cls()
# value = self._make_cls(value)
# super(TypedList, self).insert(ind, value)
which might include code, classes, or functions. Output only the next line. | _cls = InteractionComponent |
Here is a snippet: <|code_start|>
class Rectangle(object):
def __init__(self, matrix):
self.value = 0.0
self.hue = random()
self.x = randrange(0, matrix.width)
self.y = randrange(0, matrix.height)
self.w = randrange(int(matrix.width/8), int(matrix.width/2))
self.h = randrange(int(matrix.height/8), int(matrix.height/2))
def update(self, matrix):
self.value += 0.2
matrix.fillRect(
self.x, self.y,
self.w, self.h,
<|code_end|>
. Write the next line using the current file imports:
from ._baseclass import ArtBaseClass
from opc.hue import hsvToRgb
from random import random, randrange
and context from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
, which may include functions, classes, or code. Output only the next line. | hsvToRgb(h=self.hue, v=self.value), |
Using the snippet: <|code_start|>
# implementation of a linear feedback shift register generator that gives us
# n-bits of pseudo-random values, completing the entire sequence before
# repeating.
# polynomials required to calculate a n-bit lsfr. Higher bits are
# available, just not included here yet.
poly = {
2: (2, 1),
3: (3, 2),
4: (4, 3),
5: (5, 3),
6: (6, 5),
7: (7, 6),
8: (8, 6, 5, 4),
9: (9, 5),
10: (10, 7),
11: (11, 9),
12: (12, 11, 14),
13: (13, 12, 11, 8),
14: (14, 13, 12, 2),
15: (15, 14),
16: (16, 14, 13, 11),
}
def bit(value, bit):
return (value >> bit) & 1
<|code_end|>
, determine the next line of code. You have imports:
from random import random
from opc.utils.prof import timefunc
and context (class names, function names, or code) available:
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
. Output only the next line. | @timefunc |
Based on the snippet: <|code_start|> def __init__(self, x, y, radius):
self.x = x
self.y = y
self.phase = 0
self.radius = radius
def transform(self, angle):
x = self.x + self.radius * sin(angle)
y = self.y + self.radius * cos(angle)
return x, y
class Racer(object):
def __init__(self, x, y, position, accel):
self.polar = Polar(x, y, position)
self.accel = accel
def next(self, theta):
angle = self.accel * sin(theta)
return self.polar.transform(angle)
class Art(ArtBaseClass):
description = "Racing pixels"
def __init__(self, matrix, config):
<|code_end|>
, predict the immediate next line with the help of imports:
from ._baseclass import ArtBaseClass
from math import sin, cos, pi
from opc.hue import getColorGen
from .utils.frange import frange
and context (classes, functions, sometimes code) from other files:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
. Output only the next line. | self.hue = getColorGen(0.001) |
Predict the next line for this snippet: <|code_start|> self.y = y
self.dx = dx
self.dy = dy
self.hue = hue
self.age = 0
def update(self, matrix, expires):
decay = 1-(float(self.age)/expires)
self.age += 1
color = hsvToRgb(self.hue, 1, decay)
matrix.drawPixel(self.x, self.y, color)
self.x += self.dx
if self.x == 0 or self.x >= (matrix.width-1):
self.dx = -self.dx
self.y += self.dy
if self.y == 0 or self.y >= (matrix.height-1):
self.dy = -self.dy
def expire(self, matrix):
matrix.drawPixel(self.x, self.y, BLACK)
class Gun(object):
def __init__(self, matrix):
self.points = []
self.expires = int(matrix.numpix/2)
self.location = self._locationGenerator(matrix)
<|code_end|>
with the help of current file imports:
from opc.colors import BLACK
from opc.hue import getHueGen, hsvToRgb
from random import randint
and context from other files:
# Path: opc/hue.py
# def getHueGen(step=0.05, hue=0):
# """
# Generator that returns a stream of shades as hues
# """
# while True:
# hue = fmod(hue + step, 1)
# yield hue
#
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
, which may contain function names, class names, or code. Output only the next line. | self.hue = getHueGen(step=0.05, hue=randint(0, 100)/100.0) |
Given the code snippet: <|code_start|>
class Point(object):
def __init__(self, x, y, dx, dy, hue):
self.x = x
self.y = y
self.dx = dx
self.dy = dy
self.hue = hue
self.age = 0
def update(self, matrix, expires):
decay = 1-(float(self.age)/expires)
self.age += 1
<|code_end|>
, generate the next line using the imports in this file:
from opc.colors import BLACK
from opc.hue import getHueGen, hsvToRgb
from random import randint
and context (functions, classes, or occasionally code) from other files:
# Path: opc/hue.py
# def getHueGen(step=0.05, hue=0):
# """
# Generator that returns a stream of shades as hues
# """
# while True:
# hue = fmod(hue + step, 1)
# yield hue
#
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | color = hsvToRgb(self.hue, 1, decay) |
Given snippet: <|code_start|>
class Art(ArtBaseClass):
description = "Downsample a high-res image to improve perceived clarity"
def __init__(self, matrix, config):
self.width = sqrt(matrix.numpix)/3
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from ._baseclass import ArtBaseClass
from math import sin, cos, sqrt
from opc.colors import BLACK
from opc.hue import getColorGen
and context:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
which might include code, classes, or functions. Output only the next line. | self.hue = getColorGen(0.001) |
Based on the snippet: <|code_start|>
SEGMENTS=4
HUEDELTA=1/(SEGMENTS*1.5)
class Barber(ArtBaseClass):
def __init__(self, matrix, config):
self.width = int(matrix.width/SEGMENTS)
<|code_end|>
, predict the immediate next line with the help of imports:
from .. _baseclass import ArtBaseClass
from opc.hue import getHueGen
and context (classes, functions, sometimes code) from other files:
# Path: opc/hue.py
# def getHueGen(step=0.05, hue=0):
# """
# Generator that returns a stream of shades as hues
# """
# while True:
# hue = fmod(hue + step, 1)
# yield hue
. Output only the next line. | self.hue = getHueGen(step=0.01) |
Here is a snippet: <|code_start|>
class Art(ArtBaseClass):
description = "Bubbling pixels"
def __init__(self, matrix, config):
self.pieces = int(sqrt(matrix.numpix))
cycles = int(sqrt(matrix.numpix)*2)
<|code_end|>
. Write the next line using the current file imports:
from ._baseclass import ArtBaseClass
from .utils.shrapnel import Shrapnel
from math import sqrt
and context from other files:
# Path: art/utils/shrapnel.py
# class Shrapnel(Pen):
# def __init__(self, matrix, motion_cycles, huedelta=0.001, saturation=1,
# radius=0, decelerate=False):
# self.centerx = matrix.width/2.0
# self.centery = matrix.height/2.0
# self.cycles = motion_cycles
# self.decelerate = decelerate
#
# # we will reset some params to sensible values in a minute, so let's
# # not fuss with x, y, dx, dy now
# super(Shrapnel, self).__init__(
# matrix.width,
# matrix.height,
# 0, 0, 0, 0,
# huedelta=huedelta,
# saturation=saturation,
# radius=radius
# )
#
# super(Shrapnel, self).setBumpStrategy(self._pause, x=True, y=True)
#
# self.reset(matrix)
#
# def _pause(self, x=None, y=None):
# self.paused = True
#
# def reset(self, matrix):
# # the furthest distance any pen will have to travel is on the diagonal
# w, h = matrix.width, matrix.height
# maxDimension = sqrt(w*w + h*h)
#
# # slowest pens need to cover the distance in cycles time, but there may
# # be some that go faster
# velocity = maxDimension/(2.0*self.cycles) + 0.05*random()*maxDimension
#
# angle = random()*2*pi
# self.dx = velocity * sin(angle)
# self.dy = velocity * cos(angle)
#
# self.x = self.centerx
# self.y = self.centery
# self.paused = False
#
# def clock(self, matrix):
# super(Shrapnel, self).clock(matrix)
#
# # optionally slow over time
# # XXX: this may cause problems for larger spans?
# if self.decelerate:
# self.dx *= 0.99
# self.dy *= 0.99
#
# return self.paused
, which may include functions, classes, or code. Output only the next line. | self.shrapnel = [Shrapnel(matrix, cycles, saturation=0.2) |
Predict the next line for this snippet: <|code_start|> self.color = color
self.mid = matrix.height/2
self.x = x
self.y = 0
def vote(self, value):
if self.x == value:
return True
if self.y > self.mid:
return False
if self.x-1 <= value <= self.x+1:
return True
return False
def next(self, matrix):
matrix.drawPixel(self.x, self.y, self.color)
self.y += 1
return self.y < matrix.height
class Art(ArtBaseClass):
description = "Pixels floating upwards with a trail"
def __init__(self, matrix, config):
self.count = matrix.width/3
<|code_end|>
with the help of current file imports:
from ._baseclass import ArtBaseClass
from random import randint, random
from opc.colors import BLACK
from opc.hue import getColorGen
and context from other files:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
, which may contain function names, class names, or code. Output only the next line. | self.color = getColorGen(step=0.015, hue=random()) |
Given the code snippet: <|code_start|>
REPEATS = 4
class BarSet(object):
def __init__(self, matrix, horizontal):
# increments are split into primary and secondary. primary items(p)
# increase on subsequent calls, secondary increments happen within
# a call to draw a row.
if horizontal:
self.pdx, self.pdy = 0, 1
self.sdx, self.sdy = 1, 0
else:
self.pdx, self.pdy = 1, 0
self.sdx, self.sdy = 0, 1
if self._cointoss(0.1):
self.color = WHITE
else:
<|code_end|>
, generate the next line using the imports in this file:
from ._baseclass import ArtBaseClass
from opc.colors import WHITE
from opc.hue import hsvToRgb
from math import sqrt
from random import random
and context (functions, classes, or occasionally code) from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | self.color = hsvToRgb(random(), self._stepped(), self._stepped()) |
Predict the next line after this snippet: <|code_start|>
SCALE = 4
class Art(ArtBaseClass):
description = "Demo image rotation"
def __init__(self, matrix, config):
<|code_end|>
using the current file's imports:
from ._baseclass import ArtBaseClass
from math import sqrt
from random import random
from opc.hue import getColorGen
and any relevant context from other files:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
. Output only the next line. | self.hue = getColorGen(0.006) |
Next line prediction: <|code_start|>
class Art(ArtBaseClass):
description = "Slow transition of hues across the display"
def __init__(self, matrix, config):
self.base = 0
def start(self, matrix):
pass
def refresh(self, matrix):
self.base += 4
h = matrix.height - 1
for x in range(matrix.width):
hue = (self.base+32*x) / (sqrt(matrix.numpix) * 64.0)
for y in range(matrix.height):
sat = min(1, 0.25 + (1.5*y)/h)
val = min(1, 0.25 + (1.5*(h-y)/h))
<|code_end|>
. Use current file imports:
(from ._baseclass import ArtBaseClass
from math import sqrt
from opc.hue import hsvToRgb)
and context including class names, function names, or small code snippets from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | matrix.drawPixel(x, y, hsvToRgb(hue, sat, val)) |
Given snippet: <|code_start|> self.mandel = Mandelbrot(matrix.width, matrix.height, ITERSTEPS)
# this gives a pretty good view of the artifact at max zoom
self.origin = Region(-2.0, -1.5, 1.0, 1.5)
self._begin(matrix)
self.i = 0
def start(self, matrix):
matrix.hq()
def _begin(self, matrix):
self.target = copy(self.origin)
self.current = copy(self.target)
self.delta = Region(0.000000, 0.000000, -0.093750, -0.093750)
self.stateExecute = self._drawBig
self.stepsDown = 0
self._render(matrix, self.target)
def _render(self, matrix, target):
grid = self.mandel.draw(target)
matrix.clear()
for x in range(matrix.width):
for y in range(matrix.height):
point = grid[x][y]
if point is not None:
hue = (0.0+point)/self.mandel.maxsteps
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from ._baseclass import ArtBaseClass
from opc.hue import hsvToRgb
from opc.matrix import HQ
from .utils.fractools import Mandelbrot, Region
from copy import copy
and context:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# Path: opc/matrix/hq.py
# class HQ(object):
#
# """
# use the HQ class to savely wrap art init blocks when you need to
# switch on HQ for set-up purposes. For example:
#
# from opc.matrix import HQ
#
#
# class Art(object):
#
# def __init__(self, matrix):
#
# with HQ(matrix):
# initialization stuff...
# """
#
# def __init__(self, matrix):
# self.matrix = matrix
#
# def __enter__(self):
# self.matrix.hq()
#
# def __exit__(self, type, value, traceback):
# self.matrix.hq(False)
which might include code, classes, or functions. Output only the next line. | matrix.drawPixel(x, y, hsvToRgb(hue)) |
Predict the next line after this snippet: <|code_start|>
ZOOMSTEPS = 24
ITERSTEPS = 30
DEBUG = False
class Art(ArtBaseClass):
description = "Auto-zooming mandelbrot"
def __init__(self, matrix, config):
<|code_end|>
using the current file's imports:
from ._baseclass import ArtBaseClass
from opc.hue import hsvToRgb
from opc.matrix import HQ
from .utils.fractools import Mandelbrot, Region
from copy import copy
and any relevant context from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# Path: opc/matrix/hq.py
# class HQ(object):
#
# """
# use the HQ class to savely wrap art init blocks when you need to
# switch on HQ for set-up purposes. For example:
#
# from opc.matrix import HQ
#
#
# class Art(object):
#
# def __init__(self, matrix):
#
# with HQ(matrix):
# initialization stuff...
# """
#
# def __init__(self, matrix):
# self.matrix = matrix
#
# def __enter__(self):
# self.matrix.hq()
#
# def __exit__(self, type, value, traceback):
# self.matrix.hq(False)
. Output only the next line. | with HQ(matrix): |
Given the following code snippet before the placeholder: <|code_start|> colormap cells
"""
steps = float(index1) - index0
delta = [(color1[gun] - color0[gun]) / steps for gun in range(3)]
for index in range(index0, index1):
c = [color0[gun] + delta[gun] * (index-index0) for gun in range(3)]
self.cmap[index] = c
def convert(self, point, scale=None):
"""
Get the color associated with the given index. This method allows for
scaling up or down, in the case where the desired range is either
bigger or smaller than the colormap
"""
if scale:
point = point * self.size/scale
index = int(min(self.size-1, max(0, point)))
return self.cmap[index]
def apply(self, data, scale=None):
scale = self.size-1 if scale is None else scale
return self.cmap[(data*scale).astype(np.int)]
def soften(self, neighbors=1):
"""
Use inverse distance weighting to soften the transitions between colors
in the map, looking out by NEIGHBORS entries left and right.
"""
<|code_end|>
, predict the next line using imports from the current file:
from .utils import idw
from exceptions import AttributeError
import numpy as np
and context including class names, function names, and sometimes code from other files:
# Path: opc/utils/idw.py
# def idw(sample, base, maxdist):
# """
# A rudimentary implementation of inverse distance weighting.
# for a given point in a series, examine that points neighbors
# with more distant values having an increasing diminished
# impact on the result.
#
# Start off with the most distant pair, taking the average. Then
# this value is half as significant as the next-closest pair
# in the series. Repeat until we reach the point itself, in
# which case this has double weighting (-0 and +0 being the same
# thing.
# """
#
# default = sample[base]
# total = None
#
# for distance in range(maxdist, -1, -1):
# samples = (
# _relative(sample, base, distance, default),
# _relative(sample, base, -distance, default)
# )
#
# if total is None:
# total = sum(samples)/2
# else:
# total = (total + sum(samples)/2)/2
#
# return total
. Output only the next line. | self.cmap = idw.soften_2d(self.cmap, neighbors) |
Continue the code snippet: <|code_start|>
class Art(ArtBaseClass):
description = "Rolling sine wave marks border between contrasting colors"
def __init__(self, matrix, config):
<|code_end|>
. Use current file imports:
from ._baseclass import ArtBaseClass
from opc.hue import getColorGen
import math
and context (classes, functions, or code) from other files:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
. Output only the next line. | self.hue1 = getColorGen(step=0.00001, hue=0.0) |
Predict the next line after this snippet: <|code_start|> field = np.empty((matrix.width, matrix.height))
# linearize x and y coordinates for regions
xs = np.array([region["x"] for region in self.regions])
ys = np.array([region["y"] for region in self.regions])
# find closest region to eack pixel
for y in range(matrix.height):
for x in range(matrix.width):
c=self._closest(xs, ys, x, y)
field[x,y] = self.huebase+c/self.count
# update region coordinates
self.regions = [{
"dx": self._bounce(region["x"], region["dx"], matrix.width-1),
"dy": self._bounce(region["y"], region["dy"], matrix.height-1),
"x": region["x"]+region["dx"],
"y": region["y"]+region["dy"],
} for region in self.regions]
# blend old and new
if self.field is not None:
self.field = (self.field*0.3) + (field*0.7)
else:
self.field = field
# draaw
for y in range(matrix.height):
for x in range(matrix.width):
<|code_end|>
using the current file's imports:
from ._baseclass import ArtBaseClass
from opc.hue import hsvToRgb
import numpy as np
import random
import math
and any relevant context from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | matrix.drawPixel(x, y, hsvToRgb(self.field[x,y], 1, 1)) |
Using the snippet: <|code_start|>
DTYPE = np.uint8
class Filter(object):
@timefunc
def maskbelow(self, thresh, color):
"""
Set (r, g, b) values below an average value of thresh
to value
"""
keys = np.mean(self.buf.buf,2)<thresh
self.buf.buf[keys] = color
@timefunc
def maskabove(self, thresh, color):
"""
Set (r, g, b) values above an average value of thresh
to value
"""
keys = self.buf.buf[np.mean(self.buf.buf,2)>thresh]
self.buf.buf[keys] = color
@timefunc
def shift(self, dh=1.0, ds=1.0, dv=1.0):
"""
Shift any of hue, saturation, and value on the matrix, specifying
the attributes that you'd like to adjust
"""
<|code_end|>
, determine the next line of code. You have imports:
import numpy as np
from ..nphue import rgb_to_hsv, hsv_to_rgb
from ..utils.prof import timefunc
and context (class names, function names, or code) available:
# Path: opc/nphue.py
# @timefunc
# def rgb_to_hsv(rgb):
# # Translated from source of colorsys.rgb_to_hsv
# # r,g,b should be a numpy arrays with values between 0 and 255
# # rgb_to_hsv returns an array of floats between 0.0 and 1.0.
# rgb = rgb.astype('float')
# hsv = np.zeros_like(rgb)
# # in case an RGBA array was passed, just copy the A channel
# hsv[..., 3:] = rgb[..., 3:]
# r, g, b = rgb[..., 0], rgb[..., 1], rgb[..., 2]
# maxc = np.max(rgb[..., :3], axis=-1)
# minc = np.min(rgb[..., :3], axis=-1)
# hsv[..., 2] = maxc
# mask = maxc != minc
# hsv[mask, 1] = (maxc - minc)[mask] / maxc[mask]
# rc = np.zeros_like(r)
# gc = np.zeros_like(g)
# bc = np.zeros_like(b)
# rc[mask] = (maxc - r)[mask] / (maxc - minc)[mask]
# gc[mask] = (maxc - g)[mask] / (maxc - minc)[mask]
# bc[mask] = (maxc - b)[mask] / (maxc - minc)[mask]
# hsv[..., 0] = np.select(
# [r == maxc, g == maxc],
# [bc - gc, 2.0 + rc - bc], default=4.0 + gc - rc)
# hsv[..., 0] = (hsv[..., 0] / 6.0) % 1.0
# return hsv
#
# @timefunc
# def hsv_to_rgb(hsv):
# # Translated from source of colorsys.hsv_to_rgb
# # h,s should be a numpy arrays with values between 0.0 and 1.0
# # v should be a numpy array with values between 0.0 and 255.0
# # hsv_to_rgb returns an array of uints between 0 and 255.
# rgb = np.empty_like(hsv)
# rgb[..., 3:] = hsv[..., 3:]
# h, s, v = hsv[..., 0], hsv[..., 1], hsv[..., 2]
# i = (h * 6.0).astype('uint8')
# f = (h * 6.0) - i
# p = v * (1.0 - s)
# q = v * (1.0 - s * f)
# t = v * (1.0 - s * (1.0 - f))
# i = i % 6
# conditions = [s == 0.0, i == 1, i == 2, i == 3, i == 4, i == 5]
# rgb[..., 0] = np.select(conditions, [v, q, p, p, t, v], default=v)
# rgb[..., 1] = np.select(conditions, [v, v, v, q, p, p], default=t)
# rgb[..., 2] = np.select(conditions, [v, p, t, v, v, q], default=p)
# return rgb.astype('uint8')
#
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
. Output only the next line. | hsv = rgb_to_hsv(self.buf.buf) |
Here is a snippet: <|code_start|>
class Filter(object):
@timefunc
def maskbelow(self, thresh, color):
"""
Set (r, g, b) values below an average value of thresh
to value
"""
keys = np.mean(self.buf.buf,2)<thresh
self.buf.buf[keys] = color
@timefunc
def maskabove(self, thresh, color):
"""
Set (r, g, b) values above an average value of thresh
to value
"""
keys = self.buf.buf[np.mean(self.buf.buf,2)>thresh]
self.buf.buf[keys] = color
@timefunc
def shift(self, dh=1.0, ds=1.0, dv=1.0):
"""
Shift any of hue, saturation, and value on the matrix, specifying
the attributes that you'd like to adjust
"""
hsv = rgb_to_hsv(self.buf.buf)
mod = hsv * np.array([dh, ds, dv])
<|code_end|>
. Write the next line using the current file imports:
import numpy as np
from ..nphue import rgb_to_hsv, hsv_to_rgb
from ..utils.prof import timefunc
and context from other files:
# Path: opc/nphue.py
# @timefunc
# def rgb_to_hsv(rgb):
# # Translated from source of colorsys.rgb_to_hsv
# # r,g,b should be a numpy arrays with values between 0 and 255
# # rgb_to_hsv returns an array of floats between 0.0 and 1.0.
# rgb = rgb.astype('float')
# hsv = np.zeros_like(rgb)
# # in case an RGBA array was passed, just copy the A channel
# hsv[..., 3:] = rgb[..., 3:]
# r, g, b = rgb[..., 0], rgb[..., 1], rgb[..., 2]
# maxc = np.max(rgb[..., :3], axis=-1)
# minc = np.min(rgb[..., :3], axis=-1)
# hsv[..., 2] = maxc
# mask = maxc != minc
# hsv[mask, 1] = (maxc - minc)[mask] / maxc[mask]
# rc = np.zeros_like(r)
# gc = np.zeros_like(g)
# bc = np.zeros_like(b)
# rc[mask] = (maxc - r)[mask] / (maxc - minc)[mask]
# gc[mask] = (maxc - g)[mask] / (maxc - minc)[mask]
# bc[mask] = (maxc - b)[mask] / (maxc - minc)[mask]
# hsv[..., 0] = np.select(
# [r == maxc, g == maxc],
# [bc - gc, 2.0 + rc - bc], default=4.0 + gc - rc)
# hsv[..., 0] = (hsv[..., 0] / 6.0) % 1.0
# return hsv
#
# @timefunc
# def hsv_to_rgb(hsv):
# # Translated from source of colorsys.hsv_to_rgb
# # h,s should be a numpy arrays with values between 0.0 and 1.0
# # v should be a numpy array with values between 0.0 and 255.0
# # hsv_to_rgb returns an array of uints between 0 and 255.
# rgb = np.empty_like(hsv)
# rgb[..., 3:] = hsv[..., 3:]
# h, s, v = hsv[..., 0], hsv[..., 1], hsv[..., 2]
# i = (h * 6.0).astype('uint8')
# f = (h * 6.0) - i
# p = v * (1.0 - s)
# q = v * (1.0 - s * f)
# t = v * (1.0 - s * (1.0 - f))
# i = i % 6
# conditions = [s == 0.0, i == 1, i == 2, i == 3, i == 4, i == 5]
# rgb[..., 0] = np.select(conditions, [v, q, p, p, t, v], default=v)
# rgb[..., 1] = np.select(conditions, [v, v, v, q, p, p], default=t)
# rgb[..., 2] = np.select(conditions, [v, p, t, v, v, q], default=p)
# return rgb.astype('uint8')
#
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
, which may include functions, classes, or code. Output only the next line. | rgb = hsv_to_rgb(mod) |
Here is a snippet: <|code_start|>
class Art(ArtBaseClass):
description = "And then it exploded..."
PAUSE_CYCLES = 10
def __init__(self, matrix, config):
self.pause = 0
self.pieces = int(sqrt(matrix.numpix))
cycles = int(sqrt(matrix.numpix)*2)
<|code_end|>
. Write the next line using the current file imports:
from ._baseclass import ArtBaseClass
from .utils.shrapnel import Shrapnel
from math import sqrt
and context from other files:
# Path: art/utils/shrapnel.py
# class Shrapnel(Pen):
# def __init__(self, matrix, motion_cycles, huedelta=0.001, saturation=1,
# radius=0, decelerate=False):
# self.centerx = matrix.width/2.0
# self.centery = matrix.height/2.0
# self.cycles = motion_cycles
# self.decelerate = decelerate
#
# # we will reset some params to sensible values in a minute, so let's
# # not fuss with x, y, dx, dy now
# super(Shrapnel, self).__init__(
# matrix.width,
# matrix.height,
# 0, 0, 0, 0,
# huedelta=huedelta,
# saturation=saturation,
# radius=radius
# )
#
# super(Shrapnel, self).setBumpStrategy(self._pause, x=True, y=True)
#
# self.reset(matrix)
#
# def _pause(self, x=None, y=None):
# self.paused = True
#
# def reset(self, matrix):
# # the furthest distance any pen will have to travel is on the diagonal
# w, h = matrix.width, matrix.height
# maxDimension = sqrt(w*w + h*h)
#
# # slowest pens need to cover the distance in cycles time, but there may
# # be some that go faster
# velocity = maxDimension/(2.0*self.cycles) + 0.05*random()*maxDimension
#
# angle = random()*2*pi
# self.dx = velocity * sin(angle)
# self.dy = velocity * cos(angle)
#
# self.x = self.centerx
# self.y = self.centery
# self.paused = False
#
# def clock(self, matrix):
# super(Shrapnel, self).clock(matrix)
#
# # optionally slow over time
# # XXX: this may cause problems for larger spans?
# if self.decelerate:
# self.dx *= 0.99
# self.dy *= 0.99
#
# return self.paused
, which may include functions, classes, or code. Output only the next line. | self.shrapnel = [Shrapnel(matrix, cycles, decelerate=True) |
Based on the snippet: <|code_start|>
class Art(ArtBaseClass):
description = "Rain falling down the display"
def __init__(self, matrix, config):
<|code_end|>
, predict the immediate next line with the help of imports:
from ._baseclass import ArtBaseClass
from random import random, randrange
from math import sqrt
from .utils.pen import Pen
from opc.matrix import HQ
and context (classes, functions, sometimes code) from other files:
# Path: opc/matrix/hq.py
# class HQ(object):
#
# """
# use the HQ class to savely wrap art init blocks when you need to
# switch on HQ for set-up purposes. For example:
#
# from opc.matrix import HQ
#
#
# class Art(object):
#
# def __init__(self, matrix):
#
# with HQ(matrix):
# initialization stuff...
# """
#
# def __init__(self, matrix):
# self.matrix = matrix
#
# def __enter__(self):
# self.matrix.hq()
#
# def __exit__(self, type, value, traceback):
# self.matrix.hq(False)
. Output only the next line. | with HQ(matrix): |
Given the code snippet: <|code_start|>
class Flow(ArtBaseClass):
huecount = (8+1) # the addition covers offscreen for recoloring
def __init__(self, matrix, config):
self.base = 0
self.offset = 0
self.usecount = []
self.blocksize = int(matrix.numpix/(self.huecount-1))
self.hues = []
for hue in range(0, self.huecount):
self.hues.append(random())
self.sinlut = []
for i in range(0, self.blocksize):
s = sin(pi*i/(self.blocksize-1))
self.sinlut.append(s*s*s)
def start(self, matrix):
pass
def _color(self):
self.offset += 1
if self.contiguous:
hue = 0.0 + (self.offset+2*self.base) % 256
<|code_end|>
, generate the next line using the imports in this file:
from .. _baseclass import ArtBaseClass
from opc.hue import hsvToRgb
from random import random
from math import sin, pi
and context (functions, classes, or occasionally code) from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | return hsvToRgb(hue/255) |
Predict the next line after this snippet: <|code_start|>
class Art(Barber):
description = "Barber-pole-esque (clean)"
def _line(self, matrix, x1, x2, hue):
<|code_end|>
using the current file's imports:
from .baseclasses.barber import Barber
from opc.hue import hsvToRgb
and any relevant context from other files:
# Path: art/baseclasses/barber.py
# class Barber(ArtBaseClass):
#
# def __init__(self, matrix, config):
# self.width = int(matrix.width/SEGMENTS)
# self.hue = getHueGen(step=0.01)
#
# def start(self, matrix):
# pass
#
# def _line(self, matrix, x1, x2, hue):
# raise NotImplementedError
#
# def refresh(self, matrix):
# matrix.scroll("up")
# matrix.scroll("right")
#
# hue = next(self.hue)
# for segment in range(SEGMENTS):
# self._line(
# matrix,
# self.width*segment,
# self.width*(segment+1),
# hue+(HUEDELTA*segment)
# )
#
# def interval(self):
# return 120
#
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
. Output only the next line. | color = hsvToRgb(hue, 1, 1) |
Predict the next line for this snippet: <|code_start|>
logger = logging.getLogger(__name__)
class Pixel(object):
@timefunc
def setStripPixel(self, z, color):
"""
Exposed helper method that sets a given pixel in the unrolled strip
of LEDs.
"""
x = int(z / self.height)
y = int(z % self.height)
self.buf.buf[x, y] = color
@timefunc
def getPixel(self, x, y, wrap=False):
"""
Retrieve the color tuple of the pixel from the specified location
"""
if wrap:
x = x % self.width
y = y % self.height
return self.buf.buf[x, y]
@timefunc
<|code_end|>
with the help of current file imports:
import numpy as np
import logging
from ..utils.prof import timefunc
from ..utils.wrapexception import wrapexception
and context from other files:
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
#
# Path: opc/utils/wrapexception.py
# def wrapexception(func, logger=logging):
#
# def decorator(func):
#
# @wraps(func)
# def wrapper(*args, **kwargs):
# self = args[0]
# try:
# r = func(*args, **kwargs)
# except IndexError as e:
# if self.debug:
# logging.error("Bounds Error: "+str(e))
# raise e
#
# return wrapper
#
# return decorator
, which may contain function names, class names, or code. Output only the next line. | @wrapexception(logger) |
Based on the snippet: <|code_start|>
DELTA_Z = 0.02
class Art(ArtBaseClass):
description = "Lissajous figures"
def __init__(self, matrix, config):
<|code_end|>
, predict the immediate next line with the help of imports:
from ._baseclass import ArtBaseClass
from opc.hue import getColorGen
from math import sin, cos, pi
from .utils.frange import frange
and context (classes, functions, sometimes code) from other files:
# Path: opc/hue.py
# def getColorGen(step=0.05, hue=0, sat=1, val=1):
# """
# Generator that returns a stream of shades as colors
# """
# hue = getHueGen(step, hue)
# while True:
# yield hsvToRgb(next(hue), sat, val)
. Output only the next line. | self.color = getColorGen(0.01) |
Predict the next line for this snippet: <|code_start|>
HSCALE = 0.01
RESET_INTERVAL = 20
class Art(object):
description = "Conway's Game of Life"
def __init__(self, matrix, config):
self._interval = 300 if math.sqrt(matrix.numpix) < 20 else 75
self.hue = random.random()
self._init(matrix)
def start(self, matrix):
pass
def _init(self, matrix):
self.lifes = numpy.empty([matrix.width, matrix.height])
self.prior = numpy.copy(self.lifes)
for y in range(matrix.height):
for x in range(matrix.width):
self.lifes[x, y] = bool(random.getrandbits(1))
self.reset_counter = 0
def _hue(self, offset):
<|code_end|>
with the help of current file imports:
from opc.hue import hsvToRgb
import math
import random
import numpy
and context from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
, which may contain function names, class names, or code. Output only the next line. | return hsvToRgb(self.hue+HSCALE*offset) |
Continue the code snippet: <|code_start|> self._sampleDiamond(x, y + halfstep, stepsize,
self._rand() * scale)
def generate(self):
samplesize = self.featureSize
scale = 1.0
# seed initial values
for y in range(0, self.height, samplesize):
for x in range(0, self.width, samplesize):
self._setSample(x, y, self._rand())
# iterate through the intermediate spaces
while samplesize > 1:
self._diamondSquare(samplesize, scale)
samplesize /= 2
scale /= 2.0
@timefunc
def translate(self, matrix, hue=None, colormap=None):
if hue is None and colormap is None:
raise AttributeError("Need either a hue or colormap")
vmin = np.min(self.values)
vmax = np.max(self.values)
values = (self.values-vmin)/(vmax-vmin)
if colormap is None:
for x in range(self.width):
for y in range(self.height):
<|code_end|>
. Use current file imports:
from opc.hue import hsvToRgb
from opc.nphue import h_to_rgb
from random import random
from exceptions import AttributeError
from art.utils.array import array
from opc.utils.prof import timefunc
import numpy as np
import sys
and context (classes, functions, or code) from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# Path: opc/nphue.py
# @timefunc
# def h_to_rgb(h, sat=1, val=255.0):
# # Local variation of hsv_to_rgb that only cares about a variable
# # hue, with (s,v) assumed to be constant
# # h should be a numpy array with values between 0.0 and 1.0
# # hsv_to_rgb returns an array of uints between 0 and 255.
# s = np.full_like(h, sat)
# v = np.full_like(h, val)
# hsv = np.dstack((h, s, v))
#
# return hsv_to_rgb(hsv)
#
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
. Output only the next line. | color = hsvToRgb(hue+values[x, y]/5, 1, values[x, y]) |
Predict the next line after this snippet: <|code_start|> stepsize = int(stepsize)
halfstep = int(stepsize/2)
for y in range(0, self.height+halfstep, stepsize):
for x in range(0, self.width+halfstep, stepsize):
self._sampleSquare(halfstep+x, halfstep+y,
stepsize, self._rand() * scale)
for y in range(0, self.height, stepsize):
for x in range(0, self.width, stepsize):
self._sampleDiamond(x + halfstep, y, stepsize,
self._rand() * scale)
self._sampleDiamond(x, y + halfstep, stepsize,
self._rand() * scale)
def generate(self):
samplesize = self.featureSize
scale = 1.0
# seed initial values
for y in range(0, self.height, samplesize):
for x in range(0, self.width, samplesize):
self._setSample(x, y, self._rand())
# iterate through the intermediate spaces
while samplesize > 1:
self._diamondSquare(samplesize, scale)
samplesize /= 2
scale /= 2.0
<|code_end|>
using the current file's imports:
from opc.hue import hsvToRgb
from opc.nphue import h_to_rgb
from random import random
from exceptions import AttributeError
from art.utils.array import array
from opc.utils.prof import timefunc
import numpy as np
import sys
and any relevant context from other files:
# Path: opc/hue.py
# @mwt(timeout=20)
# @timefunc
# def hsvToRgb(h, s=1.0, v=1.0, rainbow=True):
# """
# Convert a (h, s, v) value to the (r, g, b) color space. By default,
# we use a more cpu-intense method to curve the hue ramps above the
# typical linear, with a view to evening out the visible range of yellow,
# cyan, and purple.
#
# This method takes h, s, and v values in range 0..1 and returns r, g,
# and b values in range 0..255.
# """
# if rainbow:
# return [sin(c*pi/2)*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# return [c*255 for c in colorsys.hsv_to_rgb(h, s, v)]
#
# Path: opc/nphue.py
# @timefunc
# def h_to_rgb(h, sat=1, val=255.0):
# # Local variation of hsv_to_rgb that only cares about a variable
# # hue, with (s,v) assumed to be constant
# # h should be a numpy array with values between 0.0 and 1.0
# # hsv_to_rgb returns an array of uints between 0 and 255.
# s = np.full_like(h, sat)
# v = np.full_like(h, val)
# hsv = np.dstack((h, s, v))
#
# return hsv_to_rgb(hsv)
#
# Path: opc/utils/prof.py
# def timefunc(f, reference=False):
# global records
#
# records[f.__name__] = Record(f.__name__, reference)
#
# @wraps(f)
# def f_timer(*args, **kwargs):
# global active
#
# if not active:
# return f(*args, **kwargs)
#
# start = time.time()
# result = f(*args, **kwargs)
# end = time.time()
#
# records[f.__name__].addTime(end-start)
#
# return result
#
# return f_timer
. Output only the next line. | @timefunc |
Next line prediction: <|code_start|>
class Art(ArtBaseClass):
description = "Sparse Balistics"
def __init__(self, matrix, config):
<|code_end|>
. Use current file imports:
(from ._baseclass import ArtBaseClass
from .utils.fire import Gun)
and context including class names, function names, or small code snippets from other files:
# Path: art/utils/fire.py
# class Gun(object):
#
# def __init__(self, matrix):
# self.points = []
# self.expires = int(matrix.numpix/2)
# self.location = self._locationGenerator(matrix)
# self.hue = getHueGen(step=0.05, hue=randint(0, 100)/100.0)
#
# def _locationGenerator(self, matrix):
# x, y = 0, 0
#
# while True:
# for x in range(matrix.width-1):
# yield x, y, -1, 1
#
# for y in range(0, matrix.height-1):
# yield matrix.width-1, y, -1, -1
#
# for x in range(matrix.width-1, 0, -1):
# yield x, matrix.height-1, 1, -1
#
# for y in range(matrix.height-1, 0, -1):
# yield 0, y, 1, 1
#
# def fire(self, matrix):
# x, y, dx, dy = next(self.location)
#
# point = Point(x, y, dx, dy, next(self.hue))
# self.points.append(point)
#
# if len(self.points) > self.expires:
# point = self.points.pop(0)
# point.expire(matrix)
#
# for point in self.points:
# point.update(matrix, self.expires)
. Output only the next line. | self.gun = Gun(matrix) |
Given the code snippet: <|code_start|>
class Channel(object):
def __init__(self, matrix):
ones = np.ones(matrix.numpix).reshape((matrix.height, matrix.width))
self.x = ones*np.arange(matrix.width)
self.y = np.flipud(np.rot90(np.rot90(ones)*np.arange(matrix.height)))
self.delta = random()*0.5+0.2
def refresh(self, matrix):
self.x += self.delta
self.y += self.delta
c1 = self.x/16
c2 = self.y/8
c3 = (self.x+self.y)/16
c4 = np.sqrt((self.x*self.x)+(self.y*self.y))/8
channel = np.sum(np.sin(c) for c in (c1, c2, c3, c4))
return np.fmod(np.fabs(1+channel/2), 1.0)
class Art(ArtBaseClass):
description = "Plasma by RGB channel"
def __init__(self, matrix, config):
<|code_end|>
, generate the next line using the imports in this file:
from ._baseclass import ArtBaseClass
from opc.matrix import HQ
from random import random
import numpy as np
and context (functions, classes, or occasionally code) from other files:
# Path: opc/matrix/hq.py
# class HQ(object):
#
# """
# use the HQ class to savely wrap art init blocks when you need to
# switch on HQ for set-up purposes. For example:
#
# from opc.matrix import HQ
#
#
# class Art(object):
#
# def __init__(self, matrix):
#
# with HQ(matrix):
# initialization stuff...
# """
#
# def __init__(self, matrix):
# self.matrix = matrix
#
# def __enter__(self):
# self.matrix.hq()
#
# def __exit__(self, type, value, traceback):
# self.matrix.hq(False)
. Output only the next line. | with HQ(matrix): |
Based on the snippet: <|code_start|> """
default = sample[base]
total = None
for distance in range(maxdist, -1, -1):
samples = (
_relative(sample, base, distance, default),
_relative(sample, base, -distance, default)
)
if total is None:
total = sum(samples)/2
else:
total = (total + sum(samples)/2)/2
return total
def soften_1d(sample, maxdist):
"""
perform inverse distance weighting of values in a one dimensional array
"""
return [idw(sample, i, maxdist) for i in range(len(sample))]
def soften_2d(sample, maxdist):
"""
perform inverse distance weighting of values in a two dimensional array
"""
<|code_end|>
, predict the immediate next line with the help of imports:
from .gunroller import GunRoller
and context (classes, functions, sometimes code) from other files:
# Path: opc/utils/gunroller.py
# class GunRoller(object):
#
# def __init__(self, subject):
# self.shape = np.asarray(subject.shape)
# self.subject = subject
#
# def unroll(self):
# count = reduce(mul, self.shape, 1)
# reshaped = self.subject.reshape((int(count/3), 3))
# rotated = np.rot90(reshaped)
#
# return rotated
#
# def reroll(self, subject):
# unrotated = np.rot90(subject, 3)
# unshaped = unrotated.reshape(self.shape)
#
# return unshaped
. Output only the next line. | gr = GunRoller(sample) |
Using the snippet: <|code_start|>
class ScrollText(ArtBaseClass):
description = "Scroll text across the display"
fg = None
bg = None
def __init__(self, matrix, config):
self.config = config
self._initText()
self.thisMessage = self._getText()
self.nextMessage = self._getText()
<|code_end|>
, determine the next line of code. You have imports:
from .. _baseclass import ArtBaseClass
from opc.text import OPCText, typeface_bbc
and context (class names, function names, or code) available:
# Path: opc/text.py
# class WidthCache(object):
# class OPCText(object):
# def __init__(self, space):
# def add(self, char, width):
# def get(self, char):
# def __init__(self, typeface):
# def drawHalfChar(self, matrix, x, y, char, offset, fg, bg):
# def drawChar(self, matrix, x, y, c, fg, bg):
# def drawText(self, matrix, x, y, string, fg, bg):
. Output only the next line. | self.typeface = OPCText(typeface_bbc) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.