Instruction stringlengths 362 7.83k | output_code stringlengths 1 945 |
|---|---|
Predict the next line after this snippet: <|code_start|># Commented test cases are in PST, and aren't correctly handled with the
# naïve datetime objects used in the current code base
def test_ghdate_to_datetime():
eq_(string_to_datetime('2011/05/22 00:24:15 -0700'),
dt(2011, 5, 22, 0, 24, 15))
eq_(string_to_datetime('2009/04/18 13:04:09 -0700'),
dt(2009, 4, 18, 13, 4, 9))
#eq_(string_to_datetime('2009/11/12 21:15:17 -0800'),
# dt(2009, 11, 12, 21, 15, 17))
#eq_(string_to_datetime('2009/11/12 21:16:20 -0800'),
# dt(2009, 11, 12, 21, 16, 20))
eq_(string_to_datetime('2010/04/17 17:24:29 -0700'),
dt(2010, 4, 17, 17, 24, 29))
eq_(string_to_datetime('2010/05/18 06:10:36 -0700'),
dt(2010, 5, 18, 6, 10, 36))
eq_(string_to_datetime('2010/05/25 21:59:37 -0700'),
dt(2010, 5, 25, 21, 59, 37))
eq_(string_to_datetime('2010/05/26 17:08:41 -0700'),
dt(2010, 5, 26, 17, 8, 41))
eq_(string_to_datetime('2010/06/20 06:13:37 -0700'),
dt(2010, 6, 20, 6, 13, 37))
eq_(string_to_datetime('2010/07/28 12:56:51 -0700'),
dt(2010, 7, 28, 12, 56, 51))
eq_(string_to_datetime('2010/09/20 21:32:49 -0700'),
dt(2010, 9, 20, 21, 32, 49))
def test_datetime_to_ghdate():
<|code_end|>
using the current file's imports:
from datetime import datetime as dt
from nose.tools import eq_
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and any relevant context from other files:
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
. Output only the next line. | eq_(datetime_to_ghdate(dt(2011, 5, 22, 0, 24, 15)), |
Here is a snippet: <|code_start|> '2010/09/21 04:32:49 -0700')
def test_commitdate_to_datetime():
eq_(string_to_datetime('2011-05-22T00:24:15-07:00'),
dt(2011, 5, 22, 0, 24, 15))
eq_(string_to_datetime('2011-04-09T10:07:30-07:00'),
dt(2011, 4, 9, 10, 7, 30))
#eq_(string_to_datetime('2011-02-19T07:16:11-08:00'),
# dt(2011, 2, 19, 7, 16, 11))
#eq_(string_to_datetime('2010-12-21T12:34:27-08:00'),
# dt(2010, 12, 21, 12, 34, 27))
eq_(string_to_datetime('2011-04-09T10:20:05-07:00'),
dt(2011, 4, 9, 10, 20, 5))
eq_(string_to_datetime('2011-04-09T10:05:58-07:00'),
dt(2011, 4, 9, 10, 5, 58))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T10:00:21-07:00'),
dt(2011, 4, 9, 10, 0, 21))
#eq_(string_to_datetime('2010-12-16T15:10:59-08:00'),
# dt(2010, 12, 16, 15, 10, 59))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T09:53:00-07:00'),
dt(2011, 4, 9, 9, 53, 0))
def test_datetime_to_commitdate():
<|code_end|>
. Write the next line using the current file imports:
from datetime import datetime as dt
from nose.tools import eq_
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context from other files:
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
, which may include functions, classes, or code. Output only the next line. | eq_(datetime_to_commitdate(dt(2011, 5, 22, 0, 24, 15)), |
Given the following code snippet before the placeholder: <|code_start|> '2011-04-09T10:05:58-07:00')
eq_(datetime_to_commitdate(dt(2011, 4, 9, 9, 53, 0)),
'2011-04-09T09:53:00-07:00')
eq_(datetime_to_commitdate(dt(2011, 4, 9, 10, 0, 21)),
'2011-04-09T10:00:21-07:00')
#eq_(datetime_to_commitdate(dt(2010, 12, 16, 15, 10, 59)),
# '2010-12-16T15:10:59-08:00')
eq_(datetime_to_commitdate(dt(2011, 4, 9, 9, 53, 0)),
'2011-04-09T09:53:00-07:00')
eq_(datetime_to_commitdate(dt(2011, 4, 9, 9, 53, 0)),
'2011-04-09T09:53:00-07:00')
def test_isodate_to_datetime():
eq_(string_to_datetime('2011-05-22T00:24:15Z'), dt(2011, 5, 22, 0, 24, 15))
eq_(string_to_datetime('2011-04-09T10:07:30Z'), dt(2011, 4, 9, 10, 7, 30))
eq_(string_to_datetime('2011-02-19T07:16:11Z'), dt(2011, 2, 19, 7, 16, 11))
eq_(string_to_datetime('2010-12-21T12:34:27Z'),
dt(2010, 12, 21, 12, 34, 27))
eq_(string_to_datetime('2011-04-09T10:20:05Z'), dt(2011, 4, 9, 10, 20, 5))
eq_(string_to_datetime('2011-04-09T10:05:58Z'), dt(2011, 4, 9, 10, 5, 58))
eq_(string_to_datetime('2011-04-09T09:53:00Z'), dt(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T10:00:21Z'), dt(2011, 4, 9, 10, 0, 21))
eq_(string_to_datetime('2010-12-16T15:10:59Z'),
dt(2010, 12, 16, 15, 10, 59))
eq_(string_to_datetime('2011-04-09T09:53:00Z'), dt(2011, 4, 9, 9, 53, 0))
eq_(string_to_datetime('2011-04-09T09:53:00Z'), dt(2011, 4, 9, 9, 53, 0))
def test_datetime_to_isodate():
<|code_end|>
, predict the next line using imports from the current file:
from datetime import datetime as dt
from nose.tools import eq_
from github2.core import (datetime_to_ghdate, datetime_to_commitdate,
datetime_to_isodate, string_to_datetime)
and context including class names, function names, and sometimes code from other files:
# Path: github2/core.py
# @_handle_naive_datetimes
# def datetime_to_ghdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# return datetime_.strftime(GITHUB_DATE_FORMAT)
#
# @_handle_naive_datetimes
# def datetime_to_commitdate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param datetime datetime_: datetime object to convert
#
# """
# date_without_tz = datetime_.strftime(COMMIT_DATE_FORMAT)
# utcoffset = GITHUB_TZ.utcoffset(datetime_)
# hours, minutes = divmod(utcoffset.days * 86400 + utcoffset.seconds, 3600)
#
# return "".join([date_without_tz, "%+03d:%02d" % (hours, minutes)])
#
# def datetime_to_isodate(datetime_):
# """Convert Python datetime to GitHub date string.
#
# :param str datetime_: datetime object to convert
#
# .. note:: Supports naive and timezone-aware datetimes
# """
# if not datetime_.tzinfo:
# datetime_ = datetime_.replace(tzinfo=tz.tzutc())
# else:
# datetime_ = datetime_.astimezone(tz.tzutc())
# return "%sZ" % datetime_.isoformat()[:-6]
#
# def string_to_datetime(string):
# """Convert a string to Python datetime.
#
# :param str github_date: date string to parse
#
# """
# parsed = parser.parse(string)
# if NAIVE:
# parsed = parsed.replace(tzinfo=None)
# return parsed
. Output only the next line. | eq_(datetime_to_isodate(dt(2011, 5, 22, 0, 24, 15)), |
Given snippet: <|code_start|>
try:
except ImportError:
try:
except ImportError: # For Python <2.6
try:
except ImportError: # for Python <2.7
_binding = unittest2.TestCase('run')
assert_dict_contains_subset = _binding.assertDictContainsSubset # NOQA
assert_dict_equal = _binding.assertDictEqual # NOQA
def assert_params(first, second):
assert_dict_equal(first, parse_qs(second))
def assert_params_contain(first, second):
assert_dict_contains_subset(first, parse_qs(second))
class TestAuthEncode(unittest.TestCase):
"""Test processing of authentication data."""
def setUp(self):
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import unittest
import unittest2
from urllib.parse import parse_qs # For Python 3
from urlparse import parse_qs # NOQA
from cgi import parse_qs # NOQA
from nose.tools import (assert_dict_contains_subset, assert_dict_equal)
from github2 import request
and context:
# Path: github2/request.py
# DEFAULT_GITHUB_URL = "https://github.com"
# LOGGER = logging.getLogger('github2.request')
# _HTTPLIB2_BUNDLE = path.realpath(path.dirname(httplib2.CA_CERTS))
# SYSTEM_CERTS = not _HTTPLIB2_BUNDLE.startswith(path.dirname(httplib2.__file__))
# CA_CERTS = None
# CURL_CERTS = False
# CA_CERTS = cert_file
# SYSTEM_CERTS = True
# CA_CERTS = '/usr/local/share/certs/ca-root-nss.crt'
# SYSTEM_CERTS = True
# CA_CERTS = getenv('CURL_CA_BUNDLE')
# CURL_CERTS = True
# CA_CERTS = path.join(path.dirname(path.abspath(__file__)),
# "DigiCert_High_Assurance_EV_Root_CA.crt")
# def charset_from_headers(headers):
# def __init__(self, message, content, code):
# def __init__(self, username=None, api_token=None, url_prefix=None,
# requests_per_second=None, access_token=None,
# cache=None, proxy_host=None, proxy_port=None,
# github_url=None):
# def encode_authentication_data(self, extra_post_data):
# def get(self, *path_components):
# def post(self, *path_components, **extra_post_data):
# def put(self, *path_components, **extra_post_data):
# def delete(self, *path_components, **extra_post_data):
# def make_request(self, path, extra_post_data=None, method="GET"):
# def raw_request(self, url, extra_post_data, method="GET"):
# def http_headers(self):
# class GithubError(Exception):
# class HttpError(RuntimeError):
# class GithubRequest(object):
which might include code, classes, or functions. Output only the next line. | self.r = request.GithubRequest() |
Continue the code snippet: <|code_start|> token = auth_result[1]
else:
return None
token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
self.set_token(token)
return token['token_type'] + ' ' + token['access_token']
def set_token(self, token):
self.storage_strategy.store(token, self.get_evn_key())
def get_evn_key(self):
return hashlib.md5((self._handler.client_id + self._handler.api_url +
self._handler.apikey).encode('utf-8')).hexdigest()
class ApiAuthorization(object):
def __init__(self, handler):
self._handler = handler
def oauth_token(self):
result = self._handler.request(method='POST',
url='oauth/token',
data={'grant_type': 'client_credentials'},
oauth_request=True)
return result
def basic_token(self):
if self._handler.client_id is None or self._handler.apikey is None:
<|code_end|>
. Use current file imports:
import base64
import hashlib
import json
import stat
import os
import tempfile
import six
import time
import mangopay
from mangopay.exceptions import AuthenticationError
and context (classes, functions, or code) from other files:
# Path: mangopay/exceptions.py
# class AuthenticationError(APIError):
# pass
. Output only the next line. | raise AuthenticationError( |
Here is a snippet: <|code_start|> result.status_code,
result.headers,
result.text if hasattr(result, 'text') else result.content
)
self.read_response_headers(result.headers)
if result.status_code not in (requests.codes.ok, requests.codes.not_found,
requests.codes.created, requests.codes.accepted,
requests.codes.no_content):
self._create_apierror(result, url=url, data=truncated_data, method=method)
elif result.status_code == requests.codes.no_content:
return result, None
else:
if result.content:
try:
content = result.content
if six.PY3:
content = content.decode('utf-8')
return result, json.loads(content)
except ValueError:
if result.content.startswith(b'data='):
return result.content
self._create_decodeerror(result, url=url)
else:
self._create_decodeerror(result, url=url)
def init_rate_limits(self):
<|code_end|>
. Write the next line using the current file imports:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context from other files:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
, which may include functions, classes, or code. Output only the next line. | rate_limits = [RateLimit(15), RateLimit(30), RateLimit(60), RateLimit(24 * 60)] |
Given snippet: <|code_start|>
try:
except ImportError:
try:
except ImportError:
logger = logging.getLogger('mangopay')
requests_session = requests.Session()
rate_limits = None
class APIRequest(object):
def __init__(self, client_id=None, apikey=None, api_url=None, api_sandbox_url=None, sandbox=None,
timeout=30.0, storage_strategy=None, proxies=None):
global rate_limits
rate_limits = None
if (sandbox is None and mangopay.sandbox) or sandbox:
self.api_url = api_sandbox_url or mangopay.api_sandbox_url
else:
self.api_url = api_url or mangopay.api_url
self.client_id = client_id or mangopay.client_id
self.apikey = apikey or mangopay.apikey
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
which might include code, classes, or functions. Output only the next line. | self.auth_manager = AuthorizationTokenManager(self, storage_strategy) |
Predict the next line for this snippet: <|code_start|> url = self._absolute_url(url, encoded_params)
else:
url = '%s?%s' % (url, encoded_params)
if data or data == {}:
truncated_data = truncatechars(copy.copy(data))
data = json.dumps(data, default=lambda x: x.to_api_json())
logger.debug('DATA[IN -> %s]\n\t- headers: %s\n\t- content: %s', url, headers, truncated_data)
ts = time.time()
# signal:
request_started.send(url=url, data=truncated_data, headers=headers, method=method)
try:
result = requests_session.request(method, url,
data=data,
headers=headers,
timeout=self.timeout,
proxies=self.proxies)
except ConnectionError as e:
msg = '{}'.format(e)
if msg:
msg = '%s: %s' % (type(e).__name__, msg)
else:
msg = type(e).__name__
<|code_end|>
with the help of current file imports:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context from other files:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
, which may contain function names, class names, or code. Output only the next line. | reraise_as(APIError(msg)) |
Using the snippet: <|code_start|> request_error.send(url=url, status_code=status_code, headers=headers)
try:
content = result.json()
except ValueError:
content = None
raise APIError(text, code=status_code, content=content, headers=headers)
def _create_decodeerror(self, result, url=None):
text = result.text if hasattr(result, 'text') else result.content
status_code = result.status_code
headers = result.headers
logger.error('DECODE ERROR: status_code: %s | headers: %s | content: %s',
status_code,
headers,
text,
)
request_error.send(url=url, status_code=status_code, headers=headers)
try:
content = result.json()
except ValueError:
content = None
<|code_end|>
, determine the next line of code. You have imports:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context (class names, function names, or code) available:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
. Output only the next line. | raise DecodeError(text, |
Based on the snippet: <|code_start|> url = self._absolute_url(url, encoded_params)
else:
url = '%s?%s' % (url, encoded_params)
if data or data == {}:
truncated_data = truncatechars(copy.copy(data))
data = json.dumps(data, default=lambda x: x.to_api_json())
logger.debug('DATA[IN -> %s]\n\t- headers: %s\n\t- content: %s', url, headers, truncated_data)
ts = time.time()
# signal:
request_started.send(url=url, data=truncated_data, headers=headers, method=method)
try:
result = requests_session.request(method, url,
data=data,
headers=headers,
timeout=self.timeout,
proxies=self.proxies)
except ConnectionError as e:
msg = '{}'.format(e)
if msg:
msg = '%s: %s' % (type(e).__name__, msg)
else:
msg = type(e).__name__
<|code_end|>
, predict the immediate next line with the help of imports:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context (classes, functions, sometimes code) from other files:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
. Output only the next line. | reraise_as(APIError(msg)) |
Given the following code snippet before the placeholder: <|code_start|> if is_mangopay_request:
headers['User-Agent'] = 'MangoPay V2 Python/' + str(mangopay.package_version)
if oauth_request:
headers['Authorization'] = self.auth_manager.basic_token()
headers['Content-Type'] = 'application/x-www-form-urlencoded'
else:
headers['Authorization'] = self.auth_manager.get_token()
headers['Content-Type'] = 'application/json'
if idempotency_key:
headers['Idempotency-Key'] = idempotency_key
else:
if "data_XXX" in params:
params[str("data")] = params[str("data_XXX")]
params.__delitem__(str("data_XXX"))
headers['Content-Type'] = 'application/x-www-form-urlencoded'
truncated_data = None
encoded_params = urlrequest.urlencode(params)
if is_mangopay_request:
if oauth_request:
url = self.api_url + url
else:
url = self._absolute_url(url, encoded_params)
else:
url = '%s?%s' % (url, encoded_params)
if data or data == {}:
<|code_end|>
, predict the next line using imports from the current file:
import requests
import time
import logging
import six
import copy
import mangopay
import urllib.parse as urlrequest
import urllib as urlrequest
import simplejson as json
import json
from mangopay.ratelimit import RateLimit
from mangopay.auth import AuthorizationTokenManager
from .exceptions import APIError, DecodeError
from .signals import request_finished, request_started, request_error
from .utils import reraise_as, truncatechars
from requests.exceptions import ConnectionError, ConnectTimeout, Timeout
and context including class names, function names, and sometimes code from other files:
# Path: mangopay/ratelimit.py
# class RateLimit:
#
# interval_minutes = 0
# calls_made = 0
# calls_remaining = 0
# reset_time_millis = 0
#
# def __init__(self, interval_minutes):
# self.interval_minutes = interval_minutes
#
# Path: mangopay/auth.py
# class AuthorizationTokenManager(object):
#
# def __init__(self, handler, storage_strategy):
# self._handler = handler
# self.authorization = ApiAuthorization(handler)
# if storage_strategy:
# self.storage_strategy = storage_strategy
# else:
# self.storage_strategy = StaticStorageStrategy()
#
# def basic_token(self):
# return self.authorization.basic_token()
#
# def get_token(self):
# try:
# token = self.storage_strategy.get(self.get_evn_key())
# except KeyError:
# token = None
#
# if not token or not token['timestamp'] or token['timestamp'] <= time.time():
# auth_result = self.authorization.oauth_token()
# if auth_result[0].status_code == 200:
# token = auth_result[1]
# else:
# return None
# token['timestamp'] = time.time() + (int(token['expires_in']) - 10)
# self.set_token(token)
#
# return token['token_type'] + ' ' + token['access_token']
#
# def set_token(self, token):
# self.storage_strategy.store(token, self.get_evn_key())
#
# def get_evn_key(self):
# return hashlib.md5((self._handler.client_id + self._handler.api_url +
# self._handler.apikey).encode('utf-8')).hexdigest()
#
# Path: mangopay/exceptions.py
# class APIError(Exception):
# def __init__(self, *args, **kwargs):
# self.code = kwargs.pop('code', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
# self.headers = kwargs.pop('headers', None)
#
# super(APIError, self).__init__(*args, **kwargs)
#
# class DecodeError(APIError):
# def __init__(self, *args, **kwargs):
# self.body = kwargs.pop('body', None)
# self.headers = kwargs.pop('headers', None)
# self.url = kwargs.pop('url', None)
# self.content = kwargs.pop('content', None)
#
# super(DecodeError, self).__init__(*args, **kwargs)
#
# Path: mangopay/utils.py
# def reraise_as(new_exception_or_type):
# """
# Obtained from https://github.com/dcramer/reraise/blob/master/src/reraise.py
# >>> try:
# >>> do_something_crazy()
# >>> except Exception:
# >>> reraise_as(UnhandledException)
# """
# __traceback_hide__ = True # NOQA
#
# e_type, e_value, e_traceback = sys.exc_info()
#
# if inspect.isclass(new_exception_or_type):
# new_type = new_exception_or_type
# new_exception = new_exception_or_type()
# else:
# new_type = type(new_exception_or_type)
# new_exception = new_exception_or_type
#
# new_exception.__cause__ = e_value
#
# try:
# six.reraise(new_type, new_exception, e_traceback)
# finally:
# del e_traceback
#
# def truncatechars(value, length=255):
# if isinstance(value, dict):
# for k, v in value.items():
# value[k] = truncatechars(v)
# elif isinstance(value, six.string_types):
# return (value[:length] + '...') if len(value) > length else value
#
# return value
. Output only the next line. | truncated_data = truncatechars(copy.copy(data)) |
Using the snippet: <|code_start|>def add_camelcase_aliases(cls):
for name in cls().__dict__.keys():
if name[0] == '_':
continue
setattr(cls, name.title().replace('_', ''), AliasProperty(name))
return cls
@add_camelcase_aliases
@python_2_unicode_compatible
class Money(object):
__hash__ = None
def __init__(self, amount="0", currency=None):
try:
self.amount = decimal.Decimal(amount)
except decimal.InvalidOperation:
raise ValueError("amount value could not be converted to "
"Decimal(): '{}'".format(amount))
self.currency = currency
def __repr__(self):
return "{} {}".format(self.currency, self.amount)
def __str__(self):
return force_text("{} {:,.2f}".format(self.currency, self.amount))
def __lt__(self, other):
if isinstance(other, Money):
if other.currency != self.currency:
<|code_end|>
, determine the next line of code. You have imports:
import copy
import datetime
import decimal
import inspect
import sys
import pytz
import six
import urllib
from calendar import timegm
from functools import wraps
from .compat import python_2_unicode_compatible
from .exceptions import CurrencyMismatch
from urllib import request
and context (class names, function names, or code) available:
# Path: mangopay/exceptions.py
# class CurrencyMismatch(Exception):
# pass
. Output only the next line. | raise CurrencyMismatch(self.currency, other.currency, '<') |
Given the code snippet: <|code_start|> with open(filepath, newline='', encoding='utf-8') as file:
return file.read()
class RegisteredMocks(unittest.TestCase):
def setUp(self):
self.mock_oauth()
def register_mock(self, data):
match_querystring = False
if 'match_querystring' in data:
match_querystring = data['match_querystring'] or False
if isinstance(data, list):
for d in data:
self.register_mock(d)
else:
if isinstance(data['body'], (dict, list)):
data['body'] = json.dumps(data['body'])
responses.add(data['method'], data['url'],
body=data['body'], status=data['status'],
content_type='application/json',
match_querystring=match_querystring)
def mock_oauth(self):
self.register_mock({
'method': responses.POST,
<|code_end|>
, generate the next line using the imports in this file:
import json
import os
import re
import sys
import unittest
import responses
from datetime import datetime
from mangopay.utils import timestamp_from_date
from tests import settings
and context (functions, classes, or occasionally code) from other files:
# Path: mangopay/utils.py
# def timestamp_from_date(date):
# epoch = datetime.date(1970, 1, 1)
# diff = date - epoch
# return diff.days * 24 * 3600 + diff.seconds
#
# Path: tests/settings.py
# MANGOPAY_CLIENT_ID = 'sdk-unit-tests'
# MANGOPAY_APIKEY = 'cqFfFrWfCcb7UadHNxx2C9Lo6Djw8ZduLi7J9USTmu8bhxxpju'
# MANGOPAY_API_URL = 'https://api.mangopay.com/v2.01/'
# MANGOPAY_API_SANDBOX_URL = 'https://api.sandbox.mangopay.com/v2.01/'
# MANGOPAY_USE_SANDBOX = True
# MANGOPAY_API_VERSION = 2
# MOCK_TESTS_RESPONSES = True
. Output only the next line. | 'url': settings.MANGOPAY_API_SANDBOX_URL + 'oauth/token', |
Using the snippet: <|code_start|> def test_format_not_tty(self, isatty):
assert ' test' == self.color.format('test', 'red', indent=2)
assert isatty.called
def test_format_default_s(self):
assert self.color.format('') == ''
def test_attribute(self):
self.color.red = '\33[31m'
self.color.lred = '\33[31;1m'
with raises(AttributeError):
self.color.test
@patch('zdict.utils.constants')
@patch('os.mkdir')
@patch('os.path.isdir', return_value=False)
def test_create_zdict_dir_if_not_exists(mkdir, isdir, constants):
constants.BASE_DIR = '/mock'
create_zdict_dir_if_not_exists()
isdir.assert_called_with('/mock')
mkdir.assert_called_with('/mock')
@patch('builtins.open')
@patch('zdict.utils.constants')
@patch('os.path.exists', return_value=False)
def test_create_zdict_db_if_not_exists(exists, constants, open):
constants.DB_FILE = '/mock'
<|code_end|>
, determine the next line of code. You have imports:
import sys
from zdict.utils import (Color, create_zdict_db_if_not_exists,
create_zdict_dir_if_not_exists,
import_readline)
from pytest import mark, raises
from unittest.mock import patch
and context (class names, function names, or code) available:
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
#
# def create_zdict_db_if_not_exists():
# if not os.path.exists(constants.DB_FILE):
# open(constants.DB_FILE, 'a').close()
#
# def create_zdict_dir_if_not_exists():
# if not os.path.isdir(constants.BASE_DIR):
# os.mkdir(constants.BASE_DIR)
#
# def import_readline():
# if sys.platform == 'darwin' and sys.version_info <= (3, 5):
# import gnureadline as readline
# else:
# import readline
# return readline
. Output only the next line. | create_zdict_db_if_not_exists() |
Given snippet: <|code_start|> del self.color
@patch('zdict.utils.sys.stdout.isatty', return_value=True)
def test_format_in_tty(self, isatty):
assert '\33[31;1mtest\33[0m' == self.color.format('test', 'lred')
assert '\33[31mtest\33[0m' == self.color.format('test', 'red')
assert (' \33[31mtest\33[0m' ==
self.color.format('test', 'red', indent=2))
assert isatty.called
@patch('zdict.utils.sys.stdout.isatty', return_value=False)
def test_format_not_tty(self, isatty):
assert ' test' == self.color.format('test', 'red', indent=2)
assert isatty.called
def test_format_default_s(self):
assert self.color.format('') == ''
def test_attribute(self):
self.color.red = '\33[31m'
self.color.lred = '\33[31;1m'
with raises(AttributeError):
self.color.test
@patch('zdict.utils.constants')
@patch('os.mkdir')
@patch('os.path.isdir', return_value=False)
def test_create_zdict_dir_if_not_exists(mkdir, isdir, constants):
constants.BASE_DIR = '/mock'
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import sys
from zdict.utils import (Color, create_zdict_db_if_not_exists,
create_zdict_dir_if_not_exists,
import_readline)
from pytest import mark, raises
from unittest.mock import patch
and context:
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
#
# def create_zdict_db_if_not_exists():
# if not os.path.exists(constants.DB_FILE):
# open(constants.DB_FILE, 'a').close()
#
# def create_zdict_dir_if_not_exists():
# if not os.path.isdir(constants.BASE_DIR):
# os.mkdir(constants.BASE_DIR)
#
# def import_readline():
# if sys.platform == 'darwin' and sys.version_info <= (3, 5):
# import gnureadline as readline
# else:
# import readline
# return readline
which might include code, classes, or functions. Output only the next line. | create_zdict_dir_if_not_exists() |
Using the snippet: <|code_start|>
@patch('zdict.utils.constants')
@patch('os.mkdir')
@patch('os.path.isdir', return_value=False)
def test_create_zdict_dir_if_not_exists(mkdir, isdir, constants):
constants.BASE_DIR = '/mock'
create_zdict_dir_if_not_exists()
isdir.assert_called_with('/mock')
mkdir.assert_called_with('/mock')
@patch('builtins.open')
@patch('zdict.utils.constants')
@patch('os.path.exists', return_value=False)
def test_create_zdict_db_if_not_exists(exists, constants, open):
constants.DB_FILE = '/mock'
create_zdict_db_if_not_exists()
exists.assert_called_with('/mock')
assert open.called
@mark.skipif(sys.platform.startswith('freebsd'),
reason="gnureadline installation failed on freebsd")
def test_platform_readline():
'''
Check the imported readline module on different platforms
'''
with patch.object(sys, 'platform', new='linux'):
<|code_end|>
, determine the next line of code. You have imports:
import sys
from zdict.utils import (Color, create_zdict_db_if_not_exists,
create_zdict_dir_if_not_exists,
import_readline)
from pytest import mark, raises
from unittest.mock import patch
and context (class names, function names, or code) available:
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
#
# def create_zdict_db_if_not_exists():
# if not os.path.exists(constants.DB_FILE):
# open(constants.DB_FILE, 'a').close()
#
# def create_zdict_dir_if_not_exists():
# if not os.path.isdir(constants.BASE_DIR):
# os.mkdir(constants.BASE_DIR)
#
# def import_readline():
# if sys.platform == 'darwin' and sys.version_info <= (3, 5):
# import gnureadline as readline
# else:
# import readline
# return readline
. Output only the next line. | readline = import_readline() |
Based on the snippet: <|code_start|>
class TestUrbanDict:
@classmethod
def setup_class(cls):
<|code_end|>
, predict the immediate next line with the help of imports:
from pytest import raises
from unittest.mock import patch
from zdict.dictionaries.urban import UrbanDict
from zdict.exceptions import NotFoundError
from zdict.zdict import get_args
and context (classes, functions, sometimes code) from other files:
# Path: zdict/dictionaries/urban.py
# class UrbanDict(DictBase):
#
# HOMEPAGE_URL = "https://www.urbandictionary.com/"
# API = 'http://api.urbandictionary.com/v0/define?term={word}'
#
# @property
# def provider(self):
# return 'urban'
#
# @property
# def title(self):
# return 'Urban Dictionary'
#
# def _get_url(self, word) -> str:
# return self.API.format(word=word)
#
# def show(self, record: Record):
# content = json.loads(record.content)
#
# data = content['list'][0]
#
# # print word
# self.color.print(data.get('word', ''), 'yellow')
#
# for line in data.get('definition', '').splitlines():
# self.color.print(line, 'org', indent=2)
#
# for example in data.get('example', '').split('\n'):
# self.color.print(
# example,
# 'indigo',
# indent=2,
# )
#
# print()
#
# def query(self, word: str):
# content_str = self._get_raw(word)
# content_dict = json.loads(content_str)
#
# if content_dict['list'] == []:
# raise NotFoundError(word)
#
# record = Record(
# word=word,
# content=content_str,
# source=self.provider,
# )
#
# return record
#
# Path: zdict/exceptions.py
# class NotFoundError(Exception):
# def __init__(self, word):
# self.word = word
#
# def __str__(self):
# return '"{}" not found!'.format(self.word)
#
# Path: zdict/zdict.py
# def get_args():
# # parse args
# parser = ArgumentParser(prog='zdict')
#
# parser.add_argument(
# 'words',
# metavar='word',
# type=str,
# nargs='*',
# help='Words for searching its translation'
# )
#
# parser.add_argument(
# "-v", "--version",
# action="version",
# version='%(prog)s-' + constants.VERSION
# )
#
# parser.add_argument(
# "-d", "--disable-db-cache",
# default=False,
# action="store_true",
# help="Temporarily not using the result from db cache.\
# (still save the result into db)"
# )
#
# parser.add_argument(
# "-t", "--query-timeout",
# type=float,
# default=5.0,
# action="store",
# help="Set timeout for every query. default is 5 seconds."
# )
#
# def positive_int_only(value):
# ivalue = int(value)
# if ivalue <= 0:
# raise ArgumentTypeError(
# "%s is an invalid positive int value" % value
# )
# return ivalue
#
# parser.add_argument(
# "-j", "--jobs",
# type=positive_int_only,
# nargs="?",
# default=0, # 0: not using, None: auto, N (1, 2, ...): N jobs
# action="store",
# help="""
# Allow N jobs at once.
# Do not pass any argument to use the number of CPUs in the system.
# """
# )
#
# parser.add_argument(
# "-sp", "--show-provider",
# default=False,
# action="store_true",
# help="Show the dictionary provider of the queried word"
# )
#
# parser.add_argument(
# "-su", "--show-url",
# default=False,
# action="store_true",
# help="Show the url of the queried word"
# )
#
# available_dictionaries = list(dictionary_map.keys())
# available_dictionaries.append('all')
# parser.add_argument(
# "-dt", "--dict",
# default="yahoo",
# action="store",
# choices=available_dictionaries,
# metavar=','.join(available_dictionaries),
# help="""
# Must be seperated by comma and no spaces after each comma.
# Choose the dictionary you want. (default: yahoo)
# Use 'all' for qureying all dictionaries.
# If 'all' or more than 1 dictionaries been chosen,
# --show-provider will be set to True in order to
# provide more understandable output.
# """
# )
#
# parser.add_argument(
# "-ld", "--list-dicts",
# default=False,
# action="store_true",
# help="Show currently supported dictionaries."
# )
#
# parser.add_argument(
# "-V", "--verbose",
# default=False,
# action="store_true",
# help="Show more information for the queried word.\
# (If the chosen dictionary have implemented verbose related functions)"
# )
#
# parser.add_argument(
# "-c", "--force-color",
# default=False,
# action="store_true",
# help="Force color printing (zdict automatically disable color printing \
# when output is not a tty, use this option to force color printing)"
# )
#
# parser.add_argument(
# '--dump', dest='pattern',
# nargs='?',
# default=None, const=r'^.*$',
# help='Dump the querying history, can be filtered with regex'
# )
#
# parser.add_argument(
# "-D", "--debug",
# default=False,
# action="store_true",
# help="Print raw html prettified by BeautifulSoup for debugging."
# )
#
# return parser.parse_args()
. Output only the next line. | cls.dict = UrbanDict(get_args()) |
Here is a snippet: <|code_start|>
class TestUrbanDict:
@classmethod
def setup_class(cls):
<|code_end|>
. Write the next line using the current file imports:
from pytest import raises
from unittest.mock import patch
from zdict.dictionaries.urban import UrbanDict
from zdict.exceptions import NotFoundError
from zdict.zdict import get_args
and context from other files:
# Path: zdict/dictionaries/urban.py
# class UrbanDict(DictBase):
#
# HOMEPAGE_URL = "https://www.urbandictionary.com/"
# API = 'http://api.urbandictionary.com/v0/define?term={word}'
#
# @property
# def provider(self):
# return 'urban'
#
# @property
# def title(self):
# return 'Urban Dictionary'
#
# def _get_url(self, word) -> str:
# return self.API.format(word=word)
#
# def show(self, record: Record):
# content = json.loads(record.content)
#
# data = content['list'][0]
#
# # print word
# self.color.print(data.get('word', ''), 'yellow')
#
# for line in data.get('definition', '').splitlines():
# self.color.print(line, 'org', indent=2)
#
# for example in data.get('example', '').split('\n'):
# self.color.print(
# example,
# 'indigo',
# indent=2,
# )
#
# print()
#
# def query(self, word: str):
# content_str = self._get_raw(word)
# content_dict = json.loads(content_str)
#
# if content_dict['list'] == []:
# raise NotFoundError(word)
#
# record = Record(
# word=word,
# content=content_str,
# source=self.provider,
# )
#
# return record
#
# Path: zdict/exceptions.py
# class NotFoundError(Exception):
# def __init__(self, word):
# self.word = word
#
# def __str__(self):
# return '"{}" not found!'.format(self.word)
#
# Path: zdict/zdict.py
# def get_args():
# # parse args
# parser = ArgumentParser(prog='zdict')
#
# parser.add_argument(
# 'words',
# metavar='word',
# type=str,
# nargs='*',
# help='Words for searching its translation'
# )
#
# parser.add_argument(
# "-v", "--version",
# action="version",
# version='%(prog)s-' + constants.VERSION
# )
#
# parser.add_argument(
# "-d", "--disable-db-cache",
# default=False,
# action="store_true",
# help="Temporarily not using the result from db cache.\
# (still save the result into db)"
# )
#
# parser.add_argument(
# "-t", "--query-timeout",
# type=float,
# default=5.0,
# action="store",
# help="Set timeout for every query. default is 5 seconds."
# )
#
# def positive_int_only(value):
# ivalue = int(value)
# if ivalue <= 0:
# raise ArgumentTypeError(
# "%s is an invalid positive int value" % value
# )
# return ivalue
#
# parser.add_argument(
# "-j", "--jobs",
# type=positive_int_only,
# nargs="?",
# default=0, # 0: not using, None: auto, N (1, 2, ...): N jobs
# action="store",
# help="""
# Allow N jobs at once.
# Do not pass any argument to use the number of CPUs in the system.
# """
# )
#
# parser.add_argument(
# "-sp", "--show-provider",
# default=False,
# action="store_true",
# help="Show the dictionary provider of the queried word"
# )
#
# parser.add_argument(
# "-su", "--show-url",
# default=False,
# action="store_true",
# help="Show the url of the queried word"
# )
#
# available_dictionaries = list(dictionary_map.keys())
# available_dictionaries.append('all')
# parser.add_argument(
# "-dt", "--dict",
# default="yahoo",
# action="store",
# choices=available_dictionaries,
# metavar=','.join(available_dictionaries),
# help="""
# Must be seperated by comma and no spaces after each comma.
# Choose the dictionary you want. (default: yahoo)
# Use 'all' for qureying all dictionaries.
# If 'all' or more than 1 dictionaries been chosen,
# --show-provider will be set to True in order to
# provide more understandable output.
# """
# )
#
# parser.add_argument(
# "-ld", "--list-dicts",
# default=False,
# action="store_true",
# help="Show currently supported dictionaries."
# )
#
# parser.add_argument(
# "-V", "--verbose",
# default=False,
# action="store_true",
# help="Show more information for the queried word.\
# (If the chosen dictionary have implemented verbose related functions)"
# )
#
# parser.add_argument(
# "-c", "--force-color",
# default=False,
# action="store_true",
# help="Force color printing (zdict automatically disable color printing \
# when output is not a tty, use this option to force color printing)"
# )
#
# parser.add_argument(
# '--dump', dest='pattern',
# nargs='?',
# default=None, const=r'^.*$',
# help='Dump the querying history, can be filtered with regex'
# )
#
# parser.add_argument(
# "-D", "--debug",
# default=False,
# action="store_true",
# help="Print raw html prettified by BeautifulSoup for debugging."
# )
#
# return parser.parse_args()
, which may include functions, classes, or code. Output only the next line. | cls.dict = UrbanDict(get_args()) |
Next line prediction: <|code_start|>
class DictCompleter:
def __init__(self):
self.db = db
self.db.connection()
def __del__(self):
self.db.close()
def complete(self, text, state):
if state == 0: # new query
self.records = iter(
<|code_end|>
. Use current file imports:
(from zdict.models import Record, db)
and context including class names, function names, or small code snippets from other files:
# Path: zdict/models.py
# class Record(peewee.Model):
# class Meta:
. Output only the next line. | Record.select().where(Record.word.startswith(text))) |
Here is a snippet: <|code_start|>
def test_import_pyjokes_module():
with patch('importlib.util.find_spec', return_value=None):
assert import_pyjokes_module() is None
assert import_pyjokes_module().__name__ == 'pyjokes'
def test_get_pyjoke():
with patch('pyjokes.get_jokes', return_value=['test']):
<|code_end|>
. Write the next line using the current file imports:
from zdict.easter_eggs import (import_pyjokes_module, get_pyjoke)
from unittest.mock import patch
import pyjokes
and context from other files:
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# if importlib.util.find_spec('pyjokes'):
# return importlib.import_module('pyjokes')
#
# def get_pyjoke(pyjokes, word: str):
# if not pyjokes:
# return
#
# try:
# # very basic string searching in jokes
# r = random.choice(
# list(filter(
# lambda j: word in map(
# lambda x: ''.join(c for c in x if c.isalnum()),
# j.split()
# ),
# pyjokes.get_jokes()
# ))
# )
# except IndexError:
# return
# else:
# return Record(word=word, content=r, source='pyjokes')
, which may include functions, classes, or code. Output only the next line. | assert get_pyjoke(pyjokes, 'test').content == 'test' |
Given the code snippet: <|code_start|>
@patch('zdict.completer.db')
def test_DictCompleter_db_connect(db):
'''
Test case for DictCompleter init/del
DictCompleter will connect to db when __init__,
and close db when __del__.
'''
# init
<|code_end|>
, generate the next line using the imports in this file:
from pytest import raises
from unittest.mock import patch, Mock
from zdict.completer import DictCompleter
and context (functions, classes, or occasionally code) from other files:
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
. Output only the next line. | completer = DictCompleter() |
Here is a snippet: <|code_start|>
def import_pyjokes_module():
if importlib.util.find_spec('pyjokes'):
return importlib.import_module('pyjokes')
def get_pyjoke(pyjokes, word: str):
if not pyjokes:
return
try:
# very basic string searching in jokes
r = random.choice(
list(filter(
lambda j: word in map(
lambda x: ''.join(c for c in x if c.isalnum()),
j.split()
),
pyjokes.get_jokes()
))
)
except IndexError:
return
else:
<|code_end|>
. Write the next line using the current file imports:
import re
import random
import importlib
import importlib.util
from zdict.models import Record
from zdict.utils import Color
and context from other files:
# Path: zdict/models.py
# class Record(peewee.Model):
# '''
# A model for storing the query results into the SQLite db.
#
# :param word: the vocabulary
# :param content: the query result of the vocabulary.
# It's a json document has the following spec.
# {
# 'word': word,
# // storing the querying result.
# 'pronounce': [
# ('key', 'value'),
# ...
# ],
# 'sound': [
# ('type', 'url'),
# ...
# // type: (mp3|ogg)
# ],
# 'explain': [
# ('speech',
# (
# 'meaning',
# ('sentence1', 'translation'),
# ...
# ),
# ...
# ),
# ...
# ]
# }
# :param source: source of the content. May be Yahoo!, Google, ... Dict
# '''
#
# word = peewee.TextField()
# content = peewee.TextField()
# source = peewee.CharField()
#
# class Meta:
# database = db
# primary_key = peewee.CompositeKey('word', 'source')
#
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
, which may include functions, classes, or code. Output only the next line. | return Record(word=word, content=r, source='pyjokes') |
Using the snippet: <|code_start|>
def get_pyjoke(pyjokes, word: str):
if not pyjokes:
return
try:
# very basic string searching in jokes
r = random.choice(
list(filter(
lambda j: word in map(
lambda x: ''.join(c for c in x if c.isalnum()),
j.split()
),
pyjokes.get_jokes()
))
)
except IndexError:
return
else:
return Record(word=word, content=r, source='pyjokes')
def show_pyjoke(record: Record):
if not record:
return
for i, s in enumerate(
re.split(r'\b({})\b'.format(record.word), record.content)
):
<|code_end|>
, determine the next line of code. You have imports:
import re
import random
import importlib
import importlib.util
from zdict.models import Record
from zdict.utils import Color
and context (class names, function names, or code) available:
# Path: zdict/models.py
# class Record(peewee.Model):
# '''
# A model for storing the query results into the SQLite db.
#
# :param word: the vocabulary
# :param content: the query result of the vocabulary.
# It's a json document has the following spec.
# {
# 'word': word,
# // storing the querying result.
# 'pronounce': [
# ('key', 'value'),
# ...
# ],
# 'sound': [
# ('type', 'url'),
# ...
# // type: (mp3|ogg)
# ],
# 'explain': [
# ('speech',
# (
# 'meaning',
# ('sentence1', 'translation'),
# ...
# ),
# ...
# ),
# ...
# ]
# }
# :param source: source of the content. May be Yahoo!, Google, ... Dict
# '''
#
# word = peewee.TextField()
# content = peewee.TextField()
# source = peewee.CharField()
#
# class Meta:
# database = db
# primary_key = peewee.CompositeKey('word', 'source')
#
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
. Output only the next line. | Color.print( |
Here is a snippet: <|code_start|>
@patch('zdict.api.Record')
def test_dump(Record):
# Assume our db contains following words
Record.select.return_value = [
Mock(word='apple'),
Mock(word='apply')]
# init query
<|code_end|>
. Write the next line using the current file imports:
from unittest.mock import patch, Mock
from zdict.api import dump
and context from other files:
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
, which may include functions, classes, or code. Output only the next line. | ret = dump() |
Using the snippet: <|code_start|> db_record.save()
def show_provider(self):
self.color.print('[' + self.provider + ']', 'blue')
def show_url(self, word):
self.color.print('(' + self._get_url(word) + ')', 'blue')
def lookup(self, word):
'''
Main workflow for searching a word.
'''
word = word.lower()
if self.args.show_provider:
self.show_provider()
if self.args.show_url:
self.show_url(word)
if not self.args.disable_db_cache:
record = self.query_db_cache(word)
if record:
self.show(record)
return
try:
record = self.query(word)
<|code_end|>
, determine the next line of code. You have imports:
import abc
import json
import requests
import traceback
import sys
from zdict import exceptions
from zdict.models import Record, db
from zdict.utils import Color
from bs4 import BeautifulSoup
and context (class names, function names, or code) available:
# Path: zdict/exceptions.py
# class QueryError(Exception):
# class NotFoundError(Exception):
# class NoNetworkError(Exception):
# class TimeoutError(Exception):
# class UnexpectedError(Exception):
# class APIKeyError(Exception):
# def __init__(self, word, status_code):
# def __str__(self):
# def __init__(self, word):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __init__(self):
# def __init__(self, msg):
# def __str__(self):
#
# Path: zdict/models.py
# class Record(peewee.Model):
# class Meta:
#
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
. Output only the next line. | except exceptions.NoNetworkError as e: |
Next line prediction: <|code_start|>
class DictBase(metaclass=abc.ABCMeta):
REQUIRED_TABLE = (
Record,
)
def __init__(self, args):
self.args = args
self.color = Color()
<|code_end|>
. Use current file imports:
(import abc
import json
import requests
import traceback
import sys
from zdict import exceptions
from zdict.models import Record, db
from zdict.utils import Color
from bs4 import BeautifulSoup)
and context including class names, function names, or small code snippets from other files:
# Path: zdict/exceptions.py
# class QueryError(Exception):
# class NotFoundError(Exception):
# class NoNetworkError(Exception):
# class TimeoutError(Exception):
# class UnexpectedError(Exception):
# class APIKeyError(Exception):
# def __init__(self, word, status_code):
# def __str__(self):
# def __init__(self, word):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __init__(self):
# def __init__(self, msg):
# def __str__(self):
#
# Path: zdict/models.py
# class Record(peewee.Model):
# class Meta:
#
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
. Output only the next line. | self.db = db |
Given snippet: <|code_start|>
class DictBase(metaclass=abc.ABCMeta):
REQUIRED_TABLE = (
Record,
)
def __init__(self, args):
self.args = args
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import abc
import json
import requests
import traceback
import sys
from zdict import exceptions
from zdict.models import Record, db
from zdict.utils import Color
from bs4 import BeautifulSoup
and context:
# Path: zdict/exceptions.py
# class QueryError(Exception):
# class NotFoundError(Exception):
# class NoNetworkError(Exception):
# class TimeoutError(Exception):
# class UnexpectedError(Exception):
# class APIKeyError(Exception):
# def __init__(self, word, status_code):
# def __str__(self):
# def __init__(self, word):
# def __str__(self):
# def __str__(self):
# def __str__(self):
# def __init__(self):
# def __init__(self, msg):
# def __str__(self):
#
# Path: zdict/models.py
# class Record(peewee.Model):
# class Meta:
#
# Path: zdict/utils.py
# class Color(metaclass=ColorConst):
# _force_color = False
#
# @classmethod
# def set_force_color(cls, force_color=True):
# cls._force_color = force_color
#
# @classmethod
# def format(self, s='', color='org', indent=0):
# '''
# :type s: str
# :param s: message
# :param color: predefined color name, e,g,: red, RED.
# Using 'l' prefix for bright color, e.g.: lred, lwhite.
# It's case-insensitive.
#
# If stdout isn't a tty, the color option will be ignored.
# '''
# colorize = self._force_color or sys.stdout.isatty()
#
# return '{indent}{color}{s}{org}'.format(
# indent=' ' * indent,
# color=getattr(self, color, '') if colorize else '',
# s=s,
# org=self.ORG if colorize else '',
# )
#
# @classmethod
# def print(self, *args, end='\n', **kwargs):
# print(self.format(*args, **kwargs), end=end)
which might include code, classes, or functions. Output only the next line. | self.color = Color() |
Given the following code snippet before the placeholder: <|code_start|>
class TestMultiprocessing(unittest.TestCase):
def test_multiprocessing(self):
testargs = ['', '-j', '2', '-d', '-dt', 'yahoo', 'test']
with patch.object(sys, 'argv', new=testargs):
f1 = StringIO()
with redirect_stdout(f1):
<|code_end|>
, predict the next line using imports from the current file:
import sys
import unittest
from contextlib import redirect_stdout
from io import StringIO
from unittest.mock import patch
from zdict.zdict import main
and context including class names, function names, and sometimes code from other files:
# Path: zdict/zdict.py
# def main():
# if user_set_encoding_and_is_utf8():
# check_zdict_dir_and_db()
#
# global dictionary_map
# dictionary_map = get_dictionary_map()
#
# args = get_args()
# args = set_args(args)
#
# execute_zdict(args)
# else:
# exit()
. Output only the next line. | main() |
Predict the next line after this snippet: <|code_start|> except ValueError:
print("Didn't detect your LC_ALL environment variable.")
print("Please export LC_ALL with some UTF-8 encoding.")
print("For example: `export LC_ALL=en_US.UTF-8`")
return False
else:
if enc != "UTF-8":
print("zdict only works with encoding=UTF-8, ")
print("but your encoding is: {} {}".format(lang, enc))
print("Please export LC_ALL with some UTF-8 encoding.")
print("For example: `export LC_ALL=en_US.UTF-8`")
return False
return True
def get_args():
# parse args
parser = ArgumentParser(prog='zdict')
parser.add_argument(
'words',
metavar='word',
type=str,
nargs='*',
help='Words for searching its translation'
)
parser.add_argument(
"-v", "--version",
action="version",
<|code_end|>
using the current file's imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and any relevant context from other files:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | version='%(prog)s-' + constants.VERSION |
Continue the code snippet: <|code_start|> (If the chosen dictionary have implemented verbose related functions)"
)
parser.add_argument(
"-c", "--force-color",
default=False,
action="store_true",
help="Force color printing (zdict automatically disable color printing \
when output is not a tty, use this option to force color printing)"
)
parser.add_argument(
'--dump', dest='pattern',
nargs='?',
default=None, const=r'^.*$',
help='Dump the querying history, can be filtered with regex'
)
parser.add_argument(
"-D", "--debug",
default=False,
action="store_true",
help="Print raw html prettified by BeautifulSoup for debugging."
)
return parser.parse_args()
def set_args(args):
if args.force_color:
<|code_end|>
. Use current file imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context (classes, functions, or code) from other files:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | utils.Color.set_force_color() |
Predict the next line after this snippet: <|code_start|> return f.getvalue()
def init_worker():
# When -j been used, make subprocesses ignore KeyboardInterrupt
# for not showing KeyboardInterrupt traceback error message.
signal.signal(signal.SIGINT, signal.SIG_IGN)
def normal_mode(args):
if args.jobs == 0:
# user didn't use `-j`
for word in args.words:
for d in args.dict:
zdict = dictionary_map[d](args)
zdict.lookup(word)
else:
# user did use `-j`
# If processes is None, os.cpu_count() is used.
pool = Pool(args.jobs, init_worker)
for word in args.words:
futures = [
pool.apply_async(lookup_string_wrapper,
(dictionary_map[d], word, args))
for d in args.dict
]
results = [i.get() for i in futures]
print(''.join(results))
<|code_end|>
using the current file's imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and any relevant context from other files:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | easter_eggs.lookup_pyjokes(word) |
Using the snippet: <|code_start|> else:
return
def loop_prompt(self):
while True:
self.prompt()
def interactive_mode(args):
# configure readline and completer
readline.parse_and_bind("tab: complete")
readline.set_completer(DictCompleter().complete)
zdict = MetaInteractivePrompt(args)
zdict.loop_prompt()
def execute_zdict(args):
if args.list_dicts:
for provider in sorted(dictionary_map):
print(
'{}: {}\n{}\n'.format(
provider,
dictionary_map[provider](args).title,
dictionary_map[provider](args).HOMEPAGE_URL,
)
)
exit()
if args.pattern:
<|code_end|>
, determine the next line of code. You have imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context (class names, function names, or code) available:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | for word in dump(pattern=args.pattern): |
Predict the next line for this snippet: <|code_start|>
def __del__(self):
del self.dicts
def prompt(self):
user_input = input('[zDict]: ').strip()
if user_input:
if self.pool:
futures = [
self.pool.apply_async(lookup_string_wrapper,
(d, user_input, self.args))
for d in self.dict_classes
]
results = [i.get() for i in futures]
print(''.join(results))
else:
for dictionary_instance in self.dicts:
dictionary_instance.lookup(user_input)
else:
return
def loop_prompt(self):
while True:
self.prompt()
def interactive_mode(args):
# configure readline and completer
readline.parse_and_bind("tab: complete")
<|code_end|>
with the help of current file imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context from other files:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
, which may contain function names, class names, or code. Output only the next line. | readline.set_completer(DictCompleter().complete) |
Using the snippet: <|code_start|> for provider in sorted(dictionary_map):
print(
'{}: {}\n{}\n'.format(
provider,
dictionary_map[provider](args).title,
dictionary_map[provider](args).HOMEPAGE_URL,
)
)
exit()
if args.pattern:
for word in dump(pattern=args.pattern):
print(word)
exit()
try:
if args.words:
normal_mode(args)
else:
interactive_mode(args)
except (KeyboardInterrupt, EOFError):
print()
return
def main():
if user_set_encoding_and_is_utf8():
check_zdict_dir_and_db()
global dictionary_map
<|code_end|>
, determine the next line of code. You have imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context (class names, function names, or code) available:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | dictionary_map = get_dictionary_map() |
Using the snippet: <|code_start|> self.pool = Pool(self.args.jobs, init_worker)
def __del__(self):
del self.dicts
def prompt(self):
user_input = input('[zDict]: ').strip()
if user_input:
if self.pool:
futures = [
self.pool.apply_async(lookup_string_wrapper,
(d, user_input, self.args))
for d in self.dict_classes
]
results = [i.get() for i in futures]
print(''.join(results))
else:
for dictionary_instance in self.dicts:
dictionary_instance.lookup(user_input)
else:
return
def loop_prompt(self):
while True:
self.prompt()
def interactive_mode(args):
# configure readline and completer
<|code_end|>
, determine the next line of code. You have imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context (class names, function names, or code) available:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
. Output only the next line. | readline.parse_and_bind("tab: complete") |
Here is a snippet: <|code_start|>
def execute_zdict(args):
if args.list_dicts:
for provider in sorted(dictionary_map):
print(
'{}: {}\n{}\n'.format(
provider,
dictionary_map[provider](args).title,
dictionary_map[provider](args).HOMEPAGE_URL,
)
)
exit()
if args.pattern:
for word in dump(pattern=args.pattern):
print(word)
exit()
try:
if args.words:
normal_mode(args)
else:
interactive_mode(args)
except (KeyboardInterrupt, EOFError):
print()
return
def main():
if user_set_encoding_and_is_utf8():
<|code_end|>
. Write the next line using the current file imports:
from argparse import ArgumentParser, ArgumentTypeError
from locale import getdefaultlocale
from multiprocessing import Pool
from contextlib import redirect_stdout
from io import StringIO
from zdict import constants, utils, easter_eggs
from zdict.api import dump
from zdict.completer import DictCompleter
from zdict.loader import get_dictionary_map
from zdict.utils import readline, check_zdict_dir_and_db
import sys
import signal
and context from other files:
# Path: zdict/constants.py
# VERSION = '4.0.5'
# BASE_DIR_NAME = '.zdict'
# BASE_DIR = os.path.join(os.path.expanduser("~"), BASE_DIR_NAME)
# DB_NAME = 'zdict.db'
# DB_FILE = os.path.join(BASE_DIR, DB_NAME)
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
#
# Path: zdict/easter_eggs.py
# def import_pyjokes_module():
# def get_pyjoke(pyjokes, word: str):
# def show_pyjoke(record: Record):
# def lookup_pyjokes(word: str):
#
# Path: zdict/api.py
# def dump(pattern=r'^.*$'):
# return [r.word for r in Record.select() if re.fullmatch(pattern, r.word)]
#
# Path: zdict/completer.py
# class DictCompleter:
# def __init__(self):
# self.db = db
# self.db.connection()
#
# def __del__(self):
# self.db.close()
#
# def complete(self, text, state):
# if state == 0: # new query
# self.records = iter(
# Record.select().where(Record.word.startswith(text)))
#
# return next(self.records).word
#
# Path: zdict/loader.py
# def get_dictionary_map():
# '''
# Auto discover dictionaries in package ``dictionaries``.
# Each dictionary class MUST be the subclass of ``DictBase``
#
# :return: a dict with {provider_name: cls}
# SomeDict.provider as key, the class as value
# '''
# package = 'zdict.dictionaries'
# exclude_files = ('template.py',)
#
# return {
# cls(None).provider: cls
# for _, cls in (
# chain.from_iterable(
# getmembers(mod, predicate=_is_dict)
# for mod in (
# import_module(
# '{}.{}'.format(package, f.partition('.py')[0]))
# for f in os.listdir(dictionaries.__path__[0])
# if (not f.startswith('_') and
# f.endswith('.py') and
# f not in exclude_files)
# )
# )
# )
# }
#
# Path: zdict/utils.py
# def create_zdict_dir_if_not_exists():
# def create_zdict_db_if_not_exists():
# def check_zdict_dir_and_db():
# def __getattr__(cls, color):
# def set_force_color(cls, force_color=True):
# def format(self, s='', color='org', indent=0):
# def print(self, *args, end='\n', **kwargs):
# def import_readline():
# class ColorConst(type):
# class Color(metaclass=ColorConst):
# COLOR_TEMPLATE = "\33[{}m"
# COLOR_LIST = (
# ('ORG', 0),
# ('BLACK', 30),
# ('RED', 31),
# ('GREEN', 32),
# ('YELLOW', 33),
# ('BLUE', 34),
# ('MAGENTA', 35),
# ('INDIGO', 36),
# ('WHITE', 37),
# )
, which may include functions, classes, or code. Output only the next line. | check_zdict_dir_and_db() |
Continue the code snippet: <|code_start|> self.add_processor(name, processor)
self._processor_wrappers.add(name)
def add_pipe(self, name, pipe):
if name in self._pipes:
_logger.warning("pipe override: '%s'", name)
self._pipes[name] = pipe
def invoke(self, pipe, stream=None):
# A given 'pipe' may be either a pipe name or an actual
# pipe definition. That's why need this ugly type check because any
# string value is considered as a name. Passing an actual pipe is
# very handy in couple of use cases, such as running a sub pipe
# from some processor.
if isinstance(pipe, str):
if pipe not in self._pipes:
raise ValueError(f"no such pipe: '{pipe}'")
pipe = self._pipes[pipe]
# Since processors expect an input stream to be an iterator, we cast a
# given stream explicitly to an iterator even though everything will
# probably work even if it's not. We just want to respect and enforce
# established contracts.
stream = iter(stream or [])
for processor in pipe:
# Resolve every JSON reference we encounter in a processor's
# parameters. Please note, we're doing this so late because we
# want to take into account metadata and other changes produced
# by previous processors in the pipe.
<|code_end|>
. Use current file imports:
import collections
import logging
from .._processors import _misc
and context (classes, functions, or code) from other files:
# Path: src/holocron/_processors/_misc.py
# def resolve_json_references(value, context, keep_unknown=True):
# def _do_resolve(node):
# def __init__(self, *, fallback=None, jsonschema=None):
# def __call__(self, fn):
# def wrapper(app, *args, **kwargs):
# def is_encoding(value):
# def is_timezone(value):
# def is_path(value):
# class parameters:
. Output only the next line. | processor = _misc.resolve_json_references( |
Predict the next line after this snippet: <|code_start|>
def _re_match(value, pattern, flags=0):
# If a regular expression is used agains a Python's path class, we can cast
# the path object to string for user, because it's a behaviour a user would
# expect anyway.
if isinstance(value, pathlib.PurePath):
value = str(value)
return re.match(pattern, value, flags)
class _ConditionEvaluator:
"""Evaluate a python-like expressions in boolean context."""
def __init__(self):
# When processor requires some means to evaluate string conditions.
# Turns out there're not so many simple and safe solutions, so we
# decided to go same approach Ansible went and use Jinja2 to evaluate
# expressions. It's safe, fast, extensible, and we already have
# dependency on Jinja2.
self._env = jinja2.Environment()
self._env.filters.update({"match": _re_match})
def eval(self, cond, **context):
template = self._env.from_string(f"{{% if {cond} %}}true{{% endif %}}")
return template.render(**context) == "true"
<|code_end|>
using the current file's imports:
import collections
import pathlib
import re
import jinja2
from ._misc import parameters
and any relevant context from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
. Output only the next line. | @parameters( |
Here is a snippet: <|code_start|> # Memorizing 'source' property is not required for application core,
# however, it may be useful for troubleshooting pipes as well as
# writing 'when' conditions.
source=source,
destination=source,
content=content,
created=created,
updated=updated,
baseurl=app.metadata["url"],
)
def _finditems(app, path, pattern, encoding, tzinfo):
if pattern:
re_name = re.compile(pattern)
for root, _, filenames in os.walk(path, topdown=True):
root = pathlib.Path(root)
for filename in filenames:
source = root.joinpath(filename).relative_to(path)
if pattern and not re_name.match(str(source)):
continue
yield _createitem(
app, root / filename, source, encoding=encoding, tzinfo=tzinfo
)
<|code_end|>
. Write the next line using the current file imports:
import datetime
import os
import pathlib
import re
import dateutil.tz
import holocron
from ._misc import parameters
and context from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
, which may include functions, classes, or code. Output only the next line. | @parameters( |
Based on the snippet: <|code_start|>"""Convert CommonMark into HTML."""
_LOGGER = logging.getLogger("holocron")
def _pygmentize(code, language, _):
try:
formatter = _pygmentize.formatter
except AttributeError:
class HtmlFormatter(pygments.formatters.html.HtmlFormatter):
def wrap(self, source, _):
# Since 'markdown-it' creates required '<pre>' & '<code>'
# containers, there's no need to duplicate them with pygments.
yield from source
formatter = _pygmentize.formatter = HtmlFormatter(wrapcode=True)
try:
lexer = pygments.lexers.get_lexer_by_name(language)
except pygments.util.ClassNotFound:
_LOGGER.warning("pygmentize: no such langauge: '%s'", language)
return None
return pygments.highlight(code, lexer, formatter)
<|code_end|>
, predict the immediate next line with the help of imports:
import logging
import markdown_it
import pygments
import pygments.formatters.html
import pygments.lexers
import pygments.util
from mdit_py_plugins.container import container_plugin
from mdit_py_plugins.deflist import deflist_plugin
from mdit_py_plugins.footnote import footnote_plugin
from ._misc import parameters
and context (classes, functions, sometimes code) from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
. Output only the next line. | @parameters( |
Predict the next line after this snippet: <|code_start|>"""Parse YAML front matter and set its values as item"s properties."""
_DELIMITERS = {
"toml": r"+++",
"yaml": r"---",
}
_LOADERS = {
"toml": toml.loads,
"yaml": yaml.safe_load,
}
<|code_end|>
using the current file's imports:
import collections.abc
import re
import toml
import yaml
from ._misc import parameters
and any relevant context from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
. Output only the next line. | @parameters( |
Predict the next line after this snippet: <|code_start|> },
"encoding": {"type": "string", "format": "encoding"},
"pretty": {"type": "boolean"},
"syndication_format": {"type": "string", "enum": ["atom", "rss"]},
},
},
)
def process(
app,
stream,
*,
feed,
item,
syndication_format="atom",
save_as="feed.xml",
limit=10,
encoding="UTF-8",
pretty=True,
):
passthrough, stream = itertools.tee(stream)
# In order to decrease amount of traffic required to deliver feed content
# (and thus increase the throughput), the number of items in the feed is
# usually limited to the "N" latest items. This is handy because feed is
# usually used to deliver news, and news are known to get outdated.
stream = sorted(stream, key=lambda d: d["published"], reverse=True)
if limit:
stream = stream[:limit]
def _resolvefeed(name):
<|code_end|>
using the current file's imports:
import itertools
import pathlib
import feedgen.feed
import pkg_resources
import holocron
from ._misc import parameters, resolve_json_references
and any relevant context from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
#
# def resolve_json_references(value, context, keep_unknown=True):
# def _do_resolve(node):
# node = copy.copy(node)
#
# if isinstance(node, collections.abc.Mapping) and "$ref" in node:
# uri, fragment = urllib.parse.urldefrag(node["$ref"])
# try:
# return jsonpointer.resolve_pointer(context[uri], fragment)
# except KeyError:
# if keep_unknown:
# return node
# raise
# elif isinstance(node, collections.abc.Mapping):
# for k, v in node.items():
# node[k] = _do_resolve(v)
# elif isinstance(node, collections.abc.Sequence) and not isinstance(node, str):
# if not isinstance(node, collections.abc.MutableSequence):
# node = list(node)
#
# for i in range(len(node)):
# node[i] = _do_resolve(node[i])
# return node
#
# return _do_resolve(value)
. Output only the next line. | return resolve_json_references(feed.get(name), {"feed:": feed}) |
Given the code snippet: <|code_start|>"""Convert Markdown into HTML."""
_top_heading_re = re.compile(
(
# Ignore optional newlines at the beginning of content, as well as
# ignore character '#' preceded before heading if any.
r"\n*#?"
# Capture heading text regardless of which syntax is used, in other
# words capture either text after '#' or text underlined with '='
# at the beginning of contnet.
r"(?P<heading>(?<=#)[^\n#]+|[^\n]+(?=\n=))"
# Ignore underline of '=' if corresponding syntax for heading is
# used, so it won't be matched by ANY pattern of content below.
r"(?:\n=+)?"
r"\s*(?P<content>.*)"
),
re.DOTALL,
)
<|code_end|>
, generate the next line using the imports in this file:
import re
import markdown
from ._misc import parameters
and context (functions, classes, or occasionally code) from other files:
# Path: src/holocron/_processors/_misc.py
# class parameters:
# def __init__(self, *, fallback=None, jsonschema=None):
# self._fallback = fallback or {}
# self._jsonschema = jsonschema
#
# def __call__(self, fn):
# @functools.wraps(fn)
# def wrapper(app, *args, **kwargs):
# signature = inspect.signature(fn)
# arguments = signature.bind_partial(app, *args, **kwargs).arguments
#
# # First two arguments always are an application instance and a
# # stream of items to process. Since they are passed by Holocron
# # core as positional arguments, there's no real need to check their
# # schema, so we strip them away.
# arguments = dict(list(arguments.items())[2:])
# parameters = dict(list(signature.parameters.items())[2:])
#
# # If some parameter has not been passed, a value from a fallback
# # must be used instead (if any).
# for param in parameters:
# if param not in arguments:
# try:
# value = resolve_json_references(
# {"$ref": self._fallback[param]},
# {"metadata:": app.metadata},
# )
# except (jsonpointer.JsonPointerException, KeyError):
# continue
#
# # We need to save resolved value in both arguments and
# # kwargs mappings, because the former is used to *validate*
# # passed arguments, and the latter to supply a value from a
# # fallback.
# arguments[param] = kwargs[param] = value
#
# if self._jsonschema:
# try:
# format_checker = jsonschema.FormatChecker()
#
# @format_checker.checks("encoding", (LookupError,))
# def is_encoding(value):
# if isinstance(value, str):
# import codecs
#
# return codecs.lookup(value)
#
# @format_checker.checks("timezone", ())
# def is_timezone(value):
# if isinstance(value, str):
# import dateutil.tz
#
# return dateutil.tz.gettz(value)
#
# @format_checker.checks("path", (TypeError,))
# def is_path(value):
# if isinstance(value, str):
# import pathlib
#
# return pathlib.Path(value)
#
# jsonschema.validate(
# arguments,
# self._jsonschema,
# format_checker=format_checker,
# )
# except jsonschema.exceptions.ValidationError as exc:
# message = exc.message
#
# if exc.absolute_path:
# message = f"{'.'.join(exc.absolute_path)}: {exc.message}"
#
# raise ValueError(message)
#
# return fn(app, *args, **kwargs)
#
# return wrapper
. Output only the next line. | @parameters( |
Given the code snippet: <|code_start|> # instance. This is, to be honest, the main purpose of this factory
# function, because otherwise one must create an Application instance
# directly.
import_processors.process(
instance,
[],
imports=[
"archive = holocron._processors.archive:process",
"chain = holocron._processors.chain:process",
"commonmark = holocron._processors.commonmark:process",
"feed = holocron._processors.feed:process",
"frontmatter = holocron._processors.frontmatter:process",
"import-processors = holocron._processors.import_processors:process",
"jinja2 = holocron._processors.jinja2:process",
"markdown = holocron._processors.markdown:process",
"metadata = holocron._processors.metadata:process",
"pipe = holocron._processors.pipe:process",
"prettyuri = holocron._processors.prettyuri:process",
"restructuredtext = holocron._processors.restructuredtext:process",
"save = holocron._processors.save:process",
"sitemap = holocron._processors.sitemap:process",
"source = holocron._processors.source:process",
"todatetime = holocron._processors.todatetime:process",
],
)
# When is the only known processor wrapper, and, frankly, we don't expect
# more. Processor wrappers are mere hacks to avoid hardcoding yet provide
# better syntax for wrapping processors. So let's hardcode that knowledge
# here, and think later about general approach when the need arise.
<|code_end|>
, generate the next line using the imports in this file:
from .._processors import import_processors, when
from . import Application
and context (functions, classes, or occasionally code) from other files:
# Path: src/holocron/_processors/import_processors.py
# def process(app, items, *, imports, from_=None):
# def passgen(app, items):
#
# Path: src/holocron/_processors/when.py
# def _re_match(value, pattern, flags=0):
# def __init__(self):
# def eval(self, cond, **context):
# def process(app, stream, processor, *_condition, condition=None):
# def smartstream():
# class _ConditionEvaluator:
#
# Path: src/holocron/_core/application.py
# class Application:
# """Application instance orchestrates processors execution."""
#
# _processor_reserved_props = {"name", "args"}
#
# def __init__(self, metadata=None):
# # Metadata is a KV store shared between processors. It serves two
# # purposes: first, metadata contains an application level data, and
# # secondly, it's the only way to consume artifacts produced by one
# # processor from another processor.
# #
# # ChainMap is used to prevent writes to original metadata mapping,
# # and thus making it easier to distinguish initial metadata values
# # from the one set by processors in the mid of troubleshooting.
# self._metadata = collections.ChainMap({}, metadata or {})
#
# # Processors are (normally) stateless functions that receive an input
# # stream of items and produce an output stream of items. This property
# # keeps track of known processors, and is used to retrieve processors
# # one someone asked to execute a pipe.
# self._processors = {}
#
# # Processor wrappers are processors that somehow wrap another
# # processor. They receive wrapped processor as input argument and are
# # fully responsible to invoke it when necessary. The only reason why
# # this special type of processors exists in the first place is because
# # Holocron implements syntax sugar for them. Other than that, there is
# # no difference indeed.
# self._processor_wrappers = set()
#
# # Pipes are sets of processors connected in series, where the output
# # of one processor is the input of the next one. This property keeps
# # track of known pipes, and is used to execute processors in sequence
# # when invoked.
# self._pipes = {}
#
# @property
# def metadata(self):
# return self._metadata
#
# def add_processor(self, name, processor):
# if name in self._processors:
# _logger.warning("processor override: '%s'", name)
# self._processors[name] = processor
#
# def add_processor_wrapper(self, name, processor):
# if name in self._processor_reserved_props:
# raise ValueError(f"illegal wrapper name: {name}")
#
# self.add_processor(name, processor)
# self._processor_wrappers.add(name)
#
# def add_pipe(self, name, pipe):
# if name in self._pipes:
# _logger.warning("pipe override: '%s'", name)
# self._pipes[name] = pipe
#
# def invoke(self, pipe, stream=None):
# # A given 'pipe' may be either a pipe name or an actual
# # pipe definition. That's why need this ugly type check because any
# # string value is considered as a name. Passing an actual pipe is
# # very handy in couple of use cases, such as running a sub pipe
# # from some processor.
# if isinstance(pipe, str):
# if pipe not in self._pipes:
# raise ValueError(f"no such pipe: '{pipe}'")
# pipe = self._pipes[pipe]
#
# # Since processors expect an input stream to be an iterator, we cast a
# # given stream explicitly to an iterator even though everything will
# # probably work even if it's not. We just want to respect and enforce
# # established contracts.
# stream = iter(stream or [])
#
# for processor in pipe:
# # Resolve every JSON reference we encounter in a processor's
# # parameters. Please note, we're doing this so late because we
# # want to take into account metadata and other changes produced
# # by previous processors in the pipe.
# processor = _misc.resolve_json_references(
# processor, {"metadata:": self.metadata}
# )
#
# name, args, kwargs = _unpack_and_wrap_processor(
# processor, self._processor_reserved_props
# )
#
# if name not in self._processors:
# raise ValueError(f"no such processor: '{name}'")
#
# processfn = self._processors[name]
# stream = processfn(self, stream, *args, **kwargs)
#
# yield from stream
. Output only the next line. | instance.add_processor_wrapper("when", when.process) |
Based on the snippet: <|code_start|>#-*- coding: utf-8 -*-
"""
Copyright (C) 2015 Michal Goral.
This file is part of Subconvert
Subconvert is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Subconvert is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Subconvert. If not, see <http://www.gnu.org/licenses/>.
"""
def abcMatcher(sub):
return "abc" in sub.text
class TestSubtitleSearch(unittest.TestCase):
"""SubtitleSearch test suite."""
# for matchText tests
<|code_end|>
, predict the immediate next line with the help of imports:
import unittest
from subconvert.utils.SubtitleSearch import *
from tests.Mocks import SubtitleMock
and context (classes, functions, sometimes code) from other files:
# Path: tests/Mocks.py
# class SubtitleMock:
# def __init__(self, start = None, end = None, text = None):
# self.start = start
# self.end = end
# self.text = text
# self.fps = start.fps if start is not None else None
#
# def __eq__(self, other):
# return self.text == other.text
#
# def __ne__(self, other):
# return self.text != other.text
#
# def change(self, start = None, end = None, text = None):
# if start is not None:
# self.start = start
# if end is not None:
# self.end = end
# if text is not None:
# self.text = text
#
# def clone(self):
# return self
. Output only the next line. | sub = SubtitleMock(text = "zażółć Gęślą jaźń") |
Based on the snippet: <|code_start|># Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Get data loader for classification."""
CNN_MODALITIES = ['rgb', 'oflow', 'depth']
GRU_MODALITIES = ['jjd', 'jjv', 'jld']
def get_dataloader(opt):
"""Constructs dataset with transforms, and wrap it in a data loader."""
idx_t = 0 if opt.split == 'train' else 1
xforms = []
for modality in opt.modalities:
if opt.dset == 'ntu-rgbd':
<|code_end|>
, predict the immediate next line with the help of imports:
import os
import third_party.two_stream_pytorch.video_transforms as vtransforms
import torch.utils.data as data
import torchvision.transforms as transforms
from data_pipeline.ntu_rgbd import NTU_RGBD
and context (classes, functions, sometimes code) from other files:
# Path: data_pipeline/ntu_rgbd.py
# class NTU_RGBD(data.Dataset):
# """Class for NTU RGBD Dataset"""
# MEAN_STD = {
# 'rgb': ([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
# 'oflow': (0.5, 1. / 255),
# 'depth': (4084.1213735 / (255 * 256), 1008.31271366 / (255 * 256)),
# 'jjd': (0.53968146, 0.32319776),
# 'jjv': (0, 0.35953656),
# 'jld': (0.15982792, 0.12776225)
# }
#
# def __init__(self,
# root,
# split,
# evaluation,
# modalities,
# n_samples,
# n_frames,
# downsample,
# transforms=None,
# subsample=0):
# """NTU RGBD dataset.
#
# Args:
# root: dataset root
# split: train to randomly select n_samples samples; test to uniformly
# select n_samples spanning the whole video
# evaluation: one of ['cross-subject', 'cross-view']
# modalities: subset of ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
# n_samples: number of samples from the video
# n_frames: number of frames per sample
# downsample: fps /= downsample
# transforms: transformations to apply to data
# subsample: number of samples per class. 0 if using full dataset.
# """
# modalities = utils.unsqueeze(modalities)
# transforms = utils.unsqueeze(transforms)
#
# # Loader functions
# loaders = {
# 'rgb': rgb_loader,
# 'oflow': oflow_loader,
# 'depth': depth_loader,
# 'jjd': jjd_loader,
# 'jjv': jjv_loader,
# 'jld': jld_loader
# }
# has_skel = any([m in ALL_MODALITIES[3:] for m in modalities])
# dataset = make_dataset(root, has_skel, evaluation, split, subsample)
#
# self.root = root
# self.split = split
# self.modalities = modalities
# self.n_samples = n_samples
# self.n_frames = n_frames
# self.downsample = downsample
# self.transforms = transforms
# self.loaders = loaders
# self.dataset = dataset
#
# def __getitem__(self, idx):
# vid_name, label = self.dataset[idx]
# # -1 because len(oflow) = len(rgb)-1
# length = len(
# glob.glob(
# os.path.join(self.root, rgb_folder_name, vid_name,
# '*.' + rgb_pattern.split('.')[1]))) - 1
#
# length_ds = length // self.downsample
# if length_ds < self.n_frames:
# frame_ids_s = np.arange(0, length_ds, 1) # arange: exclusive
# frame_ids_s = np.concatenate(
# (frame_ids_s,
# np.array([frame_ids_s[-1]] * (self.n_frames - length_ds))))
# frame_ids = np.repeat(
# frame_ids_s[np.newaxis, :], self.n_samples,
# axis=0).astype(int) * self.downsample
# else:
# if self.split == 'train': # randomly select n_samples samples
# starts = np.random.randint(0, length_ds - self.n_frames + 1,
# self.n_samples) # randint: exclusive
# # uniformly select n_samples spanning the whole video
# elif self.split == 'val' or self.split == 'test':
# starts = np.linspace(
# 0, length_ds - self.n_frames, self.n_samples,
# dtype=int) # linspace: inclusive
# else:
# starts = np.arange(0,
# length_ds - self.n_frames + 1) # arange: exclusive
#
# frame_ids = []
# for start in starts:
# frame_ids_s = np.arange(start, start + self.n_frames,
# 1) * self.downsample # arange: exclusive
# frame_ids.append(frame_ids_s)
# frame_ids = np.stack(frame_ids)
#
# # load raw data
# inputs = []
# for modality in self.modalities:
# vid = self.loaders[modality](self.root, vid_name, frame_ids)
# inputs.append(vid)
#
# # transform
# if self.transforms is not None:
# for i in range(len(self.transforms)):
# if self.transforms[i] is not None:
# inputs[i] = self.transforms[i](inputs[i])
#
# return inputs, label
#
# def __len__(self):
# return len(self.dataset)
. Output only the next line. | mean, std = NTU_RGBD.MEAN_STD[modality] |
Based on the snippet: <|code_start|> lrs = model.lr_decay()
logger.log('***** lr decay *****: {}'.format(lrs))
def test(opt, model, dataloader):
# Logging
logger = logging.Logger(opt.ckpt_path, opt.split)
stats = logging.Statistics(opt.ckpt_path, opt.split)
logger.log(opt)
model.load(opt.load_ckpt_paths, opt.load_opts, opt.load_epoch)
all_scores = []
video_names = []
for step, data in enumerate(dataloader, 1):
inputs, label, vid_name = data
info_acc, logits, scores = model.test(inputs, label, opt.timestep)
all_scores.append(scores)
video_names.append(vid_name[0])
update = stats.update(logits.shape[0], info_acc)
if utils.is_due(step, opt.print_every):
utils.info('step {}/{}: {}'.format(step, len(dataloader), update))
logger.log('[Summary] {}'.format(stats.summarize()))
# Evaluate
iou_thresholds = [0.1, 0.3, 0.5]
groundtruth_dir = os.path.join(opt.dset_path, opt.dset, 'groundtruth',
'validation/cross-subject')
assert os.path.exists(groundtruth_dir), '{} does not exist'.format(groundtruth_dir)
<|code_end|>
, predict the immediate next line with the help of imports:
import argparse
import os
import utils
import utils.logging as logging
from .get_dataloader import *
from .get_model import *
from .evaluation.map import calc_map
and context (classes, functions, sometimes code) from other files:
# Path: detection/evaluation/map.py
# def calc_map(opt, video_scores, video_names, groundtruth_dir, iou_thresholds):
# """Get mAP (action) for IoU 0.1, 0.3 and 0.5."""
# activity_threshold = 0.4
# num_videos = len(video_scores)
# video_files = [name + '.txt' for name in video_names]
#
# v_props = []
# for i in range(num_videos):
# # video_name = video_names[i]
# scores = video_scores[i]
# segments = get_segments(scores, activity_threshold)
#
# prop = []
# for segment in segments:
# start, end, cls, score = segment
# # start, end are indices of clips. Transform to frame index.
# start_index = start * opt.step_size * opt.downsample
# end_index = (
# (end - 1) * opt.step_size + opt.n_frames) * opt.downsample - 1
# prop.append([cls, start_index, end_index, score, video_files[i]])
# v_props.append(prop)
#
# # Run evaluation on different IoU thresholds.
# mean_aps = []
# for iou in iou_thresholds:
# mean_ap = process(v_props, video_files, groundtruth_dir, iou)
# mean_aps.append(mean_ap)
# return mean_aps
. Output only the next line. | mean_aps = calc_map(opt, all_scores, video_names, groundtruth_dir, iou_thresholds) |
Given the code snippet: <|code_start|>#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""TODO: One-sentence doc string."""
def visualize_rgb(images):
"""Visualize RGB modality."""
images = utils.to_numpy(images)
mean = np.array([0.485, 0.456, 0.406])
std = np.array([0.229, 0.224, 0.225])
images = np.moveaxis(images, -3, -1)
images = images*std+mean
images = np.clip(images*255, 0, 255)
images = images[..., ::-1].astype(np.uint8)
images = images[0, 0] # subsample
<|code_end|>
, generate the next line using the imports in this file:
import os
import cv2
import numpy as np
import utils
from utils import imgproc
and context (functions, classes, or occasionally code) from other files:
# Path: utils/imgproc.py
# def imread_rgb(dset, path):
# def imread_oflow(dset, *paths):
# def imread_depth(dset, path):
# def inpaint(img, threshold=1):
# def resize(video, size, interpolation):
# def proc_oflow(images):
. Output only the next line. | imgproc.save_avi('/home/luoa/research/rgb.avi', images) |
Predict the next line for this snippet: <|code_start|># Copyright 2018 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Get data loader for detection."""
CNN_MODALITIES = ['rgb', 'oflow', 'depth']
GRU_MODALITIES = ['jjd', 'jjv', 'jld']
def get_dataloader(opt):
idx_t = 0 if opt.split == 'train' else 1
xforms = []
for modality in opt.modalities:
if opt.dset == 'pku-mmd':
<|code_end|>
with the help of current file imports:
import os
import torch.utils.data as data
import torchvision.transforms as transforms
import third_party.two_stream_pytorch.video_transforms as vtransforms
from data_pipeline.pku_mmd import PKU_MMD
and context from other files:
# Path: data_pipeline/pku_mmd.py
# class PKU_MMD(data.Dataset):
# MEAN_STD = {
# 'rgb': ([0.485, 0.456, 0.406], [0.229, 0.224, 0.225]),
# 'oflow': (0.5, 1. / 255),
# 'depth': (4084.1213735 / (255 * 256), 1008.31271366 / (255 * 256)),
# 'jjd': (0.53968146, 0.32319776),
# 'jjv': (0, 0.35953656),
# 'jld': (0.15982792, 0.12776225)
# }
#
# def __init__(self,
# root,
# mode,
# evaluation,
# modalities,
# step_size,
# n_frames,
# downsample,
# timestep,
# transforms=None,
# subsample_rate=0):
# """PKU_MMD Constructor.
#
# Contructs the PKU_MMD dataset.
#
# Args:
# root: dataset root
# mode: train (0), test (1)
# evaluation: one of ['cross-subject', 'cross-view']
# modalities: one of ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
# step_size: step size between clips
# n_frames: number of frames per clip
# transforms: transform.
# subsample_rate: sampling rate
# downsample: fps /= downsample
# timestep: number of clips in a sequence.
# """
# modalities = utils.unsqueeze(modalities)
# transforms = utils.unsqueeze(transforms)
#
# loaders = {
# 'rgb': rgb_loader,
# 'oflow': oflow_loader,
# 'depth': depth_loader,
# 'jjd': jjd_loader,
# 'jjv': jjv_loader,
# 'jld': jld_loader
# }
# self.loaders = loaders
# self.dataset = make_dataset(
# root, evaluation, mode, subsample_rate=subsample_rate)
#
# self.root = root
# self.modalities = modalities
#
# self.step_size = step_size
# self.n_frames = n_frames
# self.downsample = downsample
# self.timestep = timestep
# self.all = mode != 0 # True if test mode, return the entire video
# self.transforms = transforms
#
# def __getitem__(self, idx):
# vid_name, label = self.dataset[idx]
# # label: action_id, start_frame, end_frame, confidence
# # -1 because len(oflow) = len(rgb)-1
# length = len(
# glob.glob(
# os.path.join(self.root, rgb_folder_name, vid_name,
# '*.' + rgb_pattern.split('.')[1]))) - 1
# length_ds = length // self.downsample
#
# if self.all:
# # Return entire video
# starts = np.arange(0, length_ds - self.n_frames + 1,
# self.step_size) # arange: exclusive
# else:
# start = random.randint(
# 0, length_ds - ((self.timestep - 1) * self.step_size + self.n_frames))
# starts = [start + i * self.step_size for i in range(self.timestep)
# ] # randint: inclusive
#
# frame_ids = []
# for start in starts:
# frame_ids_s = np.arange(start, start + self.n_frames,
# 1) * self.downsample # arange: exclusive
# frame_ids.append(frame_ids_s)
# frame_ids = np.stack(frame_ids)
#
# targets = []
# for frame_ids_s in frame_ids:
# target = 0
# max_ratio = 0.5
# for action_id, start_frame, end_frame, _ in label:
# overlap = get_overlap([frame_ids_s[0], frame_ids_s[-1] - 1],
# [start_frame, end_frame - 1])
# ratio = overlap / (frame_ids_s[-1] - frame_ids_s[0])
# if ratio > max_ratio:
# target = int(action_id)
# targets.append(target)
# targets = np.stack(targets)
#
# # load raw data
# inputs = []
# for modality in self.modalities:
# vid = self.loaders[modality](self.root, vid_name, frame_ids)
# inputs.append(vid)
#
# # transform
# if self.transforms is not None:
# for i, transform in enumerate(self.transforms):
# if transform is not None:
# inputs[i] = transform(inputs[i])
#
# return inputs, targets, vid_name
#
# def __len__(self):
# return len(self.dataset)
, which may contain function names, class names, or code. Output only the next line. | mean, std = PKU_MMD.MEAN_STD[modality] |
Given snippet: <|code_start|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Get detection model."""
ALL_MODALITIES = ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
def get_model(opt):
if opt.dset == 'pku-mmd':
n_classes = 51
all_input_sizes = [-1, -1, -1, 276, 828, 836]
all_n_channels = [3, 2, 1, -1, -1, -1]
else:
raise NotImplementedError
n_channels = [all_n_channels[ALL_MODALITIES.index(m)] for m in opt.modalities]
input_sizes = [
all_input_sizes[ALL_MODALITIES.index(m)] for m in opt.modalities
]
if len(opt.modalities) == 1:
# Single stream
index = 0
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from .model import SingleStream
from .model import GraphDistillation
and context:
# Path: detection/model.py
# class SingleStream(BaseModel):
# """Model to train a single modality.
# """
#
# def __init__(self, *args, **kwargs):
# super(SingleStream, self).__init__(*args, **kwargs)
# assert len(self.embeds) == 1
#
# def _backward(self, results, label):
# logits, _ = results
# logits = logits.view(-1, logits.size(-1))
#
# loss = self.criterion_cls(logits, label.view(-1))
# loss.backward()
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# for optimizer in self.optimizers:
# optimizer.step()
# optimizer.zero_grad()
#
# info_loss = [('loss', loss.data[0])]
# return info_loss
#
# Path: detection/model.py
# class GraphDistillation(BaseModel):
# """Model to train with graph distillation.
#
# xfer_to is the modality to train.
# """
#
# def __init__(self, modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, hidden_size_seq, n_layers_seq,
# dropout_seq, bg_w, lr, lr_decay_rate, to_idx, ckpt_path,
# w_losses, w_modalities, metric, xfer_to, gd_size, gd_reg):
# super(GraphDistillation, self).__init__(\
# modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, hidden_size_seq, n_layers_seq, dropout_seq,
# bg_w, lr, lr_decay_rate, to_idx, ckpt_path)
#
# # Index of the modality to distill
# to_idx = self.modalities.index(xfer_to)
# from_idx = [x for x in range(len(self.modalities)) if x != to_idx]
# assert len(from_idx) >= 1
#
# # Prior
# w_modalities = [w_modalities[i] for i in from_idx
# ] # remove modality being transferred to
# gd_prior = utils.softmax(w_modalities, 0.25)
# # Distillation model
# self.distillation_kernel = \
# get_distillation_kernel(n_classes + 1, hidden_size, gd_size, to_idx, from_idx,
# gd_prior, gd_reg, w_losses, metric).cuda()
#
# # Add optimizer to self.optimizers
# gd_optimizer = optim.SGD(
# self.distillation_kernel.parameters(),
# lr=lr,
# momentum=0.9,
# weight_decay=5e-4)
# self.optimizers.append(gd_optimizer)
# self.lr_decay_rates.append(lr_decay_rate)
#
# self.xfer_to = xfer_to
# self.to_idx = to_idx
# self.from_idx = from_idx
#
# def _forward(self, inputs):
# logits, reprs = super(GraphDistillation, self)._forward(inputs)
# n_modalities, batch_size, length, _ = logits.size()
# logits = logits.view(n_modalities, batch_size * length, -1)
# reprs = reprs.view(n_modalities, batch_size * length, -1)
# # Get edge weights of the graph
# graph = self.distillation_kernel(logits, reprs)
# return logits, reprs, graph
#
# def _backward(self, results, label):
# logits, reprs, graph = results # graph: size (len(from_idx) x batch_size)
# label = label.view(-1)
# info_loss = []
#
# # Classification loss
# loss_cls = self.criterion_cls(logits[self.to_idx], label)
# # Graph distillation loss
# loss_reg, loss_logit, loss_repr = \
# self.distillation_kernel.distillation_loss(logits, reprs, graph)
#
# loss = loss_cls + loss_reg + loss_logit + loss_repr
# loss.backward()
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# for optimizer in self.optimizers:
# optimizer.step()
# optimizer.zero_grad()
#
# info_loss = [('loss_cls', loss_cls.data[0]), ('loss_reg', loss_reg.data[0]),
# ('loss_logit', loss_logit.data[0]), ('loss_repr',
# loss_repr.data[0])]
# return info_loss
which might include code, classes, or functions. Output only the next line. | model = SingleStream(opt.modalities, n_classes, opt.n_frames, n_channels, |
Here is a snippet: <|code_start|>
"""Get detection model."""
ALL_MODALITIES = ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
def get_model(opt):
if opt.dset == 'pku-mmd':
n_classes = 51
all_input_sizes = [-1, -1, -1, 276, 828, 836]
all_n_channels = [3, 2, 1, -1, -1, -1]
else:
raise NotImplementedError
n_channels = [all_n_channels[ALL_MODALITIES.index(m)] for m in opt.modalities]
input_sizes = [
all_input_sizes[ALL_MODALITIES.index(m)] for m in opt.modalities
]
if len(opt.modalities) == 1:
# Single stream
index = 0
model = SingleStream(opt.modalities, n_classes, opt.n_frames, n_channels,
input_sizes, opt.hidden_size, opt.n_layers,
opt.dropout, opt.hidden_size_seq, opt.n_layers_seq,
opt.dropout_seq, opt.bg_w, opt.lr, opt.lr_decay_rate,
index, opt.ckpt_path)
else:
index = opt.modalities.index(opt.xfer_to)
<|code_end|>
. Write the next line using the current file imports:
from .model import SingleStream
from .model import GraphDistillation
and context from other files:
# Path: detection/model.py
# class SingleStream(BaseModel):
# """Model to train a single modality.
# """
#
# def __init__(self, *args, **kwargs):
# super(SingleStream, self).__init__(*args, **kwargs)
# assert len(self.embeds) == 1
#
# def _backward(self, results, label):
# logits, _ = results
# logits = logits.view(-1, logits.size(-1))
#
# loss = self.criterion_cls(logits, label.view(-1))
# loss.backward()
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# for optimizer in self.optimizers:
# optimizer.step()
# optimizer.zero_grad()
#
# info_loss = [('loss', loss.data[0])]
# return info_loss
#
# Path: detection/model.py
# class GraphDistillation(BaseModel):
# """Model to train with graph distillation.
#
# xfer_to is the modality to train.
# """
#
# def __init__(self, modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, hidden_size_seq, n_layers_seq,
# dropout_seq, bg_w, lr, lr_decay_rate, to_idx, ckpt_path,
# w_losses, w_modalities, metric, xfer_to, gd_size, gd_reg):
# super(GraphDistillation, self).__init__(\
# modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, hidden_size_seq, n_layers_seq, dropout_seq,
# bg_w, lr, lr_decay_rate, to_idx, ckpt_path)
#
# # Index of the modality to distill
# to_idx = self.modalities.index(xfer_to)
# from_idx = [x for x in range(len(self.modalities)) if x != to_idx]
# assert len(from_idx) >= 1
#
# # Prior
# w_modalities = [w_modalities[i] for i in from_idx
# ] # remove modality being transferred to
# gd_prior = utils.softmax(w_modalities, 0.25)
# # Distillation model
# self.distillation_kernel = \
# get_distillation_kernel(n_classes + 1, hidden_size, gd_size, to_idx, from_idx,
# gd_prior, gd_reg, w_losses, metric).cuda()
#
# # Add optimizer to self.optimizers
# gd_optimizer = optim.SGD(
# self.distillation_kernel.parameters(),
# lr=lr,
# momentum=0.9,
# weight_decay=5e-4)
# self.optimizers.append(gd_optimizer)
# self.lr_decay_rates.append(lr_decay_rate)
#
# self.xfer_to = xfer_to
# self.to_idx = to_idx
# self.from_idx = from_idx
#
# def _forward(self, inputs):
# logits, reprs = super(GraphDistillation, self)._forward(inputs)
# n_modalities, batch_size, length, _ = logits.size()
# logits = logits.view(n_modalities, batch_size * length, -1)
# reprs = reprs.view(n_modalities, batch_size * length, -1)
# # Get edge weights of the graph
# graph = self.distillation_kernel(logits, reprs)
# return logits, reprs, graph
#
# def _backward(self, results, label):
# logits, reprs, graph = results # graph: size (len(from_idx) x batch_size)
# label = label.view(-1)
# info_loss = []
#
# # Classification loss
# loss_cls = self.criterion_cls(logits[self.to_idx], label)
# # Graph distillation loss
# loss_reg, loss_logit, loss_repr = \
# self.distillation_kernel.distillation_loss(logits, reprs, graph)
#
# loss = loss_cls + loss_reg + loss_logit + loss_repr
# loss.backward()
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# for optimizer in self.optimizers:
# optimizer.step()
# optimizer.zero_grad()
#
# info_loss = [('loss_cls', loss_cls.data[0]), ('loss_reg', loss_reg.data[0]),
# ('loss_logit', loss_logit.data[0]), ('loss_repr',
# loss_repr.data[0])]
# return info_loss
, which may include functions, classes, or code. Output only the next line. | model = GraphDistillation( |
Here is a snippet: <|code_start|># See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Get classification model."""
ALL_MODALITIES = ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
def get_model(opt):
"""Get model given the dataset and modalities."""
if opt.dset == 'ntu-rgbd':
n_classes = 60
all_input_sizes = [-1, -1, -1, 276, 828, 836]
all_n_channels = [3, 2, 1, -1, -1, -1]
else:
raise NotImplementedError
n_channels = [all_n_channels[ALL_MODALITIES.index(m)] for m in opt.modalities]
input_sizes = [
all_input_sizes[ALL_MODALITIES.index(m)] for m in opt.modalities
]
if len(opt.modalities) == 1:
# Single stream
model = SingleStream(opt.modalities, n_classes, opt.n_frames, n_channels,
input_sizes, opt.hidden_size, opt.n_layers,
opt.dropout, opt.lr, opt.lr_decay_rate, opt.ckpt_path)
else:
<|code_end|>
. Write the next line using the current file imports:
from .model import GraphDistillation
from .model import SingleStream
and context from other files:
# Path: classification/model.py
# class GraphDistillation(BaseModel):
# """Model to train with graph distillation.
#
# xfer_to is the modality to train.
# """
#
# def __init__(self, modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, lr, lr_decay_rate, ckpt_path,
# w_losses, w_modalities, metric, xfer_to, gd_size, gd_reg):
# super(GraphDistillation, self).__init__( \
# modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, lr, lr_decay_rate, ckpt_path)
#
# # Index of the modality to distill
# to_idx = self.modalities.index(xfer_to)
# from_idx = [x for x in range(len(self.modalities)) if x != to_idx]
# assert len(from_idx) >= 1
#
# # Prior
# w_modalities = [w_modalities[i] for i in from_idx
# ] # remove modality being transferred to
# gd_prior = utils.softmax(w_modalities, 0.25)
# # Distillation model
# self.distillation_kernel = get_distillation_kernel(
# n_classes, hidden_size, gd_size, to_idx, from_idx, gd_prior, gd_reg,
# w_losses, metric).cuda()
#
# params = list(self.embeds[to_idx].parameters()) + \
# list(self.distillation_kernel.parameters())
# self.optimizer = optim.SGD(params, lr=lr, momentum=0.9, weight_decay=5e-4)
#
# self.xfer_to = xfer_to
# self.to_idx = to_idx
# self.from_idx = from_idx
#
# def _forward(self, inputs):
# logits, reprs = super(GraphDistillation, self)._forward(inputs)
# # Get edge weights of the graph
# graph = self.distillation_kernel(logits, reprs)
# return logits, reprs, graph
#
# def _backward(self, results, label):
# logits, reprs, graph = results # graph: size = len(from_idx) x batch_size
# info_loss = []
#
# # Classification loss
# loss_cls = self.criterion_cls(logits[self.to_idx], label)
# # Graph distillation loss
# loss_reg, loss_logit, loss_repr = \
# self.distillation_kernel.distillation_loss(logits, reprs, graph)
#
# loss = loss_cls + loss_reg + loss_logit + loss_repr
# loss.backward()
# if self.xfer_to in GRU_MODALITIES:
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# self.optimizer.step()
# self.optimizer.zero_grad()
#
# info_loss = [('loss_cls', loss_cls.data[0]), ('loss_reg', loss_reg.data[0]),
# ('loss_logit', loss_logit.data[0]), ('loss_repr',
# loss_repr.data[0])]
# return info_loss
#
# Path: classification/model.py
# class SingleStream(BaseModel):
# """Model to train a single modality."""
#
# def __init__(self, *args, **kwargs):
# super(SingleStream, self).__init__(*args, **kwargs)
# assert len(self.embeds) == 1
# self.optimizer = optim.SGD(
# self.embeds[0].parameters(),
# lr=self.lr,
# momentum=0.9,
# weight_decay=5e-4)
# self.to_idx = 0
#
# def _backward(self, results, label):
# logits, _ = results
# logits = logits.view(*logits.size()[1:])
# loss = self.criterion_cls(logits, label)
# loss.backward()
# self.optimizer.step()
# self.optimizer.zero_grad()
#
# info_loss = [('loss', loss.data[0])]
# return info_loss
, which may include functions, classes, or code. Output only the next line. | model = GraphDistillation( |
Based on the snippet: <|code_start|>#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Get classification model."""
ALL_MODALITIES = ['rgb', 'oflow', 'depth', 'jjd', 'jjv', 'jld']
def get_model(opt):
"""Get model given the dataset and modalities."""
if opt.dset == 'ntu-rgbd':
n_classes = 60
all_input_sizes = [-1, -1, -1, 276, 828, 836]
all_n_channels = [3, 2, 1, -1, -1, -1]
else:
raise NotImplementedError
n_channels = [all_n_channels[ALL_MODALITIES.index(m)] for m in opt.modalities]
input_sizes = [
all_input_sizes[ALL_MODALITIES.index(m)] for m in opt.modalities
]
if len(opt.modalities) == 1:
# Single stream
<|code_end|>
, predict the immediate next line with the help of imports:
from .model import GraphDistillation
from .model import SingleStream
and context (classes, functions, sometimes code) from other files:
# Path: classification/model.py
# class GraphDistillation(BaseModel):
# """Model to train with graph distillation.
#
# xfer_to is the modality to train.
# """
#
# def __init__(self, modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, lr, lr_decay_rate, ckpt_path,
# w_losses, w_modalities, metric, xfer_to, gd_size, gd_reg):
# super(GraphDistillation, self).__init__( \
# modalities, n_classes, n_frames, n_channels, input_sizes,
# hidden_size, n_layers, dropout, lr, lr_decay_rate, ckpt_path)
#
# # Index of the modality to distill
# to_idx = self.modalities.index(xfer_to)
# from_idx = [x for x in range(len(self.modalities)) if x != to_idx]
# assert len(from_idx) >= 1
#
# # Prior
# w_modalities = [w_modalities[i] for i in from_idx
# ] # remove modality being transferred to
# gd_prior = utils.softmax(w_modalities, 0.25)
# # Distillation model
# self.distillation_kernel = get_distillation_kernel(
# n_classes, hidden_size, gd_size, to_idx, from_idx, gd_prior, gd_reg,
# w_losses, metric).cuda()
#
# params = list(self.embeds[to_idx].parameters()) + \
# list(self.distillation_kernel.parameters())
# self.optimizer = optim.SGD(params, lr=lr, momentum=0.9, weight_decay=5e-4)
#
# self.xfer_to = xfer_to
# self.to_idx = to_idx
# self.from_idx = from_idx
#
# def _forward(self, inputs):
# logits, reprs = super(GraphDistillation, self)._forward(inputs)
# # Get edge weights of the graph
# graph = self.distillation_kernel(logits, reprs)
# return logits, reprs, graph
#
# def _backward(self, results, label):
# logits, reprs, graph = results # graph: size = len(from_idx) x batch_size
# info_loss = []
#
# # Classification loss
# loss_cls = self.criterion_cls(logits[self.to_idx], label)
# # Graph distillation loss
# loss_reg, loss_logit, loss_repr = \
# self.distillation_kernel.distillation_loss(logits, reprs, graph)
#
# loss = loss_cls + loss_reg + loss_logit + loss_repr
# loss.backward()
# if self.xfer_to in GRU_MODALITIES:
# torch.nn.utils.clip_grad_norm(self.embeds[self.to_idx].parameters(), 5)
# self.optimizer.step()
# self.optimizer.zero_grad()
#
# info_loss = [('loss_cls', loss_cls.data[0]), ('loss_reg', loss_reg.data[0]),
# ('loss_logit', loss_logit.data[0]), ('loss_repr',
# loss_repr.data[0])]
# return info_loss
#
# Path: classification/model.py
# class SingleStream(BaseModel):
# """Model to train a single modality."""
#
# def __init__(self, *args, **kwargs):
# super(SingleStream, self).__init__(*args, **kwargs)
# assert len(self.embeds) == 1
# self.optimizer = optim.SGD(
# self.embeds[0].parameters(),
# lr=self.lr,
# momentum=0.9,
# weight_decay=5e-4)
# self.to_idx = 0
#
# def _backward(self, results, label):
# logits, _ = results
# logits = logits.view(*logits.size()[1:])
# loss = self.criterion_cls(logits, label)
# loss.backward()
# self.optimizer.step()
# self.optimizer.zero_grad()
#
# info_loss = [('loss', loss.data[0])]
# return info_loss
. Output only the next line. | model = SingleStream(opt.modalities, n_classes, opt.n_frames, n_channels, |
Based on the snippet: <|code_start|> smaller edge of the image will be matched to this number.
i.e, if height > width, then image will be rescaled to
(size * height / width, size)
interpolation (int, optional): Desired interpolation. Default is
``bilinear``
"""
def __init__(self, size, transform_pixel=False, interpolation='bilinear'):
""":param size: output size :param transform_pixel: transform pixel values for flow :param interpolation: 'bilinear', 'nearest'
"""
assert isinstance(size, int) or (isinstance(size, collections.Iterable) and
len(size) == 2)
self.size = size
self.transform_pixel = transform_pixel
self.interpolation = interpolation
def __call__(self, video):
"""Args: video (numpy.ndarray): Video to be scaled.
Returns:
numpy.ndarray: Rescaled video.
"""
w, h = video.shape[-2], video.shape[-3]
if isinstance(self.size, int):
if (w <= h and w == self.size) or (h <= w and h == self.size):
return video
if w < h:
ow = self.size
oh = int(self.size * h / w)
<|code_end|>
, predict the immediate next line with the help of imports:
import collections
import math
import numbers
import random
import numpy as np
import torch
import torch.nn.functional as F
import utils
from utils import imgproc
and context (classes, functions, sometimes code) from other files:
# Path: utils/imgproc.py
# def imread_rgb(dset, path):
# def imread_oflow(dset, *paths):
# def imread_depth(dset, path):
# def inpaint(img, threshold=1):
# def resize(video, size, interpolation):
# def proc_oflow(images):
. Output only the next line. | video = imgproc.resize(video, (oh, ow), self.interpolation) |
Here is a snippet: <|code_start|> def find_files_for_tar(self, context, silent_build):
"""
Return [(filename, arcname), ...] for all the files.
"""
if not context.enabled:
return
files = self.find_files(context, silent_build)
for path in files:
relname = os.path.relpath(path, context.parent_dir)
arcname = "./{0}".format(relname.encode("utf-8").decode("ascii", "ignore"))
if os.path.exists(path):
yield path, arcname
def find_files(self, context, silent_build):
"""
Find the set of files from our parent_dir that we care about
"""
first_layer = ["'{0}'".format(thing) for thing in os.listdir(context.parent_dir)]
output, status = command_output(
"find {0} -type l -or -type f {1} -follow -print".format(
" ".join(first_layer), context.find_options
),
cwd=context.parent_dir,
)
if status != 0:
if context.ignore_find_errors:
log.warning("The find command failed to run, will continue anyway")
else:
<|code_end|>
. Write the next line using the current file imports:
from harpoon.errors import HarpoonError, BadOption
from harpoon.helpers import a_temp_file
from contextlib import contextmanager
from io import BytesIO
from harpoon.ship.runner import Runner, ContainerRunner
import subprocess
import tempfile
import tarfile
import fnmatch
import logging
import shutil
import docker
import shlex
import os
import re
and context from other files:
# Path: harpoon/errors.py
# class HarpoonError(DelfickError):
# pass
#
# class BadOption(HarpoonError):
# desc = "Bad Option"
#
# Path: harpoon/helpers.py
# @contextmanager
# def a_temp_file():
# """Yield the name of a temporary file and ensure it's removed after use"""
# filename = None
# try:
# tmpfile = tempfile.NamedTemporaryFile(delete=False)
# filename = tmpfile.name
# yield tmpfile
# finally:
# if filename and os.path.exists(filename):
# os.remove(filename)
, which may include functions, classes, or code. Output only the next line. | raise HarpoonError( |
Continue the code snippet: <|code_start|> t.add(fle.name, arcname=arcname)
yield ContextWrapper(t, tmpfile)
@contextmanager
def the_context(self, content, silent_build=False):
"""Return either a file with the content written to it, or a whole new context tar"""
if isinstance(content, str):
with a_temp_file() as fle:
fle.write(content.encode("utf-8"))
fle.seek(0)
yield fle
elif "context" in content:
with ContextBuilder().make_context(
content["context"], silent_build=silent_build
) as wrapper:
wrapper.close()
yield wrapper.tmpfile
elif "image" in content:
with a_temp_file() as fle:
content["conf"].command = "yes"
with ContainerRunner(
Runner(), content["conf"], content["images"], detach=True, delete_anyway=True
):
try:
strm, stat = content["docker_api"].get_archive(
content["conf"].container_id, content["path"]
)
except docker.errors.NotFound:
<|code_end|>
. Use current file imports:
from harpoon.errors import HarpoonError, BadOption
from harpoon.helpers import a_temp_file
from contextlib import contextmanager
from io import BytesIO
from harpoon.ship.runner import Runner, ContainerRunner
import subprocess
import tempfile
import tarfile
import fnmatch
import logging
import shutil
import docker
import shlex
import os
import re
and context (classes, functions, or code) from other files:
# Path: harpoon/errors.py
# class HarpoonError(DelfickError):
# pass
#
# class BadOption(HarpoonError):
# desc = "Bad Option"
#
# Path: harpoon/helpers.py
# @contextmanager
# def a_temp_file():
# """Yield the name of a temporary file and ensure it's removed after use"""
# filename = None
# try:
# tmpfile = tempfile.NamedTemporaryFile(delete=False)
# filename = tmpfile.name
# yield tmpfile
# finally:
# if filename and os.path.exists(filename):
# os.remove(filename)
. Output only the next line. | raise BadOption( |
Given the code snippet: <|code_start|> code = 0
except subprocess.CalledProcessError as error:
output = error.output
code = error.returncode
lines = [line for line in output.decode().split("\n") if line.strip()]
return lines, code
class ContextWrapper(object):
"""Wraps a tarfile context, so we can continue changing it afterwards"""
def __init__(self, t, tmpfile):
self.t = t
self.tmpfile = tmpfile
def close(self):
self.t.close()
self.tmpfile.flush()
self.tmpfile.seek(0)
@property
def name(self):
return self.tmpfile.name
@contextmanager
def clone_with_new_dockerfile(self, conf, docker_file):
"""Clone this tarfile and add in another filename before closing the new tar and returning"""
log.info("Copying context to add a different dockerfile")
self.close()
<|code_end|>
, generate the next line using the imports in this file:
from harpoon.errors import HarpoonError, BadOption
from harpoon.helpers import a_temp_file
from contextlib import contextmanager
from io import BytesIO
from harpoon.ship.runner import Runner, ContainerRunner
import subprocess
import tempfile
import tarfile
import fnmatch
import logging
import shutil
import docker
import shlex
import os
import re
and context (functions, classes, or occasionally code) from other files:
# Path: harpoon/errors.py
# class HarpoonError(DelfickError):
# pass
#
# class BadOption(HarpoonError):
# desc = "Bad Option"
#
# Path: harpoon/helpers.py
# @contextmanager
# def a_temp_file():
# """Yield the name of a temporary file and ensure it's removed after use"""
# filename = None
# try:
# tmpfile = tempfile.NamedTemporaryFile(delete=False)
# filename = tmpfile.name
# yield tmpfile
# finally:
# if filename and os.path.exists(filename):
# os.remove(filename)
. Output only the next line. | with a_temp_file() as tmpfile: |
Given the following code snippet before the placeholder: <|code_start|>"""
It's recommended you read this file from the bottom up.
"""
class CommandContentAddString(dictobj):
fields = ["content"]
def resolve(self):
return self.content
for_json = resolve
class CommandContent(dictobj):
def setup(self, *args, **kwargs):
if self.__class__ is CommandContent:
<|code_end|>
, predict the next line using imports from the current file:
from harpoon.formatter import MergedOptionStringFormatter
from harpoon.errors import BadOption, ProgrammerError
from harpoon.option_spec.command_objs import Command
from delfick_project.norms import sb, BadSpecValue, dictobj, va, Validator
from harpoon.option_spec.harpoon_specs import HarpoonSpec
from harpoon.option_spec.image_objs import Image
from harpoon.option_spec.harpoon_specs import HarpoonSpec
import hashlib
import json
and context including class names, function names, and sometimes code from other files:
# Path: harpoon/formatter.py
# class MergedOptionStringFormatter(MergedOptionStringFormatter):
# """
# Resolve format options into a MergedOptions dictionary
#
# Usage is like:
#
# configuration = MergedOptions.using({"numbers": "1 two {three}", "three": 3})
# formatter = MergedOptionStringFormatter(configuration, "{numbers}")
# val = formatter.format()
# # val == "1 two 3"
#
# Where that second argument can be more than one format:
#
# configuration = MergedOptions.using({"one": 1, "two": 2, "three": 3})
# formatter = MergedOptionStringFormatter(configuration, "{one} {two} {three}")
# val = formatter.format()
# # val == "1 2 3"
#
# The formatter also has a special feature where it returns the object it finds
# if the string to be formatted is that one object::
#
# class dictsubclass(dict): pass
# configuration = MergedOptions.using({"some_object": dictsubclass({1:2, 3:4})}, dont_prefix=[dictsubclass])
# formatter = MergedOptionStringFormatter(configuration, "{some_object}")
# val = formatter.format()
# # val == {1:2, 3:4}
#
# For this to work, the object must be a subclass of dict and in the dont_prefix option of the configuration.
# """
#
# passthrough_format_specs = ["env", "from_env"]
#
# def get_string(self, key):
# """Get a string from all_options"""
# # Make sure we special case the "content" option
# if type(key) is str and key.startswith("content."):
# return self.no_format(self.all_options["content"][key[8:]])
#
# if type(key) is list and len(key) == 2 and key[0] == "content":
# return self.no_format(self.all_options[key])
#
# return super().get_string(key)
#
# def special_format_field(self, obj, format_spec):
# """Know about any special formats"""
# if format_spec == "env":
# return "${{{0}}}".format(obj)
#
# elif format_spec == "from_env":
# if obj not in os.environ:
# raise NoSuchEnvironmentVariable(wanted=obj)
# return os.environ[obj]
#
# Path: harpoon/errors.py
# class HarpoonError(DelfickError):
# class BadConfiguration(HarpoonError):
# class BadTask(HarpoonError):
# class BadOption(HarpoonError):
# class NoSuchKey(HarpoonError):
# class NoSuchImage(HarpoonError):
# class BadCommand(HarpoonError):
# class BadImage(HarpoonError):
# class CouldntKill(HarpoonError):
# class FailedImage(HarpoonError):
# class BadYaml(HarpoonError):
# class BadResult(HarpoonError):
# class UserQuit(HarpoonError):
# class BadDockerConnection(HarpoonError):
# class ImageDepCycle(HarpoonError):
# class BadDirectory(BadSpecValue):
# class BadFilename(BadSpecValue):
# class DeprecatedFeature(BadSpecValue):
# class BadEnvironment(HarpoonError):
# class BadAmazon(HarpoonError):
# class AlreadyBoundPorts(HarpoonError):
# class NoSuchEnvironmentVariable(HarpoonError):
# class FoundNoBoto(HarpoonError):
#
# Path: harpoon/option_spec/command_objs.py
# class Command(dictobj):
# """Holds a single command"""
#
# fields = [
# "instruction",
# ("extra_context", lambda: sb.NotSpecified),
# ("extra", lambda: sb.NotSpecified),
# ]
#
# def __repr__(self):
# return "<Command({0})>".format(self.instruction)
#
# @property
# def action(self):
# return self._action
#
# @property
# def dependent_image(self):
# if self.action == "FROM":
# return self.command
# elif self.action == "ADD":
# if self.extra_context is not sb.NotSpecified:
# options, _ = self.extra_context
# if hasattr(options, "image"):
# return options.image
# elif self.action == "COPY":
# if self.extra_context is not sb.NotSpecified:
# options, _ = self.extra_context
# if getattr(options, "image", sb.NotSpecified) is not sb.NotSpecified:
# return options.image
#
# @property
# def instruction(self):
# return self._instruction
#
# @instruction.setter
# def instruction(self, val):
# """Set the action and command from an instruction"""
# self._instruction = val
# if isinstance(val, tuple):
# if len(val) == 2:
# self._action, self.command = val
# else:
# self._action, self.command, self.extra = val
# else:
# split = val.split(" ", 1)
# if split[0] == "FROM":
# split = val.split(" ", 2)
#
# if len(split) == 3:
# self._action, self.command, self.extra = split
# else:
# self._action, self.command = split
#
# @property
# def as_string(self):
# """Return the command as a single string for the docker file"""
# if type(self.instruction) is str:
# return self.instruction
#
# if self.action == "FROM" and not isinstance(self.command, str):
# extra = "" if self.extra is sb.NotSpecified else " {0}".format(self.extra)
# return "{0} {1}{2}".format(self.action, self.command.from_name, extra)
# else:
# return "{0} {1}".format(self.action, self.command)
. Output only the next line. | raise ProgrammerError("This should never be instantiated without subclassing it") |
Next line prediction: <|code_start|> process = subprocess.Popen(["docker", "login", "--help"], stdout=subprocess.PIPE)
out, _ = process.communicate()
if isinstance(out, bytes):
out = out.decode()
if "-e, --email" in out:
cmd = "docker login -u {0} -p {1} -e emailnotneeded@goawaydocker.com {2}".format(
username, password, registry
)
else:
cmd = "docker login -u {0} -p {1} {2}".format(username, password, registry)
os.system(cmd)
docker_api.reload_config()
else:
docker_api.login(username, password, registry=registry, reauth=True)
class PlainAuthentication(dictobj):
fields = ["username", "password"]
@property
def creds(self):
return self.username, self.password
class KmsAuthentication(dictobj):
fields = ["username", "password", "role", "region"]
@property
def creds(self):
<|code_end|>
. Use current file imports:
(from harpoon.amazon import assume_role, decrypt_kms, get_s3_slip
from delfick_project.norms import sb, dictobj
from urllib.parse import urlparse
import subprocess
import logging
import time
import os)
and context including class names, function names, or small code snippets from other files:
# Path: harpoon/amazon.py
# def assume_role(arn):
# if boto3 is None:
# raise FoundNoBoto()
#
# log.info("Assuming role as %s", arn)
#
# session = boto3.session.Session()
# session_name = "harpoon-{0}-".format(VERSION)
#
# # Clear out empty values
# for name in [
# "AWS_ACCESS_KEY_ID",
# "AWS_SECRET_ACCESS_KEY",
# "AWS_SECURITY_TOKEN",
# "AWS_SESSION_TOKEN",
# ]:
# if name in os.environ and not os.environ[name]:
# del os.environ[name]
#
# sts = session.client("sts")
# with catch_no_credentials("Couldn't assume role", arn=arn):
# with catch_boto_400("Couldn't assume role", arn=arn):
# creds = sts.assume_role(RoleArn=arn, RoleSessionName=session_name)["Credentials"]
#
# return boto3.session.Session(
# aws_access_key_id=creds["AccessKeyId"],
# aws_secret_access_key=creds["SecretAccessKey"],
# aws_session_token=creds["SessionToken"],
# )
#
# def decrypt_kms(session, ciphertext, region):
# return session.client("kms", region).decrypt(CiphertextBlob=base64.b64decode(ciphertext))[
# "Plaintext"
# ]
#
# def get_s3_slip(session, location):
# parsed = urlparse(location)
# log.info("Getting slip from s3://{0}{1}".format(parsed.netloc, parsed.path))
# return (
# session.resource("s3").Object(parsed.netloc, parsed.path[1:]).get()["Body"].read().strip()
# )
. Output only the next line. | session = assume_role(self.role) |
Given snippet: <|code_start|> out, _ = process.communicate()
if isinstance(out, bytes):
out = out.decode()
if "-e, --email" in out:
cmd = "docker login -u {0} -p {1} -e emailnotneeded@goawaydocker.com {2}".format(
username, password, registry
)
else:
cmd = "docker login -u {0} -p {1} {2}".format(username, password, registry)
os.system(cmd)
docker_api.reload_config()
else:
docker_api.login(username, password, registry=registry, reauth=True)
class PlainAuthentication(dictobj):
fields = ["username", "password"]
@property
def creds(self):
return self.username, self.password
class KmsAuthentication(dictobj):
fields = ["username", "password", "role", "region"]
@property
def creds(self):
session = assume_role(self.role)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from harpoon.amazon import assume_role, decrypt_kms, get_s3_slip
from delfick_project.norms import sb, dictobj
from urllib.parse import urlparse
import subprocess
import logging
import time
import os
and context:
# Path: harpoon/amazon.py
# def assume_role(arn):
# if boto3 is None:
# raise FoundNoBoto()
#
# log.info("Assuming role as %s", arn)
#
# session = boto3.session.Session()
# session_name = "harpoon-{0}-".format(VERSION)
#
# # Clear out empty values
# for name in [
# "AWS_ACCESS_KEY_ID",
# "AWS_SECRET_ACCESS_KEY",
# "AWS_SECURITY_TOKEN",
# "AWS_SESSION_TOKEN",
# ]:
# if name in os.environ and not os.environ[name]:
# del os.environ[name]
#
# sts = session.client("sts")
# with catch_no_credentials("Couldn't assume role", arn=arn):
# with catch_boto_400("Couldn't assume role", arn=arn):
# creds = sts.assume_role(RoleArn=arn, RoleSessionName=session_name)["Credentials"]
#
# return boto3.session.Session(
# aws_access_key_id=creds["AccessKeyId"],
# aws_secret_access_key=creds["SecretAccessKey"],
# aws_session_token=creds["SessionToken"],
# )
#
# def decrypt_kms(session, ciphertext, region):
# return session.client("kms", region).decrypt(CiphertextBlob=base64.b64decode(ciphertext))[
# "Plaintext"
# ]
#
# def get_s3_slip(session, location):
# parsed = urlparse(location)
# log.info("Getting slip from s3://{0}{1}".format(parsed.netloc, parsed.path))
# return (
# session.resource("s3").Object(parsed.netloc, parsed.path[1:]).get()["Body"].read().strip()
# )
which might include code, classes, or functions. Output only the next line. | password = decrypt_kms(session, self.password, self.region) |
Given the following code snippet before the placeholder: <|code_start|> def creds(self):
return self.username, self.password
class KmsAuthentication(dictobj):
fields = ["username", "password", "role", "region"]
@property
def creds(self):
session = assume_role(self.role)
password = decrypt_kms(session, self.password, self.region)
return self.username, password
class S3SlipAuthentication(dictobj):
fields = ["location", "role"]
@property
def creds(self):
if not hasattr(self, "_store"):
self._store = None
if self._store is not None:
tm, stored = self._store
if time.time() - tm > 3000:
self.store = None
else:
return stored
session = assume_role(self.role)
<|code_end|>
, predict the next line using imports from the current file:
from harpoon.amazon import assume_role, decrypt_kms, get_s3_slip
from delfick_project.norms import sb, dictobj
from urllib.parse import urlparse
import subprocess
import logging
import time
import os
and context including class names, function names, and sometimes code from other files:
# Path: harpoon/amazon.py
# def assume_role(arn):
# if boto3 is None:
# raise FoundNoBoto()
#
# log.info("Assuming role as %s", arn)
#
# session = boto3.session.Session()
# session_name = "harpoon-{0}-".format(VERSION)
#
# # Clear out empty values
# for name in [
# "AWS_ACCESS_KEY_ID",
# "AWS_SECRET_ACCESS_KEY",
# "AWS_SECURITY_TOKEN",
# "AWS_SESSION_TOKEN",
# ]:
# if name in os.environ and not os.environ[name]:
# del os.environ[name]
#
# sts = session.client("sts")
# with catch_no_credentials("Couldn't assume role", arn=arn):
# with catch_boto_400("Couldn't assume role", arn=arn):
# creds = sts.assume_role(RoleArn=arn, RoleSessionName=session_name)["Credentials"]
#
# return boto3.session.Session(
# aws_access_key_id=creds["AccessKeyId"],
# aws_secret_access_key=creds["SecretAccessKey"],
# aws_session_token=creds["SessionToken"],
# )
#
# def decrypt_kms(session, ciphertext, region):
# return session.client("kms", region).decrypt(CiphertextBlob=base64.b64decode(ciphertext))[
# "Plaintext"
# ]
#
# def get_s3_slip(session, location):
# parsed = urlparse(location)
# log.info("Getting slip from s3://{0}{1}".format(parsed.netloc, parsed.path))
# return (
# session.resource("s3").Object(parsed.netloc, parsed.path[1:]).get()["Body"].read().strip()
# )
. Output only the next line. | slip = get_s3_slip(session, self.location) |
Given snippet: <|code_start|>"""
Responsible for finding tasks in the configuration and executing them
"""
class TaskFinder(object):
def __init__(self, collector):
self.tasks = {}
self.collector = collector
def image_finder(self, task):
return getattr(
self.tasks[task], "image", self.collector.configuration["harpoon"].chosen_image
)
def task_runner(self, task, **kwargs):
if task not in self.tasks:
raise BadTask("Unknown task", task=task, available=sorted(list(self.tasks.keys())))
return self.tasks[task].run(
<|code_end|>
, continue by predicting the next line. Consider current file imports:
from harpoon.actions import available_actions, default_actions
from harpoon.option_spec.task_objs import Task
from harpoon.errors import BadTask
and context:
# Path: harpoon/actions.py
# class an_action(object):
# def __init__(self, needs_image=False):
# def __call__(self, func):
# def push(collector, image, **kwargs):
# def push_all(collector, **kwargs):
# def pull_arbitrary(collector, image, **kwargs):
# def pull(collector, image, **kwargs):
# def pull_dependencies(collector, image, **kwargs):
# def pull_parent(collector, image, **kwargs):
# def pull_all(collector, image, **kwargs):
# def pull_all_external(collector, **kwargs):
# def pull_parents(collector, **kwargs):
# def make(collector, image, **kwargs):
# def make_all(collector, **kwargs):
# def make_pushable(collector, **kwargs):
# def run(collector, image, **kwargs):
# def list_tasks(collector, tasks, **kwargs):
# def delete_untagged(collector, **kwargs):
# def show(collector, **kwargs):
# def show_pushable(collector, **kwargs):
# def print_dockerfile(collector, image, **kwargs):
# def get_docker_context(collector, image, **kwargs):
# def print_all_dockerfiles(collector, **kwargs):
# def read_login(collector, image, **kwargs):
# def write_login(collector, image, **kwargs):
# def untag(collector, image, artifact, **kwargs):
# def tag(collector, image, artifact, **kwargs):
# def retrieve(collector, image, artifact, **kwargs):
# def container_manager(collector, image, **kwargs):
# def shutdown(signum, frame):
#
# Path: harpoon/option_spec/task_objs.py
# class Task(dictobj):
# """
# Used to add extra options associated with the task and to start the action
# from ``harpoon.actions``.
#
# Also responsible for complaining if the specified action doesn't exist.
#
# Will also ask the image to complain about any missing environment variables.
# """
#
# fields = {
# ("action", "run"): "The action to run with this image",
# ("options", None): "The options to merge with the image options",
# ("overrides", None): "The options to merge with the root configuration",
# ("description", ""): "The description of the task",
# ("label", "Project"): "The namespace when listing tasks",
# }
#
# def setup(self, *args, **kwargs):
# super(Task, self).setup(*args, **kwargs)
# self.set_description()
#
# def set_description(self, available_actions=None):
# if not self.description:
# if not available_actions:
# from harpoon.actions import available_actions
# if self.action in available_actions:
# self.description = available_actions[self.action].__doc__
#
# def run(self, collector, image, available_actions, tasks, **extras):
# """Run this task"""
# task_func = available_actions[self.action]
# configuration = collector.configuration.wrapped()
#
# if self.options:
# if image:
# configuration.update({"images": {image: self.options}})
# else:
# configuration.update(self.options)
#
# # args like --port and the like should override what's in the options
# # But themselves be overridden by the overrides
# configuration.update(configuration["args_dict"].as_dict(), source="<args_dict>")
#
# if self.overrides:
# overrides = {}
# for key, val in self.overrides.items():
# overrides[key] = val
# if isinstance(val, MergedOptions):
# overrides[key] = dict(val.items())
# configuration.update(overrides)
#
# if task_func.needs_image:
# self.find_image(image, configuration)
# image = configuration["images"][image]
# image.find_missing_env()
#
# from harpoon.collector import Collector
#
# new_collector = Collector()
# new_collector.configuration = configuration
# new_collector.configuration_file = collector.configuration_file
# artifact = configuration["harpoon"].artifact
# return task_func(new_collector, image=image, tasks=tasks, artifact=artifact, **extras)
#
# def find_image(self, image, configuration):
# """Complain if we don't have an image"""
# images = configuration["images"]
# available = list(images.keys())
#
# if not image:
# info = {}
# if available:
# info["available"] = available
# raise BadOption("Please use --image to specify an image", **info)
#
# if image not in images:
# raise BadOption("No such image", wanted=image, available=available)
#
# Path: harpoon/errors.py
# class BadTask(HarpoonError):
# desc = "Bad task"
which might include code, classes, or functions. Output only the next line. | self.collector, self.image_finder(task), available_actions, self.tasks, **kwargs |
Here is a snippet: <|code_start|>"""
Responsible for finding tasks in the configuration and executing them
"""
class TaskFinder(object):
def __init__(self, collector):
self.tasks = {}
self.collector = collector
def image_finder(self, task):
return getattr(
self.tasks[task], "image", self.collector.configuration["harpoon"].chosen_image
)
def task_runner(self, task, **kwargs):
if task not in self.tasks:
raise BadTask("Unknown task", task=task, available=sorted(list(self.tasks.keys())))
return self.tasks[task].run(
self.collector, self.image_finder(task), available_actions, self.tasks, **kwargs
)
def default_tasks(self):
"""Return default tasks"""
<|code_end|>
. Write the next line using the current file imports:
from harpoon.actions import available_actions, default_actions
from harpoon.option_spec.task_objs import Task
from harpoon.errors import BadTask
and context from other files:
# Path: harpoon/actions.py
# class an_action(object):
# def __init__(self, needs_image=False):
# def __call__(self, func):
# def push(collector, image, **kwargs):
# def push_all(collector, **kwargs):
# def pull_arbitrary(collector, image, **kwargs):
# def pull(collector, image, **kwargs):
# def pull_dependencies(collector, image, **kwargs):
# def pull_parent(collector, image, **kwargs):
# def pull_all(collector, image, **kwargs):
# def pull_all_external(collector, **kwargs):
# def pull_parents(collector, **kwargs):
# def make(collector, image, **kwargs):
# def make_all(collector, **kwargs):
# def make_pushable(collector, **kwargs):
# def run(collector, image, **kwargs):
# def list_tasks(collector, tasks, **kwargs):
# def delete_untagged(collector, **kwargs):
# def show(collector, **kwargs):
# def show_pushable(collector, **kwargs):
# def print_dockerfile(collector, image, **kwargs):
# def get_docker_context(collector, image, **kwargs):
# def print_all_dockerfiles(collector, **kwargs):
# def read_login(collector, image, **kwargs):
# def write_login(collector, image, **kwargs):
# def untag(collector, image, artifact, **kwargs):
# def tag(collector, image, artifact, **kwargs):
# def retrieve(collector, image, artifact, **kwargs):
# def container_manager(collector, image, **kwargs):
# def shutdown(signum, frame):
#
# Path: harpoon/option_spec/task_objs.py
# class Task(dictobj):
# """
# Used to add extra options associated with the task and to start the action
# from ``harpoon.actions``.
#
# Also responsible for complaining if the specified action doesn't exist.
#
# Will also ask the image to complain about any missing environment variables.
# """
#
# fields = {
# ("action", "run"): "The action to run with this image",
# ("options", None): "The options to merge with the image options",
# ("overrides", None): "The options to merge with the root configuration",
# ("description", ""): "The description of the task",
# ("label", "Project"): "The namespace when listing tasks",
# }
#
# def setup(self, *args, **kwargs):
# super(Task, self).setup(*args, **kwargs)
# self.set_description()
#
# def set_description(self, available_actions=None):
# if not self.description:
# if not available_actions:
# from harpoon.actions import available_actions
# if self.action in available_actions:
# self.description = available_actions[self.action].__doc__
#
# def run(self, collector, image, available_actions, tasks, **extras):
# """Run this task"""
# task_func = available_actions[self.action]
# configuration = collector.configuration.wrapped()
#
# if self.options:
# if image:
# configuration.update({"images": {image: self.options}})
# else:
# configuration.update(self.options)
#
# # args like --port and the like should override what's in the options
# # But themselves be overridden by the overrides
# configuration.update(configuration["args_dict"].as_dict(), source="<args_dict>")
#
# if self.overrides:
# overrides = {}
# for key, val in self.overrides.items():
# overrides[key] = val
# if isinstance(val, MergedOptions):
# overrides[key] = dict(val.items())
# configuration.update(overrides)
#
# if task_func.needs_image:
# self.find_image(image, configuration)
# image = configuration["images"][image]
# image.find_missing_env()
#
# from harpoon.collector import Collector
#
# new_collector = Collector()
# new_collector.configuration = configuration
# new_collector.configuration_file = collector.configuration_file
# artifact = configuration["harpoon"].artifact
# return task_func(new_collector, image=image, tasks=tasks, artifact=artifact, **extras)
#
# def find_image(self, image, configuration):
# """Complain if we don't have an image"""
# images = configuration["images"]
# available = list(images.keys())
#
# if not image:
# info = {}
# if available:
# info["available"] = available
# raise BadOption("Please use --image to specify an image", **info)
#
# if image not in images:
# raise BadOption("No such image", wanted=image, available=available)
#
# Path: harpoon/errors.py
# class BadTask(HarpoonError):
# desc = "Bad task"
, which may include functions, classes, or code. Output only the next line. | return dict((name, Task(action=name, label="Harpoon")) for name in default_actions) |
Predict the next line after this snippet: <|code_start|>"""
Responsible for finding tasks in the configuration and executing them
"""
class TaskFinder(object):
def __init__(self, collector):
self.tasks = {}
self.collector = collector
def image_finder(self, task):
return getattr(
self.tasks[task], "image", self.collector.configuration["harpoon"].chosen_image
)
def task_runner(self, task, **kwargs):
if task not in self.tasks:
raise BadTask("Unknown task", task=task, available=sorted(list(self.tasks.keys())))
return self.tasks[task].run(
self.collector, self.image_finder(task), available_actions, self.tasks, **kwargs
)
def default_tasks(self):
"""Return default tasks"""
<|code_end|>
using the current file's imports:
from harpoon.actions import available_actions, default_actions
from harpoon.option_spec.task_objs import Task
from harpoon.errors import BadTask
and any relevant context from other files:
# Path: harpoon/actions.py
# class an_action(object):
# def __init__(self, needs_image=False):
# def __call__(self, func):
# def push(collector, image, **kwargs):
# def push_all(collector, **kwargs):
# def pull_arbitrary(collector, image, **kwargs):
# def pull(collector, image, **kwargs):
# def pull_dependencies(collector, image, **kwargs):
# def pull_parent(collector, image, **kwargs):
# def pull_all(collector, image, **kwargs):
# def pull_all_external(collector, **kwargs):
# def pull_parents(collector, **kwargs):
# def make(collector, image, **kwargs):
# def make_all(collector, **kwargs):
# def make_pushable(collector, **kwargs):
# def run(collector, image, **kwargs):
# def list_tasks(collector, tasks, **kwargs):
# def delete_untagged(collector, **kwargs):
# def show(collector, **kwargs):
# def show_pushable(collector, **kwargs):
# def print_dockerfile(collector, image, **kwargs):
# def get_docker_context(collector, image, **kwargs):
# def print_all_dockerfiles(collector, **kwargs):
# def read_login(collector, image, **kwargs):
# def write_login(collector, image, **kwargs):
# def untag(collector, image, artifact, **kwargs):
# def tag(collector, image, artifact, **kwargs):
# def retrieve(collector, image, artifact, **kwargs):
# def container_manager(collector, image, **kwargs):
# def shutdown(signum, frame):
#
# Path: harpoon/option_spec/task_objs.py
# class Task(dictobj):
# """
# Used to add extra options associated with the task and to start the action
# from ``harpoon.actions``.
#
# Also responsible for complaining if the specified action doesn't exist.
#
# Will also ask the image to complain about any missing environment variables.
# """
#
# fields = {
# ("action", "run"): "The action to run with this image",
# ("options", None): "The options to merge with the image options",
# ("overrides", None): "The options to merge with the root configuration",
# ("description", ""): "The description of the task",
# ("label", "Project"): "The namespace when listing tasks",
# }
#
# def setup(self, *args, **kwargs):
# super(Task, self).setup(*args, **kwargs)
# self.set_description()
#
# def set_description(self, available_actions=None):
# if not self.description:
# if not available_actions:
# from harpoon.actions import available_actions
# if self.action in available_actions:
# self.description = available_actions[self.action].__doc__
#
# def run(self, collector, image, available_actions, tasks, **extras):
# """Run this task"""
# task_func = available_actions[self.action]
# configuration = collector.configuration.wrapped()
#
# if self.options:
# if image:
# configuration.update({"images": {image: self.options}})
# else:
# configuration.update(self.options)
#
# # args like --port and the like should override what's in the options
# # But themselves be overridden by the overrides
# configuration.update(configuration["args_dict"].as_dict(), source="<args_dict>")
#
# if self.overrides:
# overrides = {}
# for key, val in self.overrides.items():
# overrides[key] = val
# if isinstance(val, MergedOptions):
# overrides[key] = dict(val.items())
# configuration.update(overrides)
#
# if task_func.needs_image:
# self.find_image(image, configuration)
# image = configuration["images"][image]
# image.find_missing_env()
#
# from harpoon.collector import Collector
#
# new_collector = Collector()
# new_collector.configuration = configuration
# new_collector.configuration_file = collector.configuration_file
# artifact = configuration["harpoon"].artifact
# return task_func(new_collector, image=image, tasks=tasks, artifact=artifact, **extras)
#
# def find_image(self, image, configuration):
# """Complain if we don't have an image"""
# images = configuration["images"]
# available = list(images.keys())
#
# if not image:
# info = {}
# if available:
# info["available"] = available
# raise BadOption("Please use --image to specify an image", **info)
#
# if image not in images:
# raise BadOption("No such image", wanted=image, available=available)
#
# Path: harpoon/errors.py
# class BadTask(HarpoonError):
# desc = "Bad task"
. Output only the next line.
return dict((name, Task(action=name, label="Harpoon")) for name in default_actions)
Next line prediction: <|code_start|>"""
Responsible for finding tasks in the configuration and executing them
"""
class TaskFinder(object):
def __init__(self, collector):
self.tasks = {}
self.collector = collector
def image_finder(self, task):
return getattr(
self.tasks[task], "image", self.collector.configuration["harpoon"].chosen_image
)
def task_runner(self, task, **kwargs):
if task not in self.tasks:
<|code_end|>
. Use current file imports:
(from harpoon.actions import available_actions, default_actions
from harpoon.option_spec.task_objs import Task
from harpoon.errors import BadTask)
and context including class names, function names, or small code snippets from other files:
# Path: harpoon/actions.py
# class an_action(object):
# def __init__(self, needs_image=False):
# def __call__(self, func):
# def push(collector, image, **kwargs):
# def push_all(collector, **kwargs):
# def pull_arbitrary(collector, image, **kwargs):
# def pull(collector, image, **kwargs):
# def pull_dependencies(collector, image, **kwargs):
# def pull_parent(collector, image, **kwargs):
# def pull_all(collector, image, **kwargs):
# def pull_all_external(collector, **kwargs):
# def pull_parents(collector, **kwargs):
# def make(collector, image, **kwargs):
# def make_all(collector, **kwargs):
# def make_pushable(collector, **kwargs):
# def run(collector, image, **kwargs):
# def list_tasks(collector, tasks, **kwargs):
# def delete_untagged(collector, **kwargs):
# def show(collector, **kwargs):
# def show_pushable(collector, **kwargs):
# def print_dockerfile(collector, image, **kwargs):
# def get_docker_context(collector, image, **kwargs):
# def print_all_dockerfiles(collector, **kwargs):
# def read_login(collector, image, **kwargs):
# def write_login(collector, image, **kwargs):
# def untag(collector, image, artifact, **kwargs):
# def tag(collector, image, artifact, **kwargs):
# def retrieve(collector, image, artifact, **kwargs):
# def container_manager(collector, image, **kwargs):
# def shutdown(signum, frame):
#
# Path: harpoon/option_spec/task_objs.py
# class Task(dictobj):
# """
# Used to add extra options associated with the task and to start the action
# from ``harpoon.actions``.
#
# Also responsible for complaining if the specified action doesn't exist.
#
# Will also ask the image to complain about any missing environment variables.
# """
#
# fields = {
# ("action", "run"): "The action to run with this image",
# ("options", None): "The options to merge with the image options",
# ("overrides", None): "The options to merge with the root configuration",
# ("description", ""): "The description of the task",
# ("label", "Project"): "The namespace when listing tasks",
# }
#
# def setup(self, *args, **kwargs):
# super(Task, self).setup(*args, **kwargs)
# self.set_description()
#
# def set_description(self, available_actions=None):
# if not self.description:
# if not available_actions:
# from harpoon.actions import available_actions
# if self.action in available_actions:
# self.description = available_actions[self.action].__doc__
#
# def run(self, collector, image, available_actions, tasks, **extras):
# """Run this task"""
# task_func = available_actions[self.action]
# configuration = collector.configuration.wrapped()
#
# if self.options:
# if image:
# configuration.update({"images": {image: self.options}})
# else:
# configuration.update(self.options)
#
# # args like --port and the like should override what's in the options
# # But themselves be overridden by the overrides
# configuration.update(configuration["args_dict"].as_dict(), source="<args_dict>")
#
# if self.overrides:
# overrides = {}
# for key, val in self.overrides.items():
# overrides[key] = val
# if isinstance(val, MergedOptions):
# overrides[key] = dict(val.items())
# configuration.update(overrides)
#
# if task_func.needs_image:
# self.find_image(image, configuration)
# image = configuration["images"][image]
# image.find_missing_env()
#
# from harpoon.collector import Collector
#
# new_collector = Collector()
# new_collector.configuration = configuration
# new_collector.configuration_file = collector.configuration_file
# artifact = configuration["harpoon"].artifact
# return task_func(new_collector, image=image, tasks=tasks, artifact=artifact, **extras)
#
# def find_image(self, image, configuration):
# """Complain if we don't have an image"""
# images = configuration["images"]
# available = list(images.keys())
#
# if not image:
# info = {}
# if available:
# info["available"] = available
# raise BadOption("Please use --image to specify an image", **info)
#
# if image not in images:
# raise BadOption("No such image", wanted=image, available=available)
#
# Path: harpoon/errors.py
# class BadTask(HarpoonError):
# desc = "Bad task"
. Output only the next line.
raise BadTask("Unknown task", task=task, available=sorted(list(self.tasks.keys())))
Next line prediction: <|code_start|> example, you can attach to a running container from within a Python REPL
and when the container exits, the user will be returned to the Python REPL
without adverse effects.
"""
def __init__(self, client, container, interactive=True, stdout=None, stderr=None, stdin=None):
"""
Initialize the PTY using the docker.Client instance and container dict.
"""
self.client = client
self.container = container
self.raw = None
self.interactive = interactive
self.stdout = stdout or sys.stdout
self.stderr = stderr or sys.stderr
self.stdin = stdin or sys.stdin
def start(self, **kwargs):
"""
Present the PTY of the container inside the current process.
This will take over the current process' TTY until the container's PTY
is closed.
"""
pty_stdin, pty_stdout, pty_stderr = self.sockets()
pumps = []
if pty_stdin and self.interactive:
<|code_end|>
. Use current file imports:
(import sys
import signal
from ssl import SSLError
from harpoon.dockerpty import io
from harpoon.dockerpty import tty)
and context including class names, function names, or small code snippets from other files:
# Path: harpoon/dockerpty/io.py
# def set_blocking(fd, blocking=True):
# def select(read_streams, write_streams, timeout=0):
# def __init__(self, fd):
# def fileno(self):
# def set_blocking(self, value):
# def read(self, n=4096):
# def write(self, data):
# def do_write(self):
# def needs_write(self):
# def close(self):
# def __repr__(self):
# def __init__(self, stream):
# def fileno(self):
# def set_blocking(self, value):
# def read(self, n=4096):
# def write(self, data):
# def needs_write(self):
# def do_write(self):
# def close(self):
# def _next_packet_size(self, n=0):
# def __repr__(self):
# def __init__(self, from_stream, to_stream, wait_for_output=True, propagate_close=True):
# def fileno(self):
# def set_blocking(self, value):
# def flush(self, n=4096):
# def is_done(self):
# def __repr__(self):
# class Stream(object):
# class Demuxer(object):
# class Pump(object):
# ERRNO_RECOVERABLE = [errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK]
#
# Path: harpoon/dockerpty/tty.py
# def size(fd):
# def __init__(self, fd, raw=True):
# def __enter__(self):
# def __exit__(self, *_):
# def israw(self):
# def start(self):
# def stop(self):
# def __repr__(self):
# class Terminal(object):
. Output only the next line.
pumps.append(io.Pump(io.Stream(self.stdin), pty_stdin, wait_for_output=False))
Predict the next line for this snippet: <|code_start|> returned in the tuple.
"""
info = self.container_info()
def attach_socket(key):
if info["Config"]["Attach{0}".format(key.capitalize())]:
socket = self.client.attach_socket(self.container, {key: 1, "stream": 1, "logs": 1})
stream = io.Stream(socket)
if info["Config"]["Tty"]:
return stream
else:
return io.Demuxer(stream)
else:
return None
return map(attach_socket, ("stdin", "stdout", "stderr"))
def resize(self, size=None):
"""
Resize the container's PTY.
If `size` is not None, it must be a tuple of (height,width), otherwise
it will be determined by the size of the current TTY.
"""
if not self.israw():
return
<|code_end|>
with the help of current file imports:
import sys
import signal
from ssl import SSLError
from harpoon.dockerpty import io
from harpoon.dockerpty import tty
and context from other files:
# Path: harpoon/dockerpty/io.py
# def set_blocking(fd, blocking=True):
# def select(read_streams, write_streams, timeout=0):
# def __init__(self, fd):
# def fileno(self):
# def set_blocking(self, value):
# def read(self, n=4096):
# def write(self, data):
# def do_write(self):
# def needs_write(self):
# def close(self):
# def __repr__(self):
# def __init__(self, stream):
# def fileno(self):
# def set_blocking(self, value):
# def read(self, n=4096):
# def write(self, data):
# def needs_write(self):
# def do_write(self):
# def close(self):
# def _next_packet_size(self, n=0):
# def __repr__(self):
# def __init__(self, from_stream, to_stream, wait_for_output=True, propagate_close=True):
# def fileno(self):
# def set_blocking(self, value):
# def flush(self, n=4096):
# def is_done(self):
# def __repr__(self):
# class Stream(object):
# class Demuxer(object):
# class Pump(object):
# ERRNO_RECOVERABLE = [errno.EINTR, errno.EDEADLK, errno.EWOULDBLOCK]
#
# Path: harpoon/dockerpty/tty.py
# def size(fd):
# def __init__(self, fd, raw=True):
# def __enter__(self):
# def __exit__(self, *_):
# def israw(self):
# def start(self):
# def stop(self):
# def __repr__(self):
# class Terminal(object):
, which may contain function names, class names, or code. Output only the next line.
size = size or tty.size(self.stdout)
Based on the snippet: <|code_start|> )
for dep in conf.commands.dependent_images:
if isinstance(dep, str):
if ":" not in dep:
dep = "{0}:latest".format(dep)
if dep not in current_tags:
conf.login(dep, is_pushing=False)
cache_from = list(conf.cache_from_names)
if cache_from:
log.info(
"Using cache from the following images\timages={0}".format(", ".join(cache_from))
)
lines = conf.harpoon.docker_api.build(
tag=image_name,
fileobj=context.tmpfile,
custom_context=True,
cache_from=list(conf.cache_from_names),
rm=True,
pull=False,
)
for found in lines:
for line in found.decode().split("\n"):
if line.strip():
try:
stream.feed(line.encode())
<|code_end|>
, predict the immediate next line with the help of imports:
from harpoon.ship.progress_stream import Failure, Unknown
from harpoon.ship.builders.base import BuilderBase
from harpoon.errors import FailedImage
from harpoon import helpers as hp
from itertools import chain
import logging
and context (classes, functions, sometimes code) from other files:
# Path: harpoon/ship/progress_stream.py
# class Failure(Exception):
# pass
#
# class Unknown(Exception):
# pass
#
# Path: harpoon/ship/builders/base.py
# class BuilderBase(object):
# def log_context_size(self, context, conf):
# context_size = humanize.naturalsize(os.stat(context.name).st_size)
# log.info(
# "Building '%s' in '%s' with %s of context",
# conf.name,
# conf.context.parent_dir,
# context_size,
# )
#
# @contextmanager
# def remove_replaced_images(self, conf):
# tag = "latest" if conf.tag is sb.NotSpecified else conf.tag
# image_name = "{0}:{1}".format(conf.image_name, tag)
#
# if not conf.harpoon.keep_replaced:
# try:
# current_id = conf.harpoon.docker_api.inspect_image(image_name)["Id"]
# except docker.errors.APIError as error:
# if str(error).startswith("404 Client Error: Not Found"):
# current_id = None
# else:
# raise
#
# info = {"cached": False}
# yield info
#
# if current_id and not info.get("cached"):
# log.info("Looking for replaced images to remove")
# untagged = [
# image["Id"] for image in conf.harpoon.docker_api.images(filters={"dangling": True})
# ]
# if current_id in untagged:
# log.info(
# "Deleting replaced image\ttag=%s\told_hash=%s",
# "{0}".format(image_name),
# current_id,
# )
# try:
# conf.harpoon.docker_api.remove_image(current_id)
# except Exception as error:
# log.error(
# "Failed to remove replaced image\thash=%s\terror=%s", current_id, error
# )
#
# Path: harpoon/errors.py
# class FailedImage(HarpoonError):
# desc = "Something about an image failed"
#
# Path: harpoon/helpers.py
# def a_temp_file():
# def until(timeout=10, step=0.5, action=None, silent=False):
# def __init__(self, func):
# def __get__(self, instance=None, owner=None):
# def __set__(self, instance, value):
# def __delete__(self, instance):
# def write_to(output, txt):
# class memoized_property(object):
. Output only the next line.
except Failure as error:
Based on the snippet: <|code_start|> for dep in conf.commands.dependent_images:
if isinstance(dep, str):
if ":" not in dep:
dep = "{0}:latest".format(dep)
if dep not in current_tags:
conf.login(dep, is_pushing=False)
cache_from = list(conf.cache_from_names)
if cache_from:
log.info(
"Using cache from the following images\timages={0}".format(", ".join(cache_from))
)
lines = conf.harpoon.docker_api.build(
tag=image_name,
fileobj=context.tmpfile,
custom_context=True,
cache_from=list(conf.cache_from_names),
rm=True,
pull=False,
)
for found in lines:
for line in found.decode().split("\n"):
if line.strip():
try:
stream.feed(line.encode())
except Failure as error:
raise FailedImage("Failed to build an image", image=conf.name, msg=error)
<|code_end|>
, predict the immediate next line with the help of imports:
from harpoon.ship.progress_stream import Failure, Unknown
from harpoon.ship.builders.base import BuilderBase
from harpoon.errors import FailedImage
from harpoon import helpers as hp
from itertools import chain
import logging
and context (classes, functions, sometimes code) from other files:
# Path: harpoon/ship/progress_stream.py
# class Failure(Exception):
# pass
#
# class Unknown(Exception):
# pass
#
# Path: harpoon/ship/builders/base.py
# class BuilderBase(object):
# def log_context_size(self, context, conf):
# context_size = humanize.naturalsize(os.stat(context.name).st_size)
# log.info(
# "Building '%s' in '%s' with %s of context",
# conf.name,
# conf.context.parent_dir,
# context_size,
# )
#
# @contextmanager
# def remove_replaced_images(self, conf):
# tag = "latest" if conf.tag is sb.NotSpecified else conf.tag
# image_name = "{0}:{1}".format(conf.image_name, tag)
#
# if not conf.harpoon.keep_replaced:
# try:
# current_id = conf.harpoon.docker_api.inspect_image(image_name)["Id"]
# except docker.errors.APIError as error:
# if str(error).startswith("404 Client Error: Not Found"):
# current_id = None
# else:
# raise
#
# info = {"cached": False}
# yield info
#
# if current_id and not info.get("cached"):
# log.info("Looking for replaced images to remove")
# untagged = [
# image["Id"] for image in conf.harpoon.docker_api.images(filters={"dangling": True})
# ]
# if current_id in untagged:
# log.info(
# "Deleting replaced image\ttag=%s\told_hash=%s",
# "{0}".format(image_name),
# current_id,
# )
# try:
# conf.harpoon.docker_api.remove_image(current_id)
# except Exception as error:
# log.error(
# "Failed to remove replaced image\thash=%s\terror=%s", current_id, error
# )
#
# Path: harpoon/errors.py
# class FailedImage(HarpoonError):
# desc = "Something about an image failed"
#
# Path: harpoon/helpers.py
# def a_temp_file():
# def until(timeout=10, step=0.5, action=None, silent=False):
# def __init__(self, func):
# def __get__(self, instance=None, owner=None):
# def __set__(self, instance, value):
# def __delete__(self, instance):
# def write_to(output, txt):
# class memoized_property(object):
. Output only the next line.
except Unknown as error:
Next line prediction: <|code_start|>
for dep in conf.commands.dependent_images:
if isinstance(dep, str):
if ":" not in dep:
dep = "{0}:latest".format(dep)
if dep not in current_tags:
conf.login(dep, is_pushing=False)
cache_from = list(conf.cache_from_names)
if cache_from:
log.info(
"Using cache from the following images\timages={0}".format(", ".join(cache_from))
)
lines = conf.harpoon.docker_api.build(
tag=image_name,
fileobj=context.tmpfile,
custom_context=True,
cache_from=list(conf.cache_from_names),
rm=True,
pull=False,
)
for found in lines:
for line in found.decode().split("\n"):
if line.strip():
try:
stream.feed(line.encode())
except Failure as error:
<|code_end|>
. Use current file imports:
(from harpoon.ship.progress_stream import Failure, Unknown
from harpoon.ship.builders.base import BuilderBase
from harpoon.errors import FailedImage
from harpoon import helpers as hp
from itertools import chain
import logging)
and context including class names, function names, or small code snippets from other files:
# Path: harpoon/ship/progress_stream.py
# class Failure(Exception):
# pass
#
# class Unknown(Exception):
# pass
#
# Path: harpoon/ship/builders/base.py
# class BuilderBase(object):
# def log_context_size(self, context, conf):
# context_size = humanize.naturalsize(os.stat(context.name).st_size)
# log.info(
# "Building '%s' in '%s' with %s of context",
# conf.name,
# conf.context.parent_dir,
# context_size,
# )
#
# @contextmanager
# def remove_replaced_images(self, conf):
# tag = "latest" if conf.tag is sb.NotSpecified else conf.tag
# image_name = "{0}:{1}".format(conf.image_name, tag)
#
# if not conf.harpoon.keep_replaced:
# try:
# current_id = conf.harpoon.docker_api.inspect_image(image_name)["Id"]
# except docker.errors.APIError as error:
# if str(error).startswith("404 Client Error: Not Found"):
# current_id = None
# else:
# raise
#
# info = {"cached": False}
# yield info
#
# if current_id and not info.get("cached"):
# log.info("Looking for replaced images to remove")
# untagged = [
# image["Id"] for image in conf.harpoon.docker_api.images(filters={"dangling": True})
# ]
# if current_id in untagged:
# log.info(
# "Deleting replaced image\ttag=%s\told_hash=%s",
# "{0}".format(image_name),
# current_id,
# )
# try:
# conf.harpoon.docker_api.remove_image(current_id)
# except Exception as error:
# log.error(
# "Failed to remove replaced image\thash=%s\terror=%s", current_id, error
# )
#
# Path: harpoon/errors.py
# class FailedImage(HarpoonError):
# desc = "Something about an image failed"
#
# Path: harpoon/helpers.py
# def a_temp_file():
# def until(timeout=10, step=0.5, action=None, silent=False):
# def __init__(self, func):
# def __get__(self, instance=None, owner=None):
# def __set__(self, instance, value):
# def __delete__(self, instance):
# def write_to(output, txt):
# class memoized_property(object):
. Output only the next line.
raise FailedImage("Failed to build an image", image=conf.name, msg=error)
Based on the snippet: <|code_start|>
if dep not in current_tags:
conf.login(dep, is_pushing=False)
cache_from = list(conf.cache_from_names)
if cache_from:
log.info(
"Using cache from the following images\timages={0}".format(", ".join(cache_from))
)
lines = conf.harpoon.docker_api.build(
tag=image_name,
fileobj=context.tmpfile,
custom_context=True,
cache_from=list(conf.cache_from_names),
rm=True,
pull=False,
)
for found in lines:
for line in found.decode().split("\n"):
if line.strip():
try:
stream.feed(line.encode())
except Failure as error:
raise FailedImage("Failed to build an image", image=conf.name, msg=error)
except Unknown as error:
log.warning("Unknown line\tline=%s", error)
for part in stream.printable():
<|code_end|>
, predict the immediate next line with the help of imports:
from harpoon.ship.progress_stream import Failure, Unknown
from harpoon.ship.builders.base import BuilderBase
from harpoon.errors import FailedImage
from harpoon import helpers as hp
from itertools import chain
import logging
and context (classes, functions, sometimes code) from other files:
# Path: harpoon/ship/progress_stream.py
# class Failure(Exception):
# pass
#
# class Unknown(Exception):
# pass
#
# Path: harpoon/ship/builders/base.py
# class BuilderBase(object):
# def log_context_size(self, context, conf):
# context_size = humanize.naturalsize(os.stat(context.name).st_size)
# log.info(
# "Building '%s' in '%s' with %s of context",
# conf.name,
# conf.context.parent_dir,
# context_size,
# )
#
# @contextmanager
# def remove_replaced_images(self, conf):
# tag = "latest" if conf.tag is sb.NotSpecified else conf.tag
# image_name = "{0}:{1}".format(conf.image_name, tag)
#
# if not conf.harpoon.keep_replaced:
# try:
# current_id = conf.harpoon.docker_api.inspect_image(image_name)["Id"]
# except docker.errors.APIError as error:
# if str(error).startswith("404 Client Error: Not Found"):
# current_id = None
# else:
# raise
#
# info = {"cached": False}
# yield info
#
# if current_id and not info.get("cached"):
# log.info("Looking for replaced images to remove")
# untagged = [
# image["Id"] for image in conf.harpoon.docker_api.images(filters={"dangling": True})
# ]
# if current_id in untagged:
# log.info(
# "Deleting replaced image\ttag=%s\told_hash=%s",
# "{0}".format(image_name),
# current_id,
# )
# try:
# conf.harpoon.docker_api.remove_image(current_id)
# except Exception as error:
# log.error(
# "Failed to remove replaced image\thash=%s\terror=%s", current_id, error
# )
#
# Path: harpoon/errors.py
# class FailedImage(HarpoonError):
# desc = "Something about an image failed"
#
# Path: harpoon/helpers.py
# def a_temp_file():
# def until(timeout=10, step=0.5, action=None, silent=False):
# def __init__(self, func):
# def __get__(self, instance=None, owner=None):
# def __set__(self, instance, value):
# def __delete__(self, instance):
# def write_to(output, txt):
# class memoized_property(object):
. Output only the next line.
hp.write_to(conf.harpoon.stdout, part)
Here is a snippet: <|code_start|>
class dictsubclass(dict): pass
configuration = MergedOptions.using({"some_object": dictsubclass({1:2, 3:4})}, dont_prefix=[dictsubclass])
formatter = MergedOptionStringFormatter(configuration, "{some_object}")
val = formatter.format()
# val == {1:2, 3:4}
For this to work, the object must be a subclass of dict and in the dont_prefix option of the configuration.
"""
passthrough_format_specs = ["env", "from_env"]
def get_string(self, key):
"""Get a string from all_options"""
# Make sure we special case the "content" option
if type(key) is str and key.startswith("content."):
return self.no_format(self.all_options["content"][key[8:]])
if type(key) is list and len(key) == 2 and key[0] == "content":
return self.no_format(self.all_options[key])
return super().get_string(key)
def special_format_field(self, obj, format_spec):
"""Know about any special formats"""
if format_spec == "env":
return "${{{0}}}".format(obj)
elif format_spec == "from_env":
if obj not in os.environ:
<|code_end|>
. Write the next line using the current file imports:
from harpoon.errors import NoSuchEnvironmentVariable
from delfick_project.option_merge.formatter import MergedOptionStringFormatter
import os
and context from other files:
# Path: harpoon/errors.py
# class NoSuchEnvironmentVariable(HarpoonError):
# desc = "Couldn't find environment variable"
, which may include functions, classes, or code. Output only the next line.
raise NoSuchEnvironmentVariable(wanted=obj)
Here is a snippet: <|code_start|> if os.path.exists(filename):
return
time.sleep(0.1)
if not os.path.exists(filename):
assert False, "Failed to wait for filename: {0}".format(filename)
def wait_for_port(self, port, timeout=2):
start = time.time()
while time.time() - start < timeout:
if self.port_connected(port):
return
time.sleep(0.1)
assert self.port_connected(port)
def assertForks(self, info, timeout=1):
start = time.time()
while time.time() - start < timeout:
if info["done"]:
return
if not info["done"]:
assert False, "The process should have forked, but it hasn't within timeout"
def version(self, info):
return requests.get(self.local(info["port"], "/version")).content.decode()
def start_inprocess(self, manager):
port = self.free_port()
<|code_end|>
. Write the next line using the current file imports:
from harpoon.container_manager import make_server
from contextlib import contextmanager
import subprocess
import threading
import requests
import tempfile
import socket
import psutil
import signal
import pytest
import time
import sys
import os
and context from other files:
# Path: harpoon/container_manager.py
# def make_server(manager, address):
# class RequestHandler(BaseHTTPRequestHandler):
# def handle(self):
# try:
# super().handle()
# except KeyboardInterrupt:
# raise
# except Exception as error:
# self.send_response(500)
# self.send_header("Content-Type", "application/json")
# self.end_headers()
#
# if isinstance(error, DelfickError):
# asdct = error.as_dict()
# else:
# log.exception("Unexpected error")
# asdct = {"message": str(error)}
#
# e = {"error": asdct, "error_code": error.__class__.__name__}
# self.wfile.write(json.dumps(e, default=lambda o: repr(o)).encode())
#
# def do_GET(self):
# if self.path == "/version":
# manager.version(self)
# elif self.path == "/shutdown":
# if manager.shutting_down:
# self.send_response(204)
# self.end_headers()
# else:
# manager.shutdown(self)
# else:
# self.send_response(404)
# self.end_headers()
# self.wfile.write("Unknown path: {0}".format(self.path).encode())
#
# def do_POST(self):
# if self.path == "/stop_container":
# manager.stop_container(self)
# elif self.path == "/start_container":
# manager.start_container(self)
# elif self.path == "/unlock_container":
# manager.unlock_container(self)
# else:
# self.send_response(404)
# self.end_headers()
# self.wfile.write("Unknown path: {0}".format(self.path).encode())
#
# def log_message(self, format, *args):
# log.info("%s - %s", self.address_string(), format % args)
#
# class Server(socketserver.ThreadingMixIn, HTTPServer):
# pass
#
# return Server(address, RequestHandler)
, which may include functions, classes, or code. Output only the next line.
server = make_server(manager, ("127.0.0.1", port))
Given the code snippet: <|code_start|>
info = {}
class DockersAssertionsMixin:
@property
def docker_client(self):
if "docker_client" not in info:
<|code_end|>
, generate the next line using the imports in this file:
from harpoon.executor import docker_context
from harpoon.ship.builder import Builder
from contextlib import contextmanager
import docker.errors
import uuid
and context (functions, classes, or occasionally code) from other files:
# Path: harpoon/executor.py
# def docker_context():
# """Make a docker context"""
# try:
# client = docker.from_env(
# version="auto",
# timeout=int(os.environ.get("DOCKER_CLIENT_TIMEOUT", 180)),
# assert_hostname=False,
# )
#
# info = client.info()
# log.info(
# "Connected to docker daemon\tdriver=%s\tkernel=%s",
# info["Driver"],
# info["KernelVersion"],
# )
# except (DockerException, APIError) as error:
# raise BadDockerConnection(error=error)
#
# return client
#
# Path: harpoon/ship/builder.py
# class Builder(BuilderBase):
# """Build an image from Image configuration"""
#
# def make_image(
# self,
# conf,
# images,
# chain=None,
# parent_chain=None,
# made=None,
# ignore_deps=False,
# ignore_parent=False,
# pushing=False,
# ):
# """Make us an image"""
# made = {} if made is None else made
# chain = [] if chain is None else chain
# parent_chain = [] if parent_chain is None else parent_chain
#
# if conf.name in made:
# return
#
# if conf.name in chain and not ignore_deps:
# raise BadCommand("Recursive dependency images", chain=chain + [conf.name])
#
# if conf.name in parent_chain and not ignore_parent:
# raise BadCommand("Recursive FROM statements", chain=parent_chain + [conf.name])
#
# if conf.name not in images:
# raise NoSuchImage(looking_for=conf.name, available=images.keys())
#
# if not ignore_deps:
# for dependency, image in conf.dependency_images():
# self.make_image(
# images[dependency],
# images,
# chain=chain + [conf.name],
# made=made,
# pushing=pushing,
# )
#
# if not ignore_parent:
# for dep in conf.commands.dependent_images:
# if not isinstance(dep, str):
# self.make_image(
# dep, images, chain, parent_chain + [conf.name], made=made, pushing=pushing
# )
#
# # Should have all our dependencies now
# log.info("Making image for '%s' (%s)", conf.name, conf.image_name)
# cached = self.build_image(conf, pushing=pushing)
# made[conf.name] = True
# return cached
#
# def build_image(self, conf, pushing=False):
# """Build this image"""
# with conf.make_context() as context:
# try:
# stream = BuildProgressStream(conf.harpoon.silent_build)
# with self.remove_replaced_images(conf) as info:
# cached = NormalBuilder().build(conf, context, stream)
# info["cached"] = cached
# except (KeyboardInterrupt, Exception) as error:
# exc_info = sys.exc_info()
# if stream.current_container:
# Runner().stage_build_intervention(conf, stream.current_container)
#
# if isinstance(error, KeyboardInterrupt):
# raise UserQuit()
# else:
# exc_info[1].__traceback__ = exc_info[2]
# raise exc_info[1]
# finally:
# if stream and stream.intermediate_images and conf.cleanup_intermediate_images:
# for image in stream.intermediate_images:
# log.info("Deleting intermediate image\timage=%s", image)
# try:
# conf.harpoon.docker_api.remove_image(image)
# except Exception as error:
# log.error(
# "Failed to remove intermediate image\timage=%s\terror=%s",
# image,
# error,
# )
#
# return cached
#
# def layered(self, images, only_pushable=False):
# """Yield layers of images"""
# if only_pushable:
# operate_on = dict(
# (image, instance) for image, instance in images.items() if instance.image_index
# )
# else:
# operate_on = images
#
# layers = Layers(operate_on, all_deps=images)
# layers.add_all_to_layers()
# for layer in layers.layered:
# buf = []
# for image_name, image in layer:
# if image.image_index:
# buf.append((image_name, image))
# if buf:
# yield buf
. Output only the next line. | info["docker_client"] = docker_context() |
Predict the next line after this snippet: <|code_start|> @property
def docker_client(self):
if "docker_client" not in info:
info["docker_client"] = docker_context()
return info["docker_client"]
@property
def docker_api(self):
return self.docker_client.api
def refresh_docker_client(self):
if "docker_client" in info:
del info["docker_client"]
def new_docker_client(self):
self.refresh_docker_client()
return self.docker_client
@contextmanager
def a_built_image(self, options, harpoon_options=None, images=None, **kwargs):
ident = str(uuid.uuid1())
ident_tag = "{0}:latest".format(ident)
conf = self.make_image(options, harpoon_options, **kwargs)
if images:
images[conf.image_name] = conf
conf.image_name = ident
if images:
images[conf.image_name] = conf
<|code_end|>
using the current file's imports:
from harpoon.executor import docker_context
from harpoon.ship.builder import Builder
from contextlib import contextmanager
import docker.errors
import uuid
and any relevant context from other files:
# Path: harpoon/executor.py
# def docker_context():
# """Make a docker context"""
# try:
# client = docker.from_env(
# version="auto",
# timeout=int(os.environ.get("DOCKER_CLIENT_TIMEOUT", 180)),
# assert_hostname=False,
# )
#
# info = client.info()
# log.info(
# "Connected to docker daemon\tdriver=%s\tkernel=%s",
# info["Driver"],
# info["KernelVersion"],
# )
# except (DockerException, APIError) as error:
# raise BadDockerConnection(error=error)
#
# return client
#
# Path: harpoon/ship/builder.py
# class Builder(BuilderBase):
# """Build an image from Image configuration"""
#
# def make_image(
# self,
# conf,
# images,
# chain=None,
# parent_chain=None,
# made=None,
# ignore_deps=False,
# ignore_parent=False,
# pushing=False,
# ):
# """Make us an image"""
# made = {} if made is None else made
# chain = [] if chain is None else chain
# parent_chain = [] if parent_chain is None else parent_chain
#
# if conf.name in made:
# return
#
# if conf.name in chain and not ignore_deps:
# raise BadCommand("Recursive dependency images", chain=chain + [conf.name])
#
# if conf.name in parent_chain and not ignore_parent:
# raise BadCommand("Recursive FROM statements", chain=parent_chain + [conf.name])
#
# if conf.name not in images:
# raise NoSuchImage(looking_for=conf.name, available=images.keys())
#
# if not ignore_deps:
# for dependency, image in conf.dependency_images():
# self.make_image(
# images[dependency],
# images,
# chain=chain + [conf.name],
# made=made,
# pushing=pushing,
# )
#
# if not ignore_parent:
# for dep in conf.commands.dependent_images:
# if not isinstance(dep, str):
# self.make_image(
# dep, images, chain, parent_chain + [conf.name], made=made, pushing=pushing
# )
#
# # Should have all our dependencies now
# log.info("Making image for '%s' (%s)", conf.name, conf.image_name)
# cached = self.build_image(conf, pushing=pushing)
# made[conf.name] = True
# return cached
#
# def build_image(self, conf, pushing=False):
# """Build this image"""
# with conf.make_context() as context:
# try:
# stream = BuildProgressStream(conf.harpoon.silent_build)
# with self.remove_replaced_images(conf) as info:
# cached = NormalBuilder().build(conf, context, stream)
# info["cached"] = cached
# except (KeyboardInterrupt, Exception) as error:
# exc_info = sys.exc_info()
# if stream.current_container:
# Runner().stage_build_intervention(conf, stream.current_container)
#
# if isinstance(error, KeyboardInterrupt):
# raise UserQuit()
# else:
# exc_info[1].__traceback__ = exc_info[2]
# raise exc_info[1]
# finally:
# if stream and stream.intermediate_images and conf.cleanup_intermediate_images:
# for image in stream.intermediate_images:
# log.info("Deleting intermediate image\timage=%s", image)
# try:
# conf.harpoon.docker_api.remove_image(image)
# except Exception as error:
# log.error(
# "Failed to remove intermediate image\timage=%s\terror=%s",
# image,
# error,
# )
#
# return cached
#
# def layered(self, images, only_pushable=False):
# """Yield layers of images"""
# if only_pushable:
# operate_on = dict(
# (image, instance) for image, instance in images.items() if instance.image_index
# )
# else:
# operate_on = images
#
# layers = Layers(operate_on, all_deps=images)
# layers.add_all_to_layers()
# for layer in layers.layered:
# buf = []
# for image_name, image in layer:
# if image.image_index:
# buf.append((image_name, image))
# if buf:
# yield buf
. Output only the next line. | cached = Builder().make_image(conf, images) |
Given the following code snippet before the placeholder: <|code_start|>
if self.overrides:
overrides = {}
for key, val in self.overrides.items():
overrides[key] = val
if isinstance(val, MergedOptions):
overrides[key] = dict(val.items())
configuration.update(overrides)
if task_func.needs_image:
self.find_image(image, configuration)
image = configuration["images"][image]
image.find_missing_env()
new_collector = Collector()
new_collector.configuration = configuration
new_collector.configuration_file = collector.configuration_file
artifact = configuration["harpoon"].artifact
return task_func(new_collector, image=image, tasks=tasks, artifact=artifact, **extras)
def find_image(self, image, configuration):
"""Complain if we don't have an image"""
images = configuration["images"]
available = list(images.keys())
if not image:
info = {}
if available:
info["available"] = available
<|code_end|>
, predict the next line using imports from the current file:
from harpoon.errors import BadOption
from delfick_project.option_merge import MergedOptions
from delfick_project.norms import dictobj
from harpoon.actions import available_actions
from harpoon.collector import Collector
and context including class names, function names, and sometimes code from other files:
# Path: harpoon/errors.py
# class BadOption(HarpoonError):
# desc = "Bad Option"
. Output only the next line. | raise BadOption("Please use --image to specify an image", **info) |
Given the following code snippet before the placeholder: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('decoders/')
################## Testing ##################
L, d, p = 13, 2, 0.08
<|code_end|>
, predict the next line using imports from the current file:
import sys
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from src import color_codes, error_models, visualization
from dsp import *
and context including class names, function names, and sometimes code from other files:
# Path: src/color_codes.py
# class ColorCode(Code):
# class Color_6_6_6(ColorCode):
# def __init__(self, depth, dimension = 2):
# def generateColors(self):
# def complementaryType(self, types):
# def complementaryTypes(self, type):
# def Sign(self, count, num_sides = 6):
# def Plaquette(self, measure_qubit, type):
# def generateDual(self):
# def CodeCycle(self, model, p):
# def generatePrimalEdges(self):
# def PrimalBound(self, count, type, measures):
# def hasLogicalError(self):
# def __init__(self, depth, dimension = 2):
# def generateCode(self):
# N = int(float(depth)/2)
# P = self.Plaquette(measures[m], m)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
#
# Path: src/visualization.py
# def PlotPrimal(code, title, plot_number = 1):
# def PlotDual(code, title, plot_number = 2):
# def PlotPlaquette(code, title, plot_number = 3):
# def PlotShrunk(code, type, title, plot_number = 4):
. Output only the next line. | code = color_codes.Color_6_6_6(L,d) |
Next line prediction: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('decoders/')
################## Testing ##################
L, d, p = 13, 2, 0.08
code = color_codes.Color_6_6_6(L,d)
<|code_end|>
. Use current file imports:
(import sys
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from src import color_codes, error_models, visualization
from dsp import *)
and context including class names, function names, or small code snippets from other files:
# Path: src/color_codes.py
# class ColorCode(Code):
# class Color_6_6_6(ColorCode):
# def __init__(self, depth, dimension = 2):
# def generateColors(self):
# def complementaryType(self, types):
# def complementaryTypes(self, type):
# def Sign(self, count, num_sides = 6):
# def Plaquette(self, measure_qubit, type):
# def generateDual(self):
# def CodeCycle(self, model, p):
# def generatePrimalEdges(self):
# def PrimalBound(self, count, type, measures):
# def hasLogicalError(self):
# def __init__(self, depth, dimension = 2):
# def generateCode(self):
# N = int(float(depth)/2)
# P = self.Plaquette(measures[m], m)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
#
# Path: src/visualization.py
# def PlotPrimal(code, title, plot_number = 1):
# def PlotDual(code, title, plot_number = 2):
# def PlotPlaquette(code, title, plot_number = 3):
# def PlotShrunk(code, type, title, plot_number = 4):
. Output only the next line. | model = error_models.PhaseError() |
Using the snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('decoders/')
################## Testing ##################
L, d, p = 13, 2, 0.08
code = color_codes.Color_6_6_6(L,d)
model = error_models.PhaseError()
code = code.CodeCycle(model, p)
<|code_end|>
, determine the next line of code. You have imports:
import sys
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from src import color_codes, error_models, visualization
from dsp import *
and context (class names, function names, or code) available:
# Path: src/color_codes.py
# class ColorCode(Code):
# class Color_6_6_6(ColorCode):
# def __init__(self, depth, dimension = 2):
# def generateColors(self):
# def complementaryType(self, types):
# def complementaryTypes(self, type):
# def Sign(self, count, num_sides = 6):
# def Plaquette(self, measure_qubit, type):
# def generateDual(self):
# def CodeCycle(self, model, p):
# def generatePrimalEdges(self):
# def PrimalBound(self, count, type, measures):
# def hasLogicalError(self):
# def __init__(self, depth, dimension = 2):
# def generateCode(self):
# N = int(float(depth)/2)
# P = self.Plaquette(measures[m], m)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
#
# Path: src/visualization.py
# def PlotPrimal(code, title, plot_number = 1):
# def PlotDual(code, title, plot_number = 2):
# def PlotPlaquette(code, title, plot_number = 3):
# def PlotShrunk(code, type, title, plot_number = 4):
. Output only the next line. | visualization.PlotPlaquette(code, "Before Decoding", 1) |
Predict the next line after this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
# sys.path.append('../../')
################ GCC ###################
def center(cluster):
X = round(np.mean([x for (x,y) in cluster]),3)
Y = round(np.mean([y for (x,y) in cluster]),3)
return (X,Y)
def closest_to_point(nodes,pt):
<|code_end|>
using the current file's imports:
from decoders import *
from matplotlib import path
from math import floor
from src import common
import sys
import time
import networkx as nx
import numpy as np
and any relevant context from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
. Output only the next line. | return min(nodes, key = lambda x:common.euclidean_dist(pt, x)) |
Given the code snippet: <|code_start|>
for node in cluster:
if UnclusteredGraph.degree(node[0]) > degree and 'external' not in node[1]:
center = node
degree = UnclusteredGraph.degree(center[0])
return center
def Fuse(code, UnclusteredGraph, cluster, dim, type, charge_type):
# use syndrome transport to move excitations around lattice
center = CentralNode(UnclusteredGraph, cluster)
for mate in cluster:
if mate != center:
Transport(code, center, mate, dim, type, charge_type)
def Transport(code, fixed_node, mobile_node, dim, type, charge_type):
chain = nx.shortest_path(code.Dual[type], mobile_node[0], fixed_node[0])
chain_length = len(chain) - 1
first_link = chain[0]
charge = code.Stabilizers[type][first_link]['charge'][charge_type]
for link in range(chain_length):
previous, next = chain[link], chain[link + 1]
for shared in code.Stabilizers[type][previous]['data']:
if shared in code.Stabilizers[type][next]['data']:
num_sides = code.Stabilizers[type][next]['sides']
count = code.Stabilizers[type][previous]['data'][shared]
<|code_end|>
, generate the next line using the imports in this file:
from decoders import *
from matplotlib import path
from math import floor
from src import common
import sys
import networkx as nx
import numpy as np
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
. Output only the next line. | sign = common.Sign(count, num_sides) |
Predict the next line for this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.BitFlip()
decoder = gcc.GCC_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.07,0.16,15)
num_trials = 30000
d = 101
<|code_end|>
with the help of current file imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc
and context from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
, which may contain function names, class names, or code. Output only the next line. | sim = simulation.simulation(d, '6-6-6 Color Code', [model, 'Bit Flip Channel'], [decoder, 'GCC'], path_to) |
Predict the next line after this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
using the current file's imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc
and any relevant context from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.BitFlip() |
Given the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.Depolarizing()
decoder = rg.HDRG_decoder()
L_vals = [7,9,11,13]
p_vals = np.linspace(0.1,0.17,15)
num_trials = 30000
d = 5
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, 'Surface Code', [model, 'Depolarizing Channel'], [decoder, 'RG'], path_to) |
Given the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.Depolarizing() |
Continue the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.Depolarizing()
decoder = rg.HDRG_decoder()
L_vals = [7,9,11,13]
p_vals = np.linspace(0.1,0.17,15)
num_trials = 30000
d = 3
<|code_end|>
. Use current file imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (classes, functions, or code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, 'Surface Code', [model, 'Depolarizing Channel'], [decoder, 'RG'], path_to) |
Given the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.Depolarizing() |
Given the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.BitFlip()
decoder = rg.HDRG_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.1,0.19,15)
num_trials = 30000
d = 100
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, 'Surface Code', [model, 'Bit Flip Channel'], [decoder, 'RG'], path_to) |
Given the following code snippet before the placeholder: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context including class names, function names, and sometimes code from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.BitFlip() |
Given the following code snippet before the placeholder: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.Depolarizing()
decoder = gcc.GCC_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.07,0.14,15)
num_trials = 30000
d = 25
<|code_end|>
, predict the next line using imports from the current file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc
and context including class names, function names, and sometimes code from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, '6-6-6 Color Code', [model, 'Depolarizing Channel'], [decoder, 'GCC'], path_to) |
Next line prediction: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
. Use current file imports:
(import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc)
and context including class names, function names, or small code snippets from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.Depolarizing() |
Using the snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.BitFlip()
decoder = gcc.GCC_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.05,0.11,15)
num_trials = 50000
d = 3
<|code_end|>
, determine the next line of code. You have imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc
and context (class names, function names, or code) available:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, '6-6-6 Color Code', [model, 'Bit Flip Channel'], [decoder, 'GCC'], path_to) |
Predict the next line for this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
with the help of current file imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import gcc
and context from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
, which may contain function names, class names, or code. Output only the next line. | model = error_models.BitFlip() |
Using the snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Color Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.BitFlip()
decoder = dsp.DSP_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.05,0.12,15)
num_trials = 30000
d = 2
<|code_end|>
, determine the next line of code. You have imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import dsp
and context (class names, function names, or code) available:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, '6-6-6 Color Code', [model, 'Bit Flip Channel'], [decoder, 'DSP'], path_to) |
Next line prediction: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Color Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
. Use current file imports:
(import numpy as np
import sys
from src import common, simulation, error_models
from decoders import dsp)
and context including class names, function names, or small code snippets from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | model = error_models.BitFlip() |
Given snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.BitFlip()
decoder = rg.HDRG_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.09,0.16,15)
num_trials = 30000
d = 5
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
which might include code, classes, or functions. Output only the next line. | sim = simulation.simulation(d, 'Surface Code', [model, 'Bit Flip Channel'], [decoder, 'RG'], path_to) |
Predict the next line for this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
<|code_end|>
with the help of current file imports:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
, which may contain function names, class names, or code. Output only the next line. | model = error_models.BitFlip() |
Predict the next line after this snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('decoders/')
################## Testing ##################
L, d, p = 9, 2, 0.02
<|code_end|>
using the current file's imports:
import sys
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from src import color_codes, error_models, visualization
from dsp import *
and any relevant context from other files:
# Path: src/color_codes.py
# class ColorCode(Code):
# class Color_6_6_6(ColorCode):
# def __init__(self, depth, dimension = 2):
# def generateColors(self):
# def complementaryType(self, types):
# def complementaryTypes(self, type):
# def Sign(self, count, num_sides = 6):
# def Plaquette(self, measure_qubit, type):
# def generateDual(self):
# def CodeCycle(self, model, p):
# def generatePrimalEdges(self):
# def PrimalBound(self, count, type, measures):
# def hasLogicalError(self):
# def __init__(self, depth, dimension = 2):
# def generateCode(self):
# N = int(float(depth)/2)
# P = self.Plaquette(measures[m], m)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
#
# Path: src/visualization.py
# def PlotPrimal(code, title, plot_number = 1):
# def PlotDual(code, title, plot_number = 2):
# def PlotPlaquette(code, title, plot_number = 3):
# def PlotShrunk(code, type, title, plot_number = 4):
. Output only the next line. | code = color_codes.Color_4_8_8(L,d) |
Given snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('decoders/')
################## Testing ##################
L, d, p = 9, 2, 0.02
code = color_codes.Color_4_8_8(L,d)
code1 = color_codes.Color_6_6_6(L,d)
# model = error_models.CodeCapacity()
# code = code.CodeCycle(model, p)
# visualization.PlotPlaquette(code, "Before Decoding", 1)
# decoder = DSP_decoder()
# code = decoder(code)
<|code_end|>
, continue by predicting the next line. Consider current file imports:
import sys
import networkx as nx
import numpy as np
import matplotlib.pyplot as plt
from src import color_codes, error_models, visualization
from dsp import *
and context:
# Path: src/color_codes.py
# class ColorCode(Code):
# class Color_6_6_6(ColorCode):
# def __init__(self, depth, dimension = 2):
# def generateColors(self):
# def complementaryType(self, types):
# def complementaryTypes(self, type):
# def Sign(self, count, num_sides = 6):
# def Plaquette(self, measure_qubit, type):
# def generateDual(self):
# def CodeCycle(self, model, p):
# def generatePrimalEdges(self):
# def PrimalBound(self, count, type, measures):
# def hasLogicalError(self):
# def __init__(self, depth, dimension = 2):
# def generateCode(self):
# N = int(float(depth)/2)
# P = self.Plaquette(measures[m], m)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
#
# Path: src/visualization.py
# def PlotPrimal(code, title, plot_number = 1):
# def PlotDual(code, title, plot_number = 2):
# def PlotPlaquette(code, title, plot_number = 3):
# def PlotShrunk(code, type, title, plot_number = 4):
which might include code, classes, or functions. Output only the next line. | visualization.PlotPrimal(code, "Bound Data", 1) |
Given the code snippet: <|code_start|> #
# QTop
#
# Copyright (c) 2016 Jacob Marks (jacob.marks@yale.edu)
#
# This file is part of QTop.
#
# QTop is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
sys.path.append('../')
sys.path.append('../src')
################## Surface Code Simulation ##################
path_to = str(sys.argv[1])
model = error_models.Depolarizing()
decoder = rg.HDRG_decoder()
L_vals = [9,11,13]
p_vals = np.linspace(0.1,0.17,13)
num_trials = 30000
d = 100
<|code_end|>
, generate the next line using the imports in this file:
import numpy as np
import sys
from src import common, simulation, error_models
from decoders import rg
and context (functions, classes, or occasionally code) from other files:
# Path: src/common.py
# def removekey(d, key):
# def colinear(p0, p1, p2):
# def manhattan_dist(A,B):
# def euclidean_dist(A,B):
# def Charge(X_charge = 0, Z_charge = 0):
# def Sign(count, num_sides):
# def __init__(self, depth, dimension = 2):
# def Syndrome(self, type, charge_type):
# def PhysicalErrors(self):
# def distance(self, type, node1, node2):
# def Assessment(self):
# class Code:
#
# Path: src/simulation.py
# class simulation:
#
# def __init__(self, dimension, code_type, model, decoder, path_to):
# self.model = model
# [self.decoder, self.decoder_type] = decoder
# [self.model, self.model_type] = model
# self.dimension = dimension
# self.code_type = code_type
# self.path = path_to
#
# def __call__(self, L, p):
# signal.signal(signal.SIGALRM, timeout_handler)
# signal.alarm(10)
# if self.code_type == 'Surface Code':
# code = SurfaceCode(L, self.dimension)
# if self.code_type == '6-6-6 Color Code':
# code = Color_6_6_6(L, self.dimension)
# code = code.CodeCycle(self.model, p)
# try:
# decoders.Decode(code, self.decoder)
# return code.Assessment()
# except TimeoutException:
# return self(L, p)
# except ValueError:
# return self(L, p)
#
# Path: src/error_models.py
# def PauliX(charge, dim):
# def PauliZ(charge, dim):
# def PauliXZ(charge, dim):
# def Identity_Channel(charge):
# def PhaseFlip_Channel(charge, dim, p):
# def BitFlip_Channel(charge, dim, p):
# def Correlated_Channel(charge, dim, p):
# def BP_Channel(charge, dim, p):
# def Depolarizing_Channel(charge, dim, p):
# def Depolarizing_Helper(charge, dim):
# def Error_Channel(charge, dim, p, type):
# def __init__(self, **kwargs):
# def Initialize(self, code, type, p):
# def Identity(self, code, p):
# def Fourier(self, code, type, p):
# def Measure(self, code, type, p):
# def Sum(self, code, count, num_sides, type, charge_type, control_p, target_p):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# def __init__(self):
# class ErrorModel:
# class CodeCapacity(ErrorModel):
# class Depolarizing(ErrorModel):
# class Phenomenological(ErrorModel):
# class CircuitLevel(ErrorModel):
# class PhaseFlip(ErrorModel):
# class BitFlip(ErrorModel):
. Output only the next line. | sim = simulation.simulation(d, 'Surface Code', [model, 'Depolarizing Channel'], [decoder, 'RG'], path_to) |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.