text_prompt stringlengths 157 13.1k | code_prompt stringlengths 7 19.8k ⌀ |
|---|---|
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _git_command(params, cwd):
    """
    Runs git with the given arguments, capturing its combined output

    :param params:
        A list of arguments to pass to the git executable

    :param cwd:
        The working directory to execute git in

    :raises:
        OSError - when git exits non-zero; the exception's .stdout attribute
        carries the captured output

    :return:
        A unicode string of the trimmed output (stderr is merged into stdout)
    """

    proc = subprocess.Popen(
        ['git'] + params,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        cwd=cwd
    )
    output, _ = proc.communicate()
    if proc.wait() != 0:
        error = OSError('git exit code was non-zero')
        error.stdout = output
        raise error
    return output.decode('utf-8').strip()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_env_var_file(data):
""" Parses a basic VAR="value data" file contents into a dict :param data: A unicode string of the file data :return: A dict of parsed name/value data """ |
output = {}
for line in data.splitlines():
line = line.strip()
if not line or '=' not in line:
continue
parts = line.split('=')
if len(parts) != 2:
continue
name = parts[0]
value = parts[1]
if len(value) > 1:
if value[0] == '"' and value[-1] == '"':
value = value[1:-1]
output[name] = value
return output |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _platform_name():
    """
    Returns information about the current operating system and version

    :return:
        A unicode string containing the OS name and version

    :raises:
        ValueError - on Linux, when /etc/os-release or /etc/lsb-release
        exists but contains no usable name/version fields
    """

    if sys.platform == 'darwin':
        version = _plat.mac_ver()[0]
        _plat_ver_info = tuple(map(int, version.split('.')))
        # Apple renamed "OS X" to "macOS" starting with 10.12 (Sierra)
        if _plat_ver_info < (10, 12):
            name = 'OS X'
        else:
            name = 'macOS'
        return '%s %s' % (name, version)

    elif sys.platform == 'win32':
        # The previous implementation also called sys.getwindowsversion(),
        # but the result was never used - removed as dead code
        return 'Windows %s' % _plat.win32_ver()[0]

    elif sys.platform in ['linux', 'linux2']:
        if os.path.exists('/etc/os-release'):
            with open('/etc/os-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'NAME' in pairs and 'VERSION_ID' in pairs:
                # (An unreachable assignment that followed this return in
                # the original has been removed)
                return '%s %s' % (pairs['NAME'], pairs['VERSION_ID'])
            elif 'PRETTY_NAME' in pairs:
                return pairs['PRETTY_NAME']
            elif 'NAME' in pairs:
                return pairs['NAME']
            else:
                raise ValueError('No suitable version info found in /etc/os-release')
        elif os.path.exists('/etc/lsb-release'):
            with open('/etc/lsb-release', 'r', encoding='utf-8') as f:
                pairs = _parse_env_var_file(f.read())
            if 'DISTRIB_DESCRIPTION' in pairs:
                return pairs['DISTRIB_DESCRIPTION']
            else:
                raise ValueError('No suitable version info found in /etc/lsb-release')
        else:
            return 'Linux'

    else:
        return '%s %s' % (_plat.system(), _plat.release())
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _list_files(root):
    """
    Lists all of the files in a directory, taking into account any .gitignore
    file that is present

    :param root:
        A unicode filesystem path

    :return:
        A sorted list of unicode strings, containing relative paths of all
        files under root that are not matched by the .gitignore patterns
    """

    dir_patterns, file_patterns = _gitignore(root)
    paths = []
    prefix = os.path.abspath(root) + os.sep
    for base, dirs, files in os.walk(root):
        # Prune ignored directories in place so os.walk() skips them.
        # The original called dirs.remove() while iterating dirs, which
        # skips the element following each removal; rebuilding the list
        # via slice assignment avoids that bug.
        dirs[:] = [
            d for d in dirs
            if not any(fnmatch(d, pattern) for pattern in dir_patterns)
        ]
        for f in files:
            if any(fnmatch(f, pattern) for pattern in file_patterns):
                continue
            full_path = os.path.join(base, f)
            # Convert the absolute path to one relative to root
            if full_path[:len(prefix)] == prefix:
                full_path = full_path[len(prefix):]
            paths.append(full_path)
    return sorted(paths)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _execute(params, cwd):
""" Executes a subprocess :param params: A list of the executable and arguments to pass to it :param cwd: The working directory to execute the command in :return: A 2-element tuple of (stdout, stderr) """ |
proc = subprocess.Popen(
params,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=cwd
)
stdout, stderr = proc.communicate()
code = proc.wait()
if code != 0:
e = OSError('subprocess exit code for %r was %d: %s' % (params, code, stderr))
e.stdout = stdout
e.stderr = stderr
raise e
return (stdout, stderr) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run():
    """
    Installs required development dependencies. Uses git to checkout other
    modularcrypto repos for more accurate coverage data.

    :return:
        A bool - True once staging and checkouts have completed
    """

    deps_dir = os.path.join(build_root, 'modularcrypto-deps')
    if os.path.exists(deps_dir):
        shutil.rmtree(deps_dir, ignore_errors=True)
    os.mkdir(deps_dir)

    try:
        print("Staging ci dependencies")
        _stage_requirements(deps_dir, os.path.join(package_root, 'requires', 'ci'))

        print("Checking out modularcrypto packages for coverage")
        for pkg_name in other_packages:
            pkg_url = 'https://github.com/wbond/%s.git' % pkg_name
            if os.path.exists(os.path.join(build_root, pkg_name)):
                print("%s is already present" % pkg_name)
                continue
            print("Cloning %s" % pkg_url)
            _execute(['git', 'clone', pkg_url], build_root)
        print()

    except (Exception):
        # Don't leave a partially-staged dependency dir behind
        if os.path.exists(deps_dir):
            shutil.rmtree(deps_dir, ignore_errors=True)
        raise

    return True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _download(url, dest):
    """
    Downloads a URL to a directory

    :param url:
        The URL to download

    :param dest:
        The path to the directory to save the file in

    :return:
        The filesystem path to the saved file
    """

    print('Downloading %s' % url)
    dest_path = os.path.join(dest, os.path.basename(url))

    if sys.platform == 'win32':
        # Use PowerShell's WebClient so no extra tooling is required,
        # forcing TLS 1.2 first since older defaults may be rejected
        ps_exe = os.path.join('system32\\WindowsPowerShell\\v1.0\\powershell.exe')
        ps_script = "[System.Net.ServicePointManager]::SecurityProtocol = [System.Net.SecurityProtocolType]::Tls12;"
        ps_script += "(New-Object Net.WebClient).DownloadFile('%s', '%s');" % (url, dest_path)
        _execute([ps_exe, '-Command', ps_script], dest)
    else:
        _execute(['curl', '-L', '--silent', '--show-error', '-O', url], dest)

    return dest_path
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _archive_single_dir(archive):
    """
    Check if all members of the archive are in a single top-level directory

    :param archive:
        An archive from _open_archive()

    :return:
        None if not a single top level directory in archive, otherwise a
        unicode string of the top level directory name
    """

    common_root = None
    for info in _list_archive_members(archive):
        fn = _info_name(info)
        if fn in ('.', '/'):
            continue
        # The first path component before either separator style
        if '/' in fn:
            top = fn.split('/', 1)[0]
        elif '\\' in fn:
            top = fn.split('\\', 1)[0]
        else:
            top = fn
        if common_root is None:
            common_root = top
        elif common_root != top:
            return None
    return common_root
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _info_name(info):
""" Returns a normalized file path for an archive info object :param info: An info object from _list_archive_members() :return: A unicode string with all directory separators normalized to "/" """ |
if isinstance(info, zipfile.ZipInfo):
return info.filename.replace('\\', '/')
return info.name.replace('\\', '/') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _extract_info(archive, info):
""" Extracts the contents of an archive info object ;param archive: An archive from _open_archive() :param info: An info object from _list_archive_members() :return: None, or a byte string of the file contents """ |
if isinstance(archive, zipfile.ZipFile):
fn = info.filename
is_dir = fn.endswith('/') or fn.endswith('\\')
out = archive.read(info)
if is_dir and out == b'':
return None
return out
info_file = archive.extractfile(info)
if info_file:
return info_file.read()
return None |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _extract_package(deps_dir, pkg_path):
    """
    Extract a .whl, .zip, .tar.gz or .tar.bz2 into a package path to use
    when running CI tasks

    :param deps_dir:
        A unicode string of the directory the package should be extracted to

    :param pkg_path:
        A unicode string of the path to the archive
    """

    if pkg_path.endswith('.exe'):
        try:
            # zf is pre-assigned so the finally block can always reference
            # it, even when ZipFile() itself raises
            zf = None
            zf = zipfile.ZipFile(pkg_path, 'r')
            # Exes have a PLATLIB folder containing everything we want
            for zi in zf.infolist():
                if not zi.filename.startswith('PLATLIB'):
                    continue
                data = _extract_info(zf, zi)
                if data is not None:
                    # Strip the leading "PLATLIB/" (8 characters) prefix
                    dst_path = os.path.join(deps_dir, zi.filename[8:])
                    dst_dir = os.path.dirname(dst_path)
                    if not os.path.exists(dst_dir):
                        os.makedirs(dst_dir)
                    with open(dst_path, 'wb') as f:
                        f.write(data)
        finally:
            if zf:
                zf.close()
        return

    if pkg_path.endswith('.whl'):
        try:
            zf = None
            zf = zipfile.ZipFile(pkg_path, 'r')
            # Wheels contain exactly what we need and nothing else
            zf.extractall(deps_dir)
        finally:
            if zf:
                zf.close()
        return

    # Source archives may contain a bunch of other things.
    # The following code works for the packages coverage and
    # configparser, which are the two we currently require that
    # do not provide wheels
    try:
        ar = None
        ar = _open_archive(pkg_path)

        pkg_name = None
        base_path = _archive_single_dir(ar) or ''
        if len(base_path):
            # Source archives are conventionally named "<pkg>-<version>/"
            # at the top level - TODO confirm this holds for all inputs
            if '-' in base_path:
                pkg_name, _ = base_path.split('-', 1)
            base_path += '/'

        base_pkg_path = None
        if pkg_name is not None:
            base_pkg_path = base_path + pkg_name + '/'
        src_path = base_path + 'src/'

        # Collect (info, relative destination path) pairs for members that
        # live under the package dir or under a src/ layout dir
        members = []
        for info in _list_archive_members(ar):
            fn = _info_name(info)
            if base_pkg_path is not None and fn.startswith(base_pkg_path):
                # Keep the "<pkg_name>/" prefix on the destination path
                dst_path = fn[len(base_pkg_path) - len(pkg_name) - 1:]
                members.append((info, dst_path))
                continue
            if fn.startswith(src_path):
                members.append((info, fn[len(src_path):]))
                continue

        for info, path in members:
            info_data = _extract_info(ar, info)
            # Dirs won't return a file
            if info_data is not None:
                dst_path = os.path.join(deps_dir, path)
                dst_dir = os.path.dirname(dst_path)
                if not os.path.exists(dst_dir):
                    os.makedirs(dst_dir)
                with open(dst_path, 'wb') as f:
                    f.write(info_data)
    finally:
        if ar:
            ar.close()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_requires(path):
    """
    Does basic parsing of pip requirements files, to allow for using
    something other than Python to do actual TLS requests

    :param path:
        A path to a requirements file

    :return:
        A list of dict objects containing the keys:
         - 'type' ('any', 'url', '==', '>=')
         - 'pkg'
         - 'ver' (if 'type' == '==' or 'type' == '>=')
    """

    python_version = '.'.join(map(str_cls, sys.version_info[0:2]))
    sys_platform = sys.platform

    packages = []

    with open(path, 'rb') as f:
        contents = f.read().decode('utf-8')

    for line in re.split(r'\r?\n', contents):
        line = line.strip()
        if not len(line):
            continue
        # Skip comment lines
        if re.match(r'^\s*#', line):
            continue
        if ';' in line:
            # Environment marker, e.g. 'pkg; python_version < "3.0"'.
            # Only sys_platform and python_version markers are handled: the
            # names are substituted with their repr() and the condition is
            # eval()ed. NOTE: this trusts the requirements file contents -
            # only use on the project's own files.
            package, cond = line.split(';', 1)
            package = package.strip()
            cond = cond.strip()
            cond = cond.replace('sys_platform', repr(sys_platform))
            cond = cond.replace('python_version', repr(python_version))
            if not eval(cond):
                continue
        else:
            package = line.strip()

        # Nested requirements file: "-r other.txt", resolved relative to
        # the directory of the current requirements file
        if re.match(r'^\s*-r\s*', package):
            sub_req_file = re.sub(r'^\s*-r\s*', '', package)
            sub_req_file = os.path.abspath(os.path.join(os.path.dirname(path), sub_req_file))
            packages.extend(_parse_requires(sub_req_file))
            continue
        if re.match(r'https?://', package):
            packages.append({'type': 'url', 'pkg': package})
            continue
        # ">=" is checked before "==" so each line matches at most one form
        if '>=' in package:
            parts = package.split('>=')
            package = parts[0].strip()
            ver = parts[1].strip()
            packages.append({'type': '>=', 'pkg': package, 'ver': ver})
            continue
        if '==' in package:
            parts = package.split('==')
            package = parts[0].strip()
            ver = parts[1].strip()
            packages.append({'type': '==', 'pkg': package, 'ver': ver})
            continue
        # Anything left must be a bare package name
        if re.search(r'[^ a-zA-Z0-9\-]', package):
            raise Exception('Unsupported requirements format version constraint: %s' % package)
        packages.append({'type': 'any', 'pkg': package})

    return packages
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unarmor(pem_bytes, multiple=False):
    """
    Convert a PEM-encoded byte string into a DER-encoded byte string

    :param pem_bytes:
        A byte string of the PEM-encoded data

    :param multiple:
        If True, function will return a generator

    :raises:
        ValueError - when the pem_bytes do not appear to be PEM-encoded bytes

    :return:
        A 3-element tuple (object_name, headers, der_bytes). The object_name
        is a unicode string of what is between "-----BEGIN " and "-----".
        Examples include: "CERTIFICATE", "PUBLIC KEY", "PRIVATE KEY". The
        headers is a dict containing any lines in the form "Name: Value"
        that are right after the begin line.
    """

    generator = _unarmor(pem_bytes)
    # With multiple=False, only the first armored object is returned
    return generator if multiple else next(generator)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def preferred_ordinal(cls, attr_name):
    """
    Returns an ordering value for a particular attribute key.

    Unrecognized attributes and OIDs will be sorted lexically at the end.

    :return:
        An orderable value.
    """

    attr_name = cls.map(attr_name)
    try:
        ordinal = cls.preferred_order.index(attr_name)
    except ValueError:
        # Unknown names sort after every recognized one, then lexically
        ordinal = len(cls.preferred_order)
    return (ordinal, attr_name)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def prepped_value(self):
    """
    Returns the value after being processed by the internationalized string
    preparation as specified by RFC 5280

    :return:
        A unicode string
    """

    # Lazily computed and cached on first access
    if self._prepped is not None:
        return self._prepped
    self._prepped = self._ldap_string_prep(self['value'].native)
    return self._prepped
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_values(self, rdn):
""" Returns a dict of prepped values contained in an RDN :param rdn: A RelativeDistinguishedName object :return: A dict object with unicode strings of NameTypeAndValue value field values that have been prepped for comparison """ |
output = {}
[output.update([(ntv['type'].native, ntv.prepped_value)]) for ntv in rdn]
return output |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def build(cls, name_dict, use_printable=False):
    """
    Creates a Name object from a dict of unicode string keys and values. The
    keys should be from NameType._map, or a dotted-integer OID unicode string.

    :param name_dict:
        A dict of name information, e.g. {"common_name": "Will Bond",
        "country_name": "US", "organization": "Codex Non Sufficit LC"}

    :param use_printable:
        A bool - if PrintableString should be used for encoding instead of
        UTF8String. This is for backwards compatibility with old software.

    :return:
        An x509.Name object
    """

    if use_printable:
        encoding_name = 'printable_string'
        encoding_class = PrintableString
    else:
        encoding_name = 'utf8_string'
        encoding_class = UTF8String

    # Emit the attributes according to NameType.preferred_order
    ordered_items = sorted(
        name_dict.items(),
        key=lambda item: NameType.preferred_ordinal(item[0])
    )

    rdns = []
    for attribute_name, attribute_value in ordered_items:
        attribute_name = NameType.map(attribute_name)
        if attribute_name == 'email_address':
            value = EmailAddress(attribute_value)
        elif attribute_name == 'domain_component':
            value = DNSName(attribute_value)
        elif attribute_name in ('dn_qualifier', 'country_name', 'serial_number'):
            # These attribute types are always encoded as PrintableString
            value = DirectoryString(
                name='printable_string',
                value=PrintableString(attribute_value)
            )
        else:
            value = DirectoryString(
                name=encoding_name,
                value=encoding_class(attribute_value)
            )
        rdns.append(RelativeDistinguishedName([
            NameTypeAndValue({
                'type': attribute_name,
                'value': value
            })
        ]))

    return cls(name='', value=RDNSequence(rdns))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _recursive_humanize(self, value):
""" Recursively serializes data compiled from the RDNSequence :param value: An Asn1Value object, or a list of Asn1Value objects :return: A unicode string """ |
if isinstance(value, list):
return', '.join(
reversed([self._recursive_humanize(sub_value) for sub_value in value])
)
return value.native |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def crl_distribution_points(self):
    """
    Returns complete CRL URLs - does not include delta CRLs

    :return:
        A list of zero or more DistributionPoint objects
    """

    # Computed lazily and cached for subsequent accesses
    if self._crl_distribution_points is None:
        points = self._get_http_crl_distribution_points(self.crl_distribution_points_value)
        self._crl_distribution_points = points
    return self._crl_distribution_points
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delta_crl_distribution_points(self):
    """
    Returns delta CRL URLs - does not include complete CRLs

    :return:
        A list of zero or more DistributionPoint objects
    """

    # Computed lazily from the freshest CRL extension and cached
    if self._delta_crl_distribution_points is None:
        points = self._get_http_crl_distribution_points(self.freshest_crl_value)
        self._delta_crl_distribution_points = points
    return self._delta_crl_distribution_points
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _get_http_crl_distribution_points(self, crl_distribution_points):
""" Fetches the DistributionPoint object for non-relative, HTTP CRLs referenced by the certificate :param crl_distribution_points: A CRLDistributionPoints object to grab the DistributionPoints from :return: A list of zero or more DistributionPoint objects """ |
output = []
if crl_distribution_points is None:
return []
for distribution_point in crl_distribution_points:
distribution_point_name = distribution_point['distribution_point']
if distribution_point_name is VOID:
continue
# RFC 5280 indicates conforming CA should not use the relative form
if distribution_point_name.name == 'name_relative_to_crl_issuer':
continue
# This library is currently only concerned with HTTP-based CRLs
for general_name in distribution_point_name.chosen:
if general_name.name == 'uniform_resource_identifier':
output.append(distribution_point)
return output |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _is_wildcard_match(self, domain_labels, valid_domain_labels):
""" Determines if the labels in a domain are a match for labels from a wildcard valid domain name :param domain_labels: A list of unicode strings, with A-label form for IDNs, of the labels in the domain name to check :param valid_domain_labels: A list of unicode strings, with A-label form for IDNs, of the labels in a wildcard domain pattern :return: A boolean - if the domain matches the valid domain """ |
first_domain_label = domain_labels[0]
other_domain_labels = domain_labels[1:]
wildcard_label = valid_domain_labels[0]
other_valid_domain_labels = valid_domain_labels[1:]
# The wildcard is only allowed in the first label, so if
# The subsequent labels are not equal, there is no match
if other_domain_labels != other_valid_domain_labels:
return False
if wildcard_label == '*':
return True
wildcard_regex = re.compile('^' + wildcard_label.replace('*', '.*') + '$')
if wildcard_regex.match(first_domain_label):
return True
return False |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run():
    """
    Runs flake8 lint

    :return:
        A bool - if flake8 did not find any errors
    """

    print('Running flake8 %s' % flake8.__version__)

    flake8_style = get_style_guide(config_file=os.path.join(package_root, 'tox.ini'))

    # Collect every .py file under the package, dev and tests directories
    paths = []
    for base_dir in (package_name, 'dev', 'tests'):
        for root, _, filenames in os.walk(base_dir):
            paths.extend(
                os.path.join(root, filename)
                for filename in filenames
                if filename.endswith('.py')
            )

    report = flake8_style.check_files(paths)
    success = report.total_errors == 0
    if success:
        print('OK')
    return success
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def run():
    """
    Runs the linter and tests

    :return:
        A bool - if the linter and tests ran successfully
    """

    print('Python ' + sys.version.replace('\n', ''))

    try:
        # Load the oscrypto test package from a sibling checkout so the
        # crypto backend in use can be reported; a missing checkout simply
        # skips the report
        oscrypto_tests_module_info = imp.find_module('tests', [os.path.join(build_root, 'oscrypto')])
        oscrypto_tests = imp.load_module('oscrypto.tests', *oscrypto_tests_module_info)
        oscrypto = oscrypto_tests.local_oscrypto()
        print('\noscrypto backend: %s' % oscrypto.backend())
    except (ImportError):
        pass

    # NOTE(review): run_lint/run_coverage appear to be callables that may be
    # falsy (e.g. None) when unavailable - confirm against the module header
    if run_lint:
        print('')
        lint_result = run_lint()
    else:
        lint_result = True

    if run_coverage:
        print('\nRunning tests (via coverage.py)')
        sys.stdout.flush()
        tests_result = run_coverage(ci=True)
    else:
        print('\nRunning tests')
        sys.stdout.flush()
        tests_result = run_tests()
    sys.stdout.flush()

    return lint_result and tests_result
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace(self, year=None, month=None, day=None):
    """
    Returns a new datetime.date or asn1crypto.util.extended_date object with
    the specified components replaced

    :return:
        A datetime.date or asn1crypto.util.extended_date object
    """

    # Any component not supplied keeps its current value
    year = self.year if year is None else year
    month = self.month if month is None else month
    day = self.day if day is None else day

    # Years before 1 cannot be represented by datetime.date
    cls = date if year > 0 else extended_date
    return cls(year, month, day)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def replace(self, year=None, month=None, day=None, hour=None, minute=None, second=None, microsecond=None, tzinfo=None):
    """
    Returns a new datetime.datetime or asn1crypto.util.extended_datetime
    object with the specified components replaced

    :return:
        A datetime.datetime or asn1crypto.util.extended_datetime object
    """

    # Any component not supplied keeps its current value (note: passing
    # tzinfo=None therefore keeps the existing tzinfo rather than clearing
    # it, matching the original behavior)
    year = self.year if year is None else year
    month = self.month if month is None else month
    day = self.day if day is None else day
    hour = self.hour if hour is None else hour
    minute = self.minute if minute is None else minute
    second = self.second if second is None else second
    microsecond = self.microsecond if microsecond is None else microsecond
    tzinfo = self.tzinfo if tzinfo is None else tzinfo

    # Years before 1 cannot be represented by datetime.datetime
    cls = datetime if year > 0 else extended_datetime
    return cls(year, month, day, hour, minute, second, microsecond, tzinfo)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def delta_crl_distribution_points(self):
    """
    Returns delta CRL URLs - only applies to complete CRLs

    :return:
        A list of zero or more DistributionPoint objects
    """

    if self._delta_crl_distribution_points is None:
        points = []
        if self.freshest_crl_value is not None:
            for point in self.freshest_crl_value:
                point_name = point['distribution_point']
                # RFC 5280 indicates conforming CA should not use the relative form
                if point_name.name == 'name_relative_to_crl_issuer':
                    continue
                # This library is currently only concerned with HTTP-based CRLs
                for general_name in point_name.chosen:
                    if general_name.name == 'uniform_resource_identifier':
                        points.append(point)
        self._delta_crl_distribution_points = points
    return self._delta_crl_distribution_points
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_extensions(self):
""" Sets common named extensions to private attributes and creates a list of critical extensions """ |
self._critical_extensions = set()
for extension in self['single_extensions']:
name = extension['extn_id'].native
attribute_name = '_%s_value' % name
if hasattr(self, attribute_name):
setattr(self, attribute_name, extension['extn_value'].parsed)
if extension['critical'].native:
self._critical_extensions.add(name)
self._processed_extensions = True |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _basic_debug(prefix, self):
    """
    Prints out basic information about an Asn1Value object. Extracted for
    reuse among different classes that customize the debug information.

    :param prefix:
        A unicode string of spaces to prefix output line with

    :param self:
        The object to print the debugging information about
    """

    print('%s%s Object #%s' % (prefix, type_name(self), id(self)))
    if self._header:
        print('%s Header: 0x%s' % (prefix, binascii.hexlify(self._header or b'').decode('utf-8')))

    # Method/class/tag names can only be resolved when all three are set
    has_header = self.method is not None and self.class_ is not None and self.tag is not None
    if has_header:
        method_name = METHOD_NUM_TO_NAME_MAP.get(self.method)
        class_name = CLASS_NUM_TO_NAME_MAP.get(self.class_)

    if self.explicit is not None:
        # Print one line per explicit tagging layer before the header line
        for class_, tag in self.explicit:
            print(
                '%s %s tag %s (explicitly tagged)' %
                (
                    prefix,
                    CLASS_NUM_TO_NAME_MAP.get(class_),
                    tag
                )
            )
        if has_header:
            print('%s %s %s %s' % (prefix, method_name, class_name, self.tag))

    elif self.implicit:
        if has_header:
            print('%s %s %s tag %s (implicitly tagged)' % (prefix, method_name, class_name, self.tag))

    elif has_header:
        print('%s %s %s tag %s' % (prefix, method_name, class_name, self.tag))

    print('%s Data: 0x%s' % (prefix, binascii.hexlify(self.contents or b'').decode('utf-8')))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _tag_type_to_explicit_implicit(params):
""" Converts old-style "tag_type" and "tag" params to "explicit" and "implicit" :param params: A dict of parameters to convert from tag_type/tag to explicit/implicit """ |
if 'tag_type' in params:
if params['tag_type'] == 'explicit':
params['explicit'] = (params.get('class', 2), params['tag'])
elif params['tag_type'] == 'implicit':
params['implicit'] = (params.get('class', 2), params['tag'])
del params['tag_type']
del params['tag']
if 'class' in params:
del params['class'] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _build_id_tuple(params, spec):
""" Builds a 2-element tuple used to identify fields by grabbing the class_ and tag from an Asn1Value class and the params dict being passed to it :param params: A dict of params to pass to spec :param spec: An Asn1Value class :return: A 2-element integer tuple in the form (class_, tag) """ |
# Handle situations where the spec is not known at setup time
if spec is None:
return (None, None)
required_class = spec.class_
required_tag = spec.tag
_tag_type_to_explicit_implicit(params)
if 'explicit' in params:
if isinstance(params['explicit'], tuple):
required_class, required_tag = params['explicit']
else:
required_class = 2
required_tag = params['explicit']
elif 'implicit' in params:
if isinstance(params['implicit'], tuple):
required_class, required_tag = params['implicit']
else:
required_class = 2
required_tag = params['implicit']
if required_class is not None and not isinstance(required_class, int_types):
required_class = CLASS_NAME_TO_NUM_MAP[required_class]
required_class = params.get('class_', required_class)
required_tag = params.get('tag', required_tag)
return (required_class, required_tag) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_build(encoded_data, pointer=0, spec=None, spec_params=None, strict=False):
    """
    Parses a byte string generically, or using a spec with optional params

    :param encoded_data:
        A byte string that contains BER-encoded data

    :param pointer:
        The index in the byte string to parse from

    :param spec:
        A class derived from Asn1Value that defines what class_ and tag the
        value should have, and the semantics of the encoded value. The
        return value will be of this type. If omitted, the encoded value
        will be decoded using the standard universal tag based on the
        encoded tag number.

    :param spec_params:
        A dict of params to pass to the spec object

    :param strict:
        A boolean indicating if trailing data should be forbidden - if so, a
        ValueError will be raised when trailing data exists

    :return:
        A 2-element tuple:
         - 0: An object of the type spec, or if not specified, a child of Asn1Value
         - 1: An integer indicating how many bytes were consumed
    """

    encoded_len = len(encoded_data)
    info, new_pointer = _parse(encoded_data, encoded_len, pointer)
    # NOTE(review): this strict check compares against pointer + the FULL
    # data length, which only detects trailing data when pointer is 0 -
    # confirm callers always use strict with pointer=0
    if strict and new_pointer != pointer + encoded_len:
        extra_bytes = pointer + encoded_len - new_pointer
        raise ValueError('Extra data - %d bytes of trailing data were provided' % extra_bytes)
    return (_build(*info, spec=spec, spec_params=spec_params), new_pointer)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _new_instance(self):
""" Constructs a new copy of the current object, preserving any tagging :return: An Asn1Value object """ |
new_obj = self.__class__()
new_obj.class_ = self.class_
new_obj.tag = self.tag
new_obj.implicit = self.implicit
new_obj.explicit = self.explicit
return new_obj |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def retag(self, tagging, tag=None):
    """
    Copies the object, applying a new tagging to it

    :param tagging:
        A dict containing the keys "explicit" and "implicit". Legacy API
        allows a unicode string of "implicit" or "explicit".

    :param tag:
        A integer tag number. Only used when tagging is a unicode string.

    :return:
        An Asn1Value object
    """

    # Normalize the legacy string-based API into the dict form
    if not isinstance(tagging, dict):
        tagging = {tagging: tag}

    duplicate = self.__class__(
        explicit=tagging.get('explicit'),
        implicit=tagging.get('implicit')
    )
    duplicate._copy(self, copy.deepcopy)
    return duplicate
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def untag(self):
    """
    Copies the object, removing any special tagging from it

    :return:
        An Asn1Value object
    """

    # A fresh instance of the class carries no implicit/explicit tagging
    plain = self.__class__()
    plain._copy(self, copy.deepcopy)
    return plain
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _as_chunk(self):
""" A method to return a chunk of data that can be combined for constructed method values :return: A native Python value that can be added together. Examples include byte strings, unicode strings or tuples. """ |
if self._chunks_offset == 0:
return self.contents
return self.contents[self._chunks_offset:] |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another Constructable object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(Constructable, self)._copy(other, copy_func)
    # Both attributes hold immutable values, so no copy_func call is needed
    for name in ('method', '_indefinite'):
        setattr(self, name, getattr(other, name))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another Any object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(Any, self)._copy(other, copy_func)
    # The parsed value may be mutable, so honor the requested copier
    self._parsed = copy_func(other._parsed)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _setup(self):
    """
    Generates _id_map from _alternatives to allow validating contents
    """

    cls = self.__class__
    cls._id_map = {}
    cls._name_map = {}
    for index, alternative in enumerate(cls._alternatives):
        # Normalize two-element definitions to (name, spec, params)
        if len(alternative) < 3:
            alternative = alternative + ({},)
            cls._alternatives[index] = alternative
        name, spec, params = alternative
        cls._id_map[_build_id_tuple(params, spec)] = index
        cls._name_map[name] = index
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def parse(self):
    """
    Parses the detected alternative

    :return:
        An Asn1Value object of the chosen alternative
    """

    if self._parsed is None:
        try:
            _, spec, params = self._alternatives[self._choice]
            self._parsed, _ = _parse_build(self._contents, spec=spec, spec_params=params)
        except (ValueError, TypeError) as e:
            args = e.args[1:]
            e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
            raise e
    # Bug fix: previously the freshly-parsed value was never returned, so
    # the first call to parse() yielded None instead of the alternative
    return self._parsed
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another Choice object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(Choice, self)._copy(other, copy_func)
    # The choice index and name are immutable; only the parsed value
    # requires a real copy
    self._choice = other._choice
    self._name = other._name
    self._parsed = copy_func(other._parsed)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another AbstractString object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(AbstractString, self)._copy(other, copy_func)
    # Cached unicode strings are immutable, so a shared reference is safe
    self._unicode = other._unicode
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another OctetBitString object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(OctetBitString, self)._copy(other, copy_func)
    # Byte strings are immutable, so sharing the reference is safe
    self._bytes = other._bytes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another OctetString object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(OctetString, self)._copy(other, copy_func)
    # Byte strings are immutable, so sharing the reference is safe
    self._bytes = other._bytes
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another ParsableOctetString object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(ParsableOctetString, self)._copy(other, copy_func)
    # The raw bytes are immutable; the parsed value may not be, so it
    # goes through the requested copier
    self._bytes = other._bytes
    self._parsed = copy_func(other._parsed)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _lazy_child(self, index):
""" Builds a child object if the child has only been parsed into a tuple so far """ |
child = self.children[index]
if child.__class__ == tuple:
child = self.children[index] = _build(*child)
return child |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_contents(self, force=False):
    """
    Updates the .contents attribute of the value with the encoded value of
    all of the child objects

    :param force:
        Ensure all contents are in DER format instead of possibly using
        cached BER-encoded data
    """

    if self.children is None:
        self._parse_children()
    contents = BytesIO()
    for index, info in enumerate(self._fields):
        child = self.children[index]
        if child is None:
            # Absent OPTIONAL field - contributes no bytes
            child_dump = b''
        elif child.__class__ == tuple:
            if force:
                child_dump = self._lazy_child(index).dump(force=force)
            else:
                # Unbuilt children are raw parse tuples; elements 3-5 are
                # the already-encoded header, contents and trailer bytes
                child_dump = child[3] + child[4] + child[5]
        else:
            child_dump = child.dump(force=force)
        # Skip values that are the same as the default
        if info[2] and 'default' in info[2]:
            default_value = info[1](**info[2])
            if default_value.dump() == child_dump:
                continue
        contents.write(child_dump)
    self._contents = contents.getvalue()
    # Invalidate the cached header/trailer since the contents changed
    self._header = None
    if self._trailer != b'':
        self._trailer = b''
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _setup(self):
    """
    Generates _field_map, _field_ids and _oid_nums for use in parsing
    """

    cls = self.__class__
    cls._field_map = {}
    cls._field_ids = []
    cls._precomputed_specs = []

    for index, field in enumerate(cls._fields):
        # Normalize two-element definitions to (name, spec, params)
        if len(field) < 3:
            field = field + ({},)
            cls._fields[index] = field
        cls._field_map[field[0]] = index
        cls._field_ids.append(_build_id_tuple(field[2], field[1]))

    if cls._oid_pair is not None:
        cls._oid_nums = (
            cls._field_map[cls._oid_pair[0]],
            cls._field_map[cls._oid_pair[1]]
        )

    for index, field in enumerate(cls._fields):
        # Fields whose spec must be resolved per-instance (via a spec
        # callback or an OID-mapped value) are marked None; all other
        # specs can be computed once here
        has_callback = cls._spec_callbacks is not None and field[0] in cls._spec_callbacks
        is_mapped_oid = cls._oid_nums is not None and cls._oid_nums[1] == index
        if has_callback or is_mapped_oid:
            precomputed = None
        else:
            precomputed = (field[0], field[1], field[1], field[2], None)
        cls._precomputed_specs.append(precomputed)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _determine_spec(self, index):
    """
    Determine how a value for a field should be constructed

    :param index:
        The field number

    :return:
        A tuple containing the following elements:
         - unicode string of the field name
         - Asn1Value class of the field spec
         - Asn1Value class of the value spec
         - None or dict of params to pass to the field spec
         - None or Asn1Value class indicating the value spec was derived
           from an OID or a spec callback
    """

    name, field_spec, field_params = self._fields[index]
    value_spec = field_spec
    spec_override = None

    if self._spec_callbacks is not None and name in self._spec_callbacks:
        callback = self._spec_callbacks[name]
        spec_override = callback(self)
        if spec_override:
            # Allow a spec callback to specify both the base spec and
            # the override, for situations such as OctetString and parse_as
            if spec_override.__class__ == tuple and len(spec_override) == 2:
                field_spec, value_spec = spec_override
                if value_spec is None:
                    # Only the base spec was given - there is no override
                    value_spec = field_spec
                    spec_override = None
            # When no field spec is specified, use a single return value as that
            elif field_spec is None:
                field_spec = spec_override
                value_spec = field_spec
                spec_override = None
            else:
                value_spec = spec_override

    elif self._oid_nums is not None and self._oid_nums[1] == index:
        # This field's spec is selected by the OID stored in a sibling field
        oid = self._lazy_child(self._oid_nums[0]).native
        if oid in self._oid_specs:
            spec_override = self._oid_specs[oid]
            value_spec = spec_override

    return (name, field_spec, value_spec, field_params, spec_override)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _copy(self, other, copy_func):
    """
    Copies the contents of another Sequence object to itself

    :param other:
        Another instance of the same class

    :param copy_func:
        A reference of copy.copy() or copy.deepcopy() to use when copying
        lists, dicts and objects
    """

    super(Sequence, self)._copy(other, copy_func)
    if self.children is not None:
        # Raw parse tuples are immutable and can be shared; built child
        # objects get a proper copy
        self.children = [
            child if child.__class__ == tuple else child.copy()
            for child in other.children
        ]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def append(self, value):
    """
    Allows adding a child to the end of the sequence

    :param value:
        Native python datatype that will be passed to _child_spec to create
        new child object
    """

    # Ensure the child list exists before mutating it
    if self.children is None:
        self._parse_children()
    new_child = self._make_value(value)
    self.children.append(new_child)
    # Keep the cached native representation in sync instead of discarding it
    if self._native is not None:
        self._native.append(new_child.native)
    self._mutated = True
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _set_contents(self, force=False):
""" Encodes all child objects into the contents for this object :param force: Ensure all contents are in DER format instead of possibly using cached BER-encoded data """ |
if self.children is None:
self._parse_children()
contents = BytesIO()
for child in self:
contents.write(child.dump(force=force))
self._contents = contents.getvalue()
self._header = None
if self._trailer != b'':
self._trailer = b'' |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse_children(self, recurse=False):
    """
    Parses the contents and generates Asn1Value objects based on the
    definitions from _child_spec.

    :param recurse:
        If child objects that are Sequence or SequenceOf objects should
        be recursively parsed

    :raises:
        ValueError - when an error occurs parsing child objects
    """

    try:
        self.children = []
        if self._contents is None:
            # No contents means an empty sequence
            return
        contents_length = len(self._contents)
        child_pointer = 0
        while child_pointer < contents_length:
            parts, child_pointer = _parse(self._contents, contents_length, pointer=child_pointer)
            if self._child_spec:
                # Children stay as raw tuples (with the spec appended) so
                # they can be built lazily later
                child = parts + (self._child_spec,)
            else:
                child = parts
            if recurse:
                child = _build(*child)
                if isinstance(child, (Sequence, SequenceOf)):
                    child._parse_children(recurse=True)
            self.children.append(child)
    except (ValueError, TypeError) as e:
        # Reset to unparsed state and annotate the error with this type
        self.children = None
        args = e.args[1:]
        e.args = (e.args[0] + '\n while parsing %s' % type_name(self),) + args
        raise e
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def type_name(value):
    """
    Returns a user-readable name for the type of an object

    :param value:
        A value to get the type name of

    :return:
        A unicode string of the object's type name
    """

    cls = value if inspect.isclass(value) else value.__class__
    # Builtin types read better without a module prefix
    if cls.__module__ in set(['builtins', '__builtin__']):
        return cls.__name__
    return '%s.%s' % (cls.__module__, cls.__name__)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def emit(class_, method, tag, contents):
    """
    Constructs a byte string of an ASN.1 DER-encoded value

    This is typically not useful. Instead, use one of the standard classes
    from asn1crypto.core, or construct a new class with specific fields,
    and call the .dump() method.

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER value (header and contents)
    """

    # Validate each argument in turn before encoding anything
    if not isinstance(class_, int):
        raise TypeError('class_ must be an integer, not %s' % type_name(class_))
    if not 0 <= class_ <= 3:
        raise ValueError('class_ must be one of 0, 1, 2 or 3, not %s' % class_)

    if not isinstance(method, int):
        raise TypeError('method must be an integer, not %s' % type_name(method))
    if not 0 <= method <= 1:
        raise ValueError('method must be 0 or 1, not %s' % method)

    if not isinstance(tag, int):
        raise TypeError('tag must be an integer, not %s' % type_name(tag))
    if tag < 0:
        raise ValueError('tag must be greater than zero, not %s' % tag)

    if not isinstance(contents, byte_cls):
        raise TypeError('contents must be a byte string, not %s' % type_name(contents))

    return _dump_header(class_, method, tag, contents) + contents
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _parse(encoded_data, data_len, pointer=0, lengths_only=False):
    """
    Parses a byte string into component parts

    :param encoded_data:
        A byte string that contains BER-encoded data

    :param data_len:
        The integer length of the encoded data

    :param pointer:
        The index in the byte string to parse from

    :param lengths_only:
        A boolean to cause the call to return a 2-element tuple of the
        integer number of bytes in the header and the integer number of
        bytes in the contents. Internal use only.

    :return:
        A 2-element tuple:
         - 0: A tuple of (class_, method, tag, header, content, trailer)
         - 1: An integer indicating how many bytes were consumed
    """

    # Every value needs at least an identifier octet and a length octet
    if data_len < pointer + 2:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (2, data_len - pointer))

    start = pointer
    first_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1

    tag = first_octet & 31
    # High tag number form: base-128 tag using the 8th bit as a
    # continuation indicator
    if tag == 31:
        tag = 0
        while True:
            num = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
            pointer += 1
            tag *= 128
            tag += num & 127
            if num >> 7 == 0:
                break

    length_octet = ord(encoded_data[pointer]) if _PY2 else encoded_data[pointer]
    pointer += 1

    if length_octet >> 7 == 0:
        # Short form length: the low 7 bits are the contents length
        if lengths_only:
            return (pointer, pointer + (length_octet & 127))
        contents_end = pointer + (length_octet & 127)

    else:
        length_octets = length_octet & 127
        if length_octets:
            # Long form length: the next length_octets bytes hold the
            # contents length as a big-endian unsigned integer
            pointer += length_octets
            contents_end = pointer + int_from_bytes(encoded_data[pointer - length_octets:pointer], signed=False)
            if lengths_only:
                return (pointer, contents_end)

        else:
            # To properly parse indefinite length values, we need to scan forward
            # parsing headers until we find a value with a length of zero. If we
            # just scanned looking for \x00\x00, nested indefinite length values
            # would not work.
            contents_end = pointer
            # Unfortunately we need to understand the contents of the data to
            # properly scan forward, which bleeds some representation info into
            # the parser. This condition handles the unused bits byte in
            # constructed bit strings.
            if tag == 3:
                contents_end += 1
            while contents_end < data_len:
                sub_header_end, contents_end = _parse(encoded_data, data_len, contents_end, lengths_only=True)
                # A header-only value ending in \x00\x00 is the
                # end-of-contents marker
                if contents_end == sub_header_end and encoded_data[contents_end - 2:contents_end] == b'\x00\x00':
                    break
            if lengths_only:
                return (pointer, contents_end)
            if contents_end > data_len:
                raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
            return (
                (
                    first_octet >> 6,
                    (first_octet >> 5) & 1,
                    tag,
                    encoded_data[start:pointer],
                    # The trailing end-of-contents octets are excluded from
                    # the contents and reported as the trailer
                    encoded_data[pointer:contents_end - 2],
                    b'\x00\x00'
                ),
                contents_end
            )

    if contents_end > data_len:
        raise ValueError(_INSUFFICIENT_DATA_MESSAGE % (contents_end, data_len))
    return (
        (
            first_octet >> 6,
            (first_octet >> 5) & 1,
            tag,
            encoded_data[start:pointer],
            encoded_data[pointer:contents_end],
            b''
        ),
        contents_end
    )
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _dump_header(class_, method, tag, contents):
    """
    Constructs the header bytes for an ASN.1 object

    :param class_:
        An integer ASN.1 class value: 0 (universal), 1 (application),
        2 (context), 3 (private)

    :param method:
        An integer ASN.1 method value: 0 (primitive), 1 (constructed)

    :param tag:
        An integer ASN.1 tag value

    :param contents:
        A byte string of the encoded byte contents

    :return:
        A byte string of the ASN.1 DER header
    """

    header = b''

    id_num = 0
    id_num |= class_ << 6
    id_num |= method << 5

    if tag >= 31:
        # High tag number form (X.690 8.1.2.4.2): the identifier octet's
        # tag bits are all ones and the tag follows base-128, most
        # significant group FIRST, with bit 8 set on every octet except
        # the last.
        # Bug fix: the previous implementation appended the groups least
        # significant first and put the continuation bit on the wrong
        # octets, producing invalid encodings for tags greater than 127.
        header += chr_cls(id_num | 31)
        continuation_bit = 0
        tag_bytes = b''
        while tag > 0:
            tag_bytes = chr_cls(continuation_bit | (tag & 0x7F)) + tag_bytes
            continuation_bit = 0x80
            tag = tag >> 7
        header += tag_bytes
    else:
        header += chr_cls(id_num | tag)

    length = len(contents)
    if length <= 127:
        # Definite short form length
        header += chr_cls(length)
    else:
        # Definite long form length: leading octet gives the number of
        # length octets that follow
        length_bytes = int_to_bytes(length)
        header += chr_cls(0x80 | len(length_bytes))
        header += length_bytes

    return header
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _iri_utf8_errors_handler(exc):
""" Error handler for decoding UTF-8 parts of a URI into an IRI. Leaves byte sequences encoded in %XX format, but as part of a unicode string. :param exc: The UnicodeDecodeError exception :return: A 2-element tuple of (replacement unicode string, integer index to resume at) """ |
bytes_as_ints = bytes_to_list(exc.object[exc.start:exc.end])
replacements = ['%%%02x' % num for num in bytes_as_ints]
return (''.join(replacements), exc.end) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _urlquote(string, safe=''):
    """
    Quotes a unicode string for use in a URL

    :param string:
        A unicode string

    :param safe:
        A unicode string of characters to not encode

    :return:
        None (if string is None) or an ASCII byte string of the quoted string
    """

    if string is None or string == '':
        return None

    # Anything already hex quoted is pulled out of the URL and unquoted if
    # possible
    escapes = []
    if re.search('%[0-9a-fA-F]{2}', string):
        # Try to unquote any percent values, restoring them if they are not
        # valid UTF-8. Also, requote any safe chars since encoded versions of
        # those are functionally different than the unquoted ones.
        def _try_unescape(match):
            byte_string = unquote_to_bytes(match.group(0))
            # The 'iriutf8' handler re-quotes undecodable bytes as %XX
            unicode_string = byte_string.decode('utf-8', 'iriutf8')
            for safe_char in list(safe):
                unicode_string = unicode_string.replace(safe_char, '%%%02x' % ord(safe_char))
            return unicode_string
        string = re.sub('(?:%[0-9a-fA-F]{2})+', _try_unescape, string)

        # Once we have the minimal set of hex quoted values, remove them from
        # the string so that they are not double quoted
        def _extract_escape(match):
            escapes.append(match.group(0).encode('ascii'))
            # A placeholder byte that urlquote() will encode as %00
            return '\x00'
        string = re.sub('%[0-9a-fA-F]{2}', _extract_escape, string)

    output = urlquote(string.encode('utf-8'), safe=safe.encode('utf-8'))
    if not isinstance(output, byte_cls):
        output = output.encode('ascii')

    # Restore the existing quoted values that we extracted
    if len(escapes) > 0:
        def _return_escape(_):
            return escapes.pop(0)
        output = re.sub(b'%00', _return_escape, output)

    return output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def _urlunquote(byte_string, remap=None, preserve=None):
    """
    Unquotes a URI portion from a byte string into unicode using UTF-8

    :param byte_string:
        A byte string of the data to unquote

    :param remap:
        A list of characters (as unicode) that should be re-mapped to a
        %XX encoding. This is used when characters are not valid in part
        of a URL.

    :param preserve:
        A bool - indicates that the chars to be remapped if they occur in
        non-hex form, should be preserved. E.g. / for URL path.

    :return:
        A unicode string
    """

    if byte_string is None:
        return byte_string
    if byte_string == b'':
        return ''

    if preserve:
        # Temporarily swap literal occurrences of the remap chars for
        # unused control characters so they survive the remapping below
        replacements = ['\x1A', '\x1C', '\x1D', '\x1E', '\x1F']
        preserve_unmap = {}
        for char in remap:
            replacement = replacements.pop(0)
            preserve_unmap[replacement] = char
            byte_string = byte_string.replace(char.encode('ascii'), replacement.encode('ascii'))

    byte_string = unquote_to_bytes(byte_string)

    if remap:
        # Re-quote characters that are not allowed unencoded in this part
        # of the URL
        for char in remap:
            byte_string = byte_string.replace(char.encode('ascii'), ('%%%02x' % ord(char)).encode('ascii'))

    # The 'iriutf8' handler keeps undecodable bytes as %XX text
    output = byte_string.decode('utf-8', 'iriutf8')

    if preserve:
        # Restore the literal chars that were swapped out above
        for replacement, original in preserve_unmap.items():
            output = output.replace(replacement, original)

    return output
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def init_args():
    """Parse and return the command line arguments as a dict.

    Side effects: applies --custom-tick and --delim to the module-level
    TICK, SM_TICK and DELIM globals, and exits with usage when run
    interactively with no arguments.
    """
    parser = argparse.ArgumentParser(
        description='draw basic graphs on terminal')
    parser.add_argument(
        'filename',
        nargs='?',
        default="-",
        help='data file name (comma or space separated). Defaults to stdin.')
    parser.add_argument(
        '--title',
        help='Title of graph'
    )
    parser.add_argument(
        '--width',
        type=int,
        default=50,
        help='width of graph in characters default:50'
    )
    parser.add_argument(
        '--format',
        default='{:<5.2f}',
        help='format specifier to use.'
    )
    parser.add_argument(
        '--suffix',
        default='',
        help='string to add as a suffix to all data points.'
    )
    parser.add_argument(
        '--no-labels',
        action='store_true',
        help='Do not print the label column'
    )
    parser.add_argument(
        '--color',
        nargs='*',
        choices=AVAILABLE_COLORS,
        help='Graph bar color( s )'
    )
    parser.add_argument(
        '--vertical',
        action='store_true',
        help='Vertical graph'
    )
    parser.add_argument(
        '--stacked',
        action='store_true',
        help='Stacked bar graph'
    )
    parser.add_argument(
        '--different-scale',
        action='store_true',
        help='Categories have different scales.'
    )
    parser.add_argument(
        '--calendar',
        action='store_true',
        help='Calendar Heatmap chart'
    )
    parser.add_argument(
        '--start-dt',
        help='Start date for Calendar chart'
    )
    parser.add_argument(
        '--custom-tick',
        default='',
        help='Custom tick mark, emoji approved'
    )
    parser.add_argument(
        '--delim',
        default='',
        help='Custom delimiter, default , or space'
    )
    parser.add_argument(
        '--verbose',
        action='store_true',
        help='Verbose output, helpful for debugging'
    )
    parser.add_argument(
        '--version',
        action='store_true',
        help='Display version and exit'
    )
    # No args and an interactive stdin means there is nothing to read -
    # show usage instead of blocking on input
    if len(sys.argv) == 1:
        if sys.stdin.isatty():
            parser.print_usage()
            sys.exit(2)
    args = vars(parser.parse_args())
    if args['custom_tick'] != '':
        # Replace the default tick characters globally
        global TICK, SM_TICK
        TICK = args['custom_tick']
        SM_TICK = ''
    if args['delim'] != '':
        # Override the default delimiter (comma or whitespace)
        global DELIM
        DELIM = args['delim']
    return args
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def find_max_label_length(labels):
    """Return the length of the longest label.

    :param labels: list of label strings
    :return: integer length of the longest label; 0 for an empty list
    """
    # max() with a default replaces the manual index-based scan and
    # handles the empty-list case explicitly
    return max((len(label) for label in labels), default=0)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def normalize(data, width):
    """Normalize the data to fit within ``width`` ticks and return it.

    :param data: list of rows, each a list of numeric values
    :param width: maximum number of tick blocks for the largest value
    :return: list of rows scaled so the largest value maps to ``width``
             (returned unscaled when the maximum already fits)
    """
    min_dat = find_min(data)
    # Offset all values by the minimum if any value is negative, so the
    # scaled output stays non-negative.
    if min_dat < 0:
        offset = abs(min_dat)
        off_data = [[point + offset for point in row] for row in data]
    else:
        off_data = data
    # Note: a redundant re-computation of the minimum (dead store) was
    # removed here.
    max_dat = find_max(off_data)

    if max_dat < width:
        # Don't need to normalize if the max value
        # is less than the width we allow.
        return off_data

    # max_dat / width is the value for a single tick. norm_factor is the
    # inverse of this value.
    # If you divide a number by the value of a single tick, you will find
    # how many ticks it contains.
    norm_factor = width / float(max_dat)
    return [[point * norm_factor for point in row] for row in off_data]
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def horiz_rows(labels, data, normal_dat, args, colors):
    """Prepare the horizontal graph.

    Generator that yields one (value, num_blocks, val_min, color) tuple per
    data point; each row is printed through the print_row function. Also
    prints the label/tail text itself for non-vertical graphs.
    """
    val_min = find_min(data)

    for i in range(len(labels)):
        if args['no_labels']:
            # Hide the labels.
            label = ''
        else:
            # Pad every label to the width of the longest one.
            label = "{:<{x}}: ".format(labels[i],
                                       x=find_max_label_length(labels))

        values = data[i]
        num_blocks = normal_dat[i]

        for j in range(len(values)):
            # In Multiple series graph 1st category has label at the beginning,
            # whereas the rest categories have only spaces.
            if j > 0:
                len_label = len(label)
                label = ' ' * len_label
            tail = ' {}{}'.format(args['format'].format(values[j]),
                                  args['suffix'])
            if colors:
                color = colors[j]
            else:
                color = None

            if not args['vertical']:
                print(label, end="")

            yield(values[j], int(num_blocks[j]), val_min, color)

            if not args['vertical']:
                print(tail)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_row(value, num_blocks, val_min, color):
    """Print a single row of a horizontal graph.

    i.e:
    1: ▇▇ 2
    2: ▇▇▇ 3
    3: ▇▇▇▇ 4
    """
    if color:
        sys.stdout.write(f'\033[{color}m')  # Start colorized output.

    if num_blocks >= 1:
        sys.stdout.write(TICK * num_blocks)
    elif value > val_min or value > 0:
        # Show a small tick for values that are not the minimum but
        # normalize to less than one full block.
        sys.stdout.write(SM_TICK)

    if color:
        sys.stdout.write('\033[0m')  # Back to the default color.
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def stacked_graph(labels, data, normal_data, len_categories, args, colors):
    """Prepare the horizontal stacked graph.

    Each category segment of a row is printed through the print_row
    function, followed by the row's total as a tail."""
    val_min = find_min(data)

    for row_idx in range(len(labels)):
        # Either hide the label or pad it to the width of the widest one.
        if args['no_labels']:
            label = ''
        else:
            label = "{:<{x}}: ".format(labels[row_idx],
                                       x=find_max_label_length(labels))
        print(label, end="")

        values = data[row_idx]
        blocks = normal_data[row_idx]
        for cat_idx in range(len(values)):
            print_row(values[cat_idx], int(blocks[cat_idx]), val_min,
                      colors[cat_idx])

        print(' {}{}'.format(args['format'].format(sum(values)),
                             args['suffix']))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def vertically(value, num_blocks, val_min, color, args):
    """Prepare the vertical graph.

    Accumulates module-level state (value_list, vertical_list, zipped_list,
    maxi) across calls; the whole graph is printed through the
    print_vertical function.
    """
    global maxi, value_list

    value_list.append(str(value))

    # In case the number of blocks at the end of the normalization is less
    # than the default number, use the maxi variable to escape.
    if maxi < num_blocks:
        maxi = num_blocks

    if num_blocks > 0:
        vertical_list.append((TICK * num_blocks))
    else:
        vertical_list.append(SM_TICK)

    # Zip_longest method in order to turn them vertically.
    for row in zip_longest(*vertical_list, fillvalue=' '):
        zipped_list.append(row)

    counter, result_list = 0, []

    # Combined with the maxi variable, escapes the appending method at
    # the correct point or the default one (width).
    for i in reversed(zipped_list):
        result_list.append(i)
        counter += 1
        if maxi == args['width']:
            if counter == (args['width']):
                break
        else:
            if counter == maxi:
                break

    # Return a list of rows which will be used to print the result vertically.
    return result_list
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_vertical(vertical_rows, labels, color, args):
    """Print the whole vertical graph.

    :param vertical_rows: list of row tuples produced by vertically()
    :param labels: list of label strings
    :param color: ANSI color number, or falsy for no color
    :param args: dict of command line arguments
    """
    if color:
        sys.stdout.write(f'\033[{color}m')  # Start to write colorized.

    for row in vertical_rows:
        print(*row)

    sys.stdout.write('\033[0m')  # End of printing colored

    print("-" * len(row) + "Values" + "-" * len(row))
    # Print Values
    for value in zip_longest(*value_list, fillvalue=' '):
        print(" ".join(value))

    # Idiom fix: 'not args[...]' instead of comparing '== False' (PEP 8)
    if not args['no_labels']:
        print("-" * len(row) + "Labels" + "-" * len(row))
        # Print Labels
        for label in zip_longest(*labels, fillvalue=''):
            print(" ".join(label))
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def chart(colors, data, args, labels):
    """Handle the normalization of data and the printing of the graph.

    Dispatches between stacked, different-scale, horizontal and vertical
    rendering based on the parsed arguments.
    """
    len_categories = len(data[0])
    if len_categories > 1:
        # Stacked graph
        if args['stacked']:
            normal_dat = normalize(data, args['width'])
            stacked_graph(labels, data, normal_dat, len_categories,
                          args, colors)
            return

        if not colors:
            # One (no-op) color slot per category keeps indexing uniform.
            colors = [None] * len_categories

        # Multiple series graph with different scales
        # Normalization per category
        if args['different_scale']:
            for i in range(len_categories):
                # Slice out the i-th category as its own one-column data set.
                cat_data = []
                for dat in data:
                    cat_data.append([dat[i]])

                # Normalize data, handle negatives.
                normal_cat_data = normalize(cat_data, args['width'])

                # Generate data for a row.
                for row in horiz_rows(labels, cat_data, normal_cat_data,
                                      args, [colors[i]]):
                    # Print the row
                    if not args['vertical']:
                        print_row(*row)
                    else:
                        vertic = vertically(*row, args=args)

                # Vertical graph
                if args['vertical']:
                    print_vertical(vertic, labels, colors[i], args)

                print()
                # Reset the shared vertical-graph state between categories.
                value_list.clear(), zipped_list.clear(), vertical_list.clear()
            return

    # One category/Multiple series graph with same scale
    # All-together normalization
    if not args['stacked']:
        normal_dat = normalize(data, args['width'])
        for row in horiz_rows(labels, data, normal_dat, args, colors):
            if not args['vertical']:
                print_row(*row)
            else:
                vertic = vertically(*row, args=args)

        if args['vertical'] and len_categories == 1:
            if colors:
                color = colors[0]
            else:
                color = None

            print_vertical(vertic, labels, color, args)
        print()
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def check_data(labels, data, args):
    """Validate the parsed input and return the list of colors to use.

    Exits the program with an error message on any inconsistency."""
    len_categories = len(data[0])

    # Every label needs a data row.
    if len(labels) != len(data):
        print(">> Error: Label and data array sizes don't match")
        sys.exit(1)

    # Every row needs a value for every category.
    for row in data:
        if len(row) != len_categories:
            print(">> Error: There are missing values")
            sys.exit(1)

    colors = []

    # User-supplied colors must cover every category.
    if args['color'] is not None:
        if len(args['color']) != len_categories:
            print(">> Error: Color and category array sizes don't match")
            sys.exit(1)
        colors = [AVAILABLE_COLORS.get(name) for name in args['color']]

    # Vertical graph for multiple series of same scale is not supported yet.
    if args['vertical'] and len_categories > 1 and not args['different_scale']:
        print(">> Error: Vertical graph for multiple series of same "
              "scale is not supported yet.")
        sys.exit(1)

    # Stacked graphs default to the first n available colors.
    if args['stacked'] and not colors:
        colors = list(AVAILABLE_COLORS.values())[:len_categories]

    return colors
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def print_categories(categories, colors):
    """Print a legend line: a colored tick plus each category's name."""
    for idx, category in enumerate(categories):
        if colors:
            sys.stdout.write(f'\033[{colors[idx]}m')  # Switch to this category's color.
        sys.stdout.write(TICK + ' ' + category + ' ')
        if colors:
            sys.stdout.write('\033[0m')  # Reset to the terminal default.
    print('\n\n')
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def read_data(args):
    """Read chart data from a file or stdin (filename ``-``).

    The input may contain a categories line (prefixed with ``@``) and one
    line per label holding the label followed by its values, separated by
    DELIM or whitespace. Lines starting with ``#`` are comments.

    :param args: Parsed CLI arguments (dict-like).
    :return: Tuple ``(categories, labels, data, colors)`` where ``data``
        is a list of per-label value lists and ``colors`` comes from
        :func:`check_data`.
    """
    filename = args['filename']
    stdin = filename == '-'
    if args['verbose']:
        print(f'>> Reading data from {( "stdin" if stdin else filename )}')
        print('')
    if args['title']:
        print('# ' + args['title'] + '\n')
    categories, labels, data, colors = ([] for i in range(4))
    f = sys.stdin if stdin else open(filename, "r")
    try:
        for line in f:
            line = line.strip()
            # Skip blank lines and comments.
            if not line or line.startswith('#'):
                continue
            if line.find(DELIM) > 0:
                cols = line.split(DELIM)
            else:
                cols = line.split()
            if line.startswith('@'):
                # Line contains the category names.
                cols[0] = cols[0].replace("@ ", "")
                categories = cols
            else:
                # Line contains a label followed by its values.
                labels.append(cols[0].strip())
                data.append([float(col.strip()) for col in cols[1:]])
    finally:
        # Always close real files, even if a value fails to parse or
        # check_data() exits; never close sys.stdin, it is not ours.
        if not stdin:
            f.close()
    # Check that all data are valid. (i.e. There are no missing values.)
    colors = check_data(labels, data, args)
    if categories:
        # Print categories' names above the graph.
        print_categories(categories, colors)
    return categories, labels, data, colors
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def calendar_heatmap(data, labels, args):
    """Print a calendar heatmap.""" |
    # Pick the ANSI color code: first user-supplied color, else blue.
    if args['color']:
        colornum = AVAILABLE_COLORS.get(args['color'][0])
    else:
        colornum = AVAILABLE_COLORS.get('blue')
    # Map each label (expected "YYYY-MM-DD" date string — TODO confirm
    # with caller) to the first value of its row.
    dt_dict = {}
    for i in range(len(labels)):
        dt_dict[labels[i]] = data[i][0]
    # get max value (rows are single-value lists here, so lexicographic
    # max(data) picks the row with the largest first element)
    max_val = float(max(data)[0])
    # Quartile shading ticks, lightest to solid.
    tick_1 = "░"
    tick_2 = "▒"
    tick_3 = "▓"
    tick_4 = "█"
    if args['custom_tick']:
        # A custom tick replaces all four shades.
        tick_1 = tick_2 = tick_3 = tick_4 = args['custom_tick']
    # check if start day set, otherwise use one year ago
    if args['start_dt']:
        start_dt = datetime.strptime(args['start_dt'], '%Y-%m-%d')
    else:
        start = datetime.now()
        start_dt = datetime(year=start.year-1, month=start.month,
                            day=start.day)
    # modify start date to be a Monday, subtract weekday() from day
    start_dt = start_dt - timedelta(start_dt.weekday())
    # TODO: legend doesn't line up properly for all start dates/data
    # top legend for months
    sys.stdout.write(" ")
    for month in range(13):
        # ~31-day stride approximates one column group per month.
        month_dt = datetime(year=start_dt.year, month=start_dt.month, day=1) +\
            timedelta(days=month*31)
        sys.stdout.write(month_dt.strftime("%b") + " ")
        if args['custom_tick']:  # assume custom tick is emoji which is one wider
            sys.stdout.write(" ")
    sys.stdout.write('\n')
    # One row per weekday, one column per week for 53 weeks.
    for day in range(7):
        sys.stdout.write(DAYS[day] + ': ')
        for week in range(53):
            day_ = start_dt + timedelta(days=day + week*7)
            day_str = day_.strftime("%Y-%m-%d")
            if day_str in dt_dict:
                # Shade by quartile of the maximum value.
                if dt_dict[day_str] > max_val * 0.75:
                    tick = tick_4
                elif dt_dict[day_str] > max_val * 0.50:
                    tick = tick_3
                elif dt_dict[day_str] > max_val * 0.25:
                    tick = tick_2
                else:
                    tick = tick_1
            else:
                tick = ' '
            if colornum:
                sys.stdout.write(f'\033[{colornum}m')
            sys.stdout.write(tick)
            if colornum:
                sys.stdout.write('\033[0m')
        sys.stdout.write('\n') |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
| def _wrapper(func, *args, **kwargs):
    'Decorator for the methods that follow'
    # Translate a FUSE callback's outcome into the C return convention:
    # success -> the callback's return value (or 0), OSError -> -errno,
    # any other Exception -> -EINVAL; non-Exception errors abort FUSE.
    try:
        if func.__name__ == "init":
            # init may not fail, as its return code is just stored as
            # private_data field of struct fuse_context
            return func(*args, **kwargs) or 0
        else:
            try:
                return func(*args, **kwargs) or 0
            except OSError as e:
                if e.errno > 0:
                    log.debug(
                        "FUSE operation %s raised a %s, returning errno %s.",
                        func.__name__, type(e), e.errno, exc_info=True)
                    # FUSE expects failures as negative errno values.
                    return -e.errno
                else:
                    # A non-positive errno cannot be forwarded; fall back
                    # to a generic "invalid argument" error.
                    log.error(
                        "FUSE operation %s raised an OSError with negative "
                        "errno %s, returning errno.EINVAL.",
                        func.__name__, e.errno, exc_info=True)
                    return -errno.EINVAL
            except Exception:
                log.error("Uncaught exception from FUSE operation %s, "
                          "returning errno.EINVAL.",
                          func.__name__, exc_info=True)
                return -errno.EINVAL
    except BaseException as e:
        # Remember the fatal error so the caller can re-raise it after
        # the FUSE main loop terminates.
        self.__critical_exception = e
        log.critical(
            "Uncaught critical exception from FUSE operation %s, aborting.",
            func.__name__, exc_info=True)
        # the raised exception (even SystemExit) will be caught by FUSE
        # potentially causing SIGSEGV, so tell system to stop/interrupt FUSE
        fuse_exit()
        return -errno.EFAULT |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def lookup(self, req, parent, name):
    """Look up a directory entry by name and get its attributes. Valid replies: reply_entry reply_err """ |
    # Default implementation: no entries exist, so report "not found".
    self.reply_err(req, errno.ENOENT) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def getattr(self, req, ino, fi):
    """Get file attributes.

    Valid replies: reply_attr, reply_err.
    """
    # Only the root inode (1) exists: a world-readable directory.
    if ino != 1:
        self.reply_err(req, errno.ENOENT)
        return
    root_attr = {'st_ino': 1, 'st_mode': S_IFDIR | 0o755, 'st_nlink': 2}
    self.reply_attr(req, root_attr, 1.0)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def setattr(self, req, ino, attr, to_set, fi):
    """Set file attributes Valid replies: reply_attr reply_err """ |
    # Default implementation: refuse modification (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def mknod(self, req, parent, name, mode, rdev):
    """Create file node Valid replies: reply_entry reply_err """ |
    # Default implementation: creation refused (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unlink(self, req, parent, name):
    """Remove a file Valid replies: reply_err """ |
    # Default implementation: removal refused (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def rmdir(self, req, parent, name):
    """Remove a directory Valid replies: reply_err """ |
    # Default implementation: removal refused (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def rename(self, req, parent, name, newparent, newname):
    """Rename a file Valid replies: reply_err """ |
    # Default implementation: rename refused (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def link(self, req, ino, newparent, newname):
    """Create a hard link Valid replies: reply_entry reply_err """ |
    # Default implementation: hard links refused (read-only filesystem).
    self.reply_err(req, errno.EROFS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def listxattr(self, req, ino, size):
    """List extended attribute names Valid replies: reply_buf reply_data reply_xattr reply_err """ |
    # Extended attributes are not implemented in this default class.
    self.reply_err(req, errno.ENOSYS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def removexattr(self, req, ino, name):
    """Remove an extended attribute Valid replies: reply_err """ |
    # Extended attributes are not implemented in this default class.
    self.reply_err(req, errno.ENOSYS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def access(self, req, ino, mask):
    """Check file access permissions Valid replies: reply_err """ |
    # Access checks are not implemented in this default class.
    self.reply_err(req, errno.ENOSYS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def create(self, req, parent, name, mode, fi):
    """Create and open a file Valid replies: reply_create reply_err """ |
    # Atomic create-and-open is not implemented in this default class.
    self.reply_err(req, errno.ENOSYS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def state_tomography_programs(state_prep, qubits=None, rotation_generator=tomography.default_rotations):
    """Yield the tomographic measurement sequences for ``state_prep``.

    Each yielded program prepares the state and appends one tomographic
    rotation on ``qubits`` (all qubits of ``state_prep`` when ``None``),
    wrapped in a PRESERVE_BLOCK pragma so the sequence is kept intact.

    :param Program state_prep: Program preparing the state to tomograph.
    :param list|NoneType qubits: Qubits to perform tomography on, or
        ``None`` for every qubit in ``state_prep``.
    :param generator rotation_generator: Yields the tomography rotations.
    :return: Generator of state tomography programs.
    :rtype: Program
    """
    if qubits is None:
        qubits = state_prep.get_qubits()
    for rotation in rotation_generator(*qubits):
        prog = Program(Pragma("PRESERVE_BLOCK"))
        prog.inst(state_prep)
        prog.inst(rotation)
        prog.inst(Pragma("END_PRESERVE_BLOCK"))
        yield prog
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def do_state_tomography(preparation_program, nsamples, cxn, qubits=None, use_run=False):
    """ Method to perform both a QPU and QVM state tomography, and use the latter as as reference to calculate the fidelity of the former. :param Program preparation_program: Program to execute. :param int nsamples: Number of samples to take for the program. :param QVMConnection|QPUConnection cxn: Connection on which to run the program. :param list qubits: List of qubits for the program. to use in the tomography analysis. :param bool use_run: If ``True``, use append measurements on all qubits and use ``cxn.run`` instead of ``cxn.run_and_measure``. :return: The state tomogram. :rtype: StateTomography """ |
    # Thin wrapper: delegate to the shared tomography driver, configured
    # for state (rather than process) tomography.
    return tomography._do_tomography(preparation_program, nsamples, cxn, qubits,
                                     tomography.MAX_QUBITS_STATE_TOMO,
                                     StateTomography, state_tomography_programs,
                                     DEFAULT_STATE_TOMO_SETTINGS, use_run=use_run) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def estimate_from_ssr(histograms, readout_povm, channel_ops, settings):
    """ Estimate a density matrix from single shot histograms obtained by measuring bitstrings in the Z-eigenbasis after application of given channel operators. :param numpy.ndarray histograms: The single shot histograms, `shape=(n_channels, dim)`. :param DiagognalPOVM readout_povm: The POVM corresponding to the readout plus classifier. :param list channel_ops: The tomography measurement channels as `qutip.Qobj`'s. :param TomographySettings settings: The solver and estimation settings. :return: The generated StateTomography object. :rtype: StateTomography """ |
    # Number of qubits, from the tensor factorization of the channel ops.
    nqc = len(channel_ops[0].dims[0])
    pauli_basis = grove.tomography.operator_utils.PAULI_BASIS ** nqc
    pi_basis = readout_povm.pi_basis
    if not histograms.shape[1] == pi_basis.dim:  # pragma no coverage
        raise ValueError("Currently tomography is only implemented for two-level systems.")
    # prepare the log-likelihood function parameters, see documentation
    n_kj = np.asarray(histograms)
    # c_jk_m links the Pauli coefficients rho_m of the state to the
    # outcome probabilities p_jk of POVM element j under channel k.
    c_jk_m = _prepare_c_jk_m(readout_povm, pauli_basis, channel_ops)
    rho_m = cvxpy.Variable(pauli_basis.dim)
    p_jk = c_jk_m * rho_m
    # Negative log-likelihood of the observed counts (to be minimized).
    obj = -n_kj.ravel() * cvxpy.log(p_jk)
    p_jk_mat = cvxpy.reshape(p_jk, pi_basis.dim, len(channel_ops))  # cvxpy has col-major order
    # Default constraints:
    # MLE must describe valid probability distribution
    # i.e., for each k, p_jk must sum to one and be element-wise non-negative:
    # 1. \sum_j p_jk == 1 for all k
    # 2. p_jk >= 0 for all j, k
    # where p_jk = \sum_m c_jk_m rho_m
    constraints = [
        p_jk >= 0,
        np.matrix(np.ones((1, pi_basis.dim))) * p_jk_mat == 1,
    ]
    # Real/imaginary embedding of rho so the SDP solver can process it.
    rho_m_real_imag = sum((rm * o_ut.to_realimag(Pm)
                           for (rm, Pm) in ut.izip(rho_m, pauli_basis.ops)), 0)
    if POSITIVE in settings.constraints:
        if tomography._SDP_SOLVER.is_functional():
            constraints.append(rho_m_real_imag >> 0)
        else:  # pragma no coverage
            _log.warning("No convex solver capable of semi-definite problems installed.\n"
                         "Dropping the positivity constraint on the density matrix.")
    if UNIT_TRACE in settings.constraints:
        # this assumes that the first element of the Pauli basis is always proportional to
        # the identity
        constraints.append(rho_m[0, 0] == 1. / pauli_basis.ops[0].tr().real)
    prob = cvxpy.Problem(cvxpy.Minimize(obj), constraints)
    _log.info("Starting convex solver")
    prob.solve(solver=tomography.SOLVER, **settings.solver_kwargs)
    if prob.status != cvxpy.OPTIMAL:  # pragma no coverage
        # Best-effort: warn but still return the (possibly suboptimal) fit.
        _log.warning("Problem did not converge to optimal solution. "
                     "Solver settings: {}".format(settings.solver_kwargs))
    return StateTomography(np.array(rho_m.value).ravel(), pauli_basis, settings) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot_state_histogram(self, ax):
    """Visualize the complex matrix elements of the estimated state.

    :param matplotlib.Axes ax: A matplotlib Axes object to plot into.
    :return: Whatever the shared histogram helper returns for the plot.
    """
    # Dead locals removed: nqc and basis labels were computed but never
    # used; the helper only needs the density matrix, axes and title.
    title = "Estimated state"
    return ut.state_histogram(self.rho_est, ax, title)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def plot(self):
    """Visualize the state.

    :return: The generated figure.
    :rtype: matplotlib.Figure
    """
    fig_width = 10
    # The pleasing golden ratio.
    fig_height = fig_width / 1.618
    fig = plt.figure(figsize=(fig_width, fig_height))
    axes = fig.add_subplot(111, projection="3d")
    self.plot_state_histogram(axes)
    return fig
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_constant(self, qc: QuantumComputer, bitstring_map: Dict[str, str]) -> bool:
    """Decide whether ``bitstring_map`` encodes a constant function.

    The map is promised to be either constant (all inputs give the same
    bit) or balanced (half give 0, half give 1). Runs the Deutsch-Jozsa
    circuit once on ``qc``; an all-zero measurement means constant.

    :param qc: Quantum computer to run the Deutsch-Jozsa program on.
    :param bitstring_map: Maps input bitstrings to single-bit strings.
    :return: True if the map represents a constant function.
    :rtype: bool
    """
    self._init_attr(bitstring_map)
    program = Program()
    readout = program.declare('ro', 'BIT', len(self.computational_qubits))
    program += self.deutsch_jozsa_circuit
    program += [MEASURE(q, r) for q, r in zip(self.computational_qubits, readout)]
    compiled = qc.compile(program)
    result = qc.run(compiled)
    # Single shot: the function is constant iff every measured bit is 0.
    measured = np.array(result, dtype=int)
    return all(bit == 0 for bit in measured)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def unitary_function(mappings: Dict[str, str]) -> np.ndarray:
    """Create a unitary matrix implementing the given bitstring map.

    Some of these transformations involve a scratch qubit, so room for
    one is always provided: for an n-qubit mapping the returned matrix
    acts on n + 1 qubits, with the 0th the scratch bit and the function
    value left in the 1st. The input dictionary is not modified.

    :param mappings: f(x) for every length-n bitstring, e.g.
        ``{'00': '1', '01': '0', ...}``; f must be constant or balanced.
    :return: ndarray representing the specified unitary transformation.
    :raises ValueError: If f(x) is neither constant nor balanced.
    """
    num_qubits = int(np.log2(len(mappings)))
    bitsum = sum(int(bit) for bit in mappings.values())
    # Only zeros were entered: f(x) = 0, just swap scratch and answer bit.
    if bitsum == 0:
        return np.kron(SWAP_MATRIX, np.identity(2 ** (num_qubits - 1)))
    # Half of the entries were 0, half 1: build a permutation sending
    # inputs with f(x)=0 into the lower half of the basis, f(x)=1 upper.
    elif bitsum == 2 ** (num_qubits - 1):
        unitary_funct = np.zeros(shape=(2 ** num_qubits, 2 ** num_qubits))
        index_lists = [list(range(2 ** (num_qubits - 1))),
                       list(range(2 ** (num_qubits - 1), 2 ** num_qubits))]
        # Work on a copy so the caller's dictionary is not destroyed
        # (the original implementation popped entries from `mappings`).
        remaining = dict(mappings)
        for j in range(2 ** num_qubits):
            bitstring = np.binary_repr(j, num_qubits)
            value = int(remaining.pop(bitstring))
            i = index_lists[value].pop()
            unitary_funct[i, j] = 1
        return np.kron(np.identity(2), unitary_funct)
    # Only ones were entered: f(x) = 1, swap then flip the answer bit.
    elif bitsum == 2 ** num_qubits:
        x_gate = np.array([[0, 1], [1, 0]])
        return np.kron(SWAP_MATRIX, np.identity(2 ** (num_qubits - 1))).dot(
            np.kron(x_gate, np.identity(2 ** num_qubits)))
    else:
        raise ValueError("f(x) must be constant or balanced")
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def basis_selector_oracle(qubits: List[int], bitstring: str) -> Program:
    """Oracle selecting one element of the computational basis.

    Flips the sign of the state :math:`\\vert x\\rangle` if and only if
    x == bitstring, and does nothing otherwise.

    :param qubits: Qubits the oracle acts on, ordered from most
        significant to least significant.
    :param bitstring: The target bitstring, e.g. "101".
    :return: A program representing this oracle.
    """
    if len(qubits) != len(bitstring):
        raise ValueError("The bitstring should be the same length as the number of qubits.")
    oracle_prog = Program()
    # Single qubit: a bare Z already flips the phase of one basis state.
    if len(bitstring) == 1:
        oracle_prog.inst(Z(qubits[0]))
        return oracle_prog
    # Map |bitstring> onto |11...1> by flipping every qubit that should
    # read 0; the same program undoes the mapping afterwards.
    bitflip_prog = Program()
    for qubit, bit in zip(qubits, bitstring):
        if bit == '0':
            bitflip_prog.inst(X(qubit))
    oracle_prog += bitflip_prog
    # An (n-1)-controlled Z flips the phase of |11...1> only.
    n_qubit_controlled_z = (ControlledProgramBuilder()
                            .with_controls(qubits[:-1])
                            .with_target(qubits[-1])
                            .with_operation(np.array([[1, 0], [0, -1]]))
                            .with_gate_name('Z')
                            .build())
    oracle_prog += n_qubit_controlled_z
    oracle_prog += bitflip_prog
    return oracle_prog
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def make_diagonal_povm(pi_basis, confusion_rate_matrix):
    """Build a DiagonalPOVM from projectors and readout confusion rates.

    :param OperatorBasis pi_basis: An operator basis of rank-1 projection
        operators.
    :param numpy.ndarray confusion_rate_matrix: Detection probabilities
        conditional on the prepared qubit state; columns must sum to 1
        and all entries must lie in [0, 1].
    :return: The POVM corresponding to confusion_rate_matrix.
    :rtype: DiagonalPOVM
    :raises CRMUnnormalizedError: If a column does not sum to one.
    :raises CRMValueError: If any entry lies outside [0, 1].
    """
    confusion_rate_matrix = np.asarray(confusion_rate_matrix)
    column_sums = confusion_rate_matrix.sum(axis=0)
    if not np.allclose(column_sums, np.ones(confusion_rate_matrix.shape[1])):
        raise CRMUnnormalizedError("Unnormalized confusion matrix:\n{}".format(
            confusion_rate_matrix))
    if not (confusion_rate_matrix >= 0).all() or not (confusion_rate_matrix <= 1).all():
        raise CRMValueError("Confusion matrix must have values in [0, 1]:"
                            "\n{}".format(confusion_rate_matrix))
    # Each POVM element is a confusion-weighted sum of the projectors.
    ops = []
    for weights in confusion_rate_matrix:
        ops.append(sum((proj * w for (proj, w) in izip(pi_basis.ops, weights)), 0))
    return DiagonalPOVM(pi_basis=pi_basis, confusion_rate_matrix=confusion_rate_matrix, ops=ops)
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_hermitian(operator):
    """Check whether a matrix or operator equals its conjugate transpose.

    The comparison uses a relative Frobenius-norm tolerance of EPS in
    every branch.

    :param (numpy.ndarray|qutip.Qobj) operator: The operator or matrix
        to be tested.
    :return: True if the operator is hermitian.
    :rtype: bool
    """
    if isinstance(operator, qt.Qobj):
        deviation = (operator.dag() - operator).norm(FROBENIUS)
        return deviation / operator.norm(FROBENIUS) < EPS
    if isinstance(operator, np.ndarray):
        deviation = np.linalg.norm(operator.T.conj() - operator)
        return deviation / np.linalg.norm(operator) < EPS
    # Fallback: sparse matrices exposing the .H (conjugate transpose).
    return spnorm(operator.H - operator) / spnorm(operator) < EPS
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_projector(operator):
    """ Check if operator is a projector. :param qutip.Qobj operator: The operator or matrix to be tested. :return: True if the operator is a projector. :rtype: bool """ |
    # A projector satisfies P^dag = P (hermitian) and P^2 = P
    # (idempotent); both are checked up to relative numerical accuracy
    # EPS in the Frobenius norm.
    return (is_hermitian(operator) and (operator * operator - operator).norm(FROBENIUS)
            / operator.norm(FROBENIUS) < EPS) |
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def choi_matrix(pauli_tm, basis):
    """Compute the Choi matrix of a process from its Pauli Transfer Matrix.

    This agrees with the definition in Chow et al.
    (https://doi.org/10.1103/PhysRevLett.109.060501) except for a
    different overall normalization; ours agrees with qutip's.

    :param numpy.ndarray pauli_tm: The Pauli Transfer Matrix as 2d-array.
    :param OperatorBasis basis: The operator basis, typically products of
        normalized Paulis.
    :return: The Choi matrix as qutip.Qobj.
    :rtype: qutip.Qobj
    """
    if not basis.is_orthonormal():  # pragma no coverage
        raise ValueError("Need an orthonormal operator basis.")
    if not all(is_hermitian(op) for op in basis.ops):  # pragma no coverage
        raise ValueError("Need an operator basis of hermitian operators.")
    sbasis = basis.super_basis()
    dim = basis.dim
    # choi = sum_{jk} R[j, k] * (B_j (x) B_k) expressed in the super-basis.
    terms = (pauli_tm[row, col] * sbasis.ops[row + col * dim]
             for row in range(dim) for col in range(dim))
    choi = sum(terms)
    choi.superrep = CHOI
    return choi
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def metric(self):
    """Return (and lazily cache) the Gram matrix of the basis operators.

    The entries are Hilbert-Schmidt inner products Tr(B_j^dag B_k); the
    result is stored in ``self._metric`` on first use.

    :return: The matrix of inner products.
    :rtype: numpy.matrix
    """
    if self._metric is None:
        _log.debug("Computing and caching operator basis metric")
        gram_rows = [[(op_j.dag() * op_k).tr() for op_k in self.ops]
                     for op_j in self.ops]
        self._metric = np.matrix(gram_rows)
    return self._metric
<SYSTEM_TASK:>
Solve the following problem using Python, implementing the functions described below, one line at a time
<END_TASK>
<USER_TASK:>
Description:
def is_orthonormal(self):
    """Return (and lazily cache) whether this operator basis is orthonormal.

    The basis is orthonormal iff its Hilbert-Schmidt Gram matrix (see
    :meth:`metric`) equals the identity; the verdict is cached in
    ``self._is_orthonormal``.

    :return: True if the basis vectors are orthonormal, False otherwise.
    :rtype: bool
    """
    if self._is_orthonormal is None:
        _log.debug("Testing and caching if operator basis is orthonormal")
        identity = np.eye(self.dim)
        self._is_orthonormal = np.allclose(self.metric(), identity)
    return self._is_orthonormal
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.