id int32 0 252k | repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 51 19.8k | code_tokens list | docstring stringlengths 3 17.3k | docstring_tokens list | sha stringlengths 40 40 | url stringlengths 87 242 |
|---|---|---|---|---|---|---|---|---|---|---|---|
243,100 | sentinel-hub/sentinelhub-py | sentinelhub/aws.py | AwsTile.get_requests | def get_requests(self):
"""
Creates tile structure and returns list of files for download.
:return: List of download requests and list of empty folders that need to be created
:rtype: (list(download.DownloadRequest), list(str))
"""
self.download_list = []
for data_name in [band for band in self.bands if self._band_exists(band)] + self.metafiles:
if data_name in AwsConstants.TILE_FILES:
url = self.get_url(data_name)
filename = self.get_filepath(data_name)
self.download_list.append(DownloadRequest(url=url, filename=filename,
data_type=AwsConstants.AWS_FILES[data_name],
data_name=data_name))
self.sort_download_list()
return self.download_list, self.folder_list | python | def get_requests(self):
self.download_list = []
for data_name in [band for band in self.bands if self._band_exists(band)] + self.metafiles:
if data_name in AwsConstants.TILE_FILES:
url = self.get_url(data_name)
filename = self.get_filepath(data_name)
self.download_list.append(DownloadRequest(url=url, filename=filename,
data_type=AwsConstants.AWS_FILES[data_name],
data_name=data_name))
self.sort_download_list()
return self.download_list, self.folder_list | [
"def",
"get_requests",
"(",
"self",
")",
":",
"self",
".",
"download_list",
"=",
"[",
"]",
"for",
"data_name",
"in",
"[",
"band",
"for",
"band",
"in",
"self",
".",
"bands",
"if",
"self",
".",
"_band_exists",
"(",
"band",
")",
"]",
"+",
"self",
".",
... | Creates tile structure and returns list of files for download.
:return: List of download requests and list of empty folders that need to be created
:rtype: (list(download.DownloadRequest), list(str)) | [
"Creates",
"tile",
"structure",
"and",
"returns",
"list",
"of",
"files",
"for",
"download",
"."
] | 08a83b7f1e289187159a643336995d8369860fea | https://github.com/sentinel-hub/sentinelhub-py/blob/08a83b7f1e289187159a643336995d8369860fea/sentinelhub/aws.py#L516-L532 |
243,101 | sentinel-hub/sentinelhub-py | sentinelhub/aws.py | AwsTile.get_aws_index | def get_aws_index(self):
"""
Returns tile index on AWS. If `tile_index` was not set during class initialization it will be determined
according to existing tiles on AWS.
:return: Index of tile on AWS
:rtype: int
"""
if self.aws_index is not None:
return self.aws_index
tile_info_list = get_tile_info(self.tile_name, self.datetime, all_tiles=True)
if not tile_info_list:
raise ValueError('Cannot find aws_index for specified tile and time')
if self.data_source is DataSource.SENTINEL2_L2A:
for tile_info in sorted(tile_info_list, key=self._parse_aws_index):
try:
self.aws_index = self._parse_aws_index(tile_info)
self.get_tile_info()
return self.aws_index
except AwsDownloadFailedException:
pass
return self._parse_aws_index(tile_info_list[0]) | python | def get_aws_index(self):
if self.aws_index is not None:
return self.aws_index
tile_info_list = get_tile_info(self.tile_name, self.datetime, all_tiles=True)
if not tile_info_list:
raise ValueError('Cannot find aws_index for specified tile and time')
if self.data_source is DataSource.SENTINEL2_L2A:
for tile_info in sorted(tile_info_list, key=self._parse_aws_index):
try:
self.aws_index = self._parse_aws_index(tile_info)
self.get_tile_info()
return self.aws_index
except AwsDownloadFailedException:
pass
return self._parse_aws_index(tile_info_list[0]) | [
"def",
"get_aws_index",
"(",
"self",
")",
":",
"if",
"self",
".",
"aws_index",
"is",
"not",
"None",
":",
"return",
"self",
".",
"aws_index",
"tile_info_list",
"=",
"get_tile_info",
"(",
"self",
".",
"tile_name",
",",
"self",
".",
"datetime",
",",
"all_tile... | Returns tile index on AWS. If `tile_index` was not set during class initialization it will be determined
according to existing tiles on AWS.
:return: Index of tile on AWS
:rtype: int | [
"Returns",
"tile",
"index",
"on",
"AWS",
".",
"If",
"tile_index",
"was",
"not",
"set",
"during",
"class",
"initialization",
"it",
"will",
"be",
"determined",
"according",
"to",
"existing",
"tiles",
"on",
"AWS",
"."
] | 08a83b7f1e289187159a643336995d8369860fea | https://github.com/sentinel-hub/sentinelhub-py/blob/08a83b7f1e289187159a643336995d8369860fea/sentinelhub/aws.py#L534-L557 |
243,102 | sentinel-hub/sentinelhub-py | sentinelhub/aws.py | AwsTile.tile_is_valid | def tile_is_valid(self):
""" Checks if tile has tile info and valid timestamp
:return: `True` if tile is valid and `False` otherwise
:rtype: bool
"""
return self.tile_info is not None \
and (self.datetime == self.date or self.datetime == self.parse_datetime(self.tile_info['timestamp'])) | python | def tile_is_valid(self):
return self.tile_info is not None \
and (self.datetime == self.date or self.datetime == self.parse_datetime(self.tile_info['timestamp'])) | [
"def",
"tile_is_valid",
"(",
"self",
")",
":",
"return",
"self",
".",
"tile_info",
"is",
"not",
"None",
"and",
"(",
"self",
".",
"datetime",
"==",
"self",
".",
"date",
"or",
"self",
".",
"datetime",
"==",
"self",
".",
"parse_datetime",
"(",
"self",
"."... | Checks if tile has tile info and valid timestamp
:return: `True` if tile is valid and `False` otherwise
:rtype: bool | [
"Checks",
"if",
"tile",
"has",
"tile",
"info",
"and",
"valid",
"timestamp"
] | 08a83b7f1e289187159a643336995d8369860fea | https://github.com/sentinel-hub/sentinelhub-py/blob/08a83b7f1e289187159a643336995d8369860fea/sentinelhub/aws.py#L570-L577 |
243,103 | sentinel-hub/sentinelhub-py | sentinelhub/aws.py | AwsTile.get_tile_url | def get_tile_url(self, force_http=False):
"""
Creates base url of tile location on AWS.
:param force_http: True if HTTP base URL should be used and False otherwise
:type force_http: str
:return: url of tile location
:rtype: str
"""
base_url = self.base_http_url if force_http else self.base_url
url = '{}tiles/{}/{}/{}/'.format(base_url, self.tile_name[0:2].lstrip('0'), self.tile_name[2],
self.tile_name[3:5])
date_params = self.date.split('-')
for param in date_params:
url += param.lstrip('0') + '/'
return url + str(self.aws_index) | python | def get_tile_url(self, force_http=False):
base_url = self.base_http_url if force_http else self.base_url
url = '{}tiles/{}/{}/{}/'.format(base_url, self.tile_name[0:2].lstrip('0'), self.tile_name[2],
self.tile_name[3:5])
date_params = self.date.split('-')
for param in date_params:
url += param.lstrip('0') + '/'
return url + str(self.aws_index) | [
"def",
"get_tile_url",
"(",
"self",
",",
"force_http",
"=",
"False",
")",
":",
"base_url",
"=",
"self",
".",
"base_http_url",
"if",
"force_http",
"else",
"self",
".",
"base_url",
"url",
"=",
"'{}tiles/{}/{}/{}/'",
".",
"format",
"(",
"base_url",
",",
"self",... | Creates base url of tile location on AWS.
:param force_http: True if HTTP base URL should be used and False otherwise
:type force_http: str
:return: url of tile location
:rtype: str | [
"Creates",
"base",
"url",
"of",
"tile",
"location",
"on",
"AWS",
"."
] | 08a83b7f1e289187159a643336995d8369860fea | https://github.com/sentinel-hub/sentinelhub-py/blob/08a83b7f1e289187159a643336995d8369860fea/sentinelhub/aws.py#L603-L618 |
243,104 | ethereum/pyethereum | ethereum/abi.py | split32 | def split32(data):
""" Split data into pieces of 32 bytes. """
all_pieces = []
for position in range(0, len(data), 32):
piece = data[position:position + 32]
all_pieces.append(piece)
return all_pieces | python | def split32(data):
all_pieces = []
for position in range(0, len(data), 32):
piece = data[position:position + 32]
all_pieces.append(piece)
return all_pieces | [
"def",
"split32",
"(",
"data",
")",
":",
"all_pieces",
"=",
"[",
"]",
"for",
"position",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"data",
")",
",",
"32",
")",
":",
"piece",
"=",
"data",
"[",
"position",
":",
"position",
"+",
"32",
"]",
"all_piec... | Split data into pieces of 32 bytes. | [
"Split",
"data",
"into",
"pieces",
"of",
"32",
"bytes",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L38-L46 |
243,105 | ethereum/pyethereum | ethereum/abi.py | _canonical_type | def _canonical_type(name): # pylint: disable=too-many-return-statements
""" Replace aliases to the corresponding type to compute the ids. """
if name == 'int':
return 'int256'
if name == 'uint':
return 'uint256'
if name == 'fixed':
return 'fixed128x128'
if name == 'ufixed':
return 'ufixed128x128'
if name.startswith('int['):
return 'int256' + name[3:]
if name.startswith('uint['):
return 'uint256' + name[4:]
if name.startswith('fixed['):
return 'fixed128x128' + name[5:]
if name.startswith('ufixed['):
return 'ufixed128x128' + name[6:]
return name | python | def _canonical_type(name): # pylint: disable=too-many-return-statements
if name == 'int':
return 'int256'
if name == 'uint':
return 'uint256'
if name == 'fixed':
return 'fixed128x128'
if name == 'ufixed':
return 'ufixed128x128'
if name.startswith('int['):
return 'int256' + name[3:]
if name.startswith('uint['):
return 'uint256' + name[4:]
if name.startswith('fixed['):
return 'fixed128x128' + name[5:]
if name.startswith('ufixed['):
return 'ufixed128x128' + name[6:]
return name | [
"def",
"_canonical_type",
"(",
"name",
")",
":",
"# pylint: disable=too-many-return-statements",
"if",
"name",
"==",
"'int'",
":",
"return",
"'int256'",
"if",
"name",
"==",
"'uint'",
":",
"return",
"'uint256'",
"if",
"name",
"==",
"'fixed'",
":",
"return",
"'fix... | Replace aliases to the corresponding type to compute the ids. | [
"Replace",
"aliases",
"to",
"the",
"corresponding",
"type",
"to",
"compute",
"the",
"ids",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L49-L76 |
243,106 | ethereum/pyethereum | ethereum/abi.py | method_id | def method_id(name, encode_types):
""" Return the unique method id.
The signature is defined as the canonical expression of the basic
prototype, i.e. the function name with the parenthesised list of parameter
types. Parameter types are split by a single comma - no spaces are used.
The method id is defined as the first four bytes (left, high-order in
big-endian) of the Keccak (SHA-3) hash of the signature of the function.
"""
function_types = [
_canonical_type(type_)
for type_ in encode_types
]
function_signature = '{function_name}({canonical_types})'.format(
function_name=name,
canonical_types=','.join(function_types),
)
function_keccak = utils.sha3(function_signature)
first_bytes = function_keccak[:4]
return big_endian_to_int(first_bytes) | python | def method_id(name, encode_types):
function_types = [
_canonical_type(type_)
for type_ in encode_types
]
function_signature = '{function_name}({canonical_types})'.format(
function_name=name,
canonical_types=','.join(function_types),
)
function_keccak = utils.sha3(function_signature)
first_bytes = function_keccak[:4]
return big_endian_to_int(first_bytes) | [
"def",
"method_id",
"(",
"name",
",",
"encode_types",
")",
":",
"function_types",
"=",
"[",
"_canonical_type",
"(",
"type_",
")",
"for",
"type_",
"in",
"encode_types",
"]",
"function_signature",
"=",
"'{function_name}({canonical_types})'",
".",
"format",
"(",
"fun... | Return the unique method id.
The signature is defined as the canonical expression of the basic
prototype, i.e. the function name with the parenthesised list of parameter
types. Parameter types are split by a single comma - no spaces are used.
The method id is defined as the first four bytes (left, high-order in
big-endian) of the Keccak (SHA-3) hash of the signature of the function. | [
"Return",
"the",
"unique",
"method",
"id",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L87-L110 |
243,107 | ethereum/pyethereum | ethereum/abi.py | event_id | def event_id(name, encode_types):
""" Return the event id.
Defined as:
`keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")`
Where `canonical_type_of` is a function that simply returns the canonical
type of a given argument, e.g. for uint indexed foo, it would return
uint256). Note the lack of spaces.
"""
event_types = [
_canonical_type(type_)
for type_ in encode_types
]
event_signature = '{event_name}({canonical_types})'.format(
event_name=name,
canonical_types=','.join(event_types),
)
return big_endian_to_int(utils.sha3(event_signature)) | python | def event_id(name, encode_types):
event_types = [
_canonical_type(type_)
for type_ in encode_types
]
event_signature = '{event_name}({canonical_types})'.format(
event_name=name,
canonical_types=','.join(event_types),
)
return big_endian_to_int(utils.sha3(event_signature)) | [
"def",
"event_id",
"(",
"name",
",",
"encode_types",
")",
":",
"event_types",
"=",
"[",
"_canonical_type",
"(",
"type_",
")",
"for",
"type_",
"in",
"encode_types",
"]",
"event_signature",
"=",
"'{event_name}({canonical_types})'",
".",
"format",
"(",
"event_name",
... | Return the event id.
Defined as:
`keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")`
Where `canonical_type_of` is a function that simply returns the canonical
type of a given argument, e.g. for uint indexed foo, it would return
uint256). Note the lack of spaces. | [
"Return",
"the",
"event",
"id",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L113-L135 |
243,108 | ethereum/pyethereum | ethereum/abi.py | ContractTranslator.encode_function_call | def encode_function_call(self, function_name, args):
""" Return the encoded function call.
Args:
function_name (str): One of the existing functions described in the
contract interface.
args (List[object]): The function arguments that wll be encoded and
used in the contract execution in the vm.
Return:
bin: The encoded function name and arguments so that it can be used
with the evm to execute a funcion call, the binary string follows
the Ethereum Contract ABI.
"""
if function_name not in self.function_data:
raise ValueError('Unkown function {}'.format(function_name))
description = self.function_data[function_name]
function_selector = zpad(encode_int(description['prefix']), 4)
arguments = encode_abi(description['encode_types'], args)
return function_selector + arguments | python | def encode_function_call(self, function_name, args):
if function_name not in self.function_data:
raise ValueError('Unkown function {}'.format(function_name))
description = self.function_data[function_name]
function_selector = zpad(encode_int(description['prefix']), 4)
arguments = encode_abi(description['encode_types'], args)
return function_selector + arguments | [
"def",
"encode_function_call",
"(",
"self",
",",
"function_name",
",",
"args",
")",
":",
"if",
"function_name",
"not",
"in",
"self",
".",
"function_data",
":",
"raise",
"ValueError",
"(",
"'Unkown function {}'",
".",
"format",
"(",
"function_name",
")",
")",
"... | Return the encoded function call.
Args:
function_name (str): One of the existing functions described in the
contract interface.
args (List[object]): The function arguments that wll be encoded and
used in the contract execution in the vm.
Return:
bin: The encoded function name and arguments so that it can be used
with the evm to execute a funcion call, the binary string follows
the Ethereum Contract ABI. | [
"Return",
"the",
"encoded",
"function",
"call",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L502-L524 |
243,109 | ethereum/pyethereum | ethereum/abi.py | ContractTranslator.decode_function_result | def decode_function_result(self, function_name, data):
""" Return the function call result decoded.
Args:
function_name (str): One of the existing functions described in the
contract interface.
data (bin): The encoded result from calling `function_name`.
Return:
List[object]: The values returned by the call to `function_name`.
"""
description = self.function_data[function_name]
arguments = decode_abi(description['decode_types'], data)
return arguments | python | def decode_function_result(self, function_name, data):
description = self.function_data[function_name]
arguments = decode_abi(description['decode_types'], data)
return arguments | [
"def",
"decode_function_result",
"(",
"self",
",",
"function_name",
",",
"data",
")",
":",
"description",
"=",
"self",
".",
"function_data",
"[",
"function_name",
"]",
"arguments",
"=",
"decode_abi",
"(",
"description",
"[",
"'decode_types'",
"]",
",",
"data",
... | Return the function call result decoded.
Args:
function_name (str): One of the existing functions described in the
contract interface.
data (bin): The encoded result from calling `function_name`.
Return:
List[object]: The values returned by the call to `function_name`. | [
"Return",
"the",
"function",
"call",
"result",
"decoded",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L526-L539 |
243,110 | ethereum/pyethereum | ethereum/abi.py | ContractTranslator.encode_constructor_arguments | def encode_constructor_arguments(self, args):
""" Return the encoded constructor call. """
if self.constructor_data is None:
raise ValueError(
"The contract interface didn't have a constructor")
return encode_abi(self.constructor_data['encode_types'], args) | python | def encode_constructor_arguments(self, args):
if self.constructor_data is None:
raise ValueError(
"The contract interface didn't have a constructor")
return encode_abi(self.constructor_data['encode_types'], args) | [
"def",
"encode_constructor_arguments",
"(",
"self",
",",
"args",
")",
":",
"if",
"self",
".",
"constructor_data",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"\"The contract interface didn't have a constructor\"",
")",
"return",
"encode_abi",
"(",
"self",
".",
"c... | Return the encoded constructor call. | [
"Return",
"the",
"encoded",
"constructor",
"call",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L541-L547 |
243,111 | ethereum/pyethereum | ethereum/abi.py | ContractTranslator.decode_event | def decode_event(self, log_topics, log_data):
""" Return a dictionary representation the log.
Note:
This function won't work with anonymous events.
Args:
log_topics (List[bin]): The log's indexed arguments.
log_data (bin): The encoded non-indexed arguments.
"""
# https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI#function-selector-and-argument-encoding
# topics[0]: keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")
# If the event is declared as anonymous the topics[0] is not generated;
if not len(log_topics) or log_topics[0] not in self.event_data:
raise ValueError('Unknown log type')
event_id_ = log_topics[0]
event = self.event_data[event_id_]
# data: abi_serialise(EVENT_NON_INDEXED_ARGS)
# EVENT_NON_INDEXED_ARGS is the series of EVENT_ARGS that are not
# indexed, abi_serialise is the ABI serialisation function used for
# returning a series of typed values from a function.
unindexed_types = [
type_
for type_, indexed in zip(event['types'], event['indexed'])
if not indexed
]
unindexed_args = decode_abi(unindexed_types, log_data)
# topics[n]: EVENT_INDEXED_ARGS[n - 1]
# EVENT_INDEXED_ARGS is the series of EVENT_ARGS that are indexed
indexed_count = 1 # skip topics[0]
result = {}
for name, type_, indexed in zip(
event['names'], event['types'], event['indexed']):
if indexed:
topic_bytes = utils.zpad(
utils.encode_int(log_topics[indexed_count]),
32,
)
indexed_count += 1
value = decode_single(process_type(type_), topic_bytes)
else:
value = unindexed_args.pop(0)
result[name] = value
result['_event_type'] = utils.to_string(event['name'])
return result | python | def decode_event(self, log_topics, log_data):
# https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI#function-selector-and-argument-encoding
# topics[0]: keccak(EVENT_NAME+"("+EVENT_ARGS.map(canonical_type_of).join(",")+")")
# If the event is declared as anonymous the topics[0] is not generated;
if not len(log_topics) or log_topics[0] not in self.event_data:
raise ValueError('Unknown log type')
event_id_ = log_topics[0]
event = self.event_data[event_id_]
# data: abi_serialise(EVENT_NON_INDEXED_ARGS)
# EVENT_NON_INDEXED_ARGS is the series of EVENT_ARGS that are not
# indexed, abi_serialise is the ABI serialisation function used for
# returning a series of typed values from a function.
unindexed_types = [
type_
for type_, indexed in zip(event['types'], event['indexed'])
if not indexed
]
unindexed_args = decode_abi(unindexed_types, log_data)
# topics[n]: EVENT_INDEXED_ARGS[n - 1]
# EVENT_INDEXED_ARGS is the series of EVENT_ARGS that are indexed
indexed_count = 1 # skip topics[0]
result = {}
for name, type_, indexed in zip(
event['names'], event['types'], event['indexed']):
if indexed:
topic_bytes = utils.zpad(
utils.encode_int(log_topics[indexed_count]),
32,
)
indexed_count += 1
value = decode_single(process_type(type_), topic_bytes)
else:
value = unindexed_args.pop(0)
result[name] = value
result['_event_type'] = utils.to_string(event['name'])
return result | [
"def",
"decode_event",
"(",
"self",
",",
"log_topics",
",",
"log_data",
")",
":",
"# https://github.com/ethereum/wiki/wiki/Ethereum-Contract-ABI#function-selector-and-argument-encoding",
"# topics[0]: keccak(EVENT_NAME+\"(\"+EVENT_ARGS.map(canonical_type_of).join(\",\")+\")\")",
"# If the ev... | Return a dictionary representation the log.
Note:
This function won't work with anonymous events.
Args:
log_topics (List[bin]): The log's indexed arguments.
log_data (bin): The encoded non-indexed arguments. | [
"Return",
"a",
"dictionary",
"representation",
"the",
"log",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L549-L601 |
243,112 | ethereum/pyethereum | ethereum/abi.py | ContractTranslator.listen | def listen(self, log, noprint=True):
"""
Return a dictionary representation of the Log instance.
Note:
This function won't work with anonymous events.
Args:
log (processblock.Log): The Log instance that needs to be parsed.
noprint (bool): Flag to turn off priting of the decoded log instance.
"""
try:
result = self.decode_event(log.topics, log.data)
except ValueError:
return # api compatibility
if not noprint:
print(result)
return result | python | def listen(self, log, noprint=True):
try:
result = self.decode_event(log.topics, log.data)
except ValueError:
return # api compatibility
if not noprint:
print(result)
return result | [
"def",
"listen",
"(",
"self",
",",
"log",
",",
"noprint",
"=",
"True",
")",
":",
"try",
":",
"result",
"=",
"self",
".",
"decode_event",
"(",
"log",
".",
"topics",
",",
"log",
".",
"data",
")",
"except",
"ValueError",
":",
"return",
"# api compatibilit... | Return a dictionary representation of the Log instance.
Note:
This function won't work with anonymous events.
Args:
log (processblock.Log): The Log instance that needs to be parsed.
noprint (bool): Flag to turn off priting of the decoded log instance. | [
"Return",
"a",
"dictionary",
"representation",
"of",
"the",
"Log",
"instance",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/abi.py#L603-L622 |
243,113 | ethereum/pyethereum | ethereum/experimental/pruning_trie.py | unpack_to_nibbles | def unpack_to_nibbles(bindata):
"""unpack packed binary data to nibbles
:param bindata: binary packed from nibbles
:return: nibbles sequence, may have a terminator
"""
o = bin_to_nibbles(bindata)
flags = o[0]
if flags & 2:
o.append(NIBBLE_TERMINATOR)
if flags & 1 == 1:
o = o[1:]
else:
o = o[2:]
return o | python | def unpack_to_nibbles(bindata):
o = bin_to_nibbles(bindata)
flags = o[0]
if flags & 2:
o.append(NIBBLE_TERMINATOR)
if flags & 1 == 1:
o = o[1:]
else:
o = o[2:]
return o | [
"def",
"unpack_to_nibbles",
"(",
"bindata",
")",
":",
"o",
"=",
"bin_to_nibbles",
"(",
"bindata",
")",
"flags",
"=",
"o",
"[",
"0",
"]",
"if",
"flags",
"&",
"2",
":",
"o",
".",
"append",
"(",
"NIBBLE_TERMINATOR",
")",
"if",
"flags",
"&",
"1",
"==",
... | unpack packed binary data to nibbles
:param bindata: binary packed from nibbles
:return: nibbles sequence, may have a terminator | [
"unpack",
"packed",
"binary",
"data",
"to",
"nibbles"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L154-L168 |
243,114 | ethereum/pyethereum | ethereum/experimental/pruning_trie.py | starts_with | def starts_with(full, part):
""" test whether the items in the part is
the leading items of the full
"""
if len(full) < len(part):
return False
return full[:len(part)] == part | python | def starts_with(full, part):
if len(full) < len(part):
return False
return full[:len(part)] == part | [
"def",
"starts_with",
"(",
"full",
",",
"part",
")",
":",
"if",
"len",
"(",
"full",
")",
"<",
"len",
"(",
"part",
")",
":",
"return",
"False",
"return",
"full",
"[",
":",
"len",
"(",
"part",
")",
"]",
"==",
"part"
] | test whether the items in the part is
the leading items of the full | [
"test",
"whether",
"the",
"items",
"in",
"the",
"part",
"is",
"the",
"leading",
"items",
"of",
"the",
"full"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L171-L177 |
243,115 | ethereum/pyethereum | ethereum/experimental/pruning_trie.py | Trie._get_node_type | def _get_node_type(self, node):
""" get node type and content
:param node: node in form of list, or BLANK_NODE
:return: node type
"""
if node == BLANK_NODE:
return NODE_TYPE_BLANK
if len(node) == 2:
nibbles = unpack_to_nibbles(node[0])
has_terminator = (nibbles and nibbles[-1] == NIBBLE_TERMINATOR)
return NODE_TYPE_LEAF if has_terminator\
else NODE_TYPE_EXTENSION
if len(node) == 17:
return NODE_TYPE_BRANCH | python | def _get_node_type(self, node):
if node == BLANK_NODE:
return NODE_TYPE_BLANK
if len(node) == 2:
nibbles = unpack_to_nibbles(node[0])
has_terminator = (nibbles and nibbles[-1] == NIBBLE_TERMINATOR)
return NODE_TYPE_LEAF if has_terminator\
else NODE_TYPE_EXTENSION
if len(node) == 17:
return NODE_TYPE_BRANCH | [
"def",
"_get_node_type",
"(",
"self",
",",
"node",
")",
":",
"if",
"node",
"==",
"BLANK_NODE",
":",
"return",
"NODE_TYPE_BLANK",
"if",
"len",
"(",
"node",
")",
"==",
"2",
":",
"nibbles",
"=",
"unpack_to_nibbles",
"(",
"node",
"[",
"0",
"]",
")",
"has_t... | get node type and content
:param node: node in form of list, or BLANK_NODE
:return: node type | [
"get",
"node",
"type",
"and",
"content"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L353-L368 |
243,116 | ethereum/pyethereum | ethereum/experimental/pruning_trie.py | Trie._get | def _get(self, node, key):
""" get value inside a node
:param node: node in form of list, or BLANK_NODE
:param key: nibble list without terminator
:return:
BLANK_NODE if does not exist, otherwise value or hash
"""
node_type = self._get_node_type(node)
if node_type == NODE_TYPE_BLANK:
return BLANK_NODE
if node_type == NODE_TYPE_BRANCH:
# already reach the expected node
if not key:
return node[-1]
sub_node = self._decode_to_node(node[key[0]])
return self._get(sub_node, key[1:])
# key value node
curr_key = without_terminator(unpack_to_nibbles(node[0]))
if node_type == NODE_TYPE_LEAF:
return node[1] if key == curr_key else BLANK_NODE
if node_type == NODE_TYPE_EXTENSION:
# traverse child nodes
if starts_with(key, curr_key):
sub_node = self._decode_to_node(node[1])
return self._get(sub_node, key[len(curr_key):])
else:
return BLANK_NODE | python | def _get(self, node, key):
node_type = self._get_node_type(node)
if node_type == NODE_TYPE_BLANK:
return BLANK_NODE
if node_type == NODE_TYPE_BRANCH:
# already reach the expected node
if not key:
return node[-1]
sub_node = self._decode_to_node(node[key[0]])
return self._get(sub_node, key[1:])
# key value node
curr_key = without_terminator(unpack_to_nibbles(node[0]))
if node_type == NODE_TYPE_LEAF:
return node[1] if key == curr_key else BLANK_NODE
if node_type == NODE_TYPE_EXTENSION:
# traverse child nodes
if starts_with(key, curr_key):
sub_node = self._decode_to_node(node[1])
return self._get(sub_node, key[len(curr_key):])
else:
return BLANK_NODE | [
"def",
"_get",
"(",
"self",
",",
"node",
",",
"key",
")",
":",
"node_type",
"=",
"self",
".",
"_get_node_type",
"(",
"node",
")",
"if",
"node_type",
"==",
"NODE_TYPE_BLANK",
":",
"return",
"BLANK_NODE",
"if",
"node_type",
"==",
"NODE_TYPE_BRANCH",
":",
"# ... | get value inside a node
:param node: node in form of list, or BLANK_NODE
:param key: nibble list without terminator
:return:
BLANK_NODE if does not exist, otherwise value or hash | [
"get",
"value",
"inside",
"a",
"node"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L370-L401 |
243,117 | ethereum/pyethereum | ethereum/experimental/pruning_trie.py | Trie._normalize_branch_node | def _normalize_branch_node(self, node):
# sys.stderr.write('nbn\n')
"""node should have only one item changed
"""
not_blank_items_count = sum(1 for x in range(17) if node[x])
assert not_blank_items_count >= 1
if not_blank_items_count > 1:
self._encode_node(node)
return node
# now only one item is not blank
not_blank_index = [i for i, item in enumerate(node) if item][0]
# the value item is not blank
if not_blank_index == 16:
o = [pack_nibbles(with_terminator([])), node[16]]
self._encode_node(o)
return o
# normal item is not blank
sub_node = self._decode_to_node(node[not_blank_index])
sub_node_type = self._get_node_type(sub_node)
if is_key_value_type(sub_node_type):
# collape subnode to this node, not this node will have same
# terminator with the new sub node, and value does not change
self._delete_node_storage(sub_node)
new_key = [not_blank_index] + \
unpack_to_nibbles(sub_node[0])
o = [pack_nibbles(new_key), sub_node[1]]
self._encode_node(o)
return o
if sub_node_type == NODE_TYPE_BRANCH:
o = [pack_nibbles([not_blank_index]),
node[not_blank_index]]
self._encode_node(o)
return o
assert False | python | def _normalize_branch_node(self, node):
# sys.stderr.write('nbn\n')
not_blank_items_count = sum(1 for x in range(17) if node[x])
assert not_blank_items_count >= 1
if not_blank_items_count > 1:
self._encode_node(node)
return node
# now only one item is not blank
not_blank_index = [i for i, item in enumerate(node) if item][0]
# the value item is not blank
if not_blank_index == 16:
o = [pack_nibbles(with_terminator([])), node[16]]
self._encode_node(o)
return o
# normal item is not blank
sub_node = self._decode_to_node(node[not_blank_index])
sub_node_type = self._get_node_type(sub_node)
if is_key_value_type(sub_node_type):
# collape subnode to this node, not this node will have same
# terminator with the new sub node, and value does not change
self._delete_node_storage(sub_node)
new_key = [not_blank_index] + \
unpack_to_nibbles(sub_node[0])
o = [pack_nibbles(new_key), sub_node[1]]
self._encode_node(o)
return o
if sub_node_type == NODE_TYPE_BRANCH:
o = [pack_nibbles([not_blank_index]),
node[not_blank_index]]
self._encode_node(o)
return o
assert False | [
"def",
"_normalize_branch_node",
"(",
"self",
",",
"node",
")",
":",
"# sys.stderr.write('nbn\\n')",
"not_blank_items_count",
"=",
"sum",
"(",
"1",
"for",
"x",
"in",
"range",
"(",
"17",
")",
"if",
"node",
"[",
"x",
"]",
")",
"assert",
"not_blank_items_count",
... | node should have only one item changed | [
"node",
"should",
"have",
"only",
"one",
"item",
"changed"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/experimental/pruning_trie.py#L650-L688 |
243,118 | ethereum/pyethereum | ethereum/slogging.py | DEBUG | def DEBUG(msg, *args, **kwargs):
"""temporary logger during development that is always on"""
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs) | python | def DEBUG(msg, *args, **kwargs):
logger = getLogger("DEBUG")
if len(logger.handlers) == 0:
logger.addHandler(StreamHandler())
logger.propagate = False
logger.setLevel(logging.DEBUG)
logger.DEV(msg, *args, **kwargs) | [
"def",
"DEBUG",
"(",
"msg",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"logger",
"=",
"getLogger",
"(",
"\"DEBUG\"",
")",
"if",
"len",
"(",
"logger",
".",
"handlers",
")",
"==",
"0",
":",
"logger",
".",
"addHandler",
"(",
"StreamHandler",
... | temporary logger during development that is always on | [
"temporary",
"logger",
"during",
"development",
"that",
"is",
"always",
"on"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/slogging.py#L349-L356 |
243,119 | ethereum/pyethereum | ethereum/vm.py | vm_trace | def vm_trace(ext, msg, compustate, opcode, pushcache, tracer=log_vm_op):
"""
This diverges from normal logging, as we use the logging namespace
only to decide which features get logged in 'eth.vm.op'
i.e. tracing can not be activated by activating a sub
like 'eth.vm.op.stack'
"""
op, in_args, out_args, fee = opcodes.opcodes[opcode]
trace_data = {}
trace_data['stack'] = list(map(to_string, list(compustate.prev_stack)))
if compustate.prev_prev_op in ('MLOAD', 'MSTORE', 'MSTORE8', 'SHA3', 'CALL',
'CALLCODE', 'CREATE', 'CALLDATACOPY', 'CODECOPY',
'EXTCODECOPY'):
if len(compustate.prev_memory) < 4096:
trace_data['memory'] = \
''.join([encode_hex(ascii_chr(x)) for x
in compustate.prev_memory])
else:
trace_data['sha3memory'] = \
encode_hex(utils.sha3(b''.join([ascii_chr(x) for
x in compustate.prev_memory])))
if compustate.prev_prev_op in ('SSTORE',) or compustate.steps == 0:
trace_data['storage'] = ext.log_storage(msg.to)
trace_data['gas'] = to_string(compustate.prev_gas)
trace_data['gas_cost'] = to_string(compustate.prev_gas - compustate.gas)
trace_data['fee'] = fee
trace_data['inst'] = opcode
trace_data['pc'] = to_string(compustate.prev_pc)
if compustate.steps == 0:
trace_data['depth'] = msg.depth
trace_data['address'] = msg.to
trace_data['steps'] = compustate.steps
trace_data['depth'] = msg.depth
if op[:4] == 'PUSH':
print(repr(pushcache))
trace_data['pushvalue'] = pushcache[compustate.prev_pc]
tracer.trace('vm', op=op, **trace_data)
compustate.steps += 1
compustate.prev_prev_op = op | python | def vm_trace(ext, msg, compustate, opcode, pushcache, tracer=log_vm_op):
op, in_args, out_args, fee = opcodes.opcodes[opcode]
trace_data = {}
trace_data['stack'] = list(map(to_string, list(compustate.prev_stack)))
if compustate.prev_prev_op in ('MLOAD', 'MSTORE', 'MSTORE8', 'SHA3', 'CALL',
'CALLCODE', 'CREATE', 'CALLDATACOPY', 'CODECOPY',
'EXTCODECOPY'):
if len(compustate.prev_memory) < 4096:
trace_data['memory'] = \
''.join([encode_hex(ascii_chr(x)) for x
in compustate.prev_memory])
else:
trace_data['sha3memory'] = \
encode_hex(utils.sha3(b''.join([ascii_chr(x) for
x in compustate.prev_memory])))
if compustate.prev_prev_op in ('SSTORE',) or compustate.steps == 0:
trace_data['storage'] = ext.log_storage(msg.to)
trace_data['gas'] = to_string(compustate.prev_gas)
trace_data['gas_cost'] = to_string(compustate.prev_gas - compustate.gas)
trace_data['fee'] = fee
trace_data['inst'] = opcode
trace_data['pc'] = to_string(compustate.prev_pc)
if compustate.steps == 0:
trace_data['depth'] = msg.depth
trace_data['address'] = msg.to
trace_data['steps'] = compustate.steps
trace_data['depth'] = msg.depth
if op[:4] == 'PUSH':
print(repr(pushcache))
trace_data['pushvalue'] = pushcache[compustate.prev_pc]
tracer.trace('vm', op=op, **trace_data)
compustate.steps += 1
compustate.prev_prev_op = op | [
"def",
"vm_trace",
"(",
"ext",
",",
"msg",
",",
"compustate",
",",
"opcode",
",",
"pushcache",
",",
"tracer",
"=",
"log_vm_op",
")",
":",
"op",
",",
"in_args",
",",
"out_args",
",",
"fee",
"=",
"opcodes",
".",
"opcodes",
"[",
"opcode",
"]",
"trace_data... | This diverges from normal logging, as we use the logging namespace
only to decide which features get logged in 'eth.vm.op'
i.e. tracing can not be activated by activating a sub
like 'eth.vm.op.stack' | [
"This",
"diverges",
"from",
"normal",
"logging",
"as",
"we",
"use",
"the",
"logging",
"namespace",
"only",
"to",
"decide",
"which",
"features",
"get",
"logged",
"in",
"eth",
".",
"vm",
".",
"op",
"i",
".",
"e",
".",
"tracing",
"can",
"not",
"be",
"acti... | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/vm.py#L202-L242 |
243,120 | ethereum/pyethereum | ethereum/transactions.py | Transaction.sign | def sign(self, key, network_id=None):
"""Sign this transaction with a private key.
A potentially already existing signature would be overridden.
"""
if network_id is None:
rawhash = utils.sha3(rlp.encode(unsigned_tx_from_tx(self), UnsignedTransaction))
else:
assert 1 <= network_id < 2**63 - 18
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[
:-3] + [network_id, b'', b''])
rawhash = utils.sha3(rlpdata)
key = normalize_key(key)
v, r, s = ecsign(rawhash, key)
if network_id is not None:
v += 8 + network_id * 2
ret = self.copy(
v=v, r=r, s=s
)
ret._sender = utils.privtoaddr(key)
return ret | python | def sign(self, key, network_id=None):
if network_id is None:
rawhash = utils.sha3(rlp.encode(unsigned_tx_from_tx(self), UnsignedTransaction))
else:
assert 1 <= network_id < 2**63 - 18
rlpdata = rlp.encode(rlp.infer_sedes(self).serialize(self)[
:-3] + [network_id, b'', b''])
rawhash = utils.sha3(rlpdata)
key = normalize_key(key)
v, r, s = ecsign(rawhash, key)
if network_id is not None:
v += 8 + network_id * 2
ret = self.copy(
v=v, r=r, s=s
)
ret._sender = utils.privtoaddr(key)
return ret | [
"def",
"sign",
"(",
"self",
",",
"key",
",",
"network_id",
"=",
"None",
")",
":",
"if",
"network_id",
"is",
"None",
":",
"rawhash",
"=",
"utils",
".",
"sha3",
"(",
"rlp",
".",
"encode",
"(",
"unsigned_tx_from_tx",
"(",
"self",
")",
",",
"UnsignedTransa... | Sign this transaction with a private key.
A potentially already existing signature would be overridden. | [
"Sign",
"this",
"transaction",
"with",
"a",
"private",
"key",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/transactions.py#L118-L141 |
243,121 | ethereum/pyethereum | ethereum/transactions.py | Transaction.creates | def creates(self):
"returns the address of a contract created by this tx"
if self.to in (b'', '\0' * 20):
return mk_contract_address(self.sender, self.nonce) | python | def creates(self):
"returns the address of a contract created by this tx"
if self.to in (b'', '\0' * 20):
return mk_contract_address(self.sender, self.nonce) | [
"def",
"creates",
"(",
"self",
")",
":",
"if",
"self",
".",
"to",
"in",
"(",
"b''",
",",
"'\\0'",
"*",
"20",
")",
":",
"return",
"mk_contract_address",
"(",
"self",
".",
"sender",
",",
"self",
".",
"nonce",
")"
] | returns the address of a contract created by this tx | [
"returns",
"the",
"address",
"of",
"a",
"contract",
"created",
"by",
"this",
"tx"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/transactions.py#L167-L170 |
243,122 | ethereum/pyethereum | ethereum/pow/ethpow.py | check_pow | def check_pow(block_number, header_hash, mixhash, nonce, difficulty):
"""Check if the proof-of-work of the block is valid.
:param nonce: if given the proof of work function will be evaluated
with this nonce instead of the one already present in
the header
:returns: `True` or `False`
"""
log.debug('checking pow', block_number=block_number)
if len(mixhash) != 32 or len(header_hash) != 32 or len(nonce) != 8:
return False
# Grab current cache
cache = get_cache(block_number)
mining_output = hashimoto_light(block_number, cache, header_hash, nonce)
if mining_output[b'mix digest'] != mixhash:
return False
return utils.big_endian_to_int(
mining_output[b'result']) <= 2**256 // (difficulty or 1) | python | def check_pow(block_number, header_hash, mixhash, nonce, difficulty):
log.debug('checking pow', block_number=block_number)
if len(mixhash) != 32 or len(header_hash) != 32 or len(nonce) != 8:
return False
# Grab current cache
cache = get_cache(block_number)
mining_output = hashimoto_light(block_number, cache, header_hash, nonce)
if mining_output[b'mix digest'] != mixhash:
return False
return utils.big_endian_to_int(
mining_output[b'result']) <= 2**256 // (difficulty or 1) | [
"def",
"check_pow",
"(",
"block_number",
",",
"header_hash",
",",
"mixhash",
",",
"nonce",
",",
"difficulty",
")",
":",
"log",
".",
"debug",
"(",
"'checking pow'",
",",
"block_number",
"=",
"block_number",
")",
"if",
"len",
"(",
"mixhash",
")",
"!=",
"32",... | Check if the proof-of-work of the block is valid.
:param nonce: if given the proof of work function will be evaluated
with this nonce instead of the one already present in
the header
:returns: `True` or `False` | [
"Check",
"if",
"the",
"proof",
"-",
"of",
"-",
"work",
"of",
"the",
"block",
"is",
"valid",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/pow/ethpow.py#L62-L80 |
243,123 | ethereum/pyethereum | ethereum/block.py | BlockHeader.to_dict | def to_dict(self):
"""Serialize the header to a readable dictionary."""
d = {}
for field in ('prevhash', 'uncles_hash', 'extra_data', 'nonce',
'mixhash'):
d[field] = '0x' + encode_hex(getattr(self, field))
for field in ('state_root', 'tx_list_root', 'receipts_root',
'coinbase'):
d[field] = encode_hex(getattr(self, field))
for field in ('number', 'difficulty', 'gas_limit', 'gas_used',
'timestamp'):
d[field] = utils.to_string(getattr(self, field))
d['bloom'] = encode_hex(int256.serialize(self.bloom))
assert len(d) == len(BlockHeader.fields)
return d | python | def to_dict(self):
d = {}
for field in ('prevhash', 'uncles_hash', 'extra_data', 'nonce',
'mixhash'):
d[field] = '0x' + encode_hex(getattr(self, field))
for field in ('state_root', 'tx_list_root', 'receipts_root',
'coinbase'):
d[field] = encode_hex(getattr(self, field))
for field in ('number', 'difficulty', 'gas_limit', 'gas_used',
'timestamp'):
d[field] = utils.to_string(getattr(self, field))
d['bloom'] = encode_hex(int256.serialize(self.bloom))
assert len(d) == len(BlockHeader.fields)
return d | [
"def",
"to_dict",
"(",
"self",
")",
":",
"d",
"=",
"{",
"}",
"for",
"field",
"in",
"(",
"'prevhash'",
",",
"'uncles_hash'",
",",
"'extra_data'",
",",
"'nonce'",
",",
"'mixhash'",
")",
":",
"d",
"[",
"field",
"]",
"=",
"'0x'",
"+",
"encode_hex",
"(",
... | Serialize the header to a readable dictionary. | [
"Serialize",
"the",
"header",
"to",
"a",
"readable",
"dictionary",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/block.py#L137-L151 |
243,124 | ethereum/pyethereum | ethereum/utils.py | decode_int | def decode_int(v):
"""decodes and integer from serialization"""
if len(v) > 0 and (v[0] == b'\x00' or v[0] == 0):
raise Exception("No leading zero bytes allowed for integers")
return big_endian_to_int(v) | python | def decode_int(v):
if len(v) > 0 and (v[0] == b'\x00' or v[0] == 0):
raise Exception("No leading zero bytes allowed for integers")
return big_endian_to_int(v) | [
"def",
"decode_int",
"(",
"v",
")",
":",
"if",
"len",
"(",
"v",
")",
">",
"0",
"and",
"(",
"v",
"[",
"0",
"]",
"==",
"b'\\x00'",
"or",
"v",
"[",
"0",
"]",
"==",
"0",
")",
":",
"raise",
"Exception",
"(",
"\"No leading zero bytes allowed for integers\"... | decodes and integer from serialization | [
"decodes",
"and",
"integer",
"from",
"serialization"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/utils.py#L350-L354 |
243,125 | ethereum/pyethereum | ethereum/utils.py | encode_int | def encode_int(v):
"""encodes an integer into serialization"""
if not is_numeric(v) or v < 0 or v >= TT256:
raise Exception("Integer invalid or out of range: %r" % v)
return int_to_big_endian(v) | python | def encode_int(v):
if not is_numeric(v) or v < 0 or v >= TT256:
raise Exception("Integer invalid or out of range: %r" % v)
return int_to_big_endian(v) | [
"def",
"encode_int",
"(",
"v",
")",
":",
"if",
"not",
"is_numeric",
"(",
"v",
")",
"or",
"v",
"<",
"0",
"or",
"v",
">=",
"TT256",
":",
"raise",
"Exception",
"(",
"\"Integer invalid or out of range: %r\"",
"%",
"v",
")",
"return",
"int_to_big_endian",
"(",
... | encodes an integer into serialization | [
"encodes",
"an",
"integer",
"into",
"serialization"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/utils.py#L371-L375 |
243,126 | ethereum/pyethereum | ethereum/utils.py | print_func_call | def print_func_call(ignore_first_arg=False, max_call_number=100):
""" utility function to facilitate debug, it will print input args before
function call, and print return value after function call
usage:
@print_func_call
def some_func_to_be_debu():
pass
:param ignore_first_arg: whether print the first arg or not.
useful when ignore the `self` parameter of an object method call
"""
from functools import wraps
def display(x):
x = to_string(x)
try:
x.decode('ascii')
except BaseException:
return 'NON_PRINTABLE'
return x
local = {'call_number': 0}
def inner(f):
@wraps(f)
def wrapper(*args, **kwargs):
local['call_number'] += 1
tmp_args = args[1:] if ignore_first_arg and len(args) else args
this_call_number = local['call_number']
print(('{0}#{1} args: {2}, {3}'.format(
f.__name__,
this_call_number,
', '.join([display(x) for x in tmp_args]),
', '.join(display(key) + '=' + to_string(value)
for key, value in kwargs.items())
)))
res = f(*args, **kwargs)
print(('{0}#{1} return: {2}'.format(
f.__name__,
this_call_number,
display(res))))
if local['call_number'] > 100:
raise Exception("Touch max call number!")
return res
return wrapper
return inner | python | def print_func_call(ignore_first_arg=False, max_call_number=100):
from functools import wraps
def display(x):
x = to_string(x)
try:
x.decode('ascii')
except BaseException:
return 'NON_PRINTABLE'
return x
local = {'call_number': 0}
def inner(f):
@wraps(f)
def wrapper(*args, **kwargs):
local['call_number'] += 1
tmp_args = args[1:] if ignore_first_arg and len(args) else args
this_call_number = local['call_number']
print(('{0}#{1} args: {2}, {3}'.format(
f.__name__,
this_call_number,
', '.join([display(x) for x in tmp_args]),
', '.join(display(key) + '=' + to_string(value)
for key, value in kwargs.items())
)))
res = f(*args, **kwargs)
print(('{0}#{1} return: {2}'.format(
f.__name__,
this_call_number,
display(res))))
if local['call_number'] > 100:
raise Exception("Touch max call number!")
return res
return wrapper
return inner | [
"def",
"print_func_call",
"(",
"ignore_first_arg",
"=",
"False",
",",
"max_call_number",
"=",
"100",
")",
":",
"from",
"functools",
"import",
"wraps",
"def",
"display",
"(",
"x",
")",
":",
"x",
"=",
"to_string",
"(",
"x",
")",
"try",
":",
"x",
".",
"de... | utility function to facilitate debug, it will print input args before
function call, and print return value after function call
usage:
@print_func_call
def some_func_to_be_debu():
pass
:param ignore_first_arg: whether print the first arg or not.
useful when ignore the `self` parameter of an object method call | [
"utility",
"function",
"to",
"facilitate",
"debug",
"it",
"will",
"print",
"input",
"args",
"before",
"function",
"call",
"and",
"print",
"return",
"value",
"after",
"function",
"call"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/utils.py#L449-L498 |
243,127 | ethereum/pyethereum | ethereum/tools/_solidity.py | get_compiler_path | def get_compiler_path():
""" Return the path to the solc compiler.
This funtion will search for the solc binary in the $PATH and return the
path of the first executable occurence.
"""
# If the user provides a specific solc binary let's use that
given_binary = os.environ.get('SOLC_BINARY')
if given_binary:
return given_binary
for path in os.getenv('PATH', '').split(os.pathsep):
path = path.strip('"')
executable_path = os.path.join(path, BINARY)
if os.path.isfile(executable_path) and os.access(
executable_path, os.X_OK):
return executable_path
return None | python | def get_compiler_path():
# If the user provides a specific solc binary let's use that
given_binary = os.environ.get('SOLC_BINARY')
if given_binary:
return given_binary
for path in os.getenv('PATH', '').split(os.pathsep):
path = path.strip('"')
executable_path = os.path.join(path, BINARY)
if os.path.isfile(executable_path) and os.access(
executable_path, os.X_OK):
return executable_path
return None | [
"def",
"get_compiler_path",
"(",
")",
":",
"# If the user provides a specific solc binary let's use that",
"given_binary",
"=",
"os",
".",
"environ",
".",
"get",
"(",
"'SOLC_BINARY'",
")",
"if",
"given_binary",
":",
"return",
"given_binary",
"for",
"path",
"in",
"os",... | Return the path to the solc compiler.
This funtion will search for the solc binary in the $PATH and return the
path of the first executable occurence. | [
"Return",
"the",
"path",
"to",
"the",
"solc",
"compiler",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L24-L43 |
243,128 | ethereum/pyethereum | ethereum/tools/_solidity.py | solc_arguments | def solc_arguments(libraries=None, combined='bin,abi',
optimize=True, extra_args=None):
""" Build the arguments to call the solc binary. """
args = [
'--combined-json', combined,
]
def str_of(address):
"""cast address to string. py2/3 compatability. """
try:
return address.decode('utf8')
except AttributeError:
return address
if optimize:
args.append('--optimize')
if extra_args:
try:
args.extend(shlex.split(extra_args))
except BaseException: # if not a parseable string then treat it as a list
args.extend(extra_args)
if libraries is not None and len(libraries):
addresses = [
'{name}:{address}'.format(
name=name, address=str_of(address))
for name, address in libraries.items()
]
args.extend([
'--libraries',
','.join(addresses),
])
return args | python | def solc_arguments(libraries=None, combined='bin,abi',
optimize=True, extra_args=None):
args = [
'--combined-json', combined,
]
def str_of(address):
"""cast address to string. py2/3 compatability. """
try:
return address.decode('utf8')
except AttributeError:
return address
if optimize:
args.append('--optimize')
if extra_args:
try:
args.extend(shlex.split(extra_args))
except BaseException: # if not a parseable string then treat it as a list
args.extend(extra_args)
if libraries is not None and len(libraries):
addresses = [
'{name}:{address}'.format(
name=name, address=str_of(address))
for name, address in libraries.items()
]
args.extend([
'--libraries',
','.join(addresses),
])
return args | [
"def",
"solc_arguments",
"(",
"libraries",
"=",
"None",
",",
"combined",
"=",
"'bin,abi'",
",",
"optimize",
"=",
"True",
",",
"extra_args",
"=",
"None",
")",
":",
"args",
"=",
"[",
"'--combined-json'",
",",
"combined",
",",
"]",
"def",
"str_of",
"(",
"ad... | Build the arguments to call the solc binary. | [
"Build",
"the",
"arguments",
"to",
"call",
"the",
"solc",
"binary",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L54-L89 |
243,129 | ethereum/pyethereum | ethereum/tools/_solidity.py | solc_parse_output | def solc_parse_output(compiler_output):
""" Parses the compiler output. """
# At the moment some solc output like --hashes or -- gas will not output
# json at all so if used with those arguments the logic here will break.
# Perhaps solidity will slowly switch to a json only output and this comment
# can eventually go away and we will not need to add more logic here at
# all.
result = yaml.safe_load(compiler_output)['contracts']
if 'bin' in tuple(result.values())[0]:
for value in result.values():
value['bin_hex'] = value['bin']
# decoding can fail if the compiled contract has unresolved symbols
try:
value['bin'] = decode_hex(value['bin_hex'])
except (TypeError, ValueError):
pass
for json_data in ('abi', 'devdoc', 'userdoc'):
# the values in the output can be configured through the
# --combined-json flag, check that it's present in the first value and
# assume all values are consistent
if json_data not in tuple(result.values())[0]:
continue
for value in result.values():
value[json_data] = yaml.safe_load(value[json_data])
return result | python | def solc_parse_output(compiler_output):
# At the moment some solc output like --hashes or -- gas will not output
# json at all so if used with those arguments the logic here will break.
# Perhaps solidity will slowly switch to a json only output and this comment
# can eventually go away and we will not need to add more logic here at
# all.
result = yaml.safe_load(compiler_output)['contracts']
if 'bin' in tuple(result.values())[0]:
for value in result.values():
value['bin_hex'] = value['bin']
# decoding can fail if the compiled contract has unresolved symbols
try:
value['bin'] = decode_hex(value['bin_hex'])
except (TypeError, ValueError):
pass
for json_data in ('abi', 'devdoc', 'userdoc'):
# the values in the output can be configured through the
# --combined-json flag, check that it's present in the first value and
# assume all values are consistent
if json_data not in tuple(result.values())[0]:
continue
for value in result.values():
value[json_data] = yaml.safe_load(value[json_data])
return result | [
"def",
"solc_parse_output",
"(",
"compiler_output",
")",
":",
"# At the moment some solc output like --hashes or -- gas will not output",
"# json at all so if used with those arguments the logic here will break.",
"# Perhaps solidity will slowly switch to a json only output and this comment",
"# c... | Parses the compiler output. | [
"Parses",
"the",
"compiler",
"output",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L92-L121 |
243,130 | ethereum/pyethereum | ethereum/tools/_solidity.py | compiler_version | def compiler_version():
""" Return the version of the installed solc. """
version_info = subprocess.check_output(['solc', '--version'])
match = re.search(b'^Version: ([0-9a-z.-]+)/', version_info, re.MULTILINE)
if match:
return match.group(1) | python | def compiler_version():
version_info = subprocess.check_output(['solc', '--version'])
match = re.search(b'^Version: ([0-9a-z.-]+)/', version_info, re.MULTILINE)
if match:
return match.group(1) | [
"def",
"compiler_version",
"(",
")",
":",
"version_info",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"'solc'",
",",
"'--version'",
"]",
")",
"match",
"=",
"re",
".",
"search",
"(",
"b'^Version: ([0-9a-z.-]+)/'",
",",
"version_info",
",",
"re",
".",
"M... | Return the version of the installed solc. | [
"Return",
"the",
"version",
"of",
"the",
"installed",
"solc",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L124-L130 |
243,131 | ethereum/pyethereum | ethereum/tools/_solidity.py | solidity_names | def solidity_names(code): # pylint: disable=too-many-branches
""" Return the library and contract names in order of appearence. """
names = []
in_string = None
backslash = False
comment = None
# "parse" the code by hand to handle the corner cases:
# - the contract or library can be inside a comment or string
# - multiline comments
# - the contract and library keywords could not be at the start of the line
for pos, char in enumerate(code):
if in_string:
if not backslash and in_string == char:
in_string = None
backslash = False
if char == '\\': # pylint: disable=simplifiable-if-statement
backslash = True
else:
backslash = False
elif comment == '//':
if char in ('\n', '\r'):
comment = None
elif comment == '/*':
if char == '*' and code[pos + 1] == '/':
comment = None
else:
if char == '"' or char == "'":
in_string = char
if char == '/':
if code[pos + 1] == '/':
comment = '//'
if code[pos + 1] == '*':
comment = '/*'
if char == 'c' and code[pos: pos + 8] == 'contract':
result = re.match(
'^contract[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('contract', result.groups()[0]))
if char == 'i' and code[pos: pos + 9] == 'interface':
result = re.match(
'^interface[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('contract', result.groups()[0]))
if char == 'l' and code[pos: pos + 7] == 'library':
result = re.match(
'^library[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('library', result.groups()[0]))
return names | python | def solidity_names(code): # pylint: disable=too-many-branches
names = []
in_string = None
backslash = False
comment = None
# "parse" the code by hand to handle the corner cases:
# - the contract or library can be inside a comment or string
# - multiline comments
# - the contract and library keywords could not be at the start of the line
for pos, char in enumerate(code):
if in_string:
if not backslash and in_string == char:
in_string = None
backslash = False
if char == '\\': # pylint: disable=simplifiable-if-statement
backslash = True
else:
backslash = False
elif comment == '//':
if char in ('\n', '\r'):
comment = None
elif comment == '/*':
if char == '*' and code[pos + 1] == '/':
comment = None
else:
if char == '"' or char == "'":
in_string = char
if char == '/':
if code[pos + 1] == '/':
comment = '//'
if code[pos + 1] == '*':
comment = '/*'
if char == 'c' and code[pos: pos + 8] == 'contract':
result = re.match(
'^contract[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('contract', result.groups()[0]))
if char == 'i' and code[pos: pos + 9] == 'interface':
result = re.match(
'^interface[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('contract', result.groups()[0]))
if char == 'l' and code[pos: pos + 7] == 'library':
result = re.match(
'^library[^_$a-zA-Z]+([_$a-zA-Z][_$a-zA-Z0-9]*)', code[pos:])
if result:
names.append(('library', result.groups()[0]))
return names | [
"def",
"solidity_names",
"(",
"code",
")",
":",
"# pylint: disable=too-many-branches",
"names",
"=",
"[",
"]",
"in_string",
"=",
"None",
"backslash",
"=",
"False",
"comment",
"=",
"None",
"# \"parse\" the code by hand to handle the corner cases:",
"# - the contract or libr... | Return the library and contract names in order of appearence. | [
"Return",
"the",
"library",
"and",
"contract",
"names",
"in",
"order",
"of",
"appearence",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L133-L194 |
243,132 | ethereum/pyethereum | ethereum/tools/_solidity.py | compile_file | def compile_file(filepath, libraries=None, combined='bin,abi',
optimize=True, extra_args=None):
""" Return the compile contract code.
Args:
filepath (str): The path to the contract source code.
libraries (dict): A dictionary mapping library name to it's address.
combined (str): The argument for solc's --combined-json.
optimize (bool): Enable/disables compiler optimization.
Returns:
dict: A mapping from the contract name to it's binary.
"""
workdir, filename = os.path.split(filepath)
args = solc_arguments(
libraries=libraries,
combined=combined,
optimize=optimize,
extra_args=extra_args)
args.insert(0, get_compiler_path())
args.append(filename)
output = subprocess.check_output(args, cwd=workdir)
return solc_parse_output(output) | python | def compile_file(filepath, libraries=None, combined='bin,abi',
optimize=True, extra_args=None):
workdir, filename = os.path.split(filepath)
args = solc_arguments(
libraries=libraries,
combined=combined,
optimize=optimize,
extra_args=extra_args)
args.insert(0, get_compiler_path())
args.append(filename)
output = subprocess.check_output(args, cwd=workdir)
return solc_parse_output(output) | [
"def",
"compile_file",
"(",
"filepath",
",",
"libraries",
"=",
"None",
",",
"combined",
"=",
"'bin,abi'",
",",
"optimize",
"=",
"True",
",",
"extra_args",
"=",
"None",
")",
":",
"workdir",
",",
"filename",
"=",
"os",
".",
"path",
".",
"split",
"(",
"fi... | Return the compile contract code.
Args:
filepath (str): The path to the contract source code.
libraries (dict): A dictionary mapping library name to it's address.
combined (str): The argument for solc's --combined-json.
optimize (bool): Enable/disables compiler optimization.
Returns:
dict: A mapping from the contract name to it's binary. | [
"Return",
"the",
"compile",
"contract",
"code",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L265-L291 |
243,133 | ethereum/pyethereum | ethereum/tools/_solidity.py | solidity_get_contract_key | def solidity_get_contract_key(all_contracts, filepath, contract_name):
""" A backwards compatible method of getting the key to the all_contracts
dictionary for a particular contract"""
if contract_name in all_contracts:
return contract_name
else:
if filepath is None:
filename = '<stdin>'
else:
_, filename = os.path.split(filepath)
contract_key = filename + ":" + contract_name
return contract_key if contract_key in all_contracts else None | python | def solidity_get_contract_key(all_contracts, filepath, contract_name):
if contract_name in all_contracts:
return contract_name
else:
if filepath is None:
filename = '<stdin>'
else:
_, filename = os.path.split(filepath)
contract_key = filename + ":" + contract_name
return contract_key if contract_key in all_contracts else None | [
"def",
"solidity_get_contract_key",
"(",
"all_contracts",
",",
"filepath",
",",
"contract_name",
")",
":",
"if",
"contract_name",
"in",
"all_contracts",
":",
"return",
"contract_name",
"else",
":",
"if",
"filepath",
"is",
"None",
":",
"filename",
"=",
"'<stdin>'",... | A backwards compatible method of getting the key to the all_contracts
dictionary for a particular contract | [
"A",
"backwards",
"compatible",
"method",
"of",
"getting",
"the",
"key",
"to",
"the",
"all_contracts",
"dictionary",
"for",
"a",
"particular",
"contract"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L308-L319 |
243,134 | ethereum/pyethereum | ethereum/tools/_solidity.py | Solc.compile | def compile(cls, code, path=None, libraries=None,
contract_name='', extra_args=None):
""" Return the binary of last contract in code. """
result = cls._code_or_path(
code,
path,
contract_name,
libraries,
'bin',
extra_args)
return result['bin'] | python | def compile(cls, code, path=None, libraries=None,
contract_name='', extra_args=None):
result = cls._code_or_path(
code,
path,
contract_name,
libraries,
'bin',
extra_args)
return result['bin'] | [
"def",
"compile",
"(",
"cls",
",",
"code",
",",
"path",
"=",
"None",
",",
"libraries",
"=",
"None",
",",
"contract_name",
"=",
"''",
",",
"extra_args",
"=",
"None",
")",
":",
"result",
"=",
"cls",
".",
"_code_or_path",
"(",
"code",
",",
"path",
",",
... | Return the binary of last contract in code. | [
"Return",
"the",
"binary",
"of",
"last",
"contract",
"in",
"code",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L420-L430 |
243,135 | ethereum/pyethereum | ethereum/tools/_solidity.py | Solc.combined | def combined(cls, code, path=None, extra_args=None):
""" Compile combined-json with abi,bin,devdoc,userdoc.
@param code: literal solidity code as a string.
@param path: absolute path to solidity-file. Note: code & path are
mutually exclusive!
@param extra_args: Either a space separated string or a list of extra
arguments to be passed to the solidity compiler.
"""
if code and path:
raise ValueError('sourcecode and path are mutually exclusive.')
if path:
contracts = compile_file(path, extra_args=extra_args)
with open(path) as handler:
code = handler.read()
elif code:
contracts = compile_code(code, extra_args=extra_args)
else:
raise ValueError('either code or path needs to be supplied.')
sorted_contracts = []
for name in solidity_names(code):
sorted_contracts.append(
(
name[1],
solidity_get_contract_data(contracts, path, name[1])
)
)
return sorted_contracts | python | def combined(cls, code, path=None, extra_args=None):
if code and path:
raise ValueError('sourcecode and path are mutually exclusive.')
if path:
contracts = compile_file(path, extra_args=extra_args)
with open(path) as handler:
code = handler.read()
elif code:
contracts = compile_code(code, extra_args=extra_args)
else:
raise ValueError('either code or path needs to be supplied.')
sorted_contracts = []
for name in solidity_names(code):
sorted_contracts.append(
(
name[1],
solidity_get_contract_data(contracts, path, name[1])
)
)
return sorted_contracts | [
"def",
"combined",
"(",
"cls",
",",
"code",
",",
"path",
"=",
"None",
",",
"extra_args",
"=",
"None",
")",
":",
"if",
"code",
"and",
"path",
":",
"raise",
"ValueError",
"(",
"'sourcecode and path are mutually exclusive.'",
")",
"if",
"path",
":",
"contracts"... | Compile combined-json with abi,bin,devdoc,userdoc.
@param code: literal solidity code as a string.
@param path: absolute path to solidity-file. Note: code & path are
mutually exclusive!
@param extra_args: Either a space separated string or a list of extra
arguments to be passed to the solidity compiler. | [
"Compile",
"combined",
"-",
"json",
"with",
"abi",
"bin",
"devdoc",
"userdoc",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L447-L480 |
243,136 | ethereum/pyethereum | ethereum/tools/_solidity.py | Solc.compile_rich | def compile_rich(cls, code, path=None, extra_args=None):
"""full format as returned by jsonrpc"""
return {
contract_name: {
'code': '0x' + contract.get('bin_hex'),
'info': {
'abiDefinition': contract.get('abi'),
'compilerVersion': cls.compiler_version(),
'developerDoc': contract.get('devdoc'),
'language': 'Solidity',
'languageVersion': '0',
'source': code,
'userDoc': contract.get('userdoc')
},
}
for contract_name, contract
in cls.combined(code, path=path, extra_args=extra_args)
} | python | def compile_rich(cls, code, path=None, extra_args=None):
return {
contract_name: {
'code': '0x' + contract.get('bin_hex'),
'info': {
'abiDefinition': contract.get('abi'),
'compilerVersion': cls.compiler_version(),
'developerDoc': contract.get('devdoc'),
'language': 'Solidity',
'languageVersion': '0',
'source': code,
'userDoc': contract.get('userdoc')
},
}
for contract_name, contract
in cls.combined(code, path=path, extra_args=extra_args)
} | [
"def",
"compile_rich",
"(",
"cls",
",",
"code",
",",
"path",
"=",
"None",
",",
"extra_args",
"=",
"None",
")",
":",
"return",
"{",
"contract_name",
":",
"{",
"'code'",
":",
"'0x'",
"+",
"contract",
".",
"get",
"(",
"'bin_hex'",
")",
",",
"'info'",
":... | full format as returned by jsonrpc | [
"full",
"format",
"as",
"returned",
"by",
"jsonrpc"
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/tools/_solidity.py#L483-L501 |
243,137 | ethereum/pyethereum | ethereum/pow/consensus.py | validate_uncles | def validate_uncles(state, block):
"""Validate the uncles of this block."""
# Make sure hash matches up
if utils.sha3(rlp.encode(block.uncles)) != block.header.uncles_hash:
raise VerificationFailed("Uncle hash mismatch")
# Enforce maximum number of uncles
if len(block.uncles) > state.config['MAX_UNCLES']:
raise VerificationFailed("Too many uncles")
# Uncle must have lower block number than blockj
for uncle in block.uncles:
if uncle.number >= block.header.number:
raise VerificationFailed("Uncle number too high")
# Check uncle validity
MAX_UNCLE_DEPTH = state.config['MAX_UNCLE_DEPTH']
ancestor_chain = [block.header] + \
[a for a in state.prev_headers[:MAX_UNCLE_DEPTH + 1] if a]
# Uncles of this block cannot be direct ancestors and cannot also
# be uncles included 1-6 blocks ago
ineligible = [b.hash for b in ancestor_chain]
for blknum, uncles in state.recent_uncles.items():
if state.block_number > int(
blknum) >= state.block_number - MAX_UNCLE_DEPTH:
ineligible.extend([u for u in uncles])
eligible_ancestor_hashes = [x.hash for x in ancestor_chain[2:]]
for uncle in block.uncles:
if uncle.prevhash not in eligible_ancestor_hashes:
raise VerificationFailed("Uncle does not have a valid ancestor")
parent = [x for x in ancestor_chain if x.hash == uncle.prevhash][0]
if uncle.difficulty != calc_difficulty(
parent, uncle.timestamp, config=state.config):
raise VerificationFailed("Difficulty mismatch")
if uncle.number != parent.number + 1:
raise VerificationFailed("Number mismatch")
if uncle.timestamp < parent.timestamp:
raise VerificationFailed("Timestamp mismatch")
if uncle.hash in ineligible:
raise VerificationFailed("Duplicate uncle")
if uncle.gas_used > uncle.gas_limit:
raise VerificationFailed("Uncle used too much gas")
if not check_pow(state, uncle):
raise VerificationFailed('uncle pow mismatch')
ineligible.append(uncle.hash)
return True | python | def validate_uncles(state, block):
# Make sure hash matches up
if utils.sha3(rlp.encode(block.uncles)) != block.header.uncles_hash:
raise VerificationFailed("Uncle hash mismatch")
# Enforce maximum number of uncles
if len(block.uncles) > state.config['MAX_UNCLES']:
raise VerificationFailed("Too many uncles")
# Uncle must have lower block number than blockj
for uncle in block.uncles:
if uncle.number >= block.header.number:
raise VerificationFailed("Uncle number too high")
# Check uncle validity
MAX_UNCLE_DEPTH = state.config['MAX_UNCLE_DEPTH']
ancestor_chain = [block.header] + \
[a for a in state.prev_headers[:MAX_UNCLE_DEPTH + 1] if a]
# Uncles of this block cannot be direct ancestors and cannot also
# be uncles included 1-6 blocks ago
ineligible = [b.hash for b in ancestor_chain]
for blknum, uncles in state.recent_uncles.items():
if state.block_number > int(
blknum) >= state.block_number - MAX_UNCLE_DEPTH:
ineligible.extend([u for u in uncles])
eligible_ancestor_hashes = [x.hash for x in ancestor_chain[2:]]
for uncle in block.uncles:
if uncle.prevhash not in eligible_ancestor_hashes:
raise VerificationFailed("Uncle does not have a valid ancestor")
parent = [x for x in ancestor_chain if x.hash == uncle.prevhash][0]
if uncle.difficulty != calc_difficulty(
parent, uncle.timestamp, config=state.config):
raise VerificationFailed("Difficulty mismatch")
if uncle.number != parent.number + 1:
raise VerificationFailed("Number mismatch")
if uncle.timestamp < parent.timestamp:
raise VerificationFailed("Timestamp mismatch")
if uncle.hash in ineligible:
raise VerificationFailed("Duplicate uncle")
if uncle.gas_used > uncle.gas_limit:
raise VerificationFailed("Uncle used too much gas")
if not check_pow(state, uncle):
raise VerificationFailed('uncle pow mismatch')
ineligible.append(uncle.hash)
return True | [
"def",
"validate_uncles",
"(",
"state",
",",
"block",
")",
":",
"# Make sure hash matches up",
"if",
"utils",
".",
"sha3",
"(",
"rlp",
".",
"encode",
"(",
"block",
".",
"uncles",
")",
")",
"!=",
"block",
".",
"header",
".",
"uncles_hash",
":",
"raise",
"... | Validate the uncles of this block. | [
"Validate",
"the",
"uncles",
"of",
"this",
"block",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/pow/consensus.py#L63-L106 |
243,138 | ethereum/pyethereum | ethereum/pow/consensus.py | finalize | def finalize(state, block):
"""Apply rewards and commit."""
if state.is_METROPOLIS():
br = state.config['BYZANTIUM_BLOCK_REWARD']
nr = state.config['BYZANTIUM_NEPHEW_REWARD']
else:
br = state.config['BLOCK_REWARD']
nr = state.config['NEPHEW_REWARD']
delta = int(br + nr * len(block.uncles))
state.delta_balance(state.block_coinbase, delta)
udpf = state.config['UNCLE_DEPTH_PENALTY_FACTOR']
for uncle in block.uncles:
r = int(br * (udpf + uncle.number - state.block_number) // udpf)
state.delta_balance(uncle.coinbase, r)
if state.block_number - \
state.config['MAX_UNCLE_DEPTH'] in state.recent_uncles:
del state.recent_uncles[state.block_number -
state.config['MAX_UNCLE_DEPTH']] | python | def finalize(state, block):
if state.is_METROPOLIS():
br = state.config['BYZANTIUM_BLOCK_REWARD']
nr = state.config['BYZANTIUM_NEPHEW_REWARD']
else:
br = state.config['BLOCK_REWARD']
nr = state.config['NEPHEW_REWARD']
delta = int(br + nr * len(block.uncles))
state.delta_balance(state.block_coinbase, delta)
udpf = state.config['UNCLE_DEPTH_PENALTY_FACTOR']
for uncle in block.uncles:
r = int(br * (udpf + uncle.number - state.block_number) // udpf)
state.delta_balance(uncle.coinbase, r)
if state.block_number - \
state.config['MAX_UNCLE_DEPTH'] in state.recent_uncles:
del state.recent_uncles[state.block_number -
state.config['MAX_UNCLE_DEPTH']] | [
"def",
"finalize",
"(",
"state",
",",
"block",
")",
":",
"if",
"state",
".",
"is_METROPOLIS",
"(",
")",
":",
"br",
"=",
"state",
".",
"config",
"[",
"'BYZANTIUM_BLOCK_REWARD'",
"]",
"nr",
"=",
"state",
".",
"config",
"[",
"'BYZANTIUM_NEPHEW_REWARD'",
"]",
... | Apply rewards and commit. | [
"Apply",
"rewards",
"and",
"commit",
"."
] | b704a5c6577863edc539a1ec3d2620a443b950fb | https://github.com/ethereum/pyethereum/blob/b704a5c6577863edc539a1ec3d2620a443b950fb/ethereum/pow/consensus.py#L110-L132 |
243,139 | Microsoft/knack | knack/deprecation.py | Deprecated.ensure_new_style_deprecation | def ensure_new_style_deprecation(cli_ctx, kwargs, object_type):
""" Helper method to make the previous string-based deprecate_info kwarg
work with the new style. """
deprecate_info = kwargs.get('deprecate_info', None)
if isinstance(deprecate_info, Deprecated):
deprecate_info.object_type = object_type
elif isinstance(deprecate_info, STRING_TYPES):
deprecate_info = Deprecated(cli_ctx, redirect=deprecate_info, object_type=object_type)
kwargs['deprecate_info'] = deprecate_info
return deprecate_info | python | def ensure_new_style_deprecation(cli_ctx, kwargs, object_type):
deprecate_info = kwargs.get('deprecate_info', None)
if isinstance(deprecate_info, Deprecated):
deprecate_info.object_type = object_type
elif isinstance(deprecate_info, STRING_TYPES):
deprecate_info = Deprecated(cli_ctx, redirect=deprecate_info, object_type=object_type)
kwargs['deprecate_info'] = deprecate_info
return deprecate_info | [
"def",
"ensure_new_style_deprecation",
"(",
"cli_ctx",
",",
"kwargs",
",",
"object_type",
")",
":",
"deprecate_info",
"=",
"kwargs",
".",
"get",
"(",
"'deprecate_info'",
",",
"None",
")",
"if",
"isinstance",
"(",
"deprecate_info",
",",
"Deprecated",
")",
":",
... | Helper method to make the previous string-based deprecate_info kwarg
work with the new style. | [
"Helper",
"method",
"to",
"make",
"the",
"previous",
"string",
"-",
"based",
"deprecate_info",
"kwarg",
"work",
"with",
"the",
"new",
"style",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/deprecation.py#L53-L62 |
243,140 | Microsoft/knack | knack/deprecation.py | Deprecated._version_less_than_or_equal_to | def _version_less_than_or_equal_to(self, v1, v2):
""" Returns true if v1 <= v2. """
# pylint: disable=no-name-in-module, import-error
from distutils.version import LooseVersion
return LooseVersion(v1) <= LooseVersion(v2) | python | def _version_less_than_or_equal_to(self, v1, v2):
# pylint: disable=no-name-in-module, import-error
from distutils.version import LooseVersion
return LooseVersion(v1) <= LooseVersion(v2) | [
"def",
"_version_less_than_or_equal_to",
"(",
"self",
",",
"v1",
",",
"v2",
")",
":",
"# pylint: disable=no-name-in-module, import-error",
"from",
"distutils",
".",
"version",
"import",
"LooseVersion",
"return",
"LooseVersion",
"(",
"v1",
")",
"<=",
"LooseVersion",
"(... | Returns true if v1 <= v2. | [
"Returns",
"true",
"if",
"v1",
"<",
"=",
"v2",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/deprecation.py#L127-L131 |
243,141 | Microsoft/knack | knack/prompting.py | prompt_choice_list | def prompt_choice_list(msg, a_list, default=1, help_string=None):
"""Prompt user to select from a list of possible choices.
:param msg:A message displayed to the user before the choice list
:type msg: str
:param a_list:The list of choices (list of strings or list of dicts with 'name' & 'desc')
"type a_list: list
:param default:The default option that should be chosen if user doesn't enter a choice
:type default: int
:returns: The list index of the item chosen.
"""
verify_is_a_tty()
options = '\n'.join([' [{}] {}{}'
.format(i + 1,
x['name'] if isinstance(x, dict) and 'name' in x else x,
' - ' + x['desc'] if isinstance(x, dict) and 'desc' in x else '')
for i, x in enumerate(a_list)])
allowed_vals = list(range(1, len(a_list) + 1))
while True:
val = _input('{}\n{}\nPlease enter a choice [Default choice({})]: '.format(msg, options, default))
if val == '?' and help_string is not None:
print(help_string)
continue
if not val:
val = '{}'.format(default)
try:
ans = int(val)
if ans in allowed_vals:
# array index is 0-based, user input is 1-based
return ans - 1
raise ValueError
except ValueError:
logger.warning('Valid values are %s', allowed_vals) | python | def prompt_choice_list(msg, a_list, default=1, help_string=None):
verify_is_a_tty()
options = '\n'.join([' [{}] {}{}'
.format(i + 1,
x['name'] if isinstance(x, dict) and 'name' in x else x,
' - ' + x['desc'] if isinstance(x, dict) and 'desc' in x else '')
for i, x in enumerate(a_list)])
allowed_vals = list(range(1, len(a_list) + 1))
while True:
val = _input('{}\n{}\nPlease enter a choice [Default choice({})]: '.format(msg, options, default))
if val == '?' and help_string is not None:
print(help_string)
continue
if not val:
val = '{}'.format(default)
try:
ans = int(val)
if ans in allowed_vals:
# array index is 0-based, user input is 1-based
return ans - 1
raise ValueError
except ValueError:
logger.warning('Valid values are %s', allowed_vals) | [
"def",
"prompt_choice_list",
"(",
"msg",
",",
"a_list",
",",
"default",
"=",
"1",
",",
"help_string",
"=",
"None",
")",
":",
"verify_is_a_tty",
"(",
")",
"options",
"=",
"'\\n'",
".",
"join",
"(",
"[",
"' [{}] {}{}'",
".",
"format",
"(",
"i",
"+",
"1",... | Prompt user to select from a list of possible choices.
:param msg:A message displayed to the user before the choice list
:type msg: str
:param a_list:The list of choices (list of strings or list of dicts with 'name' & 'desc')
"type a_list: list
:param default:The default option that should be chosen if user doesn't enter a choice
:type default: int
:returns: The list index of the item chosen. | [
"Prompt",
"user",
"to",
"select",
"from",
"a",
"list",
"of",
"possible",
"choices",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/prompting.py#L99-L131 |
243,142 | Microsoft/knack | knack/parser.py | CLICommandParser._add_argument | def _add_argument(obj, arg):
""" Only pass valid argparse kwargs to argparse.ArgumentParser.add_argument """
argparse_options = {name: value for name, value in arg.options.items() if name in ARGPARSE_SUPPORTED_KWARGS}
if arg.options_list:
scrubbed_options_list = []
for item in arg.options_list:
if isinstance(item, Deprecated):
# don't add expired options to the parser
if item.expired():
continue
class _DeprecatedOption(str):
def __new__(cls, *args, **kwargs):
instance = str.__new__(cls, *args, **kwargs)
return instance
option = _DeprecatedOption(item.target)
setattr(option, 'deprecate_info', item)
item = option
scrubbed_options_list.append(item)
return obj.add_argument(*scrubbed_options_list, **argparse_options)
if 'required' in argparse_options:
del argparse_options['required']
if 'metavar' not in argparse_options:
argparse_options['metavar'] = '<{}>'.format(argparse_options['dest'].upper())
return obj.add_argument(**argparse_options) | python | def _add_argument(obj, arg):
argparse_options = {name: value for name, value in arg.options.items() if name in ARGPARSE_SUPPORTED_KWARGS}
if arg.options_list:
scrubbed_options_list = []
for item in arg.options_list:
if isinstance(item, Deprecated):
# don't add expired options to the parser
if item.expired():
continue
class _DeprecatedOption(str):
def __new__(cls, *args, **kwargs):
instance = str.__new__(cls, *args, **kwargs)
return instance
option = _DeprecatedOption(item.target)
setattr(option, 'deprecate_info', item)
item = option
scrubbed_options_list.append(item)
return obj.add_argument(*scrubbed_options_list, **argparse_options)
if 'required' in argparse_options:
del argparse_options['required']
if 'metavar' not in argparse_options:
argparse_options['metavar'] = '<{}>'.format(argparse_options['dest'].upper())
return obj.add_argument(**argparse_options) | [
"def",
"_add_argument",
"(",
"obj",
",",
"arg",
")",
":",
"argparse_options",
"=",
"{",
"name",
":",
"value",
"for",
"name",
",",
"value",
"in",
"arg",
".",
"options",
".",
"items",
"(",
")",
"if",
"name",
"in",
"ARGPARSE_SUPPORTED_KWARGS",
"}",
"if",
... | Only pass valid argparse kwargs to argparse.ArgumentParser.add_argument | [
"Only",
"pass",
"valid",
"argparse",
"kwargs",
"to",
"argparse",
".",
"ArgumentParser",
".",
"add_argument"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/parser.py#L42-L69 |
243,143 | Microsoft/knack | knack/parser.py | CLICommandParser.load_command_table | def load_command_table(self, command_loader):
""" Process the command table and load it into the parser
:param cmd_tbl: A dictionary containing the commands
:type cmd_tbl: dict
"""
cmd_tbl = command_loader.command_table
grp_tbl = command_loader.command_group_table
if not cmd_tbl:
raise ValueError('The command table is empty. At least one command is required.')
# If we haven't already added a subparser, we
# better do it.
if not self.subparsers:
sp = self.add_subparsers(dest='_command')
sp.required = True
self.subparsers = {(): sp}
for command_name, metadata in cmd_tbl.items():
subparser = self._get_subparser(command_name.split(), grp_tbl)
command_verb = command_name.split()[-1]
# To work around http://bugs.python.org/issue9253, we artificially add any new
# parsers we add to the "choices" section of the subparser.
subparser = self._get_subparser(command_name.split(), grp_tbl)
deprecate_info = metadata.deprecate_info
if not subparser or (deprecate_info and deprecate_info.expired()):
continue
# inject command_module designer's help formatter -- default is HelpFormatter
fc = metadata.formatter_class or argparse.HelpFormatter
command_parser = subparser.add_parser(command_verb,
description=metadata.description,
parents=self.parents,
conflict_handler='error',
help_file=metadata.help,
formatter_class=fc,
cli_help=self.cli_help)
command_parser.cli_ctx = self.cli_ctx
command_validator = metadata.validator
argument_validators = []
argument_groups = {}
for arg in metadata.arguments.values():
# don't add deprecated arguments to the parser
deprecate_info = arg.type.settings.get('deprecate_info', None)
if deprecate_info and deprecate_info.expired():
continue
if arg.validator:
argument_validators.append(arg.validator)
if arg.arg_group:
try:
group = argument_groups[arg.arg_group]
except KeyError:
# group not found so create
group_name = '{} Arguments'.format(arg.arg_group)
group = command_parser.add_argument_group(arg.arg_group, group_name)
argument_groups[arg.arg_group] = group
param = CLICommandParser._add_argument(group, arg)
else:
param = CLICommandParser._add_argument(command_parser, arg)
param.completer = arg.completer
param.deprecate_info = arg.deprecate_info
command_parser.set_defaults(
func=metadata,
command=command_name,
_command_validator=command_validator,
_argument_validators=argument_validators,
_parser=command_parser) | python | def load_command_table(self, command_loader):
cmd_tbl = command_loader.command_table
grp_tbl = command_loader.command_group_table
if not cmd_tbl:
raise ValueError('The command table is empty. At least one command is required.')
# If we haven't already added a subparser, we
# better do it.
if not self.subparsers:
sp = self.add_subparsers(dest='_command')
sp.required = True
self.subparsers = {(): sp}
for command_name, metadata in cmd_tbl.items():
subparser = self._get_subparser(command_name.split(), grp_tbl)
command_verb = command_name.split()[-1]
# To work around http://bugs.python.org/issue9253, we artificially add any new
# parsers we add to the "choices" section of the subparser.
subparser = self._get_subparser(command_name.split(), grp_tbl)
deprecate_info = metadata.deprecate_info
if not subparser or (deprecate_info and deprecate_info.expired()):
continue
# inject command_module designer's help formatter -- default is HelpFormatter
fc = metadata.formatter_class or argparse.HelpFormatter
command_parser = subparser.add_parser(command_verb,
description=metadata.description,
parents=self.parents,
conflict_handler='error',
help_file=metadata.help,
formatter_class=fc,
cli_help=self.cli_help)
command_parser.cli_ctx = self.cli_ctx
command_validator = metadata.validator
argument_validators = []
argument_groups = {}
for arg in metadata.arguments.values():
# don't add deprecated arguments to the parser
deprecate_info = arg.type.settings.get('deprecate_info', None)
if deprecate_info and deprecate_info.expired():
continue
if arg.validator:
argument_validators.append(arg.validator)
if arg.arg_group:
try:
group = argument_groups[arg.arg_group]
except KeyError:
# group not found so create
group_name = '{} Arguments'.format(arg.arg_group)
group = command_parser.add_argument_group(arg.arg_group, group_name)
argument_groups[arg.arg_group] = group
param = CLICommandParser._add_argument(group, arg)
else:
param = CLICommandParser._add_argument(command_parser, arg)
param.completer = arg.completer
param.deprecate_info = arg.deprecate_info
command_parser.set_defaults(
func=metadata,
command=command_name,
_command_validator=command_validator,
_argument_validators=argument_validators,
_parser=command_parser) | [
"def",
"load_command_table",
"(",
"self",
",",
"command_loader",
")",
":",
"cmd_tbl",
"=",
"command_loader",
".",
"command_table",
"grp_tbl",
"=",
"command_loader",
".",
"command_group_table",
"if",
"not",
"cmd_tbl",
":",
"raise",
"ValueError",
"(",
"'The command ta... | Process the command table and load it into the parser
:param cmd_tbl: A dictionary containing the commands
:type cmd_tbl: dict | [
"Process",
"the",
"command",
"table",
"and",
"load",
"it",
"into",
"the",
"parser"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/parser.py#L111-L178 |
243,144 | Microsoft/knack | knack/parser.py | CLICommandParser._get_subparser | def _get_subparser(self, path, group_table=None):
"""For each part of the path, walk down the tree of
subparsers, creating new ones if one doesn't already exist.
"""
group_table = group_table or {}
for length in range(0, len(path)):
parent_path = path[:length]
parent_subparser = self.subparsers.get(tuple(parent_path), None)
if not parent_subparser:
# No subparser exists for the given subpath - create and register
# a new subparser.
# Since we know that we always have a root subparser (we created)
# one when we started loading the command table, and we walk the
# path from left to right (i.e. for "cmd subcmd1 subcmd2", we start
# with ensuring that a subparser for cmd exists, then for subcmd1,
# subcmd2 and so on), we know we can always back up one step and
# add a subparser if one doesn't exist
command_group = group_table.get(' '.join(parent_path))
if command_group:
deprecate_info = command_group.group_kwargs.get('deprecate_info', None)
if deprecate_info and deprecate_info.expired():
continue
grandparent_path = path[:length - 1]
grandparent_subparser = self.subparsers[tuple(grandparent_path)]
new_path = path[length - 1]
new_parser = grandparent_subparser.add_parser(new_path, cli_help=self.cli_help)
# Due to http://bugs.python.org/issue9253, we have to give the subparser
# a destination and set it to required in order to get a meaningful error
parent_subparser = new_parser.add_subparsers(dest='_subcommand')
command_group = group_table.get(' '.join(parent_path), None)
deprecate_info = None
if command_group:
deprecate_info = command_group.group_kwargs.get('deprecate_info', None)
parent_subparser.required = True
parent_subparser.deprecate_info = deprecate_info
self.subparsers[tuple(path[0:length])] = parent_subparser
return parent_subparser | python | def _get_subparser(self, path, group_table=None):
group_table = group_table or {}
for length in range(0, len(path)):
parent_path = path[:length]
parent_subparser = self.subparsers.get(tuple(parent_path), None)
if not parent_subparser:
# No subparser exists for the given subpath - create and register
# a new subparser.
# Since we know that we always have a root subparser (we created)
# one when we started loading the command table, and we walk the
# path from left to right (i.e. for "cmd subcmd1 subcmd2", we start
# with ensuring that a subparser for cmd exists, then for subcmd1,
# subcmd2 and so on), we know we can always back up one step and
# add a subparser if one doesn't exist
command_group = group_table.get(' '.join(parent_path))
if command_group:
deprecate_info = command_group.group_kwargs.get('deprecate_info', None)
if deprecate_info and deprecate_info.expired():
continue
grandparent_path = path[:length - 1]
grandparent_subparser = self.subparsers[tuple(grandparent_path)]
new_path = path[length - 1]
new_parser = grandparent_subparser.add_parser(new_path, cli_help=self.cli_help)
# Due to http://bugs.python.org/issue9253, we have to give the subparser
# a destination and set it to required in order to get a meaningful error
parent_subparser = new_parser.add_subparsers(dest='_subcommand')
command_group = group_table.get(' '.join(parent_path), None)
deprecate_info = None
if command_group:
deprecate_info = command_group.group_kwargs.get('deprecate_info', None)
parent_subparser.required = True
parent_subparser.deprecate_info = deprecate_info
self.subparsers[tuple(path[0:length])] = parent_subparser
return parent_subparser | [
"def",
"_get_subparser",
"(",
"self",
",",
"path",
",",
"group_table",
"=",
"None",
")",
":",
"group_table",
"=",
"group_table",
"or",
"{",
"}",
"for",
"length",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"path",
")",
")",
":",
"parent_path",
"=",
"pa... | For each part of the path, walk down the tree of
subparsers, creating new ones if one doesn't already exist. | [
"For",
"each",
"part",
"of",
"the",
"path",
"walk",
"down",
"the",
"tree",
"of",
"subparsers",
"creating",
"new",
"ones",
"if",
"one",
"doesn",
"t",
"already",
"exist",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/parser.py#L180-L217 |
243,145 | Microsoft/knack | knack/parser.py | CLICommandParser.parse_args | def parse_args(self, args=None, namespace=None):
""" Overrides argparse.ArgumentParser.parse_args
Enables '@'-prefixed files to be expanded before arguments are processed
by ArgumentParser.parse_args as usual
"""
self._expand_prefixed_files(args)
return super(CLICommandParser, self).parse_args(args) | python | def parse_args(self, args=None, namespace=None):
self._expand_prefixed_files(args)
return super(CLICommandParser, self).parse_args(args) | [
"def",
"parse_args",
"(",
"self",
",",
"args",
"=",
"None",
",",
"namespace",
"=",
"None",
")",
":",
"self",
".",
"_expand_prefixed_files",
"(",
"args",
")",
"return",
"super",
"(",
"CLICommandParser",
",",
"self",
")",
".",
"parse_args",
"(",
"args",
")... | Overrides argparse.ArgumentParser.parse_args
Enables '@'-prefixed files to be expanded before arguments are processed
by ArgumentParser.parse_args as usual | [
"Overrides",
"argparse",
".",
"ArgumentParser",
".",
"parse_args"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/parser.py#L249-L256 |
243,146 | Microsoft/knack | knack/introspection.py | extract_full_summary_from_signature | def extract_full_summary_from_signature(operation):
""" Extract the summary from the docstring of the command. """
lines = inspect.getdoc(operation)
regex = r'\s*(:param)\s+(.+?)\s*:(.*)'
summary = ''
if lines:
match = re.search(regex, lines)
summary = lines[:match.regs[0][0]] if match else lines
summary = summary.replace('\n', ' ').replace('\r', '')
return summary | python | def extract_full_summary_from_signature(operation):
lines = inspect.getdoc(operation)
regex = r'\s*(:param)\s+(.+?)\s*:(.*)'
summary = ''
if lines:
match = re.search(regex, lines)
summary = lines[:match.regs[0][0]] if match else lines
summary = summary.replace('\n', ' ').replace('\r', '')
return summary | [
"def",
"extract_full_summary_from_signature",
"(",
"operation",
")",
":",
"lines",
"=",
"inspect",
".",
"getdoc",
"(",
"operation",
")",
"regex",
"=",
"r'\\s*(:param)\\s+(.+?)\\s*:(.*)'",
"summary",
"=",
"''",
"if",
"lines",
":",
"match",
"=",
"re",
".",
"search... | Extract the summary from the docstring of the command. | [
"Extract",
"the",
"summary",
"from",
"the",
"docstring",
"of",
"the",
"command",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/introspection.py#L15-L25 |
243,147 | Microsoft/knack | knack/cli.py | CLI.get_runtime_version | def get_runtime_version(self): # pylint: disable=no-self-use
""" Get the runtime information.
:return: Runtime information
:rtype: str
"""
import platform
version_info = '\n\n'
version_info += 'Python ({}) {}'.format(platform.system(), sys.version)
version_info += '\n\n'
version_info += 'Python location \'{}\''.format(sys.executable)
version_info += '\n'
return version_info | python | def get_runtime_version(self): # pylint: disable=no-self-use
import platform
version_info = '\n\n'
version_info += 'Python ({}) {}'.format(platform.system(), sys.version)
version_info += '\n\n'
version_info += 'Python location \'{}\''.format(sys.executable)
version_info += '\n'
return version_info | [
"def",
"get_runtime_version",
"(",
"self",
")",
":",
"# pylint: disable=no-self-use",
"import",
"platform",
"version_info",
"=",
"'\\n\\n'",
"version_info",
"+=",
"'Python ({}) {}'",
".",
"format",
"(",
"platform",
".",
"system",
"(",
")",
",",
"sys",
".",
"versio... | Get the runtime information.
:return: Runtime information
:rtype: str | [
"Get",
"the",
"runtime",
"information",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L107-L120 |
243,148 | Microsoft/knack | knack/cli.py | CLI.show_version | def show_version(self):
""" Print version information to the out file. """
version_info = self.get_cli_version()
version_info += self.get_runtime_version()
print(version_info, file=self.out_file) | python | def show_version(self):
version_info = self.get_cli_version()
version_info += self.get_runtime_version()
print(version_info, file=self.out_file) | [
"def",
"show_version",
"(",
"self",
")",
":",
"version_info",
"=",
"self",
".",
"get_cli_version",
"(",
")",
"version_info",
"+=",
"self",
".",
"get_runtime_version",
"(",
")",
"print",
"(",
"version_info",
",",
"file",
"=",
"self",
".",
"out_file",
")"
] | Print version information to the out file. | [
"Print",
"version",
"information",
"to",
"the",
"out",
"file",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L122-L126 |
243,149 | Microsoft/knack | knack/cli.py | CLI.unregister_event | def unregister_event(self, event_name, handler):
""" Unregister a callable that will be called when event is raised.
:param event_name: The name of the event (see knack.events for in-built events)
:type event_name: str
:param handler: The callback that was used to register the event
:type handler: function
"""
try:
self._event_handlers[event_name].remove(handler)
except ValueError:
pass | python | def unregister_event(self, event_name, handler):
try:
self._event_handlers[event_name].remove(handler)
except ValueError:
pass | [
"def",
"unregister_event",
"(",
"self",
",",
"event_name",
",",
"handler",
")",
":",
"try",
":",
"self",
".",
"_event_handlers",
"[",
"event_name",
"]",
".",
"remove",
"(",
"handler",
")",
"except",
"ValueError",
":",
"pass"
] | Unregister a callable that will be called when event is raised.
:param event_name: The name of the event (see knack.events for in-built events)
:type event_name: str
:param handler: The callback that was used to register the event
:type handler: function | [
"Unregister",
"a",
"callable",
"that",
"will",
"be",
"called",
"when",
"event",
"is",
"raised",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L139-L150 |
243,150 | Microsoft/knack | knack/cli.py | CLI.raise_event | def raise_event(self, event_name, **kwargs):
""" Raise an event. Calls each handler in turn with kwargs
:param event_name: The name of the event to raise
:type event_name: str
:param kwargs: Kwargs to be passed to all event handlers
"""
handlers = list(self._event_handlers[event_name])
logger.debug('Event: %s %s', event_name, handlers)
for func in handlers:
func(self, **kwargs) | python | def raise_event(self, event_name, **kwargs):
handlers = list(self._event_handlers[event_name])
logger.debug('Event: %s %s', event_name, handlers)
for func in handlers:
func(self, **kwargs) | [
"def",
"raise_event",
"(",
"self",
",",
"event_name",
",",
"*",
"*",
"kwargs",
")",
":",
"handlers",
"=",
"list",
"(",
"self",
".",
"_event_handlers",
"[",
"event_name",
"]",
")",
"logger",
".",
"debug",
"(",
"'Event: %s %s'",
",",
"event_name",
",",
"ha... | Raise an event. Calls each handler in turn with kwargs
:param event_name: The name of the event to raise
:type event_name: str
:param kwargs: Kwargs to be passed to all event handlers | [
"Raise",
"an",
"event",
".",
"Calls",
"each",
"handler",
"in",
"turn",
"with",
"kwargs"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L152-L162 |
243,151 | Microsoft/knack | knack/cli.py | CLI.exception_handler | def exception_handler(self, ex): # pylint: disable=no-self-use
""" The default exception handler """
if isinstance(ex, CLIError):
logger.error(ex)
else:
logger.exception(ex)
return 1 | python | def exception_handler(self, ex): # pylint: disable=no-self-use
if isinstance(ex, CLIError):
logger.error(ex)
else:
logger.exception(ex)
return 1 | [
"def",
"exception_handler",
"(",
"self",
",",
"ex",
")",
":",
"# pylint: disable=no-self-use",
"if",
"isinstance",
"(",
"ex",
",",
"CLIError",
")",
":",
"logger",
".",
"error",
"(",
"ex",
")",
"else",
":",
"logger",
".",
"exception",
"(",
"ex",
")",
"ret... | The default exception handler | [
"The",
"default",
"exception",
"handler"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L164-L170 |
243,152 | Microsoft/knack | knack/cli.py | CLI.invoke | def invoke(self, args, initial_invocation_data=None, out_file=None):
""" Invoke a command.
:param args: The arguments that represent the command
:type args: list, tuple
:param initial_invocation_data: Prime the in memory collection of key-value data for this invocation.
:type initial_invocation_data: dict
:param out_file: The file to send output to. If not used, we use out_file for knack.cli.CLI instance
:type out_file: file-like object
:return: The exit code of the invocation
:rtype: int
"""
from .util import CommandResultItem
if not isinstance(args, (list, tuple)):
raise TypeError('args should be a list or tuple.')
exit_code = 0
try:
args = self.completion.get_completion_args() or args
out_file = out_file or self.out_file
self.logging.configure(args)
logger.debug('Command arguments: %s', args)
self.raise_event(EVENT_CLI_PRE_EXECUTE)
if CLI._should_show_version(args):
self.show_version()
self.result = CommandResultItem(None)
else:
self.invocation = self.invocation_cls(cli_ctx=self,
parser_cls=self.parser_cls,
commands_loader_cls=self.commands_loader_cls,
help_cls=self.help_cls,
initial_data=initial_invocation_data)
cmd_result = self.invocation.execute(args)
self.result = cmd_result
exit_code = self.result.exit_code
output_type = self.invocation.data['output']
if cmd_result and cmd_result.result is not None:
formatter = self.output.get_formatter(output_type)
self.output.out(cmd_result, formatter=formatter, out_file=out_file)
self.raise_event(EVENT_CLI_POST_EXECUTE)
except KeyboardInterrupt as ex:
self.result = CommandResultItem(None, error=ex)
exit_code = 1
except Exception as ex: # pylint: disable=broad-except
exit_code = self.exception_handler(ex)
self.result = CommandResultItem(None, error=ex)
finally:
pass
self.result.exit_code = exit_code
return exit_code | python | def invoke(self, args, initial_invocation_data=None, out_file=None):
from .util import CommandResultItem
if not isinstance(args, (list, tuple)):
raise TypeError('args should be a list or tuple.')
exit_code = 0
try:
args = self.completion.get_completion_args() or args
out_file = out_file or self.out_file
self.logging.configure(args)
logger.debug('Command arguments: %s', args)
self.raise_event(EVENT_CLI_PRE_EXECUTE)
if CLI._should_show_version(args):
self.show_version()
self.result = CommandResultItem(None)
else:
self.invocation = self.invocation_cls(cli_ctx=self,
parser_cls=self.parser_cls,
commands_loader_cls=self.commands_loader_cls,
help_cls=self.help_cls,
initial_data=initial_invocation_data)
cmd_result = self.invocation.execute(args)
self.result = cmd_result
exit_code = self.result.exit_code
output_type = self.invocation.data['output']
if cmd_result and cmd_result.result is not None:
formatter = self.output.get_formatter(output_type)
self.output.out(cmd_result, formatter=formatter, out_file=out_file)
self.raise_event(EVENT_CLI_POST_EXECUTE)
except KeyboardInterrupt as ex:
self.result = CommandResultItem(None, error=ex)
exit_code = 1
except Exception as ex: # pylint: disable=broad-except
exit_code = self.exception_handler(ex)
self.result = CommandResultItem(None, error=ex)
finally:
pass
self.result.exit_code = exit_code
return exit_code | [
"def",
"invoke",
"(",
"self",
",",
"args",
",",
"initial_invocation_data",
"=",
"None",
",",
"out_file",
"=",
"None",
")",
":",
"from",
".",
"util",
"import",
"CommandResultItem",
"if",
"not",
"isinstance",
"(",
"args",
",",
"(",
"list",
",",
"tuple",
")... | Invoke a command.
:param args: The arguments that represent the command
:type args: list, tuple
:param initial_invocation_data: Prime the in memory collection of key-value data for this invocation.
:type initial_invocation_data: dict
:param out_file: The file to send output to. If not used, we use out_file for knack.cli.CLI instance
:type out_file: file-like object
:return: The exit code of the invocation
:rtype: int | [
"Invoke",
"a",
"command",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/cli.py#L172-L223 |
243,153 | Microsoft/knack | knack/log.py | get_logger | def get_logger(module_name=None):
""" Get the logger for a module. If no module name is given, the current CLI logger is returned.
Example:
get_logger(__name__)
:param module_name: The module to get the logger for
:type module_name: str
:return: The logger
:rtype: logger
"""
if module_name:
logger_name = '{}.{}'.format(CLI_LOGGER_NAME, module_name)
else:
logger_name = CLI_LOGGER_NAME
return logging.getLogger(logger_name) | python | def get_logger(module_name=None):
if module_name:
logger_name = '{}.{}'.format(CLI_LOGGER_NAME, module_name)
else:
logger_name = CLI_LOGGER_NAME
return logging.getLogger(logger_name) | [
"def",
"get_logger",
"(",
"module_name",
"=",
"None",
")",
":",
"if",
"module_name",
":",
"logger_name",
"=",
"'{}.{}'",
".",
"format",
"(",
"CLI_LOGGER_NAME",
",",
"module_name",
")",
"else",
":",
"logger_name",
"=",
"CLI_LOGGER_NAME",
"return",
"logging",
".... | Get the logger for a module. If no module name is given, the current CLI logger is returned.
Example:
get_logger(__name__)
:param module_name: The module to get the logger for
:type module_name: str
:return: The logger
:rtype: logger | [
"Get",
"the",
"logger",
"for",
"a",
"module",
".",
"If",
"no",
"module",
"name",
"is",
"given",
"the",
"current",
"CLI",
"logger",
"is",
"returned",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/log.py#L16-L31 |
243,154 | Microsoft/knack | knack/log.py | CLILogging.configure | def configure(self, args):
""" Configure the loggers with the appropriate log level etc.
:param args: The arguments from the command line
:type args: list
"""
verbose_level = self._determine_verbose_level(args)
log_level_config = self.console_log_configs[verbose_level]
root_logger = logging.getLogger()
cli_logger = logging.getLogger(CLI_LOGGER_NAME)
# Set the levels of the loggers to lowest level.
# Handlers can override by choosing a higher level.
root_logger.setLevel(logging.DEBUG)
cli_logger.setLevel(logging.DEBUG)
cli_logger.propagate = False
if root_logger.handlers and cli_logger.handlers:
# loggers already configured
return
self._init_console_handlers(root_logger, cli_logger, log_level_config)
if self.file_log_enabled:
self._init_logfile_handlers(root_logger, cli_logger)
get_logger(__name__).debug("File logging enabled - writing logs to '%s'.", self.log_dir) | python | def configure(self, args):
verbose_level = self._determine_verbose_level(args)
log_level_config = self.console_log_configs[verbose_level]
root_logger = logging.getLogger()
cli_logger = logging.getLogger(CLI_LOGGER_NAME)
# Set the levels of the loggers to lowest level.
# Handlers can override by choosing a higher level.
root_logger.setLevel(logging.DEBUG)
cli_logger.setLevel(logging.DEBUG)
cli_logger.propagate = False
if root_logger.handlers and cli_logger.handlers:
# loggers already configured
return
self._init_console_handlers(root_logger, cli_logger, log_level_config)
if self.file_log_enabled:
self._init_logfile_handlers(root_logger, cli_logger)
get_logger(__name__).debug("File logging enabled - writing logs to '%s'.", self.log_dir) | [
"def",
"configure",
"(",
"self",
",",
"args",
")",
":",
"verbose_level",
"=",
"self",
".",
"_determine_verbose_level",
"(",
"args",
")",
"log_level_config",
"=",
"self",
".",
"console_log_configs",
"[",
"verbose_level",
"]",
"root_logger",
"=",
"logging",
".",
... | Configure the loggers with the appropriate log level etc.
:param args: The arguments from the command line
:type args: list | [
"Configure",
"the",
"loggers",
"with",
"the",
"appropriate",
"log",
"level",
"etc",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/log.py#L120-L141 |
243,155 | Microsoft/knack | knack/log.py | CLILogging._determine_verbose_level | def _determine_verbose_level(self, args):
""" Get verbose level by reading the arguments. """
verbose_level = 0
for arg in args:
if arg == CLILogging.VERBOSE_FLAG:
verbose_level += 1
elif arg == CLILogging.DEBUG_FLAG:
verbose_level += 2
# Use max verbose level if too much verbosity specified.
return min(verbose_level, len(self.console_log_configs) - 1) | python | def _determine_verbose_level(self, args):
verbose_level = 0
for arg in args:
if arg == CLILogging.VERBOSE_FLAG:
verbose_level += 1
elif arg == CLILogging.DEBUG_FLAG:
verbose_level += 2
# Use max verbose level if too much verbosity specified.
return min(verbose_level, len(self.console_log_configs) - 1) | [
"def",
"_determine_verbose_level",
"(",
"self",
",",
"args",
")",
":",
"verbose_level",
"=",
"0",
"for",
"arg",
"in",
"args",
":",
"if",
"arg",
"==",
"CLILogging",
".",
"VERBOSE_FLAG",
":",
"verbose_level",
"+=",
"1",
"elif",
"arg",
"==",
"CLILogging",
"."... | Get verbose level by reading the arguments. | [
"Get",
"verbose",
"level",
"by",
"reading",
"the",
"arguments",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/log.py#L143-L152 |
243,156 | Microsoft/knack | knack/arguments.py | enum_choice_list | def enum_choice_list(data):
""" Creates the argparse choices and type kwargs for a supplied enum type or list of strings """
# transform enum types, otherwise assume list of string choices
if not data:
return {}
try:
choices = [x.value for x in data]
except AttributeError:
choices = data
def _type(value):
return next((x for x in choices if x.lower() == value.lower()), value) if value else value
params = {
'choices': CaseInsensitiveList(choices),
'type': _type
}
return params | python | def enum_choice_list(data):
# transform enum types, otherwise assume list of string choices
if not data:
return {}
try:
choices = [x.value for x in data]
except AttributeError:
choices = data
def _type(value):
return next((x for x in choices if x.lower() == value.lower()), value) if value else value
params = {
'choices': CaseInsensitiveList(choices),
'type': _type
}
return params | [
"def",
"enum_choice_list",
"(",
"data",
")",
":",
"# transform enum types, otherwise assume list of string choices",
"if",
"not",
"data",
":",
"return",
"{",
"}",
"try",
":",
"choices",
"=",
"[",
"x",
".",
"value",
"for",
"x",
"in",
"data",
"]",
"except",
"Att... | Creates the argparse choices and type kwargs for a supplied enum type or list of strings | [
"Creates",
"the",
"argparse",
"choices",
"and",
"type",
"kwargs",
"for",
"a",
"supplied",
"enum",
"type",
"or",
"list",
"of",
"strings"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L359-L376 |
243,157 | Microsoft/knack | knack/arguments.py | ArgumentRegistry.register_cli_argument | def register_cli_argument(self, scope, dest, argtype, **kwargs):
""" Add an argument to the argument registry
:param scope: The command level to apply the argument registration (e.g. 'mygroup mycommand')
:type scope: str
:param dest: The parameter/destination that this argument is for
:type dest: str
:param argtype: The argument type for this command argument
:type argtype: knack.arguments.CLIArgumentType
:param kwargs: see knack.arguments.CLIArgumentType
"""
argument = CLIArgumentType(overrides=argtype, **kwargs)
self.arguments[scope][dest] = argument | python | def register_cli_argument(self, scope, dest, argtype, **kwargs):
argument = CLIArgumentType(overrides=argtype, **kwargs)
self.arguments[scope][dest] = argument | [
"def",
"register_cli_argument",
"(",
"self",
",",
"scope",
",",
"dest",
",",
"argtype",
",",
"*",
"*",
"kwargs",
")",
":",
"argument",
"=",
"CLIArgumentType",
"(",
"overrides",
"=",
"argtype",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"arguments",
"[",
... | Add an argument to the argument registry
:param scope: The command level to apply the argument registration (e.g. 'mygroup mycommand')
:type scope: str
:param dest: The parameter/destination that this argument is for
:type dest: str
:param argtype: The argument type for this command argument
:type argtype: knack.arguments.CLIArgumentType
:param kwargs: see knack.arguments.CLIArgumentType | [
"Add",
"an",
"argument",
"to",
"the",
"argument",
"registry"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L93-L105 |
243,158 | Microsoft/knack | knack/arguments.py | ArgumentRegistry.get_cli_argument | def get_cli_argument(self, command, name):
""" Get the argument for the command after applying the scope hierarchy
:param command: The command that we want the argument for
:type command: str
:param name: The name of the argument
:type name: str
:return: The CLI command after all overrides in the scope hierarchy have been applied
:rtype: knack.arguments.CLIArgumentType
"""
parts = command.split()
result = CLIArgumentType()
for index in range(0, len(parts) + 1):
probe = ' '.join(parts[0:index])
override = self.arguments.get(probe, {}).get(name, None)
if override:
result.update(override)
return result | python | def get_cli_argument(self, command, name):
parts = command.split()
result = CLIArgumentType()
for index in range(0, len(parts) + 1):
probe = ' '.join(parts[0:index])
override = self.arguments.get(probe, {}).get(name, None)
if override:
result.update(override)
return result | [
"def",
"get_cli_argument",
"(",
"self",
",",
"command",
",",
"name",
")",
":",
"parts",
"=",
"command",
".",
"split",
"(",
")",
"result",
"=",
"CLIArgumentType",
"(",
")",
"for",
"index",
"in",
"range",
"(",
"0",
",",
"len",
"(",
"parts",
")",
"+",
... | Get the argument for the command after applying the scope hierarchy
:param command: The command that we want the argument for
:type command: str
:param name: The name of the argument
:type name: str
:return: The CLI command after all overrides in the scope hierarchy have been applied
:rtype: knack.arguments.CLIArgumentType | [
"Get",
"the",
"argument",
"for",
"the",
"command",
"after",
"applying",
"the",
"scope",
"hierarchy"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L107-L124 |
243,159 | Microsoft/knack | knack/arguments.py | ArgumentsContext.argument | def argument(self, argument_dest, arg_type=None, **kwargs):
""" Register an argument for the given command scope using a knack.arguments.CLIArgumentType
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param arg_type: Predefined CLIArgumentType definition to register, as modified by any provided kwargs.
:type arg_type: knack.arguments.CLIArgumentType
:param kwargs: Possible values: `options_list`, `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md.
"""
self._check_stale()
if not self._applicable():
return
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
self.command_loader.argument_registry.register_cli_argument(self.command_scope,
argument_dest,
arg_type,
**kwargs) | python | def argument(self, argument_dest, arg_type=None, **kwargs):
self._check_stale()
if not self._applicable():
return
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
self.command_loader.argument_registry.register_cli_argument(self.command_scope,
argument_dest,
arg_type,
**kwargs) | [
"def",
"argument",
"(",
"self",
",",
"argument_dest",
",",
"arg_type",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_check_stale",
"(",
")",
"if",
"not",
"self",
".",
"_applicable",
"(",
")",
":",
"return",
"deprecate_action",
"=",
"se... | Register an argument for the given command scope using a knack.arguments.CLIArgumentType
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param arg_type: Predefined CLIArgumentType definition to register, as modified by any provided kwargs.
:type arg_type: knack.arguments.CLIArgumentType
:param kwargs: Possible values: `options_list`, `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md. | [
"Register",
"an",
"argument",
"for",
"the",
"given",
"command",
"scope",
"using",
"a",
"knack",
".",
"arguments",
".",
"CLIArgumentType"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L247-L267 |
243,160 | Microsoft/knack | knack/arguments.py | ArgumentsContext.positional | def positional(self, argument_dest, arg_type=None, **kwargs):
""" Register a positional argument for the given command scope using a knack.arguments.CLIArgumentType
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param arg_type: Predefined CLIArgumentType definition to register, as modified by any provided kwargs.
:type arg_type: knack.arguments.CLIArgumentType
:param kwargs: Possible values: `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md.
"""
self._check_stale()
if not self._applicable():
return
if self.command_scope not in self.command_loader.command_table:
raise ValueError("command authoring error: positional argument '{}' cannot be registered to a group-level "
"scope '{}'. It must be registered to a specific command.".format(
argument_dest, self.command_scope))
# Before adding the new positional arg, ensure that there are no existing positional arguments
# registered for this command.
command_args = self.command_loader.argument_registry.arguments[self.command_scope]
positional_args = {k: v for k, v in command_args.items() if v.settings.get('options_list') == []}
if positional_args and argument_dest not in positional_args:
raise CLIError("command authoring error: commands may have, at most, one positional argument. '{}' already "
"has positional argument: {}.".format(self.command_scope, ' '.join(positional_args.keys())))
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
kwargs['options_list'] = []
self.command_loader.argument_registry.register_cli_argument(self.command_scope,
argument_dest,
arg_type,
**kwargs) | python | def positional(self, argument_dest, arg_type=None, **kwargs):
self._check_stale()
if not self._applicable():
return
if self.command_scope not in self.command_loader.command_table:
raise ValueError("command authoring error: positional argument '{}' cannot be registered to a group-level "
"scope '{}'. It must be registered to a specific command.".format(
argument_dest, self.command_scope))
# Before adding the new positional arg, ensure that there are no existing positional arguments
# registered for this command.
command_args = self.command_loader.argument_registry.arguments[self.command_scope]
positional_args = {k: v for k, v in command_args.items() if v.settings.get('options_list') == []}
if positional_args and argument_dest not in positional_args:
raise CLIError("command authoring error: commands may have, at most, one positional argument. '{}' already "
"has positional argument: {}.".format(self.command_scope, ' '.join(positional_args.keys())))
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
kwargs['options_list'] = []
self.command_loader.argument_registry.register_cli_argument(self.command_scope,
argument_dest,
arg_type,
**kwargs) | [
"def",
"positional",
"(",
"self",
",",
"argument_dest",
",",
"arg_type",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_check_stale",
"(",
")",
"if",
"not",
"self",
".",
"_applicable",
"(",
")",
":",
"return",
"if",
"self",
".",
"comm... | Register a positional argument for the given command scope using a knack.arguments.CLIArgumentType
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param arg_type: Predefined CLIArgumentType definition to register, as modified by any provided kwargs.
:type arg_type: knack.arguments.CLIArgumentType
:param kwargs: Possible values: `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md. | [
"Register",
"a",
"positional",
"argument",
"for",
"the",
"given",
"command",
"scope",
"using",
"a",
"knack",
".",
"arguments",
".",
"CLIArgumentType"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L269-L304 |
243,161 | Microsoft/knack | knack/arguments.py | ArgumentsContext.extra | def extra(self, argument_dest, **kwargs):
"""Register extra parameters for the given command. Typically used to augment auto-command built
commands to add more parameters than the specific SDK method introspected.
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param kwargs: Possible values: `options_list`, `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md.
"""
self._check_stale()
if not self._applicable():
return
if self.command_scope in self.command_loader.command_group_table:
raise ValueError("command authoring error: extra argument '{}' cannot be registered to a group-level "
"scope '{}'. It must be registered to a specific command.".format(
argument_dest, self.command_scope))
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
self.command_loader.extra_argument_registry[self.command_scope][argument_dest] = CLICommandArgument(
argument_dest, **kwargs) | python | def extra(self, argument_dest, **kwargs):
self._check_stale()
if not self._applicable():
return
if self.command_scope in self.command_loader.command_group_table:
raise ValueError("command authoring error: extra argument '{}' cannot be registered to a group-level "
"scope '{}'. It must be registered to a specific command.".format(
argument_dest, self.command_scope))
deprecate_action = self._handle_deprecations(argument_dest, **kwargs)
if deprecate_action:
kwargs['action'] = deprecate_action
self.command_loader.extra_argument_registry[self.command_scope][argument_dest] = CLICommandArgument(
argument_dest, **kwargs) | [
"def",
"extra",
"(",
"self",
",",
"argument_dest",
",",
"*",
"*",
"kwargs",
")",
":",
"self",
".",
"_check_stale",
"(",
")",
"if",
"not",
"self",
".",
"_applicable",
"(",
")",
":",
"return",
"if",
"self",
".",
"command_scope",
"in",
"self",
".",
"com... | Register extra parameters for the given command. Typically used to augment auto-command built
commands to add more parameters than the specific SDK method introspected.
:param argument_dest: The destination argument to add this argument type to
:type argument_dest: str
:param kwargs: Possible values: `options_list`, `validator`, `completer`, `nargs`, `action`, `const`, `default`,
`type`, `choices`, `required`, `help`, `metavar`. See /docs/arguments.md. | [
"Register",
"extra",
"parameters",
"for",
"the",
"given",
"command",
".",
"Typically",
"used",
"to",
"augment",
"auto",
"-",
"command",
"built",
"commands",
"to",
"add",
"more",
"parameters",
"than",
"the",
"specific",
"SDK",
"method",
"introspected",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/arguments.py#L319-L341 |
243,162 | Microsoft/knack | knack/commands.py | CLICommandsLoader.load_command_table | def load_command_table(self, args): # pylint: disable=unused-argument
""" Load commands into the command table
:param args: List of the arguments from the command line
:type args: list
:return: The ordered command table
:rtype: collections.OrderedDict
"""
self.cli_ctx.raise_event(EVENT_CMDLOADER_LOAD_COMMAND_TABLE, cmd_tbl=self.command_table)
return OrderedDict(self.command_table) | python | def load_command_table(self, args): # pylint: disable=unused-argument
self.cli_ctx.raise_event(EVENT_CMDLOADER_LOAD_COMMAND_TABLE, cmd_tbl=self.command_table)
return OrderedDict(self.command_table) | [
"def",
"load_command_table",
"(",
"self",
",",
"args",
")",
":",
"# pylint: disable=unused-argument",
"self",
".",
"cli_ctx",
".",
"raise_event",
"(",
"EVENT_CMDLOADER_LOAD_COMMAND_TABLE",
",",
"cmd_tbl",
"=",
"self",
".",
"command_table",
")",
"return",
"OrderedDict"... | Load commands into the command table
:param args: List of the arguments from the command line
:type args: list
:return: The ordered command table
:rtype: collections.OrderedDict | [
"Load",
"commands",
"into",
"the",
"command",
"table"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/commands.py#L189-L198 |
243,163 | Microsoft/knack | knack/commands.py | CLICommandsLoader.load_arguments | def load_arguments(self, command):
""" Load the arguments for the specified command
:param command: The command to load arguments for
:type command: str
"""
from knack.arguments import ArgumentsContext
self.cli_ctx.raise_event(EVENT_CMDLOADER_LOAD_ARGUMENTS, cmd_tbl=self.command_table, command=command)
try:
self.command_table[command].load_arguments()
except KeyError:
return
# ensure global 'cmd' is ignored
with ArgumentsContext(self, '') as c:
c.ignore('cmd')
self._apply_parameter_info(command, self.command_table[command]) | python | def load_arguments(self, command):
from knack.arguments import ArgumentsContext
self.cli_ctx.raise_event(EVENT_CMDLOADER_LOAD_ARGUMENTS, cmd_tbl=self.command_table, command=command)
try:
self.command_table[command].load_arguments()
except KeyError:
return
# ensure global 'cmd' is ignored
with ArgumentsContext(self, '') as c:
c.ignore('cmd')
self._apply_parameter_info(command, self.command_table[command]) | [
"def",
"load_arguments",
"(",
"self",
",",
"command",
")",
":",
"from",
"knack",
".",
"arguments",
"import",
"ArgumentsContext",
"self",
".",
"cli_ctx",
".",
"raise_event",
"(",
"EVENT_CMDLOADER_LOAD_ARGUMENTS",
",",
"cmd_tbl",
"=",
"self",
".",
"command_table",
... | Load the arguments for the specified command
:param command: The command to load arguments for
:type command: str | [
"Load",
"the",
"arguments",
"for",
"the",
"specified",
"command"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/commands.py#L200-L218 |
def create_command(self, name, operation, **kwargs):
    """Construct a command object that can then be added to the command table.

    :param name: Space-separated name of the command.
    :param operation: Handler reference of the form ``'module#attr.path'``.
    :param kwargs: Extra keyword arguments forwarded to the command class.
    """
    if not isinstance(operation, six.string_types):
        raise ValueError("Operation must be a string. Got '{}'".format(operation))

    # Collapse repeated whitespace in the command name.
    name = ' '.join(name.split())
    client_factory = kwargs.get('client_factory', None)

    def _command_handler(command_args):
        # Resolve the handler lazily so import cost is paid only on execution.
        handler = CLICommandsLoader._get_op_handler(operation)
        client = client_factory(command_args) if client_factory else None
        return handler(client, **command_args) if client else handler(**command_args)

    def _arguments_loader():
        return list(extract_args_from_signature(
            CLICommandsLoader._get_op_handler(operation),
            excluded_params=self.excluded_command_handler_args))

    def _description_loader():
        return extract_full_summary_from_signature(
            CLICommandsLoader._get_op_handler(operation))

    kwargs['arguments_loader'] = _arguments_loader
    kwargs['description_loader'] = _description_loader
    return self.command_cls(self.cli_ctx, name, _command_handler, **kwargs)
if not isinstance(operation, six.string_types):
raise ValueError("Operation must be a string. Got '{}'".format(operation))
name = ' '.join(name.split())
client_factory = kwargs.get('client_factory', None)
def _command_handler(command_args):
op = CLICommandsLoader._get_op_handler(operation)
client = client_factory(command_args) if client_factory else None
result = op(client, **command_args) if client else op(**command_args)
return result
def arguments_loader():
return list(extract_args_from_signature(CLICommandsLoader._get_op_handler(operation),
excluded_params=self.excluded_command_handler_args))
def description_loader():
return extract_full_summary_from_signature(CLICommandsLoader._get_op_handler(operation))
kwargs['arguments_loader'] = arguments_loader
kwargs['description_loader'] = description_loader
cmd = self.command_cls(self.cli_ctx, name, _command_handler, **kwargs)
return cmd | [
"def",
"create_command",
"(",
"self",
",",
"name",
",",
"operation",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"isinstance",
"(",
"operation",
",",
"six",
".",
"string_types",
")",
":",
"raise",
"ValueError",
"(",
"\"Operation must be a string. Got '{}'\... | Constructs the command object that can then be added to the command table | [
"Constructs",
"the",
"command",
"object",
"that",
"can",
"then",
"be",
"added",
"to",
"the",
"command",
"table"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/commands.py#L229-L255 |
def _get_op_handler(operation):
    """Import and return the handler callable named by *operation*.

    *operation* has the form ``'module.path#attr.path'``.

    :raises ValueError: if the string is malformed or the attribute is missing.
    """
    try:
        module_name, attr_path = operation.split('#')
        handler = import_module(module_name)
        for attr in attr_path.split('.'):
            handler = getattr(handler, attr)
        # Plain functions pass through; methods are unwrapped to their function.
        return handler if isinstance(handler, types.FunctionType) \
            else six.get_method_function(handler)
    except (ValueError, AttributeError):
        raise ValueError("The operation '{}' is invalid.".format(operation))
try:
mod_to_import, attr_path = operation.split('#')
op = import_module(mod_to_import)
for part in attr_path.split('.'):
op = getattr(op, part)
if isinstance(op, types.FunctionType):
return op
return six.get_method_function(op)
except (ValueError, AttributeError):
raise ValueError("The operation '{}' is invalid.".format(operation)) | [
"def",
"_get_op_handler",
"(",
"operation",
")",
":",
"try",
":",
"mod_to_import",
",",
"attr_path",
"=",
"operation",
".",
"split",
"(",
"'#'",
")",
"op",
"=",
"import_module",
"(",
"mod_to_import",
")",
"for",
"part",
"in",
"attr_path",
".",
"split",
"("... | Import and load the operation handler | [
"Import",
"and",
"load",
"the",
"operation",
"handler"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/commands.py#L258-L269 |
def command(self, name, handler_name, **kwargs):
    """Register a command into the command table.

    :param name: The name of the command
    :type name: str
    :param handler_name: The name of the handler that will be applied to the operations template
    :type handler_name: str
    :param kwargs: Kwargs to apply to the command.
        Possible values: `client_factory`, `arguments_loader`, `description_loader`, `description`,
        `formatter_class`, `table_transformer`, `deprecate_info`, `validator`, `confirmation`.
    """
    import copy

    full_name = '{} {}'.format(self.group_name, name) if self.group_name else name
    merged_kwargs = copy.deepcopy(self.group_kwargs)
    merged_kwargs.update(kwargs)
    # Deprecation status is per-command and must never be inherited from the group.
    merged_kwargs['deprecate_info'] = kwargs.get('deprecate_info', None)
    parent_group = ' '.join(full_name.split()[:-1])
    self.command_loader._populate_command_group_table_with_subgroups(parent_group)  # pylint: disable=protected-access
    self.command_loader.command_table[full_name] = self.command_loader.create_command(
        full_name,
        self.operations_tmpl.format(handler_name),
        **merged_kwargs)
import copy
command_name = '{} {}'.format(self.group_name, name) if self.group_name else name
command_kwargs = copy.deepcopy(self.group_kwargs)
command_kwargs.update(kwargs)
# don't inherit deprecation info from command group
command_kwargs['deprecate_info'] = kwargs.get('deprecate_info', None)
self.command_loader._populate_command_group_table_with_subgroups(' '.join(command_name.split()[:-1])) # pylint: disable=protected-access
self.command_loader.command_table[command_name] = self.command_loader.create_command(
command_name,
self.operations_tmpl.format(handler_name),
**command_kwargs) | [
"def",
"command",
"(",
"self",
",",
"name",
",",
"handler_name",
",",
"*",
"*",
"kwargs",
")",
":",
"import",
"copy",
"command_name",
"=",
"'{} {}'",
".",
"format",
"(",
"self",
".",
"group_name",
",",
"name",
")",
"if",
"self",
".",
"group_name",
"els... | Register a command into the command table
:param name: The name of the command
:type name: str
:param handler_name: The name of the handler that will be applied to the operations template
:type handler_name: str
:param kwargs: Kwargs to apply to the command.
Possible values: `client_factory`, `arguments_loader`, `description_loader`, `description`,
`formatter_class`, `table_transformer`, `deprecate_info`, `validator`, `confirmation`. | [
"Register",
"a",
"command",
"into",
"the",
"command",
"table"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/commands.py#L307-L330 |
def _rudimentary_get_command(self, args):
    """Best-effort parse of *args* to recover the (lower-cased) command name.

    Leading non-flag tokens are matched, longest-prefix-first, against the
    loaded command table.  The matched tokens in *args* are lower-cased in
    place so later parsing is case-insensitive.

    :param args: Raw command-line arguments (mutated in place).
    :type args: list
    :return: The space-joined, lower-cased command name ('' if none matched).
    :rtype: str
    """
    nouns = []
    command_names = self.commands_loader.command_table.keys()
    for arg in args:
        if arg and arg[0] != '-':
            nouns.append(arg)
        else:
            break

    def _find_args(candidate):
        search = ' '.join(candidate).lower()
        return next((x for x in command_names if x.startswith(search)), False)

    # The command may be immediately followed by positional args; trim from
    # the right until the remaining tokens prefix-match a known command.
    while nouns and not _find_args(nouns):
        del nouns[-1]

    # Lower-case the matched tokens in *args* for later parsing.
    for i in range(len(nouns)):
        args[i] = args[i].lower()
    # BUGFIX: also lower-case the returned command string.  `nouns` still
    # holds the original-case strings (rebinding args[i] does not change the
    # list entries appended earlier), so without .lower() a mixed-case input
    # produced a command string that missed in the lower-cased command table.
    return ' '.join(nouns).lower()
nouns = []
command_names = self.commands_loader.command_table.keys()
for arg in args:
if arg and arg[0] != '-':
nouns.append(arg)
else:
break
def _find_args(args):
search = ' '.join(args).lower()
return next((x for x in command_names if x.startswith(search)), False)
# since the command name may be immediately followed by a positional arg, strip those off
while nouns and not _find_args(nouns):
del nouns[-1]
# ensure the command string is case-insensitive
for i in range(len(nouns)):
args[i] = args[i].lower()
return ' '.join(nouns) | [
"def",
"_rudimentary_get_command",
"(",
"self",
",",
"args",
")",
":",
"nouns",
"=",
"[",
"]",
"command_names",
"=",
"self",
".",
"commands_loader",
".",
"command_table",
".",
"keys",
"(",
")",
"for",
"arg",
"in",
"args",
":",
"if",
"arg",
"and",
"arg",
... | Rudimentary parsing to get the command | [
"Rudimentary",
"parsing",
"to",
"get",
"the",
"command"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/invocation.py#L67-L89 |
def execute(self, args):
    """ Executes the command invocation
    :param args: The command arguments for this invocation
    :type args: list
    :return: The command result
    :rtype: knack.util.CommandResultItem
    """
    import colorama

    # Build the command table, then determine which command is being invoked
    # so that only that command's arguments need to be loaded.
    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
    cmd_tbl = self.commands_loader.load_command_table(args)
    command = self._rudimentary_get_command(args)
    self.cli_ctx.invocation.data['command_string'] = command
    self.commands_loader.load_arguments(command)
    self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
    self.parser.load_command_table(self.commands_loader)
    self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)

    # No real arguments (only verbosity flags): show the welcome screen.
    arg_check = [a for a in args if a not in ['--verbose', '--debug']]
    if not arg_check:
        self.cli_ctx.completion.enable_autocomplete(self.parser)
        subparser = self.parser.subparsers[tuple()]
        self.help.show_welcome(subparser)
        return CommandResultItem(None, exit_code=0)

    # A leading 'help' token is shorthand for '--help'.
    if args[0].lower() == 'help':
        args[0] = '--help'
    self.cli_ctx.completion.enable_autocomplete(self.parser)

    self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
    parsed_args = self.parser.parse_args(args)
    self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)

    self._validation(parsed_args)

    # save the command name (leaf in the tree)
    self.data['command'] = parsed_args.command

    cmd = parsed_args.func
    if hasattr(parsed_args, 'cmd'):
        parsed_args.cmd = cmd

    # Collect argument-level deprecations plus the command's own, if any.
    deprecations = getattr(parsed_args, '_argument_deprecations', [])
    if cmd.deprecate_info:
        deprecations.append(cmd.deprecate_info)

    params = self._filter_params(parsed_args)

    # search for implicit deprecation: walk up the command-group path and
    # inherit a deprecation declared on the nearest deprecated ancestor group.
    path_comps = cmd.name.split()[:-1]
    implicit_deprecate_info = None
    while path_comps and not implicit_deprecate_info:
        implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
        del path_comps[-1]

    if implicit_deprecate_info:
        # Re-tag the group-level deprecation as a command-level one; the
        # private accessor slots must not be passed to the constructor.
        deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
        deprecate_kwargs['object_type'] = 'command'
        del deprecate_kwargs['_get_tag']
        del deprecate_kwargs['_get_message']
        deprecations.append(ImplicitDeprecated(**deprecate_kwargs))

    # Emit deprecation warnings to stderr with ANSI color support enabled.
    colorama.init()
    for d in deprecations:
        print(d.message, file=sys.stderr)
    colorama.deinit()

    cmd_result = parsed_args.func(params)
    cmd_result = todict(cmd_result)

    # Give registered event handlers a chance to transform/filter the result.
    event_data = {'result': cmd_result}
    self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
    self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)

    return CommandResultItem(event_data['result'],
                             exit_code=0,
                             table_transformer=cmd_tbl[parsed_args.command].table_transformer,
                             is_query_active=self.data['query_active'])
import colorama
self.cli_ctx.raise_event(EVENT_INVOKER_PRE_CMD_TBL_CREATE, args=args)
cmd_tbl = self.commands_loader.load_command_table(args)
command = self._rudimentary_get_command(args)
self.cli_ctx.invocation.data['command_string'] = command
self.commands_loader.load_arguments(command)
self.cli_ctx.raise_event(EVENT_INVOKER_POST_CMD_TBL_CREATE, cmd_tbl=cmd_tbl)
self.parser.load_command_table(self.commands_loader)
self.cli_ctx.raise_event(EVENT_INVOKER_CMD_TBL_LOADED, parser=self.parser)
arg_check = [a for a in args if a not in ['--verbose', '--debug']]
if not arg_check:
self.cli_ctx.completion.enable_autocomplete(self.parser)
subparser = self.parser.subparsers[tuple()]
self.help.show_welcome(subparser)
return CommandResultItem(None, exit_code=0)
if args[0].lower() == 'help':
args[0] = '--help'
self.cli_ctx.completion.enable_autocomplete(self.parser)
self.cli_ctx.raise_event(EVENT_INVOKER_PRE_PARSE_ARGS, args=args)
parsed_args = self.parser.parse_args(args)
self.cli_ctx.raise_event(EVENT_INVOKER_POST_PARSE_ARGS, command=parsed_args.command, args=parsed_args)
self._validation(parsed_args)
# save the command name (leaf in the tree)
self.data['command'] = parsed_args.command
cmd = parsed_args.func
if hasattr(parsed_args, 'cmd'):
parsed_args.cmd = cmd
deprecations = getattr(parsed_args, '_argument_deprecations', [])
if cmd.deprecate_info:
deprecations.append(cmd.deprecate_info)
params = self._filter_params(parsed_args)
# search for implicit deprecation
path_comps = cmd.name.split()[:-1]
implicit_deprecate_info = None
while path_comps and not implicit_deprecate_info:
implicit_deprecate_info = resolve_deprecate_info(self.cli_ctx, ' '.join(path_comps))
del path_comps[-1]
if implicit_deprecate_info:
deprecate_kwargs = implicit_deprecate_info.__dict__.copy()
deprecate_kwargs['object_type'] = 'command'
del deprecate_kwargs['_get_tag']
del deprecate_kwargs['_get_message']
deprecations.append(ImplicitDeprecated(**deprecate_kwargs))
colorama.init()
for d in deprecations:
print(d.message, file=sys.stderr)
colorama.deinit()
cmd_result = parsed_args.func(params)
cmd_result = todict(cmd_result)
event_data = {'result': cmd_result}
self.cli_ctx.raise_event(EVENT_INVOKER_TRANSFORM_RESULT, event_data=event_data)
self.cli_ctx.raise_event(EVENT_INVOKER_FILTER_RESULT, event_data=event_data)
return CommandResultItem(event_data['result'],
exit_code=0,
table_transformer=cmd_tbl[parsed_args.command].table_transformer,
is_query_active=self.data['query_active']) | [
"def",
"execute",
"(",
"self",
",",
"args",
")",
":",
"import",
"colorama",
"self",
".",
"cli_ctx",
".",
"raise_event",
"(",
"EVENT_INVOKER_PRE_CMD_TBL_CREATE",
",",
"args",
"=",
"args",
")",
"cmd_tbl",
"=",
"self",
".",
"commands_loader",
".",
"load_command_t... | Executes the command invocation
:param args: The command arguments for this invocation
:type args: list
:return: The command result
:rtype: knack.util.CommandResultItem | [
"Executes",
"the",
"command",
"invocation"
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/invocation.py#L120-L198 |
def get_completion_args(self, is_completion=False, comp_line=None):  # pylint: disable=no-self-use
    """Return the args used for tab completion, or None when completion is inactive."""
    active = is_completion or os.environ.get(ARGCOMPLETE_ENV_NAME)
    line = comp_line or os.environ.get('COMP_LINE')
    if not (active and line):
        return None
    # The first token is the executable name; only the arguments matter.
    return line.split()[1:]
is_completion = is_completion or os.environ.get(ARGCOMPLETE_ENV_NAME)
comp_line = comp_line or os.environ.get('COMP_LINE')
# The first item is the exe name so ignore that.
return comp_line.split()[1:] if is_completion and comp_line else None | [
"def",
"get_completion_args",
"(",
"self",
",",
"is_completion",
"=",
"False",
",",
"comp_line",
"=",
"None",
")",
":",
"# pylint: disable=no-self-use",
"is_completion",
"=",
"is_completion",
"or",
"os",
".",
"environ",
".",
"get",
"(",
"ARGCOMPLETE_ENV_NAME",
")"... | Get the args that will be used to tab completion if completion is active. | [
"Get",
"the",
"args",
"that",
"will",
"be",
"used",
"to",
"tab",
"completion",
"if",
"completion",
"is",
"active",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/completion.py#L37-L42 |
def out(self, obj, formatter=None, out_file=None):  # pylint: disable=no-self-use
    """Write the formatted command result to *out_file*.

    Nothing is returned; the rendered text is written straight to the stream.

    :param obj: The command result
    :type obj: knack.util.CommandResultItem
    :param formatter: Callable that renders *obj* to text
    :type formatter: function
    :param out_file: Destination stream
    :type out_file: file-like object
    """
    if not isinstance(obj, CommandResultItem):
        raise TypeError('Expected {} got {}'.format(CommandResultItem.__name__, type(obj)))

    import platform
    import colorama

    # On Windows, translate ANSI escape sequences for the console.
    if platform.system() == 'Windows':
        out_file = colorama.AnsiToWin32(out_file).stream

    rendered = formatter(obj)
    try:
        print(rendered, file=out_file, end='')
    except IOError as ex:
        # A broken pipe (e.g. piping into `head`) is not worth surfacing.
        if ex.errno != errno.EPIPE:
            raise
    except UnicodeEncodeError:
        # Fall back to an ASCII-safe rendering for limited terminals.
        print(rendered.encode('ascii', 'ignore').decode('utf-8', 'ignore'),
              file=out_file, end='')
if not isinstance(obj, CommandResultItem):
raise TypeError('Expected {} got {}'.format(CommandResultItem.__name__, type(obj)))
import platform
import colorama
if platform.system() == 'Windows':
out_file = colorama.AnsiToWin32(out_file).stream
output = formatter(obj)
try:
print(output, file=out_file, end='')
except IOError as ex:
if ex.errno == errno.EPIPE:
pass
else:
raise
except UnicodeEncodeError:
print(output.encode('ascii', 'ignore').decode('utf-8', 'ignore'),
file=out_file, end='') | [
"def",
"out",
"(",
"self",
",",
"obj",
",",
"formatter",
"=",
"None",
",",
"out_file",
"=",
"None",
")",
":",
"# pylint: disable=no-self-use",
"if",
"not",
"isinstance",
"(",
"obj",
",",
"CommandResultItem",
")",
":",
"raise",
"TypeError",
"(",
"'Expected {}... | Produces the output using the command result.
The method does not return a result as the output is written straight to the output file.
:param obj: The command result
:type obj: knack.util.CommandResultItem
:param formatter: The formatter we should use for the command result
:type formatter: function
:param out_file: The file to write output to
:type out_file: file-like object | [
"Produces",
"the",
"output",
"using",
"the",
"command",
"result",
".",
"The",
"method",
"does",
"not",
"return",
"a",
"result",
"as",
"the",
"output",
"is",
"written",
"straight",
"to",
"the",
"output",
"file",
"."
] | 5f1a480a33f103e2688c46eef59fb2d9eaf2baad | https://github.com/Microsoft/knack/blob/5f1a480a33f103e2688c46eef59fb2d9eaf2baad/knack/output.py#L113-L142 |
def update_status_with_media(self, **params):  # pragma: no cover
    """Update the authenticated user's status with an attached media upload.

    In other words, create a Tweet with a picture attached.

    Docs:
    https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update_with_media
    """
    # Deprecated endpoint; kept for backward compatibility only.
    notice = 'This method is deprecated. You should use Twython.upload_media instead.'
    warnings.warn(notice, TwythonDeprecationWarning, stacklevel=2)
    return self.post('statuses/update_with_media', params=params)
warnings.warn(
'This method is deprecated. You should use Twython.upload_media instead.',
TwythonDeprecationWarning,
stacklevel=2
)
return self.post('statuses/update_with_media', params=params) | [
"def",
"update_status_with_media",
"(",
"self",
",",
"*",
"*",
"params",
")",
":",
"# pragma: no cover",
"warnings",
".",
"warn",
"(",
"'This method is deprecated. You should use Twython.upload_media instead.'",
",",
"TwythonDeprecationWarning",
",",
"stacklevel",
"=",
"2",... | Updates the authenticating user's current status and attaches media
for upload. In other words, it creates a Tweet with a picture attached.
Docs:
https://developer.twitter.com/en/docs/tweets/post-and-engage/api-reference/post-statuses-update_with_media | [
"Updates",
"the",
"authenticating",
"user",
"s",
"current",
"status",
"and",
"attaches",
"media",
"for",
"upload",
".",
"In",
"other",
"words",
"it",
"creates",
"a",
"Tweet",
"with",
"a",
"picture",
"attached",
"."
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/endpoints.py#L134-L147 |
def create_metadata(self, **params):
    """Attach metadata to an uploaded media entity (e.g. alt text for images).

    Docs:
    https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create
    """
    # This endpoint requires a JSON-encoded body rather than form parameters.
    payload = json.dumps(params)
    return self.post("https://upload.twitter.com/1.1/media/metadata/create.json", params=payload)
params = json.dumps(params)
return self.post("https://upload.twitter.com/1.1/media/metadata/create.json", params=params) | [
"def",
"create_metadata",
"(",
"self",
",",
"*",
"*",
"params",
")",
":",
"params",
"=",
"json",
".",
"dumps",
"(",
"params",
")",
"return",
"self",
".",
"post",
"(",
"\"https://upload.twitter.com/1.1/media/metadata/create.json\"",
",",
"params",
"=",
"params",
... | Adds metadata to a media element, such as image descriptions for visually impaired.
Docs:
https://developer.twitter.com/en/docs/media/upload-media/api-reference/post-media-metadata-create | [
"Adds",
"metadata",
"to",
"a",
"media",
"element",
"such",
"as",
"image",
"descriptions",
"for",
"visually",
"impaired",
"."
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/endpoints.py#L164-L172 |
def _get_error_message(self, response):
    """Parse the response body and return the first Twitter error message.

    Falls back to a generic message when the body is not JSON or does not
    carry the expected ``{"errors": [{"message": ...}]}`` structure.

    :param response: HTTP response whose body may contain a Twitter error
    :return: Human-readable error message
    :rtype: str
    """
    default_message = 'An error occurred processing your request.'
    try:
        content = response.json()
    except ValueError:
        # bad json data from Twitter for an error
        return default_message

    # Typical payload:
    # {"errors":[{"code":34,"message":"Sorry, that page does not exist"}]}
    try:
        return content['errors'][0]['message']
    except TypeError:
        # 'errors' may be a plain string rather than a list of dicts.
        # BUGFIX: guard this fallback too -- previously, content that was not
        # subscriptable (e.g. a bare int/None payload) re-raised TypeError,
        # and a TypeError from response.json() itself hit an unbound
        # 'content' variable.
        try:
            return content['errors']
        except (TypeError, KeyError):
            return default_message
    except (KeyError, IndexError):
        # missing data so fallback to default message
        return default_message
error_message = 'An error occurred processing your request.'
try:
content = response.json()
# {"errors":[{"code":34,"message":"Sorry,
# that page does not exist"}]}
error_message = content['errors'][0]['message']
except TypeError:
error_message = content['errors']
except ValueError:
# bad json data from Twitter for an error
pass
except (KeyError, IndexError):
# missing data so fallback to default message
pass
return error_message | [
"def",
"_get_error_message",
"(",
"self",
",",
"response",
")",
":",
"error_message",
"=",
"'An error occurred processing your request.'",
"try",
":",
"content",
"=",
"response",
".",
"json",
"(",
")",
"# {\"errors\":[{\"code\":34,\"message\":\"Sorry,",
"# that page does no... | Parse and return the first error message | [
"Parse",
"and",
"return",
"the",
"first",
"error",
"message"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L218-L236 |
def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False):
    """Issue an API call and return the decoded response.

    :param endpoint: Full url or Twitter API endpoint (e.g. ``search/tweets``)
    :type endpoint: string
    :param method: HTTP method: GET, POST or DELETE (default GET)
    :type method: string
    :param params: Parameters accepted by the endpoint, if any (default None)
    :type params: dict or None
    :param version: Twitter API version to access (default 1.1)
    :type version: string
    :param json_encoded: Send the request body encoded as JSON (default False)
    :type json_encoded: bool
    :rtype: dict
    """
    if endpoint.startswith('http://'):
        raise TwythonError('api.twitter.com is restricted to SSL/TLS traffic.')

    # Full URLs (e.g. https://api.twitter.com/1.1/search/tweets.json) are used
    # verbatim; bare endpoints are expanded against the versioned API root.
    if endpoint.startswith('https://'):
        url = endpoint
    else:
        url = '%s/%s.json' % (self.api_url % version, endpoint)

    return self._request(url, method=method, params=params,
                         api_call=url, json_encoded=json_encoded)
if endpoint.startswith('http://'):
raise TwythonError('api.twitter.com is restricted to SSL/TLS traffic.')
# In case they want to pass a full Twitter URL
# i.e. https://api.twitter.com/1.1/search/tweets.json
if endpoint.startswith('https://'):
url = endpoint
else:
url = '%s/%s.json' % (self.api_url % version, endpoint)
content = self._request(url, method=method, params=params,
api_call=url, json_encoded=json_encoded)
return content | [
"def",
"request",
"(",
"self",
",",
"endpoint",
",",
"method",
"=",
"'GET'",
",",
"params",
"=",
"None",
",",
"version",
"=",
"'1.1'",
",",
"json_encoded",
"=",
"False",
")",
":",
"if",
"endpoint",
".",
"startswith",
"(",
"'http://'",
")",
":",
"raise"... | Return dict of response received from Twitter's API
:param endpoint: (required) Full url or Twitter API endpoint
(e.g. search/tweets)
:type endpoint: string
:param method: (optional) Method of accessing data, either
GET, POST or DELETE. (default GET)
:type method: string
:param params: (optional) Dict of parameters (if any) accepted
the by Twitter API endpoint you are trying to
access (default None)
:type params: dict or None
:param version: (optional) Twitter API version to access
(default 1.1)
:type version: string
:param json_encoded: (optional) Flag to indicate if this method should send data encoded as json
(default False)
:type json_encoded: bool
:rtype: dict | [
"Return",
"dict",
"of",
"response",
"received",
"from",
"Twitter",
"s",
"API"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L238-L274 |
def get_lastfunction_header(self, header, default_return_value=None):
    """Return a header recorded from the most recent API call.

    Returns *default_return_value* when the header was not present.

    :param header: (required) Name of the header to look up.
        Most useful for: x-rate-limit-limit, x-rate-limit-remaining,
        x-rate-limit-class, x-rate-limit-reset
    """
    if self._last_call is None:
        raise TwythonError('This function must be called after an API call. \
            It delivers header information.')
    recorded_headers = self._last_call['headers']
    return recorded_headers.get(header, default_return_value)
if self._last_call is None:
raise TwythonError('This function must be called after an API call. \
It delivers header information.')
return self._last_call['headers'].get(header, default_return_value) | [
"def",
"get_lastfunction_header",
"(",
"self",
",",
"header",
",",
"default_return_value",
"=",
"None",
")",
":",
"if",
"self",
".",
"_last_call",
"is",
"None",
":",
"raise",
"TwythonError",
"(",
"'This function must be called after an API call. \\\n ... | Returns a specific header from the last API call
This will return None if the header is not present
:param header: (required) The name of the header you want to get
the value of
Most useful for the following header information:
x-rate-limit-limit,
x-rate-limit-remaining,
x-rate-limit-class,
x-rate-limit-reset | [
"Returns",
"a",
"specific",
"header",
"from",
"the",
"last",
"API",
"call",
"This",
"will",
"return",
"None",
"if",
"the",
"header",
"is",
"not",
"present"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L288-L306 |
def get_authentication_tokens(self, callback_url=None, force_login=False,
                              screen_name=''):
    """Return request tokens plus an ``auth_url`` to direct a user to.

    :param callback_url: (optional) Url the user is returned to after
                         they authorize your app (web clients only)
    :param force_login: (optional) Forces the user to enter their
                        credentials to ensure the correct users
                        account is authorized.
    :param screen_name: (optional) If forced_login is set OR user is
                        not currently logged in, Prefills the username
                        input box of the OAuth login screen with the
                        given value
    :rtype: dict
    """
    if self.oauth_version != 1:
        raise TwythonError('This method can only be called when your \
            OAuth version is 1.0.')

    callback_args = {'oauth_callback': callback_url} if callback_url else {}
    token_response = self.client.get(self.request_token_url, params=callback_args)

    status = token_response.status_code
    if status == 401:
        raise TwythonAuthError(token_response.content, error_code=status)
    if status != 200:
        raise TwythonError(token_response.content, error_code=status)

    tokens = dict(parse_qsl(token_response.content.decode('utf-8')))
    if not tokens:
        raise TwythonError('Unable to decode request tokens.')

    callback_confirmed = tokens.get('oauth_callback_confirmed') == 'true'

    query = {'oauth_token': tokens['oauth_token']}
    if force_login:
        query['force_login'] = force_login
        query['screen_name'] = screen_name

    # Fall back to the old-style callback argument when the server did not
    # confirm the new-style one.
    # NOTE(review): this reads self.callback_url rather than the local
    # callback_url parameter -- presumably set in __init__; confirm intent.
    if callback_url and not callback_confirmed:
        query['oauth_callback'] = self.callback_url

    tokens['auth_url'] = self.authenticate_url + '?' + urlencode(query)
    return tokens
screen_name=''):
if self.oauth_version != 1:
raise TwythonError('This method can only be called when your \
OAuth version is 1.0.')
request_args = {}
if callback_url:
request_args['oauth_callback'] = callback_url
response = self.client.get(self.request_token_url, params=request_args)
if response.status_code == 401:
raise TwythonAuthError(response.content,
error_code=response.status_code)
elif response.status_code != 200:
raise TwythonError(response.content,
error_code=response.status_code)
request_tokens = dict(parse_qsl(response.content.decode('utf-8')))
if not request_tokens:
raise TwythonError('Unable to decode request tokens.')
oauth_callback_confirmed = request_tokens.get('oauth_callback_confirmed') \
== 'true'
auth_url_params = {
'oauth_token': request_tokens['oauth_token'],
}
if force_login:
auth_url_params.update({
'force_login': force_login,
'screen_name': screen_name
})
# Use old-style callback argument if server didn't accept new-style
if callback_url and not oauth_callback_confirmed:
auth_url_params['oauth_callback'] = self.callback_url
request_tokens['auth_url'] = self.authenticate_url + \
'?' + urlencode(auth_url_params)
return request_tokens | [
"def",
"get_authentication_tokens",
"(",
"self",
",",
"callback_url",
"=",
"None",
",",
"force_login",
"=",
"False",
",",
"screen_name",
"=",
"''",
")",
":",
"if",
"self",
".",
"oauth_version",
"!=",
"1",
":",
"raise",
"TwythonError",
"(",
"'This method can on... | Returns a dict including an authorization URL, ``auth_url``, to
direct a user to
:param callback_url: (optional) Url the user is returned to after
they authorize your app (web clients only)
:param force_login: (optional) Forces the user to enter their
credentials to ensure the correct users
account is authorized.
:param screen_name: (optional) If forced_login is set OR user is
not currently logged in, Prefills the username
input box of the OAuth login screen with the
given value
:rtype: dict | [
"Returns",
"a",
"dict",
"including",
"an",
"authorization",
"URL",
"auth_url",
"to",
"direct",
"a",
"user",
"to"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L308-L365 |
243,177 | ryanmcgrath/twython | twython/api.py | Twython.obtain_access_token | def obtain_access_token(self):
"""Returns an OAuth 2 access token to make OAuth 2 authenticated
read-only calls.
:rtype: string
"""
if self.oauth_version != 2:
raise TwythonError('This method can only be called when your \
OAuth version is 2.0.')
data = {'grant_type': 'client_credentials'}
basic_auth = HTTPBasicAuth(self.app_key, self.app_secret)
try:
response = self.client.post(self.request_token_url,
data=data, auth=basic_auth)
content = response.content.decode('utf-8')
try:
content = content.json()
except AttributeError:
content = json.loads(content)
access_token = content['access_token']
except (KeyError, ValueError, requests.exceptions.RequestException):
raise TwythonAuthError('Unable to obtain OAuth 2 access token.')
else:
return access_token | python | def obtain_access_token(self):
if self.oauth_version != 2:
raise TwythonError('This method can only be called when your \
OAuth version is 2.0.')
data = {'grant_type': 'client_credentials'}
basic_auth = HTTPBasicAuth(self.app_key, self.app_secret)
try:
response = self.client.post(self.request_token_url,
data=data, auth=basic_auth)
content = response.content.decode('utf-8')
try:
content = content.json()
except AttributeError:
content = json.loads(content)
access_token = content['access_token']
except (KeyError, ValueError, requests.exceptions.RequestException):
raise TwythonAuthError('Unable to obtain OAuth 2 access token.')
else:
return access_token | [
"def",
"obtain_access_token",
"(",
"self",
")",
":",
"if",
"self",
".",
"oauth_version",
"!=",
"2",
":",
"raise",
"TwythonError",
"(",
"'This method can only be called when your \\\n OAuth version is 2.0.'",
")",
"data",
"=",
"{",
"'grant_type'... | Returns an OAuth 2 access token to make OAuth 2 authenticated
read-only calls.
:rtype: string | [
"Returns",
"an",
"OAuth",
"2",
"access",
"token",
"to",
"make",
"OAuth",
"2",
"authenticated",
"read",
"-",
"only",
"calls",
"."
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L405-L429 |
243,178 | ryanmcgrath/twython | twython/api.py | Twython.construct_api_url | def construct_api_url(api_url, **params):
"""Construct a Twitter API url, encoded, with parameters
:param api_url: URL of the Twitter API endpoint you are attempting
to construct
:param \*\*params: Parameters that are accepted by Twitter for the
endpoint you're requesting
:rtype: string
Usage::
>>> from twython import Twython
>>> twitter = Twython()
>>> api_url = 'https://api.twitter.com/1.1/search/tweets.json'
>>> constructed_url = twitter.construct_api_url(api_url, q='python',
result_type='popular')
>>> print constructed_url
https://api.twitter.com/1.1/search/tweets.json?q=python&result_type=popular
"""
querystring = []
params, _ = _transparent_params(params or {})
params = requests.utils.to_key_val_list(params)
for (k, v) in params:
querystring.append(
'%s=%s' % (Twython.encode(k), quote_plus(Twython.encode(v)))
)
return '%s?%s' % (api_url, '&'.join(querystring)) | python | def construct_api_url(api_url, **params):
querystring = []
params, _ = _transparent_params(params or {})
params = requests.utils.to_key_val_list(params)
for (k, v) in params:
querystring.append(
'%s=%s' % (Twython.encode(k), quote_plus(Twython.encode(v)))
)
return '%s?%s' % (api_url, '&'.join(querystring)) | [
"def",
"construct_api_url",
"(",
"api_url",
",",
"*",
"*",
"params",
")",
":",
"querystring",
"=",
"[",
"]",
"params",
",",
"_",
"=",
"_transparent_params",
"(",
"params",
"or",
"{",
"}",
")",
"params",
"=",
"requests",
".",
"utils",
".",
"to_key_val_lis... | Construct a Twitter API url, encoded, with parameters
:param api_url: URL of the Twitter API endpoint you are attempting
to construct
:param \*\*params: Parameters that are accepted by Twitter for the
endpoint you're requesting
:rtype: string
Usage::
>>> from twython import Twython
>>> twitter = Twython()
>>> api_url = 'https://api.twitter.com/1.1/search/tweets.json'
>>> constructed_url = twitter.construct_api_url(api_url, q='python',
result_type='popular')
>>> print constructed_url
https://api.twitter.com/1.1/search/tweets.json?q=python&result_type=popular | [
"Construct",
"a",
"Twitter",
"API",
"url",
"encoded",
"with",
"parameters"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L432-L460 |
243,179 | ryanmcgrath/twython | twython/api.py | Twython.cursor | def cursor(self, function, return_pages=False, **params):
"""Returns a generator for results that match a specified query.
:param function: Instance of a Twython function
(Twython.get_home_timeline, Twython.search)
:param \*\*params: Extra parameters to send with your request
(usually parameters accepted by the Twitter API endpoint)
:rtype: generator
Usage::
>>> from twython import Twython
>>> twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN,
OAUTH_TOKEN_SECRET)
>>> results = twitter.cursor(twitter.search, q='python')
>>> for result in results:
>>> print result
"""
if not callable(function):
raise TypeError('.cursor() takes a Twython function as its first \
argument. Did you provide the result of a \
function call?')
if not hasattr(function, 'iter_mode'):
raise TwythonError('Unable to create generator for Twython \
method "%s"' % function.__name__)
while True:
content = function(**params)
if not content:
raise StopIteration
if hasattr(function, 'iter_key'):
results = content.get(function.iter_key)
else:
results = content
if return_pages:
yield results
else:
for result in results:
yield result
if function.iter_mode == 'cursor' and \
content['next_cursor_str'] == '0':
raise StopIteration
try:
if function.iter_mode == 'id':
# Set max_id in params to one less than lowest tweet id
if hasattr(function, 'iter_metadata'):
# Get supplied next max_id
metadata = content.get(function.iter_metadata)
if 'next_results' in metadata:
next_results = urlsplit(metadata['next_results'])
params = dict(parse_qsl(next_results.query))
else:
# No more results
raise StopIteration
else:
# Twitter gives tweets in reverse chronological order:
params['max_id'] = str(int(content[-1]['id_str']) - 1)
elif function.iter_mode == 'cursor':
params['cursor'] = content['next_cursor_str']
except (TypeError, ValueError): # pragma: no cover
raise TwythonError('Unable to generate next page of search \
results, `page` is not a number.')
except (KeyError, AttributeError): #pragma no cover
raise TwythonError('Unable to generate next page of search \
results, content has unexpected structure.') | python | def cursor(self, function, return_pages=False, **params):
if not callable(function):
raise TypeError('.cursor() takes a Twython function as its first \
argument. Did you provide the result of a \
function call?')
if not hasattr(function, 'iter_mode'):
raise TwythonError('Unable to create generator for Twython \
method "%s"' % function.__name__)
while True:
content = function(**params)
if not content:
raise StopIteration
if hasattr(function, 'iter_key'):
results = content.get(function.iter_key)
else:
results = content
if return_pages:
yield results
else:
for result in results:
yield result
if function.iter_mode == 'cursor' and \
content['next_cursor_str'] == '0':
raise StopIteration
try:
if function.iter_mode == 'id':
# Set max_id in params to one less than lowest tweet id
if hasattr(function, 'iter_metadata'):
# Get supplied next max_id
metadata = content.get(function.iter_metadata)
if 'next_results' in metadata:
next_results = urlsplit(metadata['next_results'])
params = dict(parse_qsl(next_results.query))
else:
# No more results
raise StopIteration
else:
# Twitter gives tweets in reverse chronological order:
params['max_id'] = str(int(content[-1]['id_str']) - 1)
elif function.iter_mode == 'cursor':
params['cursor'] = content['next_cursor_str']
except (TypeError, ValueError): # pragma: no cover
raise TwythonError('Unable to generate next page of search \
results, `page` is not a number.')
except (KeyError, AttributeError): #pragma no cover
raise TwythonError('Unable to generate next page of search \
results, content has unexpected structure.') | [
"def",
"cursor",
"(",
"self",
",",
"function",
",",
"return_pages",
"=",
"False",
",",
"*",
"*",
"params",
")",
":",
"if",
"not",
"callable",
"(",
"function",
")",
":",
"raise",
"TypeError",
"(",
"'.cursor() takes a Twython function as its first \\\n ... | Returns a generator for results that match a specified query.
:param function: Instance of a Twython function
(Twython.get_home_timeline, Twython.search)
:param \*\*params: Extra parameters to send with your request
(usually parameters accepted by the Twitter API endpoint)
:rtype: generator
Usage::
>>> from twython import Twython
>>> twitter = Twython(APP_KEY, APP_SECRET, OAUTH_TOKEN,
OAUTH_TOKEN_SECRET)
>>> results = twitter.cursor(twitter.search, q='python')
>>> for result in results:
>>> print result | [
"Returns",
"a",
"generator",
"for",
"results",
"that",
"match",
"a",
"specified",
"query",
"."
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/api.py#L471-L543 |
243,180 | ryanmcgrath/twython | twython/streaming/api.py | TwythonStreamer._request | def _request(self, url, method='GET', params=None):
"""Internal stream request handling"""
self.connected = True
retry_counter = 0
method = method.lower()
func = getattr(self.client, method)
params, _ = _transparent_params(params)
def _send(retry_counter):
requests_args = {}
for k, v in self.client_args.items():
# Maybe this should be set as a class
# variable and only done once?
if k in ('timeout', 'allow_redirects', 'verify'):
requests_args[k] = v
while self.connected:
try:
if method == 'get':
requests_args['params'] = params
else:
requests_args['data'] = params
response = func(url, **requests_args)
except requests.exceptions.Timeout:
self.on_timeout()
else:
if response.status_code != 200:
self.on_error(response.status_code, response.content)
if self.retry_count and \
(self.retry_count - retry_counter) > 0:
time.sleep(self.retry_in)
retry_counter += 1
_send(retry_counter)
return response
while self.connected:
response = _send(retry_counter)
for line in response.iter_lines(self.chunk_size):
if not self.connected:
break
if line:
try:
if is_py3:
line = line.decode('utf-8')
data = json.loads(line)
except ValueError: # pragma: no cover
self.on_error(response.status_code,
'Unable to decode response, \
not valid JSON.')
else:
if self.on_success(data): # pragma: no cover
for message_type in self.handlers:
if message_type in data:
handler = getattr(self,
'on_' + message_type,
None)
if handler \
and callable(handler) \
and not handler(data.get(message_type)):
break
response.close() | python | def _request(self, url, method='GET', params=None):
self.connected = True
retry_counter = 0
method = method.lower()
func = getattr(self.client, method)
params, _ = _transparent_params(params)
def _send(retry_counter):
requests_args = {}
for k, v in self.client_args.items():
# Maybe this should be set as a class
# variable and only done once?
if k in ('timeout', 'allow_redirects', 'verify'):
requests_args[k] = v
while self.connected:
try:
if method == 'get':
requests_args['params'] = params
else:
requests_args['data'] = params
response = func(url, **requests_args)
except requests.exceptions.Timeout:
self.on_timeout()
else:
if response.status_code != 200:
self.on_error(response.status_code, response.content)
if self.retry_count and \
(self.retry_count - retry_counter) > 0:
time.sleep(self.retry_in)
retry_counter += 1
_send(retry_counter)
return response
while self.connected:
response = _send(retry_counter)
for line in response.iter_lines(self.chunk_size):
if not self.connected:
break
if line:
try:
if is_py3:
line = line.decode('utf-8')
data = json.loads(line)
except ValueError: # pragma: no cover
self.on_error(response.status_code,
'Unable to decode response, \
not valid JSON.')
else:
if self.on_success(data): # pragma: no cover
for message_type in self.handlers:
if message_type in data:
handler = getattr(self,
'on_' + message_type,
None)
if handler \
and callable(handler) \
and not handler(data.get(message_type)):
break
response.close() | [
"def",
"_request",
"(",
"self",
",",
"url",
",",
"method",
"=",
"'GET'",
",",
"params",
"=",
"None",
")",
":",
"self",
".",
"connected",
"=",
"True",
"retry_counter",
"=",
"0",
"method",
"=",
"method",
".",
"lower",
"(",
")",
"func",
"=",
"getattr",
... | Internal stream request handling | [
"Internal",
"stream",
"request",
"handling"
] | 7366de80efcbbdfaf615d3f1fea72546196916fc | https://github.com/ryanmcgrath/twython/blob/7366de80efcbbdfaf615d3f1fea72546196916fc/twython/streaming/api.py#L99-L165 |
243,181 | orsinium/textdistance | textdistance/libraries.py | LibrariesManager.optimize | def optimize(self):
"""Sort algorithm implementations by speed.
"""
# load benchmarks results
with open(LIBRARIES_FILE, 'r') as f:
libs_data = json.load(f)
# optimize
for alg, libs_names in libs_data.items():
libs = self.get_libs(alg)
if not libs:
continue
# drop slow libs
self.libs[alg] = [lib for lib in libs if [lib.module_name, lib.func_name] in libs_names]
# sort libs by speed
self.libs[alg].sort(key=lambda lib: libs_names.index([lib.module_name, lib.func_name])) | python | def optimize(self):
# load benchmarks results
with open(LIBRARIES_FILE, 'r') as f:
libs_data = json.load(f)
# optimize
for alg, libs_names in libs_data.items():
libs = self.get_libs(alg)
if not libs:
continue
# drop slow libs
self.libs[alg] = [lib for lib in libs if [lib.module_name, lib.func_name] in libs_names]
# sort libs by speed
self.libs[alg].sort(key=lambda lib: libs_names.index([lib.module_name, lib.func_name])) | [
"def",
"optimize",
"(",
"self",
")",
":",
"# load benchmarks results",
"with",
"open",
"(",
"LIBRARIES_FILE",
",",
"'r'",
")",
"as",
"f",
":",
"libs_data",
"=",
"json",
".",
"load",
"(",
"f",
")",
"# optimize",
"for",
"alg",
",",
"libs_names",
"in",
"lib... | Sort algorithm implementations by speed. | [
"Sort",
"algorithm",
"implementations",
"by",
"speed",
"."
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/libraries.py#L23-L37 |
243,182 | orsinium/textdistance | textdistance/libraries.py | LibrariesManager.clone | def clone(self):
"""Clone library manager prototype
"""
obj = self.__class__()
obj.libs = deepcopy(self.libs)
return obj | python | def clone(self):
obj = self.__class__()
obj.libs = deepcopy(self.libs)
return obj | [
"def",
"clone",
"(",
"self",
")",
":",
"obj",
"=",
"self",
".",
"__class__",
"(",
")",
"obj",
".",
"libs",
"=",
"deepcopy",
"(",
"self",
".",
"libs",
")",
"return",
"obj"
] | Clone library manager prototype | [
"Clone",
"library",
"manager",
"prototype"
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/libraries.py#L51-L56 |
243,183 | orsinium/textdistance | textdistance/algorithms/base.py | Base.normalized_distance | def normalized_distance(self, *sequences):
"""Get distance from 0 to 1
"""
return float(self.distance(*sequences)) / self.maximum(*sequences) | python | def normalized_distance(self, *sequences):
return float(self.distance(*sequences)) / self.maximum(*sequences) | [
"def",
"normalized_distance",
"(",
"self",
",",
"*",
"sequences",
")",
":",
"return",
"float",
"(",
"self",
".",
"distance",
"(",
"*",
"sequences",
")",
")",
"/",
"self",
".",
"maximum",
"(",
"*",
"sequences",
")"
] | Get distance from 0 to 1 | [
"Get",
"distance",
"from",
"0",
"to",
"1"
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L39-L42 |
243,184 | orsinium/textdistance | textdistance/algorithms/base.py | Base.external_answer | def external_answer(self, *sequences):
"""Try to get answer from known external libraries.
"""
# if this feature disabled
if not getattr(self, 'external', False):
return
# all external libs doesn't support test_func
if hasattr(self, 'test_func') and self.test_func is not self._ident:
return
# try to get external libs for algorithm
libs = libraries.get_libs(self.__class__.__name__)
for lib in libs:
# if conditions not satisfied
if not lib.check_conditions(self, *sequences):
continue
# if library is not installed yet
if not lib.get_function():
continue
prepared_sequences = lib.prepare(*sequences)
# fail side libraries silently and try next libs
try:
return lib.func(*prepared_sequences)
except Exception:
pass | python | def external_answer(self, *sequences):
# if this feature disabled
if not getattr(self, 'external', False):
return
# all external libs doesn't support test_func
if hasattr(self, 'test_func') and self.test_func is not self._ident:
return
# try to get external libs for algorithm
libs = libraries.get_libs(self.__class__.__name__)
for lib in libs:
# if conditions not satisfied
if not lib.check_conditions(self, *sequences):
continue
# if library is not installed yet
if not lib.get_function():
continue
prepared_sequences = lib.prepare(*sequences)
# fail side libraries silently and try next libs
try:
return lib.func(*prepared_sequences)
except Exception:
pass | [
"def",
"external_answer",
"(",
"self",
",",
"*",
"sequences",
")",
":",
"# if this feature disabled",
"if",
"not",
"getattr",
"(",
"self",
",",
"'external'",
",",
"False",
")",
":",
"return",
"# all external libs doesn't support test_func",
"if",
"hasattr",
"(",
"... | Try to get answer from known external libraries. | [
"Try",
"to",
"get",
"answer",
"from",
"known",
"external",
"libraries",
"."
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L51-L75 |
243,185 | orsinium/textdistance | textdistance/algorithms/base.py | Base._ident | def _ident(*elements):
"""Return True if all sequences are equal.
"""
try:
# for hashable elements
return len(set(elements)) == 1
except TypeError:
# for unhashable elements
for e1, e2 in zip(elements, elements[1:]):
if e1 != e2:
return False
return True | python | def _ident(*elements):
try:
# for hashable elements
return len(set(elements)) == 1
except TypeError:
# for unhashable elements
for e1, e2 in zip(elements, elements[1:]):
if e1 != e2:
return False
return True | [
"def",
"_ident",
"(",
"*",
"elements",
")",
":",
"try",
":",
"# for hashable elements",
"return",
"len",
"(",
"set",
"(",
"elements",
")",
")",
"==",
"1",
"except",
"TypeError",
":",
"# for unhashable elements",
"for",
"e1",
",",
"e2",
"in",
"zip",
"(",
... | Return True if all sequences are equal. | [
"Return",
"True",
"if",
"all",
"sequences",
"are",
"equal",
"."
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L98-L109 |
243,186 | orsinium/textdistance | textdistance/algorithms/base.py | Base._get_sequences | def _get_sequences(self, *sequences):
"""Prepare sequences.
qval=None: split text by words
qval=1: do not split sequences. For text this is mean comparing by letters.
qval>1: split sequences by q-grams
"""
# by words
if not self.qval:
return [s.split() for s in sequences]
# by chars
if self.qval == 1:
return sequences
# by n-grams
return [find_ngrams(s, self.qval) for s in sequences] | python | def _get_sequences(self, *sequences):
# by words
if not self.qval:
return [s.split() for s in sequences]
# by chars
if self.qval == 1:
return sequences
# by n-grams
return [find_ngrams(s, self.qval) for s in sequences] | [
"def",
"_get_sequences",
"(",
"self",
",",
"*",
"sequences",
")",
":",
"# by words",
"if",
"not",
"self",
".",
"qval",
":",
"return",
"[",
"s",
".",
"split",
"(",
")",
"for",
"s",
"in",
"sequences",
"]",
"# by chars",
"if",
"self",
".",
"qval",
"==",... | Prepare sequences.
qval=None: split text by words
qval=1: do not split sequences. For text this is mean comparing by letters.
qval>1: split sequences by q-grams | [
"Prepare",
"sequences",
"."
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L111-L125 |
243,187 | orsinium/textdistance | textdistance/algorithms/base.py | Base._get_counters | def _get_counters(self, *sequences):
"""Prepare sequences and convert it to Counters.
"""
# already Counters
if all(isinstance(s, Counter) for s in sequences):
return sequences
return [Counter(s) for s in self._get_sequences(*sequences)] | python | def _get_counters(self, *sequences):
# already Counters
if all(isinstance(s, Counter) for s in sequences):
return sequences
return [Counter(s) for s in self._get_sequences(*sequences)] | [
"def",
"_get_counters",
"(",
"self",
",",
"*",
"sequences",
")",
":",
"# already Counters",
"if",
"all",
"(",
"isinstance",
"(",
"s",
",",
"Counter",
")",
"for",
"s",
"in",
"sequences",
")",
":",
"return",
"sequences",
"return",
"[",
"Counter",
"(",
"s",... | Prepare sequences and convert it to Counters. | [
"Prepare",
"sequences",
"and",
"convert",
"it",
"to",
"Counters",
"."
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L127-L133 |
243,188 | orsinium/textdistance | textdistance/algorithms/base.py | Base._count_counters | def _count_counters(self, counter):
"""Return all elements count from Counter
"""
if getattr(self, 'as_set', False):
return len(set(counter))
else:
return sum(counter.values()) | python | def _count_counters(self, counter):
if getattr(self, 'as_set', False):
return len(set(counter))
else:
return sum(counter.values()) | [
"def",
"_count_counters",
"(",
"self",
",",
"counter",
")",
":",
"if",
"getattr",
"(",
"self",
",",
"'as_set'",
",",
"False",
")",
":",
"return",
"len",
"(",
"set",
"(",
"counter",
")",
")",
"else",
":",
"return",
"sum",
"(",
"counter",
".",
"values"... | Return all elements count from Counter | [
"Return",
"all",
"elements",
"count",
"from",
"Counter"
] | 34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7 | https://github.com/orsinium/textdistance/blob/34d2e40bb0b26efc03da80b63fd58ebbd3f2cdd7/textdistance/algorithms/base.py#L153-L159 |
243,189 | SCIP-Interfaces/PySCIPOpt | examples/finished/transp.py | make_inst2 | def make_inst2():
"""creates example data set 2"""
I,d = multidict({1:45, 2:20, 3:30 , 4:30}) # demand
J,M = multidict({1:35, 2:50, 3:40}) # capacity
c = {(1,1):8, (1,2):9, (1,3):14 , # {(customer,factory) : cost<float>}
(2,1):6, (2,2):12, (2,3):9 ,
(3,1):10, (3,2):13, (3,3):16 ,
(4,1):9, (4,2):7, (4,3):5 ,
}
return I,J,c,d,M | python | def make_inst2():
I,d = multidict({1:45, 2:20, 3:30 , 4:30}) # demand
J,M = multidict({1:35, 2:50, 3:40}) # capacity
c = {(1,1):8, (1,2):9, (1,3):14 , # {(customer,factory) : cost<float>}
(2,1):6, (2,2):12, (2,3):9 ,
(3,1):10, (3,2):13, (3,3):16 ,
(4,1):9, (4,2):7, (4,3):5 ,
}
return I,J,c,d,M | [
"def",
"make_inst2",
"(",
")",
":",
"I",
",",
"d",
"=",
"multidict",
"(",
"{",
"1",
":",
"45",
",",
"2",
":",
"20",
",",
"3",
":",
"30",
",",
"4",
":",
"30",
"}",
")",
"# demand",
"J",
",",
"M",
"=",
"multidict",
"(",
"{",
"1",
":",
"35",... | creates example data set 2 | [
"creates",
"example",
"data",
"set",
"2"
] | 9c960b40d94a48b0304d73dbe28b467b9c065abe | https://github.com/SCIP-Interfaces/PySCIPOpt/blob/9c960b40d94a48b0304d73dbe28b467b9c065abe/examples/finished/transp.py#L62-L71 |
243,190 | SCIP-Interfaces/PySCIPOpt | examples/unfinished/vrp_lazy.py | VRPconshdlr.addCuts | def addCuts(self, checkonly):
"""add cuts if necessary and return whether model is feasible"""
cutsadded = False
edges = []
x = self.model.data
for (i, j) in x:
if self.model.getVal(x[i, j]) > .5:
if i != V[0] and j != V[0]:
edges.append((i, j))
G = networkx.Graph()
G.add_edges_from(edges)
Components = list(networkx.connected_components(G))
for S in Components:
S_card = len(S)
q_sum = sum(q[i] for i in S)
NS = int(math.ceil(float(q_sum) / Q))
S_edges = [(i, j) for i in S for j in S if i < j and (i, j) in edges]
if S_card >= 3 and (len(S_edges) >= S_card or NS > 1):
cutsadded = True
if checkonly:
break
else:
self.model.addCons(quicksum(x[i, j] for i in S for j in S if j > i) <= S_card - NS)
print("adding cut for", S_edges)
return cutsadded | python | def addCuts(self, checkonly):
cutsadded = False
edges = []
x = self.model.data
for (i, j) in x:
if self.model.getVal(x[i, j]) > .5:
if i != V[0] and j != V[0]:
edges.append((i, j))
G = networkx.Graph()
G.add_edges_from(edges)
Components = list(networkx.connected_components(G))
for S in Components:
S_card = len(S)
q_sum = sum(q[i] for i in S)
NS = int(math.ceil(float(q_sum) / Q))
S_edges = [(i, j) for i in S for j in S if i < j and (i, j) in edges]
if S_card >= 3 and (len(S_edges) >= S_card or NS > 1):
cutsadded = True
if checkonly:
break
else:
self.model.addCons(quicksum(x[i, j] for i in S for j in S if j > i) <= S_card - NS)
print("adding cut for", S_edges)
return cutsadded | [
"def",
"addCuts",
"(",
"self",
",",
"checkonly",
")",
":",
"cutsadded",
"=",
"False",
"edges",
"=",
"[",
"]",
"x",
"=",
"self",
".",
"model",
".",
"data",
"for",
"(",
"i",
",",
"j",
")",
"in",
"x",
":",
"if",
"self",
".",
"model",
".",
"getVal"... | add cuts if necessary and return whether model is feasible | [
"add",
"cuts",
"if",
"necessary",
"and",
"return",
"whether",
"model",
"is",
"feasible"
] | 9c960b40d94a48b0304d73dbe28b467b9c065abe | https://github.com/SCIP-Interfaces/PySCIPOpt/blob/9c960b40d94a48b0304d73dbe28b467b9c065abe/examples/unfinished/vrp_lazy.py#L17-L42 |
243,191 | SCIP-Interfaces/PySCIPOpt | examples/finished/read_tsplib.py | distCEIL2D | def distCEIL2D(x1,y1,x2,y2):
"""returns smallest integer not less than the distance of two points"""
xdiff = x2 - x1
ydiff = y2 - y1
return int(math.ceil(math.sqrt(xdiff*xdiff + ydiff*ydiff))) | python | def distCEIL2D(x1,y1,x2,y2):
xdiff = x2 - x1
ydiff = y2 - y1
return int(math.ceil(math.sqrt(xdiff*xdiff + ydiff*ydiff))) | [
"def",
"distCEIL2D",
"(",
"x1",
",",
"y1",
",",
"x2",
",",
"y2",
")",
":",
"xdiff",
"=",
"x2",
"-",
"x1",
"ydiff",
"=",
"y2",
"-",
"y1",
"return",
"int",
"(",
"math",
".",
"ceil",
"(",
"math",
".",
"sqrt",
"(",
"xdiff",
"*",
"xdiff",
"+",
"yd... | returns smallest integer not less than the distance of two points | [
"returns",
"smallest",
"integer",
"not",
"less",
"than",
"the",
"distance",
"of",
"two",
"points"
] | 9c960b40d94a48b0304d73dbe28b467b9c065abe | https://github.com/SCIP-Interfaces/PySCIPOpt/blob/9c960b40d94a48b0304d73dbe28b467b9c065abe/examples/finished/read_tsplib.py#L53-L57 |
243,192 | SCIP-Interfaces/PySCIPOpt | examples/finished/read_tsplib.py | read_atsplib | def read_atsplib(filename):
"basic function for reading a ATSP problem on the TSPLIB format"
"NOTE: only works for explicit matrices"
if filename[-3:] == ".gz":
f = gzip.open(filename, 'r')
data = f.readlines()
else:
f = open(filename, 'r')
data = f.readlines()
for line in data:
if line.find("DIMENSION") >= 0:
n = int(line.split()[1])
break
else:
raise IOError("'DIMENSION' keyword not found in file '%s'" % filename)
for line in data:
if line.find("EDGE_WEIGHT_TYPE") >= 0:
if line.split()[1] == "EXPLICIT":
break
else:
raise IOError("'EDGE_WEIGHT_TYPE' is not 'EXPLICIT' in file '%s'" % filename)
for k,line in enumerate(data):
if line.find("EDGE_WEIGHT_SECTION") >= 0:
break
else:
raise IOError("'EDGE_WEIGHT_SECTION' not found in file '%s'" % filename)
c = {}
# flatten list of distances
dist = []
for line in data[k+1:]:
if line.find("EOF") >= 0:
break
for val in line.split():
dist.append(int(val))
k = 0
for i in range(n):
for j in range(n):
c[i+1,j+1] = dist[k]
k += 1
return n,c | python | def read_atsplib(filename):
"basic function for reading a ATSP problem on the TSPLIB format"
"NOTE: only works for explicit matrices"
if filename[-3:] == ".gz":
f = gzip.open(filename, 'r')
data = f.readlines()
else:
f = open(filename, 'r')
data = f.readlines()
for line in data:
if line.find("DIMENSION") >= 0:
n = int(line.split()[1])
break
else:
raise IOError("'DIMENSION' keyword not found in file '%s'" % filename)
for line in data:
if line.find("EDGE_WEIGHT_TYPE") >= 0:
if line.split()[1] == "EXPLICIT":
break
else:
raise IOError("'EDGE_WEIGHT_TYPE' is not 'EXPLICIT' in file '%s'" % filename)
for k,line in enumerate(data):
if line.find("EDGE_WEIGHT_SECTION") >= 0:
break
else:
raise IOError("'EDGE_WEIGHT_SECTION' not found in file '%s'" % filename)
c = {}
# flatten list of distances
dist = []
for line in data[k+1:]:
if line.find("EOF") >= 0:
break
for val in line.split():
dist.append(int(val))
k = 0
for i in range(n):
for j in range(n):
c[i+1,j+1] = dist[k]
k += 1
return n,c | [
"def",
"read_atsplib",
"(",
"filename",
")",
":",
"\"NOTE: only works for explicit matrices\"",
"if",
"filename",
"[",
"-",
"3",
":",
"]",
"==",
"\".gz\"",
":",
"f",
"=",
"gzip",
".",
"open",
"(",
"filename",
",",
"'r'",
")",
"data",
"=",
"f",
".",
"read... | basic function for reading a ATSP problem on the TSPLIB format | [
"basic",
"function",
"for",
"reading",
"a",
"ATSP",
"problem",
"on",
"the",
"TSPLIB",
"format"
] | 9c960b40d94a48b0304d73dbe28b467b9c065abe | https://github.com/SCIP-Interfaces/PySCIPOpt/blob/9c960b40d94a48b0304d73dbe28b467b9c065abe/examples/finished/read_tsplib.py#L216-L262 |
243,193 | SCIP-Interfaces/PySCIPOpt | src/pyscipopt/Multidict.py | multidict | def multidict(D):
'''creates a multidictionary'''
keys = list(D.keys())
if len(keys) == 0:
return [[]]
try:
N = len(D[keys[0]])
islist = True
except:
N = 1
islist = False
dlist = [dict() for d in range(N)]
for k in keys:
if islist:
for i in range(N):
dlist[i][k] = D[k][i]
else:
dlist[0][k] = D[k]
return [keys]+dlist | python | def multidict(D):
'''creates a multidictionary'''
keys = list(D.keys())
if len(keys) == 0:
return [[]]
try:
N = len(D[keys[0]])
islist = True
except:
N = 1
islist = False
dlist = [dict() for d in range(N)]
for k in keys:
if islist:
for i in range(N):
dlist[i][k] = D[k][i]
else:
dlist[0][k] = D[k]
return [keys]+dlist | [
"def",
"multidict",
"(",
"D",
")",
":",
"keys",
"=",
"list",
"(",
"D",
".",
"keys",
"(",
")",
")",
"if",
"len",
"(",
"keys",
")",
"==",
"0",
":",
"return",
"[",
"[",
"]",
"]",
"try",
":",
"N",
"=",
"len",
"(",
"D",
"[",
"keys",
"[",
"0",
... | creates a multidictionary | [
"creates",
"a",
"multidictionary"
] | 9c960b40d94a48b0304d73dbe28b467b9c065abe | https://github.com/SCIP-Interfaces/PySCIPOpt/blob/9c960b40d94a48b0304d73dbe28b467b9c065abe/src/pyscipopt/Multidict.py#L3-L21 |
243,194 | intake/intake | intake/catalog/local.py | register_plugin_module | def register_plugin_module(mod):
"""Find plugins in given module"""
for k, v in load_plugins_from_module(mod).items():
if k:
if isinstance(k, (list, tuple)):
k = k[0]
global_registry[k] = v | python | def register_plugin_module(mod):
for k, v in load_plugins_from_module(mod).items():
if k:
if isinstance(k, (list, tuple)):
k = k[0]
global_registry[k] = v | [
"def",
"register_plugin_module",
"(",
"mod",
")",
":",
"for",
"k",
",",
"v",
"in",
"load_plugins_from_module",
"(",
"mod",
")",
".",
"items",
"(",
")",
":",
"if",
"k",
":",
"if",
"isinstance",
"(",
"k",
",",
"(",
"list",
",",
"tuple",
")",
")",
":"... | Find plugins in given module | [
"Find",
"plugins",
"in",
"given",
"module"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L494-L500 |
243,195 | intake/intake | intake/catalog/local.py | register_plugin_dir | def register_plugin_dir(path):
"""Find plugins in given directory"""
import glob
for f in glob.glob(path + '/*.py'):
for k, v in load_plugins_from_module(f).items():
if k:
global_registry[k] = v | python | def register_plugin_dir(path):
import glob
for f in glob.glob(path + '/*.py'):
for k, v in load_plugins_from_module(f).items():
if k:
global_registry[k] = v | [
"def",
"register_plugin_dir",
"(",
"path",
")",
":",
"import",
"glob",
"for",
"f",
"in",
"glob",
".",
"glob",
"(",
"path",
"+",
"'/*.py'",
")",
":",
"for",
"k",
",",
"v",
"in",
"load_plugins_from_module",
"(",
"f",
")",
".",
"items",
"(",
")",
":",
... | Find plugins in given directory | [
"Find",
"plugins",
"in",
"given",
"directory"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L503-L509 |
243,196 | intake/intake | intake/catalog/local.py | UserParameter.describe | def describe(self):
"""Information about this parameter"""
desc = {
'name': self.name,
'description': self.description,
# the Parameter might not have a type at all
'type': self.type or 'unknown',
}
for attr in ['min', 'max', 'allowed', 'default']:
v = getattr(self, attr)
if v is not None:
desc[attr] = v
return desc | python | def describe(self):
desc = {
'name': self.name,
'description': self.description,
# the Parameter might not have a type at all
'type': self.type or 'unknown',
}
for attr in ['min', 'max', 'allowed', 'default']:
v = getattr(self, attr)
if v is not None:
desc[attr] = v
return desc | [
"def",
"describe",
"(",
"self",
")",
":",
"desc",
"=",
"{",
"'name'",
":",
"self",
".",
"name",
",",
"'description'",
":",
"self",
".",
"description",
",",
"# the Parameter might not have a type at all",
"'type'",
":",
"self",
".",
"type",
"or",
"'unknown'",
... | Information about this parameter | [
"Information",
"about",
"this",
"parameter"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L88-L100 |
243,197 | intake/intake | intake/catalog/local.py | UserParameter.validate | def validate(self, value):
"""Does value meet parameter requirements?"""
if self.type is not None:
value = coerce(self.type, value)
if self.min is not None and value < self.min:
raise ValueError('%s=%s is less than %s' % (self.name, value,
self.min))
if self.max is not None and value > self.max:
raise ValueError('%s=%s is greater than %s' % (
self.name, value, self.max))
if self.allowed is not None and value not in self.allowed:
raise ValueError('%s=%s is not one of the allowed values: %s' % (
self.name, value, ','.join(map(str, self.allowed))))
return value | python | def validate(self, value):
if self.type is not None:
value = coerce(self.type, value)
if self.min is not None and value < self.min:
raise ValueError('%s=%s is less than %s' % (self.name, value,
self.min))
if self.max is not None and value > self.max:
raise ValueError('%s=%s is greater than %s' % (
self.name, value, self.max))
if self.allowed is not None and value not in self.allowed:
raise ValueError('%s=%s is not one of the allowed values: %s' % (
self.name, value, ','.join(map(str, self.allowed))))
return value | [
"def",
"validate",
"(",
"self",
",",
"value",
")",
":",
"if",
"self",
".",
"type",
"is",
"not",
"None",
":",
"value",
"=",
"coerce",
"(",
"self",
".",
"type",
",",
"value",
")",
"if",
"self",
".",
"min",
"is",
"not",
"None",
"and",
"value",
"<",
... | Does value meet parameter requirements? | [
"Does",
"value",
"meet",
"parameter",
"requirements?"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L111-L126 |
243,198 | intake/intake | intake/catalog/local.py | LocalCatalogEntry.describe | def describe(self):
"""Basic information about this entry"""
if isinstance(self._plugin, list):
pl = [p.name for p in self._plugin]
elif isinstance(self._plugin, dict):
pl = {k: classname(v) for k, v in self._plugin.items()}
else:
pl = self._plugin if isinstance(self._plugin, str) else self._plugin.name
return {
'name': self._name,
'container': self._container,
'plugin': pl,
'description': self._description,
'direct_access': self._direct_access,
'user_parameters': [u.describe() for u in self._user_parameters],
'metadata': self._metadata,
'args': self._open_args
} | python | def describe(self):
if isinstance(self._plugin, list):
pl = [p.name for p in self._plugin]
elif isinstance(self._plugin, dict):
pl = {k: classname(v) for k, v in self._plugin.items()}
else:
pl = self._plugin if isinstance(self._plugin, str) else self._plugin.name
return {
'name': self._name,
'container': self._container,
'plugin': pl,
'description': self._description,
'direct_access': self._direct_access,
'user_parameters': [u.describe() for u in self._user_parameters],
'metadata': self._metadata,
'args': self._open_args
} | [
"def",
"describe",
"(",
"self",
")",
":",
"if",
"isinstance",
"(",
"self",
".",
"_plugin",
",",
"list",
")",
":",
"pl",
"=",
"[",
"p",
".",
"name",
"for",
"p",
"in",
"self",
".",
"_plugin",
"]",
"elif",
"isinstance",
"(",
"self",
".",
"_plugin",
... | Basic information about this entry | [
"Basic",
"information",
"about",
"this",
"entry"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L207-L224 |
243,199 | intake/intake | intake/catalog/local.py | LocalCatalogEntry.get | def get(self, **user_parameters):
"""Instantiate the DataSource for the given parameters"""
plugin, open_args = self._create_open_args(user_parameters)
data_source = plugin(**open_args)
data_source.catalog_object = self._catalog
data_source.name = self.name
data_source.description = self._description
data_source.cat = self._catalog
return data_source | python | def get(self, **user_parameters):
plugin, open_args = self._create_open_args(user_parameters)
data_source = plugin(**open_args)
data_source.catalog_object = self._catalog
data_source.name = self.name
data_source.description = self._description
data_source.cat = self._catalog
return data_source | [
"def",
"get",
"(",
"self",
",",
"*",
"*",
"user_parameters",
")",
":",
"plugin",
",",
"open_args",
"=",
"self",
".",
"_create_open_args",
"(",
"user_parameters",
")",
"data_source",
"=",
"plugin",
"(",
"*",
"*",
"open_args",
")",
"data_source",
".",
"catal... | Instantiate the DataSource for the given parameters | [
"Instantiate",
"the",
"DataSource",
"for",
"the",
"given",
"parameters"
] | 277b96bfdee39d8a3048ea5408c6d6716d568336 | https://github.com/intake/intake/blob/277b96bfdee39d8a3048ea5408c6d6716d568336/intake/catalog/local.py#L263-L272 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.