docstring
stringlengths
52
499
function
stringlengths
67
35.2k
__index_level_0__
int64
52.6k
1.16M
Returns a copy of these credentials with modified claims. Args: additional_claims (Mapping[str, str]): Any additional claims for the JWT payload. This will be merged with the current additional claims. Returns: google.auth.service_account.Credentials: A new credentials instance.
def with_claims(self, additional_claims):
    """Returns a copy of these credentials with modified claims.

    Args:
        additional_claims (Mapping[str, str]): Additional claims for the
            JWT payload, merged on top of the current additional claims.

    Returns:
        google.auth.service_account.Credentials: A new credentials
            instance.
    """
    # Deep-copy so the new instance never shares claim state with this one.
    merged_claims = copy.deepcopy(self._additional_claims)
    merged_claims.update(additional_claims or {})
    return self.__class__(
        self._signer,
        service_account_email=self._service_account_email,
        scopes=self._scopes,
        token_uri=self._token_uri,
        subject=self._subject,
        project_id=self._project_id,
        additional_claims=merged_claims)
315,071
Create a copy of these credentials with the specified target audience. Args: target_audience (str): The intended audience for these credentials, used when requesting the ID Token. Returns: google.auth.service_account.IDTokenCredentials: A new credentials instance.
def with_target_audience(self, target_audience):
    """Create a copy of these credentials with the specified target audience.

    Args:
        target_audience (str): The intended audience for these credentials,
            used when requesting the ID Token.

    Returns:
        google.auth.service_account.IDTokenCredentials: A new credentials
            instance.
    """
    # Shallow-copy the claims so the new instance owns its own mapping.
    kwargs = dict(
        service_account_email=self._service_account_email,
        token_uri=self._token_uri,
        target_audience=target_audience,
        additional_claims=self._additional_claims.copy(),
    )
    return self.__class__(self._signer, **kwargs)
315,075
Converts an iterable of 1s and 0s to bytes. Combines the list 8 at a time, treating each group of 8 bits as a single byte. Args: bit_list (Sequence): Sequence of 1s and 0s. Returns: bytes: The decoded bytes.
def _bit_list_to_bytes(bit_list):
    """Converts an iterable of 1s and 0s to bytes.

    Combines the list 8 at a time, treating each group of 8 bits as a
    single byte (MSB first, via the module-level ``_POW2`` table).

    Args:
        bit_list (Sequence): Sequence of 1s and 0s.

    Returns:
        bytes: The decoded bytes.
    """
    byte_vals = bytearray()
    for start in six.moves.xrange(0, len(bit_list), 8):
        chunk = bit_list[start:start + 8]
        # zip truncates to the chunk length, so a trailing partial group
        # is weighted by the leading (most significant) powers of two.
        byte_vals.append(sum(
            power * bit for power, bit in six.moves.zip(_POW2, chunk)))
    return bytes(byte_vals)
315,081
Construct a Signer instance from a private key in PEM format. Args: key (str): Private key in PEM format. key_id (str): An optional key id used to identify the private key. Returns: google.auth.crypt.Signer: The constructed signer. Raises: ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in PEM format.
def from_string(cls, key, key_id=None):
    """Construct a Signer instance from a private key in PEM format.

    Args:
        key (str): Private key in PEM format.
        key_id (str): An optional key id used to identify the private key.

    Returns:
        google.auth.crypt.Signer: The constructed signer.

    Raises:
        ValueError: If the key cannot be parsed as PKCS#1 or PKCS#8 in
            PEM format.
    """
    key = _helpers.from_bytes(key)  # PEM expects str in Python 3
    marker_id, key_bytes = pem.readPemBlocksFromFile(
        six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER)

    if marker_id == 0:
        # PKCS#1: the DER blob is the bare RSA key.
        private_key = rsa.key.PrivateKey.load_pkcs1(
            key_bytes, format='DER')
    elif marker_id == 1:
        # PKCS#8: unwrap the PrivateKeyInfo structure first.
        key_info, remaining = decoder.decode(
            key_bytes, asn1Spec=_PKCS8_SPEC)
        if remaining != b'':
            raise ValueError('Unused bytes', remaining)
        private_key_info = key_info.getComponentByName('privateKey')
        private_key = rsa.key.PrivateKey.load_pkcs1(
            private_key_info.asOctets(), format='DER')
    else:
        raise ValueError('No key could be detected.')

    return cls(private_key, key_id=key_id)
315,084
Translates an error response into an exception. Args: response_body (str): The decoded response data. Raises: google.auth.exceptions.RefreshError
def _handle_error_response(response_body):
    """Translates an error response into an exception.

    Args:
        response_body (str): The decoded response data.

    Raises:
        google.auth.exceptions.RefreshError: Always.
    """
    try:
        error_data = json.loads(response_body)
        error_details = '{}: {}'.format(
            error_data['error'], error_data.get('error_description'))
    except (KeyError, ValueError):
        # If no details could be extracted, use the raw response data.
        error_details = response_body
    raise exceptions.RefreshError(error_details, response_body)
315,085
Parses the expiry field from a response into a datetime. Args: response_data (Mapping): The JSON-parsed response data. Returns: Optional[datetime]: The expiration or ``None`` if no expiration was specified.
def _parse_expiry(response_data):
    """Parses the expiry field from a response into a datetime.

    Args:
        response_data (Mapping): The JSON-parsed response data.

    Returns:
        Optional[datetime]: The expiration or ``None`` if no expiration
            was specified.
    """
    expires_in = response_data.get('expires_in')
    if expires_in is None:
        return None
    return _helpers.utcnow() + datetime.timedelta(seconds=expires_in)
315,086
Retrieve information about the service account. Updates the scopes and retrieves the full service account email. Args: request (google.auth.transport.Request): The object used to make HTTP requests.
def _retrieve_info(self, request):
    """Retrieve information about the service account.

    Updates the scopes and retrieves the full service account email.

    Args:
        request (google.auth.transport.Request): The object used to make
            HTTP requests.
    """
    account_info = _metadata.get_service_account_info(
        request, service_account=self._service_account_email)
    # The metadata server resolves 'default' to the concrete email.
    self._service_account_email = account_info['email']
    self._scopes = account_info['scopes']
315,094
Refresh the access token and scopes. Args: request (google.auth.transport.Request): The object used to make HTTP requests. Raises: google.auth.exceptions.RefreshError: If the Compute Engine metadata service can't be reached or if the instance has no credentials.
def refresh(self, request):
    """Refresh the access token and scopes.

    Args:
        request (google.auth.transport.Request): The object used to make
            HTTP requests.

    Raises:
        google.auth.exceptions.RefreshError: If the Compute Engine
            metadata service can't be reached or the instance has no
            credentials.
    """
    try:
        self._retrieve_info(request)
        self.token, self.expiry = _metadata.get_service_account_token(
            request, service_account=self._service_account_email)
    except exceptions.TransportError as caught_exc:
        # Re-raise transport failures as RefreshError, preserving the cause.
        new_exc = exceptions.RefreshError(caught_exc)
        six.raise_from(new_exc, caught_exc)
315,095
Creates a Credentials instance from parsed authorized user info. Args: info (Mapping[str, str]): The authorized user info in Google format. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the info is not in the expected format.
def from_authorized_user_info(cls, info, scopes=None):
    """Creates a Credentials instance from parsed authorized user info.

    Args:
        info (Mapping[str, str]): The authorized user info in Google
            format.
        scopes (Sequence[str]): Optional list of scopes to include in the
            credentials.

    Returns:
        google.oauth2.credentials.Credentials: The constructed credentials.

    Raises:
        ValueError: If the info is not in the expected format.
    """
    required_keys = {'refresh_token', 'client_id', 'client_secret'}
    missing = required_keys.difference(six.iterkeys(info))
    if missing:
        raise ValueError(
            'Authorized user info was not in the expected format, missing '
            'fields {}.'.format(', '.join(missing)))
    return Credentials(
        None,  # No access token, must be refreshed.
        refresh_token=info['refresh_token'],
        token_uri=_GOOGLE_OAUTH2_TOKEN_ENDPOINT,
        scopes=scopes,
        client_id=info['client_id'],
        client_secret=info['client_secret'])
315,099
Creates a Credentials instance from an authorized user json file. Args: filename (str): The path to the authorized user json file. scopes (Sequence[str]): Optional list of scopes to include in the credentials. Returns: google.oauth2.credentials.Credentials: The constructed credentials. Raises: ValueError: If the file is not in the expected format.
def from_authorized_user_file(cls, filename, scopes=None):
    """Creates a Credentials instance from an authorized user json file.

    Args:
        filename (str): The path to the authorized user json file.
        scopes (Sequence[str]): Optional list of scopes to include in the
            credentials.

    Returns:
        google.oauth2.credentials.Credentials: The constructed credentials.

    Raises:
        ValueError: If the file is not in the expected format.
    """
    with io.open(filename, 'r', encoding='utf-8') as json_file:
        info = json.load(json_file)
    return cls.from_authorized_user_info(info, scopes)
315,100
Checks to see if the metadata server is available. Args: request (google.auth.transport.Request): A callable used to make HTTP requests. timeout (int): How long to wait for the metadata server to respond. retry_count (int): How many times to attempt connecting to metadata server using above timeout. Returns: bool: True if the metadata server is reachable, False otherwise.
def ping(request, timeout=_METADATA_DEFAULT_TIMEOUT, retry_count=3):
    """Checks to see if the metadata server is available.

    Args:
        request (google.auth.transport.Request): A callable used to make
            HTTP requests.
        timeout (int): How long to wait for the metadata server to respond.
        retry_count (int): How many times to attempt connecting to the
            metadata server using the above timeout.

    Returns:
        bool: True if the metadata server is reachable, False otherwise.
    """
    # NOTE: The explicit ``timeout`` is a workaround. The underlying
    # issue is that resolving an unknown host on some networks will take
    # 20-30 seconds; making this timeout short fixes the issue, but
    # could lead to false negatives in the event that we are on GCE, but
    # the metadata resolution was particularly slow. The latter case is
    # "unlikely".
    retries = 0
    while retries < retry_count:
        try:
            response = request(
                url=_METADATA_IP_ROOT, method='GET',
                headers=_METADATA_HEADERS, timeout=timeout)
            metadata_flavor = response.headers.get(_METADATA_FLAVOR_HEADER)
            return (response.status == http_client.OK and
                    metadata_flavor == _METADATA_FLAVOR_VALUE)
        except exceptions.TransportError:
            # BUG FIX: the adjacent string literals previously concatenated
            # to "...unavailable onattempt..." — add the missing space.
            _LOGGER.info('Compute Engine Metadata server unavailable on '
                         'attempt %s of %s', retries + 1, retry_count)
            retries += 1
    return False
315,102
Reads a Google service account JSON file and returns its parsed info. Args: filename (str): The path to the service account .json file. require (Sequence[str]): List of keys required to be present in the info. Returns: Tuple[ Mapping[str, str], google.auth.crypt.Signer ]: The verified info and a signer instance.
def from_filename(filename, require=None):
    """Reads a Google service account JSON file and returns its parsed info.

    Args:
        filename (str): The path to the service account .json file.
        require (Sequence[str]): List of keys required to be present in the
            info.

    Returns:
        Tuple[Mapping[str, str], google.auth.crypt.Signer]: The verified
            info and a signer instance.
    """
    with io.open(filename, 'r', encoding='utf-8') as json_file:
        info = json.load(json_file)
    return info, from_dict(info, require=require)
315,106
Decorator that copies a method's docstring from another class. Args: source_class (type): The class that has the documented method. Returns: Callable: A decorator that will copy the docstring of the same named method in the source class to the decorated method.
def copy_docstring(source_class):
    """Decorator that copies a method's docstring from another class.

    Args:
        source_class (type): The class that has the documented method.

    Returns:
        Callable: A decorator that will copy the docstring of the
            same-named method in the source class to the decorated method.
    """
    def decorator(method):
        """Copies the docstring of the matching method in source_class."""
        # Refuse to clobber an existing docstring.
        if method.__doc__:
            raise ValueError('Method already has a docstring.')
        method.__doc__ = getattr(source_class, method.__name__).__doc__
        return method
    return decorator
315,107
Converts bytes to a string value, if necessary. Args: value (Union[str, bytes]): The value to be converted. Returns: str: The original value converted to unicode (if bytes) or as passed in if it started out as unicode. Raises: ValueError: If the value could not be converted to unicode.
def from_bytes(value):
    """Converts bytes to a string value, if necessary.

    Args:
        value (Union[str, bytes]): The value to be converted.

    Returns:
        str: The original value converted to unicode (if bytes) or as
            passed in if it started out as unicode.

    Raises:
        ValueError: If the value could not be converted to unicode.
    """
    if isinstance(value, six.binary_type):
        result = value.decode('utf-8')
    else:
        result = value
    if not isinstance(result, six.text_type):
        raise ValueError(
            '{0!r} could not be converted to unicode'.format(value))
    return result
315,109
Decodes base64 strings lacking padding characters. Google infrastructure tends to omit the base64 padding characters. Args: value (Union[str, bytes]): The encoded value. Returns: bytes: The decoded value
def padded_urlsafe_b64decode(value):
    """Decodes base64 strings lacking padding characters.

    Google infrastructure tends to omit the base64 padding characters.

    Args:
        value (Union[str, bytes]): The encoded value.

    Returns:
        bytes: The decoded value.
    """
    encoded = to_bytes(value)
    # Re-pad to the next multiple of 4 before decoding.
    missing_padding = -len(encoded) % 4
    return base64.urlsafe_b64decode(encoded + b'=' * missing_padding)
315,111
Check if the user is invoking one of the commands in 'subcommands' in the form 'az alias {subcommand}'. Args: subcommands: The list of subcommands to check through. args: The CLI arguments to process. Returns: True if the user is invoking 'az alias {command}'.
def is_alias_command(subcommands, args):
    """Check if the user is invoking one of the commands in subcommands
    in the form 'az alias {subcommand}'.

    Args:
        subcommands: The list of subcommands to check through.
        args: The CLI arguments to process.

    Returns:
        True if the user is invoking 'az alias {command}'.
    """
    if not args:
        return False
    return any(args[:2] == ['alias', subcommand] for subcommand in subcommands)
316,312
Remove positional argument placeholders from alias_command. Args: alias_command: The alias command to remove from. Returns: The alias command string without positional argument placeholder.
def remove_pos_arg_placeholders(alias_command):
    """Remove positional argument placeholders from alias_command.

    Args:
        alias_command: The alias command to remove from.

    Returns:
        The alias command string without positional argument placeholders.
    """
    tokens = shlex.split(alias_command)
    # The boundary is where named or positional arguments start.
    boundary = len(tokens)
    for position, token in enumerate(tokens):
        if not re.match('^[a-z]', token.lower()) or position > COLLISION_CHECK_LEVEL_DEPTH:
            boundary = position
            break
    return ' '.join(tokens[:boundary]).lower()
316,313
Filter out aliases that do not have a command field in the configuration file. Args: alias_table: The alias table. Yields: A tuple with [0] being the first word of the alias and [1] being the command that the alias points to.
def filter_aliases(alias_table):
    """Filter out aliases that do not have a command field in the
    configuration file.

    Args:
        alias_table: The alias table.

    Yields:
        A tuple with [0] being the first word of the alias and [1] being
        the command that the alias points to.
    """
    for section in alias_table.sections():
        if not alias_table.has_option(section, 'command'):
            continue
        command = remove_pos_arg_placeholders(alias_table.get(section, 'command'))
        yield (section.split()[0], command)
316,314
Build a dictionary where the keys are all the alias commands (without positional argument placeholders) and the values are all the parent commands of the keys. After that, write the table into a file. The purpose of the dictionary is to validate the alias tab completion state. For example: { "group": ["", "ad"], "dns": ["network"] } Args: alias_table: The alias table. Returns: The tab completion table.
def build_tab_completion_table(alias_table):
    """Build a dictionary mapping each alias command (without positional
    argument placeholders) to the list of its parent commands, then write
    the table to a file. The table is used to validate the alias tab
    completion state, e.g. {"group": ["", "ad"], "dns": ["network"]}.

    Args:
        alias_table: The alias table.

    Returns:
        The tab completion table.
    """
    alias_commands = [t[1] for t in filter_aliases(alias_table)]
    tab_completion_table = defaultdict(list)
    for alias_command in alias_commands:
        for reserved_command in azext_alias.cached_reserved_commands:
            # BUG FIX: the original condition parsed as `A or (B and C)`
            # due to operator precedence, so an alias that is itself a
            # reserved command AND a prefix of other reserved commands
            # appended a duplicate '' entry. Parenthesize as (A or B) and C.
            if ((reserved_command == alias_command or
                 reserved_command.startswith(alias_command + ' ')) and
                    '' not in tab_completion_table[alias_command]):
                # alias_command has no parent command.
                tab_completion_table[alias_command].append('')
            elif (' {} '.format(alias_command) in reserved_command or
                  reserved_command.endswith(' ' + alias_command)):
                # Extract the parent commands.
                index = reserved_command.index(alias_command)
                parent_command = reserved_command[:index - 1]
                if parent_command not in tab_completion_table[alias_command]:
                    tab_completion_table[alias_command].append(parent_command)

    with open(GLOBAL_ALIAS_TAB_COMP_TABLE_PATH, 'w') as f:
        f.write(json.dumps(tab_completion_table))
    return tab_completion_table
316,315
Reduce the alias table to a tuple that contains the alias and the command that the alias points to. Args: The alias table to be reduced. Yields A tuple that contains the alias and the command that the alias points to.
def reduce_alias_table(alias_table):
    """Reduce the alias table to tuples of (alias, command).

    Args:
        alias_table: The alias table to be reduced.

    Yields:
        A tuple that contains the alias and the command that the alias
        points to.
    """
    for section in alias_table.sections():
        if alias_table.has_option(section, 'command'):
            yield section, alias_table.get(section, 'command')
316,316
Retrieve a file from an URL Args: url: The URL to retrieve the file from. Returns: The absolute path of the downloaded file.
def retrieve_file_from_url(url):
    """Retrieve a file from a URL.

    Args:
        url: The URL to retrieve the file from.

    Returns:
        The absolute path of the downloaded file.
    """
    try:
        alias_source, _ = urlretrieve(url)
        # urlretrieve does not raise HTTPError in Python 2.x; detect an
        # HTTP status code at the start of the payload instead.
        with open(alias_source, 'r') as f:
            content = f.read()
            if content[:3].isdigit():
                raise CLIError(ALIAS_FILE_URL_ERROR.format(url, content.strip()))
    except Exception as exception:
        if isinstance(exception, CLIError):
            raise
        # Python 3.x raises directly from urlretrieve.
        raise CLIError(ALIAS_FILE_URL_ERROR.format(url, exception))
    return alias_source
316,317
Validate input arguments when the user invokes 'az alias create'. Args: namespace: argparse namespace object.
def process_alias_create_namespace(namespace):
    """Validate input arguments when the user invokes 'az alias create'.

    Args:
        namespace: argparse namespace object.
    """
    namespace = filter_alias_create_namespace(namespace)
    alias_name = namespace.alias_name
    alias_command = namespace.alias_command
    _validate_alias_name(alias_name)
    _validate_alias_command(alias_command)
    _validate_alias_command_level(alias_name, alias_command)
    _validate_pos_args_syntax(alias_name, alias_command)
316,480
Validate input arguments when the user invokes 'az alias import'. Args: namespace: argparse namespace object.
def process_alias_import_namespace(namespace):
    """Validate input arguments when the user invokes 'az alias import'.

    Args:
        namespace: argparse namespace object.
    """
    if is_url(namespace.alias_source):
        # Download first; validate the downloaded copy but report the URL.
        downloaded_source = retrieve_file_from_url(namespace.alias_source)
        _validate_alias_file_content(downloaded_source, url=namespace.alias_source)
    else:
        namespace.alias_source = os.path.abspath(namespace.alias_source)
        _validate_alias_file_path(namespace.alias_source)
        _validate_alias_file_content(namespace.alias_source)
316,481
Validate input arguments when the user invokes 'az alias export'. Args: namespace: argparse namespace object.
def process_alias_export_namespace(namespace):
    """Validate input arguments when the user invokes 'az alias export'.

    Args:
        namespace: argparse namespace object.
    """
    export_path = os.path.abspath(namespace.export_path)
    namespace.export_path = export_path
    if os.path.isfile(export_path):
        raise CLIError(FILE_ALREADY_EXISTS_ERROR.format(export_path))

    # Create the parent directory if it does not exist yet.
    export_path_dir = os.path.dirname(export_path)
    if not os.path.isdir(export_path_dir):
        os.makedirs(export_path_dir)

    # If the target is a directory, export to the default file name inside it.
    if os.path.isdir(export_path):
        namespace.export_path = os.path.join(export_path, ALIAS_FILE_NAME)
316,482
Check if the alias name is valid. Args: alias_name: The name of the alias to validate.
def _validate_alias_name(alias_name):
    """Check if the alias name is valid.

    Args:
        alias_name: The name of the alias to validate.
    """
    if not alias_name:
        raise CLIError(EMPTY_ALIAS_ERROR)
    # Alias names must start with a letter.
    if re.match('^[a-zA-Z]', alias_name) is None:
        raise CLIError(INVALID_STARTING_CHAR_ERROR.format(alias_name[0]))
316,483
Check if the alias command is valid. Args: alias_command: The command to validate.
def _validate_alias_command(alias_command):
    """Check if the alias command is valid.

    Args:
        alias_command: The command to validate.
    """
    if not alias_command:
        raise CLIError(EMPTY_ALIAS_ERROR)

    tokens = shlex.split(alias_command)
    # The boundary is where named or positional arguments start.
    boundary = len(tokens)
    for position, token in enumerate(tokens):
        if not re.match('^[a-z]', token.lower()) or position > COLLISION_CHECK_LEVEL_DEPTH:
            boundary = position
            break

    # Extract the possible CLI command and validate against reserved commands.
    command_to_validate = ' '.join(tokens[:boundary]).lower()
    for reserved in azext_alias.cached_reserved_commands:
        if re.match(r'([a-z\-]*\s)*{}($|\s)'.format(command_to_validate), reserved):
            return

    _validate_positional_arguments(shlex.split(alias_command))
316,484
Check if the positional argument syntax is valid in alias name and alias command. Args: alias_name: The name of the alias to validate. alias_command: The command to validate.
def _validate_pos_args_syntax(alias_name, alias_command):
    """Check if the positional argument syntax is valid in the alias name
    and the alias command.

    Args:
        alias_name: The name of the alias to validate.
        alias_command: The command to validate.
    """
    pos_args_from_alias = get_placeholders(alias_name)
    # Split by '|' to strip Jinja filters (e.g. {{ arg | upper }}) and by
    # '.' to strip function calls (e.g. {{ arg.split()[0] }}) so only the
    # bare placeholder name remains.
    pos_args_from_command = [
        placeholder.split('|')[0].split('.')[0].strip()
        for placeholder in get_placeholders(alias_command)]

    if set(pos_args_from_alias) != set(pos_args_from_command):
        arg_diff = set(pos_args_from_alias) ^ set(pos_args_from_command)
        plural = len(arg_diff) != 1
        raise CLIError(INCONSISTENT_ARG_ERROR.format(
            's' if plural else '', arg_diff, 'are' if plural else 'is'))
316,485
Make sure that if the alias is a reserved command, the command that the alias points to in the command tree does not conflict in levels. e.g. 'dns' -> 'network dns' is valid because dns is a level 2 command and network dns starts at level 1. However, 'list' -> 'show' is not valid because list and show are both reserved commands at level 2. Args: alias: The name of the alias. command: The command that the alias points to.
def _validate_alias_command_level(alias, command):
    """Make sure that if the alias is a reserved command, the command the
    alias points to does not conflict in command-tree level.

    e.g. 'dns' -> 'network dns' is valid (different levels), but
    'list' -> 'show' is not (both are reserved commands at level 2).

    Args:
        alias: The name of the alias.
        command: The command that the alias points to.
    """
    alias_collision_table = AliasManager.build_collision_table([alias])
    # Alias is not a reserved command, so it can point to any command.
    if not alias_collision_table:
        return

    command_collision_table = AliasManager.build_collision_table([command])
    alias_levels = alias_collision_table.get(alias.split()[0], [])
    command_levels = command_collision_table.get(command.split()[0], [])

    # Any shared level means a conflict.
    if set(alias_levels) & set(command_levels):
        raise CLIError(COMMAND_LVL_ERROR.format(alias, command))
316,486
Make sure the alias file path is neither non-existent nor a directory. Args: alias_file_path: The alias file path to import aliases from.
def _validate_alias_file_path(alias_file_path):
    """Make sure the alias file path exists and is not a directory.

    Args:
        alias_file_path: The alias file path to import aliases from.
    """
    if not os.path.exists(alias_file_path):
        raise CLIError(ALIAS_FILE_NOT_FOUND_ERROR)
    if os.path.isdir(alias_file_path):
        raise CLIError(ALIAS_FILE_DIR_ERROR.format(alias_file_path))
316,487
Make sure the alias name and alias command in the alias file is in valid format. Args: The alias file path to import aliases from.
def _validate_alias_file_content(alias_file_path, url=''):
    """Make sure the alias names and alias commands in the alias file are
    in a valid format.

    Args:
        alias_file_path: The alias file path to import aliases from.
        url: The original URL, used in error messages instead of the
            local temp path when importing from a URL.
    """
    alias_table = get_config_parser()
    try:
        alias_table.read(alias_file_path)
        for alias_name, alias_command in reduce_alias_table(alias_table):
            _validate_alias_name(alias_name)
            _validate_alias_command(alias_command)
            _validate_alias_command_level(alias_name, alias_command)
            _validate_pos_args_syntax(alias_name, alias_command)
    except Exception as exception:  # pylint: disable=broad-except
        error_msg = CONFIG_PARSING_ERROR % AliasManager.process_exception_message(exception)
        # Surface the URL rather than the local temp path where applicable.
        error_msg = error_msg.replace(alias_file_path, url or alias_file_path)
        raise CLIError(error_msg)
316,488
To validate the positional argument feature - https://github.com/Azure/azure-cli/pull/6055. Assuming that unknown commands are positional arguments immediately led by words that only appear at the end of the commands Slight modification of https://github.com/Azure/azure-cli/blob/dev/src/azure-cli-core/azure/cli/core/commands/__init__.py#L356-L373 Args: args: The arguments that the user inputs in the terminal. Returns: Rudimentary parsed arguments.
def _validate_positional_arguments(args):
    """Validate the positional argument feature -
    https://github.com/Azure/azure-cli/pull/6055. Assumes unknown commands
    are positional arguments immediately led by words that only appear at
    the end of the commands.

    Args:
        args: The arguments that the user inputs in the terminal.

    Raises:
        CLIError: If no reserved command matches the leading nouns.
    """
    nouns = []
    for arg in args:
        # BUG FIX: the original test was
        # `not arg.startswith('-') or not arg.startswith('{{')`, which is
        # always true (no token starts with both '-' and '{{'), so flags
        # and placeholders were never excluded. Collect leading tokens
        # until the first flag or positional-argument placeholder.
        if arg.startswith('-') or arg.startswith('{{'):
            break
        nouns.append(arg)

    while nouns:
        search = ' '.join(nouns)
        # Since the command name may be immediately followed by a
        # positional arg, strip trailing nouns until a reserved command
        # ends with the remaining phrase.
        if next((x for x in azext_alias.cached_reserved_commands
                 if x.endswith(search)), False):
            return
        del nouns[-1]

    raise CLIError(INVALID_ALIAS_COMMAND_ERROR.format(' '.join(args)))
316,489
Create an alias. Args: alias_name: The name of the alias. alias_command: The command that the alias points to.
def create_alias(alias_name, alias_command):
    """Create an alias.

    Args:
        alias_name: The name of the alias.
        alias_command: The command that the alias points to.
    """
    alias_name = alias_name.strip()
    alias_command = alias_command.strip()
    alias_table = get_alias_table()
    # Create the section on first use; overwrite the command otherwise.
    if alias_name not in alias_table.sections():
        alias_table.add_section(alias_name)
    alias_table.set(alias_name, 'command', alias_command)
    _commit_change(alias_table)
316,543
Export all registered aliases to a given path, as an INI configuration file. Args: export_path: The path of the alias configuration file to export to. exclusions: Space-separated aliases excluded from export.
def export_aliases(export_path=None, exclusions=None):
    """Export all registered aliases to a given path, as an INI
    configuration file.

    Args:
        export_path: The path of the alias configuration file to export to.
        exclusions: Space-separated aliases excluded from export.
    """
    export_path = export_path or os.path.abspath(ALIAS_FILE_NAME)
    alias_table = get_alias_table()
    for exclusion in exclusions or []:
        if exclusion not in alias_table.sections():
            raise CLIError(ALIAS_NOT_FOUND_ERROR.format(exclusion))
        alias_table.remove_section(exclusion)

    # post_commit=False: do not rewrite hashes/tables for an export copy.
    _commit_change(alias_table, export_path=export_path, post_commit=False)
    logger.warning(POST_EXPORT_ALIAS_MSG, export_path)
316,544
Import aliases from a file or an URL. Args: alias_source: The source of the alias. It can be a filepath or an URL.
def import_aliases(alias_source):
    """Import aliases from a file or a URL.

    Args:
        alias_source: The source of the alias. It can be a filepath or a URL.
    """
    alias_table = get_alias_table()
    from_url = is_url(alias_source)
    if from_url:
        alias_source = retrieve_file_from_url(alias_source)
    alias_table.read(alias_source)
    if from_url:
        # Clean up the temporary download after reading it.
        os.remove(alias_source)
    _commit_change(alias_table)
316,545
Remove aliases. Args: alias_names: The names of the aliases to be removed.
def remove_alias(alias_names):
    """Remove aliases.

    Args:
        alias_names: The names of the aliases to be removed.
    """
    alias_table = get_alias_table()
    for alias_name in alias_names:
        # Re-check sections() each iteration so duplicates in alias_names
        # are reported as not found, matching previous behavior.
        if alias_name not in alias_table.sections():
            raise CLIError(ALIAS_NOT_FOUND_ERROR.format(alias_name))
        alias_table.remove_section(alias_name)
    _commit_change(alias_table)
316,547
Record changes to the alias table. Also write new alias config hash and collided alias, if any. Args: alias_table: The alias table to commit. export_path: The path to export the aliases to. Default: GLOBAL_ALIAS_PATH. post_commit: True if we want to perform some extra actions after writing alias to file.
def _commit_change(alias_table, export_path=None, post_commit=True):
    """Record changes to the alias table. Also write the new alias config
    hash and collided aliases, if any.

    Args:
        alias_table: The alias table to commit.
        export_path: The path to export the aliases to.
            Default: GLOBAL_ALIAS_PATH.
        post_commit: True to perform extra bookkeeping (hash, collision
            table, tab completion table) after writing the aliases.
    """
    with open(export_path or GLOBAL_ALIAS_PATH, 'w+') as alias_config_file:
        alias_table.write(alias_config_file)
        if post_commit:
            # Hash the freshly-written file contents.
            alias_config_file.seek(0)
            config_hash = hashlib.sha1(
                alias_config_file.read().encode('utf-8')).hexdigest()
            AliasManager.write_alias_config_hash(config_hash)
            collided = AliasManager.build_collision_table(alias_table.sections())
            AliasManager.write_collided_alias(collided)
            build_tab_completion_table(alias_table)
316,548
Determine whether autocomplete can be performed at the current state. Args: cur_commands: The current commands typed in the console. alias_command: The alias command. Returns: True if autocomplete can be performed.
def _is_autocomplete_valid(cur_commands, alias_command):
    """Determine whether autocomplete can be performed at the current state.

    Args:
        cur_commands: The current commands typed in the console.
        alias_command: The alias command.

    Returns:
        True if autocomplete can be performed.
    """
    parent_command = ' '.join(cur_commands[1:])
    with open(GLOBAL_ALIAS_TAB_COMP_TABLE_PATH, 'r') as table_file:
        try:
            table = json.loads(table_file.read())
            return alias_command in table and parent_command in table[alias_command]
        except Exception:  # pylint: disable=broad-except
            # A corrupt or missing table simply disables autocomplete.
            return False
316,660
Transform any aliases in cur_commands into their respective commands. Args: alias_table: The alias table. cur_commands: current commands typed in the console.
def _transform_cur_commands(cur_commands, alias_table=None): transformed = [] alias_table = alias_table if alias_table else get_alias_table() for cmd in cur_commands: if cmd in alias_table.sections() and alias_table.has_option(cmd, 'command'): transformed += alias_table.get(cmd, 'command').split() else: transformed.append(cmd) cur_commands[:] = transformed
316,661
Transform any aliases in args to their respective commands. Args: args: A list of space-delimited command input extracted directly from the console. Returns: A list of transformed commands according to the alias configuration file.
def transform(self, args):
    """Transform any aliases in args to their respective commands.

    Args:
        args: A list of space-delimited command input extracted directly
            from the console.

    Returns:
        A list of transformed commands according to the alias
        configuration file.
    """
    if self.parse_error():
        # Write an empty hash so next run will check the config file against the entire command table again
        AliasManager.write_alias_config_hash(empty_hash=True)
        return args

    # Only load the entire command table if it detects changes in the alias config
    if self.detect_alias_config_change():
        self.load_full_command_table()
        self.collided_alias = AliasManager.build_collision_table(self.alias_table.sections())
        build_tab_completion_table(self.alias_table)
    else:
        self.load_collided_alias()

    transformed_commands = []
    alias_iter = enumerate(args, 1)

    for alias_index, alias in alias_iter:
        # Do not expand collided aliases at their colliding position.
        is_collided_alias = alias in self.collided_alias and alias_index in self.collided_alias[alias]
        # Check if the current alias is a named argument
        # index - 2 because alias_iter starts counting at index 1
        is_named_arg = alias_index > 1 and args[alias_index - 2].startswith('-')
        is_named_arg_flag = alias.startswith('-')
        # 'az alias remove/export' arguments must be taken literally.
        excluded_commands = is_alias_command(['remove', 'export'], transformed_commands)
        if not alias or is_collided_alias or is_named_arg or is_named_arg_flag or excluded_commands:
            transformed_commands.append(alias)
            continue

        full_alias = self.get_full_alias(alias)

        if self.alias_table.has_option(full_alias, 'command'):
            cmd_derived_from_alias = self.alias_table.get(full_alias, 'command')
            telemetry.set_alias_hit(full_alias)
        else:
            # Not an alias; pass the token through unchanged.
            transformed_commands.append(alias)
            continue

        pos_args_table = build_pos_args_table(full_alias, args, alias_index)
        if pos_args_table:
            logger.debug(POS_ARG_DEBUG_MSG, full_alias, cmd_derived_from_alias, pos_args_table)
            transformed_commands += render_template(cmd_derived_from_alias, pos_args_table)

            # Skip the next arg(s) because they have been already consumed as a positional argument above
            for pos_arg in pos_args_table:  # pylint: disable=unused-variable
                next(alias_iter)
        else:
            logger.debug(DEBUG_MSG, full_alias, cmd_derived_from_alias)
            transformed_commands += shlex.split(cmd_derived_from_alias)

    return self.post_transform(transformed_commands)
316,692
Get the full alias given a search query. Args: query: The query this function performs searching on. Returns: The full alias (with the placeholders, if any).
def get_full_alias(self, query):
    """Get the full alias given a search query.

    Args:
        query: The query this function performs searching on.

    Returns:
        The full alias (with the placeholders, if any), or '' when no
        section matches.
    """
    sections = self.alias_table.sections()
    if query in sections:
        return query
    # Fall back to matching the first word of each section.
    return next((section for section in sections
                 if section.split()[0] == query), '')
316,693
Inject environment variables, and write hash to alias hash file after transforming alias to commands. Args: args: A list of args to post-transform.
def post_transform(self, args):
    """Inject environment variables, and write the hash to the alias hash
    file after transforming aliases to commands.

    Args:
        args: A list of args to post-transform.

    Returns:
        The post-transformed args.
    """
    # Ignore 'az' if it is the first command.
    if args and args[0] == 'az':
        args = args[1:]

    post_transform_commands = []
    for i, arg in enumerate(args):
        # Do not expand environment variables inside the command argument
        # of 'az alias create -c/--command'.
        preserve = (is_alias_command(['create'], args) and i > 0 and
                    args[i - 1] in ['-c', '--command'])
        post_transform_commands.append(arg if preserve else os.path.expandvars(arg))

    AliasManager.write_alias_config_hash(self.alias_config_hash)
    AliasManager.write_collided_alias(self.collided_alias)
    return post_transform_commands
316,695
Write self.alias_config_hash to the alias hash file. Args: empty_hash: True if we want to write an empty string into the file. Empty string in the alias hash file means that we have to perform a full load of the command table in the next run.
def write_alias_config_hash(alias_config_hash='', empty_hash=False):
    """Write the alias config hash to the alias hash file.

    Args:
        alias_config_hash: The hash to write.
        empty_hash: True to write an empty string into the file instead.
            An empty hash forces a full load of the command table on the
            next run.
    """
    content = '' if empty_hash else alias_config_hash
    with open(GLOBAL_ALIAS_HASH_PATH, 'w') as alias_config_hash_file:
        alias_config_hash_file.write(content)
316,697
Process an exception message. Args: exception: The exception to process. Returns: A filtered string summarizing the exception.
def process_exception_message(exception):
    """Process an exception message.

    Args:
        exception: The exception to process.

    Returns:
        A filtered string summarizing the exception.
    """
    message = str(exception)
    # Collapse tabs to spaces; drop real and escaped newline markers.
    message = message.replace('\t', ' ')
    message = message.replace('\n', '')
    message = message.replace('\\n', '')
    # Configparser errors say "section"; users know these as aliases.
    return message.replace('section', 'alias')
316,699
Get all the placeholders' names in order. Use the regex below to locate all the opening ({{) and closing brackets (}}). After that, extract "stuff" inside the brackets. Args: arg: The word which this function performs searching on. check_duplicates: True if we want to check for duplicated positional arguments. Returns: A list of positional arguments in order.
def get_placeholders(arg, check_duplicates=False):
    """Get all the placeholders' names in order.

    Uses the regex below to locate all the opening ({{) and closing (}})
    brackets, then extracts the text inside each properly enclosed pair.

    Args:
        arg: The word which this function performs searching on.
        check_duplicates: True if we want to check for duplicated
            positional arguments.

    Returns:
        A list of positional arguments in order.
    """
    placeholders = []
    last_match = None
    arg = normalize_placeholders(arg)
    for cur_match in re.finditer(r'\s*{{|}}\s*', arg):
        matched_text = cur_match.group().strip()
        if not last_match and matched_text == '{{':
            # Opening bracket: remember it and wait for the closing one.
            last_match = cur_match
            continue
        last_matched_text = '' if not last_match else last_match.group().strip()
        # Check if the positional argument is enclosed with {{ }} properly
        if (not last_matched_text and matched_text == '}}') or (last_matched_text == '{{' and matched_text != '}}'):
            raise CLIError(PLACEHOLDER_BRACKETS_ERROR.format(arg))
        elif last_matched_text == '{{' and matched_text == '}}':
            # Extract start and end index of the placeholder name
            start_index, end_index = last_match.span()[1], cur_match.span()[0]
            placeholders.append(arg[start_index: end_index].strip())
            last_match = None
    # last_match did not reset - that means brackets are not enclosed properly
    if last_match:
        raise CLIError(PLACEHOLDER_BRACKETS_ERROR.format(arg))
    # Make sure there is no duplicated placeholder names
    if check_duplicates and len(placeholders) != len(set(placeholders)):
        raise CLIError(DUPLICATED_PLACEHOLDER_ERROR.format(arg))
    return placeholders
316,889
Build a dictionary where the key is the placeholder name and the value is the positional argument value. Args: full_alias: The full alias (including any placeholders). args: The arguments that the user inputs in the terminal. start_index: The index at which we start ingesting positional arguments. Returns: A dictionary with the key being the name of the placeholder and its value being the respective positional argument.
def build_pos_args_table(full_alias, args, start_index):
    """Build a dictionary mapping each placeholder name to its positional
    argument value.

    Args:
        full_alias: The full alias (including any placeholders).
        args: The arguments that the user inputs in the terminal.
        start_index: The index at which we start ingesting positional
            arguments.

    Returns:
        A dictionary with the key being the name of the placeholder and
        its value being the respective positional argument.
    """
    placeholders = get_placeholders(full_alias, check_duplicates=True)
    pos_args = args[start_index: start_index + len(placeholders)]
    if len(placeholders) != len(pos_args):
        raise CLIError(INSUFFICIENT_POS_ARG_ERROR.format(
            full_alias,
            len(placeholders),
            '' if len(placeholders) == 1 else 's',
            len(pos_args)))

    # Escape '"' because we are using "" to surround placeholder expressions.
    escaped_args = [pos_arg.replace('"', '\\"') for pos_arg in pos_args]
    return dict(zip(placeholders, escaped_args))
316,891
Render cmd_derived_from_alias as a Jinja template with pos_args_table as the arguments. Args: cmd_derived_from_alias: The string to be injected with positional arguments. pos_args_table: The dictionary used for rendering. Returns: A processed string with positional arguments injected.
def render_template(cmd_derived_from_alias, pos_args_table):
    """Render cmd_derived_from_alias as a Jinja template using pos_args_table.

    Args:
        cmd_derived_from_alias: The alias command string containing placeholders.
        pos_args_table: dict of placeholder name -> positional argument value.

    Returns:
        list: the rendered command, split into tokens by shlex.

    Raises:
        CLIError: On template compile-time or runtime errors, with a message
            pointing at the offending part of the template where possible.
    """
    try:
        cmd_derived_from_alias = normalize_placeholders(cmd_derived_from_alias, inject_quotes=True)
        template = jinja.Template(cmd_derived_from_alias)
        # Shlex.split allows us to split a string by spaces while preserving quoted substrings
        # (positional arguments in this case)
        rendered = shlex.split(template.render(pos_args_table))
        # Manually check if there is any runtime error (such as index out of range)
        # since Jinja template engine only checks for compile time error.
        # Only check for runtime errors if there is an empty string in rendered
        # (Jinja renders a failed expression as an empty token).
        if '' in rendered:
            check_runtime_errors(cmd_derived_from_alias, pos_args_table)
        return rendered
    except Exception as exception:
        # Exception raised from runtime error (check_runtime_errors raises CLIError)
        if isinstance(exception, CLIError):
            raise
        # The template has some sort of compile time errors
        split_exception_message = str(exception).split()
        # Check if the error message provides the index of the erroneous character
        error_index = split_exception_message[-1]
        if error_index.isdigit():
            # Turn "... at N" into "... at index N" for readability
            split_exception_message.insert(-1, 'index')
            error_msg = RENDER_TEMPLATE_ERROR.format(' '.join(split_exception_message), cmd_derived_from_alias)
            # Calculate where to put an arrow (^) char so that it is exactly below the erroneous character
            # e.g. ... "{{a.split('|)}}"
            #                       ^
            error_msg += '\n{}^'.format(' ' * (len(error_msg) - len(cmd_derived_from_alias) + int(error_index) - 1))
        else:
            # NOTE(review): replacing '"{{' with '}}' (not '{{') looks suspicious —
            # it may mangle the quoted placeholder in the message; confirm intent.
            exception_str = str(exception).replace('"{{', '}}').replace('}}"', '}}')
            error_msg = RENDER_TEMPLATE_ERROR.format(cmd_derived_from_alias, exception_str)
        raise CLIError(error_msg)
316,892
Validate placeholders and their expressions in cmd_derived_from_alias to make sure that there is no runtime error (such as index out of range). Args: cmd_derived_from_alias: The command derived from the alias (including any positional argument placeholders) pos_args_table: The positional argument table.
def check_runtime_errors(cmd_derived_from_alias, pos_args_table):
    """Evaluate each placeholder expression to surface runtime errors.

    Validates placeholders and their expressions in cmd_derived_from_alias to
    make sure that there is no runtime error (such as index out of range).

    Args:
        cmd_derived_from_alias: The command derived from the alias
            (including any positional argument placeholders).
        pos_args_table: The positional argument table.

    Raises:
        CLIError: If evaluating any placeholder expression fails.
    """
    # SECURITY NOTE(review): exec() on strings built from user-supplied alias
    # arguments executes arbitrary code in this process; acceptable only because
    # the user already controls their own CLI session — confirm this assumption.
    for placeholder, value in pos_args_table.items():
        # Bind each positional argument as a local variable for the eval below
        exec('{} = "{}"'.format(placeholder, value))  # pylint: disable=exec-used
    expressions = get_placeholders(cmd_derived_from_alias)
    for expression in expressions:
        try:
            exec(expression)  # pylint: disable=exec-used
        except Exception as exception:  # pylint: disable=broad-except
            error_msg = PLACEHOLDER_EVAL_ERROR.format(expression, exception)
            raise CLIError(error_msg)
316,893
Create an instance. Args: script_hash (UInt160): is_frozen (bool): votes (list): of EllipticCurve.ECPoint items. balances (dict): Key (UInt256): assetID. Value (Fixed8): balance.
def __init__(self, script_hash=None, is_frozen=False, votes=None, balances=None):
    """Create an AccountState instance.

    Args:
        script_hash (UInt160):
        is_frozen (bool):
        votes (list): of EllipticCurve.ECPoint items.
        balances (dict): Key (UInt256): assetID. Value (Fixed8): balance.
    """
    self.ScriptHash = script_hash
    self.IsFrozen = is_frozen
    # Fresh containers per instance when the caller supplies none
    self.Votes = votes if votes is not None else []
    self.Balances = balances if balances is not None else {}
317,196
Get AccountState object from a replica. Args: replica (obj): must have ScriptHash, IsFrozen, Votes and Balances members. Returns: AccountState:
def FromReplica(self, replica):
    """Build an AccountState from a replica object.

    Args:
        replica (obj): must have ScriptHash, IsFrozen, Votes and Balances members.

    Returns:
        AccountState:
    """
    return AccountState(script_hash=replica.ScriptHash,
                        is_frozen=replica.IsFrozen,
                        votes=replica.Votes,
                        balances=replica.Balances)
317,198
Deserialize full object. Args: buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from. Returns: AccountState:
def DeserializeFromDB(buffer):
    """Deserialize an AccountState from raw DB bytes.

    Args:
        buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.

    Returns:
        AccountState:
    """
    stream = StreamManager.GetStream(buffer)
    binary_reader = BinaryReader(stream)
    account = AccountState()
    account.Deserialize(binary_reader)
    # Return the pooled stream before handing the result back
    StreamManager.ReleaseStream(stream)
    return account
317,200
Deserialize full object. Args: reader (neocore.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neocore.IO.BinaryReader):
    """
    super(AccountState, self).Deserialize(reader)
    self.ScriptHash = reader.ReadUInt160()
    self.IsFrozen = reader.ReadBool()
    # Votes are appended to the existing list; each vote is a 33-byte public key
    for _ in range(reader.ReadVarInt()):
        self.Votes.append(reader.ReadBytes(33))
    num_balances = reader.ReadVarInt()
    # Balances are rebuilt from scratch
    self.Balances = {}
    for _ in range(num_balances):
        asset_id = reader.ReadUInt256()
        self.Balances[asset_id] = reader.ReadFixed8()
317,201
Serialize full object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    super(AccountState, self).Serialize(writer)
    writer.WriteUInt160(self.ScriptHash)
    writer.WriteBool(self.IsFrozen)
    # Length-prefixed list of votes
    writer.WriteVarInt(len(self.Votes))
    for vote in self.Votes:
        writer.WriteBytes(vote)
    # Length-prefixed asset -> balance pairs
    writer.WriteVarInt(len(self.Balances))
    for asset_id, balance in self.Balances.items():
        writer.WriteUInt256(asset_id)
        writer.WriteFixed8(balance)
317,202
Flag indicating if the asset has a balance. Args: assetId (UInt256): Returns: bool: True if a balance is present. False otherwise.
def HasBalance(self, assetId):
    """Flag indicating if the asset has a balance.

    Args:
        assetId (UInt256):

    Returns:
        bool: True if a balance is present. False otherwise.
    """
    # Dict membership replaces the original O(n) scan comparing every key;
    # sibling methods already index self.Balances[assetId] directly, so
    # hash-based lookup is consistent with key equality here.
    return assetId in self.Balances
317,203
Get the balance for a given asset id. Args: assetId (UInt256): Returns: Fixed8: balance value.
def BalanceFor(self, assetId):
    """Get the balance for a given asset id.

    Args:
        assetId (UInt256):

    Returns:
        Fixed8: balance value, or Fixed8(0) when the asset is unknown.
    """
    # Direct dict lookup replaces the original O(n) scan over keys.
    if assetId in self.Balances:
        return self.Balances[assetId]
    # Unknown assets report a zero balance rather than raising
    return Fixed8(0)
317,204
Set the balance for an asset id. Args: assetId (UInt256): fixed8_val (Fixed8): balance value.
def SetBalanceFor(self, assetId, fixed8_val):
    """Set the balance for an asset id.

    Args:
        assetId (UInt256):
        fixed8_val (Fixed8): balance value.
    """
    # A single dict assignment covers both the update and the insert case;
    # the original looped over all items with a found-flag to do the same.
    self.Balances[assetId] = fixed8_val
317,205
Add amount to the specified balance. Args: assetId (UInt256): fixed8_val (Fixed8): amount to add.
def AddToBalance(self, assetId, fixed8_val):
    """Add an amount to the specified balance.

    Args:
        assetId (UInt256):
        fixed8_val (Fixed8): amount to add.
    """
    # Membership test replaces the original full scan (which also kept
    # iterating after a match because it had no break).
    if assetId in self.Balances:
        self.Balances[assetId] = self.Balances[assetId] + fixed8_val
    else:
        self.Balances[assetId] = fixed8_val
317,206
Subtract an amount from the specified balance. Args: assetId (UInt256): fixed8_val (Fixed8): amount to subtract.
def SubtractFromBalance(self, assetId, fixed8_val):
    """Subtract an amount from the specified balance.

    Args:
        assetId (UInt256):
        fixed8_val (Fixed8): amount to subtract.
    """
    # Membership test replaces the original full scan over all balances.
    if assetId in self.Balances:
        self.Balances[assetId] = self.Balances[assetId] - fixed8_val
    else:
        # Unknown asset: record the negative of the subtracted amount
        self.Balances[assetId] = fixed8_val * Fixed8(-1)
317,207
Create an instance. Args: type (neo.Network.InventoryType): hashes (list): of bytearray items.
def __init__(self, type=None, hashes=None):
    """Create an instance.

    Args:
        type (neo.Network.InventoryType): note: shadows the builtin `type`
            inside this method; kept for interface compatibility.
        hashes (list): of bytearray items.
    """
    self.Type = type
    # Falsy (None/empty) hashes collapse to a fresh empty list
    self.Hashes = hashes or []
317,225
Deserialize full object. Args: reader (neo.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neo.IO.BinaryReader):
    """
    # Reads happen left to right: the inventory type byte, then the hash list
    self.Type, self.Hashes = reader.ReadByte(), reader.ReadHashes()
317,227
Serialize object. Errors raised while writing the hashes are caught and logged rather than propagated. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize object; failures are logged, not raised.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    try:
        writer.WriteByte(self.Type)
        writer.WriteHashes(self.Hashes)
    except Exception as e:
        # Best-effort: a failed write is reported but deliberately swallowed
        logger.error(f"COULD NOT WRITE INVENTORY HASHES ({self.Type} {self.Hashes}) {e}")
317,228
Listener for SmartContractEvent Args: sc_event (SmartContractEvent): event to check and see if it contains NEP5Token created
def on_smart_contract_created(self, sc_event: SmartContractEvent):
    """Listener for SmartContractEvent.

    Queues the event for persistence when it carries a newly created
    NEP5 token contract.

    Args:
        sc_event (SmartContractEvent): event to check for an NEP5 token.
    """
    # Guard clauses: only real contract-creation events outside test mode matter
    if not isinstance(sc_event.contract, ContractState):
        return
    if sc_event.test_mode:
        return
    sc_event.CheckIsNEP5()
    if sc_event.token:
        self._new_contracts_to_write.append(sc_event)
317,236
Listener for NotifyEvent Args: sc_event (NotifyEvent): event to check whether it should be persisted
def on_smart_contract_event(self, sc_event: NotifyEvent):
    """Listener for NotifyEvent.

    Args:
        sc_event (NotifyEvent): event to check whether it should be persisted.
    """
    if not isinstance(sc_event, NotifyEvent):
        logger.info("Not Notify Event instance")
        return
    # Persist only transfer-like notifications that ask to be persisted
    wanted = (NotifyType.TRANSFER, NotifyType.REFUND, NotifyType.MINT)
    if sc_event.ShouldPersist and sc_event.notify_type in wanted:
        self._events_to_write.append(sc_event)
317,237
Called when a block has been persisted to disk. Used as a hook to persist notification data. Args: block (neo.Core.Block): the currently persisting block
def on_persist_completed(self, block):
    """Persist queued notification data after a block is written to disk.

    Writes each queued NotifyEvent under its to/from address (with a running
    per-address counter), under its block number, and under its contract hash,
    then flushes any newly discovered NEP5 token contracts.

    Args:
        block (neo.Core.Block): the currently persisting block (unused here;
            the queued events carry their own block numbers).
    """
    if len(self._events_to_write):
        addr_db = self.db.prefixed_db(NotificationPrefix.PREFIX_ADDR)
        block_db = self.db.prefixed_db(NotificationPrefix.PREFIX_BLOCK)
        contract_db = self.db.prefixed_db(NotificationPrefix.PREFIX_CONTRACT)

        block_write_batch = block_db.write_batch()
        contract_write_batch = contract_db.write_batch()

        block_count = 0
        block_bytes = self._events_to_write[0].block_number.to_bytes(4, 'little')

        for evt in self._events_to_write:  # type:NotifyEvent
            # write the event for both or one of the addresses involved in the transfer
            write_both = True
            hash_data = evt.ToByteArray()

            bytes_to = bytes(evt.addr_to.Data)
            bytes_from = bytes(evt.addr_from.Data)

            # self-transfer: store the event only once
            if bytes_to == bytes_from:
                write_both = False

            total_bytes_to = addr_db.get(bytes_to + NotificationPrefix.PREFIX_COUNT)
            total_bytes_from = addr_db.get(bytes_from + NotificationPrefix.PREFIX_COUNT)

            if not total_bytes_to:
                total_bytes_to = b'\x00'
            if not total_bytes_from:
                # BUGFIX: was b'x\00' (i.e. b'x\x00' == 120 little-endian),
                # which started the from-address counter at 120 instead of 0.
                total_bytes_from = b'\x00'

            addr_to_key = bytes_to + total_bytes_to
            addr_from_key = bytes_from + total_bytes_from

            with addr_db.write_batch() as b:
                b.put(addr_to_key, hash_data)
                if write_both:
                    b.put(addr_from_key, hash_data)

                # bump the per-address counters (stored as 4-byte little-endian)
                total_bytes_to = int.from_bytes(total_bytes_to, 'little') + 1
                total_bytes_from = int.from_bytes(total_bytes_from, 'little') + 1
                new_bytes_to = total_bytes_to.to_bytes(4, 'little')
                new_bytes_from = total_bytes_from.to_bytes(4, 'little')

                b.put(bytes_to + NotificationPrefix.PREFIX_COUNT, new_bytes_to)
                if write_both:
                    b.put(bytes_from + NotificationPrefix.PREFIX_COUNT, new_bytes_from)

            # write the event to the per-block database
            per_block_key = block_bytes + block_count.to_bytes(4, 'little')
            block_write_batch.put(per_block_key, hash_data)
            block_count += 1

            # write the event to the per-contract database
            contract_bytes = bytes(evt.contract_hash.Data)
            count_for_contract = contract_db.get(contract_bytes + NotificationPrefix.PREFIX_COUNT)
            if not count_for_contract:
                count_for_contract = b'\x00'
            contract_event_key = contract_bytes + count_for_contract
            contract_count_int = int.from_bytes(count_for_contract, 'little') + 1
            new_contract_count = contract_count_int.to_bytes(4, 'little')
            contract_write_batch.put(contract_bytes + NotificationPrefix.PREFIX_COUNT, new_contract_count)
            contract_write_batch.put(contract_event_key, hash_data)

        # finish off the per-block write batch and contract write batch
        block_write_batch.write()
        contract_write_batch.write()

        self._events_to_write = []

    if len(self._new_contracts_to_write):
        token_db = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN)
        token_write_batch = token_db.write_batch()

        for token_event in self._new_contracts_to_write:
            try:
                hash_data = token_event.ToByteArray()
                # used to fail here
                hash_key = token_event.contract.Code.ScriptHash().ToBytes()
                token_write_batch.put(hash_key, hash_data)
            except Exception as e:
                logger.debug(f"Failed to write new contract, reason: {e}")

        token_write_batch.write()
        self._new_contracts_to_write = []
317,238
Look up notifications for a block Args: block_number (int): height of block to search for notifications Returns: list: a list of notifications
def get_by_block(self, block_number):
    """Look up notifications for a block.

    Args:
        block_number (int): height of block to search for notifications.

    Returns:
        list: a list of notifications.
    """
    snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_BLOCK).snapshot()
    prefix = block_number.to_bytes(4, 'little')
    # Every value under this 4-byte block prefix is a serialized event
    return [SmartContractEvent.FromByteArray(raw)
            for raw in snapshot.iterator(prefix=prefix, include_key=False)]
317,239
Lookup a set of notifications by address Args: address (UInt160 or str): hash of address for notifications Returns: list: a list of notifications
def get_by_addr(self, address):
    """Lookup a set of notifications by address.

    Args:
        address (UInt160 or str): hash of address for notifications.

    Returns:
        list: a list of notifications.

    Raises:
        Exception: if the address is neither a UInt160 nor a 34-char address string.
    """
    addr = address
    if isinstance(address, str) and len(address) == 34:
        addr = Helper.AddrStrToScriptHash(address)
    if not isinstance(addr, UInt160):
        raise Exception("Incorrect address format")

    snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_ADDR).snapshot()
    events = []
    for raw in snapshot.iterator(prefix=bytes(addr.Data), include_key=False):
        # values of <= 4 bytes are the per-address counters, not events
        if len(raw) <= 4:
            continue
        try:
            events.append(SmartContractEvent.FromByteArray(raw))
        except Exception as e:
            logger.error("could not parse event: %s %s" % (e, raw))
    return events
317,240
Look up a set of notifications by the contract they are associated with Args: contract_hash (UInt160 or str): hash of contract for notifications to be retrieved Returns: list: a list of notifications
def get_by_contract(self, contract_hash):
    """Look up a set of notifications by the contract they are associated with.

    Args:
        contract_hash (UInt160 or str): hash of contract for notifications to be retrieved.

    Returns:
        list: a list of notifications.

    Raises:
        Exception: if the hash is neither a UInt160 nor a 40-char hex string.
    """
    contract = contract_hash
    if isinstance(contract_hash, str) and len(contract_hash) == 40:
        contract = UInt160.ParseString(contract_hash)
    if not isinstance(contract, UInt160):
        raise Exception("Incorrect address format")

    snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_CONTRACT).snapshot()
    events = []
    for raw in snapshot.iterator(prefix=bytes(contract.Data), include_key=False):
        # values of <= 4 bytes are the per-contract counters, not events
        if len(raw) <= 4:
            continue
        try:
            events.append(SmartContractEvent.FromByteArray(raw))
        except Exception as e:
            logger.error("could not parse event: %s %s" % (e, raw))
    return events
317,241
Looks up a token by hash Args: hash (UInt160): The token to look up Returns: SmartContractEvent: A smart contract event with a contract that is an NEP5 Token
def get_token(self, hash):
    """Look up a token by hash.

    Args:
        hash (UInt160): The token to look up (parameter name kept for
            interface compatibility although it shadows the builtin).

    Returns:
        SmartContractEvent: event carrying an NEP5 token contract, or None.
    """
    snapshot = self.db.prefixed_db(NotificationPrefix.PREFIX_TOKEN).snapshot()
    try:
        raw = snapshot.get(hash.ToBytes())
        if raw:
            return SmartContractEvent.FromByteArray(raw)
    except Exception as e:
        logger.error("Smart contract event with contract hash %s not found: %s " % (hash.ToString(), e))
    return None
317,243
Deserialize full object. Args: reader (neo.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neo.IO.BinaryReader):

    Raises:
        Exception: if the trailing format byte is not zero.
    """
    super(Header, self).Deserialize(reader)
    # A header is terminated by a single zero byte
    padding = reader.ReadByte()
    if padding != 0:
        raise Exception('Incorrect Header Format')
317,245
Deserialize into a Header object from the provided data. Args: data (bytes): index: UNUSED Returns: Header:
def FromTrimmedData(data, index):
    """Deserialize a Header object from trimmed data.

    Args:
        data (bytes):
        index: UNUSED, kept for interface compatibility.

    Returns:
        Header:
    """
    header = Header()
    stream = StreamManager.GetStream(data)
    binary_reader = BinaryReader(stream)

    header.DeserializeUnsigned(binary_reader)
    binary_reader.ReadByte()  # skip the separator byte before the witness

    script = Witness()
    script.Deserialize(binary_reader)
    header.Script = script

    StreamManager.ReleaseStream(stream)
    return header
317,246
Serialize full object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    super(Header, self).Serialize(writer)
    # Headers are terminated with a single zero byte
    writer.WriteByte(0)
317,247
Create an instance. Args: code (neo.Core.FunctionCode): contract_properties (neo.SmartContract.ContractParameterType): contract type. name (bytes): version (bytes): author (bytes): email (bytes): description (bytes):
def __init__(self, code=None, contract_properties=0, name=None, version=None, author=None, email=None, description=None):
    """Create a ContractState instance.

    Args:
        code (neo.Core.FunctionCode):
        contract_properties (neo.SmartContract.ContractParameterType): contract type.
        name (bytes):
        version (bytes): stored as CodeVersion.
        author (bytes):
        email (bytes):
        description (bytes):
    """
    self.Code = code
    self.ContractProperties = contract_properties
    self.Name = name
    self.CodeVersion = version
    self.Author = author
    self.Email = email
    self.Description = description
317,273
Deserialize full object. Args: reader (neocore.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neocore.IO.BinaryReader):
    """
    super(ContractState, self).Deserialize(reader)

    function_code = FunctionCode()
    function_code.Deserialize(reader)
    self.Code = function_code

    self.ContractProperties = reader.ReadUInt8()
    # Metadata strings are length-limited var strings
    self.Name = reader.ReadVarString(max=252)
    self.CodeVersion = reader.ReadVarString(max=252)
    self.Author = reader.ReadVarString(max=252)
    self.Email = reader.ReadVarString(max=252)
    self.Description = reader.ReadVarString(max=65536)
317,275
Deserialize full object. Args: buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from. Returns: ContractState:
def DeserializeFromDB(buffer):
    """Deserialize a ContractState from raw DB bytes.

    Args:
        buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.

    Returns:
        ContractState:
    """
    stream = StreamManager.GetStream(buffer)
    binary_reader = BinaryReader(stream)
    contract = ContractState()
    contract.Deserialize(binary_reader)
    StreamManager.ReleaseStream(stream)
    return contract
317,276
Serialize full object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    super(ContractState, self).Serialize(writer)
    self.Code.Serialize(writer)
    writer.WriteUInt8(self.ContractProperties)
    # Metadata strings, in the same order Deserialize reads them
    writer.WriteVarString(self.Name)
    writer.WriteVarString(self.CodeVersion)
    writer.WriteVarString(self.Author)
    writer.WriteVarString(self.Email)
    writer.WriteVarString(self.Description)
317,277
Gets the name of a ContractParameterType based on its value Args: param_type (ContractParameterType): type to get the name of Returns: str
def ToName(param_type):
    """Get the name of a ContractParameterType based on its value.

    Args:
        param_type (ContractParameterType, int or bytes): type to get the name of.

    Returns:
        str: the member name, or None when no member matches.
    """
    if isinstance(param_type, bytes):
        param_type = int.from_bytes(param_type, 'little')
    # Iterate enum members directly instead of inspect.getmembers(), which also
    # yields non-member attributes (e.g. __class__) whose `.value` access raises.
    for member in ContractParameterType:
        if int(member.value) == param_type:
            return member.name
    return None
317,281
Create a ContractParameterType object from a str Args: val (str): the value to be converted to a ContractParameterType. val can be hex encoded (b'07'), int (7), string int ("7"), or string literal ("String") Returns: ContractParameterType
def FromString(val):
    """Create a ContractParameterType object from a str.

    Args:
        val (str, bytes, bytearray or int): can be hex encoded (b'07'), int (7),
            string int ("7"), or string literal ("String").

    Returns:
        ContractParameterType

    Raises:
        ValueError: if val cannot be interpreted as any of the accepted forms.
    """
    # first, check if the value supplied is the string literal of the enum (e.g. "String")
    if isinstance(val, bytes):
        val = val.decode('utf-8')
    try:
        return ContractParameterType[val]
    except KeyError:
        # narrowed from `except Exception` — the original comment already said
        # only a missing-name KeyError should be ignored here
        pass

    # second, check if the value supplied is bytes or hex-encoded (e.g. b'07')
    try:
        if isinstance(val, (bytearray, bytes)):
            int_val = int.from_bytes(val, 'little')
        else:
            int_val = int.from_bytes(binascii.unhexlify(val), 'little')
    except (binascii.Error, TypeError):
        # if it's not hex-encoded, then convert as int (e.g. "7" or 7)
        int_val = int(val)
    return ContractParameterType(int_val)
317,282
Create an instance. Args: *args: **kwargs:
def __init__(self, *args, **kwargs):
    """Create a MinerTransaction instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(MinerTransaction, self).__init__(*args, **kwargs)
    # Fix the transaction type regardless of what the base class set
    self.Type = TransactionType.MinerTransaction
317,283
Deserialize full object. Args: reader (neo.IO.BinaryReader):
def DeserializeExclusiveData(self, reader):
    """Deserialize the miner-specific data.

    Args:
        reader (neo.IO.BinaryReader):
    """
    self.Nonce = reader.ReadUInt32()
    # Re-assert the type; deserialization must never change it
    self.Type = TransactionType.MinerTransaction
317,284
Deserialize full object. Args: reader (neo.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neo.IO.BinaryReader):
    """
    # Read order is part of the wire format: script, parameter list, return type
    self.Script = reader.ReadVarBytes()
    self.ParameterList = reader.ReadVarBytes()
    self.ReturnType = reader.ReadByte()
317,308
Serialize full object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    # Mirror of Deserialize: script, parameter list, return type
    writer.WriteVarBytes(self.Script)
    writer.WriteVarBytes(self.ParameterList)
    writer.WriteByte(self.ReturnType)
317,309
Serialize object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    # Length-prefixed list of network addresses
    writer.WriteVarInt(len(self.NetworkAddressesWithTime))
    for network_address in self.NetworkAddressesWithTime:
        network_address.Serialize(writer)
317,311
Create an instance. Args: *args: **kwargs:
def __init__(self, *args, **kwargs):
    """Create an InvocationTransaction instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(InvocationTransaction, self).__init__(*args, **kwargs)
    self.Type = TransactionType.InvocationTransaction
    # Gas defaults to zero until deserialized or explicitly set
    self.Gas = Fixed8(0)
317,322
Deserialize full object. Args: reader (neo.IO.BinaryReader): Raises: Exception: If the version read is incorrect.
def DeserializeExclusiveData(self, reader):
    """Deserialize the invocation-specific data.

    Args:
        reader (neo.IO.BinaryReader):

    Raises:
        Exception: If the version is unsupported, the script is empty,
            or the gas amount is negative.
    """
    if self.Version > 1:
        raise Exception('Invalid format')

    self.Script = reader.ReadVarBytes()
    if not self.Script:
        raise Exception('Invalid Format')

    # Gas is only present on the wire from version 1 onwards
    if self.Version >= 1:
        self.Gas = reader.ReadFixed8()
        if self.Gas < Fixed8.Zero():
            raise Exception("Invalid Format")
    else:
        self.Gas = Fixed8(0)
317,323
Serialize object. Args: writer (neo.IO.BinaryWriter):
def SerializeExclusiveData(self, writer):
    """Serialize the invocation-specific data.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    writer.WriteVarBytes(self.Script)
    # Gas is only written from version 1 onwards, mirroring deserialization
    if self.Version >= 1:
        writer.WriteFixed8(self.Gas)
317,324
Verify the transaction. Args: mempool: Returns: bool: True if verified. False otherwise.
def Verify(self, mempool):
    """Verify the transaction.

    Args:
        mempool:

    Returns:
        bool: True if verified. False otherwise.
    """
    # Gas must be a whole number of GAS units (multiple of 10^8)
    if self.Gas.value % 100000000:
        return False
    return super(InvocationTransaction, self).Verify(mempool)
317,325
Create an instance. Args: *args: **kwargs:
def __init__(self, *args, **kwargs):
    """Create a StateTransaction instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(StateTransaction, self).__init__(*args, **kwargs)
    # Fix the transaction type regardless of what the base class set
    self.Type = TransactionType.StateTransaction
317,328
Deserialize full object. Args: reader (neo.IO.BinaryReader): Raises: Exception: If the transaction type is incorrect or if there are no claims.
def DeserializeExclusiveData(self, reader):
    """Deserialize the state-transaction-specific data.

    Args:
        reader (neo.IO.BinaryReader):
    """
    # Re-assert the type, then read the descriptor array from the wire
    self.Type = TransactionType.StateTransaction
    self.Descriptors = reader.ReadSerializableArray('neo.Core.State.StateDescriptor.StateDescriptor')
317,330
Verify the transaction. Args: mempool: Returns: bool: True if verified. False otherwise.
def Verify(self, mempool):
    """Verify the transaction.

    Args:
        mempool:

    Returns:
        bool: True if verified. False otherwise.
    """
    # all() short-circuits exactly like the original early-return loop
    if not all(descriptor.Verify() for descriptor in self.Descriptors):
        return False
    return super(StateTransaction, self).Verify(mempool)
317,332
Get the block hash by its block height Args: height(int): height of the block to retrieve hash from. Returns: bytes: a non-raw block hash (e.g. b'6dd83ed8a3fc02e322f91f30431bf3662a8c8e8ebe976c3565f0d21c70620991', but not b'\x6d\xd8...etc')
def GetBlockHash(self, height):
    """Get the block hash by its block height.

    Args:
        height (int): height of the block to retrieve the hash from.

    Returns:
        bytes: a non-raw block hash, or None when the height is not yet
            persisted or outside the header index.
    """
    if self._current_block_height < height:
        return None
    if height >= len(self._header_index):
        return None
    return self._header_index[height]
317,358
Get a block by its height. Args: height(int): the height of the block to retrieve. Returns: neo.Core.Block: block instance.
def GetBlockByHeight(self, height):
    """Get a block by its height.

    Args:
        height (int): the height of the block to retrieve.

    Returns:
        neo.Core.Block: block instance, or None when the height is unknown.
    """
    block_hash = self.GetBlockHash(height)
    return self.GetBlockByHash(block_hash) if block_hash is not None else None
317,360
Create an instance. Args: *args: **kwargs:
def __init__(self, *args, **kwargs):
    """Create an EnrollmentTransaction instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(EnrollmentTransaction, self).__init__(*args, **kwargs)
    # Fix the transaction type regardless of what the base class set
    self.Type = TransactionType.EnrollmentTransaction
317,370
Deserialize full object. Args: reader (neo.IO.BinaryReader): Raises: Exception: If the version read is incorrect.
def DeserializeExclusiveData(self, reader):
    """Deserialize the enrollment-specific data.

    Args:
        reader (neo.IO.BinaryReader):

    Raises:
        Exception: If the version read is not zero.
    """
    # BUGFIX: was `self.Version is not 0` — identity comparison with an int
    # literal only works via CPython's small-int caching and is a
    # SyntaxWarning on Python 3.8+; use value equality instead.
    if self.Version != 0:
        raise Exception('Invalid format')
    self.PublicKey = ECDSA.Deserialize_Secp256r1(reader)
317,371
Load settings from the privnet JSON config file Args: host (string, optional): if supplied, uses this IP or domain as neo nodes. The host must use these standard ports: P2P 20333, RPC 30333.
def setup_privnet(self, host=None):
    """Load settings from the privnet JSON config file.

    Args:
        host (string, optional): if supplied, uses this IP or domain as neo nodes.
            The host must use the standard ports: P2P 20333, RPC 30333.

    Raises:
        Exception: if the host contains a protocol prefix or port.
    """
    self.setup(FILENAME_SETTINGS_PRIVNET)
    if isinstance(host, str):
        # A bare IP/domain only — ':' would indicate a scheme or port
        if ":" in host:
            raise Exception("No protocol prefix or port allowed in host, use just the IP or domain.")
        print("Using custom privatenet host:", host)
        self.SEED_LIST = ["%s:20333" % host]
        self.RPC_LIST = ["http://%s:30333" % host]
        print("- P2P:", ", ".join(self.SEED_LIST))
        print("- RPC:", ", ".join(self.RPC_LIST))
    self.check_privatenet()
317,384
Setup logging to a (rotating) logfile. Args: filename (str): Logfile. If filename is None, disable file logging max_bytes (int): Maximum number of bytes per logfile. If used together with backup_count, logfile will be rotated when it reaches this amount of bytes. backup_count (int): Number of rotated logfiles to keep
def set_logfile(self, filename, max_bytes=0, backup_count=0):
    """Setup logging to a (rotating) logfile.

    Args:
        filename (str): Logfile. If filename is None, disable file logging.
        max_bytes (int): Maximum number of bytes per logfile. If used together with
            backup_count, logfile will be rotated when it reaches this amount of bytes.
        backup_count (int): Number of rotated logfiles to keep.
    """
    _logger = logging.getLogger("neo-python")
    if not filename:
        # BUGFIX: the original condition `not filename and not self.rotating_filehandler`
        # removed the handler only when none existed (a no-op), never detached a live
        # handler, and sent filename=None into RotatingFileHandler when one was attached.
        if self.rotating_filehandler:
            _logger.removeHandler(self.rotating_filehandler)
    else:
        self.rotating_filehandler = RotatingFileHandler(filename,
                                                        mode='a',
                                                        maxBytes=max_bytes,
                                                        backupCount=backup_count,
                                                        encoding=None)
        self.rotating_filehandler.setLevel(logging.DEBUG)
        self.rotating_filehandler.setFormatter(LogFormatter(color=False))
        _logger.addHandler(self.rotating_filehandler)
317,387
Set the minimum loglevel for all components Args: level (int): eg. logging.DEBUG or logging.ERROR. See also https://docs.python.org/2/library/logging.html#logging-levels
def set_loglevel(self, level):
    """Set the minimum loglevel for all components.

    Args:
        level (int): e.g. logging.DEBUG or logging.ERROR. See also
            https://docs.python.org/2/library/logging.html#logging-levels
    """
    # Remember the level, then propagate it to the stdio log manager
    self.log_level = level
    log_manager.config_stdio(default_level=level)
317,388
Create an instance. Args: value (bytearray): value to store.
def __init__(self, value=None):
    """Create a StorageItem instance.

    Args:
        value (bytearray): value to store; defaults to an empty bytearray.
    """
    self.Value = bytearray(0) if value is None else value
317,420
Deserialize full object. Args: reader (neocore.IO.BinaryReader):
def Deserialize(self, reader):
    """Deserialize full object.

    Args:
        reader (neocore.IO.BinaryReader):
    """
    super(StorageItem, self).Deserialize(reader)
    # The stored value is a length-prefixed byte string
    self.Value = reader.ReadVarBytes()
317,421
Deserialize full object. Args: buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from. Returns: StorageItem:
def DeserializeFromDB(buffer):
    """Deserialize a StorageItem from raw DB bytes.

    Args:
        buffer (bytes, bytearray, BytesIO): (Optional) data to create the stream from.

    Returns:
        StorageItem:
    """
    stream = StreamManager.GetStream(buffer)
    binary_reader = BinaryReader(stream)
    item = StorageItem()
    item.Deserialize(binary_reader)
    StreamManager.ReleaseStream(stream)
    return item
317,422
Serialize full object. Args: writer (neo.IO.BinaryWriter):
def Serialize(self, writer):
    """Serialize full object.

    Args:
        writer (neo.IO.BinaryWriter):
    """
    super(StorageItem, self).Serialize(writer)
    # The stored value is written as a length-prefixed byte string
    writer.WriteVarBytes(self.Value)
317,423
Create an instance. Args: *args: **kwargs:
def __init__(self, *args, **kwargs):
    """Create an IssueTransaction instance.

    Args:
        *args: forwarded to the Transaction base class.
        **kwargs: forwarded to the Transaction base class.
    """
    super(IssueTransaction, self).__init__(*args, **kwargs)
    # Fix the transaction type regardless of what the base class set
    self.Type = TransactionType.IssueTransaction
317,424