Dataset schema: each example below lists four string fields, in this order:

| Column          | Min length | Max length |
|-----------------|------------|------------|
| code            | 75         | 104k       |
| code_sememe     | 47         | 309k       |
| token_type      | 215        | 214k       |
| code_dependency | 75         | 155k       |
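For orientation, a minimal sketch of loading a dataset with this schema via the Hugging Face `datasets` library and inspecting one row. The dataset identifier `user/code-sememe` is a placeholder, not the real repository name:

from datasets import load_dataset

# Hypothetical dataset ID; substitute the actual repository name.
ds = load_dataset("user/code-sememe", split="train")

row = ds[0]
# Each row carries four parallel string views of the same function.
print(row["code"][:200])             # raw Python source
print(row["code_sememe"][:200])      # AST-style semantic dump
print(row["token_type"][:200])       # token stream tagged by category
print(row["code_dependency"][:200])  # source annotated with control-flow markers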
def _caches_dicts(self):
    """
    Caches variants_dict and replace_dict in a single database hit.
    """
    qs = (self.get_query_set() if django.VERSION < (1, 6)
          else self.get_queryset())
    variants_dict = self._get_variants_dict(qs)
    cache.set(VARIANTS_DICT_CACHE_KEY, variants_dict)
    replace_dict = self._get_replace_dict(qs)
    cache.set(REPLACE_DICT_CACHE_KEY, replace_dict)
    return variants_dict, replace_dict
def function[_caches_dicts, parameter[self]]: constant[ Caches variants_dict and replace_dict in a single database hit. ] variable[qs] assign[=] <ast.IfExp object at 0x7da2047e85e0> variable[variants_dict] assign[=] call[name[self]._get_variants_dict, parameter[name[qs]]] call[name[cache].set, parameter[name[VARIANTS_DICT_CACHE_KEY], name[variants_dict]]] variable[replace_dict] assign[=] call[name[self]._get_replace_dict, parameter[name[qs]]] call[name[cache].set, parameter[name[REPLACE_DICT_CACHE_KEY], name[replace_dict]]] return[tuple[[<ast.Name object at 0x7da18fe90b50>, <ast.Name object at 0x7da18fe93550>]]]
keyword[def] identifier[_caches_dicts] ( identifier[self] ): literal[string] identifier[qs] =( identifier[self] . identifier[get_query_set] () keyword[if] identifier[django] . identifier[VERSION] <( literal[int] , literal[int] ) keyword[else] identifier[self] . identifier[get_queryset] ()) identifier[variants_dict] = identifier[self] . identifier[_get_variants_dict] ( identifier[qs] ) identifier[cache] . identifier[set] ( identifier[VARIANTS_DICT_CACHE_KEY] , identifier[variants_dict] ) identifier[replace_dict] = identifier[self] . identifier[_get_replace_dict] ( identifier[qs] ) identifier[cache] . identifier[set] ( identifier[REPLACE_DICT_CACHE_KEY] , identifier[replace_dict] ) keyword[return] identifier[variants_dict] , identifier[replace_dict]
def _caches_dicts(self):
    """ Caches variants_dict and replace_dict in a single database hit. """
    qs = self.get_query_set() if django.VERSION < (1, 6) else self.get_queryset()
    variants_dict = self._get_variants_dict(qs)
    cache.set(VARIANTS_DICT_CACHE_KEY, variants_dict)
    replace_dict = self._get_replace_dict(qs)
    cache.set(REPLACE_DICT_CACHE_KEY, replace_dict)
    return (variants_dict, replace_dict)
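The `code_sememe` field above resembles a dump of the parsed abstract syntax tree, with nodes rendered as `function[...]`, `parameter[...]`, `constant[...]`, and so on. A minimal sketch of producing a comparable (though not identical) representation with Python's standard `ast` module:

import ast

source = '''
def add(a, b):
    """Return the sum."""
    return a + b
'''

tree = ast.parse(source)
# ast.dump renders each node with its fields, much like the
# notation in the code_sememe column (indent= requires Python 3.9+).
print(ast.dump(tree, indent=2))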
def scope_compose(scope, name, sep=private.SCOPE_SEPARATOR):
    """ compose a new scope

    :param str scope: current scope
    :param str name: name of next level in scope
    :return the composed scope
    """
    if name == None:
        new_scope = scope
    else:
        new_scope = scope if scope else name
        if scope and name:
            new_scope = scope + sep + name
    return new_scope
def function[scope_compose, parameter[scope, name, sep]]: constant[ compose a new scope :param str scope: current scope :param str name: name of next level in scope :return the composed scope ] if compare[name[name] equal[==] constant[None]] begin[:] variable[new_scope] assign[=] name[scope] if <ast.BoolOp object at 0x7da2044c34c0> begin[:] variable[new_scope] assign[=] binary_operation[binary_operation[name[scope] + name[sep]] + name[name]] return[name[new_scope]]
keyword[def] identifier[scope_compose] ( identifier[scope] , identifier[name] , identifier[sep] = identifier[private] . identifier[SCOPE_SEPARATOR] ): literal[string] keyword[if] identifier[name] == keyword[None] : identifier[new_scope] = identifier[scope] keyword[else] : identifier[new_scope] = identifier[scope] keyword[if] identifier[scope] keyword[else] identifier[name] keyword[if] identifier[scope] keyword[and] identifier[name] : identifier[new_scope] = identifier[scope] + identifier[sep] + identifier[name] keyword[return] identifier[new_scope]
def scope_compose(scope, name, sep=private.SCOPE_SEPARATOR):
    """ compose a new scope

    :param str scope: current scope
    :param str name: name of next level in scope
    :return the composed scope
    """
    if name == None:
        new_scope = scope # depends on [control=['if'], data=[]]
    else:
        new_scope = scope if scope else name
        if scope and name:
            new_scope = scope + sep + name # depends on [control=['if'], data=[]]
    return new_scope
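The `token_type` field tags every token as `keyword[...]`, `identifier[...]`, or `literal[...]`. A rough sketch of deriving similar tags with the standard `tokenize` and `keyword` modules; the category names follow the column above, but the dataset's actual tagging pipeline is unknown:

import io
import keyword
import tokenize

def tag_tokens(source):
    """Yield (category, token) pairs in the spirit of the token_type column."""
    for tok in tokenize.generate_tokens(io.StringIO(source).readline):
        if tok.type == tokenize.NAME:
            # Distinguish reserved words from ordinary names.
            yield ('keyword' if keyword.iskeyword(tok.string) else 'identifier', tok.string)
        elif tok.type in (tokenize.NUMBER, tokenize.STRING):
            yield ('literal', tok.string)

print(list(tag_tokens('def f(x): return x + 1\n')))
# [('keyword', 'def'), ('identifier', 'f'), ('identifier', 'x'),
#  ('keyword', 'return'), ('identifier', 'x'), ('literal', '1')]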
def persons_significant_control(self, num, statements=False, **kwargs):
    """Search for a list of persons with significant control.

    Searches for persons of significant control based on company
    number for a specified company. Specify statements=True to only
    search for officers with statements.

    Args:
        num (str, int): Company number to search on.
        statements (Optional[bool]): Search only for persons with
            statements. Default is False.
        kwargs (dict): additional keywords passed into
            requests.session.get *params* keyword.
    """
    baseuri = (self._BASE_URI +
               'company/{}/persons-with-significant-control'.format(num))
    # Only append statements to the URL if statements is True
    if statements is True:
        baseuri += '-statements'
    res = self.session.get(baseuri, params=kwargs)
    self.handle_http_error(res)
    return res
def function[persons_significant_control, parameter[self, num, statements]]: constant[Search for a list of persons with significant control. Searches for persons of significant control based on company number for a specified company. Specify statements=True to only search for officers with statements. Args: num (str, int): Company number to search on. statements (Optional[bool]): Search only for persons with statements. Default is False. kwargs (dict): additional keywords passed into requests.session.get *params* keyword. ] variable[baseuri] assign[=] binary_operation[name[self]._BASE_URI + call[constant[company/{}/persons-with-significant-control].format, parameter[name[num]]]] if compare[name[statements] is constant[True]] begin[:] <ast.AugAssign object at 0x7da1b04b52a0> variable[res] assign[=] call[name[self].session.get, parameter[name[baseuri]]] call[name[self].handle_http_error, parameter[name[res]]] return[name[res]]
keyword[def] identifier[persons_significant_control] ( identifier[self] , identifier[num] , identifier[statements] = keyword[False] ,** identifier[kwargs] ): literal[string] identifier[baseuri] =( identifier[self] . identifier[_BASE_URI] + literal[string] . identifier[format] ( identifier[num] )) keyword[if] identifier[statements] keyword[is] keyword[True] : identifier[baseuri] += literal[string] identifier[res] = identifier[self] . identifier[session] . identifier[get] ( identifier[baseuri] , identifier[params] = identifier[kwargs] ) identifier[self] . identifier[handle_http_error] ( identifier[res] ) keyword[return] identifier[res]
def persons_significant_control(self, num, statements=False, **kwargs):
    """Search for a list of persons with significant control.

    Searches for persons of significant control based on company
    number for a specified company. Specify statements=True to only
    search for officers with statements.

    Args:
        num (str, int): Company number to search on.
        statements (Optional[bool]): Search only for persons with
            statements. Default is False.
        kwargs (dict): additional keywords passed into
            requests.session.get *params* keyword.
    """
    baseuri = self._BASE_URI + 'company/{}/persons-with-significant-control'.format(num)
    # Only append statements to the URL if statements is True
    if statements is True:
        baseuri += '-statements' # depends on [control=['if'], data=[]]
    res = self.session.get(baseuri, params=kwargs)
    self.handle_http_error(res)
    return res
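The `code_dependency` field is the source code with `# depends on [control=..., data=...]` comments appended to statements that sit inside control-flow blocks. A simplified sketch of recovering the control part of such annotations with `ast`; the dataset's own extractor (which also tracks data dependencies) is presumably more elaborate:

import ast

source = '''
if x is not None:
    y = x + 1
for item in items:
    total += item
'''

tree = ast.parse(source)
for node in ast.walk(tree):
    if isinstance(node, (ast.If, ast.For, ast.While, ast.With, ast.Try)):
        ctrl = type(node).__name__.lower()
        for stmt in node.body:
            # Report which control construct each statement depends on.
            print(f"line {stmt.lineno}: depends on [control=['{ctrl}']]")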
def write_if_different(filename, data):
    """Write `data` to `filename`, if the content of the file is different.

    Parameters
    ----------
    filename : str
        The file name to be written to.
    data : bytes
        The data to be written to `filename`.
    """
    assert isinstance(data, bytes)
    if os.path.exists(filename):
        with open(filename, 'rb') as fd:
            original_data = fd.read()
    else:
        original_data = None
    if original_data != data:
        with open(filename, 'wb') as fd:
            fd.write(data)
def function[write_if_different, parameter[filename, data]]: constant[Write `data` to `filename`, if the content of the file is different. Parameters ---------- filename : str The file name to be written to. data : bytes The data to be written to `filename`. ] assert[call[name[isinstance], parameter[name[data], name[bytes]]]] if call[name[os].path.exists, parameter[name[filename]]] begin[:] with call[name[open], parameter[name[filename], constant[rb]]] begin[:] variable[original_data] assign[=] call[name[fd].read, parameter[]] if compare[name[original_data] not_equal[!=] name[data]] begin[:] with call[name[open], parameter[name[filename], constant[wb]]] begin[:] call[name[fd].write, parameter[name[data]]]
keyword[def] identifier[write_if_different] ( identifier[filename] , identifier[data] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[data] , identifier[bytes] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[filename] ): keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fd] : identifier[original_data] = identifier[fd] . identifier[read] () keyword[else] : identifier[original_data] = keyword[None] keyword[if] identifier[original_data] != identifier[data] : keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[fd] : identifier[fd] . identifier[write] ( identifier[data] )
def write_if_different(filename, data):
    """Write `data` to `filename`, if the content of the file is different.

    Parameters
    ----------
    filename : str
        The file name to be written to.
    data : bytes
        The data to be written to `filename`.
    """
    assert isinstance(data, bytes)
    if os.path.exists(filename):
        with open(filename, 'rb') as fd:
            original_data = fd.read() # depends on [control=['with'], data=['fd']] # depends on [control=['if'], data=[]]
    else:
        original_data = None
    if original_data != data:
        with open(filename, 'wb') as fd:
            fd.write(data) # depends on [control=['with'], data=['fd']] # depends on [control=['if'], data=['data']]
def import_process_template_status(self, id):
    """ImportProcessTemplateStatus.

    [Preview API] Tells whether promote has completed for the specified promote job ID.
    :param str id: The ID of the promote job operation
    :rtype: :class:`<ProcessPromoteStatus> <azure.devops.v5_0.work_item_tracking_process_template.models.ProcessPromoteStatus>`
    """
    route_values = {}
    if id is not None:
        route_values['id'] = self._serialize.url('id', id, 'str')
    route_values['action'] = 'Status'
    response = self._send(http_method='GET',
                          location_id='29e1f38d-9e9c-4358-86a5-cdf9896a5759',
                          version='5.0-preview.1',
                          route_values=route_values)
    return self._deserialize('ProcessPromoteStatus', response)
def function[import_process_template_status, parameter[self, id]]: constant[ImportProcessTemplateStatus. [Preview API] Tells whether promote has completed for the specified promote job ID. :param str id: The ID of the promote job operation :rtype: :class:`<ProcessPromoteStatus> <azure.devops.v5_0.work_item_tracking_process_template.models.ProcessPromoteStatus>` ] variable[route_values] assign[=] dictionary[[], []] if compare[name[id] is_not constant[None]] begin[:] call[name[route_values]][constant[id]] assign[=] call[name[self]._serialize.url, parameter[constant[id], name[id], constant[str]]] call[name[route_values]][constant[action]] assign[=] constant[Status] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[ProcessPromoteStatus], name[response]]]]
keyword[def] identifier[import_process_template_status] ( identifier[self] , identifier[id] ): literal[string] identifier[route_values] ={} keyword[if] identifier[id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[id] , literal[string] ) identifier[route_values] [ literal[string] ]= literal[string] identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def import_process_template_status(self, id):
    """ImportProcessTemplateStatus.

    [Preview API] Tells whether promote has completed for the specified promote job ID.
    :param str id: The ID of the promote job operation
    :rtype: :class:`<ProcessPromoteStatus> <azure.devops.v5_0.work_item_tracking_process_template.models.ProcessPromoteStatus>`
    """
    route_values = {}
    if id is not None:
        route_values['id'] = self._serialize.url('id', id, 'str') # depends on [control=['if'], data=['id']]
    route_values['action'] = 'Status'
    response = self._send(http_method='GET',
                          location_id='29e1f38d-9e9c-4358-86a5-cdf9896a5759',
                          version='5.0-preview.1',
                          route_values=route_values)
    return self._deserialize('ProcessPromoteStatus', response)
def iam(cls, account_name, api_key, **kwargs):
    """
    Create a Cloudant client that uses IAM authentication.

    :param account_name: Cloudant account name.
    :param api_key: IAM authentication API key.
    """
    return cls(None, api_key,
               account=account_name,
               auto_renew=kwargs.get('auto_renew', True),
               use_iam=True,
               **kwargs)
def function[iam, parameter[cls, account_name, api_key]]: constant[ Create a Cloudant client that uses IAM authentication. :param account_name: Cloudant account name. :param api_key: IAM authentication API key. ] return[call[name[cls], parameter[constant[None], name[api_key]]]]
keyword[def] identifier[iam] ( identifier[cls] , identifier[account_name] , identifier[api_key] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[cls] ( keyword[None] , identifier[api_key] , identifier[account] = identifier[account_name] , identifier[auto_renew] = identifier[kwargs] . identifier[get] ( literal[string] , keyword[True] ), identifier[use_iam] = keyword[True] , ** identifier[kwargs] )
def iam(cls, account_name, api_key, **kwargs):
    """
    Create a Cloudant client that uses IAM authentication.

    :param account_name: Cloudant account name.
    :param api_key: IAM authentication API key.
    """
    return cls(None, api_key,
               account=account_name,
               auto_renew=kwargs.get('auto_renew', True),
               use_iam=True,
               **kwargs)
def set_priors(self, priors=None, fixed=None, random=None,
               match_derived_names=True):
    '''Set priors for one or more existing terms.

    Args:
        priors (dict): Dict of priors to update. Keys are names of terms
            to update; values are the new priors (either a Prior
            instance, or an int or float that scales the default priors).
            Note that a tuple can be passed as the key, in which case the
            same prior will be applied to all terms named in the tuple.
        fixed (Prior, int, float, str): a prior specification to apply to
            all fixed terms currently included in the model.
        random (Prior, int, float, str): a prior specification to apply
            to all random terms currently included in the model.
        match_derived_names (bool): if True, the specified prior(s) will
            be applied not only to terms that match the keyword exactly,
            but to the levels of random effects that were derived from
            the original specification with the passed name. For example,
            `priors={'condition|subject':0.5}` would apply the prior to
            the terms with names '1|subject', 'condition[T.1]|subject',
            and so on. If False, an exact match is required for the prior
            to be applied.
    '''
    # save arguments to pass to _set_priors() at build time
    kwargs = dict(zip(
        ['priors', 'fixed', 'random', 'match_derived_names'],
        [priors, fixed, random, match_derived_names]))
    self._added_priors.update(kwargs)
    self.built = False
def function[set_priors, parameter[self, priors, fixed, random, match_derived_names]]: constant[Set priors for one or more existing terms. Args: priors (dict): Dict of priors to update. Keys are names of terms to update; values are the new priors (either a Prior instance, or an int or float that scales the default priors). Note that a tuple can be passed as the key, in which case the same prior will be applied to all terms named in the tuple. fixed (Prior, int, float, str): a prior specification to apply to all fixed terms currently included in the model. random (Prior, int, float, str): a prior specification to apply to all random terms currently included in the model. match_derived_names (bool): if True, the specified prior(s) will be applied not only to terms that match the keyword exactly, but to the levels of random effects that were derived from the original specification with the passed name. For example, `priors={'condition|subject':0.5}` would apply the prior to the terms with names '1|subject', 'condition[T.1]|subject', and so on. If False, an exact match is required for the prior to be applied. ] variable[kwargs] assign[=] call[name[dict], parameter[call[name[zip], parameter[list[[<ast.Constant object at 0x7da1b1661570>, <ast.Constant object at 0x7da1b1661810>, <ast.Constant object at 0x7da1b1660430>, <ast.Constant object at 0x7da1b16627d0>]], list[[<ast.Name object at 0x7da1b1660e20>, <ast.Name object at 0x7da1b16613c0>, <ast.Name object at 0x7da1b1661ab0>, <ast.Name object at 0x7da1b16613f0>]]]]]] call[name[self]._added_priors.update, parameter[name[kwargs]]] name[self].built assign[=] constant[False]
keyword[def] identifier[set_priors] ( identifier[self] , identifier[priors] = keyword[None] , identifier[fixed] = keyword[None] , identifier[random] = keyword[None] , identifier[match_derived_names] = keyword[True] ): literal[string] identifier[kwargs] = identifier[dict] ( identifier[zip] ( [ literal[string] , literal[string] , literal[string] , literal[string] ], [ identifier[priors] , identifier[fixed] , identifier[random] , identifier[match_derived_names] ])) identifier[self] . identifier[_added_priors] . identifier[update] ( identifier[kwargs] ) identifier[self] . identifier[built] = keyword[False]
def set_priors(self, priors=None, fixed=None, random=None,
               match_derived_names=True):
    """Set priors for one or more existing terms.

    Args:
        priors (dict): Dict of priors to update. Keys are names of terms
            to update; values are the new priors (either a Prior
            instance, or an int or float that scales the default priors).
            Note that a tuple can be passed as the key, in which case the
            same prior will be applied to all terms named in the tuple.
        fixed (Prior, int, float, str): a prior specification to apply to
            all fixed terms currently included in the model.
        random (Prior, int, float, str): a prior specification to apply
            to all random terms currently included in the model.
        match_derived_names (bool): if True, the specified prior(s) will
            be applied not only to terms that match the keyword exactly,
            but to the levels of random effects that were derived from
            the original specification with the passed name. For example,
            `priors={'condition|subject':0.5}` would apply the prior to
            the terms with names '1|subject', 'condition[T.1]|subject',
            and so on. If False, an exact match is required for the prior
            to be applied.
    """
    # save arguments to pass to _set_priors() at build time
    kwargs = dict(zip(['priors', 'fixed', 'random', 'match_derived_names'],
                      [priors, fixed, random, match_derived_names]))
    self._added_priors.update(kwargs)
    self.built = False
def get_mysql_credentials(cfg_file):
    """Get the credentials and database name from options in config file."""
    try:
        parser = ConfigParser.ConfigParser()
        cfg_fp = open(cfg_file)
        parser.readfp(cfg_fp)
        cfg_fp.close()
    except ConfigParser.NoOptionError:
        cfg_fp.close()
        print('Failed to find mysql connections credentials.')
        sys.exit(1)
    except IOError:
        print('ERROR: Cannot open %s.', cfg_file)
        sys.exit(1)

    value = parser.get('dfa_mysql', 'connection')

    try:
        # Find location of pattern in connection parameter as shown below:
        # http://username:password@host/databasename?characterset=encoding'
        sobj = re.search(r"(://).*(@).*(/).*(\?)", value)

        # The list parameter contains:
        # indices[0], is the index of '://'
        # indices[1], is the index of '@'
        # indices[2], is the index of '/'
        # indices[3], is the index of '?'
        indices = [sobj.start(1), sobj.start(2), sobj.start(3), sobj.start(4)]

        # Get the credentials
        cred = value[indices[0] + 3:indices[1]].split(':')

        # Get the host name
        host = value[indices[1] + 1:indices[2]]

        # Get the database name
        db_name = value[indices[2] + 1:indices[3]]

        # Get the character encoding
        charset = value[indices[3] + 1:].split('=')[1]

        return cred[0], cred[1], host, db_name, charset
    except (ValueError, IndexError, AttributeError):
        print('Failed to find mysql connections credentials.')
        sys.exit(1)
def function[get_mysql_credentials, parameter[cfg_file]]: constant[Get the credentials and database name from options in config file.] <ast.Try object at 0x7da18ede6680> variable[value] assign[=] call[name[parser].get, parameter[constant[dfa_mysql], constant[connection]]] <ast.Try object at 0x7da18ede7100>
keyword[def] identifier[get_mysql_credentials] ( identifier[cfg_file] ): literal[string] keyword[try] : identifier[parser] = identifier[ConfigParser] . identifier[ConfigParser] () identifier[cfg_fp] = identifier[open] ( identifier[cfg_file] ) identifier[parser] . identifier[readfp] ( identifier[cfg_fp] ) identifier[cfg_fp] . identifier[close] () keyword[except] identifier[ConfigParser] . identifier[NoOptionError] : identifier[cfg_fp] . identifier[close] () identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[except] identifier[IOError] : identifier[print] ( literal[string] , identifier[cfg_file] ) identifier[sys] . identifier[exit] ( literal[int] ) identifier[value] = identifier[parser] . identifier[get] ( literal[string] , literal[string] ) keyword[try] : identifier[sobj] = identifier[re] . identifier[search] ( literal[string] , identifier[value] ) identifier[indices] =[ identifier[sobj] . identifier[start] ( literal[int] ), identifier[sobj] . identifier[start] ( literal[int] ), identifier[sobj] . identifier[start] ( literal[int] ), identifier[sobj] . identifier[start] ( literal[int] )] identifier[cred] = identifier[value] [ identifier[indices] [ literal[int] ]+ literal[int] : identifier[indices] [ literal[int] ]]. identifier[split] ( literal[string] ) identifier[host] = identifier[value] [ identifier[indices] [ literal[int] ]+ literal[int] : identifier[indices] [ literal[int] ]] identifier[db_name] = identifier[value] [ identifier[indices] [ literal[int] ]+ literal[int] : identifier[indices] [ literal[int] ]] identifier[charset] = identifier[value] [ identifier[indices] [ literal[int] ]+ literal[int] :]. identifier[split] ( literal[string] )[ literal[int] ] keyword[return] identifier[cred] [ literal[int] ], identifier[cred] [ literal[int] ], identifier[host] , identifier[db_name] , identifier[charset] keyword[except] ( identifier[ValueError] , identifier[IndexError] , identifier[AttributeError] ): identifier[print] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] )
def get_mysql_credentials(cfg_file):
    """Get the credentials and database name from options in config file."""
    try:
        parser = ConfigParser.ConfigParser()
        cfg_fp = open(cfg_file)
        parser.readfp(cfg_fp)
        cfg_fp.close() # depends on [control=['try'], data=[]]
    except ConfigParser.NoOptionError:
        cfg_fp.close()
        print('Failed to find mysql connections credentials.')
        sys.exit(1) # depends on [control=['except'], data=[]]
    except IOError:
        print('ERROR: Cannot open %s.', cfg_file)
        sys.exit(1) # depends on [control=['except'], data=[]]
    value = parser.get('dfa_mysql', 'connection')
    try:
        # Find location of pattern in connection parameter as shown below:
        # http://username:password@host/databasename?characterset=encoding'
        sobj = re.search('(://).*(@).*(/).*(\\?)', value)
        # The list parameter contains:
        # indices[0], is the index of '://'
        # indices[1], is the index of '@'
        # indices[2], is the index of '/'
        # indices[3], is the index of '?'
        indices = [sobj.start(1), sobj.start(2), sobj.start(3), sobj.start(4)]
        # Get the credentials
        cred = value[indices[0] + 3:indices[1]].split(':')
        # Get the host name
        host = value[indices[1] + 1:indices[2]]
        # Get the database name
        db_name = value[indices[2] + 1:indices[3]]
        # Get the character encoding
        charset = value[indices[3] + 1:].split('=')[1]
        return (cred[0], cred[1], host, db_name, charset) # depends on [control=['try'], data=[]]
    except (ValueError, IndexError, AttributeError):
        print('Failed to find mysql connections credentials.')
        sys.exit(1) # depends on [control=['except'], data=[]]
def dice(edge=15, fn=32):
    """ dice """
    edge = float(edge)

    # dice
    c = ops.Cube(edge, center=True)
    s = ops.Sphere(edge * 3 / 4, center=True)
    dice = c & s

    # points
    c = ops.Circle(edge / 12, _fn=fn)
    h = 0.7
    point = c.linear_extrude(height=h)
    point1 = point.translate([0, 0, edge / 2 - h / 2])
    point2_1 = point1.rotate(a=90, v=[1, 0, 0]).translate([edge / 6, 0, edge / 6])
    point2_2 = point2_1.mirror([-edge / 6, 0, -edge / 6])
    point2 = point2_1 + point2_2
    point3 = point2.rotate(a=90, v=[0, 0, 1]) + point1.rotate(a=90, v=[0, 1, 0])
    point4_12 = point2.rotate(a=-90, v=[0, 0, 1])
    point4 = point4_12 + point4_12.mirror([0, 1, 0])
    point5_123 = point3.rotate(a=90, v=[0, 0, 1])
    point5 = point5_123 + point5_123.mirror([1, 0, 0])
    point6_1 = point.translate([0, 0, -(edge / 2 + h / 2)]).translate([0, edge / 6, 0])
    point6_2 = point6_1.translate([edge / 4, 0, 0])
    point6_3 = point6_1.translate([-edge / 4, 0, 0])
    point6_123 = point6_1 + point6_2 + point6_3
    point6_456 = point6_123.mirror([0, 1, 0])
    point6 = point6_123 + point6_456

    dice_with_holes = dice - point1 - point2 - point3 - point4 - point5 - point6
    dice_with_holes = dice_with_holes.mirror([0, 0, 1])
    return(dice_with_holes)
def function[dice, parameter[edge, fn]]: constant[ dice ] variable[edge] assign[=] call[name[float], parameter[name[edge]]] variable[c] assign[=] call[name[ops].Cube, parameter[name[edge]]] variable[s] assign[=] call[name[ops].Sphere, parameter[binary_operation[binary_operation[name[edge] * constant[3]] / constant[4]]]] variable[dice] assign[=] binary_operation[name[c] <ast.BitAnd object at 0x7da2590d6b60> name[s]] variable[c] assign[=] call[name[ops].Circle, parameter[binary_operation[name[edge] / constant[12]]]] variable[h] assign[=] constant[0.7] variable[point] assign[=] call[name[c].linear_extrude, parameter[]] variable[point1] assign[=] call[name[point].translate, parameter[list[[<ast.Constant object at 0x7da1b27eecb0>, <ast.Constant object at 0x7da1b27ee9e0>, <ast.BinOp object at 0x7da1b27eee30>]]]] variable[point2_1] assign[=] call[call[name[point1].rotate, parameter[]].translate, parameter[list[[<ast.BinOp object at 0x7da1b27ec5e0>, <ast.Constant object at 0x7da1b27ee650>, <ast.BinOp object at 0x7da1b27ec400>]]]] variable[point2_2] assign[=] call[name[point2_1].mirror, parameter[list[[<ast.BinOp object at 0x7da1b27ed240>, <ast.Constant object at 0x7da1b27edfc0>, <ast.BinOp object at 0x7da1b27ee710>]]]] variable[point2] assign[=] binary_operation[name[point2_1] + name[point2_2]] variable[point3] assign[=] binary_operation[call[name[point2].rotate, parameter[]] + call[name[point1].rotate, parameter[]]] variable[point4_12] assign[=] call[name[point2].rotate, parameter[]] variable[point4] assign[=] binary_operation[name[point4_12] + call[name[point4_12].mirror, parameter[list[[<ast.Constant object at 0x7da1b272eb00>, <ast.Constant object at 0x7da1b272f790>, <ast.Constant object at 0x7da1b272f070>]]]]] variable[point5_123] assign[=] call[name[point3].rotate, parameter[]] variable[point5] assign[=] binary_operation[name[point5_123] + call[name[point5_123].mirror, parameter[list[[<ast.Constant object at 0x7da204564f70>, <ast.Constant object at 0x7da204567580>, <ast.Constant object at 0x7da204566290>]]]]] variable[point6_1] assign[=] call[call[name[point].translate, parameter[list[[<ast.Constant object at 0x7da204566500>, <ast.Constant object at 0x7da204564af0>, <ast.UnaryOp object at 0x7da204566920>]]]].translate, parameter[list[[<ast.Constant object at 0x7da20e9b19c0>, <ast.BinOp object at 0x7da20e9b2cb0>, <ast.Constant object at 0x7da20e9b03d0>]]]] variable[point6_2] assign[=] call[name[point6_1].translate, parameter[list[[<ast.BinOp object at 0x7da20e9b06a0>, <ast.Constant object at 0x7da20e9b2740>, <ast.Constant object at 0x7da20e9b05e0>]]]] variable[point6_3] assign[=] call[name[point6_1].translate, parameter[list[[<ast.BinOp object at 0x7da20e9b2ad0>, <ast.Constant object at 0x7da20e9b2ec0>, <ast.Constant object at 0x7da20e9b1b10>]]]] variable[point6_123] assign[=] binary_operation[binary_operation[name[point6_1] + name[point6_2]] + name[point6_3]] variable[point6_456] assign[=] call[name[point6_123].mirror, parameter[list[[<ast.Constant object at 0x7da20e9b0e20>, <ast.Constant object at 0x7da20e9b2080>, <ast.Constant object at 0x7da20e9b1210>]]]] variable[point6] assign[=] binary_operation[name[point6_123] + name[point6_456]] variable[dice_with_holes] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[dice] - name[point1]] - name[point2]] - name[point3]] - name[point4]] - name[point5]] - name[point6]] variable[dice_with_holes] assign[=] call[name[dice_with_holes].mirror, parameter[list[[<ast.Constant object at 0x7da20c7cacb0>, <ast.Constant object at 0x7da20c7c84f0>, <ast.Constant object at 0x7da20c7c8cd0>]]]] return[name[dice_with_holes]]
keyword[def] identifier[dice] ( identifier[edge] = literal[int] , identifier[fn] = literal[int] ): literal[string] identifier[edge] = identifier[float] ( identifier[edge] ) identifier[c] = identifier[ops] . identifier[Cube] ( identifier[edge] , identifier[center] = keyword[True] ) identifier[s] = identifier[ops] . identifier[Sphere] ( identifier[edge] * literal[int] / literal[int] , identifier[center] = keyword[True] ) identifier[dice] = identifier[c] & identifier[s] identifier[c] = identifier[ops] . identifier[Circle] ( identifier[edge] / literal[int] , identifier[_fn] = identifier[fn] ) identifier[h] = literal[int] identifier[point] = identifier[c] . identifier[linear_extrude] ( identifier[height] = identifier[h] ) identifier[point1] = identifier[point] . identifier[translate] ([ literal[int] , literal[int] , identifier[edge] / literal[int] - identifier[h] / literal[int] ]) identifier[point2_1] = identifier[point1] . identifier[rotate] ( identifier[a] = literal[int] , identifier[v] =[ literal[int] , literal[int] , literal[int] ]). identifier[translate] ([ identifier[edge] / literal[int] , literal[int] , identifier[edge] / literal[int] ]) identifier[point2_2] = identifier[point2_1] . identifier[mirror] ([- identifier[edge] / literal[int] , literal[int] ,- identifier[edge] / literal[int] ]) identifier[point2] = identifier[point2_1] + identifier[point2_2] identifier[point3] = identifier[point2] . identifier[rotate] ( identifier[a] = literal[int] , identifier[v] =[ literal[int] , literal[int] , literal[int] ])+ identifier[point1] . identifier[rotate] ( identifier[a] = literal[int] , identifier[v] =[ literal[int] , literal[int] , literal[int] ]) identifier[point4_12] = identifier[point2] . identifier[rotate] ( identifier[a] =- literal[int] , identifier[v] =[ literal[int] , literal[int] , literal[int] ]) identifier[point4] = identifier[point4_12] + identifier[point4_12] . identifier[mirror] ([ literal[int] , literal[int] , literal[int] ]) identifier[point5_123] = identifier[point3] . identifier[rotate] ( identifier[a] = literal[int] , identifier[v] =[ literal[int] , literal[int] , literal[int] ]) identifier[point5] = identifier[point5_123] + identifier[point5_123] . identifier[mirror] ([ literal[int] , literal[int] , literal[int] ]) identifier[point6_1] = identifier[point] . identifier[translate] ([ literal[int] , literal[int] ,-( identifier[edge] / literal[int] + identifier[h] / literal[int] )]). identifier[translate] ([ literal[int] , identifier[edge] / literal[int] , literal[int] ]) identifier[point6_2] = identifier[point6_1] . identifier[translate] ([ identifier[edge] / literal[int] , literal[int] , literal[int] ]) identifier[point6_3] = identifier[point6_1] . identifier[translate] ([- identifier[edge] / literal[int] , literal[int] , literal[int] ]) identifier[point6_123] = identifier[point6_1] + identifier[point6_2] + identifier[point6_3] identifier[point6_456] = identifier[point6_123] . identifier[mirror] ([ literal[int] , literal[int] , literal[int] ]) identifier[point6] = identifier[point6_123] + identifier[point6_456] identifier[dice_with_holes] = identifier[dice] - identifier[point1] - identifier[point2] - identifier[point3] - identifier[point4] - identifier[point5] - identifier[point6] identifier[dice_with_holes] = identifier[dice_with_holes] . identifier[mirror] ([ literal[int] , literal[int] , literal[int] ]) keyword[return] ( identifier[dice_with_holes] )
def dice(edge=15, fn=32):
    """ dice """
    edge = float(edge)

    # dice
    c = ops.Cube(edge, center=True)
    s = ops.Sphere(edge * 3 / 4, center=True)
    dice = c & s

    # points
    c = ops.Circle(edge / 12, _fn=fn)
    h = 0.7
    point = c.linear_extrude(height=h)
    point1 = point.translate([0, 0, edge / 2 - h / 2])
    point2_1 = point1.rotate(a=90, v=[1, 0, 0]).translate([edge / 6, 0, edge / 6])
    point2_2 = point2_1.mirror([-edge / 6, 0, -edge / 6])
    point2 = point2_1 + point2_2
    point3 = point2.rotate(a=90, v=[0, 0, 1]) + point1.rotate(a=90, v=[0, 1, 0])
    point4_12 = point2.rotate(a=-90, v=[0, 0, 1])
    point4 = point4_12 + point4_12.mirror([0, 1, 0])
    point5_123 = point3.rotate(a=90, v=[0, 0, 1])
    point5 = point5_123 + point5_123.mirror([1, 0, 0])
    point6_1 = point.translate([0, 0, -(edge / 2 + h / 2)]).translate([0, edge / 6, 0])
    point6_2 = point6_1.translate([edge / 4, 0, 0])
    point6_3 = point6_1.translate([-edge / 4, 0, 0])
    point6_123 = point6_1 + point6_2 + point6_3
    point6_456 = point6_123.mirror([0, 1, 0])
    point6 = point6_123 + point6_456

    dice_with_holes = dice - point1 - point2 - point3 - point4 - point5 - point6
    dice_with_holes = dice_with_holes.mirror([0, 0, 1])
    return dice_with_holes
def get_bookmark(self, bookmark_id):
    """
    Get a single bookmark represented by `bookmark_id`.

    The requested bookmark must belong to the current user.

    :param bookmark_id: ID of the bookmark to retrieve.
    """
    url = self._generate_url('bookmarks/{0}'.format(bookmark_id))
    return self.get(url)
def function[get_bookmark, parameter[self, bookmark_id]]: constant[ Get a single bookmark represented by `bookmark_id`. The requested bookmark must belong to the current user. :param bookmark_id: ID of the bookmark to retrieve. ] variable[url] assign[=] call[name[self]._generate_url, parameter[call[constant[bookmarks/{0}].format, parameter[name[bookmark_id]]]]] return[call[name[self].get, parameter[name[url]]]]
keyword[def] identifier[get_bookmark] ( identifier[self] , identifier[bookmark_id] ): literal[string] identifier[url] = identifier[self] . identifier[_generate_url] ( literal[string] . identifier[format] ( identifier[bookmark_id] )) keyword[return] identifier[self] . identifier[get] ( identifier[url] )
def get_bookmark(self, bookmark_id):
    """
    Get a single bookmark represented by `bookmark_id`.

    The requested bookmark must belong to the current user.

    :param bookmark_id: ID of the bookmark to retrieve.
    """
    url = self._generate_url('bookmarks/{0}'.format(bookmark_id))
    return self.get(url)
def prepare_url_list(urlresolver, namespace_path='', namespace=''):
    """
    returns list of tuples [(<url_name>, <url_patern_tuple> ), ...]
    """
    exclude_ns = getattr(settings, 'JS_REVERSE_EXCLUDE_NAMESPACES', JS_EXCLUDE_NAMESPACES)
    include_only_ns = getattr(settings, 'JS_REVERSE_INCLUDE_ONLY_NAMESPACES', JS_INCLUDE_ONLY_NAMESPACES)

    if exclude_ns and include_only_ns:
        raise ImproperlyConfigured(
            'Neither use JS_REVERSE_EXCLUDE_NAMESPACES nor JS_REVERSE_INCLUDE_ONLY_NAMESPACES setting')

    if namespace[:-1] in exclude_ns:
        return

    include_only_allow = True  # include_only state varible

    if include_only_ns != []:
        # True mean that ns passed the test
        in_on_empty_ns = False
        in_on_is_in_list = False
        in_on_null = False

        # Test urls without ns
        if namespace == '' and '' in include_only_ns:
            in_on_empty_ns = True

        # check if nestead ns isn't subns of include_only ns
        # e.g. ns = "foo:bar" include_only = ["foo"] -> this ns will be used
        # works for ns = "lorem:ipsum:dolor" include_only = ["lorem:ipsum"]
        # ns "lorem" will be ignored but "lorem:ipsum" & "lorem:ipsum:.." won't
        for ns in include_only_ns:
            if ns != "" and namespace[:-1].startswith(ns):
                in_on_is_in_list = True
                break

        # Test if isn't used "\0" flag
        # use "foo\0" to add urls just from "foo" not from subns "foo:bar"
        if namespace[:-1] + '\0' in include_only_ns:
            in_on_null = True

        include_only_allow = in_on_empty_ns or in_on_is_in_list or in_on_null

    if include_only_allow:
        for url_name in urlresolver.reverse_dict.keys():
            if isinstance(url_name, (text_type, str)):
                url_patterns = []
                for url_pattern in urlresolver.reverse_dict.getlist(url_name):
                    url_patterns += [
                        [namespace_path + pat[0], pat[1]] for pat in url_pattern[0]]
                yield [namespace + url_name, url_patterns]

    for inner_ns, (inner_ns_path, inner_urlresolver) in \
            urlresolver.namespace_dict.items():
        inner_ns_path = namespace_path + inner_ns_path
        inner_ns = namespace + inner_ns + ':'

        # if we have inner_ns_path, reconstruct a new resolver so that we can
        # handle regex substitutions within the regex of a namespace.
        if inner_ns_path:
            args = [inner_ns_path, inner_urlresolver]

            # https://github.com/ierror/django-js-reverse/issues/65
            if StrictVersion(django.get_version()) >= StrictVersion("2.0.6"):
                args.append(tuple(urlresolver.pattern.converters.items()))

            inner_urlresolver = urlresolvers.get_ns_resolver(*args)
            inner_ns_path = ''

        for x in prepare_url_list(inner_urlresolver, inner_ns_path, inner_ns):
            yield x
def function[prepare_url_list, parameter[urlresolver, namespace_path, namespace]]: constant[ returns list of tuples [(<url_name>, <url_patern_tuple> ), ...] ] variable[exclude_ns] assign[=] call[name[getattr], parameter[name[settings], constant[JS_REVERSE_EXCLUDE_NAMESPACES], name[JS_EXCLUDE_NAMESPACES]]] variable[include_only_ns] assign[=] call[name[getattr], parameter[name[settings], constant[JS_REVERSE_INCLUDE_ONLY_NAMESPACES], name[JS_INCLUDE_ONLY_NAMESPACES]]] if <ast.BoolOp object at 0x7da1b0b36c80> begin[:] <ast.Raise object at 0x7da1b0b37a30> if compare[call[name[namespace]][<ast.Slice object at 0x7da1b0b37c70>] in name[exclude_ns]] begin[:] return[None] variable[include_only_allow] assign[=] constant[True] if compare[name[include_only_ns] not_equal[!=] list[[]]] begin[:] variable[in_on_empty_ns] assign[=] constant[False] variable[in_on_is_in_list] assign[=] constant[False] variable[in_on_null] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b0b36d40> begin[:] variable[in_on_empty_ns] assign[=] constant[True] for taget[name[ns]] in starred[name[include_only_ns]] begin[:] if <ast.BoolOp object at 0x7da1b0b34e50> begin[:] variable[in_on_is_in_list] assign[=] constant[True] break if compare[binary_operation[call[name[namespace]][<ast.Slice object at 0x7da1b0b37340>] + constant[]] in name[include_only_ns]] begin[:] variable[in_on_null] assign[=] constant[True] variable[include_only_allow] assign[=] <ast.BoolOp object at 0x7da1b0b37be0> if name[include_only_allow] begin[:] for taget[name[url_name]] in starred[call[name[urlresolver].reverse_dict.keys, parameter[]]] begin[:] if call[name[isinstance], parameter[name[url_name], tuple[[<ast.Name object at 0x7da1b0b37a90>, <ast.Name object at 0x7da1b0b376d0>]]]] begin[:] variable[url_patterns] assign[=] list[[]] for taget[name[url_pattern]] in starred[call[name[urlresolver].reverse_dict.getlist, parameter[name[url_name]]]] begin[:] <ast.AugAssign object at 0x7da1b0b37160> <ast.Yield object at 0x7da1b0b35c90> for taget[tuple[[<ast.Name object at 0x7da1b0bf2fb0>, <ast.Tuple object at 0x7da1b0bf3910>]]] in starred[call[name[urlresolver].namespace_dict.items, parameter[]]] begin[:] variable[inner_ns_path] assign[=] binary_operation[name[namespace_path] + name[inner_ns_path]] variable[inner_ns] assign[=] binary_operation[binary_operation[name[namespace] + name[inner_ns]] + constant[:]] if name[inner_ns_path] begin[:] variable[args] assign[=] list[[<ast.Name object at 0x7da1b0bf1510>, <ast.Name object at 0x7da1b0bf35e0>]] if compare[call[name[StrictVersion], parameter[call[name[django].get_version, parameter[]]]] greater_or_equal[>=] call[name[StrictVersion], parameter[constant[2.0.6]]]] begin[:] call[name[args].append, parameter[call[name[tuple], parameter[call[name[urlresolver].pattern.converters.items, parameter[]]]]]] variable[inner_urlresolver] assign[=] call[name[urlresolvers].get_ns_resolver, parameter[<ast.Starred object at 0x7da1b0bc9990>]] variable[inner_ns_path] assign[=] constant[] for taget[name[x]] in starred[call[name[prepare_url_list], parameter[name[inner_urlresolver], name[inner_ns_path], name[inner_ns]]]] begin[:] <ast.Yield object at 0x7da1b0bc8640>
keyword[def] identifier[prepare_url_list] ( identifier[urlresolver] , identifier[namespace_path] = literal[string] , identifier[namespace] = literal[string] ): literal[string] identifier[exclude_ns] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[JS_EXCLUDE_NAMESPACES] ) identifier[include_only_ns] = identifier[getattr] ( identifier[settings] , literal[string] , identifier[JS_INCLUDE_ONLY_NAMESPACES] ) keyword[if] identifier[exclude_ns] keyword[and] identifier[include_only_ns] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] ) keyword[if] identifier[namespace] [:- literal[int] ] keyword[in] identifier[exclude_ns] : keyword[return] identifier[include_only_allow] = keyword[True] keyword[if] identifier[include_only_ns] !=[]: identifier[in_on_empty_ns] = keyword[False] identifier[in_on_is_in_list] = keyword[False] identifier[in_on_null] = keyword[False] keyword[if] identifier[namespace] == literal[string] keyword[and] literal[string] keyword[in] identifier[include_only_ns] : identifier[in_on_empty_ns] = keyword[True] keyword[for] identifier[ns] keyword[in] identifier[include_only_ns] : keyword[if] identifier[ns] != literal[string] keyword[and] identifier[namespace] [:- literal[int] ]. identifier[startswith] ( identifier[ns] ): identifier[in_on_is_in_list] = keyword[True] keyword[break] keyword[if] identifier[namespace] [:- literal[int] ]+ literal[string] keyword[in] identifier[include_only_ns] : identifier[in_on_null] = keyword[True] identifier[include_only_allow] = identifier[in_on_empty_ns] keyword[or] identifier[in_on_is_in_list] keyword[or] identifier[in_on_null] keyword[if] identifier[include_only_allow] : keyword[for] identifier[url_name] keyword[in] identifier[urlresolver] . identifier[reverse_dict] . identifier[keys] (): keyword[if] identifier[isinstance] ( identifier[url_name] ,( identifier[text_type] , identifier[str] )): identifier[url_patterns] =[] keyword[for] identifier[url_pattern] keyword[in] identifier[urlresolver] . identifier[reverse_dict] . identifier[getlist] ( identifier[url_name] ): identifier[url_patterns] +=[ [ identifier[namespace_path] + identifier[pat] [ literal[int] ], identifier[pat] [ literal[int] ]] keyword[for] identifier[pat] keyword[in] identifier[url_pattern] [ literal[int] ]] keyword[yield] [ identifier[namespace] + identifier[url_name] , identifier[url_patterns] ] keyword[for] identifier[inner_ns] ,( identifier[inner_ns_path] , identifier[inner_urlresolver] ) keyword[in] identifier[urlresolver] . identifier[namespace_dict] . identifier[items] (): identifier[inner_ns_path] = identifier[namespace_path] + identifier[inner_ns_path] identifier[inner_ns] = identifier[namespace] + identifier[inner_ns] + literal[string] keyword[if] identifier[inner_ns_path] : identifier[args] =[ identifier[inner_ns_path] , identifier[inner_urlresolver] ] keyword[if] identifier[StrictVersion] ( identifier[django] . identifier[get_version] ())>= identifier[StrictVersion] ( literal[string] ): identifier[args] . identifier[append] ( identifier[tuple] ( identifier[urlresolver] . identifier[pattern] . identifier[converters] . identifier[items] ())) identifier[inner_urlresolver] = identifier[urlresolvers] . identifier[get_ns_resolver] (* identifier[args] ) identifier[inner_ns_path] = literal[string] keyword[for] identifier[x] keyword[in] identifier[prepare_url_list] ( identifier[inner_urlresolver] , identifier[inner_ns_path] , identifier[inner_ns] ): keyword[yield] identifier[x]
def prepare_url_list(urlresolver, namespace_path='', namespace=''):
    """
    returns list of tuples [(<url_name>, <url_patern_tuple> ), ...]
    """
    exclude_ns = getattr(settings, 'JS_REVERSE_EXCLUDE_NAMESPACES', JS_EXCLUDE_NAMESPACES)
    include_only_ns = getattr(settings, 'JS_REVERSE_INCLUDE_ONLY_NAMESPACES', JS_INCLUDE_ONLY_NAMESPACES)
    if exclude_ns and include_only_ns:
        raise ImproperlyConfigured('Neither use JS_REVERSE_EXCLUDE_NAMESPACES nor JS_REVERSE_INCLUDE_ONLY_NAMESPACES setting') # depends on [control=['if'], data=[]]
    if namespace[:-1] in exclude_ns:
        return # depends on [control=['if'], data=[]]
    include_only_allow = True  # include_only state varible
    if include_only_ns != []:
        # True mean that ns passed the test
        in_on_empty_ns = False
        in_on_is_in_list = False
        in_on_null = False
        # Test urls without ns
        if namespace == '' and '' in include_only_ns:
            in_on_empty_ns = True # depends on [control=['if'], data=[]]
        # check if nestead ns isn't subns of include_only ns
        # e.g. ns = "foo:bar" include_only = ["foo"] -> this ns will be used
        # works for ns = "lorem:ipsum:dolor" include_only = ["lorem:ipsum"]
        # ns "lorem" will be ignored but "lorem:ipsum" & "lorem:ipsum:.." won't
        for ns in include_only_ns:
            if ns != '' and namespace[:-1].startswith(ns):
                in_on_is_in_list = True
                break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['ns']]
        # Test if isn't used "\0" flag
        # use "foo\0" to add urls just from "foo" not from subns "foo:bar"
        if namespace[:-1] + '\x00' in include_only_ns:
            in_on_null = True # depends on [control=['if'], data=[]]
        include_only_allow = in_on_empty_ns or in_on_is_in_list or in_on_null # depends on [control=['if'], data=['include_only_ns']]
    if include_only_allow:
        for url_name in urlresolver.reverse_dict.keys():
            if isinstance(url_name, (text_type, str)):
                url_patterns = []
                for url_pattern in urlresolver.reverse_dict.getlist(url_name):
                    url_patterns += [[namespace_path + pat[0], pat[1]] for pat in url_pattern[0]] # depends on [control=['for'], data=['url_pattern']]
                yield [namespace + url_name, url_patterns] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['url_name']] # depends on [control=['if'], data=[]]
    for (inner_ns, (inner_ns_path, inner_urlresolver)) in urlresolver.namespace_dict.items():
        inner_ns_path = namespace_path + inner_ns_path
        inner_ns = namespace + inner_ns + ':'
        # if we have inner_ns_path, reconstruct a new resolver so that we can
        # handle regex substitutions within the regex of a namespace.
        if inner_ns_path:
            args = [inner_ns_path, inner_urlresolver]
            # https://github.com/ierror/django-js-reverse/issues/65
            if StrictVersion(django.get_version()) >= StrictVersion('2.0.6'):
                args.append(tuple(urlresolver.pattern.converters.items())) # depends on [control=['if'], data=[]]
            inner_urlresolver = urlresolvers.get_ns_resolver(*args)
            inner_ns_path = '' # depends on [control=['if'], data=[]]
        for x in prepare_url_list(inner_urlresolver, inner_ns_path, inner_ns):
            yield x # depends on [control=['for'], data=['x']] # depends on [control=['for'], data=[]]
def get_summary(self):
    """Returns some summary data for a finished analysis"""
    if not self.analysis_finished:
        return []
    summary = {'times_summary': []}
    for i in range(len(self.runs[self.current_run].steps) - 1):
        step = self.runs[self.current_run].steps[i]
        begin = parse(step['when'])
        end = parse(self.runs[self.current_run].steps[i + 1]['when'])
        duration = end - begin
        summary['times_summary'].append((step['step'], duration.seconds))
    return summary
def function[get_summary, parameter[self]]: constant[Returns some summary data for a finished analysis] if <ast.UnaryOp object at 0x7da20c990d00> begin[:] return[list[[]]] variable[summary] assign[=] dictionary[[<ast.Constant object at 0x7da20c992740>], [<ast.List object at 0x7da20c992dd0>]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[call[name[self].runs][name[self].current_run].steps]] - constant[1]]]]] begin[:] variable[step] assign[=] call[call[name[self].runs][name[self].current_run].steps][name[i]] variable[begin] assign[=] call[name[parse], parameter[call[name[step]][constant[when]]]] variable[end] assign[=] call[name[parse], parameter[call[call[call[name[self].runs][name[self].current_run].steps][binary_operation[name[i] + constant[1]]]][constant[when]]]] variable[duration] assign[=] binary_operation[name[end] - name[begin]] call[call[name[summary]][constant[times_summary]].append, parameter[tuple[[<ast.Subscript object at 0x7da20c990910>, <ast.Attribute object at 0x7da20c992ef0>]]]] return[name[summary]]
keyword[def] identifier[get_summary] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[analysis_finished] : keyword[return] [] identifier[summary] ={ literal[string] :[]} keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[runs] [ identifier[self] . identifier[current_run] ]. identifier[steps] )- literal[int] ): identifier[step] = identifier[self] . identifier[runs] [ identifier[self] . identifier[current_run] ]. identifier[steps] [ identifier[i] ] identifier[begin] = identifier[parse] ( identifier[step] [ literal[string] ]) identifier[end] = identifier[parse] ( identifier[self] . identifier[runs] [ identifier[self] . identifier[current_run] ]. identifier[steps] [ identifier[i] + literal[int] ][ literal[string] ]) identifier[duration] = identifier[end] - identifier[begin] identifier[summary] [ literal[string] ]. identifier[append] (( identifier[step] [ literal[string] ], identifier[duration] . identifier[seconds] )) keyword[return] identifier[summary]
def get_summary(self):
    """Returns some summary data for a finished analysis"""
    if not self.analysis_finished:
        return [] # depends on [control=['if'], data=[]]
    summary = {'times_summary': []}
    for i in range(len(self.runs[self.current_run].steps) - 1):
        step = self.runs[self.current_run].steps[i]
        begin = parse(step['when'])
        end = parse(self.runs[self.current_run].steps[i + 1]['when'])
        duration = end - begin
        summary['times_summary'].append((step['step'], duration.seconds)) # depends on [control=['for'], data=['i']]
    return summary
def get_heron_dir():
    """
    This will extract heron directory from .pex file.

    For example,
    when __file__ is '/Users/heron-user/bin/heron/heron/tools/common/src/python/utils/config.pyc', and
    its real path is '/Users/heron-user/.heron/bin/heron/tools/common/src/python/utils/config.pyc',
    the internal variable ``path`` would be '/Users/heron-user/.heron', which is the heron directory

    This means the variable `go_above_dirs` below is 9.

    :return: root location of the .pex file
    """
    go_above_dirs = 9
    path = "/".join(os.path.realpath(__file__).split('/')[:-go_above_dirs])
    return normalized_class_path(path)
def function[get_heron_dir, parameter[]]: constant[ This will extract heron directory from .pex file. For example, when __file__ is '/Users/heron-user/bin/heron/heron/tools/common/src/python/utils/config.pyc', and its real path is '/Users/heron-user/.heron/bin/heron/tools/common/src/python/utils/config.pyc', the internal variable ``path`` would be '/Users/heron-user/.heron', which is the heron directory This means the variable `go_above_dirs` below is 9. :return: root location of the .pex file ] variable[go_above_dirs] assign[=] constant[9] variable[path] assign[=] call[constant[/].join, parameter[call[call[call[name[os].path.realpath, parameter[name[__file__]]].split, parameter[constant[/]]]][<ast.Slice object at 0x7da2054a7df0>]]] return[call[name[normalized_class_path], parameter[name[path]]]]
keyword[def] identifier[get_heron_dir] (): literal[string] identifier[go_above_dirs] = literal[int] identifier[path] = literal[string] . identifier[join] ( identifier[os] . identifier[path] . identifier[realpath] ( identifier[__file__] ). identifier[split] ( literal[string] )[:- identifier[go_above_dirs] ]) keyword[return] identifier[normalized_class_path] ( identifier[path] )
def get_heron_dir():
    """
    This will extract heron directory from .pex file.

    For example,
    when __file__ is '/Users/heron-user/bin/heron/heron/tools/common/src/python/utils/config.pyc', and
    its real path is '/Users/heron-user/.heron/bin/heron/tools/common/src/python/utils/config.pyc',
    the internal variable ``path`` would be '/Users/heron-user/.heron', which is the heron directory

    This means the variable `go_above_dirs` below is 9.

    :return: root location of the .pex file
    """
    go_above_dirs = 9
    path = '/'.join(os.path.realpath(__file__).split('/')[:-go_above_dirs])
    return normalized_class_path(path)
def dispense(self, volume=None, location=None, rate=1.0):
    """
    Dispense a volume of liquid (in microliters/uL) using this pipette

    Notes
    -----
    If only a volume is passed, the pipette will dispense from it's
    current position. If only a location is passed, `dispense` will
    default to it's `current_volume`

    The location may be a Well, or a specific position in relation to a
    Well, such as `Well.top()`. If a Well is specified without calling a
    a position method (such as .top or .bottom), this method will
    default to the bottom of the well.

    Parameters
    ----------
    volume : int or float
        The number of microliters to dispense
        (Default: self.current_volume)
    location : :any:`Placeable` or tuple(:any:`Placeable`, :any:`Vector`)
        The :any:`Placeable` (:any:`Well`) to perform the dispense.
        Can also be a tuple with first item :any:`Placeable`,
        second item relative :any:`Vector`
    rate : float
        Set plunger speed for this dispense, where
        speed = rate * dispense_speed (see :meth:`set_speed`)

    Returns
    -------
    This instance of :class:`Pipette`.

    Examples
    --------
    ..
    >>> from opentrons import instruments, labware, robot # doctest: +SKIP
    >>> robot.reset() # doctest: +SKIP
    >>> plate = labware.load('96-flat', '3') # doctest: +SKIP
    >>> p300 = instruments.P300_Single(mount='left') # doctest: +SKIP
    # fill the pipette with liquid (200uL)
    >>> p300.aspirate(plate[0]) # doctest: +SKIP
    # dispense 50uL to a Well
    >>> p300.dispense(50, plate[0]) # doctest: +SKIP
    # dispense 50uL to the center of a well
    >>> relative_vector = plate[1].center() # doctest: +SKIP
    >>> p300.dispense(50, (plate[1], relative_vector)) # doctest: +SKIP
    # dispense 20uL in place, at half the speed
    >>> p300.dispense(20, rate=0.5) # doctest: +SKIP
    # dispense the pipette's remaining volume (80uL) to a Well
    >>> p300.dispense(plate[2]) # doctest: +SKIP
    """
    if not self.tip_attached:
        log.warning("Cannot dispense without a tip attached.")

    # Note: volume positional argument may not be passed. if it isn't then
    # assume the first positional argument is the location
    if not helpers.is_number(volume):
        if volume and not location:
            location = volume
        volume = self.current_volume

    # Ensure we don't dispense more than the current volume
    volume = min(self.current_volume, volume)

    display_location = location if location else self.previous_placeable

    do_publish(self.broker, commands.dispense, self.dispense, 'before',
               None, None, self, volume, display_location, rate)

    # if volume is specified as 0uL, then do nothing
    if volume != 0:
        self._position_for_dispense(location)

        mm_position = self._dispense_plunger_position(
            self.current_volume - volume)
        speed = self.speeds['dispense'] * rate
        self.instrument_actuator.push_speed()
        self.instrument_actuator.set_speed(speed)
        self.instrument_actuator.set_active_current(self._plunger_current)
        self.robot.poses = self.instrument_actuator.move(
            self.robot.poses,
            x=mm_position
        )
        self.instrument_actuator.pop_speed()
        self.current_volume -= volume  # update after actual dispense

    do_publish(self.broker, commands.dispense, self.dispense, 'after',
               self, None, self, volume, display_location, rate)

    return self
def function[dispense, parameter[self, volume, location, rate]]: constant[ Dispense a volume of liquid (in microliters/uL) using this pipette Notes ----- If only a volume is passed, the pipette will dispense from it's current position. If only a location is passed, `dispense` will default to it's `current_volume` The location may be a Well, or a specific position in relation to a Well, such as `Well.top()`. If a Well is specified without calling a a position method (such as .top or .bottom), this method will default to the bottom of the well. Parameters ---------- volume : int or float The number of microliters to dispense (Default: self.current_volume) location : :any:`Placeable` or tuple(:any:`Placeable`, :any:`Vector`) The :any:`Placeable` (:any:`Well`) to perform the dispense. Can also be a tuple with first item :any:`Placeable`, second item relative :any:`Vector` rate : float Set plunger speed for this dispense, where speed = rate * dispense_speed (see :meth:`set_speed`) Returns ------- This instance of :class:`Pipette`. Examples -------- .. >>> from opentrons import instruments, labware, robot # doctest: +SKIP >>> robot.reset() # doctest: +SKIP >>> plate = labware.load('96-flat', '3') # doctest: +SKIP >>> p300 = instruments.P300_Single(mount='left') # doctest: +SKIP # fill the pipette with liquid (200uL) >>> p300.aspirate(plate[0]) # doctest: +SKIP # dispense 50uL to a Well >>> p300.dispense(50, plate[0]) # doctest: +SKIP # dispense 50uL to the center of a well >>> relative_vector = plate[1].center() # doctest: +SKIP >>> p300.dispense(50, (plate[1], relative_vector)) # doctest: +SKIP # dispense 20uL in place, at half the speed >>> p300.dispense(20, rate=0.5) # doctest: +SKIP # dispense the pipette's remaining volume (80uL) to a Well >>> p300.dispense(plate[2]) # doctest: +SKIP ] if <ast.UnaryOp object at 0x7da1b26ae740> begin[:] call[name[log].warning, parameter[constant[Cannot dispense without a tip attached.]]] if <ast.UnaryOp object at 0x7da1b26addb0> begin[:] if <ast.BoolOp object at 0x7da1b26af220> begin[:] variable[location] assign[=] name[volume] variable[volume] assign[=] name[self].current_volume variable[volume] assign[=] call[name[min], parameter[name[self].current_volume, name[volume]]] variable[display_location] assign[=] <ast.IfExp object at 0x7da1b26afee0> call[name[do_publish], parameter[name[self].broker, name[commands].dispense, name[self].dispense, constant[before], constant[None], constant[None], name[self], name[volume], name[display_location], name[rate]]] if compare[name[volume] not_equal[!=] constant[0]] begin[:] call[name[self]._position_for_dispense, parameter[name[location]]] variable[mm_position] assign[=] call[name[self]._dispense_plunger_position, parameter[binary_operation[name[self].current_volume - name[volume]]]] variable[speed] assign[=] binary_operation[call[name[self].speeds][constant[dispense]] * name[rate]] call[name[self].instrument_actuator.push_speed, parameter[]] call[name[self].instrument_actuator.set_speed, parameter[name[speed]]] call[name[self].instrument_actuator.set_active_current, parameter[name[self]._plunger_current]] name[self].robot.poses assign[=] call[name[self].instrument_actuator.move, parameter[name[self].robot.poses]] call[name[self].instrument_actuator.pop_speed, parameter[]] <ast.AugAssign object at 0x7da1b0924550> call[name[do_publish], parameter[name[self].broker, name[commands].dispense, name[self].dispense, constant[after], name[self], constant[None], name[self], name[volume], name[display_location], name[rate]]] return[name[self]]
keyword[def] identifier[dispense] ( identifier[self] , identifier[volume] = keyword[None] , identifier[location] = keyword[None] , identifier[rate] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[tip_attached] : identifier[log] . identifier[warning] ( literal[string] ) keyword[if] keyword[not] identifier[helpers] . identifier[is_number] ( identifier[volume] ): keyword[if] identifier[volume] keyword[and] keyword[not] identifier[location] : identifier[location] = identifier[volume] identifier[volume] = identifier[self] . identifier[current_volume] identifier[volume] = identifier[min] ( identifier[self] . identifier[current_volume] , identifier[volume] ) identifier[display_location] = identifier[location] keyword[if] identifier[location] keyword[else] identifier[self] . identifier[previous_placeable] identifier[do_publish] ( identifier[self] . identifier[broker] , identifier[commands] . identifier[dispense] , identifier[self] . identifier[dispense] , literal[string] , keyword[None] , keyword[None] , identifier[self] , identifier[volume] , identifier[display_location] , identifier[rate] ) keyword[if] identifier[volume] != literal[int] : identifier[self] . identifier[_position_for_dispense] ( identifier[location] ) identifier[mm_position] = identifier[self] . identifier[_dispense_plunger_position] ( identifier[self] . identifier[current_volume] - identifier[volume] ) identifier[speed] = identifier[self] . identifier[speeds] [ literal[string] ]* identifier[rate] identifier[self] . identifier[instrument_actuator] . identifier[push_speed] () identifier[self] . identifier[instrument_actuator] . identifier[set_speed] ( identifier[speed] ) identifier[self] . identifier[instrument_actuator] . identifier[set_active_current] ( identifier[self] . identifier[_plunger_current] ) identifier[self] . identifier[robot] . identifier[poses] = identifier[self] . identifier[instrument_actuator] . identifier[move] ( identifier[self] . identifier[robot] . identifier[poses] , identifier[x] = identifier[mm_position] ) identifier[self] . identifier[instrument_actuator] . identifier[pop_speed] () identifier[self] . identifier[current_volume] -= identifier[volume] identifier[do_publish] ( identifier[self] . identifier[broker] , identifier[commands] . identifier[dispense] , identifier[self] . identifier[dispense] , literal[string] , identifier[self] , keyword[None] , identifier[self] , identifier[volume] , identifier[display_location] , identifier[rate] ) keyword[return] identifier[self]
def dispense(self, volume=None, location=None, rate=1.0): """ Dispense a volume of liquid (in microliters/uL) using this pipette Notes ----- If only a volume is passed, the pipette will dispense from it's current position. If only a location is passed, `dispense` will default to it's `current_volume` The location may be a Well, or a specific position in relation to a Well, such as `Well.top()`. If a Well is specified without calling a a position method (such as .top or .bottom), this method will default to the bottom of the well. Parameters ---------- volume : int or float The number of microliters to dispense (Default: self.current_volume) location : :any:`Placeable` or tuple(:any:`Placeable`, :any:`Vector`) The :any:`Placeable` (:any:`Well`) to perform the dispense. Can also be a tuple with first item :any:`Placeable`, second item relative :any:`Vector` rate : float Set plunger speed for this dispense, where speed = rate * dispense_speed (see :meth:`set_speed`) Returns ------- This instance of :class:`Pipette`. Examples -------- .. >>> from opentrons import instruments, labware, robot # doctest: +SKIP >>> robot.reset() # doctest: +SKIP >>> plate = labware.load('96-flat', '3') # doctest: +SKIP >>> p300 = instruments.P300_Single(mount='left') # doctest: +SKIP # fill the pipette with liquid (200uL) >>> p300.aspirate(plate[0]) # doctest: +SKIP # dispense 50uL to a Well >>> p300.dispense(50, plate[0]) # doctest: +SKIP # dispense 50uL to the center of a well >>> relative_vector = plate[1].center() # doctest: +SKIP >>> p300.dispense(50, (plate[1], relative_vector)) # doctest: +SKIP # dispense 20uL in place, at half the speed >>> p300.dispense(20, rate=0.5) # doctest: +SKIP # dispense the pipette's remaining volume (80uL) to a Well >>> p300.dispense(plate[2]) # doctest: +SKIP """ if not self.tip_attached: log.warning('Cannot dispense without a tip attached.') # depends on [control=['if'], data=[]] # Note: volume positional argument may not be passed. if it isn't then # assume the first positional argument is the location if not helpers.is_number(volume): if volume and (not location): location = volume # depends on [control=['if'], data=[]] volume = self.current_volume # depends on [control=['if'], data=[]] # Ensure we don't dispense more than the current volume volume = min(self.current_volume, volume) display_location = location if location else self.previous_placeable do_publish(self.broker, commands.dispense, self.dispense, 'before', None, None, self, volume, display_location, rate) # if volume is specified as 0uL, then do nothing if volume != 0: self._position_for_dispense(location) mm_position = self._dispense_plunger_position(self.current_volume - volume) speed = self.speeds['dispense'] * rate self.instrument_actuator.push_speed() self.instrument_actuator.set_speed(speed) self.instrument_actuator.set_active_current(self._plunger_current) self.robot.poses = self.instrument_actuator.move(self.robot.poses, x=mm_position) self.instrument_actuator.pop_speed() self.current_volume -= volume # update after actual dispense # depends on [control=['if'], data=['volume']] do_publish(self.broker, commands.dispense, self.dispense, 'after', self, None, self, volume, display_location, rate) return self
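A minimal usage sketch for `dispense`, condensed from its own docstring examples; the labware name, slot, and pipette model are the illustrative values used there, and a tip must already be attached (otherwise the method only logs a warning).

from opentrons import instruments, labware, robot  # as in the docstring examples

robot.reset()
plate = labware.load('96-flat', '3')
p300 = instruments.P300_Single(mount='left')
p300.aspirate(plate[0])        # fill the pipette (200uL for a P300)
p300.dispense(50, plate[0])    # dispense 50uL back into the same well
p300.dispense(20, rate=0.5)    # 20uL in place, at half the plunger speed
p300.dispense(plate[2])        # remaining volume into another well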
def _update_history(self):
        """Update the history file"""
        version = self.data['new_version']
        history = self.vcs.history_file()
        if not history:
            logger.warning("No history file found")
            return
        history_lines = open(history).read().split('\n')
        headings = utils.extract_headings_from_history(history_lines)
        if not headings:
            logger.warning("No detectable existing version headings in the "
                           "history file.")
            inject_location = 0
            underline_char = '-'
        else:
            first = headings[0]
            inject_location = first['line']
            underline_line = first['line'] + 1
            try:
                underline_char = history_lines[underline_line][0]
            except IndexError:
                logger.debug("No character on line below header.")
                underline_char = '-'
        header = '%s (unreleased)' % version
        inject = [header,
                  underline_char * len(header),
                  '',
                  self.data['nothing_changed_yet'],
                  '',
                  '']
        history_lines[inject_location:inject_location] = inject
        contents = '\n'.join(history_lines)
        open(history, 'w').write(contents)
        logger.info("Injected new section into the history: %r",
                    header)
def function[_update_history, parameter[self]]: constant[Update the history file] variable[version] assign[=] call[name[self].data][constant[new_version]] variable[history] assign[=] call[name[self].vcs.history_file, parameter[]] if <ast.UnaryOp object at 0x7da1b146dc90> begin[:] call[name[logger].warn, parameter[constant[No history file found]]] return[None] variable[history_lines] assign[=] call[call[call[name[open], parameter[name[history]]].read, parameter[]].split, parameter[constant[ ]]] variable[headings] assign[=] call[name[utils].extract_headings_from_history, parameter[name[history_lines]]] if <ast.UnaryOp object at 0x7da1b146cd60> begin[:] call[name[logger].warn, parameter[constant[No detectable existing version headings in the history file.]]] variable[inject_location] assign[=] constant[0] variable[underline_char] assign[=] constant[-] variable[header] assign[=] binary_operation[constant[%s (unreleased)] <ast.Mod object at 0x7da2590d6920> name[version]] variable[inject] assign[=] list[[<ast.Name object at 0x7da1b146e470>, <ast.BinOp object at 0x7da1b146fa30>, <ast.Constant object at 0x7da1b146d900>, <ast.Subscript object at 0x7da1b146ce80>, <ast.Constant object at 0x7da1b146d930>, <ast.Constant object at 0x7da1b146db10>]] call[name[history_lines]][<ast.Slice object at 0x7da1b146d990>] assign[=] name[inject] variable[contents] assign[=] call[constant[ ].join, parameter[name[history_lines]]] call[call[name[open], parameter[name[history], constant[w]]].write, parameter[name[contents]]] call[name[logger].info, parameter[constant[Injected new section into the history: %r], name[header]]]
keyword[def] identifier[_update_history] ( identifier[self] ): literal[string] identifier[version] = identifier[self] . identifier[data] [ literal[string] ] identifier[history] = identifier[self] . identifier[vcs] . identifier[history_file] () keyword[if] keyword[not] identifier[history] : identifier[logger] . identifier[warn] ( literal[string] ) keyword[return] identifier[history_lines] = identifier[open] ( identifier[history] ). identifier[read] (). identifier[split] ( literal[string] ) identifier[headings] = identifier[utils] . identifier[extract_headings_from_history] ( identifier[history_lines] ) keyword[if] keyword[not] identifier[len] ( identifier[headings] ): identifier[logger] . identifier[warn] ( literal[string] literal[string] ) identifier[inject_location] = literal[int] identifier[underline_char] = literal[string] keyword[else] : identifier[first] = identifier[headings] [ literal[int] ] identifier[inject_location] = identifier[first] [ literal[string] ] identifier[underline_line] = identifier[first] [ literal[string] ]+ literal[int] keyword[try] : identifier[underline_char] = identifier[history_lines] [ identifier[underline_line] ][ literal[int] ] keyword[except] identifier[IndexError] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[underline_char] = literal[string] identifier[header] = literal[string] % identifier[version] identifier[inject] =[ identifier[header] , identifier[underline_char] * identifier[len] ( identifier[header] ), literal[string] , identifier[self] . identifier[data] [ literal[string] ], literal[string] , literal[string] ] identifier[history_lines] [ identifier[inject_location] : identifier[inject_location] ]= identifier[inject] identifier[contents] = literal[string] . identifier[join] ( identifier[history_lines] ) identifier[open] ( identifier[history] , literal[string] ). identifier[write] ( identifier[contents] ) identifier[logger] . identifier[info] ( literal[string] , identifier[header] )
def _update_history(self): """Update the history file""" version = self.data['new_version'] history = self.vcs.history_file() if not history: logger.warn('No history file found') return # depends on [control=['if'], data=[]] history_lines = open(history).read().split('\n') headings = utils.extract_headings_from_history(history_lines) if not len(headings): logger.warn('No detectable existing version headings in the history file.') inject_location = 0 underline_char = '-' # depends on [control=['if'], data=[]] else: first = headings[0] inject_location = first['line'] underline_line = first['line'] + 1 try: underline_char = history_lines[underline_line][0] # depends on [control=['try'], data=[]] except IndexError: logger.debug('No character on line below header.') underline_char = '-' # depends on [control=['except'], data=[]] header = '%s (unreleased)' % version inject = [header, underline_char * len(header), '', self.data['nothing_changed_yet'], '', ''] history_lines[inject_location:inject_location] = inject contents = '\n'.join(history_lines) open(history, 'w').write(contents) logger.info('Injected new section into the history: %r', header)
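A standalone sketch of the injection logic inside `_update_history`; the sample changelog lines are hypothetical, but the header and underline construction and the list splice mirror the method body.

version = '1.2.3'
history_lines = ['Changelog', '=========', '',
                 '1.2.2 (2024-01-01)', '------------------', '', '- Fixed a bug.']
inject_location = 3                      # index of the newest version heading
header = '%s (unreleased)' % version
inject = [header, '-' * len(header), '', '- Nothing changed yet.', '', '']
history_lines[inject_location:inject_location] = inject  # insert above that heading
print('\n'.join(history_lines))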
def jdbc_datasource_present(name, description='', enabled=True, restype='datasource', vendor='mysql', sql_url='', sql_user='', sql_password='', min_size=8, max_size=32, resize_quantity=2, idle_timeout=300, wait_timeout=60, non_transactional=False, transaction_isolation='', isolation_guaranteed=True, server=None): ''' Ensures that the JDBC Datasource exists name Name of the datasource description Description of the datasource enabled Is the datasource enabled? defaults to ``true`` restype Resource type, can be ``datasource``, ``xa_datasource``, ``connection_pool_datasource`` or ``driver``, defaults to ``datasource`` vendor SQL Server type, currently supports ``mysql``, ``postgresql`` and ``mssql``, defaults to ``mysql`` sql_url URL of the server in jdbc form sql_user Username for the server sql_password Password for that username min_size Minimum and initial number of connections in the pool, defaults to ``8`` max_size Maximum number of connections that can be created in the pool, defaults to ``32`` resize_quantity Number of connections to be removed when idle_timeout expires, defaults to ``2`` idle_timeout Maximum time a connection can remain idle in the pool, in seconds, defaults to ``300`` wait_timeout Maximum time a caller can wait before timeout, in seconds, defaults to ``60`` non_transactional Return non-transactional connections transaction_isolation Defaults to the JDBC driver default isolation_guaranteed All connections use the same isolation level ''' ret = {'name': name, 'result': None, 'comment': None, 'changes': {}} # Manage parameters res_name = 'jdbc/{0}'.format(name) pool_data = {} pool_data_properties = {} res_data = {} if restype == 'datasource': pool_data['resType'] = 'javax.sql.DataSource' elif restype == 'xa_datasource': pool_data['resType'] = 'javax.sql.XADataSource' elif restype == 'connection_pool_datasource': pool_data['resType'] = 'javax.sql.ConnectionPoolDataSource' elif restype == 'driver': pool_data['resType'] = 'javax.sql.Driver' datasources = {} datasources['mysql'] = { 'driver': 'com.mysql.jdbc.Driver', 'datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlDataSource', 'xa_datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlXADataSource', 'connection_pool_datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlConnectionPoolDataSource' } datasources['postgresql'] = { 'driver': 'org.postgresql.Driver', 'datasource': 'org.postgresql.ds.PGSimpleDataSource', 'xa_datasource': 'org.postgresql.xa.PGXADataSource', 'connection_pool_datasource': 'org.postgresql.ds.PGConnectionPoolDataSource' } datasources['mssql'] = { 'driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver', 'datasource': 'com.microsoft.sqlserver.jdbc.SQLServerDataSource', 'xa_datasource': 'com.microsoft.sqlserver.jdbc.SQLServerXADataSource', 'connection_pool_datasource': 'com.microsoft.sqlserver.jdbc.SQLServerConnectionPoolDataSource' } if restype == 'driver': pool_data['driverClassname'] = datasources[vendor]['driver'] else: pool_data['datasourceClassname'] = datasources[vendor][restype] pool_data_properties['url'] = sql_url pool_data_properties['user'] = sql_user pool_data_properties['password'] = sql_password pool_data['properties'] = pool_data_properties pool_data['description'] = description res_data['description'] = description res_data['poolName'] = name res_data['enabled'] = enabled pool_data['steadyPoolSize'] = min_size pool_data['maxPoolSize'] = max_size pool_data['poolResizeQuantity'] = resize_quantity pool_data['idleTimeoutInSeconds'] = idle_timeout pool_data['maxWaitTimeInMillis'] = wait_timeout*1000 
pool_data['nonTransactionalConnections'] = non_transactional pool_data['transactionIsolationLevel'] = transaction_isolation pool_data['isIsolationLevelGuaranteed'] = isolation_guaranteed pool_ret = _do_element_present(name, 'jdbc_connection_pool', pool_data, server) res_ret = _do_element_present(res_name, 'jdbc_resource', res_data, server) if not pool_ret['error'] and not res_ret['error']: if not __opts__['test']: ret['result'] = True if pool_ret['create'] or res_ret['create']: ret['changes']['pool'] = pool_ret['changes'] ret['changes']['resource'] = res_ret['changes'] if __opts__['test']: ret['comment'] = 'JDBC Datasource set to be created' else: ret['comment'] = 'JDBC Datasource created' elif pool_ret['update'] or res_ret['update']: ret['changes']['pool'] = pool_ret['changes'] ret['changes']['resource'] = res_ret['changes'] if __opts__['test']: ret['comment'] = 'JDBC Datasource set to be updated' else: ret['comment'] = 'JDBC Datasource updated' else: ret['result'] = True ret['changes'] = {} ret['comment'] = 'JDBC Datasource is already up-to-date' else: ret['result'] = False ret['comment'] = 'ERROR: {0} // {1}'.format(pool_ret['error'], res_ret['error']) return ret
def function[jdbc_datasource_present, parameter[name, description, enabled, restype, vendor, sql_url, sql_user, sql_password, min_size, max_size, resize_quantity, idle_timeout, wait_timeout, non_transactional, transaction_isolation, isolation_guaranteed, server]]: constant[ Ensures that the JDBC Datasource exists name Name of the datasource description Description of the datasource enabled Is the datasource enabled? defaults to ``true`` restype Resource type, can be ``datasource``, ``xa_datasource``, ``connection_pool_datasource`` or ``driver``, defaults to ``datasource`` vendor SQL Server type, currently supports ``mysql``, ``postgresql`` and ``mssql``, defaults to ``mysql`` sql_url URL of the server in jdbc form sql_user Username for the server sql_password Password for that username min_size Minimum and initial number of connections in the pool, defaults to ``8`` max_size Maximum number of connections that can be created in the pool, defaults to ``32`` resize_quantity Number of connections to be removed when idle_timeout expires, defaults to ``2`` idle_timeout Maximum time a connection can remain idle in the pool, in seconds, defaults to ``300`` wait_timeout Maximum time a caller can wait before timeout, in seconds, defaults to ``60`` non_transactional Return non-transactional connections transaction_isolation Defaults to the JDBC driver default isolation_guaranteed All connections use the same isolation level ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b20891e0>, <ast.Constant object at 0x7da1b20887c0>, <ast.Constant object at 0x7da1b208a110>, <ast.Constant object at 0x7da1b2088850>], [<ast.Name object at 0x7da1b208a050>, <ast.Constant object at 0x7da1b2089fc0>, <ast.Constant object at 0x7da1b2088640>, <ast.Dict object at 0x7da1b2089e40>]] variable[res_name] assign[=] call[constant[jdbc/{0}].format, parameter[name[name]]] variable[pool_data] assign[=] dictionary[[], []] variable[pool_data_properties] assign[=] dictionary[[], []] variable[res_data] assign[=] dictionary[[], []] if compare[name[restype] equal[==] constant[datasource]] begin[:] call[name[pool_data]][constant[resType]] assign[=] constant[javax.sql.DataSource] variable[datasources] assign[=] dictionary[[], []] call[name[datasources]][constant[mysql]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2007d30>, <ast.Constant object at 0x7da1b20041c0>, <ast.Constant object at 0x7da1b2007a90>, <ast.Constant object at 0x7da1b2004130>], [<ast.Constant object at 0x7da1b2007190>, <ast.Constant object at 0x7da1b2006ec0>, <ast.Constant object at 0x7da1b20077c0>, <ast.Constant object at 0x7da1b2004310>]] call[name[datasources]][constant[postgresql]] assign[=] dictionary[[<ast.Constant object at 0x7da1b20040d0>, <ast.Constant object at 0x7da1b208a0e0>, <ast.Constant object at 0x7da1b2089f00>, <ast.Constant object at 0x7da1b2089e10>], [<ast.Constant object at 0x7da1b2089d50>, <ast.Constant object at 0x7da1b2089300>, <ast.Constant object at 0x7da1b2088dc0>, <ast.Constant object at 0x7da1b208b9a0>]] call[name[datasources]][constant[mssql]] assign[=] dictionary[[<ast.Constant object at 0x7da1b2089510>, <ast.Constant object at 0x7da1b2089d80>, <ast.Constant object at 0x7da1b2089b10>, <ast.Constant object at 0x7da1b208bf10>], [<ast.Constant object at 0x7da1b20888e0>, <ast.Constant object at 0x7da1b20895a0>, <ast.Constant object at 0x7da1b2088910>, <ast.Constant object at 0x7da1b208bee0>]] if compare[name[restype] equal[==] constant[driver]] begin[:] call[name[pool_data]][constant[driverClassname]] assign[=] 
call[call[name[datasources]][name[vendor]]][constant[driver]] call[name[pool_data_properties]][constant[url]] assign[=] name[sql_url] call[name[pool_data_properties]][constant[user]] assign[=] name[sql_user] call[name[pool_data_properties]][constant[password]] assign[=] name[sql_password] call[name[pool_data]][constant[properties]] assign[=] name[pool_data_properties] call[name[pool_data]][constant[description]] assign[=] name[description] call[name[res_data]][constant[description]] assign[=] name[description] call[name[res_data]][constant[poolName]] assign[=] name[name] call[name[res_data]][constant[enabled]] assign[=] name[enabled] call[name[pool_data]][constant[steadyPoolSize]] assign[=] name[min_size] call[name[pool_data]][constant[maxPoolSize]] assign[=] name[max_size] call[name[pool_data]][constant[poolResizeQuantity]] assign[=] name[resize_quantity] call[name[pool_data]][constant[idleTimeoutInSeconds]] assign[=] name[idle_timeout] call[name[pool_data]][constant[maxWaitTimeInMillis]] assign[=] binary_operation[name[wait_timeout] * constant[1000]] call[name[pool_data]][constant[nonTransactionalConnections]] assign[=] name[non_transactional] call[name[pool_data]][constant[transactionIsolationLevel]] assign[=] name[transaction_isolation] call[name[pool_data]][constant[isIsolationLevelGuaranteed]] assign[=] name[isolation_guaranteed] variable[pool_ret] assign[=] call[name[_do_element_present], parameter[name[name], constant[jdbc_connection_pool], name[pool_data], name[server]]] variable[res_ret] assign[=] call[name[_do_element_present], parameter[name[res_name], constant[jdbc_resource], name[res_data], name[server]]] if <ast.BoolOp object at 0x7da1b2089690> begin[:] if <ast.UnaryOp object at 0x7da1b208b010> begin[:] call[name[ret]][constant[result]] assign[=] constant[True] if <ast.BoolOp object at 0x7da1b20889d0> begin[:] call[call[name[ret]][constant[changes]]][constant[pool]] assign[=] call[name[pool_ret]][constant[changes]] call[call[name[ret]][constant[changes]]][constant[resource]] assign[=] call[name[res_ret]][constant[changes]] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[comment]] assign[=] constant[JDBC Datasource set to be created] return[name[ret]]
keyword[def] identifier[jdbc_datasource_present] ( identifier[name] , identifier[description] = literal[string] , identifier[enabled] = keyword[True] , identifier[restype] = literal[string] , identifier[vendor] = literal[string] , identifier[sql_url] = literal[string] , identifier[sql_user] = literal[string] , identifier[sql_password] = literal[string] , identifier[min_size] = literal[int] , identifier[max_size] = literal[int] , identifier[resize_quantity] = literal[int] , identifier[idle_timeout] = literal[int] , identifier[wait_timeout] = literal[int] , identifier[non_transactional] = keyword[False] , identifier[transaction_isolation] = literal[string] , identifier[isolation_guaranteed] = keyword[True] , identifier[server] = keyword[None] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] :{}} identifier[res_name] = literal[string] . identifier[format] ( identifier[name] ) identifier[pool_data] ={} identifier[pool_data_properties] ={} identifier[res_data] ={} keyword[if] identifier[restype] == literal[string] : identifier[pool_data] [ literal[string] ]= literal[string] keyword[elif] identifier[restype] == literal[string] : identifier[pool_data] [ literal[string] ]= literal[string] keyword[elif] identifier[restype] == literal[string] : identifier[pool_data] [ literal[string] ]= literal[string] keyword[elif] identifier[restype] == literal[string] : identifier[pool_data] [ literal[string] ]= literal[string] identifier[datasources] ={} identifier[datasources] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[datasources] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } identifier[datasources] [ literal[string] ]={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] } keyword[if] identifier[restype] == literal[string] : identifier[pool_data] [ literal[string] ]= identifier[datasources] [ identifier[vendor] ][ literal[string] ] keyword[else] : identifier[pool_data] [ literal[string] ]= identifier[datasources] [ identifier[vendor] ][ identifier[restype] ] identifier[pool_data_properties] [ literal[string] ]= identifier[sql_url] identifier[pool_data_properties] [ literal[string] ]= identifier[sql_user] identifier[pool_data_properties] [ literal[string] ]= identifier[sql_password] identifier[pool_data] [ literal[string] ]= identifier[pool_data_properties] identifier[pool_data] [ literal[string] ]= identifier[description] identifier[res_data] [ literal[string] ]= identifier[description] identifier[res_data] [ literal[string] ]= identifier[name] identifier[res_data] [ literal[string] ]= identifier[enabled] identifier[pool_data] [ literal[string] ]= identifier[min_size] identifier[pool_data] [ literal[string] ]= identifier[max_size] identifier[pool_data] [ literal[string] ]= identifier[resize_quantity] identifier[pool_data] [ literal[string] ]= identifier[idle_timeout] identifier[pool_data] [ literal[string] ]= identifier[wait_timeout] * literal[int] identifier[pool_data] [ literal[string] ]= identifier[non_transactional] identifier[pool_data] [ literal[string] ]= identifier[transaction_isolation] identifier[pool_data] [ literal[string] ]= 
identifier[isolation_guaranteed] identifier[pool_ret] = identifier[_do_element_present] ( identifier[name] , literal[string] , identifier[pool_data] , identifier[server] ) identifier[res_ret] = identifier[_do_element_present] ( identifier[res_name] , literal[string] , identifier[res_data] , identifier[server] ) keyword[if] keyword[not] identifier[pool_ret] [ literal[string] ] keyword[and] keyword[not] identifier[res_ret] [ literal[string] ]: keyword[if] keyword[not] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[True] keyword[if] identifier[pool_ret] [ literal[string] ] keyword[or] identifier[res_ret] [ literal[string] ]: identifier[ret] [ literal[string] ][ literal[string] ]= identifier[pool_ret] [ literal[string] ] identifier[ret] [ literal[string] ][ literal[string] ]= identifier[res_ret] [ literal[string] ] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= literal[string] keyword[elif] identifier[pool_ret] [ literal[string] ] keyword[or] identifier[res_ret] [ literal[string] ]: identifier[ret] [ literal[string] ][ literal[string] ]= identifier[pool_ret] [ literal[string] ] identifier[ret] [ literal[string] ][ literal[string] ]= identifier[res_ret] [ literal[string] ] keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= keyword[True] identifier[ret] [ literal[string] ]={} identifier[ret] [ literal[string] ]= literal[string] keyword[else] : identifier[ret] [ literal[string] ]= keyword[False] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[pool_ret] [ literal[string] ], identifier[res_ret] [ literal[string] ]) keyword[return] identifier[ret]
def jdbc_datasource_present(name, description='', enabled=True, restype='datasource', vendor='mysql', sql_url='', sql_user='', sql_password='', min_size=8, max_size=32, resize_quantity=2, idle_timeout=300, wait_timeout=60, non_transactional=False, transaction_isolation='', isolation_guaranteed=True, server=None): """ Ensures that the JDBC Datasource exists name Name of the datasource description Description of the datasource enabled Is the datasource enabled? defaults to ``true`` restype Resource type, can be ``datasource``, ``xa_datasource``, ``connection_pool_datasource`` or ``driver``, defaults to ``datasource`` vendor SQL Server type, currently supports ``mysql``, ``postgresql`` and ``mssql``, defaults to ``mysql`` sql_url URL of the server in jdbc form sql_user Username for the server sql_password Password for that username min_size Minimum and initial number of connections in the pool, defaults to ``8`` max_size Maximum number of connections that can be created in the pool, defaults to ``32`` resize_quantity Number of connections to be removed when idle_timeout expires, defaults to ``2`` idle_timeout Maximum time a connection can remain idle in the pool, in seconds, defaults to ``300`` wait_timeout Maximum time a caller can wait before timeout, in seconds, defaults to ``60`` non_transactional Return non-transactional connections transaction_isolation Defaults to the JDBC driver default isolation_guaranteed All connections use the same isolation level """ ret = {'name': name, 'result': None, 'comment': None, 'changes': {}} # Manage parameters res_name = 'jdbc/{0}'.format(name) pool_data = {} pool_data_properties = {} res_data = {} if restype == 'datasource': pool_data['resType'] = 'javax.sql.DataSource' # depends on [control=['if'], data=[]] elif restype == 'xa_datasource': pool_data['resType'] = 'javax.sql.XADataSource' # depends on [control=['if'], data=[]] elif restype == 'connection_pool_datasource': pool_data['resType'] = 'javax.sql.ConnectionPoolDataSource' # depends on [control=['if'], data=[]] elif restype == 'driver': pool_data['resType'] = 'javax.sql.Driver' # depends on [control=['if'], data=[]] datasources = {} datasources['mysql'] = {'driver': 'com.mysql.jdbc.Driver', 'datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlDataSource', 'xa_datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlXADataSource', 'connection_pool_datasource': 'com.mysql.jdbc.jdbc2.optional.MysqlConnectionPoolDataSource'} datasources['postgresql'] = {'driver': 'org.postgresql.Driver', 'datasource': 'org.postgresql.ds.PGSimpleDataSource', 'xa_datasource': 'org.postgresql.xa.PGXADataSource', 'connection_pool_datasource': 'org.postgresql.ds.PGConnectionPoolDataSource'} datasources['mssql'] = {'driver': 'com.microsoft.sqlserver.jdbc.SQLServerDriver', 'datasource': 'com.microsoft.sqlserver.jdbc.SQLServerDataSource', 'xa_datasource': 'com.microsoft.sqlserver.jdbc.SQLServerXADataSource', 'connection_pool_datasource': 'com.microsoft.sqlserver.jdbc.SQLServerConnectionPoolDataSource'} if restype == 'driver': pool_data['driverClassname'] = datasources[vendor]['driver'] # depends on [control=['if'], data=[]] else: pool_data['datasourceClassname'] = datasources[vendor][restype] pool_data_properties['url'] = sql_url pool_data_properties['user'] = sql_user pool_data_properties['password'] = sql_password pool_data['properties'] = pool_data_properties pool_data['description'] = description res_data['description'] = description res_data['poolName'] = name res_data['enabled'] = enabled pool_data['steadyPoolSize'] = min_size 
pool_data['maxPoolSize'] = max_size pool_data['poolResizeQuantity'] = resize_quantity pool_data['idleTimeoutInSeconds'] = idle_timeout pool_data['maxWaitTimeInMillis'] = wait_timeout * 1000 pool_data['nonTransactionalConnections'] = non_transactional pool_data['transactionIsolationLevel'] = transaction_isolation pool_data['isIsolationLevelGuaranteed'] = isolation_guaranteed pool_ret = _do_element_present(name, 'jdbc_connection_pool', pool_data, server) res_ret = _do_element_present(res_name, 'jdbc_resource', res_data, server) if not pool_ret['error'] and (not res_ret['error']): if not __opts__['test']: ret['result'] = True # depends on [control=['if'], data=[]] if pool_ret['create'] or res_ret['create']: ret['changes']['pool'] = pool_ret['changes'] ret['changes']['resource'] = res_ret['changes'] if __opts__['test']: ret['comment'] = 'JDBC Datasource set to be created' # depends on [control=['if'], data=[]] else: ret['comment'] = 'JDBC Datasource created' # depends on [control=['if'], data=[]] elif pool_ret['update'] or res_ret['update']: ret['changes']['pool'] = pool_ret['changes'] ret['changes']['resource'] = res_ret['changes'] if __opts__['test']: ret['comment'] = 'JDBC Datasource set to be updated' # depends on [control=['if'], data=[]] else: ret['comment'] = 'JDBC Datasource updated' # depends on [control=['if'], data=[]] else: ret['result'] = True ret['changes'] = {} ret['comment'] = 'JDBC Datasource is already up-to-date' # depends on [control=['if'], data=[]] else: ret['result'] = False ret['comment'] = 'ERROR: {0} // {1}'.format(pool_ret['error'], res_ret['error']) return ret
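A hedged sketch of calling the Salt state function directly; in practice `__opts__` and the `_do_element_present` helper are supplied by the Salt runtime, and the connection details below are purely illustrative.

ret = jdbc_datasource_present(
    'appdb',
    description='Application database',
    vendor='postgresql',
    sql_url='jdbc:postgresql://db.example.com:5432/app',  # hypothetical URL
    sql_user='app',
    sql_password='secret',
    min_size=4,
    max_size=64,
)
print(ret['result'], ret['comment'])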
def _read_fasta_files(f, args):
    """
    read fasta files of each sample and generate a seq_obj
    with the information of each unique sequence in each sample

    :param f: file containing the path for each fasta file and
    the name of the sample. Two column format with `tab` as
    field separator

    :returns: * :code:`seq_l`: dict mapping each unique sequence to a
                seq_obj with the information of that sequence
              * :code:`sample_l`: list with the name of the samples
                (column two of the config file)
    """
    seq_l = {}
    sample_l = []
    idx = 1
    for line1 in f:
        line1 = line1.strip()
        cols = line1.split("\t")
        with open(cols[0], 'r') as fasta:
            sample_l.append(cols[1])
            counts = 0  # guard: skip sequence lines seen before any ">" header
            for line in fasta:
                if line.startswith(">"):
                    idx += 1
                    counts = int(re.search("x([0-9]+)", line.strip()).group(1))
                else:
                    seq = line.strip()
                    seq = seq[0:int(args.maxl)] if len(seq) > int(args.maxl) else seq
                    if counts > int(args.minc) and len(seq) > int(args.minl):
                        if seq not in seq_l:
                            seq_l[seq] = sequence_unique(idx, seq)
                        seq_l[seq].add_exp(cols[1], counts)
    return seq_l, sample_l
def function[_read_fasta_files, parameter[f, args]]: constant[ read fasta files of each sample and generate a seq_obj with the information of each unique sequence in each sample :param f: file containing the path for each fasta file and the name of the sample. Two column format with `tab` as field separator :returns: * :code:`seq_l`: is a list of seq_obj objects, containing the information of each sequence * :code:`sample_l`: is a list with the name of the samples (column two of the config file) ] variable[seq_l] assign[=] dictionary[[], []] variable[sample_l] assign[=] list[[]] variable[idx] assign[=] constant[1] for taget[name[line1]] in starred[name[f]] begin[:] variable[line1] assign[=] call[name[line1].strip, parameter[]] variable[cols] assign[=] call[name[line1].split, parameter[constant[ ]]] with call[name[open], parameter[call[name[cols]][constant[0]], constant[r]]] begin[:] call[name[sample_l].append, parameter[call[name[cols]][constant[1]]]] for taget[name[line]] in starred[name[fasta]] begin[:] if call[name[line].startswith, parameter[constant[>]]] begin[:] <ast.AugAssign object at 0x7da1b032a080> variable[counts] assign[=] call[name[int], parameter[call[call[name[re].search, parameter[constant[x([0-9]+)], call[name[line].strip, parameter[]]]].group, parameter[constant[1]]]]] return[tuple[[<ast.Name object at 0x7da1b039cdc0>, <ast.Name object at 0x7da1b039cdf0>]]]
keyword[def] identifier[_read_fasta_files] ( identifier[f] , identifier[args] ): literal[string] identifier[seq_l] ={} identifier[sample_l] =[] identifier[idx] = literal[int] keyword[for] identifier[line1] keyword[in] identifier[f] : identifier[line1] = identifier[line1] . identifier[strip] () identifier[cols] = identifier[line1] . identifier[split] ( literal[string] ) keyword[with] identifier[open] ( identifier[cols] [ literal[int] ], literal[string] ) keyword[as] identifier[fasta] : identifier[sample_l] . identifier[append] ( identifier[cols] [ literal[int] ]) keyword[for] identifier[line] keyword[in] identifier[fasta] : keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[idx] += literal[int] identifier[counts] = identifier[int] ( identifier[re] . identifier[search] ( literal[string] , identifier[line] . identifier[strip] ()). identifier[group] ( literal[int] )) keyword[else] : identifier[seq] = identifier[line] . identifier[strip] () identifier[seq] = identifier[seq] [ literal[int] : identifier[int] ( identifier[args] . identifier[maxl] )] keyword[if] identifier[len] ( identifier[seq] )> identifier[int] ( identifier[args] . identifier[maxl] ) keyword[else] identifier[seq] keyword[if] identifier[counts] > identifier[int] ( identifier[args] . identifier[minc] ) keyword[and] identifier[len] ( identifier[seq] )> identifier[int] ( identifier[args] . identifier[minl] ): keyword[if] identifier[seq] keyword[not] keyword[in] identifier[seq_l] : identifier[seq_l] [ identifier[seq] ]= identifier[sequence_unique] ( identifier[idx] , identifier[seq] ) identifier[seq_l] [ identifier[seq] ]. identifier[add_exp] ( identifier[cols] [ literal[int] ], identifier[counts] ) keyword[return] identifier[seq_l] , identifier[sample_l]
def _read_fasta_files(f, args): """ read fasta files of each sample and generate a seq_obj with the information of each unique sequence in each sample :param f: file containing the path for each fasta file and the name of the sample. Two column format with `tab` as field separator :returns: * :code:`seq_l`: is a list of seq_obj objects, containing the information of each sequence * :code:`sample_l`: is a list with the name of the samples (column two of the config file) """ seq_l = {} sample_l = [] idx = 1 for line1 in f: line1 = line1.strip() cols = line1.split('\t') with open(cols[0], 'r') as fasta: sample_l.append(cols[1]) for line in fasta: if line.startswith('>'): idx += 1 counts = int(re.search('x([0-9]+)', line.strip()).group(1)) # depends on [control=['if'], data=[]] else: seq = line.strip() seq = seq[0:int(args.maxl)] if len(seq) > int(args.maxl) else seq if counts > int(args.minc) and len(seq) > int(args.minl): if seq not in seq_l: seq_l[seq] = sequence_unique(idx, seq) # depends on [control=['if'], data=['seq', 'seq_l']] seq_l[seq].add_exp(cols[1], counts) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['fasta']] # depends on [control=['for'], data=['line1']] return (seq_l, sample_l)
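A usage sketch for `_read_fasta_files`; `args` only needs the `maxl`, `minl`, and `minc` attributes the function reads, so a bare Namespace stands in for the real argparse result. The config path is hypothetical, and the module's `re` import and `sequence_unique` helper are assumed to be in scope.

import argparse

args = argparse.Namespace(maxl=40, minl=17, minc=1)
# config file: one "<fasta path>\t<sample name>" line per sample
with open('samples.tsv') as f:
    seq_l, sample_l = _read_fasta_files(f, args)
print(len(seq_l), 'unique sequences across samples', sample_l)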
def evpn_afi(self, **kwargs): """EVPN AFI. This method just enables/disables or gets the EVPN AFI. Args: rbridge_id (str): The rbridge ID of the device on which BGP will be configured in a VCS fabric. delete (bool): Deletes the neighbor if `delete` is ``True``. get (bool): Get config instead of editing config. (True, False) callback (function): A function executed upon completion of the method. The only parameter passed to `callback` will be the ``ElementTree`` `config`. Returns: Return value of `callback`. Raises: None Examples: >>> import pynos.device >>> conn = ('10.24.39.203', '22') >>> auth = ('admin', 'password') >>> with pynos.device.Device(conn=conn, auth=auth) as dev: ... output = dev.bgp.local_asn(local_as='65535', ... rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225', get=True) ... output = dev.bgp.evpn_afi(rbridge_id='225', ... delete=True) """ callback = kwargs.pop('callback', self._callback) config = ET.Element("config") rbridge_id = ET.SubElement(config, "rbridge-id", xmlns="urn:brocade.com:mgmt:" "brocade-rbridge") rbridge_id_key = ET.SubElement(rbridge_id, "rbridge-id") rbridge_id_key.text = kwargs.pop('rbridge_id') router = ET.SubElement(rbridge_id, "router") router_bgp = ET.SubElement(router, "router-bgp", xmlns="urn:brocade.com:mgmt:brocade-bgp") address_family = ET.SubElement(router_bgp, "address-family") l2vpn = ET.SubElement(address_family, "l2vpn") ET.SubElement(l2vpn, "evpn") if kwargs.pop('delete', False): config.find('.//*l2vpn').set('operation', 'delete') if kwargs.pop('get', False): return callback(config, handler='get_config') return callback(config)
def function[evpn_afi, parameter[self]]: constant[EVPN AFI. This method just enables/disables or gets the EVPN AFI. Args: rbridge_id (str): The rbridge ID of the device on which BGP will be configured in a VCS fabric. delete (bool): Deletes the neighbor if `delete` is ``True``. get (bool): Get config instead of editing config. (True, False) callback (function): A function executed upon completion of the method. The only parameter passed to `callback` will be the ``ElementTree`` `config`. Returns: Return value of `callback`. Raises: None Examples: >>> import pynos.device >>> conn = ('10.24.39.203', '22') >>> auth = ('admin', 'password') >>> with pynos.device.Device(conn=conn, auth=auth) as dev: ... output = dev.bgp.local_asn(local_as='65535', ... rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225', get=True) ... output = dev.bgp.evpn_afi(rbridge_id='225', ... delete=True) ] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[rbridge_id] assign[=] call[name[ET].SubElement, parameter[name[config], constant[rbridge-id]]] variable[rbridge_id_key] assign[=] call[name[ET].SubElement, parameter[name[rbridge_id], constant[rbridge-id]]] name[rbridge_id_key].text assign[=] call[name[kwargs].pop, parameter[constant[rbridge_id]]] variable[router] assign[=] call[name[ET].SubElement, parameter[name[rbridge_id], constant[router]]] variable[router_bgp] assign[=] call[name[ET].SubElement, parameter[name[router], constant[router-bgp]]] variable[address_family] assign[=] call[name[ET].SubElement, parameter[name[router_bgp], constant[address-family]]] variable[l2vpn] assign[=] call[name[ET].SubElement, parameter[name[address_family], constant[l2vpn]]] call[name[ET].SubElement, parameter[name[l2vpn], constant[evpn]]] if call[name[kwargs].pop, parameter[constant[delete], constant[False]]] begin[:] call[call[name[config].find, parameter[constant[.//*l2vpn]]].set, parameter[constant[operation], constant[delete]]] if call[name[kwargs].pop, parameter[constant[get], constant[False]]] begin[:] return[call[name[callback], parameter[name[config]]]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[evpn_afi] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[rbridge_id] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] literal[string] ) identifier[rbridge_id_key] = identifier[ET] . identifier[SubElement] ( identifier[rbridge_id] , literal[string] ) identifier[rbridge_id_key] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[router] = identifier[ET] . identifier[SubElement] ( identifier[rbridge_id] , literal[string] ) identifier[router_bgp] = identifier[ET] . identifier[SubElement] ( identifier[router] , literal[string] , identifier[xmlns] = literal[string] ) identifier[address_family] = identifier[ET] . identifier[SubElement] ( identifier[router_bgp] , literal[string] ) identifier[l2vpn] = identifier[ET] . identifier[SubElement] ( identifier[address_family] , literal[string] ) identifier[ET] . identifier[SubElement] ( identifier[l2vpn] , literal[string] ) keyword[if] identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] ): identifier[config] . identifier[find] ( literal[string] ). identifier[set] ( literal[string] , literal[string] ) keyword[if] identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] ): keyword[return] identifier[callback] ( identifier[config] , identifier[handler] = literal[string] ) keyword[return] identifier[callback] ( identifier[config] )
def evpn_afi(self, **kwargs): """EVPN AFI. This method just enables/disables or gets the EVPN AFI. Args: rbridge_id (str): The rbridge ID of the device on which BGP will be configured in a VCS fabric. delete (bool): Deletes the neighbor if `delete` is ``True``. get (bool): Get config instead of editing config. (True, False) callback (function): A function executed upon completion of the method. The only parameter passed to `callback` will be the ``ElementTree`` `config`. Returns: Return value of `callback`. Raises: None Examples: >>> import pynos.device >>> conn = ('10.24.39.203', '22') >>> auth = ('admin', 'password') >>> with pynos.device.Device(conn=conn, auth=auth) as dev: ... output = dev.bgp.local_asn(local_as='65535', ... rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225') ... output = dev.bgp.evpn_afi(rbridge_id='225', get=True) ... output = dev.bgp.evpn_afi(rbridge_id='225', ... delete=True) """ callback = kwargs.pop('callback', self._callback) config = ET.Element('config') rbridge_id = ET.SubElement(config, 'rbridge-id', xmlns='urn:brocade.com:mgmt:brocade-rbridge') rbridge_id_key = ET.SubElement(rbridge_id, 'rbridge-id') rbridge_id_key.text = kwargs.pop('rbridge_id') router = ET.SubElement(rbridge_id, 'router') router_bgp = ET.SubElement(router, 'router-bgp', xmlns='urn:brocade.com:mgmt:brocade-bgp') address_family = ET.SubElement(router_bgp, 'address-family') l2vpn = ET.SubElement(address_family, 'l2vpn') ET.SubElement(l2vpn, 'evpn') if kwargs.pop('delete', False): config.find('.//*l2vpn').set('operation', 'delete') # depends on [control=['if'], data=[]] if kwargs.pop('get', False): return callback(config, handler='get_config') # depends on [control=['if'], data=[]] return callback(config)
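A sketch condensed from the method's own docstring examples; the host, credentials, and rbridge ID are the illustrative values used there.

import pynos.device

conn = ('10.24.39.203', '22')
auth = ('admin', 'password')
with pynos.device.Device(conn=conn, auth=auth) as dev:
    dev.bgp.local_asn(local_as='65535', rbridge_id='225')
    dev.bgp.evpn_afi(rbridge_id='225')                      # enable the EVPN AFI
    current = dev.bgp.evpn_afi(rbridge_id='225', get=True)  # read back the config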
def rect_to_cyl_vec(vx, vy, vz, X, Y, Z, cyl=False):
    """
    NAME:

       rect_to_cyl_vec

    PURPOSE:

       transform vectors from rectangular to cylindrical coordinates vectors

    INPUT:

       vx - velocity in the rectangular x direction

       vy - velocity in the rectangular y direction

       vz - velocity in the z direction

       X - X

       Y - Y

       Z - Z

       cyl - if True, X,Y,Z are already cylindrical (R,phi,Z)

    OUTPUT:

       vR,vT,vz

    HISTORY:

       2010-09-24 - Written - Bovy (NYU)

    """
    if not cyl:
        R, phi, Z = rect_to_cyl(X, Y, Z)
    else:
        phi = Y
    vr = vx * sc.cos(phi) + vy * sc.sin(phi)
    vt = -vx * sc.sin(phi) + vy * sc.cos(phi)
    return (vr, vt, vz)
def function[rect_to_cyl_vec, parameter[vx, vy, vz, X, Y, Z, cyl]]: constant[ NAME: rect_to_cyl_vec PURPOSE: transform vectors from rectangular to cylindrical coordinates vectors INPUT: vx - vy - vz - X - X Y - Y Z - Z cyl - if True, X,Y,Z are already cylindrical OUTPUT: vR,vT,vz HISTORY: 2010-09-24 - Written - Bovy (NYU) ] if <ast.UnaryOp object at 0x7da1b0da3f70> begin[:] <ast.Tuple object at 0x7da1b0da2500> assign[=] call[name[rect_to_cyl], parameter[name[X], name[Y], name[Z]]] variable[vr] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0da2140> * call[name[sc].cos, parameter[name[phi]]]] + binary_operation[name[vy] * call[name[sc].sin, parameter[name[phi]]]]] variable[vt] assign[=] binary_operation[binary_operation[<ast.UnaryOp object at 0x7da1b0da3fd0> * call[name[sc].sin, parameter[name[phi]]]] + binary_operation[name[vy] * call[name[sc].cos, parameter[name[phi]]]]] return[tuple[[<ast.Name object at 0x7da1b0cf7ac0>, <ast.Name object at 0x7da1b0cf5c30>, <ast.Name object at 0x7da1b0cf7850>]]]
keyword[def] identifier[rect_to_cyl_vec] ( identifier[vx] , identifier[vy] , identifier[vz] , identifier[X] , identifier[Y] , identifier[Z] , identifier[cyl] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[cyl] : identifier[R] , identifier[phi] , identifier[Z] = identifier[rect_to_cyl] ( identifier[X] , identifier[Y] , identifier[Z] ) keyword[else] : identifier[phi] = identifier[Y] identifier[vr] =+ identifier[vx] * identifier[sc] . identifier[cos] ( identifier[phi] )+ identifier[vy] * identifier[sc] . identifier[sin] ( identifier[phi] ) identifier[vt] =- identifier[vx] * identifier[sc] . identifier[sin] ( identifier[phi] )+ identifier[vy] * identifier[sc] . identifier[cos] ( identifier[phi] ) keyword[return] ( identifier[vr] , identifier[vt] , identifier[vz] )
def rect_to_cyl_vec(vx, vy, vz, X, Y, Z, cyl=False): """ NAME: rect_to_cyl_vec PURPOSE: transform vectors from rectangular to cylindrical coordinates vectors INPUT: vx - vy - vz - X - X Y - Y Z - Z cyl - if True, X,Y,Z are already cylindrical OUTPUT: vR,vT,vz HISTORY: 2010-09-24 - Written - Bovy (NYU) """ if not cyl: (R, phi, Z) = rect_to_cyl(X, Y, Z) # depends on [control=['if'], data=[]] else: phi = Y vr = +vx * sc.cos(phi) + vy * sc.sin(phi) vt = -vx * sc.sin(phi) + vy * sc.cos(phi) return (vr, vt, vz)
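A quick sanity check for `rect_to_cyl_vec`; the function body uses `sc` for the trigonometry, which here is assumed to be numpy (or scipy) imported under that alias. Passing cyl=True avoids the `rect_to_cyl` dependency, so Y is taken directly as the azimuth phi.

import numpy as sc  # assumed alias used by the function body

# At phi = 0, a velocity purely along +y should come out purely tangential:
vr, vt, vz = rect_to_cyl_vec(0., 1., 0., 1., 0., 0., cyl=True)
print(vr, vt, vz)  # expected: 0.0 1.0 0.0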
def move_emitters(self):
        """
        Move each emitter by its velocity.
        Emitters that move off the ends and are not wrapped get sacked.
        """
        moved_emitters = []
        for e_pos, e_dir, e_vel, e_range, e_color, e_pal in self.emitters:
            e_pos = e_pos + e_vel
            if e_vel > 0:
                if e_pos >= (self._end + 1):
                    if self.wrap:
                        e_pos = e_pos - (self._end + 1) + self._start
                    else:
                        continue  # Sacked
            else:
                if e_pos < self._start:
                    if self.wrap:
                        e_pos = e_pos + self._end + 1 + self._start
                    else:
                        continue  # Sacked
            moved_emitters.append(
                (e_pos, e_dir, e_vel, e_range, e_color, e_pal))
        self.emitters = moved_emitters
def function[move_emitters, parameter[self]]: constant[ Move each emitter by it's velocity. Emmitters that move off the ends and are not wrapped get sacked. ] variable[moved_emitters] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da204621180>, <ast.Name object at 0x7da204621b70>, <ast.Name object at 0x7da204622d40>, <ast.Name object at 0x7da204621e40>, <ast.Name object at 0x7da204621120>, <ast.Name object at 0x7da204621840>]]] in starred[name[self].emitters] begin[:] variable[e_pos] assign[=] binary_operation[name[e_pos] + name[e_vel]] if compare[name[e_vel] greater[>] constant[0]] begin[:] if compare[name[e_pos] greater_or_equal[>=] binary_operation[name[self]._end + constant[1]]] begin[:] if name[self].wrap begin[:] variable[e_pos] assign[=] binary_operation[binary_operation[name[e_pos] - binary_operation[name[self]._end + constant[1]]] + name[self]._start] call[name[moved_emitters].append, parameter[tuple[[<ast.Name object at 0x7da2046234c0>, <ast.Name object at 0x7da204622b60>, <ast.Name object at 0x7da204622bc0>, <ast.Name object at 0x7da2046207c0>, <ast.Name object at 0x7da204621ae0>, <ast.Name object at 0x7da204623790>]]]] name[self].emitters assign[=] name[moved_emitters]
keyword[def] identifier[move_emitters] ( identifier[self] ): literal[string] identifier[moved_emitters] =[] keyword[for] identifier[e_pos] , identifier[e_dir] , identifier[e_vel] , identifier[e_range] , identifier[e_color] , identifier[e_pal] keyword[in] identifier[self] . identifier[emitters] : identifier[e_pos] = identifier[e_pos] + identifier[e_vel] keyword[if] identifier[e_vel] > literal[int] : keyword[if] identifier[e_pos] >=( identifier[self] . identifier[_end] + literal[int] ): keyword[if] identifier[self] . identifier[wrap] : identifier[e_pos] = identifier[e_pos] -( identifier[self] . identifier[_end] + literal[int] )+ identifier[self] . identifier[_start] keyword[else] : keyword[continue] keyword[else] : keyword[if] identifier[e_pos] < identifier[self] . identifier[_start] : keyword[if] identifier[self] . identifier[wrap] : identifier[e_pos] = identifier[e_pos] + identifier[self] . identifier[_end] + literal[int] + identifier[self] . identifier[_start] keyword[else] : keyword[continue] identifier[moved_emitters] . identifier[append] ( ( identifier[e_pos] , identifier[e_dir] , identifier[e_vel] , identifier[e_range] , identifier[e_color] , identifier[e_pal] )) identifier[self] . identifier[emitters] = identifier[moved_emitters]
def move_emitters(self): """ Move each emitter by it's velocity. Emmitters that move off the ends and are not wrapped get sacked. """ moved_emitters = [] for (e_pos, e_dir, e_vel, e_range, e_color, e_pal) in self.emitters: e_pos = e_pos + e_vel if e_vel > 0: if e_pos >= self._end + 1: if self.wrap: e_pos = e_pos - (self._end + 1) + self._start # depends on [control=['if'], data=[]] else: continue # Sacked # depends on [control=['if'], data=['e_pos']] # depends on [control=['if'], data=[]] elif e_pos < self._start: if self.wrap: e_pos = e_pos + self._end + 1 + self._start # depends on [control=['if'], data=[]] else: continue # Sacked # depends on [control=['if'], data=['e_pos']] moved_emitters.append((e_pos, e_dir, e_vel, e_range, e_color, e_pal)) # depends on [control=['for'], data=[]] self.emitters = moved_emitters
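A minimal harness for `move_emitters`; the `_start`, `_end`, `wrap`, and `emitters` attribute names come straight from the method body, while the host class itself is a hypothetical stand-in for the real animation object.

class Strip(object):
    move_emitters = move_emitters  # reuse the function defined above as a method

    def __init__(self):
        self._start, self._end, self.wrap = 0, 9, True
        # each emitter: (pos, dir, vel, range, color, palette)
        self.emitters = [(9, 1, 1, 3, (255, 0, 0), None)]

s = Strip()
s.move_emitters()
print(s.emitters)  # position 9 + velocity 1 wraps past _end back to _start (0)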
def fmtval(value, colorstr=None, precision=None, spacing=True, trunc=True, end=' '):
    ''' Formats and returns a given number according to specifications. '''
    colwidth = opts.colwidth
    # get precision
    if precision is None:
        precision = opts.precision
    fmt = '%%.%sf' % precision

    # format with decimal mark, separators
    # (locale.format was removed in Python 3.12; format_string is equivalent here)
    result = locale.format_string(fmt, value, True)
    if spacing:
        result = '%%%ss' % colwidth % result

    if trunc:
        if len(result) > colwidth:      # truncate w/ellipsis
            result = truncstr(result, colwidth)

    # Add color if needed
    if opts.incolor and colorstr:
        return colorstr % result + end
    else:
        return result + end
def function[fmtval, parameter[value, colorstr, precision, spacing, trunc, end]]: constant[ Formats and returns a given number according to specifications. ] variable[colwidth] assign[=] name[opts].colwidth if compare[name[precision] is constant[None]] begin[:] variable[precision] assign[=] name[opts].precision variable[fmt] assign[=] binary_operation[constant[%%.%sf] <ast.Mod object at 0x7da2590d6920> name[precision]] variable[result] assign[=] call[name[locale].format, parameter[name[fmt], name[value], constant[True]]] if name[spacing] begin[:] variable[result] assign[=] binary_operation[binary_operation[constant[%%%ss] <ast.Mod object at 0x7da2590d6920> name[colwidth]] <ast.Mod object at 0x7da2590d6920> name[result]] if name[trunc] begin[:] if compare[call[name[len], parameter[name[result]]] greater[>] name[colwidth]] begin[:] variable[result] assign[=] call[name[truncstr], parameter[name[result], name[colwidth]]] if <ast.BoolOp object at 0x7da20c990910> begin[:] return[binary_operation[binary_operation[name[colorstr] <ast.Mod object at 0x7da2590d6920> name[result]] + name[end]]]
keyword[def] identifier[fmtval] ( identifier[value] , identifier[colorstr] = keyword[None] , identifier[precision] = keyword[None] , identifier[spacing] = keyword[True] , identifier[trunc] = keyword[True] , identifier[end] = literal[string] ): literal[string] identifier[colwidth] = identifier[opts] . identifier[colwidth] keyword[if] identifier[precision] keyword[is] keyword[None] : identifier[precision] = identifier[opts] . identifier[precision] identifier[fmt] = literal[string] % identifier[precision] identifier[result] = identifier[locale] . identifier[format] ( identifier[fmt] , identifier[value] , keyword[True] ) keyword[if] identifier[spacing] : identifier[result] = literal[string] % identifier[colwidth] % identifier[result] keyword[if] identifier[trunc] : keyword[if] identifier[len] ( identifier[result] )> identifier[colwidth] : identifier[result] = identifier[truncstr] ( identifier[result] , identifier[colwidth] ) keyword[if] identifier[opts] . identifier[incolor] keyword[and] identifier[colorstr] : keyword[return] identifier[colorstr] % identifier[result] + identifier[end] keyword[else] : keyword[return] identifier[result] + identifier[end]
def fmtval(value, colorstr=None, precision=None, spacing=True, trunc=True, end=' '): """ Formats and returns a given number according to specifications. """ colwidth = opts.colwidth # get precision if precision is None: precision = opts.precision # depends on [control=['if'], data=['precision']] fmt = '%%.%sf' % precision # format with decimal mark, separators result = locale.format(fmt, value, True) if spacing: result = '%%%ss' % colwidth % result # depends on [control=['if'], data=[]] if trunc: if len(result) > colwidth: # truncate w/ellipsis result = truncstr(result, colwidth) # depends on [control=['if'], data=['colwidth']] # depends on [control=['if'], data=[]] # Add color if needed if opts.incolor and colorstr: return colorstr % result + end # depends on [control=['if'], data=[]] else: return result + end
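A hedged sketch of `fmtval` in use; `opts` and `truncstr` are module globals in the original, so the stand-in class below only works if it resolves in that module's namespace, and the locale name is an assumption about the host system.

import locale

locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')  # assumed available locale

class opts:  # hypothetical stand-in for the module's option object
    colwidth, precision, incolor = 14, 2, False

print(repr(fmtval(1234567.891)))  # -> '  1,234,567.89 ' padded to colwidth, plus end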
def do_cp(self, params): """ \x1b[1mNAME\x1b[0m cp - Copy from/to local/remote or remote/remote paths \x1b[1mSYNOPSIS\x1b[0m cp <src> <dst> [recursive] [overwrite] [asynchronous] [verbose] [max_items] \x1b[1mDESCRIPTION\x1b[0m src and dst can be: /some/path (in the connected server) zk://[scheme:user:passwd@]host/<path> json://!some!path!backup.json/some/path file:///some/file with a few restrictions. Given the semantic differences that znodes have with filesystem directories recursive copying from znodes to an fs could lose data, but to a JSON file it would work just fine. \x1b[1mOPTIONS\x1b[0m * recursive: recursively copy src (default: false) * overwrite: overwrite the dst path (default: false) * asynchronous: do asynchronous copies (default: false) * verbose: verbose output of every path (default: false) * max_items: max number of paths to copy (0 is infinite) (default: 0) \x1b[1mEXAMPLES\x1b[0m > cp /some/znode /backup/copy-znode # local > cp /some/znode zk://digest:bernie:pasta@10.0.0.1/backup true true > cp /some/znode json://!home!user!backup.json/ true true > cp file:///tmp/file /some/zone # fs to zk """ try: self.copy(params, params.recursive, params.overwrite, params.max_items, False) except AuthFailedError: self.show_output("Authentication failed.")
def function[do_cp, parameter[self, params]]: constant[ NAME cp - Copy from/to local/remote or remote/remote paths SYNOPSIS cp <src> <dst> [recursive] [overwrite] [asynchronous] [verbose] [max_items] DESCRIPTION src and dst can be: /some/path (in the connected server) zk://[scheme:user:passwd@]host/<path> json://!some!path!backup.json/some/path file:///some/file with a few restrictions. Given the semantic differences that znodes have with filesystem directories recursive copying from znodes to an fs could lose data, but to a JSON file it would work just fine. OPTIONS * recursive: recursively copy src (default: false) * overwrite: overwrite the dst path (default: false) * asynchronous: do asynchronous copies (default: false) * verbose: verbose output of every path (default: false) * max_items: max number of paths to copy (0 is infinite) (default: 0) EXAMPLES > cp /some/znode /backup/copy-znode # local > cp /some/znode zk://digest:bernie:pasta@10.0.0.1/backup true true > cp /some/znode json://!home!user!backup.json/ true true > cp file:///tmp/file /some/zone # fs to zk ] <ast.Try object at 0x7da18f00f5b0>
keyword[def] identifier[do_cp] ( identifier[self] , identifier[params] ): literal[string] keyword[try] : identifier[self] . identifier[copy] ( identifier[params] , identifier[params] . identifier[recursive] , identifier[params] . identifier[overwrite] , identifier[params] . identifier[max_items] , keyword[False] ) keyword[except] identifier[AuthFailedError] : identifier[self] . identifier[show_output] ( literal[string] )
def do_cp(self, params): """ \x1b[1mNAME\x1b[0m cp - Copy from/to local/remote or remote/remote paths \x1b[1mSYNOPSIS\x1b[0m cp <src> <dst> [recursive] [overwrite] [asynchronous] [verbose] [max_items] \x1b[1mDESCRIPTION\x1b[0m src and dst can be: /some/path (in the connected server) zk://[scheme:user:passwd@]host/<path> json://!some!path!backup.json/some/path file:///some/file with a few restrictions. Given the semantic differences that znodes have with filesystem directories recursive copying from znodes to an fs could lose data, but to a JSON file it would work just fine. \x1b[1mOPTIONS\x1b[0m * recursive: recursively copy src (default: false) * overwrite: overwrite the dst path (default: false) * asynchronous: do asynchronous copies (default: false) * verbose: verbose output of every path (default: false) * max_items: max number of paths to copy (0 is infinite) (default: 0) \x1b[1mEXAMPLES\x1b[0m > cp /some/znode /backup/copy-znode # local > cp /some/znode zk://digest:bernie:pasta@10.0.0.1/backup true true > cp /some/znode json://!home!user!backup.json/ true true > cp file:///tmp/file /some/zone # fs to zk """ try: self.copy(params, params.recursive, params.overwrite, params.max_items, False) # depends on [control=['try'], data=[]] except AuthFailedError: self.show_output('Authentication failed.') # depends on [control=['except'], data=[]]
def isValidSemver(version):
    """Semantic version number - determines whether the version is qualified.
    The format is MAJOR.MINOR.PATCH; see https://semver.org/ for details."""
    if version and isinstance(version, string_types):
        try:
            semver.parse(version)
        except (TypeError, ValueError):
            return False
        else:
            return True
    return False
def function[isValidSemver, parameter[version]]: constant[Semantic version number - determines whether the version is qualified. The format is MAJOR.MINOR.PATCH; see https://semver.org/ for details.] if <ast.BoolOp object at 0x7da20c6aa350> begin[:] <ast.Try object at 0x7da1b0cb80a0> return[constant[False]]
keyword[def] identifier[isValidSemver] ( identifier[version] ): literal[string] keyword[if] identifier[version] keyword[and] identifier[isinstance] ( identifier[version] , identifier[string_types] ): keyword[try] : identifier[semver] . identifier[parse] ( identifier[version] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[return] keyword[False] keyword[else] : keyword[return] keyword[True] keyword[return] keyword[False]
def isValidSemver(version): """Semantic version number - determines whether the version is qualified. The format is MAJOR.MINOR.PATCH; see https://semver.org/ for details.""" if version and isinstance(version, string_types): try: semver.parse(version) # depends on [control=['try'], data=[]] except (TypeError, ValueError): return False # depends on [control=['except'], data=[]] else: return True # depends on [control=['if'], data=[]] return False
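A brief hedged check of the behaviour, assuming the semver package the snippet imports is installed:

assert isValidSemver('1.2.3')            # full MAJOR.MINOR.PATCH string
assert not isValidSemver('1.2')          # missing PATCH -> ValueError -> False
assert not isValidSemver(123)            # non-string input is rejected before parsing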
def select_font_face(self, family='', slant=constants.FONT_SLANT_NORMAL, weight=constants.FONT_WEIGHT_NORMAL): """Selects a family and style of font from a simplified description as a family name, slant and weight. .. note:: The :meth:`select_font_face` method is part of what the cairo designers call the "toy" text API. It is convenient for short demos and simple programs, but it is not expected to be adequate for serious text-using applications. See :ref:`fonts` for details. Cairo provides no operation to list available family names on the system (this is a "toy", remember), but the standard CSS2 generic family names, (``"serif"``, ``"sans-serif"``, ``"cursive"``, ``"fantasy"``, ``"monospace"``), are likely to work as expected. If family starts with the string ``"cairo:"``, or if no native font backends are compiled in, cairo will use an internal font family. The internal font family recognizes many modifiers in the family string, most notably, it recognizes the string ``"monospace"``. That is, the family name ``"cairo:monospace"`` will use the monospace version of the internal font family. If text is drawn without a call to :meth:`select_font_face`, (nor :meth:`set_font_face` nor :meth:`set_scaled_font`), the default family is platform-specific, but is essentially ``"sans-serif"``. Default slant is :obj:`NORMAL <FONT_SLANT_NORMAL>`, and default weight is :obj:`NORMAL <FONT_WEIGHT_NORMAL>`. This method is equivalent to a call to :class:`ToyFontFace` followed by :meth:`set_font_face`. """ cairo.cairo_select_font_face( self._pointer, _encode_string(family), slant, weight) self._check_status()
def function[select_font_face, parameter[self, family, slant, weight]]: constant[Selects a family and style of font from a simplified description as a family name, slant and weight. .. note:: The :meth:`select_font_face` method is part of what the cairo designers call the "toy" text API. It is convenient for short demos and simple programs, but it is not expected to be adequate for serious text-using applications. See :ref:`fonts` for details. Cairo provides no operation to list available family names on the system (this is a "toy", remember), but the standard CSS2 generic family names, (``"serif"``, ``"sans-serif"``, ``"cursive"``, ``"fantasy"``, ``"monospace"``), are likely to work as expected. If family starts with the string ``"cairo:"``, or if no native font backends are compiled in, cairo will use an internal font family. The internal font family recognizes many modifiers in the family string, most notably, it recognizes the string ``"monospace"``. That is, the family name ``"cairo:monospace"`` will use the monospace version of the internal font family. If text is drawn without a call to :meth:`select_font_face`, (nor :meth:`set_font_face` nor :meth:`set_scaled_font`), the default family is platform-specific, but is essentially ``"sans-serif"``. Default slant is :obj:`NORMAL <FONT_SLANT_NORMAL>`, and default weight is :obj:`NORMAL <FONT_WEIGHT_NORMAL>`. This method is equivalent to a call to :class:`ToyFontFace` followed by :meth:`set_font_face`. ] call[name[cairo].cairo_select_font_face, parameter[name[self]._pointer, call[name[_encode_string], parameter[name[family]]], name[slant], name[weight]]] call[name[self]._check_status, parameter[]]
keyword[def] identifier[select_font_face] ( identifier[self] , identifier[family] = literal[string] , identifier[slant] = identifier[constants] . identifier[FONT_SLANT_NORMAL] , identifier[weight] = identifier[constants] . identifier[FONT_WEIGHT_NORMAL] ): literal[string] identifier[cairo] . identifier[cairo_select_font_face] ( identifier[self] . identifier[_pointer] , identifier[_encode_string] ( identifier[family] ), identifier[slant] , identifier[weight] ) identifier[self] . identifier[_check_status] ()
def select_font_face(self, family='', slant=constants.FONT_SLANT_NORMAL, weight=constants.FONT_WEIGHT_NORMAL): """Selects a family and style of font from a simplified description as a family name, slant and weight. .. note:: The :meth:`select_font_face` method is part of what the cairo designers call the "toy" text API. It is convenient for short demos and simple programs, but it is not expected to be adequate for serious text-using applications. See :ref:`fonts` for details. Cairo provides no operation to list available family names on the system (this is a "toy", remember), but the standard CSS2 generic family names, (``"serif"``, ``"sans-serif"``, ``"cursive"``, ``"fantasy"``, ``"monospace"``), are likely to work as expected. If family starts with the string ``"cairo:"``, or if no native font backends are compiled in, cairo will use an internal font family. The internal font family recognizes many modifiers in the family string, most notably, it recognizes the string ``"monospace"``. That is, the family name ``"cairo:monospace"`` will use the monospace version of the internal font family. If text is drawn without a call to :meth:`select_font_face`, (nor :meth:`set_font_face` nor :meth:`set_scaled_font`), the default family is platform-specific, but is essentially ``"sans-serif"``. Default slant is :obj:`NORMAL <FONT_SLANT_NORMAL>`, and default weight is :obj:`NORMAL <FONT_WEIGHT_NORMAL>`. This method is equivalent to a call to :class:`ToyFontFace` followed by :meth:`set_font_face`. """ cairo.cairo_select_font_face(self._pointer, _encode_string(family), slant, weight) self._check_status()
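A hedged usage sketch of the "toy" text API above, assuming cairocffi (which the FFI-style cairo.cairo_select_font_face call suggests); the surface size and text are illustrative only:

import cairocffi as cairo

surface = cairo.ImageSurface(cairo.FORMAT_ARGB32, 200, 50)
ctx = cairo.Context(surface)
ctx.select_font_face('cairo:monospace',              # internal monospace family
                     cairo.FONT_SLANT_ITALIC,
                     cairo.FONT_WEIGHT_BOLD)
ctx.move_to(10, 30)
ctx.show_text('toy text API')                        # drawn with the selected face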
def get_parameters(self, params, graph=None): """Get the parameters of the model. :param params: dictionary of keys (str names) and values (tensors). :return: evaluated tensors in params """ g = graph if graph is not None else self.tf_graph with g.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) out = {} for par in params: if type(params[par]) == list: for i, p in enumerate(params[par]): out[par + '-' + str(i+1)] = p.eval() else: out[par] = params[par].eval() return out
def function[get_parameters, parameter[self, params, graph]]: constant[Get the parameters of the model. :param params: dictionary of keys (str names) and values (tensors). :return: evaluated tensors in params ] variable[g] assign[=] <ast.IfExp object at 0x7da18f09cd30> with call[name[g].as_default, parameter[]] begin[:] with call[name[tf].Session, parameter[]] begin[:] call[name[self].tf_saver.restore, parameter[name[self].tf_session, name[self].model_path]] variable[out] assign[=] dictionary[[], []] for taget[name[par]] in starred[name[params]] begin[:] if compare[call[name[type], parameter[call[name[params]][name[par]]]] equal[==] name[list]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18f09ebf0>, <ast.Name object at 0x7da18f09dcc0>]]] in starred[call[name[enumerate], parameter[call[name[params]][name[par]]]]] begin[:] call[name[out]][binary_operation[binary_operation[name[par] + constant[-]] + call[name[str], parameter[binary_operation[name[i] + constant[1]]]]]] assign[=] call[name[p].eval, parameter[]] return[name[out]]
keyword[def] identifier[get_parameters] ( identifier[self] , identifier[params] , identifier[graph] = keyword[None] ): literal[string] identifier[g] = identifier[graph] keyword[if] identifier[graph] keyword[is] keyword[not] keyword[None] keyword[else] identifier[self] . identifier[tf_graph] keyword[with] identifier[g] . identifier[as_default] (): keyword[with] identifier[tf] . identifier[Session] () keyword[as] identifier[self] . identifier[tf_session] : identifier[self] . identifier[tf_saver] . identifier[restore] ( identifier[self] . identifier[tf_session] , identifier[self] . identifier[model_path] ) identifier[out] ={} keyword[for] identifier[par] keyword[in] identifier[params] : keyword[if] identifier[type] ( identifier[params] [ identifier[par] ])== identifier[list] : keyword[for] identifier[i] , identifier[p] keyword[in] identifier[enumerate] ( identifier[params] [ identifier[par] ]): identifier[out] [ identifier[par] + literal[string] + identifier[str] ( identifier[i] + literal[int] )]= identifier[p] . identifier[eval] () keyword[else] : identifier[out] [ identifier[par] ]= identifier[params] [ identifier[par] ]. identifier[eval] () keyword[return] identifier[out]
def get_parameters(self, params, graph=None): """Get the parameters of the model. :param params: dictionary of keys (str names) and values (tensors). :return: evaluated tensors in params """ g = graph if graph is not None else self.tf_graph with g.as_default(): with tf.Session() as self.tf_session: self.tf_saver.restore(self.tf_session, self.model_path) out = {} for par in params: if type(params[par]) == list: for (i, p) in enumerate(params[par]): out[par + '-' + str(i + 1)] = p.eval() # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] else: out[par] = params[par].eval() # depends on [control=['for'], data=['par']] return out # depends on [control=['with'], data=[]] # depends on [control=['with'], data=[]]
def credential_property(cred): """ A credential property factory for each message class that will set private attributes and return obfuscated credentials when requested. """ def getter(instance): return "***obfuscated***" def setter(instance, value): private = "_" + cred instance.__dict__[private] = value return property(fget=getter, fset=setter)
def function[credential_property, parameter[cred]]: constant[ A credential property factory for each message class that will set private attributes and return obfuscated credentials when requested. ] def function[getter, parameter[instance]]: return[constant[***obfuscated***]] def function[setter, parameter[instance, value]]: variable[private] assign[=] binary_operation[constant[_] + name[cred]] call[name[instance].__dict__][name[private]] assign[=] name[value] return[call[name[property], parameter[]]]
keyword[def] identifier[credential_property] ( identifier[cred] ): literal[string] keyword[def] identifier[getter] ( identifier[instance] ): keyword[return] literal[string] keyword[def] identifier[setter] ( identifier[instance] , identifier[value] ): identifier[private] = literal[string] + identifier[cred] identifier[instance] . identifier[__dict__] [ identifier[private] ]= identifier[value] keyword[return] identifier[property] ( identifier[fget] = identifier[getter] , identifier[fset] = identifier[setter] )
def credential_property(cred): """ A credential property factory for each message class that will set private attributes and return obfuscated credentials when requested. """ def getter(instance): return '***obfuscated***' def setter(instance, value): private = '_' + cred instance.__dict__[private] = value return property(fget=getter, fset=setter)
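A hedged usage sketch of the factory: the Message class and password attribute below are hypothetical, chosen only to show how the property stores the raw value under a private name while masking every read.

class Message:                                   # hypothetical consumer of the factory
    password = credential_property('password')

msg = Message()
msg.password = 's3cret'                          # setter stores it as _password
print(msg.password)                              # -> ***obfuscated***
print(msg.__dict__['_password'])                 # raw value still reachable internally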
def start_server(
    server_address: str = None, *, backend: Callable = multiprocessing.Process
) -> Tuple[multiprocessing.Process, str]:
    """
    Start a new zproc server.

    :param server_address:
        .. include:: /api/snippets/server_address.rst

    :param backend:
        .. include:: /api/snippets/backend.rst

    :return:
        A ``tuple``, containing a :py:class:`multiprocessing.Process`
        object for the server and the server address.
    """
    recv_conn, send_conn = multiprocessing.Pipe()

    server_process = backend(target=main, args=[server_address, send_conn])
    server_process.start()

    try:
        with recv_conn:
            server_meta: ServerMeta = serializer.loads(recv_conn.recv_bytes())
    except zmq.ZMQError as e:
        if e.errno == 98:
            raise ConnectionError(
                "Encountered - %s. Perhaps the server is already running?" % repr(e)
            )
        if e.errno == 22:
            raise ValueError(
                "Encountered - %s. `server_address` must be a string containing a valid endpoint."
                % repr(e)
            )
        raise

    return server_process, server_meta.state_router
def function[start_server, parameter[server_address]]: constant[ Start a new zproc server. :param server_address: .. include:: /api/snippets/server_address.rst :param backend: .. include:: /api/snippets/backend.rst :return: A ``tuple``, containing a :py:class:`multiprocessing.Process` object for the server and the server address. ] <ast.Tuple object at 0x7da1b05385b0> assign[=] call[name[multiprocessing].Pipe, parameter[]] variable[server_process] assign[=] call[name[backend], parameter[]] call[name[server_process].start, parameter[]] <ast.Try object at 0x7da1b053a140> return[tuple[[<ast.Name object at 0x7da1b053a950>, <ast.Attribute object at 0x7da1b053a560>]]]
keyword[def] identifier[start_server] ( identifier[server_address] : identifier[str] = keyword[None] ,*, identifier[backend] : identifier[Callable] = identifier[multiprocessing] . identifier[Process] )-> identifier[Tuple] [ identifier[multiprocessing] . identifier[Process] , identifier[str] ]: literal[string] identifier[recv_conn] , identifier[send_conn] = identifier[multiprocessing] . identifier[Pipe] () identifier[server_process] = identifier[backend] ( identifier[target] = identifier[main] , identifier[args] =[ identifier[server_address] , identifier[send_conn] ]) identifier[server_process] . identifier[start] () keyword[try] : keyword[with] identifier[recv_conn] : identifier[server_meta] : identifier[ServerMeta] = identifier[serializer] . identifier[loads] ( identifier[recv_conn] . identifier[recv_bytes] ()) keyword[except] identifier[zmq] . identifier[ZMQError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] == literal[int] : keyword[raise] identifier[ConnectionError] ( literal[string] % identifier[repr] ( identifier[e] ) ) keyword[if] identifier[e] . identifier[errno] == literal[int] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[repr] ( identifier[e] ) ) keyword[raise] keyword[return] identifier[server_process] , identifier[server_meta] . identifier[state_router]
def start_server(server_address: str=None, *, backend: Callable=multiprocessing.Process) -> Tuple[multiprocessing.Process, str]:
    """
    Start a new zproc server.

    :param server_address:
        .. include:: /api/snippets/server_address.rst

    :param backend:
        .. include:: /api/snippets/backend.rst

    :return:
        A ``tuple``, containing a :py:class:`multiprocessing.Process`
        object for the server and the server address.
    """
    (recv_conn, send_conn) = multiprocessing.Pipe()
    server_process = backend(target=main, args=[server_address, send_conn])
    server_process.start()
    try:
        with recv_conn:
            server_meta: ServerMeta = serializer.loads(recv_conn.recv_bytes()) # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]]
    except zmq.ZMQError as e:
        if e.errno == 98:
            raise ConnectionError('Encountered - %s. Perhaps the server is already running?' % repr(e)) # depends on [control=['if'], data=[]]
        if e.errno == 22:
            raise ValueError('Encountered - %s. `server_address` must be a string containing a valid endpoint.' % repr(e)) # depends on [control=['if'], data=[]]
        raise # depends on [control=['except'], data=['e']]
    return (server_process, server_meta.state_router)
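A hedged sketch of the return contract, assuming the module's main and serializer are importable as in the snippet; with no argument the server picks its own endpoint, so the printed address is whatever it chose.

server_process, address = start_server()     # default endpoint, multiprocessing backend
print('state router bound at', address)
server_process.terminate()                   # stop the server process when finished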
def block_layer(inputs, filters, bottleneck, block_fn, blocks, strides, training, name, data_format): """Creates one layer of blocks for the ResNet model. Args: inputs: A tensor of size [batch, channels, height_in, width_in] or [batch, height_in, width_in, channels] depending on data_format. filters: The number of filters for the first convolution of the layer. bottleneck: Is the block created a bottleneck block. block_fn: The block to use within the model, either `building_block` or `bottleneck_block`. blocks: The number of blocks contained in the layer. strides: The stride to use for the first convolution of the layer. If greater than 1, this layer will ultimately downsample the input. training: Either True or False, whether we are currently training the model. Needed for batch norm. name: A string name for the tensor output of the block layer. data_format: The input format ('channels_last' or 'channels_first'). Returns: The output tensor of the block layer. """ # Bottleneck blocks end with 4x the number of filters as they start with filters_out = filters * 4 if bottleneck else filters def projection_shortcut(inputs): return conv2d_fixed_padding( inputs=inputs, filters=filters_out, kernel_size=1, strides=strides, data_format=data_format) # Only the first block per block_layer uses projection_shortcut and strides inputs = block_fn(inputs, filters, training, projection_shortcut, strides, data_format) for _ in range(1, blocks): inputs = block_fn(inputs, filters, training, None, 1, data_format) return tf.identity(inputs, name)
def function[block_layer, parameter[inputs, filters, bottleneck, block_fn, blocks, strides, training, name, data_format]]: constant[Creates one layer of blocks for the ResNet model. Args: inputs: A tensor of size [batch, channels, height_in, width_in] or [batch, height_in, width_in, channels] depending on data_format. filters: The number of filters for the first convolution of the layer. bottleneck: Is the block created a bottleneck block. block_fn: The block to use within the model, either `building_block` or `bottleneck_block`. blocks: The number of blocks contained in the layer. strides: The stride to use for the first convolution of the layer. If greater than 1, this layer will ultimately downsample the input. training: Either True or False, whether we are currently training the model. Needed for batch norm. name: A string name for the tensor output of the block layer. data_format: The input format ('channels_last' or 'channels_first'). Returns: The output tensor of the block layer. ] variable[filters_out] assign[=] <ast.IfExp object at 0x7da18f00f9d0> def function[projection_shortcut, parameter[inputs]]: return[call[name[conv2d_fixed_padding], parameter[]]] variable[inputs] assign[=] call[name[block_fn], parameter[name[inputs], name[filters], name[training], name[projection_shortcut], name[strides], name[data_format]]] for taget[name[_]] in starred[call[name[range], parameter[constant[1], name[blocks]]]] begin[:] variable[inputs] assign[=] call[name[block_fn], parameter[name[inputs], name[filters], name[training], constant[None], constant[1], name[data_format]]] return[call[name[tf].identity, parameter[name[inputs], name[name]]]]
keyword[def] identifier[block_layer] ( identifier[inputs] , identifier[filters] , identifier[bottleneck] , identifier[block_fn] , identifier[blocks] , identifier[strides] , identifier[training] , identifier[name] , identifier[data_format] ): literal[string] identifier[filters_out] = identifier[filters] * literal[int] keyword[if] identifier[bottleneck] keyword[else] identifier[filters] keyword[def] identifier[projection_shortcut] ( identifier[inputs] ): keyword[return] identifier[conv2d_fixed_padding] ( identifier[inputs] = identifier[inputs] , identifier[filters] = identifier[filters_out] , identifier[kernel_size] = literal[int] , identifier[strides] = identifier[strides] , identifier[data_format] = identifier[data_format] ) identifier[inputs] = identifier[block_fn] ( identifier[inputs] , identifier[filters] , identifier[training] , identifier[projection_shortcut] , identifier[strides] , identifier[data_format] ) keyword[for] identifier[_] keyword[in] identifier[range] ( literal[int] , identifier[blocks] ): identifier[inputs] = identifier[block_fn] ( identifier[inputs] , identifier[filters] , identifier[training] , keyword[None] , literal[int] , identifier[data_format] ) keyword[return] identifier[tf] . identifier[identity] ( identifier[inputs] , identifier[name] )
def block_layer(inputs, filters, bottleneck, block_fn, blocks, strides, training, name, data_format): """Creates one layer of blocks for the ResNet model. Args: inputs: A tensor of size [batch, channels, height_in, width_in] or [batch, height_in, width_in, channels] depending on data_format. filters: The number of filters for the first convolution of the layer. bottleneck: Is the block created a bottleneck block. block_fn: The block to use within the model, either `building_block` or `bottleneck_block`. blocks: The number of blocks contained in the layer. strides: The stride to use for the first convolution of the layer. If greater than 1, this layer will ultimately downsample the input. training: Either True or False, whether we are currently training the model. Needed for batch norm. name: A string name for the tensor output of the block layer. data_format: The input format ('channels_last' or 'channels_first'). Returns: The output tensor of the block layer. """ # Bottleneck blocks end with 4x the number of filters as they start with filters_out = filters * 4 if bottleneck else filters def projection_shortcut(inputs): return conv2d_fixed_padding(inputs=inputs, filters=filters_out, kernel_size=1, strides=strides, data_format=data_format) # Only the first block per block_layer uses projection_shortcut and strides inputs = block_fn(inputs, filters, training, projection_shortcut, strides, data_format) for _ in range(1, blocks): inputs = block_fn(inputs, filters, training, None, 1, data_format) # depends on [control=['for'], data=[]] return tf.identity(inputs, name)
def build_wheel_graph(num_nodes): """Builds a wheel graph with the specified number of nodes. Ref: http://mathworld.wolfram.com/WheelGraph.html""" # The easiest way to build a wheel graph is to build # C_n-1 and then add a hub node and spoke edges graph = build_cycle_graph(num_nodes - 1) cycle_graph_vertices = graph.get_all_node_ids() node_id = graph.new_node() for cycle_node in cycle_graph_vertices: graph.new_edge(node_id, cycle_node) return graph
def function[build_wheel_graph, parameter[num_nodes]]: constant[Builds a wheel graph with the specified number of nodes. Ref: http://mathworld.wolfram.com/WheelGraph.html] variable[graph] assign[=] call[name[build_cycle_graph], parameter[binary_operation[name[num_nodes] - constant[1]]]] variable[cycle_graph_vertices] assign[=] call[name[graph].get_all_node_ids, parameter[]] variable[node_id] assign[=] call[name[graph].new_node, parameter[]] for taget[name[cycle_node]] in starred[name[cycle_graph_vertices]] begin[:] call[name[graph].new_edge, parameter[name[node_id], name[cycle_node]]] return[name[graph]]
keyword[def] identifier[build_wheel_graph] ( identifier[num_nodes] ): literal[string] identifier[graph] = identifier[build_cycle_graph] ( identifier[num_nodes] - literal[int] ) identifier[cycle_graph_vertices] = identifier[graph] . identifier[get_all_node_ids] () identifier[node_id] = identifier[graph] . identifier[new_node] () keyword[for] identifier[cycle_node] keyword[in] identifier[cycle_graph_vertices] : identifier[graph] . identifier[new_edge] ( identifier[node_id] , identifier[cycle_node] ) keyword[return] identifier[graph]
def build_wheel_graph(num_nodes): """Builds a wheel graph with the specified number of nodes. Ref: http://mathworld.wolfram.com/WheelGraph.html""" # The easiest way to build a wheel graph is to build # C_n-1 and then add a hub node and spoke edges graph = build_cycle_graph(num_nodes - 1) cycle_graph_vertices = graph.get_all_node_ids() node_id = graph.new_node() for cycle_node in cycle_graph_vertices: graph.new_edge(node_id, cycle_node) # depends on [control=['for'], data=['cycle_node']] return graph
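A quick hedged sanity check: a wheel graph W_n has n nodes and 2*(n-1) edges, the (n-1)-cycle plus (n-1) spokes. get_all_node_ids is taken from the snippet; an edge-listing accessor like get_all_edge_ids is an assumption about the same graph API.

g = build_wheel_graph(5)                          # W_5: hub plus a 4-cycle
assert len(g.get_all_node_ids()) == 5
assert len(g.get_all_edge_ids()) == 2 * (5 - 1)   # 4 cycle edges + 4 spokes (assumed accessor)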
def add_node(self, node, node_data=None): """ Adds a new node to the graph. Arbitrary data can be attached to the node via the node_data parameter. Adding the same node twice will be silently ignored. The node must be a hashable value. """ # # the nodes will contain tuples that will store incoming edges, # outgoing edges and data # # index 0 -> incoming edges # index 1 -> outgoing edges if node in self.hidden_nodes: # Node is present, but hidden return if node not in self.nodes: self.nodes[node] = ([], [], node_data)
def function[add_node, parameter[self, node, node_data]]: constant[ Adds a new node to the graph. Arbitrary data can be attached to the node via the node_data parameter. Adding the same node twice will be silently ignored. The node must be a hashable value. ] if compare[name[node] in name[self].hidden_nodes] begin[:] return[None] if compare[name[node] <ast.NotIn object at 0x7da2590d7190> name[self].nodes] begin[:] call[name[self].nodes][name[node]] assign[=] tuple[[<ast.List object at 0x7da1b0e246d0>, <ast.List object at 0x7da1b0e245b0>, <ast.Name object at 0x7da1b0e26b60>]]
keyword[def] identifier[add_node] ( identifier[self] , identifier[node] , identifier[node_data] = keyword[None] ): literal[string] keyword[if] identifier[node] keyword[in] identifier[self] . identifier[hidden_nodes] : keyword[return] keyword[if] identifier[node] keyword[not] keyword[in] identifier[self] . identifier[nodes] : identifier[self] . identifier[nodes] [ identifier[node] ]=([],[], identifier[node_data] )
def add_node(self, node, node_data=None): """ Adds a new node to the graph. Arbitrary data can be attached to the node via the node_data parameter. Adding the same node twice will be silently ignored. The node must be a hashable value. """ # # the nodes will contain tuples that will store incoming edges, # outgoing edges and data # # index 0 -> incoming edges # index 1 -> outgoing edges if node in self.hidden_nodes: # Node is present, but hidden return # depends on [control=['if'], data=[]] if node not in self.nodes: self.nodes[node] = ([], [], node_data) # depends on [control=['if'], data=['node']]
def spawn_process(self, port): """Create an Application and HTTPServer for the given port. :param int port: The port to listen on :rtype: multiprocessing.Process """ return process.Process(name="ServerProcess.%i" % port, kwargs={'namespace': self.namespace, 'port': port})
def function[spawn_process, parameter[self, port]]: constant[Create an Application and HTTPServer for the given port. :param int port: The port to listen on :rtype: multiprocessing.Process ] return[call[name[process].Process, parameter[]]]
keyword[def] identifier[spawn_process] ( identifier[self] , identifier[port] ): literal[string] keyword[return] identifier[process] . identifier[Process] ( identifier[name] = literal[string] % identifier[port] , identifier[kwargs] ={ literal[string] : identifier[self] . identifier[namespace] , literal[string] : identifier[port] })
def spawn_process(self, port): """Create an Application and HTTPServer for the given port. :param int port: The port to listen on :rtype: multiprocessing.Process """ return process.Process(name='ServerProcess.%i' % port, kwargs={'namespace': self.namespace, 'port': port})
def some(arr): """Return True iff there is an element, a, of arr such that a is not None""" return functools.reduce(lambda x, y: x or (y is not None), arr, False)
def function[some, parameter[arr]]: constant[Return True iff there is an element, a, of arr such that a is not None] return[call[name[functools].reduce, parameter[<ast.Lambda object at 0x7da1b1c7aa40>, name[arr], constant[False]]]]
keyword[def] identifier[some] ( identifier[arr] ): literal[string] keyword[return] identifier[functools] . identifier[reduce] ( keyword[lambda] identifier[x] , identifier[y] : identifier[x] keyword[or] ( identifier[y] keyword[is] keyword[not] keyword[None] ), identifier[arr] , keyword[False] )
def some(arr): """Return True iff there is an element, a, of arr such that a is not None""" return functools.reduce(lambda x, y: x or y is not None, arr, False)
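The reduce above mirrors the standard library's any without its short-circuiting; a brief check plus the idiomatic equivalent:

assert some([None, None, 3]) is True
assert some([None, None]) is False
assert some([]) is False                 # the False initializer covers the empty case
# Equivalent and lazier, since any() stops at the first non-None element:
# any(x is not None for x in arr)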
def _set_attrib(name, key, value, param, root=None, validate=True): ''' Set a parameter in /etc/shadow ''' pre_info = info(name, root=root) # If the user is not present or the attribute is already present, # we return early if not pre_info['name']: return False if value == pre_info[key]: return True cmd = ['chage'] if root is not None: cmd.extend(('-R', root)) cmd.extend((param, value, name)) ret = not __salt__['cmd.run'](cmd, python_shell=False) if validate: ret = info(name, root=root).get(key) == value return ret
def function[_set_attrib, parameter[name, key, value, param, root, validate]]: constant[ Set a parameter in /etc/shadow ] variable[pre_info] assign[=] call[name[info], parameter[name[name]]] if <ast.UnaryOp object at 0x7da20e74bf70> begin[:] return[constant[False]] if compare[name[value] equal[==] call[name[pre_info]][name[key]]] begin[:] return[constant[True]] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b21871f0>]] if compare[name[root] is_not constant[None]] begin[:] call[name[cmd].extend, parameter[tuple[[<ast.Constant object at 0x7da1b2184910>, <ast.Name object at 0x7da1b2187610>]]]] call[name[cmd].extend, parameter[tuple[[<ast.Name object at 0x7da1b2187c40>, <ast.Name object at 0x7da1b2184e20>, <ast.Name object at 0x7da1b2186440>]]]] variable[ret] assign[=] <ast.UnaryOp object at 0x7da1b2184c10> if name[validate] begin[:] variable[ret] assign[=] compare[call[call[name[info], parameter[name[name]]].get, parameter[name[key]]] equal[==] name[value]] return[name[ret]]
keyword[def] identifier[_set_attrib] ( identifier[name] , identifier[key] , identifier[value] , identifier[param] , identifier[root] = keyword[None] , identifier[validate] = keyword[True] ): literal[string] identifier[pre_info] = identifier[info] ( identifier[name] , identifier[root] = identifier[root] ) keyword[if] keyword[not] identifier[pre_info] [ literal[string] ]: keyword[return] keyword[False] keyword[if] identifier[value] == identifier[pre_info] [ identifier[key] ]: keyword[return] keyword[True] identifier[cmd] =[ literal[string] ] keyword[if] identifier[root] keyword[is] keyword[not] keyword[None] : identifier[cmd] . identifier[extend] (( literal[string] , identifier[root] )) identifier[cmd] . identifier[extend] (( identifier[param] , identifier[value] , identifier[name] )) identifier[ret] = keyword[not] identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[python_shell] = keyword[False] ) keyword[if] identifier[validate] : identifier[ret] = identifier[info] ( identifier[name] , identifier[root] = identifier[root] ). identifier[get] ( identifier[key] )== identifier[value] keyword[return] identifier[ret]
def _set_attrib(name, key, value, param, root=None, validate=True): """ Set a parameter in /etc/shadow """ pre_info = info(name, root=root) # If the user is not present or the attribute is already present, # we return early if not pre_info['name']: return False # depends on [control=['if'], data=[]] if value == pre_info[key]: return True # depends on [control=['if'], data=[]] cmd = ['chage'] if root is not None: cmd.extend(('-R', root)) # depends on [control=['if'], data=['root']] cmd.extend((param, value, name)) ret = not __salt__['cmd.run'](cmd, python_shell=False) if validate: ret = info(name, root=root).get(key) == value # depends on [control=['if'], data=[]] return ret
def _shuffle_items(items, bucket_key=None, disable=None, seed=None, session=None): """ Shuffles a list of `items` in place. If `bucket_key` is None, items are shuffled across the entire list. `bucket_key` is an optional function called for each item in `items` to calculate the key of bucket in which the item falls. Bucket defines the boundaries across which items will not be shuffled. `disable` is a function that takes an item and returns a falsey value if this item is ok to be shuffled. It returns a truthy value otherwise and the truthy value is used as part of the item's key when determining the bucket it belongs to. """ if seed is not None: random.seed(seed) # If `bucket_key` is falsey, shuffle is global. if not bucket_key and not disable: random.shuffle(items) return def get_full_bucket_key(item): assert bucket_key or disable if bucket_key and disable: return ItemKey(bucket=bucket_key(item, session), disabled=disable(item, session)) elif disable: return ItemKey(disabled=disable(item, session)) else: return ItemKey(bucket=bucket_key(item, session)) # For a sequence of items A1, A2, B1, B2, C1, C2, # where key(A1) == key(A2) == key(C1) == key(C2), # items A1, A2, C1, and C2 will end up in the same bucket. buckets = OrderedDict() for item in items: full_bucket_key = get_full_bucket_key(item) if full_bucket_key not in buckets: buckets[full_bucket_key] = [] buckets[full_bucket_key].append(item) # Shuffle inside a bucket bucket_keys = list(buckets.keys()) for full_bucket_key in buckets.keys(): if full_bucket_key.bucket == FAILED_FIRST_LAST_FAILED_BUCKET_KEY: # Do not shuffle the last failed bucket continue if not full_bucket_key.disabled: random.shuffle(buckets[full_bucket_key]) # Shuffle buckets # Only the first bucket can be FAILED_FIRST_LAST_FAILED_BUCKET_KEY if bucket_keys and bucket_keys[0].bucket == FAILED_FIRST_LAST_FAILED_BUCKET_KEY: new_bucket_keys = list(buckets.keys())[1:] random.shuffle(new_bucket_keys) new_bucket_keys.insert(0, bucket_keys[0]) else: new_bucket_keys = list(buckets.keys()) random.shuffle(new_bucket_keys) items[:] = [item for bk in new_bucket_keys for item in buckets[bk]] return
def function[_shuffle_items, parameter[items, bucket_key, disable, seed, session]]: constant[ Shuffles a list of `items` in place. If `bucket_key` is None, items are shuffled across the entire list. `bucket_key` is an optional function called for each item in `items` to calculate the key of bucket in which the item falls. Bucket defines the boundaries across which items will not be shuffled. `disable` is a function that takes an item and returns a falsey value if this item is ok to be shuffled. It returns a truthy value otherwise and the truthy value is used as part of the item's key when determining the bucket it belongs to. ] if compare[name[seed] is_not constant[None]] begin[:] call[name[random].seed, parameter[name[seed]]] if <ast.BoolOp object at 0x7da18c4cddb0> begin[:] call[name[random].shuffle, parameter[name[items]]] return[None] def function[get_full_bucket_key, parameter[item]]: assert[<ast.BoolOp object at 0x7da18c4ceb30>] if <ast.BoolOp object at 0x7da18c4cc640> begin[:] return[call[name[ItemKey], parameter[]]] variable[buckets] assign[=] call[name[OrderedDict], parameter[]] for taget[name[item]] in starred[name[items]] begin[:] variable[full_bucket_key] assign[=] call[name[get_full_bucket_key], parameter[name[item]]] if compare[name[full_bucket_key] <ast.NotIn object at 0x7da2590d7190> name[buckets]] begin[:] call[name[buckets]][name[full_bucket_key]] assign[=] list[[]] call[call[name[buckets]][name[full_bucket_key]].append, parameter[name[item]]] variable[bucket_keys] assign[=] call[name[list], parameter[call[name[buckets].keys, parameter[]]]] for taget[name[full_bucket_key]] in starred[call[name[buckets].keys, parameter[]]] begin[:] if compare[name[full_bucket_key].bucket equal[==] name[FAILED_FIRST_LAST_FAILED_BUCKET_KEY]] begin[:] continue if <ast.UnaryOp object at 0x7da1b2795540> begin[:] call[name[random].shuffle, parameter[call[name[buckets]][name[full_bucket_key]]]] if <ast.BoolOp object at 0x7da1b27977c0> begin[:] variable[new_bucket_keys] assign[=] call[call[name[list], parameter[call[name[buckets].keys, parameter[]]]]][<ast.Slice object at 0x7da1b27949d0>] call[name[random].shuffle, parameter[name[new_bucket_keys]]] call[name[new_bucket_keys].insert, parameter[constant[0], call[name[bucket_keys]][constant[0]]]] call[name[items]][<ast.Slice object at 0x7da2054a5990>] assign[=] <ast.ListComp object at 0x7da2054a5180> return[None]
keyword[def] identifier[_shuffle_items] ( identifier[items] , identifier[bucket_key] = keyword[None] , identifier[disable] = keyword[None] , identifier[seed] = keyword[None] , identifier[session] = keyword[None] ): literal[string] keyword[if] identifier[seed] keyword[is] keyword[not] keyword[None] : identifier[random] . identifier[seed] ( identifier[seed] ) keyword[if] keyword[not] identifier[bucket_key] keyword[and] keyword[not] identifier[disable] : identifier[random] . identifier[shuffle] ( identifier[items] ) keyword[return] keyword[def] identifier[get_full_bucket_key] ( identifier[item] ): keyword[assert] identifier[bucket_key] keyword[or] identifier[disable] keyword[if] identifier[bucket_key] keyword[and] identifier[disable] : keyword[return] identifier[ItemKey] ( identifier[bucket] = identifier[bucket_key] ( identifier[item] , identifier[session] ), identifier[disabled] = identifier[disable] ( identifier[item] , identifier[session] )) keyword[elif] identifier[disable] : keyword[return] identifier[ItemKey] ( identifier[disabled] = identifier[disable] ( identifier[item] , identifier[session] )) keyword[else] : keyword[return] identifier[ItemKey] ( identifier[bucket] = identifier[bucket_key] ( identifier[item] , identifier[session] )) identifier[buckets] = identifier[OrderedDict] () keyword[for] identifier[item] keyword[in] identifier[items] : identifier[full_bucket_key] = identifier[get_full_bucket_key] ( identifier[item] ) keyword[if] identifier[full_bucket_key] keyword[not] keyword[in] identifier[buckets] : identifier[buckets] [ identifier[full_bucket_key] ]=[] identifier[buckets] [ identifier[full_bucket_key] ]. identifier[append] ( identifier[item] ) identifier[bucket_keys] = identifier[list] ( identifier[buckets] . identifier[keys] ()) keyword[for] identifier[full_bucket_key] keyword[in] identifier[buckets] . identifier[keys] (): keyword[if] identifier[full_bucket_key] . identifier[bucket] == identifier[FAILED_FIRST_LAST_FAILED_BUCKET_KEY] : keyword[continue] keyword[if] keyword[not] identifier[full_bucket_key] . identifier[disabled] : identifier[random] . identifier[shuffle] ( identifier[buckets] [ identifier[full_bucket_key] ]) keyword[if] identifier[bucket_keys] keyword[and] identifier[bucket_keys] [ literal[int] ]. identifier[bucket] == identifier[FAILED_FIRST_LAST_FAILED_BUCKET_KEY] : identifier[new_bucket_keys] = identifier[list] ( identifier[buckets] . identifier[keys] ())[ literal[int] :] identifier[random] . identifier[shuffle] ( identifier[new_bucket_keys] ) identifier[new_bucket_keys] . identifier[insert] ( literal[int] , identifier[bucket_keys] [ literal[int] ]) keyword[else] : identifier[new_bucket_keys] = identifier[list] ( identifier[buckets] . identifier[keys] ()) identifier[random] . identifier[shuffle] ( identifier[new_bucket_keys] ) identifier[items] [:]=[ identifier[item] keyword[for] identifier[bk] keyword[in] identifier[new_bucket_keys] keyword[for] identifier[item] keyword[in] identifier[buckets] [ identifier[bk] ]] keyword[return]
def _shuffle_items(items, bucket_key=None, disable=None, seed=None, session=None): """ Shuffles a list of `items` in place. If `bucket_key` is None, items are shuffled across the entire list. `bucket_key` is an optional function called for each item in `items` to calculate the key of bucket in which the item falls. Bucket defines the boundaries across which items will not be shuffled. `disable` is a function that takes an item and returns a falsey value if this item is ok to be shuffled. It returns a truthy value otherwise and the truthy value is used as part of the item's key when determining the bucket it belongs to. """ if seed is not None: random.seed(seed) # depends on [control=['if'], data=['seed']] # If `bucket_key` is falsey, shuffle is global. if not bucket_key and (not disable): random.shuffle(items) return # depends on [control=['if'], data=[]] def get_full_bucket_key(item): assert bucket_key or disable if bucket_key and disable: return ItemKey(bucket=bucket_key(item, session), disabled=disable(item, session)) # depends on [control=['if'], data=[]] elif disable: return ItemKey(disabled=disable(item, session)) # depends on [control=['if'], data=[]] else: return ItemKey(bucket=bucket_key(item, session)) # For a sequence of items A1, A2, B1, B2, C1, C2, # where key(A1) == key(A2) == key(C1) == key(C2), # items A1, A2, C1, and C2 will end up in the same bucket. buckets = OrderedDict() for item in items: full_bucket_key = get_full_bucket_key(item) if full_bucket_key not in buckets: buckets[full_bucket_key] = [] # depends on [control=['if'], data=['full_bucket_key', 'buckets']] buckets[full_bucket_key].append(item) # depends on [control=['for'], data=['item']] # Shuffle inside a bucket bucket_keys = list(buckets.keys()) for full_bucket_key in buckets.keys(): if full_bucket_key.bucket == FAILED_FIRST_LAST_FAILED_BUCKET_KEY: # Do not shuffle the last failed bucket continue # depends on [control=['if'], data=[]] if not full_bucket_key.disabled: random.shuffle(buckets[full_bucket_key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['full_bucket_key']] # Shuffle buckets # Only the first bucket can be FAILED_FIRST_LAST_FAILED_BUCKET_KEY if bucket_keys and bucket_keys[0].bucket == FAILED_FIRST_LAST_FAILED_BUCKET_KEY: new_bucket_keys = list(buckets.keys())[1:] random.shuffle(new_bucket_keys) new_bucket_keys.insert(0, bucket_keys[0]) # depends on [control=['if'], data=[]] else: new_bucket_keys = list(buckets.keys()) random.shuffle(new_bucket_keys) items[:] = [item for bk in new_bucket_keys for item in buckets[bk]] return
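A hedged illustration of the bucketing contract: bucket_key receives (item, session), and items sharing a key move only with their bucket. The one-letter prefix key below is hypothetical, and this assumes the module's ItemKey defaults its disabled field when only bucket is given.

items = ['a1', 'a2', 'b1', 'b2', 'b3']
_shuffle_items(items, bucket_key=lambda item, session: item[0], seed=7)
# Possible result: ['b3', 'b1', 'b2', 'a2', 'a1'] -- the 'a' and 'b' groups
# stay contiguous while their internal order and relative order vary.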
def _call_dunder_colr(cls, obj): """ Call __colr__ on an object, after some checks. If color is disabled, the object itself is returned. If __colr__ doesn't return a Colr instance, TypeError is raised. On success, a Colr instance is returned from obj.__colr__(). """ if _disabled: # No colorization when disabled. Just use str. return obj clr = obj.__colr__() if not isinstance(clr, cls): # __colr__ should always return a Colr. # Future development may assume a Colr was returned. raise TypeError( ' '.join(( '__colr__ methods should return a {} instance.', 'Got: {}', )).format( cls.__name__, type(clr).__name__, ) ) return clr
def function[_call_dunder_colr, parameter[cls, obj]]: constant[ Call __colr__ on an object, after some checks. If color is disabled, the object itself is returned. If __colr__ doesn't return a Colr instance, TypeError is raised. On success, a Colr instance is returned from obj.__colr__(). ] if name[_disabled] begin[:] return[name[obj]] variable[clr] assign[=] call[name[obj].__colr__, parameter[]] if <ast.UnaryOp object at 0x7da1b02be9b0> begin[:] <ast.Raise object at 0x7da1b02bde70> return[name[clr]]
keyword[def] identifier[_call_dunder_colr] ( identifier[cls] , identifier[obj] ): literal[string] keyword[if] identifier[_disabled] : keyword[return] identifier[obj] identifier[clr] = identifier[obj] . identifier[__colr__] () keyword[if] keyword[not] identifier[isinstance] ( identifier[clr] , identifier[cls] ): keyword[raise] identifier[TypeError] ( literal[string] . identifier[join] (( literal[string] , literal[string] , )). identifier[format] ( identifier[cls] . identifier[__name__] , identifier[type] ( identifier[clr] ). identifier[__name__] , ) ) keyword[return] identifier[clr]
def _call_dunder_colr(cls, obj): """ Call __colr__ on an object, after some checks. If color is disabled, the object itself is returned. If __colr__ doesn't return a Colr instance, TypeError is raised. On success, a Colr instance is returned from obj.__colr__(). """ if _disabled: # No colorization when disabled. Just use str. return obj # depends on [control=['if'], data=[]] clr = obj.__colr__() if not isinstance(clr, cls): # __colr__ should always return a Colr. # Future development may assume a Colr was returned. raise TypeError(' '.join(('__colr__ methods should return a {} instance.', 'Got: {}')).format(cls.__name__, type(clr).__name__)) # depends on [control=['if'], data=[]] return clr
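A hedged sketch of the protocol being enforced: a user type's __colr__ must hand back a Colr instance, never a plain string. The Alert class is hypothetical, and the constructor call assumes the colr library's Colr(text, fore=...) signature.

class Alert:                                     # hypothetical user type
    def __colr__(self):
        return Colr('warning!', fore='yellow')   # must be a Colr, not a str

# Handed an Alert(), the hook above returns the yellow Colr unchanged;
# had __colr__ returned 'warning!' directly, the TypeError branch would fire.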
def bitceil(N): """ Find the bit (i.e. power of 2) immediately greater than or equal to N Note: this works for numbers up to 2 ** 64. Roughly equivalent to int(2 ** np.ceil(np.log2(N))) """ # Note: for Python 2.7 and 3.x, this is faster: # return 1 << int(N - 1).bit_length() N = int(N) - 1 for i in [1, 2, 4, 8, 16, 32]: N |= N >> i return N + 1
def function[bitceil, parameter[N]]: constant[ Find the bit (i.e. power of 2) immediately greater than or equal to N Note: this works for numbers up to 2 ** 64. Roughly equivalent to int(2 ** np.ceil(np.log2(N))) ] variable[N] assign[=] binary_operation[call[name[int], parameter[name[N]]] - constant[1]] for taget[name[i]] in starred[list[[<ast.Constant object at 0x7da1b05f2f80>, <ast.Constant object at 0x7da1b05f1570>, <ast.Constant object at 0x7da1b05f1930>, <ast.Constant object at 0x7da1b05f1f90>, <ast.Constant object at 0x7da1b05f3a00>, <ast.Constant object at 0x7da1b05f3a60>]]] begin[:] <ast.AugAssign object at 0x7da1b05f08e0> return[binary_operation[name[N] + constant[1]]]
keyword[def] identifier[bitceil] ( identifier[N] ): literal[string] identifier[N] = identifier[int] ( identifier[N] )- literal[int] keyword[for] identifier[i] keyword[in] [ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]: identifier[N] |= identifier[N] >> identifier[i] keyword[return] identifier[N] + literal[int]
def bitceil(N): """ Find the bit (i.e. power of 2) immediately greater than or equal to N Note: this works for numbers up to 2 ** 64. Roughly equivalent to int(2 ** np.ceil(np.log2(N))) """ # Note: for Python 2.7 and 3.x, this is faster: # return 1 << int(N - 1).bit_length() N = int(N) - 1 for i in [1, 2, 4, 8, 16, 32]: N |= N >> i # depends on [control=['for'], data=['i']] return N + 1
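A worked check of the bit-smearing trick above: for N = 1000, N - 1 = 999 = 0b1111100111, and OR-ing in shifts of 1, 2, 4, ... fills every bit below the highest set one, giving 0b1111111111 = 1023; adding 1 lands on 1024.

assert bitceil(1) == 1               # 2**0
assert bitceil(1000) == 1024         # 999 smears to 1023, then +1
assert bitceil(1024) == 1024         # exact powers of two are fixed points
assert bitceil(2**40 + 1) == 2**41   # still correct well past 32 bits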
def parse(binary, **params):
    """Turns a TAR file into a frozen sample."""
    binary = io.BytesIO(binary)
    collection = list()
    with tarfile.TarFile(fileobj=binary, mode='r') as tar:
        for tar_info in tar.getmembers():
            content_type, encoding = mimetypes.guess_type(tar_info.name)
            content = tar.extractfile(tar_info)
            content = content_encodings.get(encoding).decode(content)
            content = content_types.get(content_type).parse(content, **params)
            collection.append((tar_info.name, content))
    return collection
def function[parse, parameter[binary]]: constant[Turns a TAR file into a frozen sample.] variable[binary] assign[=] call[name[io].BytesIO, parameter[name[binary]]] variable[collection] assign[=] call[name[list], parameter[]] with call[name[tarfile].TarFile, parameter[]] begin[:] for taget[name[tar_info]] in starred[call[name[tar].getmembers, parameter[]]] begin[:] <ast.Tuple object at 0x7da18f09e860> assign[=] call[name[mimetypes].guess_type, parameter[name[tar_info].name]] variable[content] assign[=] call[name[tar].extractfile, parameter[name[tar_info]]] variable[content] assign[=] call[call[name[content_encodings].get, parameter[name[encoding]]].decode, parameter[name[content]]] variable[content] assign[=] call[call[name[content_types].get, parameter[name[content_type]]].parse, parameter[name[content]]] call[name[collection].append, parameter[tuple[[<ast.Attribute object at 0x7da18f09ebf0>, <ast.Name object at 0x7da18f09c970>]]]] return[name[collection]]
keyword[def] identifier[parse] ( identifier[binary] ,** identifier[params] ): literal[string] identifier[binary] = identifier[io] . identifier[BytesIO] ( identifier[binary] ) identifier[collection] = identifier[list] () keyword[with] identifier[tarfile] . identifier[TarFile] ( identifier[fileobj] = identifier[binary] , identifier[mode] = literal[string] ) keyword[as] identifier[tar] : keyword[for] identifier[tar_info] keyword[in] identifier[tar] . identifier[getmembers] (): identifier[content_type] , identifier[encoding] = identifier[mimetypes] . identifier[guess_type] ( identifier[tar_info] . identifier[name] ) identifier[content] = identifier[tar] . identifier[extractfile] ( identifier[tar_info] ) identifier[content] = identifier[content_encodings] . identifier[get] ( identifier[encoding] ). identifier[decode] ( identifier[content] ) identifier[content] = identifier[content_types] . identifier[get] ( identifier[content_type] ). identifier[parse] ( identifier[content] ,** identifier[params] ) identifier[collection] . identifier[append] (( identifier[tar_info] . identifier[name] , identifier[content] )) keyword[return] identifier[collection]
def parse(binary, **params): """Turns a TAR file into a frozen sample.""" binary = io.BytesIO(binary) collection = list() with tarfile.TarFile(fileobj=binary, mode='r') as tar: for tar_info in tar.getmembers(): (content_type, encoding) = mimetypes.guess_type(tar_info.name) content = tar.extractfile(tar_info) content = content_encodings.get(encoding).decode(content) content = content_types.get(content_type).parse(content, **params) collection.append((tar_info.name, content)) # depends on [control=['for'], data=['tar_info']] # depends on [control=['with'], data=['tar']] return collection
def add_actor(self, uinput, reset_camera=False, name=None, loc=None, culling=False): """ Adds an actor to render window. Creates an actor if input is a mapper. Parameters ---------- uinput : vtk.vtkMapper or vtk.vtkActor vtk mapper or vtk actor to be added. reset_camera : bool, optional Resets the camera when true. loc : int, tuple, or list Index of the renderer to add the actor to. For example, ``loc=2`` or ``loc=(1, 1)``. culling : bool optional Does not render faces that should not be visible to the plotter. This can be helpful for dense surface meshes, especially when edges are visible, but can cause flat meshes to be partially displayed. Default False. Returns ------- actor : vtk.vtkActor The actor. actor_properties : vtk.Properties Actor properties. """ # Remove actor by that name if present rv = self.remove_actor(name, reset_camera=False) if isinstance(uinput, vtk.vtkMapper): actor = vtk.vtkActor() actor.SetMapper(uinput) else: actor = uinput self.AddActor(actor) actor.renderer = proxy(self) if name is None: name = str(hex(id(actor))) self._actors[name] = actor if reset_camera: self.reset_camera() elif not self.camera_set and reset_camera is None and not rv: self.reset_camera() else: self.parent._render() self.update_bounds_axes() if culling: try: actor.GetProperty().BackfaceCullingOn() except AttributeError: # pragma: no cover pass return actor, actor.GetProperty()
def function[add_actor, parameter[self, uinput, reset_camera, name, loc, culling]]: constant[ Adds an actor to render window. Creates an actor if input is a mapper. Parameters ---------- uinput : vtk.vtkMapper or vtk.vtkActor vtk mapper or vtk actor to be added. reset_camera : bool, optional Resets the camera when true. loc : int, tuple, or list Index of the renderer to add the actor to. For example, ``loc=2`` or ``loc=(1, 1)``. culling : bool optional Does not render faces that should not be visible to the plotter. This can be helpful for dense surface meshes, especially when edges are visible, but can cause flat meshes to be partially displayed. Default False. Returns ------- actor : vtk.vtkActor The actor. actor_properties : vtk.Properties Actor properties. ] variable[rv] assign[=] call[name[self].remove_actor, parameter[name[name]]] if call[name[isinstance], parameter[name[uinput], name[vtk].vtkMapper]] begin[:] variable[actor] assign[=] call[name[vtk].vtkActor, parameter[]] call[name[actor].SetMapper, parameter[name[uinput]]] call[name[self].AddActor, parameter[name[actor]]] name[actor].renderer assign[=] call[name[proxy], parameter[name[self]]] if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] call[name[str], parameter[call[name[hex], parameter[call[name[id], parameter[name[actor]]]]]]] call[name[self]._actors][name[name]] assign[=] name[actor] if name[reset_camera] begin[:] call[name[self].reset_camera, parameter[]] call[name[self].update_bounds_axes, parameter[]] if name[culling] begin[:] <ast.Try object at 0x7da18bccb910> return[tuple[[<ast.Name object at 0x7da18bcc89d0>, <ast.Call object at 0x7da18bcc9de0>]]]
keyword[def] identifier[add_actor] ( identifier[self] , identifier[uinput] , identifier[reset_camera] = keyword[False] , identifier[name] = keyword[None] , identifier[loc] = keyword[None] , identifier[culling] = keyword[False] ): literal[string] identifier[rv] = identifier[self] . identifier[remove_actor] ( identifier[name] , identifier[reset_camera] = keyword[False] ) keyword[if] identifier[isinstance] ( identifier[uinput] , identifier[vtk] . identifier[vtkMapper] ): identifier[actor] = identifier[vtk] . identifier[vtkActor] () identifier[actor] . identifier[SetMapper] ( identifier[uinput] ) keyword[else] : identifier[actor] = identifier[uinput] identifier[self] . identifier[AddActor] ( identifier[actor] ) identifier[actor] . identifier[renderer] = identifier[proxy] ( identifier[self] ) keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = identifier[str] ( identifier[hex] ( identifier[id] ( identifier[actor] ))) identifier[self] . identifier[_actors] [ identifier[name] ]= identifier[actor] keyword[if] identifier[reset_camera] : identifier[self] . identifier[reset_camera] () keyword[elif] keyword[not] identifier[self] . identifier[camera_set] keyword[and] identifier[reset_camera] keyword[is] keyword[None] keyword[and] keyword[not] identifier[rv] : identifier[self] . identifier[reset_camera] () keyword[else] : identifier[self] . identifier[parent] . identifier[_render] () identifier[self] . identifier[update_bounds_axes] () keyword[if] identifier[culling] : keyword[try] : identifier[actor] . identifier[GetProperty] (). identifier[BackfaceCullingOn] () keyword[except] identifier[AttributeError] : keyword[pass] keyword[return] identifier[actor] , identifier[actor] . identifier[GetProperty] ()
def add_actor(self, uinput, reset_camera=False, name=None, loc=None, culling=False): """ Adds an actor to render window. Creates an actor if input is a mapper. Parameters ---------- uinput : vtk.vtkMapper or vtk.vtkActor vtk mapper or vtk actor to be added. reset_camera : bool, optional Resets the camera when true. loc : int, tuple, or list Index of the renderer to add the actor to. For example, ``loc=2`` or ``loc=(1, 1)``. culling : bool optional Does not render faces that should not be visible to the plotter. This can be helpful for dense surface meshes, especially when edges are visible, but can cause flat meshes to be partially displayed. Default False. Returns ------- actor : vtk.vtkActor The actor. actor_properties : vtk.Properties Actor properties. """ # Remove actor by that name if present rv = self.remove_actor(name, reset_camera=False) if isinstance(uinput, vtk.vtkMapper): actor = vtk.vtkActor() actor.SetMapper(uinput) # depends on [control=['if'], data=[]] else: actor = uinput self.AddActor(actor) actor.renderer = proxy(self) if name is None: name = str(hex(id(actor))) # depends on [control=['if'], data=['name']] self._actors[name] = actor if reset_camera: self.reset_camera() # depends on [control=['if'], data=[]] elif not self.camera_set and reset_camera is None and (not rv): self.reset_camera() # depends on [control=['if'], data=[]] else: self.parent._render() self.update_bounds_axes() if culling: try: actor.GetProperty().BackfaceCullingOn() # depends on [control=['try'], data=[]] except AttributeError: # pragma: no cover pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return (actor, actor.GetProperty())
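A minimal usage sketch for add_actor, assuming a working VTK install; `renderer` is only a placeholder for an instance of the class that defines the method, and the sphere geometry and the name 'sphere' are illustrative:

import vtk

# Illustrative geometry: a sphere fed into a polydata mapper.
source = vtk.vtkSphereSource()
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(source.GetOutputPort())

# `renderer` stands in for an object exposing add_actor(); passing a
# mapper makes the method wrap it in a vtkActor before registering it.
actor, prop = renderer.add_actor(mapper, name='sphere', culling=True)
prop.SetColor(1.0, 0.0, 0.0)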
def _CreateFeed(client): """Creates the feed for DSA page URLs. Args: client: an AdWordsClient instance. Returns: A _DSAFeedDetails instance containing details about the created feed. """ # Get the FeedService. feed_service = client.GetService('FeedService', version='v201809') # Create operation. operation = { # Create the feed. 'operand': { 'name': 'DSA Feed %s' % uuid.uuid4(), # Create attributes. 'attributes': [ {'type': 'URL_LIST', 'name': 'Page URL'}, {'type': 'STRING_LIST', 'name': 'Label'} ], 'origin': 'USER' }, 'operator': 'ADD' } # Add the feed. feed = feed_service.mutate([operation])['value'][0] return _DSAFeedDetails(feed['id'], feed['attributes'][0]['id'], feed['attributes'][1]['id'])
def function[_CreateFeed, parameter[client]]: constant[Creates the feed for DSA page URLs. Args: client: an AdWordsClient instance. Returns: A _DSAFeedDetails instance containing details about the created feed. ] variable[feed_service] assign[=] call[name[client].GetService, parameter[constant[FeedService]]] variable[operation] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c0c9d0>, <ast.Constant object at 0x7da1b1c0da50>], [<ast.Dict object at 0x7da1b1c0c550>, <ast.Constant object at 0x7da1b1c0dc00>]] variable[feed] assign[=] call[call[call[name[feed_service].mutate, parameter[list[[<ast.Name object at 0x7da1b1b0f220>]]]]][constant[value]]][constant[0]] return[call[name[_DSAFeedDetails], parameter[call[name[feed]][constant[id]], call[call[call[name[feed]][constant[attributes]]][constant[0]]][constant[id]], call[call[call[name[feed]][constant[attributes]]][constant[1]]][constant[id]]]]]
keyword[def] identifier[_CreateFeed] ( identifier[client] ): literal[string] identifier[feed_service] = identifier[client] . identifier[GetService] ( literal[string] , identifier[version] = literal[string] ) identifier[operation] ={ literal[string] :{ literal[string] : literal[string] % identifier[uuid] . identifier[uuid4] (), literal[string] :[ { literal[string] : literal[string] , literal[string] : literal[string] }, { literal[string] : literal[string] , literal[string] : literal[string] } ], literal[string] : literal[string] }, literal[string] : literal[string] } identifier[feed] = identifier[feed_service] . identifier[mutate] ([ identifier[operation] ])[ literal[string] ][ literal[int] ] keyword[return] identifier[_DSAFeedDetails] ( identifier[feed] [ literal[string] ], identifier[feed] [ literal[string] ][ literal[int] ][ literal[string] ], identifier[feed] [ literal[string] ][ literal[int] ][ literal[string] ])
def _CreateFeed(client): """Creates the feed for DSA page URLs. Args: client: an AdWordsClient instance. Returns: A _DSAFeedDetails instance containing details about the created feed. """ # Get the FeedService. feed_service = client.GetService('FeedService', version='v201809') # Create operation. # Create the feed. # Create attributes. operation = {'operand': {'name': 'DSA Feed %s' % uuid.uuid4(), 'attributes': [{'type': 'URL_LIST', 'name': 'Page URL'}, {'type': 'STRING_LIST', 'name': 'Label'}], 'origin': 'USER'}, 'operator': 'ADD'} # Add the feed. feed = feed_service.mutate([operation])['value'][0] return _DSAFeedDetails(feed['id'], feed['attributes'][0]['id'], feed['attributes'][1]['id'])
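_DSAFeedDetails is not defined in this row; judging from the three positional arguments, it is presumably a simple container along these lines (the field names are guesses):

from collections import namedtuple

# Hypothetical container matching the three values returned above.
_DSAFeedDetails = namedtuple(
    '_DSAFeedDetails', ['feed_id', 'url_attribute_id', 'label_attribute_id'])

details = _DSAFeedDetails(12345, 1, 2)
print(details.feed_id)  # 12345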
def filter_extant_exports(client, bucket, prefix, days, start, end=None): """Filter days where the bucket already has extant export keys. """ end = end or datetime.now() # days = [start + timedelta(i) for i in range((end-start).days)] try: tag_set = client.get_object_tagging(Bucket=bucket, Key=prefix).get('TagSet', []) except ClientError as e: if e.response['Error']['Code'] != 'NoSuchKey': raise tag_set = [] tags = {t['Key']: t['Value'] for t in tag_set} if 'LastExport' not in tags: return sorted(days) last_export = parse(tags['LastExport']) if last_export.tzinfo is None: last_export = last_export.replace(tzinfo=tzutc()) return [d for d in sorted(days) if d > last_export]
def function[filter_extant_exports, parameter[client, bucket, prefix, days, start, end]]: constant[Filter days where the bucket already has extant export keys. ] variable[end] assign[=] <ast.BoolOp object at 0x7da18f09d240> <ast.Try object at 0x7da18f09eb30> variable[tags] assign[=] <ast.DictComp object at 0x7da18f09e5c0> if compare[constant[LastExport] <ast.NotIn object at 0x7da2590d7190> name[tags]] begin[:] return[call[name[sorted], parameter[name[days]]]] variable[last_export] assign[=] call[name[parse], parameter[call[name[tags]][constant[LastExport]]]] if compare[name[last_export].tzinfo is constant[None]] begin[:] variable[last_export] assign[=] call[name[last_export].replace, parameter[]] return[<ast.ListComp object at 0x7da18f09fac0>]
keyword[def] identifier[filter_extant_exports] ( identifier[client] , identifier[bucket] , identifier[prefix] , identifier[days] , identifier[start] , identifier[end] = keyword[None] ): literal[string] identifier[end] = identifier[end] keyword[or] identifier[datetime] . identifier[now] () keyword[try] : identifier[tag_set] = identifier[client] . identifier[get_object_tagging] ( identifier[Bucket] = identifier[bucket] , identifier[Key] = identifier[prefix] ). identifier[get] ( literal[string] ,[]) keyword[except] identifier[ClientError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[response] [ literal[string] ][ literal[string] ]!= literal[string] : keyword[raise] identifier[tag_set] =[] identifier[tags] ={ identifier[t] [ literal[string] ]: identifier[t] [ literal[string] ] keyword[for] identifier[t] keyword[in] identifier[tag_set] } keyword[if] literal[string] keyword[not] keyword[in] identifier[tags] : keyword[return] identifier[sorted] ( identifier[days] ) identifier[last_export] = identifier[parse] ( identifier[tags] [ literal[string] ]) keyword[if] identifier[last_export] . identifier[tzinfo] keyword[is] keyword[None] : identifier[last_export] = identifier[last_export] . identifier[replace] ( identifier[tzinfo] = identifier[tzutc] ()) keyword[return] [ identifier[d] keyword[for] identifier[d] keyword[in] identifier[sorted] ( identifier[days] ) keyword[if] identifier[d] > identifier[last_export] ]
def filter_extant_exports(client, bucket, prefix, days, start, end=None): """Filter days where the bucket already has extant export keys. """ end = end or datetime.now() # days = [start + timedelta(i) for i in range((end-start).days)] try: tag_set = client.get_object_tagging(Bucket=bucket, Key=prefix).get('TagSet', []) # depends on [control=['try'], data=[]] except ClientError as e: if e.response['Error']['Code'] != 'NoSuchKey': raise # depends on [control=['if'], data=[]] tag_set = [] # depends on [control=['except'], data=['e']] tags = {t['Key']: t['Value'] for t in tag_set} if 'LastExport' not in tags: return sorted(days) # depends on [control=['if'], data=[]] last_export = parse(tags['LastExport']) if last_export.tzinfo is None: last_export = last_export.replace(tzinfo=tzutc()) # depends on [control=['if'], data=[]] return [d for d in sorted(days) if d > last_export]
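A quick check of the tag-based cutoff logic in isolation, assuming python-dateutil is available; the S3 interaction is replaced by a hard-coded LastExport tag:

from datetime import datetime, timedelta
from dateutil.parser import parse
from dateutil.tz import tzutc

tags = {'LastExport': '2019-01-03T00:00:00Z'}
start = datetime(2019, 1, 1, tzinfo=tzutc())
days = [start + timedelta(i) for i in range(7)]

last_export = parse(tags['LastExport'])
if last_export.tzinfo is None:
    last_export = last_export.replace(tzinfo=tzutc())

# Only days strictly after the recorded export survive the filter.
remaining = [d for d in sorted(days) if d > last_export]
print(remaining[0])  # 2019-01-04 00:00:00+00:00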
def plfit_lsq(x,y):
    """
    Returns A and b in y = A * x**b, fitted by least squares in log space.
    http://mathworld.wolfram.com/LeastSquaresFittingPowerLaw.html
    """
    n = len(x)
    btop = n * (log(x) * log(y)).sum() - log(x).sum() * log(y).sum()
    bbottom = n * (log(x)**2).sum() - log(x).sum()**2
    b = btop / bbottom
    a = (log(y).sum() - b * log(x).sum()) / n
    A = exp(a)
    return A, b
def function[plfit_lsq, parameter[x, y]]: constant[ Returns A and B in y=Ax^B http://mathworld.wolfram.com/LeastSquaresFittingPowerLaw.html ] variable[n] assign[=] call[name[len], parameter[name[x]]] variable[btop] assign[=] binary_operation[binary_operation[name[n] * call[binary_operation[call[name[log], parameter[name[x]]] * call[name[log], parameter[name[y]]]].sum, parameter[]]] - binary_operation[call[call[name[log], parameter[name[x]]].sum, parameter[]] * call[call[name[log], parameter[name[y]]].sum, parameter[]]]] variable[bbottom] assign[=] binary_operation[binary_operation[name[n] * call[binary_operation[call[name[log], parameter[name[x]]] ** constant[2]].sum, parameter[]]] - binary_operation[call[call[name[log], parameter[name[x]]].sum, parameter[]] ** constant[2]]] variable[b] assign[=] binary_operation[name[btop] / name[bbottom]] variable[a] assign[=] binary_operation[binary_operation[call[call[name[log], parameter[name[y]]].sum, parameter[]] - binary_operation[name[b] * call[call[name[log], parameter[name[x]]].sum, parameter[]]]] / name[n]] variable[A] assign[=] call[name[exp], parameter[name[a]]] return[tuple[[<ast.Name object at 0x7da20e9576d0>, <ast.Name object at 0x7da20e957d60>]]]
keyword[def] identifier[plfit_lsq] ( identifier[x] , identifier[y] ): literal[string] identifier[n] = identifier[len] ( identifier[x] ) identifier[btop] = identifier[n] *( identifier[log] ( identifier[x] )* identifier[log] ( identifier[y] )). identifier[sum] ()-( identifier[log] ( identifier[x] )). identifier[sum] ()*( identifier[log] ( identifier[y] )). identifier[sum] () identifier[bbottom] = identifier[n] *( identifier[log] ( identifier[x] )** literal[int] ). identifier[sum] ()-( identifier[log] ( identifier[x] ). identifier[sum] ())** literal[int] identifier[b] = identifier[btop] / identifier[bbottom] identifier[a] =( identifier[log] ( identifier[y] ). identifier[sum] ()- identifier[b] * identifier[log] ( identifier[x] ). identifier[sum] ())/ identifier[n] identifier[A] = identifier[exp] ( identifier[a] ) keyword[return] identifier[A] , identifier[b]
def plfit_lsq(x, y): """ Returns A and B in y=Ax^B http://mathworld.wolfram.com/LeastSquaresFittingPowerLaw.html """ n = len(x) btop = n * (log(x) * log(y)).sum() - log(x).sum() * log(y).sum() bbottom = n * (log(x) ** 2).sum() - log(x).sum() ** 2 b = btop / bbottom a = (log(y).sum() - b * log(x).sum()) / n A = exp(a) return (A, b)
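A sanity check of the closed-form fit, assuming the bare log/exp/sum calls in the row above come from numpy (e.g., via a star import); exact synthetic data with A=2, b=1.5 is recovered to floating-point precision:

import numpy as np

x = np.linspace(1.0, 10.0, 50)
y = 2.0 * x ** 1.5  # exact power law, no noise

n = len(x)
btop = n * (np.log(x) * np.log(y)).sum() - np.log(x).sum() * np.log(y).sum()
bbottom = n * (np.log(x) ** 2).sum() - np.log(x).sum() ** 2
b = btop / bbottom
A = np.exp((np.log(y).sum() - b * np.log(x).sum()) / n)
print(round(A, 6), round(b, 6))  # 2.0 1.5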
def chr(self):
    """The reference chromosome. Greedily returns the chromosome of the first
    exon in the exon array.

    :return: chromosome
    :rtype: string
    """
    if len(self.exons) == 0:
      sys.stderr.write("WARNING: can't return chromosome with nothing here\n")
      return None
    return self._rngs[0].chr
def function[chr, parameter[self]]: constant[the reference chromosome. greedy return the first chromosome in exon array :return: chromosome :rtype: string ] if compare[call[name[len], parameter[name[self].exons]] equal[==] constant[0]] begin[:] call[name[sys].stderr.write, parameter[constant[WARNING can't return chromsome with nothing here ]]] return[constant[None]] return[call[name[self]._rngs][constant[0]].chr]
keyword[def] identifier[chr] ( identifier[self] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[exons] )== literal[int] : identifier[sys] . identifier[stderr] . identifier[write] ( literal[string] ) keyword[return] keyword[None] keyword[return] identifier[self] . identifier[_rngs] [ literal[int] ]. identifier[chr]
def chr(self): """the reference chromosome. greedy return the first chromosome in exon array :return: chromosome :rtype: string """ if len(self.exons) == 0: sys.stderr.write("WARNING can't return chromsome with nothing here\n") return None # depends on [control=['if'], data=[]] return self._rngs[0].chr
def select_specimen(self, specimen):
    """
    Goes through the calculations necessary to plot measurement data for
    the given specimen and sets it as the current GUI specimen; also
    attempts to handle changing the current fit.
    """
    try:
        fit_index = self.pmag_results_data['specimens'][self.s].index(
            self.current_fit)
    except (KeyError, ValueError):
        fit_index = None
    # sets self.s to specimen, calculates params, etc.
    self.initialize_CART_rot(specimen)
    self.list_bound_loc = 0
    if fit_index is not None and self.s in self.pmag_results_data['specimens']:
        try:
            self.current_fit = self.pmag_results_data['specimens'][self.s][fit_index]
        except IndexError:
            self.current_fit = None
    else:
        self.current_fit = None
    if self.s != self.specimens_box.GetValue():
        self.specimens_box.SetValue(self.s)
def function[select_specimen, parameter[self, specimen]]: constant[ Goes through the calculations necessary to plot measurement data for specimen and sets specimen as current GUI specimen, also attempts to handle changing current fit. ] <ast.Try object at 0x7da18c4cece0> call[name[self].initialize_CART_rot, parameter[name[specimen]]] name[self].list_bound_loc assign[=] constant[0] if <ast.BoolOp object at 0x7da18c4cebc0> begin[:] <ast.Try object at 0x7da18c4cd930> if compare[name[self].s not_equal[!=] call[name[self].specimens_box.GetValue, parameter[]]] begin[:] call[name[self].specimens_box.SetValue, parameter[name[self].s]]
keyword[def] identifier[select_specimen] ( identifier[self] , identifier[specimen] ): literal[string] keyword[try] : identifier[fit_index] = identifier[self] . identifier[pmag_results_data] [ literal[string] ][ identifier[self] . identifier[s] ]. identifier[index] ( identifier[self] . identifier[current_fit] ) keyword[except] identifier[KeyError] : identifier[fit_index] = keyword[None] keyword[except] identifier[ValueError] : identifier[fit_index] = keyword[None] identifier[self] . identifier[initialize_CART_rot] ( identifier[specimen] ) identifier[self] . identifier[list_bound_loc] = literal[int] keyword[if] identifier[fit_index] != keyword[None] keyword[and] identifier[self] . identifier[s] keyword[in] identifier[self] . identifier[pmag_results_data] [ literal[string] ]: keyword[try] : identifier[self] . identifier[current_fit] = identifier[self] . identifier[pmag_results_data] [ literal[string] ][ identifier[self] . identifier[s] ][ identifier[fit_index] ] keyword[except] identifier[IndexError] : identifier[self] . identifier[current_fit] = keyword[None] keyword[else] : identifier[self] . identifier[current_fit] = keyword[None] keyword[if] identifier[self] . identifier[s] != identifier[self] . identifier[specimens_box] . identifier[GetValue] (): identifier[self] . identifier[specimens_box] . identifier[SetValue] ( identifier[self] . identifier[s] )
def select_specimen(self, specimen): """ Goes through the calculations necessary to plot measurement data for specimen and sets specimen as current GUI specimen, also attempts to handle changing current fit. """ try: fit_index = self.pmag_results_data['specimens'][self.s].index(self.current_fit) # depends on [control=['try'], data=[]] except KeyError: fit_index = None # depends on [control=['except'], data=[]] except ValueError: fit_index = None # depends on [control=['except'], data=[]] # sets self.s to specimen calculates params etc. self.initialize_CART_rot(specimen) self.list_bound_loc = 0 if fit_index != None and self.s in self.pmag_results_data['specimens']: try: self.current_fit = self.pmag_results_data['specimens'][self.s][fit_index] # depends on [control=['try'], data=[]] except IndexError: self.current_fit = None # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: self.current_fit = None if self.s != self.specimens_box.GetValue(): self.specimens_box.SetValue(self.s) # depends on [control=['if'], data=[]]
def sizes(args):
    """
    %prog sizes gaps.bed a.fasta b.fasta

    Take the flanks of gaps within a.fasta and map them onto b.fasta. Compile
    the results into gap size estimates in b. The output is detailed below:

    Columns are:
    1.  A scaffold
    2.  Start position
    3.  End position
    4.  Gap identifier
    5.  Gap size in A (= End - Start)
    6.  Gap size in B (based on BLAST, see below)

    For each gap, I extracted the left and right sequence (mostly 2Kb, but can
    be shorter if it runs into another gap) flanking the gap. The flanker names
    look like gap.00003L and gap.00003R, denoting the left and right flanker of
    this particular gap, respectively.

    The BLAST output is used to calculate the gap size. For each flanker
    sequence, I took the best hit and calculated the inner distance between the
    L match range and the R match range. The two flankers must map with at
    least 98% identity, and in the same orientation.

    NOTE: the sixth column in the list file is not always a valid number. Other
    values are:
    - na: both flankers are missing in B
    - Singleton: one flanker is missing
    - Different chr: flankers map to different scaffolds
    - Strand +|-: flankers map in different orientations
    - Negative value: the R flanker maps before the L flanker
    """
    from jcvi.formats.base import DictFile
    from jcvi.apps.align import blast

    p = OptionParser(sizes.__doc__)
    opts, args = p.parse_args(args)

    if len(args) != 3:
        sys.exit(not p.print_help())

    gapsbed, afasta, bfasta = args
    pf = gapsbed.rsplit(".", 1)[0]
    extbed = pf + ".ext.bed"
    extfasta = pf + ".ext.fasta"

    if need_update(gapsbed, extfasta):
        extbed, extfasta = flanks([gapsbed, afasta])

    q = op.basename(extfasta).split(".")[0]
    r = op.basename(bfasta).split(".")[0]
    blastfile = "{0}.{1}.blast".format(q, r)

    if need_update([extfasta, bfasta], blastfile):
        blastfile = blast([bfasta, extfasta, "--wordsize=50", "--pctid=98"])

    labelsfile = blast_to_twobeds(blastfile)
    labels = DictFile(labelsfile, delimiter='\t')
    bed = Bed(gapsbed)

    for b in bed:
        b.score = b.span
        accn = b.accn
        print("\t".join(str(x) for x in (b.seqid, b.start - 1, b.end,
                                         accn, b.score,
                                         labels.get(accn, "na"))))
def function[sizes, parameter[args]]: constant[ %prog sizes gaps.bed a.fasta b.fasta Take the flanks of gaps within a.fasta, map them onto b.fasta. Compile the results to the gap size estimates in b. The output is detailed below: Columns are: 1. A scaffold 2. Start position 3. End position 4. Gap identifier 5. Gap size in A (= End - Start) 6. Gap size in B (based on BLAST, see below) For each gap, I extracted the left and right sequence (mostly 2Kb, but can be shorter if it runs into another gap) flanking the gap. The flanker names look like gap.00003L and gap.00003R means the left and right flanker of this particular gap, respectively. The BLAST output is used to calculate the gap size. For each flanker sequence, I took the best hit, and calculate the inner distance between the L match range and R range. The two flankers must map with at least 98% identity, and in the same orientation. NOTE the sixth column in the list file is not always a valid number. Other values are: - na: both flankers are missing in B - Singleton: one flanker is missing - Different chr: flankers map to different scaffolds - Strand +|-: flankers map in different orientations - Negative value: the R flanker map before L flanker ] from relative_module[jcvi.formats.base] import module[DictFile] from relative_module[jcvi.apps.align] import module[blast] variable[p] assign[=] call[name[OptionParser], parameter[name[sizes].__doc__]] <ast.Tuple object at 0x7da1b08ebc70> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[3]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da1b08eb940>]] <ast.Tuple object at 0x7da1b08eb820> assign[=] name[args] variable[pf] assign[=] call[call[name[gapsbed].rsplit, parameter[constant[.], constant[1]]]][constant[0]] variable[extbed] assign[=] binary_operation[name[pf] + constant[.ext.bed]] variable[extfasta] assign[=] binary_operation[name[pf] + constant[.ext.fasta]] if call[name[need_update], parameter[name[gapsbed], name[extfasta]]] begin[:] <ast.Tuple object at 0x7da1b08eb280> assign[=] call[name[flanks], parameter[list[[<ast.Name object at 0x7da1b08eb160>, <ast.Name object at 0x7da1b08eb130>]]]] variable[q] assign[=] call[call[call[name[op].basename, parameter[name[extfasta]]].split, parameter[constant[.]]]][constant[0]] variable[r] assign[=] call[call[call[name[op].basename, parameter[name[bfasta]]].split, parameter[constant[.]]]][constant[0]] variable[blastfile] assign[=] call[constant[{0}.{1}.blast].format, parameter[name[q], name[r]]] if call[name[need_update], parameter[list[[<ast.Name object at 0x7da1b08e8130>, <ast.Name object at 0x7da1b08e8160>]], name[blastfile]]] begin[:] variable[blastfile] assign[=] call[name[blast], parameter[list[[<ast.Name object at 0x7da1b08e82b0>, <ast.Name object at 0x7da1b08e82e0>, <ast.Constant object at 0x7da1b08e8310>, <ast.Constant object at 0x7da1b08e8340>]]]] variable[labelsfile] assign[=] call[name[blast_to_twobeds], parameter[name[blastfile]]] variable[labels] assign[=] call[name[DictFile], parameter[name[labelsfile]]] variable[bed] assign[=] call[name[Bed], parameter[name[gapsbed]]] for taget[name[b]] in starred[name[bed]] begin[:] name[b].score assign[=] name[b].span variable[accn] assign[=] name[b].accn call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da1b08e8a00>]]]]
keyword[def] identifier[sizes] ( identifier[args] ): literal[string] keyword[from] identifier[jcvi] . identifier[formats] . identifier[base] keyword[import] identifier[DictFile] keyword[from] identifier[jcvi] . identifier[apps] . identifier[align] keyword[import] identifier[blast] identifier[p] = identifier[OptionParser] ( identifier[sizes] . identifier[__doc__] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[gapsbed] , identifier[afasta] , identifier[bfasta] = identifier[args] identifier[pf] = identifier[gapsbed] . identifier[rsplit] ( literal[string] , literal[int] )[ literal[int] ] identifier[extbed] = identifier[pf] + literal[string] identifier[extfasta] = identifier[pf] + literal[string] keyword[if] identifier[need_update] ( identifier[gapsbed] , identifier[extfasta] ): identifier[extbed] , identifier[extfasta] = identifier[flanks] ([ identifier[gapsbed] , identifier[afasta] ]) identifier[q] = identifier[op] . identifier[basename] ( identifier[extfasta] ). identifier[split] ( literal[string] )[ literal[int] ] identifier[r] = identifier[op] . identifier[basename] ( identifier[bfasta] ). identifier[split] ( literal[string] )[ literal[int] ] identifier[blastfile] = literal[string] . identifier[format] ( identifier[q] , identifier[r] ) keyword[if] identifier[need_update] ([ identifier[extfasta] , identifier[bfasta] ], identifier[blastfile] ): identifier[blastfile] = identifier[blast] ([ identifier[bfasta] , identifier[extfasta] , literal[string] , literal[string] ]) identifier[labelsfile] = identifier[blast_to_twobeds] ( identifier[blastfile] ) identifier[labels] = identifier[DictFile] ( identifier[labelsfile] , identifier[delimiter] = literal[string] ) identifier[bed] = identifier[Bed] ( identifier[gapsbed] ) keyword[for] identifier[b] keyword[in] identifier[bed] : identifier[b] . identifier[score] = identifier[b] . identifier[span] identifier[accn] = identifier[b] . identifier[accn] identifier[print] ( literal[string] . identifier[join] (( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[b] . identifier[seqid] , identifier[b] . identifier[start] - literal[int] , identifier[b] . identifier[end] , identifier[accn] , identifier[b] . identifier[score] , identifier[labels] . identifier[get] ( identifier[accn] , literal[string] )))))
def sizes(args): """ %prog sizes gaps.bed a.fasta b.fasta Take the flanks of gaps within a.fasta, map them onto b.fasta. Compile the results to the gap size estimates in b. The output is detailed below: Columns are: 1. A scaffold 2. Start position 3. End position 4. Gap identifier 5. Gap size in A (= End - Start) 6. Gap size in B (based on BLAST, see below) For each gap, I extracted the left and right sequence (mostly 2Kb, but can be shorter if it runs into another gap) flanking the gap. The flanker names look like gap.00003L and gap.00003R means the left and right flanker of this particular gap, respectively. The BLAST output is used to calculate the gap size. For each flanker sequence, I took the best hit, and calculate the inner distance between the L match range and R range. The two flankers must map with at least 98% identity, and in the same orientation. NOTE the sixth column in the list file is not always a valid number. Other values are: - na: both flankers are missing in B - Singleton: one flanker is missing - Different chr: flankers map to different scaffolds - Strand +|-: flankers map in different orientations - Negative value: the R flanker map before L flanker """ from jcvi.formats.base import DictFile from jcvi.apps.align import blast p = OptionParser(sizes.__doc__) (opts, args) = p.parse_args(args) if len(args) != 3: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (gapsbed, afasta, bfasta) = args pf = gapsbed.rsplit('.', 1)[0] extbed = pf + '.ext.bed' extfasta = pf + '.ext.fasta' if need_update(gapsbed, extfasta): (extbed, extfasta) = flanks([gapsbed, afasta]) # depends on [control=['if'], data=[]] q = op.basename(extfasta).split('.')[0] r = op.basename(bfasta).split('.')[0] blastfile = '{0}.{1}.blast'.format(q, r) if need_update([extfasta, bfasta], blastfile): blastfile = blast([bfasta, extfasta, '--wordsize=50', '--pctid=98']) # depends on [control=['if'], data=[]] labelsfile = blast_to_twobeds(blastfile) labels = DictFile(labelsfile, delimiter='\t') bed = Bed(gapsbed) for b in bed: b.score = b.span accn = b.accn print('\t'.join((str(x) for x in (b.seqid, b.start - 1, b.end, accn, b.score, labels.get(accn, 'na'))))) # depends on [control=['for'], data=['b']]
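The gap-size arithmetic described in the docstring reduces to the inner distance between the two flanker hit ranges on B. A minimal illustrative helper (not the jcvi implementation; coordinates are made up):

def inner_distance(left_hit, right_hit):
    """Bases between the end of the L flanker hit and the start of the R hit.

    Each hit is a (start, end) pair on the same scaffold and strand; a
    negative value means the R flanker maps before the L flanker.
    """
    (l_start, l_end), (r_start, r_end) = left_hit, right_hit
    return r_start - l_end - 1

print(inner_distance((1000, 2999), (5000, 6999)))  # 2000
print(inner_distance((5000, 6999), (1000, 2999)))  # negative: R before L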
def add_missing_price_information_message(request, item): """ Add a message to the Django messages store indicating that we failed to retrieve price information about an item. :param request: The current request. :param item: The item for which price information is missing. Example: a program title, or a course. """ messages.warning( request, _( '{strong_start}We could not gather price information for {em_start}{item}{em_end}.{strong_end} ' '{span_start}If you continue to have these issues, please contact ' '{link_start}{platform_name} support{link_end}.{span_end}' ).format( item=item, em_start='<em>', em_end='</em>', link_start='<a href="{support_link}" target="_blank">'.format( support_link=get_configuration_value('ENTERPRISE_SUPPORT_URL', settings.ENTERPRISE_SUPPORT_URL), ), platform_name=get_configuration_value('PLATFORM_NAME', settings.PLATFORM_NAME), link_end='</a>', span_start='<span>', span_end='</span>', strong_start='<strong>', strong_end='</strong>', ) )
def function[add_missing_price_information_message, parameter[request, item]]: constant[ Add a message to the Django messages store indicating that we failed to retrieve price information about an item. :param request: The current request. :param item: The item for which price information is missing. Example: a program title, or a course. ] call[name[messages].warning, parameter[name[request], call[call[name[_], parameter[constant[{strong_start}We could not gather price information for {em_start}{item}{em_end}.{strong_end} {span_start}If you continue to have these issues, please contact {link_start}{platform_name} support{link_end}.{span_end}]]].format, parameter[]]]]
keyword[def] identifier[add_missing_price_information_message] ( identifier[request] , identifier[item] ): literal[string] identifier[messages] . identifier[warning] ( identifier[request] , identifier[_] ( literal[string] literal[string] literal[string] ). identifier[format] ( identifier[item] = identifier[item] , identifier[em_start] = literal[string] , identifier[em_end] = literal[string] , identifier[link_start] = literal[string] . identifier[format] ( identifier[support_link] = identifier[get_configuration_value] ( literal[string] , identifier[settings] . identifier[ENTERPRISE_SUPPORT_URL] ), ), identifier[platform_name] = identifier[get_configuration_value] ( literal[string] , identifier[settings] . identifier[PLATFORM_NAME] ), identifier[link_end] = literal[string] , identifier[span_start] = literal[string] , identifier[span_end] = literal[string] , identifier[strong_start] = literal[string] , identifier[strong_end] = literal[string] , ) )
def add_missing_price_information_message(request, item): """ Add a message to the Django messages store indicating that we failed to retrieve price information about an item. :param request: The current request. :param item: The item for which price information is missing. Example: a program title, or a course. """ messages.warning(request, _('{strong_start}We could not gather price information for {em_start}{item}{em_end}.{strong_end} {span_start}If you continue to have these issues, please contact {link_start}{platform_name} support{link_end}.{span_end}').format(item=item, em_start='<em>', em_end='</em>', link_start='<a href="{support_link}" target="_blank">'.format(support_link=get_configuration_value('ENTERPRISE_SUPPORT_URL', settings.ENTERPRISE_SUPPORT_URL)), platform_name=get_configuration_value('PLATFORM_NAME', settings.PLATFORM_NAME), link_end='</a>', span_start='<span>', span_end='</span>', strong_start='<strong>', strong_end='</strong>'))
def write_single_coil(self, starting_address, value):
    """
    Write single Coil to Master device (Function code 5)
    starting_address: Coil to be written
    value: Coil Value to be written
    """
    self.__transactionIdentifier += 1
    if self.__ser is not None:
        if self.__ser.closed:
            raise Exceptions.SerialPortNotOpenedException("serial port not opened")
    function_code = 5
    length = 6
    transaction_identifier_lsb = self.__transactionIdentifier & 0xFF
    transaction_identifier_msb = (self.__transactionIdentifier & 0xFF00) >> 8
    length_lsb = length & 0xFF
    length_msb = (length & 0xFF00) >> 8
    starting_address_lsb = starting_address & 0xFF
    starting_address_msb = (starting_address & 0xFF00) >> 8
    # Modbus encodes a coil write as 0xFF00 (on) or 0x0000 (off).
    if value:
        valueLSB = 0x00
        valueMSB = 0xFF00 >> 8
    else:
        valueLSB = 0x00
        valueMSB = 0x00 >> 8
    if self.__ser is not None:
        data = bytearray([self.__unitIdentifier, function_code,
                          starting_address_msb, starting_address_lsb,
                          valueMSB, valueLSB, 0, 0])
        crc = self.__calculateCRC(data, len(data) - 2, 0)
        data[6] = crc & 0xFF           # CRC low byte first on the wire
        data[7] = (crc & 0xFF00) >> 8  # CRC high byte
        self.__ser.write(data)
        bytes_to_read = 8
        data = bytearray(self.__ser.read(bytes_to_read))
        if len(data) < bytes_to_read:
            raise Exceptions.TimeoutError('Read timeout Exception')
        if (data[1] == 0x85) and (data[2] == 0x01):
            raise Exceptions.function_codeNotSupportedException("Function code not supported by master")
        if (data[1] == 0x85) and (data[2] == 0x02):
            raise Exceptions.starting_addressInvalidException("Address invalid")
        if (data[1] == 0x85) and (data[2] == 0x03):
            raise Exceptions.QuantityInvalidException("Value invalid")
        if (data[1] == 0x85) and (data[2] == 0x04):
            raise Exceptions.ModbusException("error reading")
        crc = self.__calculateCRC(data, len(data) - 2, 0)
        crcLSB = crc & 0xFF
        crcMSB = (crc & 0xFF00) >> 8
        # A mismatch in either CRC byte invalidates the frame.
        if (crcLSB != data[len(data) - 2]) or (crcMSB != data[len(data) - 1]):
            raise Exceptions.CRCCheckFailedException("CRC check failed")
        return data[1] == self.__unitIdentifier
    else:
        protocolIdentifierLSB = 0x00
        protocolIdentifierMSB = 0x00
        length_lsb = 0x06
        length_msb = 0x00
        data = bytearray([transaction_identifier_msb, transaction_identifier_lsb,
                          protocolIdentifierMSB, protocolIdentifierLSB,
                          length_msb, length_lsb, self.__unitIdentifier,
                          function_code, starting_address_msb,
                          starting_address_lsb, valueMSB, valueLSB])
        self.__tcpClientSocket.send(data)
        bytes_to_read = 12
        self.__receivedata = bytearray()
        try:
            while len(self.__receivedata) == 0:
                pass
        except Exception:
            raise Exception('Read Timeout')
        data = bytearray(self.__receivedata)
        if (data[1 + 6] == 0x85) and (data[2 + 6] == 0x01):
            raise Exceptions.function_codeNotSupportedException("Function code not supported by master")
        if (data[1 + 6] == 0x85) and (data[2 + 6] == 0x02):
            raise Exceptions.starting_addressInvalidException("Address invalid")
        if (data[1 + 6] == 0x85) and (data[2 + 6] == 0x03):
            raise Exceptions.QuantityInvalidException("Value invalid")
        if (data[1 + 6] == 0x85) and (data[2 + 6] == 0x04):
            raise Exceptions.ModbusException("error reading")
        return True
def function[write_single_coil, parameter[self, starting_address, value]]: constant[ Write single Coil to Master device (Function code 5) starting_address: Coil to be written value: Coil Value to be written ] <ast.AugAssign object at 0x7da1b1c60ee0> if compare[name[self].__ser is_not constant[None]] begin[:] if name[self].__ser.closed begin[:] <ast.Raise object at 0x7da1b1c60940> variable[function_code] assign[=] constant[5] variable[length] assign[=] constant[6] variable[transaction_identifier_lsb] assign[=] binary_operation[name[self].__transactionIdentifier <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[transaction_identifier_msb] assign[=] binary_operation[binary_operation[name[self].__transactionIdentifier <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] variable[length_lsb] assign[=] binary_operation[name[length] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[length_msb] assign[=] binary_operation[binary_operation[name[length] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] variable[starting_address_lsb] assign[=] binary_operation[name[starting_address] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[starting_address_msb] assign[=] binary_operation[binary_operation[name[starting_address] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] if name[value] begin[:] variable[valueLSB] assign[=] constant[0] variable[valueMSB] assign[=] binary_operation[constant[65280] <ast.RShift object at 0x7da2590d6a40> constant[8]] if compare[name[self].__ser is_not constant[None]] begin[:] variable[data] assign[=] call[name[bytearray], parameter[list[[<ast.Attribute object at 0x7da1b1b17160>, <ast.Name object at 0x7da1b1b178e0>, <ast.Name object at 0x7da1b1b17c10>, <ast.Name object at 0x7da1b1b17b50>, <ast.Name object at 0x7da1b1b14160>, <ast.Name object at 0x7da1b1b16950>, <ast.Constant object at 0x7da1b1b16f80>, <ast.Constant object at 0x7da1b1b17be0>]]]] variable[crc] assign[=] call[name[self].__calculateCRC, parameter[name[data], binary_operation[call[name[len], parameter[name[data]]] - constant[2]], constant[0]]] variable[crcLSB] assign[=] binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[crcMSB] assign[=] binary_operation[binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] call[name[data]][constant[6]] assign[=] name[crcLSB] call[name[data]][constant[7]] assign[=] name[crcMSB] call[name[self].__ser.write, parameter[name[data]]] variable[bytes_to_read] assign[=] constant[8] variable[data] assign[=] call[name[self].__ser.read, parameter[name[bytes_to_read]]] variable[b] assign[=] call[name[bytearray], parameter[name[data]]] variable[data] assign[=] name[b] if compare[call[name[len], parameter[name[data]]] less[<] name[bytes_to_read]] begin[:] <ast.Raise object at 0x7da1b1b17c40> if binary_operation[compare[call[name[data]][constant[1]] equal[==] constant[133]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[data]][constant[2]] equal[==] constant[1]]] begin[:] <ast.Raise object at 0x7da1b1b17b80> if binary_operation[compare[call[name[data]][constant[1]] equal[==] constant[133]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[data]][constant[2]] equal[==] constant[2]]] begin[:] <ast.Raise object at 0x7da1b1b15270> if 
binary_operation[compare[call[name[data]][constant[1]] equal[==] constant[133]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[data]][constant[2]] equal[==] constant[3]]] begin[:] <ast.Raise object at 0x7da1b1b85c00> if binary_operation[compare[call[name[data]][constant[1]] equal[==] constant[133]] <ast.BitAnd object at 0x7da2590d6b60> compare[call[name[data]][constant[2]] equal[==] constant[4]]] begin[:] <ast.Raise object at 0x7da1b1b87e20> variable[crc] assign[=] call[name[self].__calculateCRC, parameter[name[data], binary_operation[call[name[len], parameter[name[data]]] - constant[2]], constant[0]]] variable[crcLSB] assign[=] binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[crcMSB] assign[=] binary_operation[binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] if binary_operation[compare[name[crcLSB] not_equal[!=] call[name[data]][binary_operation[call[name[len], parameter[name[data]]] - constant[2]]]] <ast.BitAnd object at 0x7da2590d6b60> compare[name[crcMSB] not_equal[!=] call[name[data]][binary_operation[call[name[len], parameter[name[data]]] - constant[1]]]]] begin[:] <ast.Raise object at 0x7da1b1ba96c0> if compare[call[name[data]][constant[1]] equal[==] name[self].__unitIdentifier] begin[:] return[constant[True]]
keyword[def] identifier[write_single_coil] ( identifier[self] , identifier[starting_address] , identifier[value] ): literal[string] identifier[self] . identifier[__transactionIdentifier] += literal[int] keyword[if] ( identifier[self] . identifier[__ser] keyword[is] keyword[not] keyword[None] ): keyword[if] ( identifier[self] . identifier[__ser] . identifier[closed] ): keyword[raise] identifier[Exception] . identifier[SerialPortNotOpenedException] ( literal[string] ) identifier[function_code] = literal[int] identifier[length] = literal[int] ; identifier[transaction_identifier_lsb] = identifier[self] . identifier[__transactionIdentifier] & literal[int] identifier[transaction_identifier_msb] =(( identifier[self] . identifier[__transactionIdentifier] & literal[int] )>> literal[int] ) identifier[length_lsb] = identifier[length] & literal[int] identifier[length_msb] =( identifier[length] & literal[int] )>> literal[int] identifier[starting_address_lsb] = identifier[starting_address] & literal[int] identifier[starting_address_msb] =( identifier[starting_address] & literal[int] )>> literal[int] keyword[if] identifier[value] : identifier[valueLSB] = literal[int] identifier[valueMSB] =( literal[int] )>> literal[int] keyword[else] : identifier[valueLSB] = literal[int] identifier[valueMSB] =( literal[int] )>> literal[int] keyword[if] ( identifier[self] . identifier[__ser] keyword[is] keyword[not] keyword[None] ): identifier[data] = identifier[bytearray] ([ identifier[self] . identifier[__unitIdentifier] , identifier[function_code] , identifier[starting_address_msb] , identifier[starting_address_lsb] , identifier[valueMSB] , identifier[valueLSB] , literal[int] , literal[int] ]) identifier[crc] = identifier[self] . identifier[__calculateCRC] ( identifier[data] , identifier[len] ( identifier[data] )- literal[int] , literal[int] ) identifier[crcLSB] = identifier[crc] & literal[int] identifier[crcMSB] =( identifier[crc] & literal[int] )>> literal[int] identifier[data] [ literal[int] ]= identifier[crcLSB] identifier[data] [ literal[int] ]= identifier[crcMSB] identifier[self] . identifier[__ser] . identifier[write] ( identifier[data] ) identifier[bytes_to_read] = literal[int] identifier[data] = identifier[self] . identifier[__ser] . identifier[read] ( identifier[bytes_to_read] ) identifier[b] = identifier[bytearray] ( identifier[data] ) identifier[data] = identifier[b] keyword[if] ( identifier[len] ( identifier[data] )< identifier[bytes_to_read] ): keyword[raise] identifier[Exceptions] . identifier[TimeoutError] ( literal[string] ) keyword[if] (( identifier[data] [ literal[int] ]== literal[int] )&( identifier[data] [ literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[function_codeNotSupportedException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] ]== literal[int] )&( identifier[data] [ literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[starting_addressInvalidException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] ]== literal[int] )&( identifier[data] [ literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[QuantityInvalidException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] ]== literal[int] )&( identifier[data] [ literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[ModbusException] ( literal[string] ); identifier[crc] = identifier[self] . 
identifier[__calculateCRC] ( identifier[data] , identifier[len] ( identifier[data] )- literal[int] , literal[int] ) identifier[crcLSB] = identifier[crc] & literal[int] identifier[crcMSB] =( identifier[crc] & literal[int] )>> literal[int] keyword[if] (( identifier[crcLSB] != identifier[data] [ identifier[len] ( identifier[data] )- literal[int] ])&( identifier[crcMSB] != identifier[data] [ identifier[len] ( identifier[data] )- literal[int] ])): keyword[raise] identifier[Exceptions] . identifier[CRCCheckFailedException] ( literal[string] ); keyword[if] identifier[data] [ literal[int] ]== identifier[self] . identifier[__unitIdentifier] : keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False] keyword[else] : identifier[protocolIdentifierLSB] = literal[int] ; identifier[protocolIdentifierMSB] = literal[int] ; identifier[length_lsb] = literal[int] ; identifier[length_msb] = literal[int] ; identifier[data] = identifier[bytearray] ([ identifier[transaction_identifier_msb] , identifier[transaction_identifier_lsb] , identifier[protocolIdentifierMSB] , identifier[protocolIdentifierLSB] , identifier[length_msb] , identifier[length_lsb] , identifier[self] . identifier[__unitIdentifier] , identifier[function_code] , identifier[starting_address_msb] , identifier[starting_address_lsb] , identifier[valueMSB] , identifier[valueLSB] ]) identifier[self] . identifier[__tcpClientSocket] . identifier[send] ( identifier[data] ) identifier[bytes_to_read] = literal[int] identifier[self] . identifier[__receivedata] = identifier[bytearray] () keyword[try] : keyword[while] ( identifier[len] ( identifier[self] . identifier[__receivedata] )== literal[int] ): keyword[pass] keyword[except] identifier[Exception] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[data] = identifier[bytearray] ( identifier[self] . identifier[__receivedata] ) keyword[if] (( identifier[data] [ literal[int] + literal[int] ]== literal[int] )&( identifier[data] [ literal[int] + literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[function_codeNotSupportedException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] + literal[int] ]== literal[int] )&( identifier[data] [ literal[int] + literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[starting_addressInvalidException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] + literal[int] ]== literal[int] )&( identifier[data] [ literal[int] + literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[QuantityInvalidException] ( literal[string] ); keyword[if] (( identifier[data] [ literal[int] + literal[int] ]== literal[int] )&( identifier[data] [ literal[int] + literal[int] ]== literal[int] )): keyword[raise] identifier[Exceptions] . identifier[ModbusException] ( literal[string] ); keyword[return] keyword[True]
def write_single_coil(self, starting_address, value): """ Write single Coil to Master device (Function code 5) starting_address: Coil to be written value: Coil Value to be written """ self.__transactionIdentifier += 1 if self.__ser is not None: if self.__ser.closed: raise Exception.SerialPortNotOpenedException('serial port not opened') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] function_code = 5 length = 6 transaction_identifier_lsb = self.__transactionIdentifier & 255 transaction_identifier_msb = (self.__transactionIdentifier & 65280) >> 8 length_lsb = length & 255 length_msb = (length & 65280) >> 8 starting_address_lsb = starting_address & 255 starting_address_msb = (starting_address & 65280) >> 8 if value: valueLSB = 0 valueMSB = 65280 >> 8 # depends on [control=['if'], data=[]] else: valueLSB = 0 valueMSB = 0 >> 8 if self.__ser is not None: data = bytearray([self.__unitIdentifier, function_code, starting_address_msb, starting_address_lsb, valueMSB, valueLSB, 0, 0]) crc = self.__calculateCRC(data, len(data) - 2, 0) crcLSB = crc & 255 crcMSB = (crc & 65280) >> 8 data[6] = crcLSB data[7] = crcMSB self.__ser.write(data) bytes_to_read = 8 data = self.__ser.read(bytes_to_read) b = bytearray(data) data = b if len(data) < bytes_to_read: raise Exceptions.TimeoutError('Read timeout Exception') # depends on [control=['if'], data=[]] if (data[1] == 133) & (data[2] == 1): raise Exceptions.function_codeNotSupportedException('Function code not supported by master') # depends on [control=['if'], data=[]] if (data[1] == 133) & (data[2] == 2): raise Exceptions.starting_addressInvalidException('Address invalid') # depends on [control=['if'], data=[]] if (data[1] == 133) & (data[2] == 3): raise Exceptions.QuantityInvalidException('Value invalid') # depends on [control=['if'], data=[]] if (data[1] == 133) & (data[2] == 4): raise Exceptions.ModbusException('error reading') # depends on [control=['if'], data=[]] crc = self.__calculateCRC(data, len(data) - 2, 0) crcLSB = crc & 255 crcMSB = (crc & 65280) >> 8 if (crcLSB != data[len(data) - 2]) & (crcMSB != data[len(data) - 1]): raise Exceptions.CRCCheckFailedException('CRC check failed') # depends on [control=['if'], data=[]] if data[1] == self.__unitIdentifier: return True # depends on [control=['if'], data=[]] else: return False # depends on [control=['if'], data=[]] else: protocolIdentifierLSB = 0 protocolIdentifierMSB = 0 length_lsb = 6 length_msb = 0 data = bytearray([transaction_identifier_msb, transaction_identifier_lsb, protocolIdentifierMSB, protocolIdentifierLSB, length_msb, length_lsb, self.__unitIdentifier, function_code, starting_address_msb, starting_address_lsb, valueMSB, valueLSB]) self.__tcpClientSocket.send(data) bytes_to_read = 12 self.__receivedata = bytearray() try: while len(self.__receivedata) == 0: pass # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except Exception: raise Exception('Read Timeout') # depends on [control=['except'], data=[]] data = bytearray(self.__receivedata) if (data[1 + 6] == 133) & (data[2 + 6] == 1): raise Exceptions.function_codeNotSupportedException('Function code not supported by master') # depends on [control=['if'], data=[]] if (data[1 + 6] == 133) & (data[2 + 6] == 2): raise Exceptions.starting_addressInvalidException('Address invalid') # depends on [control=['if'], data=[]] if (data[1 + 6] == 133) & (data[2 + 6] == 3): raise Exceptions.QuantityInvalidException('Value invalid') # depends on [control=['if'], data=[]] if (data[1 + 6] == 133) 
& (data[2 + 6] == 4): raise Exceptions.ModbusException('error reading') return True # depends on [control=['if'], data=[]]
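The private __calculateCRC helper is not shown in this row. For reference, a standard CRC-16/Modbus routine (polynomial 0xA001, initial value 0xFFFF) can be sketched as below; this is an assumption about what the helper computes, checked against the canonical value for b'123456789':

def crc16_modbus(data, length):
    """CRC-16/Modbus over the first `length` bytes (sent LSB-first on the wire)."""
    crc = 0xFFFF
    for byte in data[:length]:
        crc ^= byte
        for _ in range(8):
            if crc & 1:
                crc = (crc >> 1) ^ 0xA001
            else:
                crc >>= 1
    return crc

frame = bytearray(b'123456789')
print(hex(crc16_modbus(frame, len(frame))))  # 0x4b37, the standard check value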
def get_fields_by_prop(cls, prop_key, prop_val): """ Return a list of field names matching a prop key/val :param prop_key: key name :param prop_val: value :return: list """ ret = [] for key, val in cls.get_fields_with_prop(prop_key): if val == prop_val: ret.append(key) return ret
def function[get_fields_by_prop, parameter[cls, prop_key, prop_val]]: constant[ Return a list of field names matching a prop key/val :param prop_key: key name :param prop_val: value :return: list ] variable[ret] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b131bdc0>, <ast.Name object at 0x7da1b131ba90>]]] in starred[call[name[cls].get_fields_with_prop, parameter[name[prop_key]]]] begin[:] if compare[name[val] equal[==] name[prop_val]] begin[:] call[name[ret].append, parameter[name[key]]] return[name[ret]]
keyword[def] identifier[get_fields_by_prop] ( identifier[cls] , identifier[prop_key] , identifier[prop_val] ): literal[string] identifier[ret] =[] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[cls] . identifier[get_fields_with_prop] ( identifier[prop_key] ): keyword[if] identifier[val] == identifier[prop_val] : identifier[ret] . identifier[append] ( identifier[key] ) keyword[return] identifier[ret]
def get_fields_by_prop(cls, prop_key, prop_val): """ Return a list of field names matching a prop key/val :param prop_key: key name :param prop_val: value :return: list """ ret = [] for (key, val) in cls.get_fields_with_prop(prop_key): if val == prop_val: ret.append(key) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ret
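Assuming get_fields_with_prop yields (field_name, prop_value) pairs, the filter above reduces to a one-line comprehension over sample data:

pairs = [('name', 'str'), ('age', 'int'), ('email', 'str')]

# Equivalent of get_fields_by_prop(cls, prop_key, 'str') over these pairs.
print([key for key, val in pairs if val == 'str'])  # ['name', 'email']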
def thresholdcands(candsfile, threshold, numberperscan=1): """ Returns list of significant candidate loc in candsfile. Can define threshold and maximum number of locs per scan. Works on merge or per-scan cands pkls. """ # read metadata and define columns of interest loc, prop, d = read_candidates(candsfile, returnstate=True) try: scancol = d['featureind'].index('scan') except ValueError: scancol = -1 if 'snr2' in d['features']: snrcol = d['features'].index('snr2') elif 'snr1' in d['features']: snrcol = d['features'].index('snr1') snrs = [prop[i][snrcol] for i in range(len(prop)) if prop[i][snrcol] > threshold] # calculate unique list of locs of interest siglocs = [list(loc[i]) for i in range(len(prop)) if prop[i][snrcol] > threshold] siglocssort = sorted(zip([list(ll) for ll in siglocs], snrs), key=lambda stuff: stuff[1], reverse=True) if scancol >= 0: scanset = list(set([siglocs[i][scancol] for i in range(len(siglocs))])) candlist= [] for scan in scanset: logger.debug('looking in scan %d' % scan) count = 0 for sigloc in siglocssort: if sigloc[0][scancol] == scan: logger.debug('adding sigloc %s' % str(sigloc)) candlist.append(sigloc) count += 1 if count >= numberperscan: break else: candlist = siglocssort[:numberperscan] logger.debug('Returning %d cands above threshold %.1f' % (len(candlist), threshold)) return [loc for loc,snr in candlist]
def function[thresholdcands, parameter[candsfile, threshold, numberperscan]]: constant[ Returns list of significant candidate loc in candsfile. Can define threshold and maximum number of locs per scan. Works on merge or per-scan cands pkls. ] <ast.Tuple object at 0x7da20c6c7100> assign[=] call[name[read_candidates], parameter[name[candsfile]]] <ast.Try object at 0x7da20c6c72e0> if compare[constant[snr2] in call[name[d]][constant[features]]] begin[:] variable[snrcol] assign[=] call[call[name[d]][constant[features]].index, parameter[constant[snr2]]] variable[snrs] assign[=] <ast.ListComp object at 0x7da1b2524940> variable[siglocs] assign[=] <ast.ListComp object at 0x7da1b2524cd0> variable[siglocssort] assign[=] call[name[sorted], parameter[call[name[zip], parameter[<ast.ListComp object at 0x7da1b25261d0>, name[snrs]]]]] if compare[name[scancol] greater_or_equal[>=] constant[0]] begin[:] variable[scanset] assign[=] call[name[list], parameter[call[name[set], parameter[<ast.ListComp object at 0x7da1b25270d0>]]]] variable[candlist] assign[=] list[[]] for taget[name[scan]] in starred[name[scanset]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[looking in scan %d] <ast.Mod object at 0x7da2590d6920> name[scan]]]] variable[count] assign[=] constant[0] for taget[name[sigloc]] in starred[name[siglocssort]] begin[:] if compare[call[call[name[sigloc]][constant[0]]][name[scancol]] equal[==] name[scan]] begin[:] call[name[logger].debug, parameter[binary_operation[constant[adding sigloc %s] <ast.Mod object at 0x7da2590d6920> call[name[str], parameter[name[sigloc]]]]]] call[name[candlist].append, parameter[name[sigloc]]] <ast.AugAssign object at 0x7da1b25264d0> if compare[name[count] greater_or_equal[>=] name[numberperscan]] begin[:] break call[name[logger].debug, parameter[binary_operation[constant[Returning %d cands above threshold %.1f] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da1b2526d70>, <ast.Name object at 0x7da1b2525ff0>]]]]] return[<ast.ListComp object at 0x7da1b2524160>]
keyword[def] identifier[thresholdcands] ( identifier[candsfile] , identifier[threshold] , identifier[numberperscan] = literal[int] ): literal[string] identifier[loc] , identifier[prop] , identifier[d] = identifier[read_candidates] ( identifier[candsfile] , identifier[returnstate] = keyword[True] ) keyword[try] : identifier[scancol] = identifier[d] [ literal[string] ]. identifier[index] ( literal[string] ) keyword[except] identifier[ValueError] : identifier[scancol] =- literal[int] keyword[if] literal[string] keyword[in] identifier[d] [ literal[string] ]: identifier[snrcol] = identifier[d] [ literal[string] ]. identifier[index] ( literal[string] ) keyword[elif] literal[string] keyword[in] identifier[d] [ literal[string] ]: identifier[snrcol] = identifier[d] [ literal[string] ]. identifier[index] ( literal[string] ) identifier[snrs] =[ identifier[prop] [ identifier[i] ][ identifier[snrcol] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[prop] )) keyword[if] identifier[prop] [ identifier[i] ][ identifier[snrcol] ]> identifier[threshold] ] identifier[siglocs] =[ identifier[list] ( identifier[loc] [ identifier[i] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[prop] )) keyword[if] identifier[prop] [ identifier[i] ][ identifier[snrcol] ]> identifier[threshold] ] identifier[siglocssort] = identifier[sorted] ( identifier[zip] ([ identifier[list] ( identifier[ll] ) keyword[for] identifier[ll] keyword[in] identifier[siglocs] ], identifier[snrs] ), identifier[key] = keyword[lambda] identifier[stuff] : identifier[stuff] [ literal[int] ], identifier[reverse] = keyword[True] ) keyword[if] identifier[scancol] >= literal[int] : identifier[scanset] = identifier[list] ( identifier[set] ([ identifier[siglocs] [ identifier[i] ][ identifier[scancol] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[siglocs] ))])) identifier[candlist] =[] keyword[for] identifier[scan] keyword[in] identifier[scanset] : identifier[logger] . identifier[debug] ( literal[string] % identifier[scan] ) identifier[count] = literal[int] keyword[for] identifier[sigloc] keyword[in] identifier[siglocssort] : keyword[if] identifier[sigloc] [ literal[int] ][ identifier[scancol] ]== identifier[scan] : identifier[logger] . identifier[debug] ( literal[string] % identifier[str] ( identifier[sigloc] )) identifier[candlist] . identifier[append] ( identifier[sigloc] ) identifier[count] += literal[int] keyword[if] identifier[count] >= identifier[numberperscan] : keyword[break] keyword[else] : identifier[candlist] = identifier[siglocssort] [: identifier[numberperscan] ] identifier[logger] . identifier[debug] ( literal[string] %( identifier[len] ( identifier[candlist] ), identifier[threshold] )) keyword[return] [ identifier[loc] keyword[for] identifier[loc] , identifier[snr] keyword[in] identifier[candlist] ]
def thresholdcands(candsfile, threshold, numberperscan=1): """ Returns list of significant candidate loc in candsfile. Can define threshold and maximum number of locs per scan. Works on merge or per-scan cands pkls. """ # read metadata and define columns of interest (loc, prop, d) = read_candidates(candsfile, returnstate=True) try: scancol = d['featureind'].index('scan') # depends on [control=['try'], data=[]] except ValueError: scancol = -1 # depends on [control=['except'], data=[]] if 'snr2' in d['features']: snrcol = d['features'].index('snr2') # depends on [control=['if'], data=[]] elif 'snr1' in d['features']: snrcol = d['features'].index('snr1') # depends on [control=['if'], data=[]] snrs = [prop[i][snrcol] for i in range(len(prop)) if prop[i][snrcol] > threshold] # calculate unique list of locs of interest siglocs = [list(loc[i]) for i in range(len(prop)) if prop[i][snrcol] > threshold] siglocssort = sorted(zip([list(ll) for ll in siglocs], snrs), key=lambda stuff: stuff[1], reverse=True) if scancol >= 0: scanset = list(set([siglocs[i][scancol] for i in range(len(siglocs))])) candlist = [] for scan in scanset: logger.debug('looking in scan %d' % scan) count = 0 for sigloc in siglocssort: if sigloc[0][scancol] == scan: logger.debug('adding sigloc %s' % str(sigloc)) candlist.append(sigloc) count += 1 # depends on [control=['if'], data=[]] if count >= numberperscan: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['sigloc']] # depends on [control=['for'], data=['scan']] # depends on [control=['if'], data=['scancol']] else: candlist = siglocssort[:numberperscan] logger.debug('Returning %d cands above threshold %.1f' % (len(candlist), threshold)) return [loc for (loc, snr) in candlist]
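The per-scan selection amounts to a global SNR sort followed by a counted pass per scan; a self-contained re-implementation of that logic on made-up candidates:

# Each entry: (candidate_loc, snr), where candidate_loc = (scan, integration).
cands = [((0, 10), 8.2), ((0, 11), 7.1), ((1, 3), 9.5), ((1, 4), 6.8)]
threshold, numberperscan = 7.0, 1

sig = sorted([c for c in cands if c[1] > threshold],
             key=lambda c: c[1], reverse=True)

candlist = []
for scan in {loc[0] for loc, _ in sig}:
    # Keep at most `numberperscan` highest-SNR candidates per scan.
    candlist.extend([c for c in sig if c[0][0] == scan][:numberperscan])

print(sorted(loc for loc, snr in candlist))  # [(0, 10), (1, 3)]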
def progressbar(iterable, length=23):
    """Print a simple progress bar while processing the given iterable.

    Function |progressbar| does print the progress bar when option
    `printprogress` is activated:

    >>> from hydpy import pub
    >>> pub.options.printprogress = True

    You can pass an iterable object. Say you want to calculate the sum
    of all integer values from 1 to 100 and print the progress of the
    calculation. Using function |range| (which returns a list in Python 2
    and an iterator in Python3, but both are fine), one just has to
    interpose function |progressbar|:

    >>> from hydpy.core.printtools import progressbar
    >>> x_sum = 0
    >>> for x in progressbar(range(1, 101)):
    ...     x_sum += x
        |---------------------|
        ***********************
    >>> x_sum
    5050

    To prevent possible interim print commands from dismembering the status
    bar, they are delayed until the status bar is complete. For intermediate
    print outs of each fiftieth calculation, the result looks as follows:

    >>> x_sum = 0
    >>> for x in progressbar(range(1, 101)):
    ...     x_sum += x
    ...     if not x % 50:
    ...         print(x, x_sum)
        |---------------------|
        ***********************
    50 1275
    100 5050

    The number of characters of the progress bar can be changed:

    >>> for i in progressbar(range(100), length=50):
    ...     continue
        |------------------------------------------------|
        **************************************************

    But its maximum number of characters is restricted by the length of the
    given iterable:

    >>> for i in progressbar(range(10), length=50):
    ...     continue
        |--------|
        **********

    The smallest possible progress bar has two characters:

    >>> for i in progressbar(range(2)):
    ...     continue
        ||
        **

    For iterables of length one or zero, no progress bar is plotted:

    >>> for i in progressbar(range(1)):
    ...     continue

    The same is true when the `printprogress` option is inactivated:

    >>> pub.options.printprogress = False
    >>> for i in progressbar(range(100)):
    ...     continue
    """
    if hydpy.pub.options.printprogress and (len(iterable) > 1):
        temp_name = os.path.join(tempfile.gettempdir(),
                                 'HydPy_progressbar_stdout')
        temp_stdout = open(temp_name, 'w')
        real_stdout = sys.stdout
        try:
            sys.stdout = temp_stdout
            nmbstars = min(len(iterable), length)
            nmbcounts = len(iterable)/nmbstars
            indentation = ' '*max(_printprogress_indentation, 0)
            with PrintStyle(color=36, font=1, file=real_stdout):
                print('    %s|%s|\n%s    ' % (indentation,
                                              '-'*(nmbstars-2),
                                              indentation),
                      end='', file=real_stdout)
                counts = 1.
                for next_ in iterable:
                    counts += 1.
                    if counts >= nmbcounts:
                        print(end='*', file=real_stdout)
                        counts -= nmbcounts
                    yield next_
        finally:
            try:
                temp_stdout.close()
            except BaseException:
                pass
            sys.stdout = real_stdout
            print()
            with open(temp_name, 'r') as temp_stdout:
                sys.stdout.write(temp_stdout.read())
            sys.stdout.flush()
    else:
        for next_ in iterable:
            yield next_
def function[progressbar, parameter[iterable, length]]: constant[Print a simple progress bar while processing the given iterable. Function |progressbar| does print the progress bar when option `printprogress` is activated: >>> from hydpy import pub >>> pub.options.printprogress = True You can pass an iterable object. Say you want to calculate the sum of all integer values from 1 to 100 and print the progress of the calculation. Using function |range| (which returns a list in Python 2 and an iterator in Python3, but both are fine), one just has to interpose function |progressbar|: >>> from hydpy.core.printtools import progressbar >>> x_sum = 0 >>> for x in progressbar(range(1, 101)): ... x_sum += x |---------------------| *********************** >>> x_sum 5050 To prevent possible interim print commands from dismembering the status bar, they are delayed until the status bar is complete. For intermediate print outs of each fiftieth calculation, the result looks as follows: >>> x_sum = 0 >>> for x in progressbar(range(1, 101)): ... x_sum += x ... if not x % 50: ... print(x, x_sum) |---------------------| *********************** 50 1275 100 5050 The number of characters of the progress bar can be changed: >>> for i in progressbar(range(100), length=50): ... continue |------------------------------------------------| ************************************************** But its maximum number of characters is restricted by the length of the given iterable: >>> for i in progressbar(range(10), length=50): ... continue |--------| ********** The smallest possible progress bar has two characters: >>> for i in progressbar(range(2)): ... continue || ** For iterables of length one or zero, no progress bar is plotted: >>> for i in progressbar(range(1)): ... continue The same is true when the `printprogress` option is inactivated: >>> pub.options.printprogress = False >>> for i in progressbar(range(100)): ... continue ] if <ast.BoolOp object at 0x7da20c6e5150> begin[:] variable[temp_name] assign[=] call[name[os].path.join, parameter[call[name[tempfile].gettempdir, parameter[]], constant[HydPy_progressbar_stdout]]] variable[temp_stdout] assign[=] call[name[open], parameter[name[temp_name], constant[w]]] variable[real_stdout] assign[=] name[sys].stdout <ast.Try object at 0x7da20c6e5ea0>
keyword[def] identifier[progressbar] ( identifier[iterable] , identifier[length] = literal[int] ): literal[string] keyword[if] identifier[hydpy] . identifier[pub] . identifier[options] . identifier[printprogress] keyword[and] ( identifier[len] ( identifier[iterable] )> literal[int] ): identifier[temp_name] = identifier[os] . identifier[path] . identifier[join] ( identifier[tempfile] . identifier[gettempdir] (), literal[string] ) identifier[temp_stdout] = identifier[open] ( identifier[temp_name] , literal[string] ) identifier[real_stdout] = identifier[sys] . identifier[stdout] keyword[try] : identifier[sys] . identifier[stdout] = identifier[temp_stdout] identifier[nmbstars] = identifier[min] ( identifier[len] ( identifier[iterable] ), identifier[length] ) identifier[nmbcounts] = identifier[len] ( identifier[iterable] )/ identifier[nmbstars] identifier[indentation] = literal[string] * identifier[max] ( identifier[_printprogress_indentation] , literal[int] ) keyword[with] identifier[PrintStyle] ( identifier[color] = literal[int] , identifier[font] = literal[int] , identifier[file] = identifier[real_stdout] ): identifier[print] ( literal[string] %( identifier[indentation] , literal[string] *( identifier[nmbstars] - literal[int] ), identifier[indentation] ), identifier[end] = literal[string] , identifier[file] = identifier[real_stdout] ) identifier[counts] = literal[int] keyword[for] identifier[next_] keyword[in] identifier[iterable] : identifier[counts] += literal[int] keyword[if] identifier[counts] >= identifier[nmbcounts] : identifier[print] ( identifier[end] = literal[string] , identifier[file] = identifier[real_stdout] ) identifier[counts] -= identifier[nmbcounts] keyword[yield] identifier[next_] keyword[finally] : keyword[try] : identifier[temp_stdout] . identifier[close] () keyword[except] identifier[BaseException] : keyword[pass] identifier[sys] . identifier[stdout] = identifier[real_stdout] identifier[print] () keyword[with] identifier[open] ( identifier[temp_name] , literal[string] ) keyword[as] identifier[temp_stdout] : identifier[sys] . identifier[stdout] . identifier[write] ( identifier[temp_stdout] . identifier[read] ()) identifier[sys] . identifier[stdout] . identifier[flush] () keyword[else] : keyword[for] identifier[next_] keyword[in] identifier[iterable] : keyword[yield] identifier[next_]
def progressbar(iterable, length=23): """Print a simple progress bar while processing the given iterable. Function |progressbar| does print the progress bar when option `printprogress` is activated: >>> from hydpy import pub >>> pub.options.printprogress = True You can pass an iterable object. Say you want to calculate the sum of all integer values from 1 to 100 and print the progress of the calculation. Using function |range| (which returns a list in Python 2 and an iterator in Python3, but both are fine), one just has to interpose function |progressbar|: >>> from hydpy.core.printtools import progressbar >>> x_sum = 0 >>> for x in progressbar(range(1, 101)): ... x_sum += x |---------------------| *********************** >>> x_sum 5050 To prevent possible interim print commands from dismembering the status bar, they are delayed until the status bar is complete. For intermediate print outs of each fiftieth calculation, the result looks as follows: >>> x_sum = 0 >>> for x in progressbar(range(1, 101)): ... x_sum += x ... if not x % 50: ... print(x, x_sum) |---------------------| *********************** 50 1275 100 5050 The number of characters of the progress bar can be changed: >>> for i in progressbar(range(100), length=50): ... continue |------------------------------------------------| ************************************************** But its maximum number of characters is restricted by the length of the given iterable: >>> for i in progressbar(range(10), length=50): ... continue |--------| ********** The smallest possible progress bar has two characters: >>> for i in progressbar(range(2)): ... continue || ** For iterables of length one or zero, no progress bar is plotted: >>> for i in progressbar(range(1)): ... continue The same is true when the `printprogress` option is inactivated: >>> pub.options.printprogress = False >>> for i in progressbar(range(100)): ... continue """ if hydpy.pub.options.printprogress and len(iterable) > 1: temp_name = os.path.join(tempfile.gettempdir(), 'HydPy_progressbar_stdout') temp_stdout = open(temp_name, 'w') real_stdout = sys.stdout try: sys.stdout = temp_stdout nmbstars = min(len(iterable), length) nmbcounts = len(iterable) / nmbstars indentation = ' ' * max(_printprogress_indentation, 0) with PrintStyle(color=36, font=1, file=real_stdout): print('    %s|%s|\n%s    ' % (indentation, '-' * (nmbstars - 2), indentation), end='', file=real_stdout) counts = 1.0 for next_ in iterable: counts += 1.0 if counts >= nmbcounts: print(end='*', file=real_stdout) counts -= nmbcounts # depends on [control=['if'], data=['counts', 'nmbcounts']] yield next_ # depends on [control=['for'], data=['next_']] # depends on [control=['with'], data=[]] # depends on [control=['try'], data=[]] finally: try: temp_stdout.close() # depends on [control=['try'], data=[]] except BaseException: pass # depends on [control=['except'], data=[]] sys.stdout = real_stdout print() with open(temp_name, 'r') as temp_stdout: sys.stdout.write(temp_stdout.read()) # depends on [control=['with'], data=['temp_stdout']] sys.stdout.flush() # depends on [control=['if'], data=[]] else: for next_ in iterable: yield next_ # depends on [control=['for'], data=['next_']]
def remap_index_fn(ref_file): """Map sequence references to equivalent novoalign indexes. """ checks = [os.path.splitext(ref_file)[0].replace("/seq/", "/novoalign/"), os.path.splitext(ref_file)[0] + ".ndx", ref_file + ".bs.ndx", ref_file + ".ndx"] for check in checks: if os.path.exists(check): return check return checks[0]
def function[remap_index_fn, parameter[ref_file]]: constant[Map sequence references to equivalent novoalign indexes. ] variable[checks] assign[=] list[[<ast.Call object at 0x7da1b2345360>, <ast.BinOp object at 0x7da1b23446a0>, <ast.BinOp object at 0x7da1b170b400>, <ast.BinOp object at 0x7da1b17088e0>]] for taget[name[check]] in starred[name[checks]] begin[:] if call[name[os].path.exists, parameter[name[check]]] begin[:] return[name[check]] return[call[name[checks]][constant[0]]]
keyword[def] identifier[remap_index_fn] ( identifier[ref_file] ): literal[string] identifier[checks] =[ identifier[os] . identifier[path] . identifier[splitext] ( identifier[ref_file] )[ literal[int] ]. identifier[replace] ( literal[string] , literal[string] ), identifier[os] . identifier[path] . identifier[splitext] ( identifier[ref_file] )[ literal[int] ]+ literal[string] , identifier[ref_file] + literal[string] , identifier[ref_file] + literal[string] ] keyword[for] identifier[check] keyword[in] identifier[checks] : keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[check] ): keyword[return] identifier[check] keyword[return] identifier[checks] [ literal[int] ]
def remap_index_fn(ref_file): """Map sequence references to equivalent novoalign indexes. """ checks = [os.path.splitext(ref_file)[0].replace('/seq/', '/novoalign/'), os.path.splitext(ref_file)[0] + '.ndx', ref_file + '.bs.ndx', ref_file + '.ndx'] for check in checks: if os.path.exists(check): return check # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['check']] return checks[0]
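A short illustration of the lookup order in remap_index_fn; the paths are assumptions, and since the function only consults os.path.exists it is safe to probe:

ref = '/data/genomes/hg19/seq/hg19.fa'
idx = remap_index_fn(ref)
# The /novoalign/ sibling of the /seq/ reference is checked first, then the
# .ndx variants; when nothing exists on disk the first candidate,
# '/data/genomes/hg19/novoalign/hg19', is returned so a caller can build it there.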
def getDarkCurrentAverages(exposuretimes, imgs): ''' return exposure times, image averages for each exposure time ''' x, imgs_p = sortForSameExpTime(exposuretimes, imgs) s0, s1 = imgs[0].shape imgs = np.empty(shape=(len(x), s0, s1), dtype=imgs[0].dtype) for i, ip in zip(imgs, imgs_p): if len(ip) == 1: i[:] = ip[0] else: i[:] = averageSameExpTimes(ip) return x, imgs
def function[getDarkCurrentAverages, parameter[exposuretimes, imgs]]: constant[ return exposure times, image averages for each exposure time ] <ast.Tuple object at 0x7da1b11ed0c0> assign[=] call[name[sortForSameExpTime], parameter[name[exposuretimes], name[imgs]]] <ast.Tuple object at 0x7da1b11ef4c0> assign[=] call[name[imgs]][constant[0]].shape variable[imgs] assign[=] call[name[np].empty, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b11efdf0>, <ast.Name object at 0x7da1b11eece0>]]] in starred[call[name[zip], parameter[name[imgs], name[imgs_p]]]] begin[:] if compare[call[name[len], parameter[name[ip]]] equal[==] constant[1]] begin[:] call[name[i]][<ast.Slice object at 0x7da1b11ef6d0>] assign[=] call[name[ip]][constant[0]] return[tuple[[<ast.Name object at 0x7da1b11ed810>, <ast.Name object at 0x7da1b11ee980>]]]
keyword[def] identifier[getDarkCurrentAverages] ( identifier[exposuretimes] , identifier[imgs] ): literal[string] identifier[x] , identifier[imgs_p] = identifier[sortForSameExpTime] ( identifier[exposuretimes] , identifier[imgs] ) identifier[s0] , identifier[s1] = identifier[imgs] [ literal[int] ]. identifier[shape] identifier[imgs] = identifier[np] . identifier[empty] ( identifier[shape] =( identifier[len] ( identifier[x] ), identifier[s0] , identifier[s1] ), identifier[dtype] = identifier[imgs] [ literal[int] ]. identifier[dtype] ) keyword[for] identifier[i] , identifier[ip] keyword[in] identifier[zip] ( identifier[imgs] , identifier[imgs_p] ): keyword[if] identifier[len] ( identifier[ip] )== literal[int] : identifier[i] [:]= identifier[ip] [ literal[int] ] keyword[else] : identifier[i] [:]= identifier[averageSameExpTimes] ( identifier[ip] ) keyword[return] identifier[x] , identifier[imgs]
def getDarkCurrentAverages(exposuretimes, imgs): """ return exposure times, image averages for each exposure time """ (x, imgs_p) = sortForSameExpTime(exposuretimes, imgs) (s0, s1) = imgs[0].shape imgs = np.empty(shape=(len(x), s0, s1), dtype=imgs[0].dtype) for (i, ip) in zip(imgs, imgs_p): if len(ip) == 1: i[:] = ip[0] # depends on [control=['if'], data=[]] else: i[:] = averageSameExpTimes(ip) # depends on [control=['for'], data=[]] return (x, imgs)
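A minimal sketch with synthetic frames, assuming sortForSameExpTime and averageSameExpTimes are importable from the same module as getDarkCurrentAverages:

import numpy as np

frames = [np.full((2, 2), v, dtype=np.uint16) for v in (10, 12, 20)]
times, avgs = getDarkCurrentAverages([0.1, 0.1, 0.5], frames)
# times -> [0.1, 0.5]; avgs[0] holds the pixel-wise average of the two 0.1 s
# frames, while avgs[1] is the lone 0.5 s frame copied through unchanged.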
def shot(self, **kwargs): # type: (Any) -> str """ Helper to save the screen shot of the 1st monitor, by default. You can pass the same arguments as for ``save``. """ kwargs["mon"] = kwargs.get("mon", 1) return next(self.save(**kwargs))
def function[shot, parameter[self]]: constant[ Helper to save the screen shot of the 1st monitor, by default. You can pass the same arguments as for ``save``. ] call[name[kwargs]][constant[mon]] assign[=] call[name[kwargs].get, parameter[constant[mon], constant[1]]] return[call[name[next], parameter[call[name[self].save, parameter[]]]]]
keyword[def] identifier[shot] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ) keyword[return] identifier[next] ( identifier[self] . identifier[save] (** identifier[kwargs] ))
def shot(self, **kwargs): # type: (Any) -> str '\n Helper to save the screen shot of the 1st monitor, by default.\n You can pass the same arguments as for ``save``.\n ' kwargs['mon'] = kwargs.get('mon', 1) return next(self.save(**kwargs))
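This method matches the mss screenshot library's helper of the same name; assuming that context, a grounded call looks like:

import mss

with mss.mss() as sct:
    path = sct.shot()  # grabs monitor 1 and returns the saved file name
    print(path)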
def _status_apf(): ''' Return True if apf is running otherwise return False ''' status = 0 table = iptc.Table(iptc.Table.FILTER) for chain in table.chains: if 'sanity' in chain.name.lower(): status = 1 return True if status else False
def function[_status_apf, parameter[]]: constant[ Return True if apf is running otherwise return False ] variable[status] assign[=] constant[0] variable[table] assign[=] call[name[iptc].Table, parameter[name[iptc].Table.FILTER]] for taget[name[chain]] in starred[name[table].chains] begin[:] if compare[constant[sanity] in call[name[chain].name.lower, parameter[]]] begin[:] variable[status] assign[=] constant[1] return[<ast.IfExp object at 0x7da1b2160550>]
keyword[def] identifier[_status_apf] (): literal[string] identifier[status] = literal[int] identifier[table] = identifier[iptc] . identifier[Table] ( identifier[iptc] . identifier[Table] . identifier[FILTER] ) keyword[for] identifier[chain] keyword[in] identifier[table] . identifier[chains] : keyword[if] literal[string] keyword[in] identifier[chain] . identifier[name] . identifier[lower] (): identifier[status] = literal[int] keyword[return] keyword[True] keyword[if] identifier[status] keyword[else] keyword[False]
def _status_apf(): """ Return True if apf is running otherwise return False """ status = 0 table = iptc.Table(iptc.Table.FILTER) for chain in table.chains: if 'sanity' in chain.name.lower(): status = 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['chain']] return True if status else False
def random(length, chars=None):
    """Generates a random string.

    :param length: Length of the string to generate.
                   This can be a number or a pair: ``(min_length, max_length)``
    :param chars: String of characters to choose from
    """
    if chars is None:
        chars = string.ascii_letters + string.digits
    else:
        ensure_string(chars)
        if not chars:
            raise ValueError("character set must not be empty")

    if is_pair(length):
        length = randint(*length)
    elif isinstance(length, Integral):
        if not length > 0:
            raise ValueError(
                "random string length must be positive (got %r)" % (length,))
    else:
        raise TypeError("random string length must be an integer; "
                        "got '%s'" % type(length).__name__)

    return join(chars.__class__(), (choice(chars) for _ in xrange(length)))
def function[random, parameter[length, chars]]: constant[Generates a random string. :param length: Length of the string to generate. This can be a number or a pair: ``(min_length, max_length)`` :param chars: String of characters to choose from ] if compare[name[chars] is constant[None]] begin[:] variable[chars] assign[=] binary_operation[name[string].ascii_letters + name[string].digits] if call[name[is_pair], parameter[name[length]]] begin[:] variable[length] assign[=] call[name[randint], parameter[<ast.Starred object at 0x7da1b209c550>]] return[call[name[join], parameter[call[name[chars].__class__, parameter[]], <ast.GeneratorExp object at 0x7da1b20558d0>]]]
keyword[def] identifier[random] ( identifier[length] , identifier[chars] = keyword[None] ): literal[string] keyword[if] identifier[chars] keyword[is] keyword[None] : identifier[chars] = identifier[string] . identifier[ascii_letters] + identifier[string] . identifier[digits] keyword[else] : identifier[ensure_string] ( identifier[chars] ) keyword[if] keyword[not] identifier[chars] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[is_pair] ( identifier[length] ): identifier[length] = identifier[randint] (* identifier[length] ) keyword[elif] identifier[isinstance] ( identifier[length] , identifier[Integral] ): keyword[if] keyword[not] identifier[length] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[length] ,)) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] literal[string] % identifier[type] ( identifier[length] ). identifier[__name__] ) keyword[return] identifier[join] ( identifier[chars] . identifier[__class__] (),( identifier[choice] ( identifier[chars] ) keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[length] )))
def random(length, chars=None): """Generates a random string. :param length: Length of the string to generate. This can be a number or a pair: ``(min_length, max_length)`` :param chars: String of characters to choose from """ if chars is None: chars = string.ascii_letters + string.digits # depends on [control=['if'], data=['chars']] else: ensure_string(chars) if not chars: raise ValueError('character set must not be empty') # depends on [control=['if'], data=[]] if is_pair(length): length = randint(*length) # depends on [control=['if'], data=[]] elif isinstance(length, Integral): if not length > 0: raise ValueError('random string length must be positive (got %r)' % (length,)) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise TypeError("random string length must be an integer; got '%s'" % type(length).__name__) return join(chars.__class__(), (choice(chars) for _ in xrange(length)))
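Two example calls against the helper above (note that it shadows the stdlib random module, so it has to be imported under this name deliberately):

print(random(8))                     # 8 characters from letters and digits
print(random((4, 10), chars='abc'))  # between 4 and 10 characters drawn from 'abc'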
def base_elts(elt, cls=None, depth=None):
    """Get base elements of the input elt.

    - If elt is an instance, get class and all base classes.
    - If elt is a method, get all base methods.
    - If elt is a class, get all base classes.
    - In other case, get an empty list.

    :param elt: supposed inherited elt.
    :param cls: cls from which to find attributes equal to elt. If None, it
        is inferred where possible. Required in python3 for function classes.
    :type cls: type or list
    :param int depth: search depth. If None (default), depth is maximal.
    :return: elt base elements. If elt has no base elements, the result is
        empty.
    :rtype: list
    """
    result = []
    elt_name = getattr(elt, '__name__', None)
    if elt_name is not None:
        cls = [] if cls is None else ensureiterable(cls)
        elt_is_class = False
        # if cls is None and elt is routine, it is possible to find the cls
        if not cls and isroutine(elt):
            if hasattr(elt, '__self__'):  # from the instance
                instance = get_method_self(elt)  # get instance
                if instance is None and PY2:  # get base im_class if PY2
                    cls = list(elt.im_class.__bases__)
                else:  # use instance class
                    cls = [instance.__class__]
        # cls is elt if elt is a class
        elif isclass(elt):
            elt_is_class = True
            cls = list(elt.__bases__)
        if cls:  # if cls is not empty, find all base classes
            index_of_found_classes = 0  # get last visited class index
            visited_classes = set(cls)  # cache for visited classes
            len_classes = len(cls)
            if depth is None:  # if depth is None, get maximal value
                depth = -1  # set negative value
            while depth != 0 and index_of_found_classes != len_classes:
                len_classes = len(cls)
                for index in range(index_of_found_classes, len_classes):
                    _cls = cls[index]
                    for base_cls in _cls.__bases__:
                        if base_cls in visited_classes:
                            continue
                        else:
                            visited_classes.add(base_cls)
                            cls.append(base_cls)
                index_of_found_classes = len_classes
                depth -= 1
        if elt_is_class:  # if cls is elt, result is classes minus first class
            result = cls
        elif isroutine(elt):
            # get an elt to compare with found element
            if ismethod(elt):
                elt_to_compare = get_method_function(elt)
            else:
                elt_to_compare = elt
            for _cls in cls:  # for all classes
                # get possible base elt
                b_elt = getattr(_cls, elt_name, None)
                if b_elt is not None:
                    # compare funcs
                    if ismethod(b_elt):
                        bec = get_method_function(b_elt)
                    else:
                        bec = b_elt
                    # if matching, add to result
                    if bec is elt_to_compare:
                        result.append(b_elt)
    return result
def function[base_elts, parameter[elt, cls, depth]]: constant[Get base elements of the input elt. - If elt is an instance, get class and all base classes. - If elt is a method, get all base methods. - If elt is a class, get all base classes. - In other case, get an empty list. :param elt: supposed inherited elt. :param cls: cls from which to find attributes equal to elt. If None, it is inferred where possible. Required in python3 for function classes. :type cls: type or list :param int depth: search depth. If None (default), depth is maximal. :return: elt base elements. If elt has no base elements, the result is empty. :rtype: list ] variable[result] assign[=] list[[]] variable[elt_name] assign[=] call[name[getattr], parameter[name[elt], constant[__name__], constant[None]]] if compare[name[elt_name] is_not constant[None]] begin[:] variable[cls] assign[=] <ast.IfExp object at 0x7da1b26ad900> variable[elt_is_class] assign[=] constant[False] if <ast.BoolOp object at 0x7da1b26adc60> begin[:] if call[name[hasattr], parameter[name[elt], constant[__self__]]] begin[:] variable[instance] assign[=] call[name[get_method_self], parameter[name[elt]]] if <ast.BoolOp object at 0x7da1b26ae320> begin[:] variable[cls] assign[=] call[name[list], parameter[name[elt].im_class.__bases__]] if name[cls] begin[:] variable[index_of_found_classes] assign[=] constant[0] variable[visited_classes] assign[=] call[name[set], parameter[name[cls]]] variable[len_classes] assign[=] call[name[len], parameter[name[cls]]] if compare[name[depth] is constant[None]] begin[:] variable[depth] assign[=] <ast.UnaryOp object at 0x7da18dc988b0> while <ast.BoolOp object at 0x7da18dc994b0> begin[:] variable[len_classes] assign[=] call[name[len], parameter[name[cls]]] for taget[name[index]] in starred[call[name[range], parameter[name[index_of_found_classes], name[len_classes]]]] begin[:] variable[_cls] assign[=] call[name[cls]][name[index]] for taget[name[base_cls]] in starred[name[_cls].__bases__] begin[:] if compare[name[base_cls] in name[visited_classes]] begin[:] continue variable[index_of_found_classes] assign[=] name[len_classes] <ast.AugAssign object at 0x7da18dc9b130> if name[elt_is_class] begin[:] variable[result] assign[=] name[cls] return[name[result]]
keyword[def] identifier[base_elts] ( identifier[elt] , identifier[cls] = keyword[None] , identifier[depth] = keyword[None] ): literal[string] identifier[result] =[] identifier[elt_name] = identifier[getattr] ( identifier[elt] , literal[string] , keyword[None] ) keyword[if] identifier[elt_name] keyword[is] keyword[not] keyword[None] : identifier[cls] =[] keyword[if] identifier[cls] keyword[is] keyword[None] keyword[else] identifier[ensureiterable] ( identifier[cls] ) identifier[elt_is_class] = keyword[False] keyword[if] keyword[not] identifier[cls] keyword[and] identifier[isroutine] ( identifier[elt] ): keyword[if] identifier[hasattr] ( identifier[elt] , literal[string] ): identifier[instance] = identifier[get_method_self] ( identifier[elt] ) keyword[if] identifier[instance] keyword[is] keyword[None] keyword[and] identifier[PY2] : identifier[cls] = identifier[list] ( identifier[elt] . identifier[im_class] . identifier[__bases__] ) keyword[else] : identifier[cls] =[ identifier[instance] . identifier[__class__] ] keyword[elif] identifier[isclass] ( identifier[elt] ): identifier[elt_is_class] = keyword[True] identifier[cls] = identifier[list] ( identifier[elt] . identifier[__bases__] ) keyword[if] identifier[cls] : identifier[index_of_found_classes] = literal[int] identifier[visited_classes] = identifier[set] ( identifier[cls] ) identifier[len_classes] = identifier[len] ( identifier[cls] ) keyword[if] identifier[depth] keyword[is] keyword[None] : identifier[depth] =- literal[int] keyword[while] identifier[depth] != literal[int] keyword[and] identifier[index_of_found_classes] != identifier[len_classes] : identifier[len_classes] = identifier[len] ( identifier[cls] ) keyword[for] identifier[index] keyword[in] identifier[range] ( identifier[index_of_found_classes] , identifier[len_classes] ): identifier[_cls] = identifier[cls] [ identifier[index] ] keyword[for] identifier[base_cls] keyword[in] identifier[_cls] . identifier[__bases__] : keyword[if] identifier[base_cls] keyword[in] identifier[visited_classes] : keyword[continue] keyword[else] : identifier[visited_classes] . identifier[add] ( identifier[base_cls] ) identifier[cls] . identifier[append] ( identifier[base_cls] ) identifier[index_of_found_classes] = identifier[len_classes] identifier[depth] -= literal[int] keyword[if] identifier[elt_is_class] : identifier[result] = identifier[cls] keyword[elif] identifier[isroutine] ( identifier[elt] ): keyword[if] identifier[ismethod] ( identifier[elt] ): identifier[elt_to_compare] = identifier[get_method_function] ( identifier[elt] ) keyword[else] : identifier[elt_to_compare] = identifier[elt] keyword[for] identifier[_cls] keyword[in] identifier[cls] : identifier[b_elt] = identifier[getattr] ( identifier[_cls] , identifier[elt_name] , keyword[None] ) keyword[if] identifier[b_elt] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[ismethod] ( identifier[b_elt] ): identifier[bec] = identifier[get_method_function] ( identifier[b_elt] ) keyword[else] : identifier[bec] = identifier[b_elt] keyword[if] identifier[bec] keyword[is] identifier[elt_to_compare] : identifier[result] . identifier[append] ( identifier[b_elt] ) keyword[return] identifier[result]
def base_elts(elt, cls=None, depth=None): """Get base elements of the input elt. - If elt is an instance, get class and all base classes. - If elt is a method, get all base methods. - If elt is a class, get all base classes. - In other case, get an empty list. :param elt: supposed inherited elt. :param cls: cls from which to find attributes equal to elt. If None, it is inferred where possible. Required in python3 for function classes. :type cls: type or list :param int depth: search depth. If None (default), depth is maximal. :return: elt base elements. If elt has no base elements, the result is empty. :rtype: list """ result = [] elt_name = getattr(elt, '__name__', None) if elt_name is not None: cls = [] if cls is None else ensureiterable(cls) elt_is_class = False # if cls is None and elt is routine, it is possible to find the cls if not cls and isroutine(elt): if hasattr(elt, '__self__'): # from the instance instance = get_method_self(elt) # get instance if instance is None and PY2: # get base im_class if PY2 cls = list(elt.im_class.__bases__) # depends on [control=['if'], data=[]] else: # use instance class cls = [instance.__class__] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # cls is elt if elt is a class elif isclass(elt): elt_is_class = True cls = list(elt.__bases__) # depends on [control=['if'], data=[]] if cls: # if cls is not empty, find all base classes index_of_found_classes = 0 # get last visited class index visited_classes = set(cls) # cache for visited classes len_classes = len(cls) if depth is None: # if depth is None, get maximal value depth = -1 # set negative value # depends on [control=['if'], data=['depth']] while depth != 0 and index_of_found_classes != len_classes: len_classes = len(cls) for index in range(index_of_found_classes, len_classes): _cls = cls[index] for base_cls in _cls.__bases__: if base_cls in visited_classes: continue # depends on [control=['if'], data=[]] else: visited_classes.add(base_cls) cls.append(base_cls) # depends on [control=['for'], data=['base_cls']] # depends on [control=['for'], data=['index']] index_of_found_classes = len_classes depth -= 1 # depends on [control=['while'], data=[]] if elt_is_class: # if cls is elt, result is classes minus first class result = cls # depends on [control=['if'], data=[]] elif isroutine(elt): # get an elt to compare with found element if ismethod(elt): elt_to_compare = get_method_function(elt) # depends on [control=['if'], data=[]] else: elt_to_compare = elt for _cls in cls: # for all classes # get possible base elt b_elt = getattr(_cls, elt_name, None) if b_elt is not None: # compare funcs if ismethod(b_elt): bec = get_method_function(b_elt) # depends on [control=['if'], data=[]] else: bec = b_elt # if matching, add to result if bec is elt_to_compare: result.append(b_elt) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['b_elt']] # depends on [control=['for'], data=['_cls']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['elt_name']] return result
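A quick sketch of the class branch of base_elts; for a class input none of the module's helper functions are reached, so this should hold as written:

class A(object):
    pass

class B(A):
    pass

# base_elts walks B.__bases__ transitively, collecting each base once.
assert base_elts(B) == [A, object]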
def builds(self, confs): """For retro compatibility directly assigning builds""" self._named_builds = {} self._builds = [] for values in confs: if len(values) == 2: self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference)) elif len(values) == 4: self._builds.append(BuildConf(values[0], values[1], values[2], values[3], self.reference)) elif len(values) != 5: raise Exception("Invalid build configuration, has to be a tuple of " "(settings, options, env_vars, build_requires, reference)") else: self._builds.append(BuildConf(*values))
def function[builds, parameter[self, confs]]: constant[For retro compatibility directly assigning builds] name[self]._named_builds assign[=] dictionary[[], []] name[self]._builds assign[=] list[[]] for taget[name[values]] in starred[name[confs]] begin[:] if compare[call[name[len], parameter[name[values]]] equal[==] constant[2]] begin[:] call[name[self]._builds.append, parameter[call[name[BuildConf], parameter[call[name[values]][constant[0]], call[name[values]][constant[1]], dictionary[[], []], dictionary[[], []], name[self].reference]]]]
keyword[def] identifier[builds] ( identifier[self] , identifier[confs] ): literal[string] identifier[self] . identifier[_named_builds] ={} identifier[self] . identifier[_builds] =[] keyword[for] identifier[values] keyword[in] identifier[confs] : keyword[if] identifier[len] ( identifier[values] )== literal[int] : identifier[self] . identifier[_builds] . identifier[append] ( identifier[BuildConf] ( identifier[values] [ literal[int] ], identifier[values] [ literal[int] ],{},{}, identifier[self] . identifier[reference] )) keyword[elif] identifier[len] ( identifier[values] )== literal[int] : identifier[self] . identifier[_builds] . identifier[append] ( identifier[BuildConf] ( identifier[values] [ literal[int] ], identifier[values] [ literal[int] ], identifier[values] [ literal[int] ], identifier[values] [ literal[int] ], identifier[self] . identifier[reference] )) keyword[elif] identifier[len] ( identifier[values] )!= literal[int] : keyword[raise] identifier[Exception] ( literal[string] literal[string] ) keyword[else] : identifier[self] . identifier[_builds] . identifier[append] ( identifier[BuildConf] (* identifier[values] ))
def builds(self, confs): """For retro compatibility directly assigning builds""" self._named_builds = {} self._builds = [] for values in confs: if len(values) == 2: self._builds.append(BuildConf(values[0], values[1], {}, {}, self.reference)) # depends on [control=['if'], data=[]] elif len(values) == 4: self._builds.append(BuildConf(values[0], values[1], values[2], values[3], self.reference)) # depends on [control=['if'], data=[]] elif len(values) != 5: raise Exception('Invalid build configuration, has to be a tuple of (settings, options, env_vars, build_requires, reference)') # depends on [control=['if'], data=[]] else: self._builds.append(BuildConf(*values)) # depends on [control=['for'], data=['values']]
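A hedged usage sketch of this setter; ConanMultiPackager is the class that usually owns it in Conan's package tools, and the settings/options values are illustrative:

packager = ConanMultiPackager()
packager.builds = [
    ({'build_type': 'Release'}, {'shared': True}),         # 2-tuple form
    ({'build_type': 'Debug'}, {'shared': False}, {}, {}),  # 4-tuple form
]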
def _prepare_coords(lons1, lats1, lons2, lats2): """ Convert two pairs of spherical coordinates in decimal degrees to numpy arrays of radians. Makes sure that respective coordinates in pairs have the same shape. """ lons1 = numpy.radians(lons1) lats1 = numpy.radians(lats1) assert lons1.shape == lats1.shape lons2 = numpy.radians(lons2) lats2 = numpy.radians(lats2) assert lons2.shape == lats2.shape return lons1, lats1, lons2, lats2
def function[_prepare_coords, parameter[lons1, lats1, lons2, lats2]]: constant[ Convert two pairs of spherical coordinates in decimal degrees to numpy arrays of radians. Makes sure that respective coordinates in pairs have the same shape. ] variable[lons1] assign[=] call[name[numpy].radians, parameter[name[lons1]]] variable[lats1] assign[=] call[name[numpy].radians, parameter[name[lats1]]] assert[compare[name[lons1].shape equal[==] name[lats1].shape]] variable[lons2] assign[=] call[name[numpy].radians, parameter[name[lons2]]] variable[lats2] assign[=] call[name[numpy].radians, parameter[name[lats2]]] assert[compare[name[lons2].shape equal[==] name[lats2].shape]] return[tuple[[<ast.Name object at 0x7da207f00730>, <ast.Name object at 0x7da207f03160>, <ast.Name object at 0x7da207f03d90>, <ast.Name object at 0x7da207f018a0>]]]
keyword[def] identifier[_prepare_coords] ( identifier[lons1] , identifier[lats1] , identifier[lons2] , identifier[lats2] ): literal[string] identifier[lons1] = identifier[numpy] . identifier[radians] ( identifier[lons1] ) identifier[lats1] = identifier[numpy] . identifier[radians] ( identifier[lats1] ) keyword[assert] identifier[lons1] . identifier[shape] == identifier[lats1] . identifier[shape] identifier[lons2] = identifier[numpy] . identifier[radians] ( identifier[lons2] ) identifier[lats2] = identifier[numpy] . identifier[radians] ( identifier[lats2] ) keyword[assert] identifier[lons2] . identifier[shape] == identifier[lats2] . identifier[shape] keyword[return] identifier[lons1] , identifier[lats1] , identifier[lons2] , identifier[lats2]
def _prepare_coords(lons1, lats1, lons2, lats2): """ Convert two pairs of spherical coordinates in decimal degrees to numpy arrays of radians. Makes sure that respective coordinates in pairs have the same shape. """ lons1 = numpy.radians(lons1) lats1 = numpy.radians(lats1) assert lons1.shape == lats1.shape lons2 = numpy.radians(lons2) lats2 = numpy.radians(lats2) assert lons2.shape == lats2.shape return (lons1, lats1, lons2, lats2)
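A tiny check of the conversion; plain floats work because numpy.radians gives scalars an empty shape, so the shape assertions still pass:

import numpy

lons1, lats1, lons2, lats2 = _prepare_coords(45.0, 0.0, -45.0, 0.0)
assert abs(lons1 - numpy.pi / 4) < 1e-12  # degrees converted to radians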
def start_environment( self, user_name, environment_id, custom_headers=None, raw=False, polling=True, **operation_config): """Starts an environment by starting all resources inside the environment. This operation can take a while to complete. :param user_name: The name of the user. :type user_name: str :param environment_id: The resourceId of the environment :type environment_id: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._start_environment_initial( user_name=user_name, environment_id=environment_id, custom_headers=custom_headers, raw=True, **operation_config ) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response lro_delay = operation_config.get( 'long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) elif polling is False: polling_method = NoPolling() else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
def function[start_environment, parameter[self, user_name, environment_id, custom_headers, raw, polling]]: constant[Starts an environment by starting all resources inside the environment. This operation can take a while to complete. :param user_name: The name of the user. :type user_name: str :param environment_id: The resourceId of the environment :type environment_id: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` ] variable[raw_result] assign[=] call[name[self]._start_environment_initial, parameter[]] def function[get_long_running_output, parameter[response]]: if name[raw] begin[:] variable[client_raw_response] assign[=] call[name[ClientRawResponse], parameter[constant[None], name[response]]] return[name[client_raw_response]] variable[lro_delay] assign[=] call[name[operation_config].get, parameter[constant[long_running_operation_timeout], name[self].config.long_running_operation_timeout]] if compare[name[polling] is constant[True]] begin[:] variable[polling_method] assign[=] call[name[ARMPolling], parameter[name[lro_delay]]] return[call[name[LROPoller], parameter[name[self]._client, name[raw_result], name[get_long_running_output], name[polling_method]]]]
keyword[def] identifier[start_environment] ( identifier[self] , identifier[user_name] , identifier[environment_id] , identifier[custom_headers] = keyword[None] , identifier[raw] = keyword[False] , identifier[polling] = keyword[True] ,** identifier[operation_config] ): literal[string] identifier[raw_result] = identifier[self] . identifier[_start_environment_initial] ( identifier[user_name] = identifier[user_name] , identifier[environment_id] = identifier[environment_id] , identifier[custom_headers] = identifier[custom_headers] , identifier[raw] = keyword[True] , ** identifier[operation_config] ) keyword[def] identifier[get_long_running_output] ( identifier[response] ): keyword[if] identifier[raw] : identifier[client_raw_response] = identifier[ClientRawResponse] ( keyword[None] , identifier[response] ) keyword[return] identifier[client_raw_response] identifier[lro_delay] = identifier[operation_config] . identifier[get] ( literal[string] , identifier[self] . identifier[config] . identifier[long_running_operation_timeout] ) keyword[if] identifier[polling] keyword[is] keyword[True] : identifier[polling_method] = identifier[ARMPolling] ( identifier[lro_delay] ,** identifier[operation_config] ) keyword[elif] identifier[polling] keyword[is] keyword[False] : identifier[polling_method] = identifier[NoPolling] () keyword[else] : identifier[polling_method] = identifier[polling] keyword[return] identifier[LROPoller] ( identifier[self] . identifier[_client] , identifier[raw_result] , identifier[get_long_running_output] , identifier[polling_method] )
def start_environment(self, user_name, environment_id, custom_headers=None, raw=False, polling=True, **operation_config): """Starts an environment by starting all resources inside the environment. This operation can take a while to complete. :param user_name: The name of the user. :type user_name: str :param environment_id: The resourceId of the environment :type environment_id: str :param dict custom_headers: headers that will be added to the request :param bool raw: The poller return type is ClientRawResponse, the direct response alongside the deserialized response :param polling: True for ARMPolling, False for no polling, or a polling object for personal polling strategy :return: An instance of LROPoller that returns None or ClientRawResponse<None> if raw==True :rtype: ~msrestazure.azure_operation.AzureOperationPoller[None] or ~msrestazure.azure_operation.AzureOperationPoller[~msrest.pipeline.ClientRawResponse[None]] :raises: :class:`CloudError<msrestazure.azure_exceptions.CloudError>` """ raw_result = self._start_environment_initial(user_name=user_name, environment_id=environment_id, custom_headers=custom_headers, raw=True, **operation_config) def get_long_running_output(response): if raw: client_raw_response = ClientRawResponse(None, response) return client_raw_response # depends on [control=['if'], data=[]] lro_delay = operation_config.get('long_running_operation_timeout', self.config.long_running_operation_timeout) if polling is True: polling_method = ARMPolling(lro_delay, **operation_config) # depends on [control=['if'], data=[]] elif polling is False: polling_method = NoPolling() # depends on [control=['if'], data=[]] else: polling_method = polling return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
def cancel(self): """Cancel the future if possible. Returns True if the future was cancelled, False otherwise. A future cannot be cancelled if it is running or has already completed. """ with self._condition: if self._state in [RUNNING, FINISHED]: return False if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: return True self._state = CANCELLED self._condition.notify_all() self._invoke_callbacks() return True
def function[cancel, parameter[self]]: constant[Cancel the future if possible. Returns True if the future was cancelled, False otherwise. A future cannot be cancelled if it is running or has already completed. ] with name[self]._condition begin[:] if compare[name[self]._state in list[[<ast.Name object at 0x7da20c7cb7c0>, <ast.Name object at 0x7da20c7c8460>]]] begin[:] return[constant[False]] if compare[name[self]._state in list[[<ast.Name object at 0x7da20e74a1a0>, <ast.Name object at 0x7da20e74bf40>]]] begin[:] return[constant[True]] name[self]._state assign[=] name[CANCELLED] call[name[self]._condition.notify_all, parameter[]] call[name[self]._invoke_callbacks, parameter[]] return[constant[True]]
keyword[def] identifier[cancel] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[_condition] : keyword[if] identifier[self] . identifier[_state] keyword[in] [ identifier[RUNNING] , identifier[FINISHED] ]: keyword[return] keyword[False] keyword[if] identifier[self] . identifier[_state] keyword[in] [ identifier[CANCELLED] , identifier[CANCELLED_AND_NOTIFIED] ]: keyword[return] keyword[True] identifier[self] . identifier[_state] = identifier[CANCELLED] identifier[self] . identifier[_condition] . identifier[notify_all] () identifier[self] . identifier[_invoke_callbacks] () keyword[return] keyword[True]
def cancel(self): """Cancel the future if possible. Returns True if the future was cancelled, False otherwise. A future cannot be cancelled if it is running or has already completed. """ with self._condition: if self._state in [RUNNING, FINISHED]: return False # depends on [control=['if'], data=[]] if self._state in [CANCELLED, CANCELLED_AND_NOTIFIED]: return True # depends on [control=['if'], data=[]] self._state = CANCELLED self._condition.notify_all() # depends on [control=['with'], data=[]] self._invoke_callbacks() return True
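The method mirrors the standard-library Future, so its state machine can be observed directly:

from concurrent.futures import Future

f = Future()
assert f.cancel()                 # PENDING futures can be cancelled

g = Future()
g.set_running_or_notify_cancel()  # move to RUNNING
assert not g.cancel()             # RUNNING futures cannot be cancelled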
def skip_read_line(fd, no_eof=False): """ Read the first non-empty line (if any) from the given file object. Return an empty string at EOF, if `no_eof` is False. If it is True, raise the EOFError instead. """ ls = '' while 1: try: line = fd.readline() except EOFError: break if not line: if no_eof: raise EOFError else: break ls = line.strip() if ls and (ls[0] != '#'): break return ls
def function[skip_read_line, parameter[fd, no_eof]]: constant[ Read the first non-empty line (if any) from the given file object. Return an empty string at EOF, if `no_eof` is False. If it is True, raise the EOFError instead. ] variable[ls] assign[=] constant[] while constant[1] begin[:] <ast.Try object at 0x7da18dc98af0> if <ast.UnaryOp object at 0x7da20e74b7f0> begin[:] if name[no_eof] begin[:] <ast.Raise object at 0x7da20e748400> variable[ls] assign[=] call[name[line].strip, parameter[]] if <ast.BoolOp object at 0x7da18c4ce2f0> begin[:] break return[name[ls]]
keyword[def] identifier[skip_read_line] ( identifier[fd] , identifier[no_eof] = keyword[False] ): literal[string] identifier[ls] = literal[string] keyword[while] literal[int] : keyword[try] : identifier[line] = identifier[fd] . identifier[readline] () keyword[except] identifier[EOFError] : keyword[break] keyword[if] keyword[not] identifier[line] : keyword[if] identifier[no_eof] : keyword[raise] identifier[EOFError] keyword[else] : keyword[break] identifier[ls] = identifier[line] . identifier[strip] () keyword[if] identifier[ls] keyword[and] ( identifier[ls] [ literal[int] ]!= literal[string] ): keyword[break] keyword[return] identifier[ls]
def skip_read_line(fd, no_eof=False): """ Read the first non-empty line (if any) from the given file object. Return an empty string at EOF, if `no_eof` is False. If it is True, raise the EOFError instead. """ ls = '' while 1: try: line = fd.readline() # depends on [control=['try'], data=[]] except EOFError: break # depends on [control=['except'], data=[]] if not line: if no_eof: raise EOFError # depends on [control=['if'], data=[]] else: break # depends on [control=['if'], data=[]] ls = line.strip() if ls and ls[0] != '#': break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] return ls
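A self-contained demonstration with an in-memory stream; StringIO.readline returns '' at end-of-file rather than raising, which exercises the not-line branch:

import io

fd = io.StringIO('# a comment\n\nvalue = 1\n')
assert skip_read_line(fd) == 'value = 1'
assert skip_read_line(fd) == ''  # EOF with no_eof=False yields an empty string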
def smsc(self, smscNumber): """ Set the default SMSC number to use when sending SMS messages """ if smscNumber != self._smscNumber: if self.alive: self.write('AT+CSCA="{0}"'.format(smscNumber)) self._smscNumber = smscNumber
def function[smsc, parameter[self, smscNumber]]: constant[ Set the default SMSC number to use when sending SMS messages ] if compare[name[smscNumber] not_equal[!=] name[self]._smscNumber] begin[:] if name[self].alive begin[:] call[name[self].write, parameter[call[constant[AT+CSCA="{0}"].format, parameter[name[smscNumber]]]]] name[self]._smscNumber assign[=] name[smscNumber]
keyword[def] identifier[smsc] ( identifier[self] , identifier[smscNumber] ): literal[string] keyword[if] identifier[smscNumber] != identifier[self] . identifier[_smscNumber] : keyword[if] identifier[self] . identifier[alive] : identifier[self] . identifier[write] ( literal[string] . identifier[format] ( identifier[smscNumber] )) identifier[self] . identifier[_smscNumber] = identifier[smscNumber]
def smsc(self, smscNumber): """ Set the default SMSC number to use when sending SMS messages """ if smscNumber != self._smscNumber: if self.alive: self.write('AT+CSCA="{0}"'.format(smscNumber)) # depends on [control=['if'], data=[]] self._smscNumber = smscNumber # depends on [control=['if'], data=['smscNumber']]
def list_tables(self, exclusive_start_table_name=None, limit=None): """ Invoke the `ListTables`_ function. Returns an array of table names associated with the current account and endpoint. The output from *ListTables* is paginated, with each page returning a maximum of ``100`` table names. :param str exclusive_start_table_name: The first table name that this operation will evaluate. Use the value that was returned for ``LastEvaluatedTableName`` in a previous operation, so that you can obtain the next page of results. :param int limit: A maximum number of table names to return. If this parameter is not specified, the limit is ``100``. .. _ListTables: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_ListTables.html """ payload = {} if exclusive_start_table_name: payload['ExclusiveStartTableName'] = exclusive_start_table_name if limit: payload['Limit'] = limit return self.execute('ListTables', payload)
def function[list_tables, parameter[self, exclusive_start_table_name, limit]]: constant[ Invoke the `ListTables`_ function. Returns an array of table names associated with the current account and endpoint. The output from *ListTables* is paginated, with each page returning a maximum of ``100`` table names. :param str exclusive_start_table_name: The first table name that this operation will evaluate. Use the value that was returned for ``LastEvaluatedTableName`` in a previous operation, so that you can obtain the next page of results. :param int limit: A maximum number of table names to return. If this parameter is not specified, the limit is ``100``. .. _ListTables: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_ListTables.html ] variable[payload] assign[=] dictionary[[], []] if name[exclusive_start_table_name] begin[:] call[name[payload]][constant[ExclusiveStartTableName]] assign[=] name[exclusive_start_table_name] if name[limit] begin[:] call[name[payload]][constant[Limit]] assign[=] name[limit] return[call[name[self].execute, parameter[constant[ListTables], name[payload]]]]
keyword[def] identifier[list_tables] ( identifier[self] , identifier[exclusive_start_table_name] = keyword[None] , identifier[limit] = keyword[None] ): literal[string] identifier[payload] ={} keyword[if] identifier[exclusive_start_table_name] : identifier[payload] [ literal[string] ]= identifier[exclusive_start_table_name] keyword[if] identifier[limit] : identifier[payload] [ literal[string] ]= identifier[limit] keyword[return] identifier[self] . identifier[execute] ( literal[string] , identifier[payload] )
def list_tables(self, exclusive_start_table_name=None, limit=None): """ Invoke the `ListTables`_ function. Returns an array of table names associated with the current account and endpoint. The output from *ListTables* is paginated, with each page returning a maximum of ``100`` table names. :param str exclusive_start_table_name: The first table name that this operation will evaluate. Use the value that was returned for ``LastEvaluatedTableName`` in a previous operation, so that you can obtain the next page of results. :param int limit: A maximum number of table names to return. If this parameter is not specified, the limit is ``100``. .. _ListTables: http://docs.aws.amazon.com/amazondynamodb/ latest/APIReference/API_ListTables.html """ payload = {} if exclusive_start_table_name: payload['ExclusiveStartTableName'] = exclusive_start_table_name # depends on [control=['if'], data=[]] if limit: payload['Limit'] = limit # depends on [control=['if'], data=[]] return self.execute('ListTables', payload)
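A hedged pagination sketch; it assumes execute returns the parsed response dict, whose TableNames and LastEvaluatedTableName keys come from the documented DynamoDB ListTables API, and client stands for an instance of this class:

names, start = [], None
while True:
    resp = client.list_tables(exclusive_start_table_name=start, limit=100)
    names.extend(resp.get('TableNames', []))
    start = resp.get('LastEvaluatedTableName')
    if not start:
        break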
def srcnode(self): """If this node is in a build path, return the node corresponding to its source file. Otherwise, return ourself. """ srcdir_list = self.dir.srcdir_list() if srcdir_list: srcnode = srcdir_list[0].Entry(self.name) srcnode.must_be_same(self.__class__) return srcnode return self
def function[srcnode, parameter[self]]: constant[If this node is in a build path, return the node corresponding to its source file. Otherwise, return ourself. ] variable[srcdir_list] assign[=] call[name[self].dir.srcdir_list, parameter[]] if name[srcdir_list] begin[:] variable[srcnode] assign[=] call[call[name[srcdir_list]][constant[0]].Entry, parameter[name[self].name]] call[name[srcnode].must_be_same, parameter[name[self].__class__]] return[name[srcnode]] return[name[self]]
keyword[def] identifier[srcnode] ( identifier[self] ): literal[string] identifier[srcdir_list] = identifier[self] . identifier[dir] . identifier[srcdir_list] () keyword[if] identifier[srcdir_list] : identifier[srcnode] = identifier[srcdir_list] [ literal[int] ]. identifier[Entry] ( identifier[self] . identifier[name] ) identifier[srcnode] . identifier[must_be_same] ( identifier[self] . identifier[__class__] ) keyword[return] identifier[srcnode] keyword[return] identifier[self]
def srcnode(self): """If this node is in a build path, return the node corresponding to its source file. Otherwise, return ourself. """ srcdir_list = self.dir.srcdir_list() if srcdir_list: srcnode = srcdir_list[0].Entry(self.name) srcnode.must_be_same(self.__class__) return srcnode # depends on [control=['if'], data=[]] return self
def fastp_read_n_plot(self): """ Make the read N content plot for Fastp """ data_labels, pdata = self.filter_pconfig_pdata_subplots(self.fastp_n_content_data, 'Base Content Percent') pconfig = { 'id': 'fastp-seq-content-n-plot', 'title': 'Fastp: Read N Content', 'xlab': 'Read Position', 'ylab': 'R1 Before filtering: Base Content Percent', 'yCeiling': 100, 'yMinRange': 5, 'ymin': 0, 'xDecimals': False, 'yLabelFormat': '{value}%', 'tt_label': '{point.x}: {point.y:.2f}%', 'data_labels': data_labels } return linegraph.plot(pdata, pconfig)
def function[fastp_read_n_plot, parameter[self]]: constant[ Make the read N content plot for Fastp ] <ast.Tuple object at 0x7da18eb55ae0> assign[=] call[name[self].filter_pconfig_pdata_subplots, parameter[name[self].fastp_n_content_data, constant[Base Content Percent]]] variable[pconfig] assign[=] dictionary[[<ast.Constant object at 0x7da18eb55c60>, <ast.Constant object at 0x7da18eb54670>, <ast.Constant object at 0x7da18eb558d0>, <ast.Constant object at 0x7da18eb54fa0>, <ast.Constant object at 0x7da18eb57490>, <ast.Constant object at 0x7da18eb57b20>, <ast.Constant object at 0x7da18eb557b0>, <ast.Constant object at 0x7da18eb565c0>, <ast.Constant object at 0x7da18eb55660>, <ast.Constant object at 0x7da18eb543d0>, <ast.Constant object at 0x7da18eb54550>], [<ast.Constant object at 0x7da18eb57760>, <ast.Constant object at 0x7da20c7ca230>, <ast.Constant object at 0x7da20c7cb9a0>, <ast.Constant object at 0x7da20c7cb0d0>, <ast.Constant object at 0x7da20c7caaa0>, <ast.Constant object at 0x7da20c7cbaf0>, <ast.Constant object at 0x7da20c7cada0>, <ast.Constant object at 0x7da20c7caf20>, <ast.Constant object at 0x7da20c7c8c40>, <ast.Constant object at 0x7da20c7c8e20>, <ast.Name object at 0x7da20c7cbd90>]] return[call[name[linegraph].plot, parameter[name[pdata], name[pconfig]]]]
keyword[def] identifier[fastp_read_n_plot] ( identifier[self] ): literal[string] identifier[data_labels] , identifier[pdata] = identifier[self] . identifier[filter_pconfig_pdata_subplots] ( identifier[self] . identifier[fastp_n_content_data] , literal[string] ) identifier[pconfig] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : keyword[False] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[data_labels] } keyword[return] identifier[linegraph] . identifier[plot] ( identifier[pdata] , identifier[pconfig] )
def fastp_read_n_plot(self): """ Make the read N content plot for Fastp """ (data_labels, pdata) = self.filter_pconfig_pdata_subplots(self.fastp_n_content_data, 'Base Content Percent') pconfig = {'id': 'fastp-seq-content-n-plot', 'title': 'Fastp: Read N Content', 'xlab': 'Read Position', 'ylab': 'R1 Before filtering: Base Content Percent', 'yCeiling': 100, 'yMinRange': 5, 'ymin': 0, 'xDecimals': False, 'yLabelFormat': '{value}%', 'tt_label': '{point.x}: {point.y:.2f}%', 'data_labels': data_labels} return linegraph.plot(pdata, pconfig)
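For context, MultiQC's ``linegraph.plot`` takes each dataset as a mapping of sample name to ``{x: y}`` points, and with ``data_labels`` set, ``pdata`` is typically a list of such datasets, one per subplot. A sketch of the expected shapes (sample names and values are made up):

data_labels = [{'name': 'R1: Before filtering'}, {'name': 'R1: After filtering'}]
pdata = [
    {'sample_1': {1: 0.2, 2: 0.1, 3: 0.0}},  # one dataset per data label
    {'sample_1': {1: 0.1, 2: 0.0, 3: 0.0}},
]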
def partial(cls, id, token, *, adapter): """Creates a partial :class:`Webhook`. A partial webhook is just a webhook object with an ID and a token. Parameters ----------- id: :class:`int` The ID of the webhook. token: :class:`str` The authentication token of the webhook. adapter: :class:`WebhookAdapter` The webhook adapter to use when sending requests. This is typically :class:`AsyncWebhookAdapter` for ``aiohttp`` or :class:`RequestsWebhookAdapter` for ``requests``. """ if not isinstance(adapter, WebhookAdapter): raise TypeError('adapter must be a subclass of WebhookAdapter') data = { 'id': id, 'token': token } return cls(data, adapter=adapter)
def function[partial, parameter[cls, id, token]]: constant[Creates a partial :class:`Webhook`. A partial webhook is just a webhook object with an ID and a token. Parameters ----------- id: :class:`int` The ID of the webhook. token: :class:`str` The authentication token of the webhook. adapter: :class:`WebhookAdapter` The webhook adapter to use when sending requests. This is typically :class:`AsyncWebhookAdapter` for ``aiohttp`` or :class:`RequestsWebhookAdapter` for ``requests``. ] if <ast.UnaryOp object at 0x7da1b2041690> begin[:] <ast.Raise object at 0x7da1b2042920> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2040ee0>, <ast.Constant object at 0x7da1b2042aa0>], [<ast.Name object at 0x7da1b2042470>, <ast.Name object at 0x7da1b2042f80>]] return[call[name[cls], parameter[name[data]]]]
keyword[def] identifier[partial] ( identifier[cls] , identifier[id] , identifier[token] ,*, identifier[adapter] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[adapter] , identifier[WebhookAdapter] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[data] ={ literal[string] : identifier[id] , literal[string] : identifier[token] } keyword[return] identifier[cls] ( identifier[data] , identifier[adapter] = identifier[adapter] )
def partial(cls, id, token, *, adapter): """Creates a partial :class:`Webhook`. A partial webhook is just a webhook object with an ID and a token. Parameters ----------- id: :class:`int` The ID of the webhook. token: :class:`str` The authentication token of the webhook. adapter: :class:`WebhookAdapter` The webhook adapter to use when sending requests. This is typically :class:`AsyncWebhookAdapter` for ``aiohttp`` or :class:`RequestsWebhookAdapter` for ``requests``. """ if not isinstance(adapter, WebhookAdapter): raise TypeError('adapter must be a subclass of WebhookAdapter') # depends on [control=['if'], data=[]] data = {'id': id, 'token': token} return cls(data, adapter=adapter)
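Typical usage in discord.py 1.x pairs the partial webhook with a concrete adapter; the ID and token below are placeholders:

import discord

webhook = discord.Webhook.partial(123456789, 'my-token',
                                  adapter=discord.RequestsWebhookAdapter())
webhook.send('Hello from a partial webhook')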
def disable_host_event_handler(self, host): """Disable event handlers for a host Format of the line that triggers function call:: DISABLE_HOST_EVENT_HANDLER;<host_name> :param host: host to edit :type host: alignak.objects.host.Host :return: None """ if host.event_handler_enabled: host.modified_attributes |= DICT_MODATTR["MODATTR_EVENT_HANDLER_ENABLED"].value host.event_handler_enabled = False self.send_an_element(host.get_update_status_brok())
def function[disable_host_event_handler, parameter[self, host]]: constant[Disable event handlers for a host Format of the line that triggers function call:: DISABLE_HOST_EVENT_HANDLER;<host_name> :param host: host to edit :type host: alignak.objects.host.Host :return: None ] if name[host].event_handler_enabled begin[:] <ast.AugAssign object at 0x7da18f58ecb0> name[host].event_handler_enabled assign[=] constant[False] call[name[self].send_an_element, parameter[call[name[host].get_update_status_brok, parameter[]]]]
keyword[def] identifier[disable_host_event_handler] ( identifier[self] , identifier[host] ): literal[string] keyword[if] identifier[host] . identifier[event_handler_enabled] : identifier[host] . identifier[modified_attributes] |= identifier[DICT_MODATTR] [ literal[string] ]. identifier[value] identifier[host] . identifier[event_handler_enabled] = keyword[False] identifier[self] . identifier[send_an_element] ( identifier[host] . identifier[get_update_status_brok] ())
def disable_host_event_handler(self, host): """Disable event handlers for a host Format of the line that triggers function call:: DISABLE_HOST_EVENT_HANDLER;<host_name> :param host: host to edit :type host: alignak.objects.host.Host :return: None """ if host.event_handler_enabled: host.modified_attributes |= DICT_MODATTR['MODATTR_EVENT_HANDLER_ENABLED'].value host.event_handler_enabled = False self.send_an_element(host.get_update_status_brok()) # depends on [control=['if'], data=[]]
def render_POST(self, request):
        """Dispatch method called by twisted render; creates a
        request/response handler chain.

        request -- twisted.web.server.Request
        """
        from twisted.internet.defer import maybeDeferred
        chain = self.factory.newInstance()
        data = request.content.read()
        d = maybeDeferred(chain.processRequest, data, request=request, resource=self)
        d.addCallback(chain.processResponse, request=request, resource=self)
        d.addCallback(self._writeResponse, request)
        d.addErrback(self._writeFault, request)
        return NOT_DONE_YET
def function[render_POST, parameter[self, request]]: constant[Dispatch Method called by twisted render, creates a request/response handler chain. request -- twisted.web.server.Request ] from relative_module[twisted.internet.defer] import module[maybeDeferred] variable[chain] assign[=] call[name[self].factory.newInstance, parameter[]] variable[data] assign[=] call[name[request].content.read, parameter[]] variable[d] assign[=] call[name[maybeDeferred], parameter[name[chain].processRequest, name[data]]] call[name[d].addCallback, parameter[name[chain].processResponse]] call[name[d].addCallback, parameter[name[self]._writeResponse, name[request]]] call[name[d].addErrback, parameter[name[self]._writeFault, name[request]]] return[name[NOT_DONE_YET]]
keyword[def] identifier[render_POST] ( identifier[self] , identifier[request] ): literal[string] keyword[from] identifier[twisted] . identifier[internet] . identifier[defer] keyword[import] identifier[maybeDeferred] identifier[chain] = identifier[self] . identifier[factory] . identifier[newInstance] () identifier[data] = identifier[request] . identifier[content] . identifier[read] () identifier[d] = identifier[maybeDeferred] ( identifier[chain] . identifier[processRequest] , identifier[data] , identifier[request] = identifier[request] , identifier[resource] = identifier[self] ) identifier[d] . identifier[addCallback] ( identifier[chain] . identifier[processResponse] , identifier[request] = identifier[request] , identifier[resource] = identifier[self] ) identifier[d] . identifier[addCallback] ( identifier[self] . identifier[_writeResponse] , identifier[request] ) identifier[d] . identifier[addErrback] ( identifier[self] . identifier[_writeFault] , identifier[request] ) keyword[return] identifier[NOT_DONE_YET]
def render_POST(self, request):
    """Dispatch method called by twisted render; creates a
        request/response handler chain.

        request -- twisted.web.server.Request
        """
    from twisted.internet.defer import maybeDeferred
    chain = self.factory.newInstance()
    data = request.content.read()
    d = maybeDeferred(chain.processRequest, data, request=request, resource=self)
    d.addCallback(chain.processResponse, request=request, resource=self)
    d.addCallback(self._writeResponse, request)
    d.addErrback(self._writeFault, request)
    return NOT_DONE_YET
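The chaining idiom here, ``maybeDeferred`` followed by ``addCallback``/``addErrback``, works the same outside a resource. A small self-contained sketch of the pattern (synchronous Deferreds fire without a reactor):

from twisted.internet.defer import maybeDeferred

def process_request(data):
    return data.upper()  # may equally return a Deferred

def process_response(result):
    return result + '!'

d = maybeDeferred(process_request, 'ping')
d.addCallback(process_response)
d.addCallback(print)  # prints: PING!
d.addErrback(lambda failure: print('fault:', failure))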
def design_stat_extremes(self, value="Extremes"):
        """Corresponds to IDD Field `design_stat_extremes`

        Args:
            value (str): value for IDD Field `design_stat_extremes`
                Accepted values are:
                      - Extremes
                Default value: Extremes
                if `value` is None it will not be checked against the
                specification and is assumed to be a missing value

        Raises:
            ValueError: if `value` is not a valid value

        """
        if value is not None:
            try:
                value = str(value)
            except ValueError:
                raise ValueError(
                    'value {} needs to be of type str '
                    'for field `design_stat_extremes`'.format(value))
            if ',' in value:
                raise ValueError('value should not contain a comma '
                                 'for field `design_stat_extremes`')
            vals = set()
            vals.add("Extremes")
            if value not in vals:
                raise ValueError('value {} is not an accepted value for '
                                 'field `design_stat_extremes`'.format(value))

        self._design_stat_extremes = value
def function[design_stat_extremes, parameter[self, value]]: constant[Corresponds to IDD Field `design_stat_extremes` Args: value (str): value for IDD Field `design_stat_extremes` Accepted values are: - Extremes Default value: Extremes if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b0fed9f0> if compare[constant[,] in name[value]] begin[:] <ast.Raise object at 0x7da1b0fed7e0> variable[vals] assign[=] call[name[set], parameter[]] call[name[vals].add, parameter[constant[Extremes]]] if compare[name[value] <ast.NotIn object at 0x7da2590d7190> name[vals]] begin[:] <ast.Raise object at 0x7da1b0feca30> name[self]._design_stat_extremes assign[=] name[value]
keyword[def] identifier[design_stat_extremes] ( identifier[self] , identifier[value] = literal[string] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[str] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) keyword[if] literal[string] keyword[in] identifier[value] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[vals] = identifier[set] () identifier[vals] . identifier[add] ( literal[string] ) keyword[if] identifier[value] keyword[not] keyword[in] identifier[vals] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) identifier[self] . identifier[_design_stat_extremes] = identifier[value]
def design_stat_extremes(self, value='Extremes'):
    """Corresponds to IDD Field `design_stat_extremes`

        Args:
            value (str): value for IDD Field `design_stat_extremes`
                Accepted values are:
                      - Extremes
                Default value: Extremes
                if `value` is None it will not be checked against the
                specification and is assumed to be a missing value

        Raises:
            ValueError: if `value` is not a valid value

        """
    if value is not None:
        try:
            value = str(value) # depends on [control=['try'], data=[]]
        except ValueError:
            raise ValueError('value {} needs to be of type str for field `design_stat_extremes`'.format(value)) # depends on [control=['except'], data=[]]
        if ',' in value:
            raise ValueError('value should not contain a comma for field `design_stat_extremes`') # depends on [control=['if'], data=[]]
        vals = set()
        vals.add('Extremes')
        if value not in vals:
            raise ValueError('value {} is not an accepted value for field `design_stat_extremes`'.format(value)) # depends on [control=['if'], data=['value']] # depends on [control=['if'], data=['value']]
    self._design_stat_extremes = value
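The setter follows a common pyidf validation pattern: coerce to ``str``, reject commas, then check membership in the accepted set. A free-function restatement of the same checks (the function name is illustrative):

def validate_choice(value, field, accepted=('Extremes',)):
    """Mirror of the field validation above as a standalone helper."""
    value = str(value)
    if ',' in value:
        raise ValueError('value should not contain a comma '
                         'for field `{}`'.format(field))
    if value not in accepted:
        raise ValueError('value {} is not an accepted value for '
                         'field `{}`'.format(value, field))
    return value

validate_choice('Extremes', 'design_stat_extremes')   # ok
# validate_choice('Typical', 'design_stat_extremes')  # raises ValueError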
def update(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None, additional_headers=None): """ This method is used to update a resource using the PUT HTTP Method :param searched_resource: A valid display name in the RAML file matching the resource :param uri_parameters: A dictionary with the URI Parameters expected by the resource :param request_body_dict: A dictionary containing the body parameter in the format {'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it :param query_parameters_dict: A dictionary containing optional or mandatory query parameters :param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used with the dfw calls :return: This method returns a dictionary containing the received header and body data NOTE: The _resource_url and _request_body are constructed and passed by the decorator function """ return self._request(searched_resource, 'put', uri_parameters, request_body_dict, query_parameters_dict, additional_headers)
def function[update, parameter[self, searched_resource, uri_parameters, request_body_dict, query_parameters_dict, additional_headers]]: constant[ This method is used to update a resource using the PUT HTTP Method :param searched_resource: A valid display name in the RAML file matching the resource :param uri_parameters: A dictionary with the URI Parameters expected by the resource :param request_body_dict: A dictionary containing the body parameter in the format {'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it :param query_parameters_dict: A dictionary containing optional or mandatory query parameters :param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used with the dfw calls :return: This method returns a dictionary containing the received header and body data NOTE: The _resource_url and _request_body are constructed and passed by the decorator function ] return[call[name[self]._request, parameter[name[searched_resource], constant[put], name[uri_parameters], name[request_body_dict], name[query_parameters_dict], name[additional_headers]]]]
keyword[def] identifier[update] ( identifier[self] , identifier[searched_resource] , identifier[uri_parameters] = keyword[None] , identifier[request_body_dict] = keyword[None] , identifier[query_parameters_dict] = keyword[None] , identifier[additional_headers] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_request] ( identifier[searched_resource] , literal[string] , identifier[uri_parameters] , identifier[request_body_dict] , identifier[query_parameters_dict] , identifier[additional_headers] )
def update(self, searched_resource, uri_parameters=None, request_body_dict=None, query_parameters_dict=None, additional_headers=None): """ This method is used to update a resource using the PUT HTTP Method :param searched_resource: A valid display name in the RAML file matching the resource :param uri_parameters: A dictionary with the URI Parameters expected by the resource :param request_body_dict: A dictionary containing the body parameter in the format {'baseObject': {nested parameters}}. You can use extract_resource_body_schema to create it :param query_parameters_dict: A dictionary containing optional or mandatory query parameters :param additional_headers: a dictionary of additional Headers to send in your request, e.g. if-match used with the dfw calls :return: This method returns a dictionary containing the received header and body data NOTE: The _resource_url and _request_body are constructed and passed by the decorator function """ return self._request(searched_resource, 'put', uri_parameters, request_body_dict, query_parameters_dict, additional_headers)
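A call might look like the following; the resource name, URI parameter, and body are hypothetical and depend entirely on the RAML file loaded into the client:

# `client` is an instance of the class above; every name here is made up.
body = {'securitygroup': {'name': 'web-tier', 'description': 'updated'}}
response = client.update('securityGroup',
                         uri_parameters={'scopeId': 'globalroot-0'},
                         request_body_dict=body)
# `response` is the header/body dict assembled by the decorated _request.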
def detachPanelCopy(self): """ Detaches the current panel as a floating window. """ from projexui.widgets.xviewwidget import XViewDialog dlg = XViewDialog(self._viewWidget, self._viewWidget.viewTypes()) size = self._currentPanel.size() view = self._currentPanel.currentView() # duplicate the current view if view: new_view = view.duplicate(dlg.viewWidget().currentPanel()) view_widget = dlg.viewWidget() view_panel = view_widget.currentPanel() view_panel.addTab(new_view, new_view.windowTitle()) dlg.resize(size) dlg.show()
def function[detachPanelCopy, parameter[self]]: constant[ Detaches the current panel as a floating window. ] from relative_module[projexui.widgets.xviewwidget] import module[XViewDialog] variable[dlg] assign[=] call[name[XViewDialog], parameter[name[self]._viewWidget, call[name[self]._viewWidget.viewTypes, parameter[]]]] variable[size] assign[=] call[name[self]._currentPanel.size, parameter[]] variable[view] assign[=] call[name[self]._currentPanel.currentView, parameter[]] if name[view] begin[:] variable[new_view] assign[=] call[name[view].duplicate, parameter[call[call[name[dlg].viewWidget, parameter[]].currentPanel, parameter[]]]] variable[view_widget] assign[=] call[name[dlg].viewWidget, parameter[]] variable[view_panel] assign[=] call[name[view_widget].currentPanel, parameter[]] call[name[view_panel].addTab, parameter[name[new_view], call[name[new_view].windowTitle, parameter[]]]] call[name[dlg].resize, parameter[name[size]]] call[name[dlg].show, parameter[]]
keyword[def] identifier[detachPanelCopy] ( identifier[self] ): literal[string] keyword[from] identifier[projexui] . identifier[widgets] . identifier[xviewwidget] keyword[import] identifier[XViewDialog] identifier[dlg] = identifier[XViewDialog] ( identifier[self] . identifier[_viewWidget] , identifier[self] . identifier[_viewWidget] . identifier[viewTypes] ()) identifier[size] = identifier[self] . identifier[_currentPanel] . identifier[size] () identifier[view] = identifier[self] . identifier[_currentPanel] . identifier[currentView] () keyword[if] identifier[view] : identifier[new_view] = identifier[view] . identifier[duplicate] ( identifier[dlg] . identifier[viewWidget] (). identifier[currentPanel] ()) identifier[view_widget] = identifier[dlg] . identifier[viewWidget] () identifier[view_panel] = identifier[view_widget] . identifier[currentPanel] () identifier[view_panel] . identifier[addTab] ( identifier[new_view] , identifier[new_view] . identifier[windowTitle] ()) identifier[dlg] . identifier[resize] ( identifier[size] ) identifier[dlg] . identifier[show] ()
def detachPanelCopy(self): """ Detaches the current panel as a floating window. """ from projexui.widgets.xviewwidget import XViewDialog dlg = XViewDialog(self._viewWidget, self._viewWidget.viewTypes()) size = self._currentPanel.size() view = self._currentPanel.currentView() # duplicate the current view if view: new_view = view.duplicate(dlg.viewWidget().currentPanel()) view_widget = dlg.viewWidget() view_panel = view_widget.currentPanel() view_panel.addTab(new_view, new_view.windowTitle()) # depends on [control=['if'], data=[]] dlg.resize(size) dlg.show()
def get_value(kv, key, value=None):
    """Get the value for ``key`` from a list of key/value pairs (options);
    return the matched value (or the default) and the remaining pairs."""
    res = []
    for k, v in kv:
        if k == key:
            value = v
        else:
            res.append([k, v])
    return value, res
def function[get_value, parameter[kv, key, value]]: constant[get value from the keyvalues (options)] variable[res] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da20c7cb520>, <ast.Name object at 0x7da20c7cb220>]]] in starred[name[kv]] begin[:] if compare[name[k] equal[==] name[key]] begin[:] variable[value] assign[=] name[v] return[tuple[[<ast.Name object at 0x7da20c7cb910>, <ast.Name object at 0x7da20c7c9600>]]]
keyword[def] identifier[get_value] ( identifier[kv] , identifier[key] , identifier[value] = keyword[None] ): literal[string] identifier[res] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kv] : keyword[if] identifier[k] == identifier[key] : identifier[value] = identifier[v] keyword[else] : identifier[res] . identifier[append] ([ identifier[k] , identifier[v] ]) keyword[return] identifier[value] , identifier[res]
def get_value(kv, key, value=None):
    """Get the value for ``key`` from a list of key/value pairs (options);
    return the matched value (or the default) and the remaining pairs."""
    res = []
    for (k, v) in kv:
        if k == key:
            value = v # depends on [control=['if'], data=[]]
        else:
            res.append([k, v]) # depends on [control=['for'], data=[]]
    return (value, res)
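Usage is straightforward; the matched pair is consumed from the option list, and the default is returned when the key is absent:

value, rest = get_value([('host', 'db1'), ('port', 5432)], 'host')
print(value)  # 'db1'
print(rest)   # [['port', 5432]]

value, rest = get_value([('port', 5432)], 'host', value='localhost')
print(value)  # 'localhost' (default; key not present)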
def get_url_parameters(self): """Create a dictionary of parameters used in URLs for this model.""" url_fields = {} for field in self.url_fields: url_fields[field] = getattr(self, field) return url_fields
def function[get_url_parameters, parameter[self]]: constant[Create a dictionary of parameters used in URLs for this model.] variable[url_fields] assign[=] dictionary[[], []] for taget[name[field]] in starred[name[self].url_fields] begin[:] call[name[url_fields]][name[field]] assign[=] call[name[getattr], parameter[name[self], name[field]]] return[name[url_fields]]
keyword[def] identifier[get_url_parameters] ( identifier[self] ): literal[string] identifier[url_fields] ={} keyword[for] identifier[field] keyword[in] identifier[self] . identifier[url_fields] : identifier[url_fields] [ identifier[field] ]= identifier[getattr] ( identifier[self] , identifier[field] ) keyword[return] identifier[url_fields]
def get_url_parameters(self): """Create a dictionary of parameters used in URLs for this model.""" url_fields = {} for field in self.url_fields: url_fields[field] = getattr(self, field) # depends on [control=['for'], data=['field']] return url_fields
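Because the method only reads ``self.url_fields`` and the named attributes, it can be exercised with a stand-in instance by calling the function above directly; the class below is illustrative, not from the source:

class Article:
    url_fields = ('year', 'slug')

    def __init__(self, year, slug):
        self.year, self.slug = year, slug

print(get_url_parameters(Article(2024, 'hello-world')))
# {'year': 2024, 'slug': 'hello-world'}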
def normalize(lx):
    """
    Accepts log-values as input, exponentiates them, normalizes and returns
    the result.
    Handles underflow by rescaling so that the largest value is exactly 1.0.
    """
    lx = numpy.asarray(lx)
    base = lx.max()
    x = numpy.exp(lx - base)
    result = x / x.sum()
    conventional = (numpy.exp(lx) / numpy.exp(lx).sum())
    assert similar(result, conventional)
    return result
def function[normalize, parameter[lx]]: constant[ Accepts log-values as input, exponentiates them, normalizes and returns the result. Handles underflow by rescaling so that the largest values is exactly 1.0. ] variable[lx] assign[=] call[name[numpy].asarray, parameter[name[lx]]] variable[base] assign[=] call[name[lx].max, parameter[]] variable[x] assign[=] call[name[numpy].exp, parameter[binary_operation[name[lx] - name[base]]]] variable[result] assign[=] binary_operation[name[x] / call[name[x].sum, parameter[]]] variable[conventional] assign[=] binary_operation[call[name[numpy].exp, parameter[name[lx]]] / call[call[name[numpy].exp, parameter[name[lx]]].sum, parameter[]]] assert[call[name[similar], parameter[name[result], name[conventional]]]] return[name[result]]
keyword[def] identifier[normalize] ( identifier[lx] ): literal[string] identifier[lx] = identifier[numpy] . identifier[asarray] ( identifier[lx] ) identifier[base] = identifier[lx] . identifier[max] () identifier[x] = identifier[numpy] . identifier[exp] ( identifier[lx] - identifier[base] ) identifier[result] = identifier[x] / identifier[x] . identifier[sum] () identifier[conventional] =( identifier[numpy] . identifier[exp] ( identifier[lx] )/ identifier[numpy] . identifier[exp] ( identifier[lx] ). identifier[sum] ()) keyword[assert] identifier[similar] ( identifier[result] , identifier[conventional] ) keyword[return] identifier[result]
def normalize(lx):
    """
    Accepts log-values as input, exponentiates them, normalizes and returns
    the result.
    Handles underflow by rescaling so that the largest value is exactly 1.0.
    """
    lx = numpy.asarray(lx)
    base = lx.max()
    x = numpy.exp(lx - base)
    result = x / x.sum()
    conventional = numpy.exp(lx) / numpy.exp(lx).sum()
    assert similar(result, conventional)
    return result
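Call sites pass values that are already in log space; for moderate magnitudes the rescaled and conventional paths agree, so the internal assertion holds (this sketch assumes the module's ``numpy`` import and ``similar`` helper):

import numpy

lx = numpy.log(numpy.array([1.0, 2.0, 7.0]))
print(normalize(lx))  # [0.1 0.2 0.7]
# The rescaling matters for very negative log-values, where a bare
# numpy.exp(lx) would underflow to zero before normalization.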
def image_height(image): """ Returns the height of the image found at the path supplied by `image` relative to your project's images directory. """ image_size_cache = _get_cache('image_size_cache') if not Image: raise SassMissingDependency('PIL', 'image manipulation') filepath = String.unquoted(image).value path = None try: height = image_size_cache[filepath][1] except KeyError: height = 0 IMAGES_ROOT = _images_root() if callable(IMAGES_ROOT): try: _file, _storage = list(IMAGES_ROOT(filepath))[0] except IndexError: pass else: path = _storage.open(_file) else: _path = os.path.join(IMAGES_ROOT, filepath.strip(os.sep)) if os.path.exists(_path): path = open(_path, 'rb') if path: image = Image.open(path) size = image.size height = size[1] image_size_cache[filepath] = size return Number(height, 'px')
def function[image_height, parameter[image]]: constant[ Returns the height of the image found at the path supplied by `image` relative to your project's images directory. ] variable[image_size_cache] assign[=] call[name[_get_cache], parameter[constant[image_size_cache]]] if <ast.UnaryOp object at 0x7da1b26aec80> begin[:] <ast.Raise object at 0x7da1b26ae1a0> variable[filepath] assign[=] call[name[String].unquoted, parameter[name[image]]].value variable[path] assign[=] constant[None] <ast.Try object at 0x7da1b26af490> return[call[name[Number], parameter[name[height], constant[px]]]]
keyword[def] identifier[image_height] ( identifier[image] ): literal[string] identifier[image_size_cache] = identifier[_get_cache] ( literal[string] ) keyword[if] keyword[not] identifier[Image] : keyword[raise] identifier[SassMissingDependency] ( literal[string] , literal[string] ) identifier[filepath] = identifier[String] . identifier[unquoted] ( identifier[image] ). identifier[value] identifier[path] = keyword[None] keyword[try] : identifier[height] = identifier[image_size_cache] [ identifier[filepath] ][ literal[int] ] keyword[except] identifier[KeyError] : identifier[height] = literal[int] identifier[IMAGES_ROOT] = identifier[_images_root] () keyword[if] identifier[callable] ( identifier[IMAGES_ROOT] ): keyword[try] : identifier[_file] , identifier[_storage] = identifier[list] ( identifier[IMAGES_ROOT] ( identifier[filepath] ))[ literal[int] ] keyword[except] identifier[IndexError] : keyword[pass] keyword[else] : identifier[path] = identifier[_storage] . identifier[open] ( identifier[_file] ) keyword[else] : identifier[_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[IMAGES_ROOT] , identifier[filepath] . identifier[strip] ( identifier[os] . identifier[sep] )) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[_path] ): identifier[path] = identifier[open] ( identifier[_path] , literal[string] ) keyword[if] identifier[path] : identifier[image] = identifier[Image] . identifier[open] ( identifier[path] ) identifier[size] = identifier[image] . identifier[size] identifier[height] = identifier[size] [ literal[int] ] identifier[image_size_cache] [ identifier[filepath] ]= identifier[size] keyword[return] identifier[Number] ( identifier[height] , literal[string] )
def image_height(image): """ Returns the height of the image found at the path supplied by `image` relative to your project's images directory. """ image_size_cache = _get_cache('image_size_cache') if not Image: raise SassMissingDependency('PIL', 'image manipulation') # depends on [control=['if'], data=[]] filepath = String.unquoted(image).value path = None try: height = image_size_cache[filepath][1] # depends on [control=['try'], data=[]] except KeyError: height = 0 IMAGES_ROOT = _images_root() if callable(IMAGES_ROOT): try: (_file, _storage) = list(IMAGES_ROOT(filepath))[0] # depends on [control=['try'], data=[]] except IndexError: pass # depends on [control=['except'], data=[]] else: path = _storage.open(_file) # depends on [control=['if'], data=[]] else: _path = os.path.join(IMAGES_ROOT, filepath.strip(os.sep)) if os.path.exists(_path): path = open(_path, 'rb') # depends on [control=['if'], data=[]] if path: image = Image.open(path) size = image.size height = size[1] image_size_cache[filepath] = size # depends on [control=['if'], data=[]] # depends on [control=['except'], data=[]] return Number(height, 'px')
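The core measurement reduces to ``PIL.Image.open(...).size``. A standalone sketch of the same measure-and-cache step, without the storage lookup (the path handling is illustrative):

from PIL import Image

_size_cache = {}

def image_height_px(path):
    """Height in pixels, cached per path."""
    if path not in _size_cache:
        with Image.open(path) as im:
            _size_cache[path] = im.size  # (width, height)
    return _size_cache[path][1]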
def build_pipeline(cls, project, zones, min_cores, min_ram, disk_size, boot_disk_size, preemptible, accelerator_type, accelerator_count, image, script_name, envs, inputs, outputs, pipeline_name):
    """Builds a pipeline configuration for execution.

    Args:
      project: string name of project.
      zones: list of zone names for jobs to be run at.
      min_cores: int number of CPU cores required per job.
      min_ram: int GB of RAM required per job.
      disk_size: int GB of disk to attach under /mnt/data.
      boot_disk_size: int GB of disk for boot.
      preemptible: use a preemptible VM for the job
      accelerator_type: string GCE defined accelerator type.
      accelerator_count: int number of accelerators of the specified type to
        attach.
      image: string Docker image name in which to run.
      script_name: file name of the script to run.
      envs: list of EnvParam objects specifying environment variables to set
        within each job.
      inputs: list of FileParam objects specifying input variables to set
        within each job.
      outputs: list of FileParam objects specifying output variables to set
        within each job.
      pipeline_name: string name of pipeline.

    Returns:
      A nested dictionary with one entry under the key ephemeralPipeline
      containing the pipeline configuration.
    """
    if min_cores is None:
      min_cores = job_model.DEFAULT_MIN_CORES
    if min_ram is None:
      min_ram = job_model.DEFAULT_MIN_RAM
    if disk_size is None:
      disk_size = job_model.DEFAULT_DISK_SIZE
    if boot_disk_size is None:
      boot_disk_size = job_model.DEFAULT_BOOT_DISK_SIZE
    if preemptible is None:
      preemptible = job_model.DEFAULT_PREEMPTIBLE

    # Format the docker command
    docker_command = cls._build_pipeline_docker_command(script_name, inputs,
                                                        outputs, envs)

    # Pipelines inputParameters can be both simple name/value pairs which get
    # set as environment variables, as well as input file paths which the
    # Pipelines controller will automatically localize to the Pipeline VM.

    # In the ephemeralPipeline object, the inputParameters are only defined;
    # the values are passed in the pipelineArgs.

    # Pipelines outputParameters are only output file paths, which the
    # Pipelines controller can automatically de-localize after the docker
    # command completes.

    # The Pipelines API does not support recursive copy of file parameters,
    # so it is implemented within the dsub-generated pipeline.
    # Any inputs or outputs marked as "recursive" are completely omitted here;
    # their environment variables will be set in the docker command, and
    # recursive copy code will be generated there as well.

    # The Pipelines API does not accept empty environment variables. Set them
    # to empty in DOCKER_COMMAND instead.
    input_envs = [{
        'name': SCRIPT_VARNAME
    }] + [{
        'name': env.name
    } for env in envs if env.value]

    input_files = [
        cls._build_pipeline_input_file_param(var.name, var.docker_path)
        for var in inputs
        if not var.recursive and var.value
    ]

    # Outputs are an array of file parameters
    output_files = [
        cls._build_pipeline_file_param(var.name, var.docker_path)
        for var in outputs
        if not var.recursive and var.value
    ]

    # The ephemeralPipeline provides the template for the pipeline.
    # pyformat: disable
    return {
        'ephemeralPipeline': {
            'projectId': project,
            'name': pipeline_name,

            # Define the resources needed for this pipeline.
            'resources': {
                'minimumCpuCores': min_cores,
                'minimumRamGb': min_ram,
                'bootDiskSizeGb': boot_disk_size,
                'preemptible': preemptible,
                'zones': google_base.get_zones(zones),
                'acceleratorType': accelerator_type,
                'acceleratorCount': accelerator_count,

                # Create a data disk that is attached to the VM and destroyed
                # when the pipeline terminates.
                'disks': [{
                    'name': 'datadisk',
                    'autoDelete': True,
                    'sizeGb': disk_size,
                    'mountPoint': providers_util.DATA_MOUNT_POINT,
                }],
            },
            'inputParameters': input_envs + input_files,
            'outputParameters': output_files,
            'docker': {
                'imageName': image,
                'cmd': docker_command,
            }
        }
    }
def function[build_pipeline, parameter[cls, project, zones, min_cores, min_ram, disk_size, boot_disk_size, preemptible, accelerator_type, accelerator_count, image, script_name, envs, inputs, outputs, pipeline_name]]: constant[Builds a pipeline configuration for execution. Args: project: string name of project. zones: list of zone names for jobs to be run at. min_cores: int number of CPU cores required per job. min_ram: int GB of RAM required per job. disk_size: int GB of disk to attach under /mnt/data. boot_disk_size: int GB of disk for boot. preemptible: use a preemptible VM for the job accelerator_type: string GCE defined accelerator type. accelerator_count: int number of accelerators of the specified type to attach. image: string Docker image name in which to run. script_name: file name of the script to run. envs: list of EnvParam objects specifying environment variables to set within each job. inputs: list of FileParam objects specifying input variables to set within each job. outputs: list of FileParam objects specifying output variables to set within each job. pipeline_name: string name of pipeline. Returns: A nested dictionary with one entry under the key ephemeralPipeline containing the pipeline configuration. ] if compare[name[min_cores] is constant[None]] begin[:] variable[min_cores] assign[=] name[job_model].DEFAULT_MIN_CORES if compare[name[min_ram] is constant[None]] begin[:] variable[min_ram] assign[=] name[job_model].DEFAULT_MIN_RAM if compare[name[disk_size] is constant[None]] begin[:] variable[disk_size] assign[=] name[job_model].DEFAULT_DISK_SIZE if compare[name[boot_disk_size] is constant[None]] begin[:] variable[boot_disk_size] assign[=] name[job_model].DEFAULT_BOOT_DISK_SIZE if compare[name[preemptible] is constant[None]] begin[:] variable[preemptible] assign[=] name[job_model].DEFAULT_PREEMPTIBLE variable[docker_command] assign[=] call[name[cls]._build_pipeline_docker_command, parameter[name[script_name], name[inputs], name[outputs], name[envs]]] variable[input_envs] assign[=] binary_operation[list[[<ast.Dict object at 0x7da1b0057910>]] + <ast.ListComp object at 0x7da1b00553c0>] variable[input_files] assign[=] <ast.ListComp object at 0x7da1b0055ba0> variable[output_files] assign[=] <ast.ListComp object at 0x7da1b00579a0> return[dictionary[[<ast.Constant object at 0x7da1b0055ed0>], [<ast.Dict object at 0x7da1b0055b70>]]]
keyword[def] identifier[build_pipeline] ( identifier[cls] , identifier[project] , identifier[zones] , identifier[min_cores] , identifier[min_ram] , identifier[disk_size] , identifier[boot_disk_size] , identifier[preemptible] , identifier[accelerator_type] , identifier[accelerator_count] , identifier[image] , identifier[script_name] , identifier[envs] , identifier[inputs] , identifier[outputs] , identifier[pipeline_name] ): literal[string] keyword[if] identifier[min_cores] keyword[is] keyword[None] : identifier[min_cores] = identifier[job_model] . identifier[DEFAULT_MIN_CORES] keyword[if] identifier[min_ram] keyword[is] keyword[None] : identifier[min_ram] = identifier[job_model] . identifier[DEFAULT_MIN_RAM] keyword[if] identifier[disk_size] keyword[is] keyword[None] : identifier[disk_size] = identifier[job_model] . identifier[DEFAULT_DISK_SIZE] keyword[if] identifier[boot_disk_size] keyword[is] keyword[None] : identifier[boot_disk_size] = identifier[job_model] . identifier[DEFAULT_BOOT_DISK_SIZE] keyword[if] identifier[preemptible] keyword[is] keyword[None] : identifier[preemptible] = identifier[job_model] . identifier[DEFAULT_PREEMPTIBLE] identifier[docker_command] = identifier[cls] . identifier[_build_pipeline_docker_command] ( identifier[script_name] , identifier[inputs] , identifier[outputs] , identifier[envs] ) identifier[input_envs] =[{ literal[string] : identifier[SCRIPT_VARNAME] }]+[{ literal[string] : identifier[env] . identifier[name] } keyword[for] identifier[env] keyword[in] identifier[envs] keyword[if] identifier[env] . identifier[value] ] identifier[input_files] =[ identifier[cls] . identifier[_build_pipeline_input_file_param] ( identifier[var] . identifier[name] , identifier[var] . identifier[docker_path] ) keyword[for] identifier[var] keyword[in] identifier[inputs] keyword[if] keyword[not] identifier[var] . identifier[recursive] keyword[and] identifier[var] . identifier[value] ] identifier[output_files] =[ identifier[cls] . identifier[_build_pipeline_file_param] ( identifier[var] . identifier[name] , identifier[var] . identifier[docker_path] ) keyword[for] identifier[var] keyword[in] identifier[outputs] keyword[if] keyword[not] identifier[var] . identifier[recursive] keyword[and] identifier[var] . identifier[value] ] keyword[return] { literal[string] :{ literal[string] : identifier[project] , literal[string] : identifier[pipeline_name] , literal[string] :{ literal[string] : identifier[min_cores] , literal[string] : identifier[min_ram] , literal[string] : identifier[boot_disk_size] , literal[string] : identifier[preemptible] , literal[string] : identifier[google_base] . identifier[get_zones] ( identifier[zones] ), literal[string] : identifier[accelerator_type] , literal[string] : identifier[accelerator_count] , literal[string] :[{ literal[string] : literal[string] , literal[string] : keyword[True] , literal[string] : identifier[disk_size] , literal[string] : identifier[providers_util] . identifier[DATA_MOUNT_POINT] , }], }, literal[string] : identifier[input_envs] + identifier[input_files] , literal[string] : identifier[output_files] , literal[string] :{ literal[string] : identifier[image] , literal[string] : identifier[docker_command] , } } }
def build_pipeline(cls, project, zones, min_cores, min_ram, disk_size, boot_disk_size, preemptible, accelerator_type, accelerator_count, image, script_name, envs, inputs, outputs, pipeline_name):
    """Builds a pipeline configuration for execution.

    Args:
      project: string name of project.
      zones: list of zone names for jobs to be run at.
      min_cores: int number of CPU cores required per job.
      min_ram: int GB of RAM required per job.
      disk_size: int GB of disk to attach under /mnt/data.
      boot_disk_size: int GB of disk for boot.
      preemptible: use a preemptible VM for the job
      accelerator_type: string GCE defined accelerator type.
      accelerator_count: int number of accelerators of the specified type to
        attach.
      image: string Docker image name in which to run.
      script_name: file name of the script to run.
      envs: list of EnvParam objects specifying environment variables to set
        within each job.
      inputs: list of FileParam objects specifying input variables to set
        within each job.
      outputs: list of FileParam objects specifying output variables to set
        within each job.
      pipeline_name: string name of pipeline.

    Returns:
      A nested dictionary with one entry under the key ephemeralPipeline
      containing the pipeline configuration.
    """
    if min_cores is None:
        min_cores = job_model.DEFAULT_MIN_CORES # depends on [control=['if'], data=['min_cores']]
    if min_ram is None:
        min_ram = job_model.DEFAULT_MIN_RAM # depends on [control=['if'], data=['min_ram']]
    if disk_size is None:
        disk_size = job_model.DEFAULT_DISK_SIZE # depends on [control=['if'], data=['disk_size']]
    if boot_disk_size is None:
        boot_disk_size = job_model.DEFAULT_BOOT_DISK_SIZE # depends on [control=['if'], data=['boot_disk_size']]
    if preemptible is None:
        preemptible = job_model.DEFAULT_PREEMPTIBLE # depends on [control=['if'], data=['preemptible']]
    # Format the docker command
    docker_command = cls._build_pipeline_docker_command(script_name, inputs, outputs, envs)
    # Pipelines inputParameters can be both simple name/value pairs which get
    # set as environment variables, as well as input file paths which the
    # Pipelines controller will automatically localize to the Pipeline VM.
    # In the ephemeralPipeline object, the inputParameters are only defined;
    # the values are passed in the pipelineArgs.
    # Pipelines outputParameters are only output file paths, which the
    # Pipelines controller can automatically de-localize after the docker
    # command completes.
    # The Pipelines API does not support recursive copy of file parameters,
    # so it is implemented within the dsub-generated pipeline.
    # Any inputs or outputs marked as "recursive" are completely omitted here;
    # their environment variables will be set in the docker command, and
    # recursive copy code will be generated there as well.
    # The Pipelines API does not accept empty environment variables. Set them to
    # empty in DOCKER_COMMAND instead.
    input_envs = [{'name': SCRIPT_VARNAME}] + [{'name': env.name} for env in envs if env.value]
    input_files = [cls._build_pipeline_input_file_param(var.name, var.docker_path) for var in inputs if not var.recursive and var.value]
    # Outputs are an array of file parameters
    output_files = [cls._build_pipeline_file_param(var.name, var.docker_path) for var in outputs if not var.recursive and var.value]
    # The ephemeralPipeline provides the template for the pipeline.
    # pyformat: disable
    # Define the resources needed for this pipeline.
    # Create a data disk that is attached to the VM and destroyed
    # when the pipeline terminates.
    return {'ephemeralPipeline': {'projectId': project, 'name': pipeline_name, 'resources': {'minimumCpuCores': min_cores, 'minimumRamGb': min_ram, 'bootDiskSizeGb': boot_disk_size, 'preemptible': preemptible, 'zones': google_base.get_zones(zones), 'acceleratorType': accelerator_type, 'acceleratorCount': accelerator_count, 'disks': [{'name': 'datadisk', 'autoDelete': True, 'sizeGb': disk_size, 'mountPoint': providers_util.DATA_MOUNT_POINT}]}, 'inputParameters': input_envs + input_files, 'outputParameters': output_files, 'docker': {'imageName': image, 'cmd': docker_command}}}
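An illustrative invocation; ``Provider`` stands for whatever class exposes this classmethod, and every argument value is made up:

pipeline = Provider.build_pipeline(
    project='my-project', zones=['us-central1-*'],
    min_cores=1, min_ram=3.75, disk_size=200, boot_disk_size=10,
    preemptible=False, accelerator_type=None, accelerator_count=0,
    image='ubuntu:22.04', script_name='run.sh',
    envs=[], inputs=[], outputs=[], pipeline_name='my-pipeline')
print(pipeline['ephemeralPipeline']['resources']['disks'][0]['sizeGb'])  # 200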
def OnTableChanged(self, event): """Table changed event handler""" if hasattr(event, 'table'): self.Select(event.table) self.EnsureVisible(event.table) event.Skip()
def function[OnTableChanged, parameter[self, event]]: constant[Table changed event handler] if call[name[hasattr], parameter[name[event], constant[table]]] begin[:] call[name[self].Select, parameter[name[event].table]] call[name[self].EnsureVisible, parameter[name[event].table]] call[name[event].Skip, parameter[]]
keyword[def] identifier[OnTableChanged] ( identifier[self] , identifier[event] ): literal[string] keyword[if] identifier[hasattr] ( identifier[event] , literal[string] ): identifier[self] . identifier[Select] ( identifier[event] . identifier[table] ) identifier[self] . identifier[EnsureVisible] ( identifier[event] . identifier[table] ) identifier[event] . identifier[Skip] ()
def OnTableChanged(self, event): """Table changed event handler""" if hasattr(event, 'table'): self.Select(event.table) self.EnsureVisible(event.table) # depends on [control=['if'], data=[]] event.Skip()
def invoked(self, ctx): """Method called when the command is invoked.""" if not ctx.ansi.is_enabled: print("You need color support to use this demo") else: print(ctx.ansi.cmd('erase_display')) self._demo_fg_color(ctx) self._demo_bg_color(ctx) self._demo_bg_indexed(ctx) self._demo_rgb(ctx) self._demo_style(ctx)
def function[invoked, parameter[self, ctx]]: constant[Method called when the command is invoked.] if <ast.UnaryOp object at 0x7da2044c0670> begin[:] call[name[print], parameter[constant[You need color support to use this demo]]]
keyword[def] identifier[invoked] ( identifier[self] , identifier[ctx] ): literal[string] keyword[if] keyword[not] identifier[ctx] . identifier[ansi] . identifier[is_enabled] : identifier[print] ( literal[string] ) keyword[else] : identifier[print] ( identifier[ctx] . identifier[ansi] . identifier[cmd] ( literal[string] )) identifier[self] . identifier[_demo_fg_color] ( identifier[ctx] ) identifier[self] . identifier[_demo_bg_color] ( identifier[ctx] ) identifier[self] . identifier[_demo_bg_indexed] ( identifier[ctx] ) identifier[self] . identifier[_demo_rgb] ( identifier[ctx] ) identifier[self] . identifier[_demo_style] ( identifier[ctx] )
def invoked(self, ctx): """Method called when the command is invoked.""" if not ctx.ansi.is_enabled: print('You need color support to use this demo') # depends on [control=['if'], data=[]] else: print(ctx.ansi.cmd('erase_display')) self._demo_fg_color(ctx) self._demo_bg_color(ctx) self._demo_bg_indexed(ctx) self._demo_rgb(ctx) self._demo_style(ctx)
def delete(self, url, headers=None, kwargs=None):
        """Make a DELETE request.

        To make a DELETE request, pass ``url``.

        :param url: ``str``
        :param headers: ``dict``
        :param kwargs: ``dict``
        """

        return self._request(
            method='delete',
            url=url,
            headers=headers,
            kwargs=kwargs
        )
def function[delete, parameter[self, url, headers, kwargs]]: constant[Make a DELETE request. To make a DELETE request pass, ``url`` :param url: ``str`` :param headers: ``dict`` :param kwargs: ``dict`` ] return[call[name[self]._request, parameter[]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[url] , identifier[headers] = keyword[None] , identifier[kwargs] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_request] ( identifier[method] = literal[string] , identifier[url] = identifier[url] , identifier[headers] = identifier[headers] , identifier[kwargs] = identifier[kwargs] )
def delete(self, url, headers=None, kwargs=None):
    """Make a DELETE request.

        To make a DELETE request, pass ``url``.

        :param url: ``str``
        :param headers: ``dict``
        :param kwargs: ``dict``
        """
    return self._request(method='delete', url=url, headers=headers, kwargs=kwargs)
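Usage mirrors the other verb helpers on the class; the URL and header values below are placeholders:

# `client` is an instance of the class above.
resp = client.delete('https://api.example.com/v1/items/42',
                     headers={'X-Auth-Token': 'secret'})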
def _as_in_context(data, ctx): """Move data into new context.""" if isinstance(data, nd.NDArray): return data.as_in_context(ctx) elif isinstance(data, (list, tuple)): return [_as_in_context(d, ctx) for d in data] return data
def function[_as_in_context, parameter[data, ctx]]: constant[Move data into new context.] if call[name[isinstance], parameter[name[data], name[nd].NDArray]] begin[:] return[call[name[data].as_in_context, parameter[name[ctx]]]] return[name[data]]
keyword[def] identifier[_as_in_context] ( identifier[data] , identifier[ctx] ): literal[string] keyword[if] identifier[isinstance] ( identifier[data] , identifier[nd] . identifier[NDArray] ): keyword[return] identifier[data] . identifier[as_in_context] ( identifier[ctx] ) keyword[elif] identifier[isinstance] ( identifier[data] ,( identifier[list] , identifier[tuple] )): keyword[return] [ identifier[_as_in_context] ( identifier[d] , identifier[ctx] ) keyword[for] identifier[d] keyword[in] identifier[data] ] keyword[return] identifier[data]
def _as_in_context(data, ctx): """Move data into new context.""" if isinstance(data, nd.NDArray): return data.as_in_context(ctx) # depends on [control=['if'], data=[]] elif isinstance(data, (list, tuple)): return [_as_in_context(d, ctx) for d in data] # depends on [control=['if'], data=[]] return data
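The recursion moves arbitrarily nested lists and tuples of NDArrays together; a quick check on CPU contexts (requires MXNet installed):

import mxnet as mx
from mxnet import nd

batch = [nd.array([1, 2]), (nd.array([3]), nd.array([4]))]
moved = _as_in_context(batch, mx.cpu(0))
print(moved[0].context)     # cpu(0)
print(moved[1][0].context)  # cpu(0); the inner tuple comes back as a list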
def choice_voters_changed_update_cache( sender, instance, action, reverse, model, pk_set, **kwargs): """Update cache when choice.voters changes.""" if action not in ('post_add', 'post_remove', 'post_clear'): # post_clear is not handled, because clear is called in # django.db.models.fields.related.ReverseManyRelatedObjects.__set__ # before setting the new order return if model == User: assert type(instance) == Choice choices = [instance] if pk_set: users = list(User.objects.filter(pk__in=pk_set)) else: users = [] else: if pk_set: choices = list(Choice.objects.filter(pk__in=pk_set)) else: choices = [] users = [instance] from .tasks import update_cache_for_instance for choice in choices: update_cache_for_instance('Choice', choice.pk, choice) for user in users: update_cache_for_instance('User', user.pk, user)
def function[choice_voters_changed_update_cache, parameter[sender, instance, action, reverse, model, pk_set]]: constant[Update cache when choice.voters changes.] if compare[name[action] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da204621e70>, <ast.Constant object at 0x7da2046225c0>, <ast.Constant object at 0x7da204622e60>]]] begin[:] return[None] if compare[name[model] equal[==] name[User]] begin[:] assert[compare[call[name[type], parameter[name[instance]]] equal[==] name[Choice]]] variable[choices] assign[=] list[[<ast.Name object at 0x7da204621420>]] if name[pk_set] begin[:] variable[users] assign[=] call[name[list], parameter[call[name[User].objects.filter, parameter[]]]] from relative_module[tasks] import module[update_cache_for_instance] for taget[name[choice]] in starred[name[choices]] begin[:] call[name[update_cache_for_instance], parameter[constant[Choice], name[choice].pk, name[choice]]] for taget[name[user]] in starred[name[users]] begin[:] call[name[update_cache_for_instance], parameter[constant[User], name[user].pk, name[user]]]
keyword[def] identifier[choice_voters_changed_update_cache] ( identifier[sender] , identifier[instance] , identifier[action] , identifier[reverse] , identifier[model] , identifier[pk_set] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[action] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[return] keyword[if] identifier[model] == identifier[User] : keyword[assert] identifier[type] ( identifier[instance] )== identifier[Choice] identifier[choices] =[ identifier[instance] ] keyword[if] identifier[pk_set] : identifier[users] = identifier[list] ( identifier[User] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[pk_set] )) keyword[else] : identifier[users] =[] keyword[else] : keyword[if] identifier[pk_set] : identifier[choices] = identifier[list] ( identifier[Choice] . identifier[objects] . identifier[filter] ( identifier[pk__in] = identifier[pk_set] )) keyword[else] : identifier[choices] =[] identifier[users] =[ identifier[instance] ] keyword[from] . identifier[tasks] keyword[import] identifier[update_cache_for_instance] keyword[for] identifier[choice] keyword[in] identifier[choices] : identifier[update_cache_for_instance] ( literal[string] , identifier[choice] . identifier[pk] , identifier[choice] ) keyword[for] identifier[user] keyword[in] identifier[users] : identifier[update_cache_for_instance] ( literal[string] , identifier[user] . identifier[pk] , identifier[user] )
def choice_voters_changed_update_cache(sender, instance, action, reverse, model, pk_set, **kwargs): """Update cache when choice.voters changes.""" if action not in ('post_add', 'post_remove', 'post_clear'): # post_clear is not handled, because clear is called in # django.db.models.fields.related.ReverseManyRelatedObjects.__set__ # before setting the new order return # depends on [control=['if'], data=[]] if model == User: assert type(instance) == Choice choices = [instance] if pk_set: users = list(User.objects.filter(pk__in=pk_set)) # depends on [control=['if'], data=[]] else: users = [] # depends on [control=['if'], data=['User']] else: if pk_set: choices = list(Choice.objects.filter(pk__in=pk_set)) # depends on [control=['if'], data=[]] else: choices = [] users = [instance] from .tasks import update_cache_for_instance for choice in choices: update_cache_for_instance('Choice', choice.pk, choice) # depends on [control=['for'], data=['choice']] for user in users: update_cache_for_instance('User', user.pk, user) # depends on [control=['for'], data=['user']]
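Receivers like this are wired to the through model of the M2M field; a sketch of the registration, assuming the ``Choice.voters`` field the handler refers to:

from django.db.models.signals import m2m_changed

m2m_changed.connect(choice_voters_changed_update_cache,
                    sender=Choice.voters.through)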
def append_system_paths(self): """Append system paths to $PATH.""" from rez.shells import Shell, create_shell sh = self.interpreter if isinstance(self.interpreter, Shell) \ else create_shell() paths = sh.get_syspaths() paths_str = os.pathsep.join(paths) self.env.PATH.append(paths_str)
def function[append_system_paths, parameter[self]]: constant[Append system paths to $PATH.] from relative_module[rez.shells] import module[Shell], module[create_shell] variable[sh] assign[=] <ast.IfExp object at 0x7da18f8127d0> variable[paths] assign[=] call[name[sh].get_syspaths, parameter[]] variable[paths_str] assign[=] call[name[os].pathsep.join, parameter[name[paths]]] call[name[self].env.PATH.append, parameter[name[paths_str]]]
keyword[def] identifier[append_system_paths] ( identifier[self] ): literal[string] keyword[from] identifier[rez] . identifier[shells] keyword[import] identifier[Shell] , identifier[create_shell] identifier[sh] = identifier[self] . identifier[interpreter] keyword[if] identifier[isinstance] ( identifier[self] . identifier[interpreter] , identifier[Shell] ) keyword[else] identifier[create_shell] () identifier[paths] = identifier[sh] . identifier[get_syspaths] () identifier[paths_str] = identifier[os] . identifier[pathsep] . identifier[join] ( identifier[paths] ) identifier[self] . identifier[env] . identifier[PATH] . identifier[append] ( identifier[paths_str] )
def append_system_paths(self): """Append system paths to $PATH.""" from rez.shells import Shell, create_shell sh = self.interpreter if isinstance(self.interpreter, Shell) else create_shell() paths = sh.get_syspaths() paths_str = os.pathsep.join(paths) self.env.PATH.append(paths_str)
async def _sign_submit(self, req_json: str) -> str:
        """
        Sign and submit (json) request to ledger; return (json) result.

        Raise ClosedPool if pool is not yet open, CorruptWallet if existing wallet's
        pool is no longer extant, or BadLedgerTxn on any other failure.

        :param req_json: json of request to sign and submit
        :return: json response
        """

        LOGGER.debug('_BaseAgent._sign_submit >>> json: %s', req_json)

        if not self.pool.handle:
            LOGGER.debug('_BaseAgent._sign_submit <!< closed pool %s', self.pool.name)
            raise ClosedPool('Cannot submit request to closed pool {}'.format(self.pool.name))

        try:
            rv_json = await ledger.sign_and_submit_request(self.pool.handle, self.wallet.handle, self.did, req_json)
            await asyncio.sleep(0)
        except IndyError as x_indy:
            if x_indy.error_code == ErrorCode.WalletIncompatiblePoolError:
                LOGGER.debug(
                    '_BaseAgent._sign_submit: <!< Corrupt wallet %s is not compatible with pool %s',
                    self.wallet.name,
                    self.pool.name)
                raise CorruptWallet(
                    'Corrupt wallet {} is not compatible with pool {}'.format(self.wallet.name, self.pool.name))
            else:
                LOGGER.debug(
                    '_BaseAgent._sign_submit: <!< cannot sign/submit request for ledger: indy error code %s',
                    x_indy.error_code)
                raise BadLedgerTxn('Cannot sign/submit request for ledger: indy error code {}'.format(
                    x_indy.error_code))

        resp = json.loads(rv_json)
        if ('op' in resp) and (resp['op'] in ('REQNACK', 'REJECT')):
            LOGGER.debug('_BaseAgent._sign_submit: ledger rejected request: %s', resp['reason'])
            raise BadLedgerTxn('Ledger rejected transaction request: {}'.format(resp['reason']))

        if 'reason' in resp and 'result' in resp and resp['result'].get('seqNo', None) is None:
            LOGGER.debug('_BaseAgent._sign_submit: <!< response indicates no transaction: %s', resp['reason'])
            raise BadLedgerTxn('Response indicates no transaction: {}'.format(resp['reason']))

        LOGGER.debug('_BaseAgent._sign_submit <<< %s', rv_json)
        return rv_json
<ast.AsyncFunctionDef object at 0x7da18dc9b190>
keyword[async] keyword[def] identifier[_sign_submit] ( identifier[self] , identifier[req_json] : identifier[str] )-> identifier[str] : literal[string] identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[req_json] ) keyword[if] keyword[not] identifier[self] . identifier[pool] . identifier[handle] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[pool] . identifier[name] ) keyword[raise] identifier[ClosedPool] ( literal[string] . identifier[format] ( identifier[self] . identifier[pool] . identifier[name] )) keyword[try] : identifier[rv_json] = keyword[await] identifier[ledger] . identifier[sign_and_submit_request] ( identifier[self] . identifier[pool] . identifier[handle] , identifier[self] . identifier[wallet] . identifier[handle] , identifier[self] . identifier[did] , identifier[req_json] ) keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] ) keyword[except] identifier[IndyError] keyword[as] identifier[x_indy] : keyword[if] identifier[x_indy] . identifier[error_code] == identifier[ErrorCode] . identifier[WalletIncompatiblePoolError] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[wallet] . identifier[name] , identifier[self] . identifier[pool] . identifier[name] ) keyword[raise] identifier[CorruptWallet] ( literal[string] . identifier[format] ( identifier[self] . identifier[wallet] . identifier[name] , identifier[self] . identifier[pool] . identifier[name] )) keyword[else] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[self] . identifier[wallet] . identifier[name] ) keyword[raise] identifier[BadLedgerTxn] ( literal[string] . identifier[format] ( identifier[x_indy] . identifier[error_code] )) identifier[resp] = identifier[json] . identifier[loads] ( identifier[rv_json] ) keyword[if] ( literal[string] keyword[in] identifier[resp] ) keyword[and] ( identifier[resp] [ literal[string] ] keyword[in] ( literal[string] , literal[string] )): identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[resp] [ literal[string] ]) keyword[raise] identifier[BadLedgerTxn] ( literal[string] . identifier[format] ( identifier[resp] [ literal[string] ])) keyword[if] literal[string] keyword[in] identifier[resp] keyword[and] literal[string] keyword[in] identifier[resp] keyword[and] identifier[resp] [ literal[string] ]. identifier[get] ( literal[string] , keyword[None] ) keyword[is] keyword[None] : identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[resp] [ literal[string] ]) keyword[raise] identifier[BadLedgerTxn] ( literal[string] . identifier[format] ( identifier[resp] [ literal[string] ])) identifier[LOGGER] . identifier[debug] ( literal[string] , identifier[rv_json] ) keyword[return] identifier[rv_json]
async def _sign_submit(self, req_json: str) -> str: """ Sign and submit (json) request to ledger; return (json) result. Raise ClosedPool if pool is not yet open, CorruptWallet if existing wallet's pool is no longer extant, or BadLedgerTxn on any other failure. :param req_json: json of request to sign and submit :return: json response """ LOGGER.debug('_BaseAgent._sign_submit >>> json: %s', req_json) if not self.pool.handle: LOGGER.debug('_BaseAgent._submit <!< closed pool %s', self.pool.name) raise ClosedPool('Cannot submit request to closed pool {}'.format(self.pool.name)) # depends on [control=['if'], data=[]] try: rv_json = await ledger.sign_and_submit_request(self.pool.handle, self.wallet.handle, self.did, req_json) await asyncio.sleep(0) # depends on [control=['try'], data=[]] except IndyError as x_indy: if x_indy.error_code == ErrorCode.WalletIncompatiblePoolError: LOGGER.debug('_BaseAgent._sign_submit: <!< Corrupt wallet %s is not compatible with pool %s', self.wallet.name, self.pool.name) raise CorruptWallet('Corrupt wallet {} is not compatible with pool {}'.format(self.wallet.name, self.pool.name)) # depends on [control=['if'], data=[]] else: LOGGER.debug('_BaseAgent._sign_submit: <!< cannot sign/submit request for ledger: indy error code %s', self.wallet.name) raise BadLedgerTxn('Cannot sign/submit request for ledger: indy error code {}'.format(x_indy.error_code)) # depends on [control=['except'], data=['x_indy']] resp = json.loads(rv_json) if 'op' in resp and resp['op'] in ('REQNACK', 'REJECT'): LOGGER.debug('_BaseAgent._sign_submit: ledger rejected request: %s', resp['reason']) raise BadLedgerTxn('Ledger rejected transaction request: {}'.format(resp['reason'])) # depends on [control=['if'], data=[]] if 'reason' in resp and 'result' in resp and (resp['result'].get('seqNo', None) is None): LOGGER.debug('_BaseAgent._sign_submit: <!< response indicates no transaction: %s', resp['reason']) raise BadLedgerTxn('Response indicates no transaction: {}'.format(resp['reason'])) # depends on [control=['if'], data=[]] LOGGER.debug('_BaseAgent._sign_submit <<< %s', rv_json) return rv_json
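A hedged usage sketch for the helper above. The `agent` object (an opened _BaseAgent with `pool`, `wallet` and `did` attached) and the sample DID are assumptions for illustration; build_get_nym_request is the python3-indy request builder.

import asyncio
from indy import ledger  # python3-indy request builders

async def lookup_nym(agent, target_did):
    # Build a read request for the ledger, then route it through the agent's
    # sign-and-submit path; BadLedgerTxn propagates on REQNACK/REJECT.
    req_json = await ledger.build_get_nym_request(agent.did, target_did)
    return await agent._sign_submit(req_json)

# asyncio.get_event_loop().run_until_complete(lookup_nym(agent, 'V4SGRU86Z58d6TV7PBUe6f'))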
def cleanup_deployments(self): """ Delete all deployments created in namespaces associated with this backend :return: None """ deployments = self.list_deployments() for deployment in deployments: if deployment.namespace in self.managed_namespaces: deployment.delete()
def function[cleanup_deployments, parameter[self]]: constant[ Delete all deployments created in namespaces associated with this backend :return: None ] variable[deployments] assign[=] call[name[self].list_deployments, parameter[]] for taget[name[deployment]] in starred[name[deployments]] begin[:] if compare[name[deployment].namespace in name[self].managed_namespaces] begin[:] call[name[deployment].delete, parameter[]]
keyword[def] identifier[cleanup_deployments] ( identifier[self] ): literal[string] identifier[deployments] = identifier[self] . identifier[list_deployments] () keyword[for] identifier[deployment] keyword[in] identifier[deployments] : keyword[if] identifier[deployment] . identifier[namespace] keyword[in] identifier[self] . identifier[managed_namespaces] : identifier[deployment] . identifier[delete] ()
def cleanup_deployments(self): """ Delete all deployments created in namespaces associated with this backend :return: None """ deployments = self.list_deployments() for deployment in deployments: if deployment.namespace in self.managed_namespaces: deployment.delete() # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['deployment']]
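A self-contained sketch of the same namespace-scoped cleanup pattern; the Deployment and Backend stand-ins are illustrative, not the original classes.

class Deployment:
    def __init__(self, name, namespace):
        self.name, self.namespace = name, namespace

    def delete(self):
        print('deleting', self.name)

class Backend:
    def __init__(self, managed_namespaces, deployments):
        self.managed_namespaces = set(managed_namespaces)
        self._deployments = deployments

    def list_deployments(self):
        return list(self._deployments)

    def cleanup_deployments(self):
        # Only touch deployments living in namespaces this backend manages.
        for deployment in self.list_deployments():
            if deployment.namespace in self.managed_namespaces:
                deployment.delete()

Backend({'ci'}, [Deployment('web', 'ci'), Deployment('db', 'prod')]).cleanup_deployments()
# -> deleting web  (the 'prod' deployment is left alone)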
def random(length: int = 8, chars: str = digits + ascii_lowercase) -> Iterator[str]: """ A random string. Not unique, but has around 1 in a million chance of collision (with the default 8 character length). e.g. 'fubui5e6' Args: length: Length of the random string. chars: The characters to randomly choose from. """ while True: yield "".join([choice(chars) for _ in range(length)])
def function[random, parameter[length, chars]]: constant[ A random string. Not unique, but has around 1 in a million chance of collision (with the default 8 character length). e.g. 'fubui5e6' Args: length: Length of the random string. chars: The characters to randomly choose from. ] while constant[True] begin[:] <ast.Yield object at 0x7da20e957670>
keyword[def] identifier[random] ( identifier[length] : identifier[int] = literal[int] , identifier[chars] : identifier[str] = identifier[digits] + identifier[ascii_lowercase] )-> identifier[Iterator] [ identifier[str] ]: literal[string] keyword[while] keyword[True] : keyword[yield] literal[string] . identifier[join] ([ identifier[choice] ( identifier[chars] ) keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[length] )])
def random(length: int=8, chars: str=digits + ascii_lowercase) -> Iterator[str]: """ A random string. Not unique, but has around 1 in a million chance of collision (with the default 8 character length). e.g. 'fubui5e6' Args: length: Length of the random string. chars: The characters to randomly choose from. """ while True: yield ''.join([choice(chars) for _ in range(length)]) # depends on [control=['while'], data=[]]
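Since the generator never terminates, callers pull values with next() or itertools.islice(). A runnable re-sketch with the same defaults (the original imports choice, digits and ascii_lowercase from random and string):

from itertools import islice
from random import choice
from string import ascii_lowercase, digits
from typing import Iterator

def random_ids(length: int = 8, chars: str = digits + ascii_lowercase) -> Iterator[str]:
    # Infinite stream of short, non-unique identifiers
    # (36**8 possible values at the default length).
    while True:
        yield "".join(choice(chars) for _ in range(length))

print(list(islice(random_ids(), 3)))  # e.g. ['fubui5e6', '0q2hxm4z', 'p9d1c7ka']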
def save_file_revisions(self, snapshot, file_revisions):
        """
        We convert various items in the file revision to documents, so that we can easily
        search and retrieve them...
        """
        annotations = defaultdict(list)

        for file_revision in file_revisions:

            issues_results = {}
            for analyzer_name, results in file_revision.results.items():
                if 'issues' in results:
                    issues_results[analyzer_name] = results['issues']
                    del results['issues']
                    # Replace an oversized per-analyzer issue list with a single marker issue.
                    if len(issues_results[analyzer_name]) > 1000:
                        issues_results[analyzer_name] = [{
                            'code' : 'TooManyIssues',
                            'analyzer' : analyzer_name,
                        }]

            with self.project.backend.transaction():
                self.project.backend.save(file_revision)

            def location_sorter(issue):
                if issue['location'] and issue['location'][0] and issue['location'][0][0]:
                    return issue['location'][0][0][0]
                return 0

            with self.project.backend.transaction():
                for analyzer_name, issues in issues_results.items():
                    grouped_issues = group_issues_by_fingerprint(issues)

                    for issue_dict in grouped_issues:
                        hasher = Hasher()
                        hasher.add(analyzer_name)
                        hasher.add(issue_dict['code'])
                        hasher.add(issue_dict['fingerprint'])
                        issue_dict['hash'] = hasher.digest.hexdigest()
                        try:
                            #we check if the issue already exists
                            issue = self.project.backend.get(Issue, {'hash' : issue_dict['hash'],
                                                                     'project' : self.project})
                        except Issue.DoesNotExist:
                            #if not, we create it
                            d = issue_dict.copy()
                            d['analyzer'] = analyzer_name
                            if 'location' in d:
                                del d['location']
                            if 'occurrences' in d:
                                del d['occurrences']
                            issue = Issue(d)
                            issue.project = self.project
                            self.project.backend.save(issue)

                        for occurrence in issue_dict['occurrences']:
                            hasher = Hasher()
                            hasher.add(file_revision.hash)
                            hasher.add(issue.hash)
                            hasher.add(occurrence.get('from_row'))
                            hasher.add(occurrence.get('from_column'))
                            hasher.add(occurrence.get('to_row'))
                            hasher.add(occurrence.get('to_column'))
                            hasher.add(occurrence.get('sequence'))
                            occurrence['hash'] = hasher.digest.hexdigest()

                            try:
                                #we check if the occurrence already exists
                                occurrence = self.project.backend.get(IssueOccurrence, {'hash' : occurrence['hash'],
                                                                                        'issue' : issue})
                            except IssueOccurrence.DoesNotExist:
                                #if not, we create it
                                occurrence = IssueOccurrence(occurrence)
                                occurrence.issue = issue
                                occurrence.file_revision = file_revision
                                self.project.backend.save(occurrence)

                            annotations['occurrences'].append(occurrence)
                        annotations['issues'].append(issue)

        return annotations
def function[save_file_revisions, parameter[self, snapshot, file_revisions]]: constant[ We convert various items in the file revision to documents, so that we can easily search and retrieve them... ] variable[annotations] assign[=] call[name[defaultdict], parameter[name[list]]] for taget[name[file_revision]] in starred[name[file_revisions]] begin[:] variable[issues_results] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b0578d00>, <ast.Name object at 0x7da1b0579150>]]] in starred[call[name[file_revision].results.items, parameter[]]] begin[:] if compare[constant[issues] in name[results]] begin[:] call[name[issues_results]][name[analyzer_name]] assign[=] call[name[results]][constant[issues]] <ast.Delete object at 0x7da1b0579240> if compare[call[name[len], parameter[name[issues_results]]] greater[>] constant[1000]] begin[:] call[name[issues_results]][name[analyzer_name]] assign[=] list[[<ast.Dict object at 0x7da1b057b460>]] with call[name[self].project.backend.transaction, parameter[]] begin[:] call[name[self].project.backend.save, parameter[name[file_revision]]] def function[location_sorter, parameter[issue]]: if <ast.BoolOp object at 0x7da1b057ab90> begin[:] return[call[call[call[call[name[issue]][constant[location]]][constant[0]]][constant[0]]][constant[0]]] return[constant[0]] with call[name[self].project.backend.transaction, parameter[]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b0578310>, <ast.Name object at 0x7da1b057b700>]]] in starred[call[name[issues_results].items, parameter[]]] begin[:] variable[grouped_issues] assign[=] call[name[group_issues_by_fingerprint], parameter[name[issues]]] for taget[name[issue_dict]] in starred[name[grouped_issues]] begin[:] variable[hasher] assign[=] call[name[Hasher], parameter[]] call[name[hasher].add, parameter[name[analyzer_name]]] call[name[hasher].add, parameter[call[name[issue_dict]][constant[code]]]] call[name[hasher].add, parameter[call[name[issue_dict]][constant[fingerprint]]]] call[name[issue_dict]][constant[hash]] assign[=] call[name[hasher].digest.hexdigest, parameter[]] <ast.Try object at 0x7da1b0578d60> for taget[name[occurrence]] in starred[call[name[issue_dict]][constant[occurrences]]] begin[:] variable[hasher] assign[=] call[name[Hasher], parameter[]] call[name[hasher].add, parameter[name[file_revision].hash]] call[name[hasher].add, parameter[name[issue].hash]] call[name[hasher].add, parameter[call[name[occurrence].get, parameter[constant[from_row]]]]] call[name[hasher].add, parameter[call[name[occurrence].get, parameter[constant[from_column]]]]] call[name[hasher].add, parameter[call[name[occurrence].get, parameter[constant[to_row]]]]] call[name[hasher].add, parameter[call[name[occurrence].get, parameter[constant[to_column]]]]] call[name[hasher].add, parameter[call[name[occurrence].get, parameter[constant[sequence]]]]] call[name[occurrence]][constant[hash]] assign[=] call[name[hasher].digest.hexdigest, parameter[]] <ast.Try object at 0x7da1b05ff340> call[call[name[annotations]][constant[occurrences]].append, parameter[name[occurrence]]] call[call[name[annotations]][constant[issues]].append, parameter[name[issue]]] return[name[annotations]]
keyword[def] identifier[save_file_revisions] ( identifier[self] , identifier[snapshot] , identifier[file_revisions] ): literal[string] identifier[annotations] = identifier[defaultdict] ( identifier[list] ) keyword[for] identifier[file_revision] keyword[in] identifier[file_revisions] : identifier[issues_results] ={} keyword[for] identifier[analyzer_name] , identifier[results] keyword[in] identifier[file_revision] . identifier[results] . identifier[items] (): keyword[if] literal[string] keyword[in] identifier[results] : identifier[issues_results] [ identifier[analyzer_name] ]= identifier[results] [ literal[string] ] keyword[del] identifier[results] [ literal[string] ] keyword[if] identifier[len] ( identifier[issues_results] )> literal[int] : identifier[issues_results] [ identifier[analyzer_name] ]=[{ literal[string] : literal[string] , literal[string] : identifier[analyzer_name] , }] keyword[with] identifier[self] . identifier[project] . identifier[backend] . identifier[transaction] (): identifier[self] . identifier[project] . identifier[backend] . identifier[save] ( identifier[file_revision] ) keyword[def] identifier[location_sorter] ( identifier[issue] ): keyword[if] identifier[issue] [ literal[string] ] keyword[and] identifier[issue] [ literal[string] ][ literal[int] ] keyword[and] identifier[issue] [ literal[string] ][ literal[int] ][ literal[int] ]: keyword[return] identifier[issue] [ literal[string] ][ literal[int] ][ literal[int] ][ literal[int] ] keyword[return] literal[int] keyword[with] identifier[self] . identifier[project] . identifier[backend] . identifier[transaction] (): keyword[for] identifier[analyzer_name] , identifier[issues] keyword[in] identifier[issues_results] . identifier[items] (): identifier[grouped_issues] = identifier[group_issues_by_fingerprint] ( identifier[issues] ) keyword[for] identifier[issue_dict] keyword[in] identifier[grouped_issues] : identifier[hasher] = identifier[Hasher] () identifier[hasher] . identifier[add] ( identifier[analyzer_name] ) identifier[hasher] . identifier[add] ( identifier[issue_dict] [ literal[string] ]) identifier[hasher] . identifier[add] ( identifier[issue_dict] [ literal[string] ]) identifier[issue_dict] [ literal[string] ]= identifier[hasher] . identifier[digest] . identifier[hexdigest] () keyword[try] : identifier[issue] = identifier[self] . identifier[project] . identifier[backend] . identifier[get] ( identifier[Issue] ,{ literal[string] : identifier[issue_dict] [ literal[string] ], literal[string] : identifier[self] . identifier[project] }) keyword[except] identifier[Issue] . identifier[DoesNotExist] : identifier[d] = identifier[issue_dict] . identifier[copy] () identifier[d] [ literal[string] ]= identifier[analyzer_name] keyword[if] literal[string] keyword[in] identifier[d] : keyword[del] identifier[d] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[d] : keyword[del] identifier[d] [ literal[string] ] identifier[issue] = identifier[Issue] ( identifier[d] ) identifier[issue] . identifier[project] = identifier[self] . identifier[project] identifier[self] . identifier[project] . identifier[backend] . identifier[save] ( identifier[issue] ) keyword[for] identifier[occurrence] keyword[in] identifier[issue_dict] [ literal[string] ]: identifier[hasher] = identifier[Hasher] () identifier[hasher] . identifier[add] ( identifier[file_revision] . identifier[hash] ) identifier[hasher] . identifier[add] ( identifier[issue] . identifier[hash] ) identifier[hasher] . identifier[add] ( identifier[occurrence] . identifier[get] ( literal[string] )) identifier[hasher] . identifier[add] ( identifier[occurrence] . identifier[get] ( literal[string] )) identifier[hasher] . identifier[add] ( identifier[occurrence] . identifier[get] ( literal[string] )) identifier[hasher] . identifier[add] ( identifier[occurrence] . identifier[get] ( literal[string] )) identifier[hasher] . identifier[add] ( identifier[occurrence] . identifier[get] ( literal[string] )) identifier[occurrence] [ literal[string] ]= identifier[hasher] . identifier[digest] . identifier[hexdigest] () keyword[try] : identifier[occurrence] = identifier[self] . identifier[project] . identifier[backend] . identifier[get] ( identifier[IssueOccurrence] ,{ literal[string] : identifier[occurrence] [ literal[string] ], literal[string] : identifier[issue] }) keyword[except] identifier[IssueOccurrence] . identifier[DoesNotExist] : identifier[occurrence] = identifier[IssueOccurrence] ( identifier[occurrence] ) identifier[occurrence] . identifier[issue] = identifier[issue] identifier[occurrence] . identifier[file_revision] = identifier[file_revision] identifier[self] . identifier[project] . identifier[backend] . identifier[save] ( identifier[occurrence] ) identifier[annotations] [ literal[string] ]. identifier[append] ( identifier[occurrence] ) identifier[annotations] [ literal[string] ]. identifier[append] ( identifier[issue] ) keyword[return] identifier[annotations]
def save_file_revisions(self, snapshot, file_revisions): """ We convert various items in the file revision to documents, so that we can easily search and retrieve them... """ annotations = defaultdict(list) for file_revision in file_revisions: issues_results = {} for (analyzer_name, results) in file_revision.results.items(): if 'issues' in results: issues_results[analyzer_name] = results['issues'] del results['issues'] if len(issues_results) > 1000: issues_results[analyzer_name] = [{'code': 'TooManyIssues', 'analyzer': analyzer_name}] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['results']] # depends on [control=['for'], data=[]] with self.project.backend.transaction(): self.project.backend.save(file_revision) # depends on [control=['with'], data=[]] def location_sorter(issue): if issue['location'] and issue['location'][0] and issue['location'][0][0]: return issue['location'][0][0][0] # depends on [control=['if'], data=[]] return 0 with self.project.backend.transaction(): for (analyzer_name, issues) in issues_results.items(): grouped_issues = group_issues_by_fingerprint(issues) for issue_dict in grouped_issues: hasher = Hasher() hasher.add(analyzer_name) hasher.add(issue_dict['code']) hasher.add(issue_dict['fingerprint']) issue_dict['hash'] = hasher.digest.hexdigest() try: #we check if the issue already exists issue = self.project.backend.get(Issue, {'hash': issue_dict['hash'], 'project': self.project}) # depends on [control=['try'], data=[]] except Issue.DoesNotExist: #if not, we create it d = issue_dict.copy() d['analyzer'] = analyzer_name if 'location' in d: del d['location'] # depends on [control=['if'], data=['d']] if 'occurrences' in d: del d['occurrences'] # depends on [control=['if'], data=['d']] issue = Issue(d) issue.project = self.project self.project.backend.save(issue) # depends on [control=['except'], data=[]] for occurrence in issue_dict['occurrences']: hasher = Hasher() hasher.add(file_revision.hash) hasher.add(issue.hash) hasher.add(occurrence.get('from_row')) hasher.add(occurrence.get('from_column')) hasher.add(occurrence.get('to_row')) hasher.add(occurrence.get('to_column')) hasher.add(occurrence.get('sequence')) occurrence['hash'] = hasher.digest.hexdigest() try: #we check if the occurrence already exists occurrence = self.project.backend.get(IssueOccurrence, {'hash': occurrence['hash'], 'issue': issue}) # depends on [control=['try'], data=[]] except IssueOccurrence.DoesNotExist: #if not, we create it occurrence = IssueOccurrence(occurrence) occurrence.issue = issue occurrence.file_revision = file_revision self.project.backend.save(occurrence) # depends on [control=['except'], data=[]] annotations['occurrences'].append(occurrence) # depends on [control=['for'], data=['occurrence']] annotations['issues'].append(issue) # depends on [control=['for'], data=['issue_dict']] # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['file_revision']] return annotations
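The issue identity above is the digest of (analyzer, code, fingerprint). A minimal stand-in for that keying built directly on hashlib; the original Hasher's algorithm is not shown in this record, so sha256 is an assumption.

import hashlib

def issue_hash(analyzer_name, code, fingerprint):
    # Same analyzer + code + fingerprint always yields the same key, which is
    # what lets the save loop find an existing Issue instead of duplicating it.
    h = hashlib.sha256()
    for part in (analyzer_name, code, fingerprint):
        h.update(str(part).encode('utf-8'))
    return h.hexdigest()

print(issue_hash('pylint', 'W0611', 'unused-import:mod.py'))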
def stop_server(self, datacenter_id, server_id): """ Stops the server. :param datacenter_id: The unique ID of the data center. :type datacenter_id: ``str`` :param server_id: The unique ID of the server. :type server_id: ``str`` """ response = self._perform_request( url='/datacenters/%s/servers/%s/stop' % ( datacenter_id, server_id), method='POST-ACTION') return response
def function[stop_server, parameter[self, datacenter_id, server_id]]: constant[ Stops the server. :param datacenter_id: The unique ID of the data center. :type datacenter_id: ``str`` :param server_id: The unique ID of the server. :type server_id: ``str`` ] variable[response] assign[=] call[name[self]._perform_request, parameter[]] return[name[response]]
keyword[def] identifier[stop_server] ( identifier[self] , identifier[datacenter_id] , identifier[server_id] ): literal[string] identifier[response] = identifier[self] . identifier[_perform_request] ( identifier[url] = literal[string] %( identifier[datacenter_id] , identifier[server_id] ), identifier[method] = literal[string] ) keyword[return] identifier[response]
def stop_server(self, datacenter_id, server_id): """ Stops the server. :param datacenter_id: The unique ID of the data center. :type datacenter_id: ``str`` :param server_id: The unique ID of the server. :type server_id: ``str`` """ response = self._perform_request(url='/datacenters/%s/servers/%s/stop' % (datacenter_id, server_id), method='POST-ACTION') return response
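Typical call shape, assuming the profitbricks SDK client; the credentials and IDs below are placeholders, not real values.

from profitbricks.client import ProfitBricksService

client = ProfitBricksService(username='user@example.com', password='secret')
response = client.stop_server(
    datacenter_id='d19f8b82-0000-0000-0000-000000000000',  # placeholder UUIDs
    server_id='7a9b2c41-0000-0000-0000-000000000000')
# Issues POST-ACTION /datacenters/<dc>/servers/<srv>/stop and returns the raw response.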
def _get_magnitude_vector_properties(catalogue, config):
    '''If an input minimum magnitude is given then consider catalogue only above
    the minimum magnitude - returns corresponding properties'''

    mmin = config.get('input_mmin', np.min(catalogue['magnitude']))
    # Builtin float: the np.float alias was removed in NumPy >= 1.24.
    neq = float(np.sum(catalogue['magnitude'] >= mmin - 1.E-7))
    return neq, mmin
def function[_get_magnitude_vector_properties, parameter[catalogue, config]]: constant[If an input minimum magnitude is given then consider catalogue only above the minimum magnitude - returns corresponding properties] variable[mmin] assign[=] call[name[config].get, parameter[constant[input_mmin], call[name[np].min, parameter[call[name[catalogue]][constant[magnitude]]]]]] variable[neq] assign[=] call[name[np].float, parameter[call[name[np].sum, parameter[compare[call[name[catalogue]][constant[magnitude]] greater_or_equal[>=] binary_operation[name[mmin] - constant[1e-07]]]]]]] return[tuple[[<ast.Name object at 0x7da20c795090>, <ast.Name object at 0x7da20c795c60>]]]
keyword[def] identifier[_get_magnitude_vector_properties] ( identifier[catalogue] , identifier[config] ): literal[string] identifier[mmin] = identifier[config] . identifier[get] ( literal[string] , identifier[np] . identifier[min] ( identifier[catalogue] [ literal[string] ])) identifier[neq] = identifier[np] . identifier[float] ( identifier[np] . identifier[sum] ( identifier[catalogue] [ literal[string] ]>= identifier[mmin] - literal[int] )) keyword[return] identifier[neq] , identifier[mmin]
def _get_magnitude_vector_properties(catalogue, config): """If an input minimum magnitude is given then consider catalogue only above the minimum magnitude - returns corresponding properties""" mmin = config.get('input_mmin', np.min(catalogue['magnitude'])) neq = np.float(np.sum(catalogue['magnitude'] >= mmin - 1e-07)) return (neq, mmin)
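A worked numeric example of the same cut-off logic, standalone with numpy:

import numpy as np

catalogue = {'magnitude': np.array([3.0, 4.2, 4.5, 5.1])}
config = {'input_mmin': 4.5}

mmin = config.get('input_mmin', np.min(catalogue['magnitude']))
neq = float(np.sum(catalogue['magnitude'] >= mmin - 1.E-7))  # tolerance keeps 4.5 itself
print(neq, mmin)  # 2.0 4.5 -> two events at or above the minimum magnitude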
def sum_distances(self, indices, distance_matrix): """Calculate combinatorial distance between a select group of trajectories, indicated by indices Arguments --------- indices : tuple distance_matrix : numpy.ndarray (M,M) Returns ------- numpy.ndarray Notes ----- This function can perhaps be quickened by calculating the sum of the distances. The calculated distances, as they are right now, are only used in a relative way. Purely summing distances would lead to the same result, at a perhaps quicker rate. """ combs_tup = np.array(tuple(combinations(indices, 2))) # Put indices from tuples into two-dimensional array. combs = np.array([[i[0] for i in combs_tup], [i[1] for i in combs_tup]]) # Calculate distance (vectorized) dist = np.sqrt( np.sum(np.square(distance_matrix[combs[0], combs[1]]), axis=0)) return dist
def function[sum_distances, parameter[self, indices, distance_matrix]]: constant[Calculate combinatorial distance between a select group of trajectories, indicated by indices Arguments --------- indices : tuple distance_matrix : numpy.ndarray (M,M) Returns ------- numpy.ndarray Notes ----- This function can perhaps be quickened by calculating the sum of the distances. The calculated distances, as they are right now, are only used in a relative way. Purely summing distances would lead to the same result, at a perhaps quicker rate. ] variable[combs_tup] assign[=] call[name[np].array, parameter[call[name[tuple], parameter[call[name[combinations], parameter[name[indices], constant[2]]]]]]] variable[combs] assign[=] call[name[np].array, parameter[list[[<ast.ListComp object at 0x7da1b18cbd90>, <ast.ListComp object at 0x7da1b18cad70>]]]] variable[dist] assign[=] call[name[np].sqrt, parameter[call[name[np].sum, parameter[call[name[np].square, parameter[call[name[distance_matrix]][tuple[[<ast.Subscript object at 0x7da1b18ca170>, <ast.Subscript object at 0x7da1b18cb5e0>]]]]]]]]] return[name[dist]]
keyword[def] identifier[sum_distances] ( identifier[self] , identifier[indices] , identifier[distance_matrix] ): literal[string] identifier[combs_tup] = identifier[np] . identifier[array] ( identifier[tuple] ( identifier[combinations] ( identifier[indices] , literal[int] ))) identifier[combs] = identifier[np] . identifier[array] ([[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[combs_tup] ], [ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[combs_tup] ]]) identifier[dist] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[sum] ( identifier[np] . identifier[square] ( identifier[distance_matrix] [ identifier[combs] [ literal[int] ], identifier[combs] [ literal[int] ]]), identifier[axis] = literal[int] )) keyword[return] identifier[dist]
def sum_distances(self, indices, distance_matrix): """Calculate combinatorial distance between a select group of trajectories, indicated by indices Arguments --------- indices : tuple distance_matrix : numpy.ndarray (M,M) Returns ------- numpy.ndarray Notes ----- This function can perhaps be quickened by calculating the sum of the distances. The calculated distances, as they are right now, are only used in a relative way. Purely summing distances would lead to the same result, at a perhaps quicker rate. """ combs_tup = np.array(tuple(combinations(indices, 2))) # Put indices from tuples into two-dimensional array. combs = np.array([[i[0] for i in combs_tup], [i[1] for i in combs_tup]]) # Calculate distance (vectorized) dist = np.sqrt(np.sum(np.square(distance_matrix[combs[0], combs[1]]), axis=0)) return dist
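How the vectorised pairwise lookup behaves on a toy 3x3 symmetric distance matrix; this standalone sketch builds combs via a transpose rather than two comprehensions, which is equivalent.

import numpy as np
from itertools import combinations

distance_matrix = np.array([[0.0, 1.0, 2.0],
                            [1.0, 0.0, 3.0],
                            [2.0, 3.0, 0.0]])
indices = (0, 1, 2)

combs = np.array(list(combinations(indices, 2))).T   # row 0: first index, row 1: second
dist = np.sqrt(np.sum(np.square(distance_matrix[combs[0], combs[1]]), axis=0))
print(dist)  # sqrt(1**2 + 2**2 + 3**2) ~= 3.742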
def set_run_completed(self, boolean, start_datetime, end_datetime): """Set the value of _run_completed.""" self._run_completed = boolean if (start_datetime, end_datetime) != (None, None): # start_datetime: Sat Feb 28 23:54:27 2015 # end_datetime: Sat Feb 28 23:54:30 2015 try: fmt = "%a %b %d %H:%M:%S %Y" self.start_datetime = datetime.datetime.strptime(start_datetime, fmt) self.end_datetime = datetime.datetime.strptime(end_datetime, fmt) except Exception as exc: # Maybe LOCALE != en_US logger.warning(str(exc))
def function[set_run_completed, parameter[self, boolean, start_datetime, end_datetime]]: constant[Set the value of _run_completed.] name[self]._run_completed assign[=] name[boolean] if compare[tuple[[<ast.Name object at 0x7da20c6a8520>, <ast.Name object at 0x7da20c6a8a00>]] not_equal[!=] tuple[[<ast.Constant object at 0x7da207f9ace0>, <ast.Constant object at 0x7da207f99810>]]] begin[:] <ast.Try object at 0x7da207f9a080>
keyword[def] identifier[set_run_completed] ( identifier[self] , identifier[boolean] , identifier[start_datetime] , identifier[end_datetime] ): literal[string] identifier[self] . identifier[_run_completed] = identifier[boolean] keyword[if] ( identifier[start_datetime] , identifier[end_datetime] )!=( keyword[None] , keyword[None] ): keyword[try] : identifier[fmt] = literal[string] identifier[self] . identifier[start_datetime] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[start_datetime] , identifier[fmt] ) identifier[self] . identifier[end_datetime] = identifier[datetime] . identifier[datetime] . identifier[strptime] ( identifier[end_datetime] , identifier[fmt] ) keyword[except] identifier[Exception] keyword[as] identifier[exc] : identifier[logger] . identifier[warning] ( identifier[str] ( identifier[exc] ))
def set_run_completed(self, boolean, start_datetime, end_datetime): """Set the value of _run_completed.""" self._run_completed = boolean if (start_datetime, end_datetime) != (None, None): # start_datetime: Sat Feb 28 23:54:27 2015 # end_datetime: Sat Feb 28 23:54:30 2015 try: fmt = '%a %b %d %H:%M:%S %Y' self.start_datetime = datetime.datetime.strptime(start_datetime, fmt) self.end_datetime = datetime.datetime.strptime(end_datetime, fmt) # depends on [control=['try'], data=[]] except Exception as exc: # Maybe LOCALE != en_US logger.warning(str(exc)) # depends on [control=['except'], data=['exc']] # depends on [control=['if'], data=[]]
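The timestamp format the setter expects, demonstrated standalone; as the except branch warns, %a/%b parsing is locale-dependent (English day and month names assumed here):

import datetime

fmt = "%a %b %d %H:%M:%S %Y"
start = datetime.datetime.strptime("Sat Feb 28 23:54:27 2015", fmt)
end = datetime.datetime.strptime("Sat Feb 28 23:54:30 2015", fmt)
print(end - start)  # 0:00:03 -- wall-clock duration of the run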
def get_variation(self, experiment, user_id, attributes, ignore_user_profile=False):
    """ Top-level function to help determine variation user should be put in.

    First, check if experiment is running.
    Second, check if user is forced in a variation.
    Third, check if there is a stored decision for the user and return the corresponding variation.
    Fourth, figure out if user is in the experiment by evaluating audience conditions if any.
    Fifth, bucket the user and return the variation.

    Args:
        experiment: Experiment for which user variation needs to be determined.
        user_id: ID for user.
        attributes: Dict representing user attributes.
        ignore_user_profile: True to ignore the user profile lookup. Defaults to False.

    Returns:
        Variation user should see. None if user is not in experiment or experiment is not running.
    """

    # Check if experiment is running
    if not experiment_helper.is_experiment_running(experiment):
        self.logger.info('Experiment "%s" is not running.' % experiment.key)
        return None

    # Check if the user is forced into a variation
    variation = self.config.get_forced_variation(experiment.key, user_id)
    if variation:
        return variation

    # Check to see if user is white-listed for a certain variation
    variation = self.get_forced_variation(experiment, user_id)
    if variation:
        return variation

    # Check to see if user has a decision available for the given experiment
    user_profile = UserProfile(user_id)
    if not ignore_user_profile and self.user_profile_service:
        try:
            retrieved_profile = self.user_profile_service.lookup(user_id)
        except Exception:
            self.logger.exception('Unable to retrieve user profile for user "%s" as lookup failed.' % user_id)
            retrieved_profile = None

        if validator.is_user_profile_valid(retrieved_profile):
            user_profile = UserProfile(**retrieved_profile)
            variation = self.get_stored_variation(experiment, user_profile)
            if variation:
                return variation
        else:
            self.logger.warning('User profile has invalid format.')

    # Bucket user and store the new decision
    if not audience_helper.is_user_in_experiment(self.config, experiment, attributes, self.logger):
        self.logger.info('User "%s" does not meet conditions to be in experiment "%s".' % (
            user_id,
            experiment.key
        ))
        return None

    # Determine bucketing ID to be used
    bucketing_id = self._get_bucketing_id(user_id, attributes)
    variation = self.bucketer.bucket(experiment, user_id, bucketing_id)

    if variation:
        # Store this new decision and return the variation for the user
        if not ignore_user_profile and self.user_profile_service:
            try:
                user_profile.save_variation_for_experiment(experiment.id, variation.id)
                self.user_profile_service.save(user_profile.__dict__)
            except Exception:
                self.logger.exception('Unable to save user profile for user "%s".' % user_id)
        return variation

    return None
def function[get_variation, parameter[self, experiment, user_id, attributes, ignore_user_profile]]: constant[ Top-level function to help determine variation user should be put in. First, check if experiment is running. Second, check if user is forced in a variation. Third, check if there is a stored decision for the user and return the corresponding variation. Fourth, figure out if user is in the experiment by evaluating audience conditions if any. Fifth, bucket the user and return the variation. Args: experiment: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. ignore_user_profile: True to ignore the user profile lookup. Defaults to False. Returns: Variation user should see. None if user is not in experiment or experiment is not running. ] if <ast.UnaryOp object at 0x7da18bc73850> begin[:] call[name[self].logger.info, parameter[binary_operation[constant[Experiment "%s" is not running.] <ast.Mod object at 0x7da2590d6920> name[experiment].key]]] return[constant[None]] variable[variation] assign[=] call[name[self].config.get_forced_variation, parameter[name[experiment].key, name[user_id]]] if name[variation] begin[:] return[name[variation]] variable[variation] assign[=] call[name[self].get_forced_variation, parameter[name[experiment], name[user_id]]] if name[variation] begin[:] return[name[variation]] variable[user_profile] assign[=] call[name[UserProfile], parameter[name[user_id]]] if <ast.BoolOp object at 0x7da18bc72ef0> begin[:] <ast.Try object at 0x7da18bc707c0> if call[name[validator].is_user_profile_valid, parameter[name[retrieved_profile]]] begin[:] variable[user_profile] assign[=] call[name[UserProfile], parameter[]] variable[variation] assign[=] call[name[self].get_stored_variation, parameter[name[experiment], name[user_profile]]] if name[variation] begin[:] return[name[variation]] if <ast.UnaryOp object at 0x7da18bc70e80> begin[:] call[name[self].logger.info, parameter[binary_operation[constant[User "%s" does not meet conditions to be in experiment "%s".] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18bc70790>, <ast.Attribute object at 0x7da18bc73dc0>]]]]] return[constant[None]] variable[bucketing_id] assign[=] call[name[self]._get_bucketing_id, parameter[name[user_id], name[attributes]]] variable[variation] assign[=] call[name[self].bucketer.bucket, parameter[name[experiment], name[user_id], name[bucketing_id]]] if name[variation] begin[:] if <ast.BoolOp object at 0x7da18bc70100> begin[:] <ast.Try object at 0x7da18bc70fd0> return[name[variation]] return[constant[None]]
keyword[def] identifier[get_variation] ( identifier[self] , identifier[experiment] , identifier[user_id] , identifier[attributes] , identifier[ignore_user_profile] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[experiment_helper] . identifier[is_experiment_running] ( identifier[experiment] ): identifier[self] . identifier[logger] . identifier[info] ( literal[string] % identifier[experiment] . identifier[key] ) keyword[return] keyword[None] identifier[variation] = identifier[self] . identifier[config] . identifier[get_forced_variation] ( identifier[experiment] . identifier[key] , identifier[user_id] ) keyword[if] identifier[variation] : keyword[return] identifier[variation] identifier[variation] = identifier[self] . identifier[get_forced_variation] ( identifier[experiment] , identifier[user_id] ) keyword[if] identifier[variation] : keyword[return] identifier[variation] identifier[user_profile] = identifier[UserProfile] ( identifier[user_id] ) keyword[if] keyword[not] identifier[ignore_user_profile] keyword[and] identifier[self] . identifier[user_profile_service] : keyword[try] : identifier[retrieved_profile] = identifier[self] . identifier[user_profile_service] . identifier[lookup] ( identifier[user_id] ) keyword[except] : identifier[self] . identifier[logger] . identifier[exception] ( literal[string] % identifier[user_id] ) identifier[retrieved_profile] = keyword[None] keyword[if] identifier[validator] . identifier[is_user_profile_valid] ( identifier[retrieved_profile] ): identifier[user_profile] = identifier[UserProfile] (** identifier[retrieved_profile] ) identifier[variation] = identifier[self] . identifier[get_stored_variation] ( identifier[experiment] , identifier[user_profile] ) keyword[if] identifier[variation] : keyword[return] identifier[variation] keyword[else] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] ) keyword[if] keyword[not] identifier[audience_helper] . identifier[is_user_in_experiment] ( identifier[self] . identifier[config] , identifier[experiment] , identifier[attributes] , identifier[self] . identifier[logger] ): identifier[self] . identifier[logger] . identifier[info] ( literal[string] %( identifier[user_id] , identifier[experiment] . identifier[key] )) keyword[return] keyword[None] identifier[bucketing_id] = identifier[self] . identifier[_get_bucketing_id] ( identifier[user_id] , identifier[attributes] ) identifier[variation] = identifier[self] . identifier[bucketer] . identifier[bucket] ( identifier[experiment] , identifier[user_id] , identifier[bucketing_id] ) keyword[if] identifier[variation] : keyword[if] keyword[not] identifier[ignore_user_profile] keyword[and] identifier[self] . identifier[user_profile_service] : keyword[try] : identifier[user_profile] . identifier[save_variation_for_experiment] ( identifier[experiment] . identifier[id] , identifier[variation] . identifier[id] ) identifier[self] . identifier[user_profile_service] . identifier[save] ( identifier[user_profile] . identifier[__dict__] ) keyword[except] : identifier[self] . identifier[logger] . identifier[exception] ( literal[string] % identifier[user_id] ) keyword[return] identifier[variation] keyword[return] keyword[None]
def get_variation(self, experiment, user_id, attributes, ignore_user_profile=False): """ Top-level function to help determine variation user should be put in. First, check if experiment is running. Second, check if user is forced in a variation. Third, check if there is a stored decision for the user and return the corresponding variation. Fourth, figure out if user is in the experiment by evaluating audience conditions if any. Fifth, bucket the user and return the variation. Args: experiment: Experiment for which user variation needs to be determined. user_id: ID for user. attributes: Dict representing user attributes. ignore_user_profile: True to ignore the user profile lookup. Defaults to False. Returns: Variation user should see. None if user is not in experiment or experiment is not running. """ # Check if experiment is running if not experiment_helper.is_experiment_running(experiment): self.logger.info('Experiment "%s" is not running.' % experiment.key) return None # depends on [control=['if'], data=[]] # Check if the user is forced into a variation variation = self.config.get_forced_variation(experiment.key, user_id) if variation: return variation # depends on [control=['if'], data=[]] # Check to see if user is white-listed for a certain variation variation = self.get_forced_variation(experiment, user_id) if variation: return variation # depends on [control=['if'], data=[]] # Check to see if user has a decision available for the given experiment user_profile = UserProfile(user_id) if not ignore_user_profile and self.user_profile_service: try: retrieved_profile = self.user_profile_service.lookup(user_id) # depends on [control=['try'], data=[]] except: self.logger.exception('Unable to retrieve user profile for user "%s" as lookup failed.' % user_id) retrieved_profile = None # depends on [control=['except'], data=[]] if validator.is_user_profile_valid(retrieved_profile): user_profile = UserProfile(**retrieved_profile) variation = self.get_stored_variation(experiment, user_profile) if variation: return variation # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: self.logger.warning('User profile has invalid format.') # depends on [control=['if'], data=[]] # Bucket user and store the new decision if not audience_helper.is_user_in_experiment(self.config, experiment, attributes, self.logger): self.logger.info('User "%s" does not meet conditions to be in experiment "%s".' % (user_id, experiment.key)) return None # depends on [control=['if'], data=[]] # Determine bucketing ID to be used bucketing_id = self._get_bucketing_id(user_id, attributes) variation = self.bucketer.bucket(experiment, user_id, bucketing_id) if variation: # Store this new decision and return the variation for the user if not ignore_user_profile and self.user_profile_service: try: user_profile.save_variation_for_experiment(experiment.id, variation.id) self.user_profile_service.save(user_profile.__dict__) # depends on [control=['try'], data=[]] except: self.logger.exception('Unable to save user profile for user "%s".' % user_id) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] return variation # depends on [control=['if'], data=[]] return None
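The five-stage precedence condensed into a standalone cascade; every callable here is an illustrative stub, not the Optimizely SDK.

def decide(experiment, user_id, attributes,
           is_running, config_forced, whitelist, stored, in_audience, bucket):
    # Each stage either short-circuits with an answer or falls through to the next.
    if not is_running(experiment):
        return None
    for lookup in (config_forced, whitelist, stored):
        variation = lookup(experiment, user_id)
        if variation:
            return variation
    if not in_audience(experiment, attributes):
        return None
    return bucket(experiment, user_id)

print(decide('exp', 'u1', {},
             is_running=lambda e: True,
             config_forced=lambda e, u: None,
             whitelist=lambda e, u: None,
             stored=lambda e, u: 'variation_b',
             in_audience=lambda e, a: True,
             bucket=lambda e, u: 'variation_a'))
# -> 'variation_b': the stored decision wins before audience checks and bucketing run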
def delete(self, **kwds): """ Endpoint: /album/<id>/delete.json Deletes this album. Returns True if successful. Raises a TroveboxError if not. """ result = self._client.album.delete(self, **kwds) self._delete_fields() return result
def function[delete, parameter[self]]: constant[ Endpoint: /album/<id>/delete.json Deletes this album. Returns True if successful. Raises a TroveboxError if not. ] variable[result] assign[=] call[name[self]._client.album.delete, parameter[name[self]]] call[name[self]._delete_fields, parameter[]] return[name[result]]
keyword[def] identifier[delete] ( identifier[self] ,** identifier[kwds] ): literal[string] identifier[result] = identifier[self] . identifier[_client] . identifier[album] . identifier[delete] ( identifier[self] ,** identifier[kwds] ) identifier[self] . identifier[_delete_fields] () keyword[return] identifier[result]
def delete(self, **kwds): """ Endpoint: /album/<id>/delete.json Deletes this album. Returns True if successful. Raises a TroveboxError if not. """ result = self._client.album.delete(self, **kwds) self._delete_fields() return result
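Call shape against an Album object; obtaining `album` from a Trovebox client is an assumption and left commented.

# album = client.album.list()[0]   # assumed: fetch an Album via a Trovebox client
if album.delete():                 # hits /album/<id>/delete.json
    print('Album deleted on the server; stale local fields were cleared.')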
def send(self, message, socket_):
    """
    Sends a message (dict) to the socket. Message consists of an 8-byte len header followed by
    a msgpack-numpy encoded dict.

    Args:
        message: The message dict (e.g. {"cmd": "reset"})
        socket_: The python socket object to use.
    """
    if not socket_:
        raise TensorForceError("No socket given in call to `send`!")
    elif not isinstance(message, dict):
        raise TensorForceError("Message to be sent must be a dict!")
    message = msgpack.packb(message)
    len_ = len(message)
    # prepend 8-byte len field to all our messages
    socket_.send(bytes("{:08d}".format(len_), encoding="ascii") + message)
def function[send, parameter[self, message, socket_]]: constant[ Sends a message (dict) to the socket. Message consists of a 8-byte len header followed by a msgpack-numpy encoded dict. Args: message: The message dict (e.g. {"cmd": "reset"}) socket_: The python socket object to use. ] if <ast.UnaryOp object at 0x7da1b002a350> begin[:] <ast.Raise object at 0x7da1b0028e80> variable[message] assign[=] call[name[msgpack].packb, parameter[name[message]]] variable[len_] assign[=] call[name[len], parameter[name[message]]] call[name[socket_].send, parameter[binary_operation[call[name[bytes], parameter[call[constant[{:08d}].format, parameter[name[len_]]]]] + name[message]]]]
keyword[def] identifier[send] ( identifier[self] , identifier[message] , identifier[socket_] ): literal[string] keyword[if] keyword[not] identifier[socket_] : keyword[raise] identifier[TensorForceError] ( literal[string] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[message] , identifier[dict] ): keyword[raise] identifier[TensorForceError] ( literal[string] ) identifier[message] = identifier[msgpack] . identifier[packb] ( identifier[message] ) identifier[len_] = identifier[len] ( identifier[message] ) identifier[socket_] . identifier[send] ( identifier[bytes] ( literal[string] . identifier[format] ( identifier[len_] ), identifier[encoding] = literal[string] )+ identifier[message] )
def send(self, message, socket_): """ Sends a message (dict) to the socket. Message consists of a 8-byte len header followed by a msgpack-numpy encoded dict. Args: message: The message dict (e.g. {"cmd": "reset"}) socket_: The python socket object to use. """ if not socket_: raise TensorForceError('No socket given in call to `send`!') # depends on [control=['if'], data=[]] elif not isinstance(message, dict): raise TensorForceError('Message to be sent must be a dict!') # depends on [control=['if'], data=[]] message = msgpack.packb(message) len_ = len(message) # prepend 8-byte len field to all our messages socket_.send(bytes('{:08d}'.format(len_), encoding='ascii') + message)
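The wire framing, an 8-digit ASCII length header followed by the msgpack body, demonstrated without a socket; plain msgpack suffices for this dict, while the original pairs it with msgpack-numpy for array payloads.

import msgpack

message = msgpack.packb({"cmd": "reset"})
frame = bytes("{:08d}".format(len(message)), encoding="ascii") + message

# A receiver reverses the framing: read 8 header bytes, then that many body bytes.
length = int(frame[:8])
payload = msgpack.unpackb(frame[8:8 + length], raw=False)
print(length, payload)  # 11 {'cmd': 'reset'}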
def get_i2c_bus_numbers(glober=glob.glob):
    """Search all the available I2C devices in the system"""
    res = []
    for device in glober("/dev/i2c-*"):
        # Capture the whole one- or two-digit bus number in a single group.
        r = re.match(r"/dev/i2c-(\d{1,2})", device)
        res.append(int(r.group(1)))
    return res
def function[get_i2c_bus_numbers, parameter[glober]]: constant[Search all the available I2C devices in the system] variable[res] assign[=] list[[]] for taget[name[device]] in starred[call[name[glober], parameter[constant[/dev/i2c-*]]]] begin[:] variable[r] assign[=] call[name[re].match, parameter[constant[/dev/i2c-([\d]){1,2}], name[device]]] call[name[res].append, parameter[call[name[int], parameter[call[name[r].group, parameter[constant[1]]]]]]] return[name[res]]
keyword[def] identifier[get_i2c_bus_numbers] ( identifier[glober] = identifier[glob] . identifier[glob] ): literal[string] identifier[res] =[] keyword[for] identifier[device] keyword[in] identifier[glober] ( literal[string] ): identifier[r] = identifier[re] . identifier[match] ( literal[string] , identifier[device] ) identifier[res] . identifier[append] ( identifier[int] ( identifier[r] . identifier[group] ( literal[int] ))) keyword[return] identifier[res]
def get_i2c_bus_numbers(glober=glob.glob): """Search all the available I2C devices in the system""" res = [] for device in glober('/dev/i2c-*'): r = re.match('/dev/i2c-([\\d]){1,2}', device) res.append(int(r.group(1))) # depends on [control=['for'], data=['device']] return res
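The injectable glober makes the scan testable without hardware; note the two-digit bus, which the corrected regex captures whole:

fake_glob = lambda pattern: ['/dev/i2c-0', '/dev/i2c-1', '/dev/i2c-10']
print(get_i2c_bus_numbers(fake_glob))  # [0, 1, 10]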
def freeze(plugins_directory):
    '''
    Parameters
    ----------
    plugins_directory : str
        Path to MicroDrop user plugins directory.

    Returns
    -------
    list
        List of package strings corresponding to installed plugin versions.
    '''
    # Check existing version (if any).
    package_versions = []
    for plugin_path_i in plugins_directory.dirs():
        try:
            # safe_load: properties.yml is plain metadata, so avoid the unsafe default loader.
            plugin_metadata = yaml.safe_load(plugin_path_i
                                             .joinpath('properties.yml').bytes())
            if plugin_path_i.name != plugin_metadata['package_name']:
                continue
            package_versions.append((plugin_metadata['package_name'],
                                     plugin_metadata['version']))
        except Exception:
            continue
    return ['%s==%s' % v for v in package_versions]
def function[freeze, parameter[plugins_directory]]: constant[ Parameters ---------- plugins_directory : str Path to MicroDrop user plugins directory. Returns ------- list List of package strings corresponding to installed plugin versions. ] variable[package_versions] assign[=] list[[]] for taget[name[plugin_path_i]] in starred[call[name[plugins_directory].dirs, parameter[]]] begin[:] <ast.Try object at 0x7da2054a6350> return[<ast.ListComp object at 0x7da1b149f220>]
keyword[def] identifier[freeze] ( identifier[plugins_directory] ): literal[string] identifier[package_versions] =[] keyword[for] identifier[plugin_path_i] keyword[in] identifier[plugins_directory] . identifier[dirs] (): keyword[try] : identifier[plugin_metadata] = identifier[yaml] . identifier[load] ( identifier[plugin_path_i] . identifier[joinpath] ( literal[string] ). identifier[bytes] ()) keyword[if] identifier[plugin_path_i] . identifier[name] != identifier[plugin_metadata] [ literal[string] ]: keyword[continue] identifier[package_versions] . identifier[append] (( identifier[plugin_metadata] [ literal[string] ], identifier[plugin_metadata] [ literal[string] ])) keyword[except] : keyword[continue] keyword[return] [ literal[string] % identifier[v] keyword[for] identifier[v] keyword[in] identifier[package_versions] ]
def freeze(plugins_directory): """ Parameters ---------- plugins_directory : str Path to MicroDrop user plugins directory. Returns ------- list List of package strings corresponding to installed plugin versions. """ # Check existing version (if any). package_versions = [] for plugin_path_i in plugins_directory.dirs(): try: plugin_metadata = yaml.load(plugin_path_i.joinpath('properties.yml').bytes()) if plugin_path_i.name != plugin_metadata['package_name']: continue # depends on [control=['if'], data=[]] package_versions.append((plugin_metadata['package_name'], plugin_metadata['version'])) # depends on [control=['try'], data=[]] except: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['plugin_path_i']] return ['%s==%s' % v for v in package_versions]
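The return value uses pip-requirements syntax; the final formatting step standalone (the plugin names are made-up examples):

package_versions = [('dmf_control_board_plugin', '2.3'),
                    ('droplet_planning_plugin', '0.1.1')]
print(['%s==%s' % v for v in package_versions])
# ['dmf_control_board_plugin==2.3', 'droplet_planning_plugin==0.1.1']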
def _query(self, path: str, method: str, data: Dict[str, Any] = None,
           expected_status: int = 200) -> Union[List[Dict[str, Any]], Dict[str, Any], None]:
    """Make an HTTP request

    Args:
        path: the URI path (not including the base url, start with the first uri segment, like 'users/...')
        method: the HTTP method to use (GET, POST, PATCH, ...)
        data: the data to send as JSON data
        expected_status: expected HTTP status; other statuses received will raise an Exception

    Returns:
        Data from the endpoint's response
    """
    url = Pycord.url_base + path
    self.logger.debug(f'Making {method} request to "{url}"')
    if method == 'GET':
        r = requests.get(url, headers=self._build_headers())
    elif method == 'POST':
        # Keep the POST response; a follow-up GET here would clobber it.
        r = requests.post(url, headers=self._build_headers(), json=data)
    elif method == 'PATCH':
        r = requests.patch(url, headers=self._build_headers(), json=data)
    else:
        raise ValueError(f'Unknown HTTP method {method}')
    self.logger.debug(f'{method} response from "{url}" was "{r.status_code}"')
    if r.status_code != expected_status:
        raise ValueError(f'Non-{expected_status} {method} response from Discord API ({r.status_code}): {r.text}')
    if expected_status == 200:
        return r.json()
    return None
def function[_query, parameter[self, path, method, data, expected_status]]: constant[Make an HTTP request Args: path: the URI path (not including the base url, start with the first uri segment, like 'users/...') method: the HTTP method to use (GET, POST, PATCH, ...) data: the data to send as JSON data expected_status: expected HTTP status; other statuses received will raise an Exception Returns: Data from the endpoint's response ] variable[url] assign[=] binary_operation[name[Pycord].url_base + name[path]] call[name[self].logger.debug, parameter[<ast.JoinedStr object at 0x7da18eb57730>]] if compare[name[method] equal[==] constant[GET]] begin[:] variable[r] assign[=] call[name[requests].get, parameter[name[url]]] call[name[self].logger.debug, parameter[<ast.JoinedStr object at 0x7da207f024a0>]] if compare[name[r].status_code not_equal[!=] name[expected_status]] begin[:] <ast.Raise object at 0x7da18fe929e0> if compare[name[expected_status] equal[==] constant[200]] begin[:] return[call[name[r].json, parameter[]]] return[constant[None]]
keyword[def] identifier[_query] ( identifier[self] , identifier[path] : identifier[str] , identifier[method] : identifier[str] , identifier[data] : identifier[Dict] [ identifier[str] , identifier[Any] ]= keyword[None] , identifier[expected_status] : identifier[int] = literal[int] )-> identifier[Union] [ identifier[List] [ identifier[Dict] [ identifier[str] , identifier[Any] ]], identifier[Dict] [ identifier[str] , identifier[Any] ], keyword[None] ]: literal[string] identifier[url] = identifier[Pycord] . identifier[url_base] + identifier[path] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) keyword[if] identifier[method] == literal[string] : identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[self] . identifier[_build_headers] ()) keyword[elif] identifier[method] == literal[string] : identifier[r] = identifier[requests] . identifier[post] ( identifier[url] , identifier[headers] = identifier[self] . identifier[_build_headers] (), identifier[json] = identifier[data] ) identifier[r] = identifier[requests] . identifier[get] ( identifier[url] , identifier[headers] = identifier[self] . identifier[_build_headers] ()) keyword[elif] identifier[method] == literal[string] : identifier[r] = identifier[requests] . identifier[patch] ( identifier[url] , identifier[headers] = identifier[self] . identifier[_build_headers] (), identifier[json] = identifier[data] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[logger] . identifier[debug] ( literal[string] ) keyword[if] identifier[r] . identifier[status_code] != identifier[expected_status] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[expected_status] == literal[int] : keyword[return] identifier[r] . identifier[json] () keyword[return] keyword[None]
def _query(self, path: str, method: str, data: Dict[str, Any]=None, expected_status: int=200) -> Union[List[Dict[str, Any]], Dict[str, Any], None]: """Make an HTTP request Args: path: the URI path (not including the base url, start with the first uri segment, like 'users/...') method: the HTTP method to use (GET, POST, PATCH, ...) data: the data to send as JSON data expected_status: expected HTTP status; other statuses received will raise an Exception Returns: Data from the endpoint's response """ url = Pycord.url_base + path self.logger.debug(f'Making {method} request to "{url}"') if method == 'GET': r = requests.get(url, headers=self._build_headers()) # depends on [control=['if'], data=[]] elif method == 'POST': r = requests.post(url, headers=self._build_headers(), json=data) r = requests.get(url, headers=self._build_headers()) # depends on [control=['if'], data=[]] elif method == 'PATCH': r = requests.patch(url, headers=self._build_headers(), json=data) # depends on [control=['if'], data=[]] else: raise ValueError(f'Unknown HTTP method {method}') self.logger.debug(f'{method} response from "{url}" was "{r.status_code}"') if r.status_code != expected_status: raise ValueError(f'Non-{expected_status} {method} response from Discord API ({r.status_code}): {r.text}') # depends on [control=['if'], data=['expected_status']] if expected_status == 200: return r.json() # depends on [control=['if'], data=[]] return None
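Example call against the wrapper above; `bot` (a configured Pycord instance) is an assumption, and users/@me is the standard Discord self-lookup endpoint.

def get_self(bot):
    # GET users/@me returns the bot's own user object as a parsed dict,
    # or raises ValueError on any non-200 status.
    return bot._query('users/@me', 'GET')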
def qteToBeKilled(self): """ Remove all selections and install the original lexer. """ self.qteWidget.SCISetStylingEx(0, 0, self.styleOrig) self.qteWidget.qteSetLexer(self.originalLexer)
def function[qteToBeKilled, parameter[self]]: constant[ Remove all selections and install the original lexer. ] call[name[self].qteWidget.SCISetStylingEx, parameter[constant[0], constant[0], name[self].styleOrig]] call[name[self].qteWidget.qteSetLexer, parameter[name[self].originalLexer]]
keyword[def] identifier[qteToBeKilled] ( identifier[self] ): literal[string] identifier[self] . identifier[qteWidget] . identifier[SCISetStylingEx] ( literal[int] , literal[int] , identifier[self] . identifier[styleOrig] ) identifier[self] . identifier[qteWidget] . identifier[qteSetLexer] ( identifier[self] . identifier[originalLexer] )
def qteToBeKilled(self): """ Remove all selections and install the original lexer. """ self.qteWidget.SCISetStylingEx(0, 0, self.styleOrig) self.qteWidget.qteSetLexer(self.originalLexer)
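The hook above restores state that was saved when the mode was installed. The same save/restore contract as a standalone context manager; this is a generic stand-in with no QScintilla dependency.

from contextlib import contextmanager
from types import SimpleNamespace

@contextmanager
def temporary_lexer(widget, lexer):
    # Save the current lexer, install the temporary one, always restore on exit.
    original = widget.lexer
    widget.lexer = lexer
    try:
        yield widget
    finally:
        widget.lexer = original

w = SimpleNamespace(lexer='python')
with temporary_lexer(w, 'yaml'):
    print(w.lexer)  # yaml
print(w.lexer)      # python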
def validate_list_of_strings_param(param_name, param_argument):
    """Validate that an argument is a list of strings.

    :param param_name: The name of the parameter being validated. Used in any resulting exception messages.
    :type param_name: str | unicode
    :param param_argument: The argument to validate. None is treated as an empty list, and a
        comma-separated string is split into a list before validation.
    :type param_argument: list | str | None
    :return: None if the argument is valid.
    :raises exceptions.ParamValidationError: If the argument is not a list of strings.
    """
    if param_argument is None:
        param_argument = []

    if isinstance(param_argument, str):
        param_argument = param_argument.split(',')

    if not isinstance(param_argument, list) or not all([isinstance(p, str) for p in param_argument]):
        error_msg = 'unsupported {param} argument provided "{arg}" ({arg_type}), required type: List[str]'
        raise exceptions.ParamValidationError(error_msg.format(
            param=param_name,
            arg=param_argument,
            arg_type=type(param_argument),
        ))
def function[validate_list_of_strings_param, parameter[param_name, param_argument]]: constant[Validate that an argument is a list of strings. :param param_name: The name of the parameter being validated. Used in any resulting exception messages. :type param_name: str | unicode :param param_argument: The argument to validate. :type param_argument: list :return: True if the argument is validated, False otherwise. :rtype: bool ] if compare[name[param_argument] is constant[None]] begin[:] variable[param_argument] assign[=] list[[]] if call[name[isinstance], parameter[name[param_argument], name[str]]] begin[:] variable[param_argument] assign[=] call[name[param_argument].split, parameter[constant[,]]] if <ast.BoolOp object at 0x7da20e963b80> begin[:] variable[error_msg] assign[=] constant[unsupported {param} argument provided "{arg}" ({arg_type}), required type: List[str]] <ast.Raise object at 0x7da20e74bc70>
keyword[def] identifier[validate_list_of_strings_param] ( identifier[param_name] , identifier[param_argument] ): literal[string] keyword[if] identifier[param_argument] keyword[is] keyword[None] : identifier[param_argument] =[] keyword[if] identifier[isinstance] ( identifier[param_argument] , identifier[str] ): identifier[param_argument] = identifier[param_argument] . identifier[split] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[param_argument] , identifier[list] ) keyword[or] keyword[not] identifier[all] ([ identifier[isinstance] ( identifier[p] , identifier[str] ) keyword[for] identifier[p] keyword[in] identifier[param_argument] ]): identifier[error_msg] = literal[string] keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] ( identifier[param] = identifier[param_name] , identifier[arg] = identifier[param_argument] , identifier[arg_type] = identifier[type] ( identifier[param_argument] ), ))
def validate_list_of_strings_param(param_name, param_argument):
    """Validate that an argument is a list of strings.

    :param param_name: The name of the parameter being validated. Used in any resulting exception
        messages.
    :type param_name: str | unicode
    :param param_argument: The argument to validate. A comma-separated string or None is also
        accepted: the string is split on commas and None is treated as an empty list.
    :type param_argument: list | str | None
    :raises exceptions.ParamValidationError: if the argument is not a list of strings.
    """
    if param_argument is None:
        param_argument = [] # depends on [control=['if'], data=['param_argument']]
    if isinstance(param_argument, str):
        param_argument = param_argument.split(',') # depends on [control=['if'], data=[]]
    if not isinstance(param_argument, list) or not all([isinstance(p, str) for p in param_argument]):
        error_msg = 'unsupported {param} argument provided "{arg}" ({arg_type}), required type: List[str]'
        raise exceptions.ParamValidationError(error_msg.format(param=param_name, arg=param_argument, arg_type=type(param_argument))) # depends on [control=['if'], data=[]]
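A usage sketch for the validator, assuming nothing beyond what the function itself shows: it passes lists of strings, splits comma-separated strings, coerces None to an empty list, and raises on everything else. ParamValidationError is a local stand-in for the exceptions module above, and the body is restated against it so the examples below actually run.

# Hypothetical stand-in for exceptions.ParamValidationError.
class ParamValidationError(ValueError):
    pass


def validate_list_of_strings_param(param_name, param_argument):
    # Condensed restatement of the validator above, using the stand-in.
    if param_argument is None:
        param_argument = []
    if isinstance(param_argument, str):
        param_argument = param_argument.split(',')
    if not isinstance(param_argument, list) or not all(
            isinstance(p, str) for p in param_argument):
        raise ParamValidationError(
            'unsupported {param} argument provided "{arg}" ({arg_type}), '
            'required type: List[str]'.format(
                param=param_name, arg=param_argument,
                arg_type=type(param_argument)))


validate_list_of_strings_param('groups', 'admin,dev')  # str: split on commas
validate_list_of_strings_param('groups', ['admin'])    # list of str: passes
validate_list_of_strings_param('groups', None)         # None: coerced to []

try:
    validate_list_of_strings_param('groups', [1, 2])   # ints are not str
except ParamValidationError as err:
    print(err)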
def user_view_task(self):
    """View the task that is selected

    :returns: None
    :rtype: None
    :raises: None
    """
    if not self.cur_user:
        return
    i = self.user_task_treev.currentIndex()
    item = i.internalPointer()
    if item:
        task = item.internal_data()
        if isinstance(task, djadapter.models.Task):
            self.view_task(task)
def function[user_view_task, parameter[self]]: constant[View the task that is selected :returns: None :rtype: None :raises: None ] if <ast.UnaryOp object at 0x7da1b16ab820> begin[:] return[None] variable[i] assign[=] call[name[self].user_task_treev.currentIndex, parameter[]] variable[item] assign[=] call[name[i].internalPointer, parameter[]] if name[item] begin[:] variable[task] assign[=] call[name[item].internal_data, parameter[]] if call[name[isinstance], parameter[name[task], name[djadapter].models.Task]] begin[:] call[name[self].view_task, parameter[name[task]]]
keyword[def] identifier[user_view_task] ( identifier[self] ): literal[string]
    keyword[if] keyword[not] identifier[self] . identifier[cur_user] :
        keyword[return]
    identifier[i] = identifier[self] . identifier[user_task_treev] . identifier[currentIndex] ()
    identifier[item] = identifier[i] . identifier[internalPointer] ()
    keyword[if] identifier[item] :
        identifier[task] = identifier[item] . identifier[internal_data] ()
        keyword[if] identifier[isinstance] ( identifier[task] , identifier[djadapter] . identifier[models] . identifier[Task] ):
            identifier[self] . identifier[view_task] ( identifier[task] )
def user_view_task(self): """View the task that is selected :returns: None :rtype: None :raises: None """ if not self.cur_user: return # depends on [control=['if'], data=[]] i = self.user_task_treev.currentIndex() item = i.internalPointer() if item: task = item.internal_data() if isinstance(task, djadapter.models.Task): self.view_task(task) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
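The guard chain in user_view_task — a current index, a non-None internalPointer, and a payload that is actually a Task — is the standard pattern for reading a selection out of a QAbstractItemModel tree. Below is a widget-free sketch of the same chain; TreeItem and Task are hypothetical stand-ins for the model's item type and djadapter.models.Task.

class Task:
    """Hypothetical stand-in for djadapter.models.Task."""


class TreeItem:
    """Hypothetical stand-in for the tree model's item type."""
    def __init__(self, payload):
        self._payload = payload

    def internal_data(self):
        return self._payload


def selected_task(item):
    # Mirrors the guards above: internalPointer() on an invalid index
    # yields None, and rows whose payload is not a Task (e.g. grouping
    # nodes) are skipped rather than viewed.
    if item is None:
        return None
    data = item.internal_data()
    return data if isinstance(data, Task) else None


print(selected_task(TreeItem(Task())))     # a Task -> viewable
print(selected_task(TreeItem('heading')))  # grouping row -> None
print(selected_task(None))                 # nothing selected -> None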
def _fromData(cls, header, tflags, data): """Construct this ID3 frame from raw string data. Raises: ID3JunkFrameError in case parsing failed NotImplementedError in case parsing isn't implemented ID3EncryptionUnsupportedError in case the frame is encrypted. """ if header.version >= header._V24: if tflags & (Frame.FLAG24_COMPRESS | Frame.FLAG24_DATALEN): # The data length int is syncsafe in 2.4 (but not 2.3). # However, we don't actually need the data length int, # except to work around a QL 0.12 bug, and in that case # all we need are the raw bytes. datalen_bytes = data[:4] data = data[4:] if tflags & Frame.FLAG24_UNSYNCH or header.f_unsynch: try: data = unsynch.decode(data) except ValueError: # Some things write synch-unsafe data with either the frame # or global unsynch flag set. Try to load them as is. # https://github.com/quodlibet/mutagen/issues/210 # https://github.com/quodlibet/mutagen/issues/223 pass if tflags & Frame.FLAG24_ENCRYPT: raise ID3EncryptionUnsupportedError if tflags & Frame.FLAG24_COMPRESS: try: data = zlib.decompress(data) except zlib.error: # the initial mutagen that went out with QL 0.12 did not # write the 4 bytes of uncompressed size. Compensate. data = datalen_bytes + data try: data = zlib.decompress(data) except zlib.error as err: raise ID3JunkFrameError( 'zlib: %s: %r' % (err, data)) elif header.version >= header._V23: if tflags & Frame.FLAG23_COMPRESS: usize, = unpack('>L', data[:4]) data = data[4:] if tflags & Frame.FLAG23_ENCRYPT: raise ID3EncryptionUnsupportedError if tflags & Frame.FLAG23_COMPRESS: try: data = zlib.decompress(data) except zlib.error as err: raise ID3JunkFrameError('zlib: %s: %r' % (err, data)) frame = cls() frame._readData(header, data) return frame
def function[_fromData, parameter[cls, header, tflags, data]]: constant[Construct this ID3 frame from raw string data. Raises: ID3JunkFrameError in case parsing failed NotImplementedError in case parsing isn't implemented ID3EncryptionUnsupportedError in case the frame is encrypted. ] if compare[name[header].version greater_or_equal[>=] name[header]._V24] begin[:] if binary_operation[name[tflags] <ast.BitAnd object at 0x7da2590d6b60> binary_operation[name[Frame].FLAG24_COMPRESS <ast.BitOr object at 0x7da2590d6aa0> name[Frame].FLAG24_DATALEN]] begin[:] variable[datalen_bytes] assign[=] call[name[data]][<ast.Slice object at 0x7da1b204a050>] variable[data] assign[=] call[name[data]][<ast.Slice object at 0x7da1b204ba60>] if <ast.BoolOp object at 0x7da1b204ba30> begin[:] <ast.Try object at 0x7da1b204a8f0> if binary_operation[name[tflags] <ast.BitAnd object at 0x7da2590d6b60> name[Frame].FLAG24_ENCRYPT] begin[:] <ast.Raise object at 0x7da1b204b010> if binary_operation[name[tflags] <ast.BitAnd object at 0x7da2590d6b60> name[Frame].FLAG24_COMPRESS] begin[:] <ast.Try object at 0x7da1b20484f0> variable[frame] assign[=] call[name[cls], parameter[]] call[name[frame]._readData, parameter[name[header], name[data]]] return[name[frame]]
keyword[def] identifier[_fromData] ( identifier[cls] , identifier[header] , identifier[tflags] , identifier[data] ): literal[string] keyword[if] identifier[header] . identifier[version] >= identifier[header] . identifier[_V24] : keyword[if] identifier[tflags] &( identifier[Frame] . identifier[FLAG24_COMPRESS] | identifier[Frame] . identifier[FLAG24_DATALEN] ): identifier[datalen_bytes] = identifier[data] [: literal[int] ] identifier[data] = identifier[data] [ literal[int] :] keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG24_UNSYNCH] keyword[or] identifier[header] . identifier[f_unsynch] : keyword[try] : identifier[data] = identifier[unsynch] . identifier[decode] ( identifier[data] ) keyword[except] identifier[ValueError] : keyword[pass] keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG24_ENCRYPT] : keyword[raise] identifier[ID3EncryptionUnsupportedError] keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG24_COMPRESS] : keyword[try] : identifier[data] = identifier[zlib] . identifier[decompress] ( identifier[data] ) keyword[except] identifier[zlib] . identifier[error] : identifier[data] = identifier[datalen_bytes] + identifier[data] keyword[try] : identifier[data] = identifier[zlib] . identifier[decompress] ( identifier[data] ) keyword[except] identifier[zlib] . identifier[error] keyword[as] identifier[err] : keyword[raise] identifier[ID3JunkFrameError] ( literal[string] %( identifier[err] , identifier[data] )) keyword[elif] identifier[header] . identifier[version] >= identifier[header] . identifier[_V23] : keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG23_COMPRESS] : identifier[usize] ,= identifier[unpack] ( literal[string] , identifier[data] [: literal[int] ]) identifier[data] = identifier[data] [ literal[int] :] keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG23_ENCRYPT] : keyword[raise] identifier[ID3EncryptionUnsupportedError] keyword[if] identifier[tflags] & identifier[Frame] . identifier[FLAG23_COMPRESS] : keyword[try] : identifier[data] = identifier[zlib] . identifier[decompress] ( identifier[data] ) keyword[except] identifier[zlib] . identifier[error] keyword[as] identifier[err] : keyword[raise] identifier[ID3JunkFrameError] ( literal[string] %( identifier[err] , identifier[data] )) identifier[frame] = identifier[cls] () identifier[frame] . identifier[_readData] ( identifier[header] , identifier[data] ) keyword[return] identifier[frame]
def _fromData(cls, header, tflags, data): """Construct this ID3 frame from raw string data. Raises: ID3JunkFrameError in case parsing failed NotImplementedError in case parsing isn't implemented ID3EncryptionUnsupportedError in case the frame is encrypted. """ if header.version >= header._V24: if tflags & (Frame.FLAG24_COMPRESS | Frame.FLAG24_DATALEN): # The data length int is syncsafe in 2.4 (but not 2.3). # However, we don't actually need the data length int, # except to work around a QL 0.12 bug, and in that case # all we need are the raw bytes. datalen_bytes = data[:4] data = data[4:] # depends on [control=['if'], data=[]] if tflags & Frame.FLAG24_UNSYNCH or header.f_unsynch: try: data = unsynch.decode(data) # depends on [control=['try'], data=[]] except ValueError: # Some things write synch-unsafe data with either the frame # or global unsynch flag set. Try to load them as is. # https://github.com/quodlibet/mutagen/issues/210 # https://github.com/quodlibet/mutagen/issues/223 pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if tflags & Frame.FLAG24_ENCRYPT: raise ID3EncryptionUnsupportedError # depends on [control=['if'], data=[]] if tflags & Frame.FLAG24_COMPRESS: try: data = zlib.decompress(data) # depends on [control=['try'], data=[]] except zlib.error: # the initial mutagen that went out with QL 0.12 did not # write the 4 bytes of uncompressed size. Compensate. data = datalen_bytes + data try: data = zlib.decompress(data) # depends on [control=['try'], data=[]] except zlib.error as err: raise ID3JunkFrameError('zlib: %s: %r' % (err, data)) # depends on [control=['except'], data=['err']] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif header.version >= header._V23: if tflags & Frame.FLAG23_COMPRESS: (usize,) = unpack('>L', data[:4]) data = data[4:] # depends on [control=['if'], data=[]] if tflags & Frame.FLAG23_ENCRYPT: raise ID3EncryptionUnsupportedError # depends on [control=['if'], data=[]] if tflags & Frame.FLAG23_COMPRESS: try: data = zlib.decompress(data) # depends on [control=['try'], data=[]] except zlib.error as err: raise ID3JunkFrameError('zlib: %s: %r' % (err, data)) # depends on [control=['except'], data=['err']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] frame = cls() frame._readData(header, data) return frame
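The compression fallback in the v2.4 branch deserves a closer look: a well-formed frame carries a 4-byte data-length indicator outside the zlib stream, but frames written by the buggy mutagen that shipped with Quod Libet 0.12 omitted it, so the first four bytes of the payload are really the start of the stream. A minimal sketch of that retry logic, with fabricated frame bytes and a plain big-endian length where real v2.4 tags would use a syncsafe integer:

import zlib
from struct import pack


def decompress_v24_frame(data):
    """Decompress a v2.4 frame body whose 4-byte data-length indicator
    may have been omitted by a buggy writer."""
    datalen_bytes, body = data[:4], data[4:]
    try:
        # Well-formed case: the prefix was already split off the stream.
        return zlib.decompress(body)
    except zlib.error:
        # Buggy-writer case: those four bytes were never a prefix at
        # all; they are the start of the zlib stream itself.
        return zlib.decompress(datalen_bytes + body)


payload = zlib.compress(b'hello frame')
well_formed = pack('>L', len(b'hello frame')) + payload
assert decompress_v24_frame(well_formed) == b'hello frame'
assert decompress_v24_frame(payload) == b'hello frame'  # no prefix at all
print('both layouts recovered')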
def recvSecurityResult(self, data):
    """
    Read the security result packet.
    Used by the server to inform the client of the connection status.
    @param data: stream that contains a well-formed packet
    """
    result = UInt32Be()
    data.readType(result)
    if result == UInt32Be(1):
        log.info("Authentication failed")
        if self._version.value == ProtocolVersion.RFB003008:
            self.expectWithHeader(4, self.recvSecurityFailed)
    else:
        log.debug("Authentication OK")
        self.sendClientInit()
def function[recvSecurityResult, parameter[self, data]]:
    constant[
    Read the security result packet.
    Used by the server to inform the client of the connection status.
    @param data: stream that contains a well-formed packet
    ]
    variable[result] assign[=] call[name[UInt32Be], parameter[]]
    call[name[data].readType, parameter[name[result]]]
    if compare[name[result] equal[==] call[name[UInt32Be], parameter[constant[1]]]] begin[:]
        call[name[log].info, parameter[constant[Authentication failed]]]
        if compare[name[self]._version.value equal[==] name[ProtocolVersion].RFB003008] begin[:]
            call[name[self].expectWithHeader, parameter[constant[4], name[self].recvSecurityFailed]]
keyword[def] identifier[recvSecurityResult] ( identifier[self] , identifier[data] ): literal[string] identifier[result] = identifier[UInt32Be] () identifier[data] . identifier[readType] ( identifier[result] ) keyword[if] identifier[result] == identifier[UInt32Be] ( literal[int] ): identifier[log] . identifier[info] ( literal[string] ) keyword[if] identifier[self] . identifier[_version] . identifier[value] == identifier[ProtocolVersion] . identifier[RFB003008] : identifier[self] . identifier[expectWithHeader] ( literal[int] , identifier[self] . identifier[recvSecurityFailed] ) keyword[else] : identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[sendClientInit] ()
def recvSecurityResult(self, data):
    """
    Read the security result packet.
    Used by the server to inform the client of the connection status.
    @param data: stream that contains a well-formed packet
    """
    result = UInt32Be()
    data.readType(result)
    if result == UInt32Be(1):
        log.info('Authentication failed')
        if self._version.value == ProtocolVersion.RFB003008:
            self.expectWithHeader(4, self.recvSecurityFailed) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
    else:
        log.debug('Authentication OK')
        self.sendClientInit()
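UInt32Be here is simply a big-endian unsigned 32-bit read. A plain-struct sketch of the same handshake step follows; the stream contents and the RFB003008 stand-in are fabricated, and the reason-string read reflects the RFB 3.8 behaviour that the expectWithHeader(4, ...) call above appears to be waiting for (a u32 length followed by the failure reason):

import io
import struct

RFB003008 = (3, 8)  # hypothetical stand-in for ProtocolVersion.RFB003008


def read_security_result(stream, version):
    # SecurityResult is one big-endian u32: 0 means OK, 1 means failed.
    (result,) = struct.unpack('>I', stream.read(4))
    if result == 1:
        if version == RFB003008:
            # RFB 3.8 follows a failure with a length-prefixed reason.
            (n,) = struct.unpack('>I', stream.read(4))
            return 'failed: ' + stream.read(n).decode('latin-1')
        return 'failed'
    return 'ok'


ok = struct.pack('>I', 0)
failed = struct.pack('>I', 1) + struct.pack('>I', 3) + b'bad'
print(read_security_result(io.BytesIO(ok), RFB003008))      # ok
print(read_security_result(io.BytesIO(failed), RFB003008))  # failed: bad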