code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def _sample_item(self, **kwargs): """Sample an item from the pool according to the instrumental distribution """ t = self.t_ # Update instrumental distribution self._calc_inst_pmf() if self.record_inst_hist: inst_pmf = self._inst_pmf[:,t] else: inst_pmf = self._inst_pmf # Sample label and record weight loc, stratum_idx = self.strata.sample(pmf = inst_pmf) weight = self.strata.weights_[stratum_idx]/inst_pmf[stratum_idx] return loc, weight, {'stratum': stratum_idx}
def function[_sample_item, parameter[self]]: constant[Sample an item from the pool according to the instrumental distribution ] variable[t] assign[=] name[self].t_ call[name[self]._calc_inst_pmf, parameter[]] if name[self].record_inst_hist begin[:] variable[inst_pmf] assign[=] call[name[self]._inst_pmf][tuple[[<ast.Slice object at 0x7da204963460>, <ast.Name object at 0x7da204963ee0>]]] <ast.Tuple object at 0x7da2049635b0> assign[=] call[name[self].strata.sample, parameter[]] variable[weight] assign[=] binary_operation[call[name[self].strata.weights_][name[stratum_idx]] / call[name[inst_pmf]][name[stratum_idx]]] return[tuple[[<ast.Name object at 0x7da204962710>, <ast.Name object at 0x7da204960f10>, <ast.Dict object at 0x7da204960040>]]]
keyword[def] identifier[_sample_item] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[t] = identifier[self] . identifier[t_] identifier[self] . identifier[_calc_inst_pmf] () keyword[if] identifier[self] . identifier[record_inst_hist] : identifier[inst_pmf] = identifier[self] . identifier[_inst_pmf] [:, identifier[t] ] keyword[else] : identifier[inst_pmf] = identifier[self] . identifier[_inst_pmf] identifier[loc] , identifier[stratum_idx] = identifier[self] . identifier[strata] . identifier[sample] ( identifier[pmf] = identifier[inst_pmf] ) identifier[weight] = identifier[self] . identifier[strata] . identifier[weights_] [ identifier[stratum_idx] ]/ identifier[inst_pmf] [ identifier[stratum_idx] ] keyword[return] identifier[loc] , identifier[weight] ,{ literal[string] : identifier[stratum_idx] }
def _sample_item(self, **kwargs): """Sample an item from the pool according to the instrumental distribution """ t = self.t_ # Update instrumental distribution self._calc_inst_pmf() if self.record_inst_hist: inst_pmf = self._inst_pmf[:, t] # depends on [control=['if'], data=[]] else: inst_pmf = self._inst_pmf # Sample label and record weight (loc, stratum_idx) = self.strata.sample(pmf=inst_pmf) weight = self.strata.weights_[stratum_idx] / inst_pmf[stratum_idx] return (loc, weight, {'stratum': stratum_idx})
def setPluginPath(cls, pluginpath):
    """
    Sets the plugin path for this class to the given path.  The inputted
    pluginpath value can either be a list of strings, or a string
    containing paths separated by the OS specific path separator (':' on
    Mac & Linux, ';' on Windows)

    :param      pluginpath | [<str>, ..] || <str>
    """
    # Clear the (name-mangled) private path cache on the class, then
    # delegate to addPluginPath to register the new path(s).
    mangled_attr = '_%s__pluginpath' % cls.__name__
    setattr(cls, mangled_attr, None)
    cls.addPluginPath(pluginpath)
def function[setPluginPath, parameter[cls, pluginpath]]: constant[ Sets the plugin path for this class to the given path. The inputted pluginpath value can either be a list of strings, or a string containing paths separated by the OS specific path separator (':' on Mac & Linux, ';' on Windows) :param pluginpath | [<str>, ..] || <str> ] call[name[setattr], parameter[name[cls], binary_operation[constant[_%s__pluginpath] <ast.Mod object at 0x7da2590d6920> name[cls].__name__], constant[None]]] call[name[cls].addPluginPath, parameter[name[pluginpath]]]
keyword[def] identifier[setPluginPath] ( identifier[cls] , identifier[pluginpath] ): literal[string] identifier[setattr] ( identifier[cls] , literal[string] % identifier[cls] . identifier[__name__] , keyword[None] ) identifier[cls] . identifier[addPluginPath] ( identifier[pluginpath] )
def setPluginPath(cls, pluginpath): """ Sets the plugin path for this class to the given path. The inputted pluginpath value can either be a list of strings, or a string containing paths separated by the OS specific path separator (':' on Mac & Linux, ';' on Windows) :param pluginpath | [<str>, ..] || <str> """ setattr(cls, '_%s__pluginpath' % cls.__name__, None) cls.addPluginPath(pluginpath)
def _find_experiment_tag(self): """Finds the experiment associcated with the metadata.EXPERIMENT_TAG tag. Caches the experiment if it was found. Returns: The experiment or None if no such experiment is found. """ with self._experiment_from_tag_lock: if self._experiment_from_tag is None: mapping = self.multiplexer.PluginRunToTagToContent( metadata.PLUGIN_NAME) for tag_to_content in mapping.values(): if metadata.EXPERIMENT_TAG in tag_to_content: self._experiment_from_tag = metadata.parse_experiment_plugin_data( tag_to_content[metadata.EXPERIMENT_TAG]) break return self._experiment_from_tag
def function[_find_experiment_tag, parameter[self]]: constant[Finds the experiment associcated with the metadata.EXPERIMENT_TAG tag. Caches the experiment if it was found. Returns: The experiment or None if no such experiment is found. ] with name[self]._experiment_from_tag_lock begin[:] if compare[name[self]._experiment_from_tag is constant[None]] begin[:] variable[mapping] assign[=] call[name[self].multiplexer.PluginRunToTagToContent, parameter[name[metadata].PLUGIN_NAME]] for taget[name[tag_to_content]] in starred[call[name[mapping].values, parameter[]]] begin[:] if compare[name[metadata].EXPERIMENT_TAG in name[tag_to_content]] begin[:] name[self]._experiment_from_tag assign[=] call[name[metadata].parse_experiment_plugin_data, parameter[call[name[tag_to_content]][name[metadata].EXPERIMENT_TAG]]] break return[name[self]._experiment_from_tag]
keyword[def] identifier[_find_experiment_tag] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[_experiment_from_tag_lock] : keyword[if] identifier[self] . identifier[_experiment_from_tag] keyword[is] keyword[None] : identifier[mapping] = identifier[self] . identifier[multiplexer] . identifier[PluginRunToTagToContent] ( identifier[metadata] . identifier[PLUGIN_NAME] ) keyword[for] identifier[tag_to_content] keyword[in] identifier[mapping] . identifier[values] (): keyword[if] identifier[metadata] . identifier[EXPERIMENT_TAG] keyword[in] identifier[tag_to_content] : identifier[self] . identifier[_experiment_from_tag] = identifier[metadata] . identifier[parse_experiment_plugin_data] ( identifier[tag_to_content] [ identifier[metadata] . identifier[EXPERIMENT_TAG] ]) keyword[break] keyword[return] identifier[self] . identifier[_experiment_from_tag]
def _find_experiment_tag(self): """Finds the experiment associcated with the metadata.EXPERIMENT_TAG tag. Caches the experiment if it was found. Returns: The experiment or None if no such experiment is found. """ with self._experiment_from_tag_lock: if self._experiment_from_tag is None: mapping = self.multiplexer.PluginRunToTagToContent(metadata.PLUGIN_NAME) for tag_to_content in mapping.values(): if metadata.EXPERIMENT_TAG in tag_to_content: self._experiment_from_tag = metadata.parse_experiment_plugin_data(tag_to_content[metadata.EXPERIMENT_TAG]) break # depends on [control=['if'], data=['tag_to_content']] # depends on [control=['for'], data=['tag_to_content']] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=[]] return self._experiment_from_tag
def postinit(self, dest=None, values=None):
    """Do some setup after initialisation.

    :param dest: Where to print to.
    :type dest: NodeNG or None

    :param values: What to print.
    :type values: list(NodeNG) or None
    """
    # Plain attribute assignment; no validation is performed here.
    self.dest, self.values = dest, values
def function[postinit, parameter[self, dest, values]]: constant[Do some setup after initialisation. :param dest: Where to print to. :type dest: NodeNG or None :param values: What to print. :type values: list(NodeNG) or None ] name[self].dest assign[=] name[dest] name[self].values assign[=] name[values]
keyword[def] identifier[postinit] ( identifier[self] , identifier[dest] = keyword[None] , identifier[values] = keyword[None] ): literal[string] identifier[self] . identifier[dest] = identifier[dest] identifier[self] . identifier[values] = identifier[values]
def postinit(self, dest=None, values=None): """Do some setup after initialisation. :param dest: Where to print to. :type dest: NodeNG or None :param values: What to print. :type values: list(NodeNG) or None """ self.dest = dest self.values = values
def get_response_for_url(self, url):
    """
    Accepts a fully-qualified url.
    Returns an HttpResponse, passing through all headers and the status code.
    """
    # Reject anything that is not a fully-qualified URL up front.
    if not url or "//" not in url:
        raise ValueError("Missing or invalid url: %s" % url)

    target = self.BASE_URL + url
    response = self.session.get(
        target,
        headers={'X-Prerender-Token': self.token},
        allow_redirects=False,
    )
    # A 5xx from the render service is unexpected; fail loudly.
    assert response.status_code < 500
    return self.build_django_response_from_requests_response(response)
def function[get_response_for_url, parameter[self, url]]: constant[ Accepts a fully-qualified url. Returns an HttpResponse, passing through all headers and the status code. ] if <ast.BoolOp object at 0x7da1b1b14910> begin[:] <ast.Raise object at 0x7da1b1b15ea0> variable[render_url] assign[=] binary_operation[name[self].BASE_URL + name[url]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b1b16140>], [<ast.Attribute object at 0x7da1b1b176d0>]] variable[r] assign[=] call[name[self].session.get, parameter[name[render_url]]] assert[compare[name[r].status_code less[<] constant[500]]] return[call[name[self].build_django_response_from_requests_response, parameter[name[r]]]]
keyword[def] identifier[get_response_for_url] ( identifier[self] , identifier[url] ): literal[string] keyword[if] keyword[not] identifier[url] keyword[or] literal[string] keyword[not] keyword[in] identifier[url] : keyword[raise] identifier[ValueError] ( literal[string] % identifier[url] ) identifier[render_url] = identifier[self] . identifier[BASE_URL] + identifier[url] identifier[headers] ={ literal[string] : identifier[self] . identifier[token] , } identifier[r] = identifier[self] . identifier[session] . identifier[get] ( identifier[render_url] , identifier[headers] = identifier[headers] , identifier[allow_redirects] = keyword[False] ) keyword[assert] identifier[r] . identifier[status_code] < literal[int] keyword[return] identifier[self] . identifier[build_django_response_from_requests_response] ( identifier[r] )
def get_response_for_url(self, url): """ Accepts a fully-qualified url. Returns an HttpResponse, passing through all headers and the status code. """ if not url or '//' not in url: raise ValueError('Missing or invalid url: %s' % url) # depends on [control=['if'], data=[]] render_url = self.BASE_URL + url headers = {'X-Prerender-Token': self.token} r = self.session.get(render_url, headers=headers, allow_redirects=False) assert r.status_code < 500 return self.build_django_response_from_requests_response(r)
def get_fitted_lv1_prim(self, reqef, bitcount):
    """
    Return a dispatcher wrapping the best lv1 primitive for *reqef*,
    creating and caching it on first use.

        request
      r   - A C 0 1
      e  -|? ! ! ! !
      s  A|? βœ“ βœ“ 0 1   Check this logic
      u  C|? m βœ“ 0 1
      l  0|? M M 0 !
      t  1|? M M ! 1

      - = No Care
      A = arbitrary
      C = Constant
      0 = ZERO
      1 = ONE
      ! = ERROR
      ? = NO CARE RESULT
      βœ“ = Pass data directly
      m = will require reconfiguring argument and using multiple of prim
      M = Requires using multiple of several prims to satisfy requirement

    """
    cache = self._fitted_lv1_prim_cache
    # Membership test instead of truthiness: the old `if res:` check would
    # silently rebuild (and re-store) any falsy cached entry on every call.
    if reqef in cache:
        return cache[reqef]
    prim = self.get_best_lv1_prim(reqef, bitcount)
    dispatcher = PrimitiveLv1Dispatcher(self, prim, reqef)
    cache[reqef] = dispatcher
    return dispatcher
def function[get_fitted_lv1_prim, parameter[self, reqef, bitcount]]: constant[ request r - A C 0 1 e -|? ! ! ! ! s A|? βœ“ βœ“ 0 1 Check this logic u C|? m βœ“ 0 1 l 0|? M M 0 ! t 1|? M M ! 1 - = No Care A = arbitrary C = Constant 0 = ZERO 1 = ONE ! = ERROR ? = NO CARE RESULT βœ“ = Pass data directly m = will require reconfiguring argument and using multiple of prim M = Requires using multiple of several prims to satisfy requirement ] variable[res] assign[=] call[name[self]._fitted_lv1_prim_cache.get, parameter[name[reqef]]] if name[res] begin[:] return[name[res]] variable[prim] assign[=] call[name[self].get_best_lv1_prim, parameter[name[reqef], name[bitcount]]] variable[dispatcher] assign[=] call[name[PrimitiveLv1Dispatcher], parameter[name[self], name[prim], name[reqef]]] call[name[self]._fitted_lv1_prim_cache][name[reqef]] assign[=] name[dispatcher] return[name[dispatcher]]
keyword[def] identifier[get_fitted_lv1_prim] ( identifier[self] , identifier[reqef] , identifier[bitcount] ): literal[string] identifier[res] = identifier[self] . identifier[_fitted_lv1_prim_cache] . identifier[get] ( identifier[reqef] ) keyword[if] identifier[res] : keyword[return] identifier[res] identifier[prim] = identifier[self] . identifier[get_best_lv1_prim] ( identifier[reqef] , identifier[bitcount] ) identifier[dispatcher] = identifier[PrimitiveLv1Dispatcher] ( identifier[self] , identifier[prim] , identifier[reqef] ) identifier[self] . identifier[_fitted_lv1_prim_cache] [ identifier[reqef] ]= identifier[dispatcher] keyword[return] identifier[dispatcher]
def get_fitted_lv1_prim(self, reqef, bitcount): """ request r - A C 0 1 e -|? ! ! ! ! s A|? βœ“ βœ“ 0 1 Check this logic u C|? m βœ“ 0 1 l 0|? M M 0 ! t 1|? M M ! 1 - = No Care A = arbitrary C = Constant 0 = ZERO 1 = ONE ! = ERROR ? = NO CARE RESULT βœ“ = Pass data directly m = will require reconfiguring argument and using multiple of prim M = Requires using multiple of several prims to satisfy requirement """ res = self._fitted_lv1_prim_cache.get(reqef) if res: return res # depends on [control=['if'], data=[]] prim = self.get_best_lv1_prim(reqef, bitcount) dispatcher = PrimitiveLv1Dispatcher(self, prim, reqef) self._fitted_lv1_prim_cache[reqef] = dispatcher return dispatcher
def get_oauth_token_key_name(self, provider):
    """
    Returns the token_key name for the oauth provider
    if none is configured defaults to oauth_token
    this is configured using OAUTH_PROVIDERS and token_key key.

    Falls through (returning None) when the provider is unknown.
    """
    # Linear scan is fine: provider lists are tiny.
    matches = (p for p in self.oauth_providers if p["name"] == provider)
    for entry in matches:
        return entry.get("token_key", "oauth_token")
def function[get_oauth_token_key_name, parameter[self, provider]]: constant[ Returns the token_key name for the oauth provider if none is configured defaults to oauth_token this is configured using OAUTH_PROVIDERS and token_key key. ] for taget[name[_provider]] in starred[name[self].oauth_providers] begin[:] if compare[call[name[_provider]][constant[name]] equal[==] name[provider]] begin[:] return[call[name[_provider].get, parameter[constant[token_key], constant[oauth_token]]]]
keyword[def] identifier[get_oauth_token_key_name] ( identifier[self] , identifier[provider] ): literal[string] keyword[for] identifier[_provider] keyword[in] identifier[self] . identifier[oauth_providers] : keyword[if] identifier[_provider] [ literal[string] ]== identifier[provider] : keyword[return] identifier[_provider] . identifier[get] ( literal[string] , literal[string] )
def get_oauth_token_key_name(self, provider): """ Returns the token_key name for the oauth provider if none is configured defaults to oauth_token this is configured using OAUTH_PROVIDERS and token_key key. """ for _provider in self.oauth_providers: if _provider['name'] == provider: return _provider.get('token_key', 'oauth_token') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['_provider']]
def merge(l1, l2):
    """ merge 2 lines together """
    x1, y1, x2, y2 = l1
    xx1, yy1, xx2, yy2 = l2
    # All four ways of pairing one endpoint from each line; the longest
    # pairing spans the merged segment.
    candidates = (
        (x1, y1, xx1, yy1),
        (x1, y1, xx2, yy2),
        (x2, y2, xx1, yy1),
        (x2, y2, xx2, yy2),
    )
    lengths = [length(c) for c in candidates]
    best = argmax(lengths)
    span = lengths[best]
    center = middle(candidates[best])
    # Average orientation of the two input lines.
    mean_angle = (angle(l1) + angle(l2)) * 0.5
    return fromAttr(center, mean_angle, span)
def function[merge, parameter[l1, l2]]: constant[ merge 2 lines together ] <ast.Tuple object at 0x7da20c993f40> assign[=] name[l1] <ast.Tuple object at 0x7da20c992d70> assign[=] name[l2] variable[comb] assign[=] tuple[[<ast.Tuple object at 0x7da20c993f70>, <ast.Tuple object at 0x7da20c990fd0>, <ast.Tuple object at 0x7da20c9935b0>, <ast.Tuple object at 0x7da20c993fd0>]] variable[d] assign[=] <ast.ListComp object at 0x7da20c990040> variable[i] assign[=] call[name[argmax], parameter[name[d]]] variable[dist] assign[=] call[name[d]][name[i]] variable[mid] assign[=] call[name[middle], parameter[call[name[comb]][name[i]]]] variable[a] assign[=] binary_operation[binary_operation[call[name[angle], parameter[name[l1]]] + call[name[angle], parameter[name[l2]]]] * constant[0.5]] return[call[name[fromAttr], parameter[name[mid], name[a], name[dist]]]]
keyword[def] identifier[merge] ( identifier[l1] , identifier[l2] ): literal[string] identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] = identifier[l1] identifier[xx1] , identifier[yy1] , identifier[xx2] , identifier[yy2] = identifier[l2] identifier[comb] =(( identifier[x1] , identifier[y1] , identifier[xx1] , identifier[yy1] ), ( identifier[x1] , identifier[y1] , identifier[xx2] , identifier[yy2] ), ( identifier[x2] , identifier[y2] , identifier[xx1] , identifier[yy1] ), ( identifier[x2] , identifier[y2] , identifier[xx2] , identifier[yy2] )) identifier[d] =[ identifier[length] ( identifier[c] ) keyword[for] identifier[c] keyword[in] identifier[comb] ] identifier[i] = identifier[argmax] ( identifier[d] ) identifier[dist] = identifier[d] [ identifier[i] ] identifier[mid] = identifier[middle] ( identifier[comb] [ identifier[i] ]) identifier[a] =( identifier[angle] ( identifier[l1] )+ identifier[angle] ( identifier[l2] ))* literal[int] keyword[return] identifier[fromAttr] ( identifier[mid] , identifier[a] , identifier[dist] )
def merge(l1, l2): """ merge 2 lines together """ (x1, y1, x2, y2) = l1 (xx1, yy1, xx2, yy2) = l2 comb = ((x1, y1, xx1, yy1), (x1, y1, xx2, yy2), (x2, y2, xx1, yy1), (x2, y2, xx2, yy2)) d = [length(c) for c in comb] i = argmax(d) dist = d[i] mid = middle(comb[i]) a = (angle(l1) + angle(l2)) * 0.5 return fromAttr(mid, a, dist)
def roles(self):
    """
    Roles accessor
    """
    # Every registered user implicitly carries the default 'user' role in
    # addition to any explicitly assigned ones.
    default_role = Role(
        handle='user',
        title='User role',
        description='All registered users get this role by default'
    )
    all_roles = list(self.__roles)
    all_roles.append(default_role)
    return tuple(all_roles)
def function[roles, parameter[self]]: constant[ Roles accessor ] variable[roles] assign[=] call[name[list], parameter[name[self].__roles]] variable[default_role] assign[=] call[name[Role], parameter[]] call[name[roles].append, parameter[name[default_role]]] return[call[name[tuple], parameter[name[roles]]]]
keyword[def] identifier[roles] ( identifier[self] ): literal[string] identifier[roles] = identifier[list] ( identifier[self] . identifier[__roles] ) identifier[default_role] = identifier[Role] ( identifier[handle] = literal[string] , identifier[title] = literal[string] , identifier[description] = literal[string] ) identifier[roles] . identifier[append] ( identifier[default_role] ) keyword[return] identifier[tuple] ( identifier[roles] )
def roles(self): """ Roles accessor """ roles = list(self.__roles) default_role = Role(handle='user', title='User role', description='All registered users get this role by default') roles.append(default_role) return tuple(roles)
def check_int(self, param, error_msg):
    """
    This function check if the parameter is int. If yes, the function
    returns the parameter, if not, it raises error message.

    **Args:**

    * `param` : parameter to check (int or similar)

    * `error_msg` : message used for the raised ValueError (str)

    **Returns:**

    * `param` : parameter (int)

    """
    # Exact type test (not isinstance) so bool, which subclasses int,
    # is deliberately rejected.
    if type(param) is int:
        return int(param)
    raise ValueError(error_msg)
def function[check_int, parameter[self, param, error_msg]]: constant[ This function check if the parameter is int. If yes, the function returns the parameter, if not, it raises error message. **Args:** * `param` : parameter to check (int or similar) * `error_ms` : lowest allowed value (int), or None **Returns:** * `param` : parameter (int) ] if compare[call[name[type], parameter[name[param]]] equal[==] name[int]] begin[:] return[call[name[int], parameter[name[param]]]]
keyword[def] identifier[check_int] ( identifier[self] , identifier[param] , identifier[error_msg] ): literal[string] keyword[if] identifier[type] ( identifier[param] )== identifier[int] : keyword[return] identifier[int] ( identifier[param] ) keyword[else] : keyword[raise] identifier[ValueError] ( identifier[error_msg] )
def check_int(self, param, error_msg): """ This function check if the parameter is int. If yes, the function returns the parameter, if not, it raises error message. **Args:** * `param` : parameter to check (int or similar) * `error_ms` : lowest allowed value (int), or None **Returns:** * `param` : parameter (int) """ if type(param) == int: return int(param) # depends on [control=['if'], data=['int']] else: raise ValueError(error_msg)
def start_cluster_server(self, num_gpus=1, rdma=False):
    """Convenience function to access ``TFNode.start_cluster_server`` directly from this object instance."""
    # Thin pass-through wrapper: all real work happens in
    # TFNode.start_cluster_server; this instance is forwarded as its
    # first argument.
    return TFNode.start_cluster_server(self, num_gpus, rdma)
def function[start_cluster_server, parameter[self, num_gpus, rdma]]: constant[Convenience function to access ``TFNode.start_cluster_server`` directly from this object instance.] return[call[name[TFNode].start_cluster_server, parameter[name[self], name[num_gpus], name[rdma]]]]
keyword[def] identifier[start_cluster_server] ( identifier[self] , identifier[num_gpus] = literal[int] , identifier[rdma] = keyword[False] ): literal[string] keyword[return] identifier[TFNode] . identifier[start_cluster_server] ( identifier[self] , identifier[num_gpus] , identifier[rdma] )
def start_cluster_server(self, num_gpus=1, rdma=False): """Convenience function to access ``TFNode.start_cluster_server`` directly from this object instance.""" return TFNode.start_cluster_server(self, num_gpus, rdma)
def sync(self, folders):
    """Syncs a list of folders to their associated buckets.

    folders: A list of 2-tuples in the form (folder, bucket)
    """
    # An empty list is almost certainly a caller mistake, so fail fast.
    if not folders:
        raise ValueError("No folders to sync given")
    for pair in folders:
        self.sync_folder(*pair)
def function[sync, parameter[self, folders]]: constant[Syncs a list of folders to their assicated buckets. folders: A list of 2-tuples in the form (folder, bucket) ] if <ast.UnaryOp object at 0x7da18fe91870> begin[:] <ast.Raise object at 0x7da18fe926e0> for taget[name[folder]] in starred[name[folders]] begin[:] call[name[self].sync_folder, parameter[<ast.Starred object at 0x7da2054a7130>]]
keyword[def] identifier[sync] ( identifier[self] , identifier[folders] ): literal[string] keyword[if] keyword[not] identifier[folders] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[folder] keyword[in] identifier[folders] : identifier[self] . identifier[sync_folder] (* identifier[folder] )
def sync(self, folders): """Syncs a list of folders to their assicated buckets. folders: A list of 2-tuples in the form (folder, bucket) """ if not folders: raise ValueError('No folders to sync given') # depends on [control=['if'], data=[]] for folder in folders: self.sync_folder(*folder) # depends on [control=['for'], data=['folder']]
def create_launch_configuration(name, image_id, key_name=None,
                                vpc_id=None, vpc_name=None,
                                security_groups=None, user_data=None,
                                instance_type='m1.small', kernel_id=None,
                                ramdisk_id=None, block_device_mappings=None,
                                instance_monitoring=False, spot_price=None,
                                instance_profile_name=None, ebs_optimized=False,
                                associate_public_ip_address=None,
                                volume_type=None, delete_on_termination=True,
                                iops=None, use_block_device_types=False,
                                region=None, key=None, keyid=None,
                                profile=None):
    '''
    Create a launch configuration.

    Returns True when the launch configuration was created, False when the
    AWS call ultimately failed (including after exhausting throttling
    retries).

    CLI example::

        salt myminion boto_asg.create_launch_configuration mylc image_id=ami-0b9c9f62 key_name='mykey' security_groups='["mygroup"]' instance_type='c3.2xlarge'
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    # CLI callers pass JSON strings; normalize them to Python structures.
    if isinstance(security_groups, six.string_types):
        security_groups = salt.utils.json.loads(security_groups)
    if isinstance(block_device_mappings, six.string_types):
        block_device_mappings = salt.utils.json.loads(block_device_mappings)
    _bdms = []
    if block_device_mappings:
        # Boto requires objects for the mappings and the devices.
        _block_device_map = blockdevicemapping.BlockDeviceMapping()
        for block_device_dict in block_device_mappings:
            for block_device, attributes in six.iteritems(block_device_dict):
                # Each device becomes an EBSBlockDeviceType whose fields are
                # set straight from the supplied attribute dict.
                _block_device = blockdevicemapping.EBSBlockDeviceType()
                for attribute, value in six.iteritems(attributes):
                    setattr(_block_device, attribute, value)
                _block_device_map[block_device] = _block_device
        _bdms = [_block_device_map]
    # If a VPC is specified, then determine the secgroup id's within that VPC, not
    # within the default VPC. If a security group id is already part of the list,
    # convert_to_group_ids leaves that entry without attempting a lookup on it.
    if security_groups and (vpc_id or vpc_name):
        security_groups = __salt__['boto_secgroup.convert_to_group_ids'](
            security_groups, vpc_id=vpc_id, vpc_name=vpc_name,
            region=region, key=key, keyid=keyid, profile=profile
        )
    lc = autoscale.LaunchConfiguration(
        name=name,
        image_id=image_id,
        key_name=key_name,
        security_groups=security_groups,
        user_data=user_data,
        instance_type=instance_type,
        kernel_id=kernel_id,
        ramdisk_id=ramdisk_id,
        block_device_mappings=_bdms,
        instance_monitoring=instance_monitoring,
        spot_price=spot_price,
        instance_profile_name=instance_profile_name,
        ebs_optimized=ebs_optimized,
        associate_public_ip_address=associate_public_ip_address,
        volume_type=volume_type,
        delete_on_termination=delete_on_termination,
        iops=iops,
        use_block_device_types=use_block_device_types)
    # Retry only on AWS API throttling, backing off a fixed 5 seconds each
    # time; any other BotoServerError fails immediately.
    retries = 30
    while True:
        try:
            conn.create_launch_configuration(lc)
            log.info('Created LC %s', name)
            return True
        except boto.exception.BotoServerError as e:
            if retries and e.code == 'Throttling':
                log.debug('Throttled by AWS API, retrying in 5 seconds...')
                time.sleep(5)
                retries -= 1
                continue
            log.error(e)
            msg = 'Failed to create LC {0}'.format(name)
            log.error(msg)
            return False
def function[create_launch_configuration, parameter[name, image_id, key_name, vpc_id, vpc_name, security_groups, user_data, instance_type, kernel_id, ramdisk_id, block_device_mappings, instance_monitoring, spot_price, instance_profile_name, ebs_optimized, associate_public_ip_address, volume_type, delete_on_termination, iops, use_block_device_types, region, key, keyid, profile]]: constant[ Create a launch configuration. CLI example:: salt myminion boto_asg.create_launch_configuration mylc image_id=ami-0b9c9f62 key_name='mykey' security_groups='["mygroup"]' instance_type='c3.2xlarge' ] variable[conn] assign[=] call[name[_get_conn], parameter[]] if call[name[isinstance], parameter[name[security_groups], name[six].string_types]] begin[:] variable[security_groups] assign[=] call[name[salt].utils.json.loads, parameter[name[security_groups]]] if call[name[isinstance], parameter[name[block_device_mappings], name[six].string_types]] begin[:] variable[block_device_mappings] assign[=] call[name[salt].utils.json.loads, parameter[name[block_device_mappings]]] variable[_bdms] assign[=] list[[]] if name[block_device_mappings] begin[:] variable[_block_device_map] assign[=] call[name[blockdevicemapping].BlockDeviceMapping, parameter[]] for taget[name[block_device_dict]] in starred[name[block_device_mappings]] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b217e680>, <ast.Name object at 0x7da1b217e650>]]] in starred[call[name[six].iteritems, parameter[name[block_device_dict]]]] begin[:] variable[_block_device] assign[=] call[name[blockdevicemapping].EBSBlockDeviceType, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b217e410>, <ast.Name object at 0x7da1b217e3e0>]]] in starred[call[name[six].iteritems, parameter[name[attributes]]]] begin[:] call[name[setattr], parameter[name[_block_device], name[attribute], name[value]]] call[name[_block_device_map]][name[block_device]] assign[=] name[_block_device] variable[_bdms] assign[=] list[[<ast.Name object at 0x7da1b217e020>]] if 
<ast.BoolOp object at 0x7da1b217dfc0> begin[:] variable[security_groups] assign[=] call[call[name[__salt__]][constant[boto_secgroup.convert_to_group_ids]], parameter[name[security_groups]]] variable[lc] assign[=] call[name[autoscale].LaunchConfiguration, parameter[]] variable[retries] assign[=] constant[30] while constant[True] begin[:] <ast.Try object at 0x7da1b21ed300>
keyword[def] identifier[create_launch_configuration] ( identifier[name] , identifier[image_id] , identifier[key_name] = keyword[None] , identifier[vpc_id] = keyword[None] , identifier[vpc_name] = keyword[None] , identifier[security_groups] = keyword[None] , identifier[user_data] = keyword[None] , identifier[instance_type] = literal[string] , identifier[kernel_id] = keyword[None] , identifier[ramdisk_id] = keyword[None] , identifier[block_device_mappings] = keyword[None] , identifier[instance_monitoring] = keyword[False] , identifier[spot_price] = keyword[None] , identifier[instance_profile_name] = keyword[None] , identifier[ebs_optimized] = keyword[False] , identifier[associate_public_ip_address] = keyword[None] , identifier[volume_type] = keyword[None] , identifier[delete_on_termination] = keyword[True] , identifier[iops] = keyword[None] , identifier[use_block_device_types] = keyword[False] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] identifier[isinstance] ( identifier[security_groups] , identifier[six] . identifier[string_types] ): identifier[security_groups] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[security_groups] ) keyword[if] identifier[isinstance] ( identifier[block_device_mappings] , identifier[six] . identifier[string_types] ): identifier[block_device_mappings] = identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[block_device_mappings] ) identifier[_bdms] =[] keyword[if] identifier[block_device_mappings] : identifier[_block_device_map] = identifier[blockdevicemapping] . 
identifier[BlockDeviceMapping] () keyword[for] identifier[block_device_dict] keyword[in] identifier[block_device_mappings] : keyword[for] identifier[block_device] , identifier[attributes] keyword[in] identifier[six] . identifier[iteritems] ( identifier[block_device_dict] ): identifier[_block_device] = identifier[blockdevicemapping] . identifier[EBSBlockDeviceType] () keyword[for] identifier[attribute] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[attributes] ): identifier[setattr] ( identifier[_block_device] , identifier[attribute] , identifier[value] ) identifier[_block_device_map] [ identifier[block_device] ]= identifier[_block_device] identifier[_bdms] =[ identifier[_block_device_map] ] keyword[if] identifier[security_groups] keyword[and] ( identifier[vpc_id] keyword[or] identifier[vpc_name] ): identifier[security_groups] = identifier[__salt__] [ literal[string] ]( identifier[security_groups] , identifier[vpc_id] = identifier[vpc_id] , identifier[vpc_name] = identifier[vpc_name] , identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) identifier[lc] = identifier[autoscale] . 
identifier[LaunchConfiguration] ( identifier[name] = identifier[name] , identifier[image_id] = identifier[image_id] , identifier[key_name] = identifier[key_name] , identifier[security_groups] = identifier[security_groups] , identifier[user_data] = identifier[user_data] , identifier[instance_type] = identifier[instance_type] , identifier[kernel_id] = identifier[kernel_id] , identifier[ramdisk_id] = identifier[ramdisk_id] , identifier[block_device_mappings] = identifier[_bdms] , identifier[instance_monitoring] = identifier[instance_monitoring] , identifier[spot_price] = identifier[spot_price] , identifier[instance_profile_name] = identifier[instance_profile_name] , identifier[ebs_optimized] = identifier[ebs_optimized] , identifier[associate_public_ip_address] = identifier[associate_public_ip_address] , identifier[volume_type] = identifier[volume_type] , identifier[delete_on_termination] = identifier[delete_on_termination] , identifier[iops] = identifier[iops] , identifier[use_block_device_types] = identifier[use_block_device_types] ) identifier[retries] = literal[int] keyword[while] keyword[True] : keyword[try] : identifier[conn] . identifier[create_launch_configuration] ( identifier[lc] ) identifier[log] . identifier[info] ( literal[string] , identifier[name] ) keyword[return] keyword[True] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : keyword[if] identifier[retries] keyword[and] identifier[e] . identifier[code] == literal[string] : identifier[log] . identifier[debug] ( literal[string] ) identifier[time] . identifier[sleep] ( literal[int] ) identifier[retries] -= literal[int] keyword[continue] identifier[log] . identifier[error] ( identifier[e] ) identifier[msg] = literal[string] . identifier[format] ( identifier[name] ) identifier[log] . identifier[error] ( identifier[msg] ) keyword[return] keyword[False]
def create_launch_configuration(name, image_id, key_name=None, vpc_id=None, vpc_name=None, security_groups=None, user_data=None, instance_type='m1.small', kernel_id=None, ramdisk_id=None, block_device_mappings=None, instance_monitoring=False, spot_price=None, instance_profile_name=None, ebs_optimized=False, associate_public_ip_address=None, volume_type=None, delete_on_termination=True, iops=None, use_block_device_types=False, region=None, key=None, keyid=None, profile=None): """ Create a launch configuration. CLI example:: salt myminion boto_asg.create_launch_configuration mylc image_id=ami-0b9c9f62 key_name='mykey' security_groups='["mygroup"]' instance_type='c3.2xlarge' """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if isinstance(security_groups, six.string_types): security_groups = salt.utils.json.loads(security_groups) # depends on [control=['if'], data=[]] if isinstance(block_device_mappings, six.string_types): block_device_mappings = salt.utils.json.loads(block_device_mappings) # depends on [control=['if'], data=[]] _bdms = [] if block_device_mappings: # Boto requires objects for the mappings and the devices. _block_device_map = blockdevicemapping.BlockDeviceMapping() for block_device_dict in block_device_mappings: for (block_device, attributes) in six.iteritems(block_device_dict): _block_device = blockdevicemapping.EBSBlockDeviceType() for (attribute, value) in six.iteritems(attributes): setattr(_block_device, attribute, value) # depends on [control=['for'], data=[]] _block_device_map[block_device] = _block_device # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['block_device_dict']] _bdms = [_block_device_map] # depends on [control=['if'], data=[]] # If a VPC is specified, then determine the secgroup id's within that VPC, not # within the default VPC. If a security group id is already part of the list, # convert_to_group_ids leaves that entry without attempting a lookup on it. 
if security_groups and (vpc_id or vpc_name): security_groups = __salt__['boto_secgroup.convert_to_group_ids'](security_groups, vpc_id=vpc_id, vpc_name=vpc_name, region=region, key=key, keyid=keyid, profile=profile) # depends on [control=['if'], data=[]] lc = autoscale.LaunchConfiguration(name=name, image_id=image_id, key_name=key_name, security_groups=security_groups, user_data=user_data, instance_type=instance_type, kernel_id=kernel_id, ramdisk_id=ramdisk_id, block_device_mappings=_bdms, instance_monitoring=instance_monitoring, spot_price=spot_price, instance_profile_name=instance_profile_name, ebs_optimized=ebs_optimized, associate_public_ip_address=associate_public_ip_address, volume_type=volume_type, delete_on_termination=delete_on_termination, iops=iops, use_block_device_types=use_block_device_types) retries = 30 while True: try: conn.create_launch_configuration(lc) log.info('Created LC %s', name) return True # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: if retries and e.code == 'Throttling': log.debug('Throttled by AWS API, retrying in 5 seconds...') time.sleep(5) retries -= 1 continue # depends on [control=['if'], data=[]] log.error(e) msg = 'Failed to create LC {0}'.format(name) log.error(msg) return False # depends on [control=['except'], data=['e']] # depends on [control=['while'], data=[]]
def pass_to_client(self, event, data, *args):
    """Forward an event and its payload to the client via the queue.

    :param event: event identifier to deliver.
    :param data: payload associated with the event.
    :param args: any extra positional values to forward alongside the data.
    :return: None
    """
    # Bundle everything into a single tuple so the client receives one item.
    message = (event, data, *args)
    self.q.put(message)
def function[pass_to_client, parameter[self, event, data]]: constant[Passes data up to the client via a Queue(). :param event: :param data: :param args: :return: ] call[name[self].q.put, parameter[tuple[[<ast.Name object at 0x7da2041d8a30>, <ast.Name object at 0x7da2041d81f0>, <ast.Starred object at 0x7da1b0530ee0>]]]]
keyword[def] identifier[pass_to_client] ( identifier[self] , identifier[event] , identifier[data] ,* identifier[args] ): literal[string] identifier[self] . identifier[q] . identifier[put] (( identifier[event] , identifier[data] ,* identifier[args] ))
def pass_to_client(self, event, data, *args): """Passes data up to the client via a Queue(). :param event: :param data: :param args: :return: """ self.q.put((event, data, *args))
def consume(generator):  # pragma: no cover
    """
    Helper function to consume a synchronous or asynchronous generator.

    Arguments:
        generator (generator|asyncgenerator): generator to consume.

    Returns:
        list
    """
    # A plain synchronous generator can be drained directly into a list.
    # NOTE: this function contains ``yield from`` below, so it is itself a
    # generator function; the list travels back as the StopIteration value.
    if hasattr(generator, '__next__'):
        return list(generator)

    if not PY_35:
        raise RuntimeError(
            'paco: asynchronous iterator protocol not supported')

    # Asynchronous generator: drive its __anext__ protocol by hand,
    # collecting yielded values until the iterator is exhausted.
    results = []
    while True:
        try:
            item = yield from generator.__anext__()
        except StopAsyncIteration:  # noqa
            return results
        results.append(item)
def function[consume, parameter[generator]]: constant[ Helper function to consume a synchronous or asynchronous generator. Arguments: generator (generator|asyncgenerator): generator to consume. Returns: list ] if call[name[hasattr], parameter[name[generator], constant[__next__]]] begin[:] return[call[name[list], parameter[name[generator]]]] if <ast.UnaryOp object at 0x7da1b27b9540> begin[:] <ast.Raise object at 0x7da1b27b8e80> variable[buf] assign[=] list[[]] while constant[True] begin[:] <ast.Try object at 0x7da1b27b85b0> return[name[buf]]
keyword[def] identifier[consume] ( identifier[generator] ): literal[string] keyword[if] identifier[hasattr] ( identifier[generator] , literal[string] ): keyword[return] identifier[list] ( identifier[generator] ) keyword[if] keyword[not] identifier[PY_35] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[buf] =[] keyword[while] keyword[True] : keyword[try] : identifier[buf] . identifier[append] (( keyword[yield] keyword[from] identifier[generator] . identifier[__anext__] ())) keyword[except] identifier[StopAsyncIteration] : keyword[break] keyword[return] identifier[buf]
def consume(generator): # pragma: no cover '\n Helper function to consume a synchronous or asynchronous generator.\n\n Arguments:\n generator (generator|asyncgenerator): generator to consume.\n\n Returns:\n list\n ' # If synchronous generator, just consume and return as list if hasattr(generator, '__next__'): return list(generator) # depends on [control=['if'], data=[]] if not PY_35: raise RuntimeError('paco: asynchronous iterator protocol not supported') # depends on [control=['if'], data=[]] # If asynchronous generator, consume it generator protocol manually buf = [] while True: try: buf.append((yield from generator.__anext__())) # depends on [control=['try'], data=[]] except StopAsyncIteration: # noqa break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] return buf
def _format_msg(text, width, indent=0, prefix=""): r""" Format exception message. Replace newline characters \n with ``\n``, ` with \` and then wrap text as needed """ text = repr(text).replace("`", "\\`").replace("\\n", " ``\\n`` ") sindent = " " * indent if not prefix else prefix wrapped_text = textwrap.wrap(text, width, subsequent_indent=sindent) # [1:-1] eliminates quotes generated by repr in first line return ("\n".join(wrapped_text))[1:-1].rstrip()
def function[_format_msg, parameter[text, width, indent, prefix]]: constant[ Format exception message. Replace newline characters \n with ``\n``, ` with \` and then wrap text as needed ] variable[text] assign[=] call[call[call[name[repr], parameter[name[text]]].replace, parameter[constant[`], constant[\`]]].replace, parameter[constant[\n], constant[ ``\n`` ]]] variable[sindent] assign[=] <ast.IfExp object at 0x7da18ede4cd0> variable[wrapped_text] assign[=] call[name[textwrap].wrap, parameter[name[text], name[width]]] return[call[call[call[constant[ ].join, parameter[name[wrapped_text]]]][<ast.Slice object at 0x7da20e9b3610>].rstrip, parameter[]]]
keyword[def] identifier[_format_msg] ( identifier[text] , identifier[width] , identifier[indent] = literal[int] , identifier[prefix] = literal[string] ): literal[string] identifier[text] = identifier[repr] ( identifier[text] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) identifier[sindent] = literal[string] * identifier[indent] keyword[if] keyword[not] identifier[prefix] keyword[else] identifier[prefix] identifier[wrapped_text] = identifier[textwrap] . identifier[wrap] ( identifier[text] , identifier[width] , identifier[subsequent_indent] = identifier[sindent] ) keyword[return] ( literal[string] . identifier[join] ( identifier[wrapped_text] ))[ literal[int] :- literal[int] ]. identifier[rstrip] ()
def _format_msg(text, width, indent=0, prefix=''): """ Format exception message. Replace newline characters \\n with ``\\n``, ` with \\` and then wrap text as needed """ text = repr(text).replace('`', '\\`').replace('\\n', ' ``\\n`` ') sindent = ' ' * indent if not prefix else prefix wrapped_text = textwrap.wrap(text, width, subsequent_indent=sindent) # [1:-1] eliminates quotes generated by repr in first line return '\n'.join(wrapped_text)[1:-1].rstrip()
def get_value(self, subsystem, option):
    """
    Read the given value from the given subsystem.

    Do not include the subsystem name in the option name.
    Only call this method if the given subsystem is available.
    """
    assert subsystem in self, 'Subsystem {} is missing'.format(subsystem)
    # Full option filename is "<subsystem>.<option>" inside the
    # subsystem's cgroup directory.
    option_filename = subsystem + '.' + option
    return util.read_file(self.per_subsystem[subsystem], option_filename)
def function[get_value, parameter[self, subsystem, option]]: constant[ Read the given value from the given subsystem. Do not include the subsystem name in the option name. Only call this method if the given subsystem is available. ] assert[compare[name[subsystem] in name[self]]] return[call[name[util].read_file, parameter[call[name[self].per_subsystem][name[subsystem]], binary_operation[binary_operation[name[subsystem] + constant[.]] + name[option]]]]]
keyword[def] identifier[get_value] ( identifier[self] , identifier[subsystem] , identifier[option] ): literal[string] keyword[assert] identifier[subsystem] keyword[in] identifier[self] , literal[string] . identifier[format] ( identifier[subsystem] ) keyword[return] identifier[util] . identifier[read_file] ( identifier[self] . identifier[per_subsystem] [ identifier[subsystem] ], identifier[subsystem] + literal[string] + identifier[option] )
def get_value(self, subsystem, option): """ Read the given value from the given subsystem. Do not include the subsystem name in the option name. Only call this method if the given subsystem is available. """ assert subsystem in self, 'Subsystem {} is missing'.format(subsystem) return util.read_file(self.per_subsystem[subsystem], subsystem + '.' + option)
def all_pairs_normalized_distances_reference(X):
    """
    Reference implementation of normalized all-pairs distance, used
    for testing the more efficient implementation above for equivalence.
    """
    n_rows, n_features = X.shape
    # Start at +inf so pairs with no commonly-observed feature stay
    # "infinitely far apart".
    result = np.full((n_rows, n_rows), np.inf, dtype="float32")
    for row_idx in range(n_rows):
        delta = X - X[row_idx, :].reshape((1, n_features))
        nan_per_row = np.isnan(delta).sum(axis=1)
        # Only rows sharing at least one observed feature get a distance.
        usable = nan_per_row < n_features
        result[row_idx, usable] = np.nanmean(delta[usable, :] ** 2, axis=1)
    return result
def function[all_pairs_normalized_distances_reference, parameter[X]]: constant[ Reference implementation of normalized all-pairs distance, used for testing the more efficient implementation above for equivalence. ] <ast.Tuple object at 0x7da207f03df0> assign[=] name[X].shape variable[D] assign[=] binary_operation[call[name[np].ones, parameter[tuple[[<ast.Name object at 0x7da207f03a30>, <ast.Name object at 0x7da207f013c0>]]]] * name[np].inf] for taget[name[i]] in starred[call[name[range], parameter[name[n_samples]]]] begin[:] variable[diffs] assign[=] binary_operation[name[X] - call[call[name[X]][tuple[[<ast.Name object at 0x7da207f01150>, <ast.Slice object at 0x7da207f00040>]]].reshape, parameter[tuple[[<ast.Constant object at 0x7da207f03940>, <ast.Name object at 0x7da207f037f0>]]]]] variable[missing_diffs] assign[=] call[name[np].isnan, parameter[name[diffs]]] variable[missing_counts_per_row] assign[=] call[name[missing_diffs].sum, parameter[]] variable[valid_rows] assign[=] compare[name[missing_counts_per_row] less[<] name[n_cols]] call[name[D]][tuple[[<ast.Name object at 0x7da207f00e50>, <ast.Name object at 0x7da207f00490>]]] assign[=] call[name[np].nanmean, parameter[binary_operation[call[name[diffs]][tuple[[<ast.Name object at 0x7da207f02a10>, <ast.Slice object at 0x7da207f01cf0>]]] ** constant[2]]]] return[name[D]]
keyword[def] identifier[all_pairs_normalized_distances_reference] ( identifier[X] ): literal[string] identifier[n_samples] , identifier[n_cols] = identifier[X] . identifier[shape] identifier[D] = identifier[np] . identifier[ones] (( identifier[n_samples] , identifier[n_samples] ), identifier[dtype] = literal[string] )* identifier[np] . identifier[inf] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_samples] ): identifier[diffs] = identifier[X] - identifier[X] [ identifier[i] ,:]. identifier[reshape] (( literal[int] , identifier[n_cols] )) identifier[missing_diffs] = identifier[np] . identifier[isnan] ( identifier[diffs] ) identifier[missing_counts_per_row] = identifier[missing_diffs] . identifier[sum] ( identifier[axis] = literal[int] ) identifier[valid_rows] = identifier[missing_counts_per_row] < identifier[n_cols] identifier[D] [ identifier[i] , identifier[valid_rows] ]= identifier[np] . identifier[nanmean] ( identifier[diffs] [ identifier[valid_rows] ,:]** literal[int] , identifier[axis] = literal[int] ) keyword[return] identifier[D]
def all_pairs_normalized_distances_reference(X): """ Reference implementation of normalized all-pairs distance, used for testing the more efficient implementation above for equivalence. """ (n_samples, n_cols) = X.shape # matrix of mean squared difference between between samples D = np.ones((n_samples, n_samples), dtype='float32') * np.inf for i in range(n_samples): diffs = X - X[i, :].reshape((1, n_cols)) missing_diffs = np.isnan(diffs) missing_counts_per_row = missing_diffs.sum(axis=1) valid_rows = missing_counts_per_row < n_cols D[i, valid_rows] = np.nanmean(diffs[valid_rows, :] ** 2, axis=1) # depends on [control=['for'], data=['i']] return D
def transition_run(self, pipeline_key, blocking_slot_keys=None,
                   fanned_out_pipelines=None, pipelines_to_run=None):
    """Marks an asynchronous or generator pipeline as running.

    Does nothing if the pipeline is no longer in a runnable state.

    Args:
      pipeline_key: The db.Key of the _PipelineRecord to update.
      blocking_slot_keys: List of db.Key instances that this pipeline's
        finalization barrier should wait on in addition to the existing one.
        This is used to update the barrier to include all child outputs. When
        None, the barrier will not be updated.
      fanned_out_pipelines: List of db.Key instances of _PipelineRecords that
        were fanned out by this generator pipeline. This is distinct from the
        'pipelines_to_run' list because not all of the pipelines listed here
        will be immediately ready to execute. When None, then this generator
        yielded no children.
      pipelines_to_run: List of db.Key instances of _PipelineRecords that
        should be kicked off (fan-out) transactionally as part of this
        transition. When None, no child pipelines will run. All db.Keys in
        this list must also be present in the fanned_out_pipelines list.

    Raises:
      UnexpectedPipelineError if blocking_slot_keys was not empty and the
      _BarrierRecord has gone missing.
    """
    def txn():
      # Everything below runs in one datastore transaction so that the
      # status flip, the fan-out task enqueue, and the barrier update
      # either all commit together or not at all.
      pipeline_record = db.get(pipeline_key)
      if pipeline_record is None:
        logging.warning('Pipeline ID "%s" cannot be marked as run. '
                        'Does not exist.', pipeline_key.name())
        raise db.Rollback()
      if pipeline_record.status != _PipelineRecord.WAITING:
        # Not in a runnable state (e.g. already RUN/DONE/ABORTED); bail out
        # without touching anything.
        logging.warning('Pipeline ID "%s" in bad state to be marked as run: %s',
                        pipeline_key.name(), pipeline_record.status)
        raise db.Rollback()
      pipeline_record.status = _PipelineRecord.RUN

      if fanned_out_pipelines:
        # NOTE: We must model the pipeline relationship in a top-down manner,
        # meaning each pipeline must point forward to the pipelines that it
        # fanned out to. The reason is race conditions. If evaluate()
        # dies early, it may create many unused _PipelineRecord and _SlotRecord
        # instances that never progress. The only way we know which of these
        # are valid is by traversing the graph from the root, where the
        # fanned_out property refers to those pipelines that were run using a
        # transactional task.
        child_pipeline_list = list(fanned_out_pipelines)
        pipeline_record.fanned_out = child_pipeline_list

        if pipelines_to_run:
          # Children are referenced by index into fanned_out (sorted for
          # determinism); the fan-out handler resolves them back to keys.
          child_indexes = [
              child_pipeline_list.index(p) for p in pipelines_to_run]
          child_indexes.sort()
          task = taskqueue.Task(
              url=self.fanout_handler_path,
              params=dict(parent_key=str(pipeline_key),
                          child_indexes=child_indexes))
          # transactional=True ties the kickoff task to this transaction's
          # outcome.
          task.add(queue_name=self.queue_name, transactional=True)

      pipeline_record.put()

      if blocking_slot_keys:
        # NOTE: Always update a generator pipeline's finalization barrier to
        # include all of the outputs of any pipelines that it runs, to ensure
        # that finalized calls will not happen until all child pipelines have
        # completed. This must happen transactionally with the enqueue of
        # the fan-out kickoff task above to ensure the child output slots and
        # the barrier blocking slots are the same.
        barrier_key = db.Key.from_path(
            _BarrierRecord.kind(), _BarrierRecord.FINALIZE,
            parent=pipeline_key)
        finalize_barrier = db.get(barrier_key)
        if finalize_barrier is None:
          raise UnexpectedPipelineError(
              'Pipeline ID "%s" cannot update finalize barrier. '
              'Does not exist.' % pipeline_key.name())
        else:
          # Union with the existing slots so previously-registered
          # dependencies are preserved.
          finalize_barrier.blocking_slots = list(
              blocking_slot_keys.union(set(finalize_barrier.blocking_slots)))
          finalize_barrier.put()

    db.run_in_transaction(txn)
def function[transition_run, parameter[self, pipeline_key, blocking_slot_keys, fanned_out_pipelines, pipelines_to_run]]: constant[Marks an asynchronous or generator pipeline as running. Does nothing if the pipeline is no longer in a runnable state. Args: pipeline_key: The db.Key of the _PipelineRecord to update. blocking_slot_keys: List of db.Key instances that this pipeline's finalization barrier should wait on in addition to the existing one. This is used to update the barrier to include all child outputs. When None, the barrier will not be updated. fanned_out_pipelines: List of db.Key instances of _PipelineRecords that were fanned out by this generator pipeline. This is distinct from the 'pipelines_to_run' list because not all of the pipelines listed here will be immediately ready to execute. When None, then this generator yielded no children. pipelines_to_run: List of db.Key instances of _PipelineRecords that should be kicked off (fan-out) transactionally as part of this transition. When None, no child pipelines will run. All db.Keys in this list must also be present in the fanned_out_pipelines list. Raises: UnexpectedPipelineError if blocking_slot_keys was not empty and the _BarrierRecord has gone missing. ] def function[txn, parameter[]]: variable[pipeline_record] assign[=] call[name[db].get, parameter[name[pipeline_key]]] if compare[name[pipeline_record] is constant[None]] begin[:] call[name[logging].warning, parameter[constant[Pipeline ID "%s" cannot be marked as run. 
Does not exist.], call[name[pipeline_key].name, parameter[]]]] <ast.Raise object at 0x7da18eb56260> if compare[name[pipeline_record].status not_equal[!=] name[_PipelineRecord].WAITING] begin[:] call[name[logging].warning, parameter[constant[Pipeline ID "%s" in bad state to be marked as run: %s], call[name[pipeline_key].name, parameter[]], name[pipeline_record].status]] <ast.Raise object at 0x7da18eb54fd0> name[pipeline_record].status assign[=] name[_PipelineRecord].RUN if name[fanned_out_pipelines] begin[:] variable[child_pipeline_list] assign[=] call[name[list], parameter[name[fanned_out_pipelines]]] name[pipeline_record].fanned_out assign[=] name[child_pipeline_list] if name[pipelines_to_run] begin[:] variable[child_indexes] assign[=] <ast.ListComp object at 0x7da18eb55b10> call[name[child_indexes].sort, parameter[]] variable[task] assign[=] call[name[taskqueue].Task, parameter[]] call[name[task].add, parameter[]] call[name[pipeline_record].put, parameter[]] if name[blocking_slot_keys] begin[:] variable[barrier_key] assign[=] call[name[db].Key.from_path, parameter[call[name[_BarrierRecord].kind, parameter[]], name[_BarrierRecord].FINALIZE]] variable[finalize_barrier] assign[=] call[name[db].get, parameter[name[barrier_key]]] if compare[name[finalize_barrier] is constant[None]] begin[:] <ast.Raise object at 0x7da18eb55d50> call[name[db].run_in_transaction, parameter[name[txn]]]
keyword[def] identifier[transition_run] ( identifier[self] , identifier[pipeline_key] , identifier[blocking_slot_keys] = keyword[None] , identifier[fanned_out_pipelines] = keyword[None] , identifier[pipelines_to_run] = keyword[None] ): literal[string] keyword[def] identifier[txn] (): identifier[pipeline_record] = identifier[db] . identifier[get] ( identifier[pipeline_key] ) keyword[if] identifier[pipeline_record] keyword[is] keyword[None] : identifier[logging] . identifier[warning] ( literal[string] literal[string] , identifier[pipeline_key] . identifier[name] ()) keyword[raise] identifier[db] . identifier[Rollback] () keyword[if] identifier[pipeline_record] . identifier[status] != identifier[_PipelineRecord] . identifier[WAITING] : identifier[logging] . identifier[warning] ( literal[string] , identifier[pipeline_key] . identifier[name] (), identifier[pipeline_record] . identifier[status] ) keyword[raise] identifier[db] . identifier[Rollback] () identifier[pipeline_record] . identifier[status] = identifier[_PipelineRecord] . identifier[RUN] keyword[if] identifier[fanned_out_pipelines] : identifier[child_pipeline_list] = identifier[list] ( identifier[fanned_out_pipelines] ) identifier[pipeline_record] . identifier[fanned_out] = identifier[child_pipeline_list] keyword[if] identifier[pipelines_to_run] : identifier[child_indexes] =[ identifier[child_pipeline_list] . identifier[index] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[pipelines_to_run] ] identifier[child_indexes] . identifier[sort] () identifier[task] = identifier[taskqueue] . identifier[Task] ( identifier[url] = identifier[self] . identifier[fanout_handler_path] , identifier[params] = identifier[dict] ( identifier[parent_key] = identifier[str] ( identifier[pipeline_key] ), identifier[child_indexes] = identifier[child_indexes] )) identifier[task] . identifier[add] ( identifier[queue_name] = identifier[self] . 
identifier[queue_name] , identifier[transactional] = keyword[True] ) identifier[pipeline_record] . identifier[put] () keyword[if] identifier[blocking_slot_keys] : identifier[barrier_key] = identifier[db] . identifier[Key] . identifier[from_path] ( identifier[_BarrierRecord] . identifier[kind] (), identifier[_BarrierRecord] . identifier[FINALIZE] , identifier[parent] = identifier[pipeline_key] ) identifier[finalize_barrier] = identifier[db] . identifier[get] ( identifier[barrier_key] ) keyword[if] identifier[finalize_barrier] keyword[is] keyword[None] : keyword[raise] identifier[UnexpectedPipelineError] ( literal[string] literal[string] % identifier[pipeline_key] . identifier[name] ()) keyword[else] : identifier[finalize_barrier] . identifier[blocking_slots] = identifier[list] ( identifier[blocking_slot_keys] . identifier[union] ( identifier[set] ( identifier[finalize_barrier] . identifier[blocking_slots] ))) identifier[finalize_barrier] . identifier[put] () identifier[db] . identifier[run_in_transaction] ( identifier[txn] )
def transition_run(self, pipeline_key, blocking_slot_keys=None, fanned_out_pipelines=None, pipelines_to_run=None): """Marks an asynchronous or generator pipeline as running. Does nothing if the pipeline is no longer in a runnable state. Args: pipeline_key: The db.Key of the _PipelineRecord to update. blocking_slot_keys: List of db.Key instances that this pipeline's finalization barrier should wait on in addition to the existing one. This is used to update the barrier to include all child outputs. When None, the barrier will not be updated. fanned_out_pipelines: List of db.Key instances of _PipelineRecords that were fanned out by this generator pipeline. This is distinct from the 'pipelines_to_run' list because not all of the pipelines listed here will be immediately ready to execute. When None, then this generator yielded no children. pipelines_to_run: List of db.Key instances of _PipelineRecords that should be kicked off (fan-out) transactionally as part of this transition. When None, no child pipelines will run. All db.Keys in this list must also be present in the fanned_out_pipelines list. Raises: UnexpectedPipelineError if blocking_slot_keys was not empty and the _BarrierRecord has gone missing. """ def txn(): pipeline_record = db.get(pipeline_key) if pipeline_record is None: logging.warning('Pipeline ID "%s" cannot be marked as run. Does not exist.', pipeline_key.name()) raise db.Rollback() # depends on [control=['if'], data=[]] if pipeline_record.status != _PipelineRecord.WAITING: logging.warning('Pipeline ID "%s" in bad state to be marked as run: %s', pipeline_key.name(), pipeline_record.status) raise db.Rollback() # depends on [control=['if'], data=[]] pipeline_record.status = _PipelineRecord.RUN if fanned_out_pipelines: # NOTE: We must model the pipeline relationship in a top-down manner, # meaning each pipeline must point forward to the pipelines that it # fanned out to. The reason is race conditions. 
If evaluate() # dies early, it may create many unused _PipelineRecord and _SlotRecord # instances that never progress. The only way we know which of these # are valid is by traversing the graph from the root, where the # fanned_out property refers to those pipelines that were run using a # transactional task. child_pipeline_list = list(fanned_out_pipelines) pipeline_record.fanned_out = child_pipeline_list if pipelines_to_run: child_indexes = [child_pipeline_list.index(p) for p in pipelines_to_run] child_indexes.sort() task = taskqueue.Task(url=self.fanout_handler_path, params=dict(parent_key=str(pipeline_key), child_indexes=child_indexes)) task.add(queue_name=self.queue_name, transactional=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] pipeline_record.put() if blocking_slot_keys: # NOTE: Always update a generator pipeline's finalization barrier to # include all of the outputs of any pipelines that it runs, to ensure # that finalized calls will not happen until all child pipelines have # completed. This must happen transactionally with the enqueue of # the fan-out kickoff task above to ensure the child output slots and # the barrier blocking slots are the same. barrier_key = db.Key.from_path(_BarrierRecord.kind(), _BarrierRecord.FINALIZE, parent=pipeline_key) finalize_barrier = db.get(barrier_key) if finalize_barrier is None: raise UnexpectedPipelineError('Pipeline ID "%s" cannot update finalize barrier. Does not exist.' % pipeline_key.name()) # depends on [control=['if'], data=[]] else: finalize_barrier.blocking_slots = list(blocking_slot_keys.union(set(finalize_barrier.blocking_slots))) finalize_barrier.put() # depends on [control=['if'], data=[]] db.run_in_transaction(txn)
def create_state_multi_precision(self, index, weight):
    """Creates auxiliary state for a given weight, including FP32 high
    precision copy if original weight is FP16.

    This method is provided to perform automatic mixed precision training
    for optimizers that do not support it themselves.

    Parameters
    ----------
    index : int
        An unique index to identify the weight.
    weight : NDArray
        The weight.

    Returns
    -------
    state : any obj
        The state associated with the weight.
    """
    is_fp16 = weight.dtype == numpy.float16
    if self.multi_precision and is_fp16:
        # Keep a float32 master copy and build the regular state from it.
        master_weight = weight.astype(numpy.float32)
        return (master_weight, self.create_state(index, master_weight))
    if is_fp16 and not self.multi_precision:
        warnings.warn("Accumulating with float16 in optimizer can lead to "
                      "poor accuracy or slow convergence. "
                      "Consider using multi_precision=True option of the "
                      "optimizer")
    return self.create_state(index, weight)
def function[create_state_multi_precision, parameter[self, index, weight]]: constant[Creates auxiliary state for a given weight, including FP32 high precision copy if original weight is FP16. This method is provided to perform automatic mixed precision training for optimizers that do not support it themselves. Parameters ---------- index : int An unique index to identify the weight. weight : NDArray The weight. Returns ------- state : any obj The state associated with the weight. ] variable[weight_master_copy] assign[=] constant[None] if <ast.BoolOp object at 0x7da1b2065ff0> begin[:] variable[weight_master_copy] assign[=] call[name[weight].astype, parameter[name[numpy].float32]] return[binary_operation[tuple[[<ast.Name object at 0x7da1b2066140>]] + tuple[[<ast.Call object at 0x7da1b2065060>]]]] if <ast.BoolOp object at 0x7da1b2066bf0> begin[:] call[name[warnings].warn, parameter[constant[Accumulating with float16 in optimizer can lead to poor accuracy or slow convergence. Consider using multi_precision=True option of the optimizer]]] return[call[name[self].create_state, parameter[name[index], name[weight]]]]
keyword[def] identifier[create_state_multi_precision] ( identifier[self] , identifier[index] , identifier[weight] ): literal[string] identifier[weight_master_copy] = keyword[None] keyword[if] identifier[self] . identifier[multi_precision] keyword[and] identifier[weight] . identifier[dtype] == identifier[numpy] . identifier[float16] : identifier[weight_master_copy] = identifier[weight] . identifier[astype] ( identifier[numpy] . identifier[float32] ) keyword[return] ( identifier[weight_master_copy] ,)+( identifier[self] . identifier[create_state] ( identifier[index] , identifier[weight_master_copy] ),) keyword[if] identifier[weight] . identifier[dtype] == identifier[numpy] . identifier[float16] keyword[and] keyword[not] identifier[self] . identifier[multi_precision] : identifier[warnings] . identifier[warn] ( literal[string] literal[string] literal[string] literal[string] ) keyword[return] identifier[self] . identifier[create_state] ( identifier[index] , identifier[weight] )
def create_state_multi_precision(self, index, weight): """Creates auxiliary state for a given weight, including FP32 high precision copy if original weight is FP16. This method is provided to perform automatic mixed precision training for optimizers that do not support it themselves. Parameters ---------- index : int An unique index to identify the weight. weight : NDArray The weight. Returns ------- state : any obj The state associated with the weight. """ weight_master_copy = None if self.multi_precision and weight.dtype == numpy.float16: weight_master_copy = weight.astype(numpy.float32) return (weight_master_copy,) + (self.create_state(index, weight_master_copy),) # depends on [control=['if'], data=[]] if weight.dtype == numpy.float16 and (not self.multi_precision): warnings.warn('Accumulating with float16 in optimizer can lead to poor accuracy or slow convergence. Consider using multi_precision=True option of the optimizer') # depends on [control=['if'], data=[]] return self.create_state(index, weight)
def benchmark(self, func, gpu_args, threads, grid, times):
    """runs the kernel and measures time repeatedly, returns average time

    Runs the kernel and measures kernel execution time repeatedly, number of
    iterations is set during the creation of CudaFunctions. Benchmark returns
    a robust average, from all measurements the fastest and slowest runs are
    discarded and the rest is included in the returned average. The reason for
    this is to be robust against initialization artifacts and other
    exceptional cases.

    :param func: A PyCuda kernel compiled for this specific kernel
        configuration
    :type func: pycuda.driver.Function

    :param gpu_args: A list of arguments to the kernel, order should match the
        order in the code. Allowed values are either variables in global memory
        or single values passed by value.
    :type gpu_args: list( pycuda.driver.DeviceAllocation, numpy.int32, ...)

    :param threads: A tuple listing the number of threads in each dimension of
        the thread block
    :type threads: tuple(int, int, int)

    :param grid: A tuple listing the number of thread blocks in each dimension
        of the grid
    :type grid: tuple(int, int)

    :param times: Return the execution time of all iterations.
    :type times: bool

    :returns: All execution times, if times=True, or a robust average for the
        kernel execution time.
    :rtype: float
    """
    start_event = drv.Event()
    stop_event = drv.Event()
    durations = []
    for _ in range(self.iterations):
        # Synchronize before and after each run so the recorded events
        # bracket exactly one kernel execution.
        self.context.synchronize()
        start_event.record()
        self.run_kernel(func, gpu_args, threads, grid)
        stop_event.record()
        self.context.synchronize()
        durations.append(stop_event.time_since(start_event))
    durations.sort()
    if times:
        return durations
    if self.iterations > 4:
        # Drop the fastest and slowest run to reduce measurement noise.
        return numpy.mean(durations[1:-1])
    return numpy.mean(durations)
def function[benchmark, parameter[self, func, gpu_args, threads, grid, times]]: constant[runs the kernel and measures time repeatedly, returns average time Runs the kernel and measures kernel execution time repeatedly, number of iterations is set during the creation of CudaFunctions. Benchmark returns a robust average, from all measurements the fastest and slowest runs are discarded and the rest is included in the returned average. The reason for this is to be robust against initialization artifacts and other exceptional cases. :param func: A PyCuda kernel compiled for this specific kernel configuration :type func: pycuda.driver.Function :param gpu_args: A list of arguments to the kernel, order should match the order in the code. Allowed values are either variables in global memory or single values passed by value. :type gpu_args: list( pycuda.driver.DeviceAllocation, numpy.int32, ...) :param threads: A tuple listing the number of threads in each dimension of the thread block :type threads: tuple(int, int, int) :param grid: A tuple listing the number of thread blocks in each dimension of the grid :type grid: tuple(int, int) :param times: Return the execution time of all iterations. :type times: bool :returns: All execution times, if times=True, or a robust average for the kernel execution time. 
:rtype: float ] variable[start] assign[=] call[name[drv].Event, parameter[]] variable[end] assign[=] call[name[drv].Event, parameter[]] variable[time] assign[=] list[[]] for taget[name[_]] in starred[call[name[range], parameter[name[self].iterations]]] begin[:] call[name[self].context.synchronize, parameter[]] call[name[start].record, parameter[]] call[name[self].run_kernel, parameter[name[func], name[gpu_args], name[threads], name[grid]]] call[name[end].record, parameter[]] call[name[self].context.synchronize, parameter[]] call[name[time].append, parameter[call[name[end].time_since, parameter[name[start]]]]] variable[time] assign[=] call[name[sorted], parameter[name[time]]] if name[times] begin[:] return[name[time]]
keyword[def] identifier[benchmark] ( identifier[self] , identifier[func] , identifier[gpu_args] , identifier[threads] , identifier[grid] , identifier[times] ): literal[string] identifier[start] = identifier[drv] . identifier[Event] () identifier[end] = identifier[drv] . identifier[Event] () identifier[time] =[] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[self] . identifier[iterations] ): identifier[self] . identifier[context] . identifier[synchronize] () identifier[start] . identifier[record] () identifier[self] . identifier[run_kernel] ( identifier[func] , identifier[gpu_args] , identifier[threads] , identifier[grid] ) identifier[end] . identifier[record] () identifier[self] . identifier[context] . identifier[synchronize] () identifier[time] . identifier[append] ( identifier[end] . identifier[time_since] ( identifier[start] )) identifier[time] = identifier[sorted] ( identifier[time] ) keyword[if] identifier[times] : keyword[return] identifier[time] keyword[else] : keyword[if] identifier[self] . identifier[iterations] > literal[int] : keyword[return] identifier[numpy] . identifier[mean] ( identifier[time] [ literal[int] :- literal[int] ]) keyword[else] : keyword[return] identifier[numpy] . identifier[mean] ( identifier[time] )
def benchmark(self, func, gpu_args, threads, grid, times): """runs the kernel and measures time repeatedly, returns average time Runs the kernel and measures kernel execution time repeatedly, number of iterations is set during the creation of CudaFunctions. Benchmark returns a robust average, from all measurements the fastest and slowest runs are discarded and the rest is included in the returned average. The reason for this is to be robust against initialization artifacts and other exceptional cases. :param func: A PyCuda kernel compiled for this specific kernel configuration :type func: pycuda.driver.Function :param gpu_args: A list of arguments to the kernel, order should match the order in the code. Allowed values are either variables in global memory or single values passed by value. :type gpu_args: list( pycuda.driver.DeviceAllocation, numpy.int32, ...) :param threads: A tuple listing the number of threads in each dimension of the thread block :type threads: tuple(int, int, int) :param grid: A tuple listing the number of thread blocks in each dimension of the grid :type grid: tuple(int, int) :param times: Return the execution time of all iterations. :type times: bool :returns: All execution times, if times=True, or a robust average for the kernel execution time. :rtype: float """ start = drv.Event() end = drv.Event() time = [] for _ in range(self.iterations): self.context.synchronize() start.record() self.run_kernel(func, gpu_args, threads, grid) end.record() self.context.synchronize() time.append(end.time_since(start)) # depends on [control=['for'], data=[]] time = sorted(time) if times: return time # depends on [control=['if'], data=[]] elif self.iterations > 4: return numpy.mean(time[1:-1]) # depends on [control=['if'], data=[]] else: return numpy.mean(time)
def minibatch_by_words(items, size, tuples=True, count_words=len): """Create minibatches of a given number of words.""" if isinstance(size, int): size_ = itertools.repeat(size) else: size_ = size items = iter(items) while True: batch_size = next(size_) batch = [] while batch_size >= 0: try: if tuples: doc, gold = next(items) else: doc = next(items) except StopIteration: if batch: yield batch return batch_size -= count_words(doc) if tuples: batch.append((doc, gold)) else: batch.append(doc) if batch: yield batch
def function[minibatch_by_words, parameter[items, size, tuples, count_words]]: constant[Create minibatches of a given number of words.] if call[name[isinstance], parameter[name[size], name[int]]] begin[:] variable[size_] assign[=] call[name[itertools].repeat, parameter[name[size]]] variable[items] assign[=] call[name[iter], parameter[name[items]]] while constant[True] begin[:] variable[batch_size] assign[=] call[name[next], parameter[name[size_]]] variable[batch] assign[=] list[[]] while compare[name[batch_size] greater_or_equal[>=] constant[0]] begin[:] <ast.Try object at 0x7da1b1ef9720> <ast.AugAssign object at 0x7da1b1efb220> if name[tuples] begin[:] call[name[batch].append, parameter[tuple[[<ast.Name object at 0x7da1b1ef8c40>, <ast.Name object at 0x7da1b1ef8640>]]]] if name[batch] begin[:] <ast.Yield object at 0x7da1b1ef8af0>
keyword[def] identifier[minibatch_by_words] ( identifier[items] , identifier[size] , identifier[tuples] = keyword[True] , identifier[count_words] = identifier[len] ): literal[string] keyword[if] identifier[isinstance] ( identifier[size] , identifier[int] ): identifier[size_] = identifier[itertools] . identifier[repeat] ( identifier[size] ) keyword[else] : identifier[size_] = identifier[size] identifier[items] = identifier[iter] ( identifier[items] ) keyword[while] keyword[True] : identifier[batch_size] = identifier[next] ( identifier[size_] ) identifier[batch] =[] keyword[while] identifier[batch_size] >= literal[int] : keyword[try] : keyword[if] identifier[tuples] : identifier[doc] , identifier[gold] = identifier[next] ( identifier[items] ) keyword[else] : identifier[doc] = identifier[next] ( identifier[items] ) keyword[except] identifier[StopIteration] : keyword[if] identifier[batch] : keyword[yield] identifier[batch] keyword[return] identifier[batch_size] -= identifier[count_words] ( identifier[doc] ) keyword[if] identifier[tuples] : identifier[batch] . identifier[append] (( identifier[doc] , identifier[gold] )) keyword[else] : identifier[batch] . identifier[append] ( identifier[doc] ) keyword[if] identifier[batch] : keyword[yield] identifier[batch]
def minibatch_by_words(items, size, tuples=True, count_words=len): """Create minibatches of a given number of words.""" if isinstance(size, int): size_ = itertools.repeat(size) # depends on [control=['if'], data=[]] else: size_ = size items = iter(items) while True: batch_size = next(size_) batch = [] while batch_size >= 0: try: if tuples: (doc, gold) = next(items) # depends on [control=['if'], data=[]] else: doc = next(items) # depends on [control=['try'], data=[]] except StopIteration: if batch: yield batch # depends on [control=['if'], data=[]] return # depends on [control=['except'], data=[]] batch_size -= count_words(doc) if tuples: batch.append((doc, gold)) # depends on [control=['if'], data=[]] else: batch.append(doc) # depends on [control=['while'], data=['batch_size']] if batch: yield batch # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]]
def get_media_timestamp(self, last_timestamp=None): """ Retrieves the most recent timestamp of the media in the static root. If last_timestamp is given, retrieves the first timestamp more recent than this value. """ r = self.local_renderer _latest_timestamp = -1e9999999999999999 for path in self.iter_static_paths(): path = r.env.static_root + '/' + path self.vprint('checking timestamp of path:', path) if not os.path.isfile(path): continue #print('path:', path) _latest_timestamp = max(_latest_timestamp, get_last_modified_timestamp(path) or _latest_timestamp) if last_timestamp is not None and _latest_timestamp > last_timestamp: break self.vprint('latest_timestamp:', _latest_timestamp) return _latest_timestamp
def function[get_media_timestamp, parameter[self, last_timestamp]]: constant[ Retrieves the most recent timestamp of the media in the static root. If last_timestamp is given, retrieves the first timestamp more recent than this value. ] variable[r] assign[=] name[self].local_renderer variable[_latest_timestamp] assign[=] <ast.UnaryOp object at 0x7da1b00df580> for taget[name[path]] in starred[call[name[self].iter_static_paths, parameter[]]] begin[:] variable[path] assign[=] binary_operation[binary_operation[name[r].env.static_root + constant[/]] + name[path]] call[name[self].vprint, parameter[constant[checking timestamp of path:], name[path]]] if <ast.UnaryOp object at 0x7da1b00de320> begin[:] continue variable[_latest_timestamp] assign[=] call[name[max], parameter[name[_latest_timestamp], <ast.BoolOp object at 0x7da1b00dcf40>]] if <ast.BoolOp object at 0x7da1b00dd3c0> begin[:] break call[name[self].vprint, parameter[constant[latest_timestamp:], name[_latest_timestamp]]] return[name[_latest_timestamp]]
keyword[def] identifier[get_media_timestamp] ( identifier[self] , identifier[last_timestamp] = keyword[None] ): literal[string] identifier[r] = identifier[self] . identifier[local_renderer] identifier[_latest_timestamp] =- literal[int] keyword[for] identifier[path] keyword[in] identifier[self] . identifier[iter_static_paths] (): identifier[path] = identifier[r] . identifier[env] . identifier[static_root] + literal[string] + identifier[path] identifier[self] . identifier[vprint] ( literal[string] , identifier[path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[path] ): keyword[continue] identifier[_latest_timestamp] = identifier[max] ( identifier[_latest_timestamp] , identifier[get_last_modified_timestamp] ( identifier[path] ) keyword[or] identifier[_latest_timestamp] ) keyword[if] identifier[last_timestamp] keyword[is] keyword[not] keyword[None] keyword[and] identifier[_latest_timestamp] > identifier[last_timestamp] : keyword[break] identifier[self] . identifier[vprint] ( literal[string] , identifier[_latest_timestamp] ) keyword[return] identifier[_latest_timestamp]
def get_media_timestamp(self, last_timestamp=None): """ Retrieves the most recent timestamp of the media in the static root. If last_timestamp is given, retrieves the first timestamp more recent than this value. """ r = self.local_renderer _latest_timestamp = -1e309 for path in self.iter_static_paths(): path = r.env.static_root + '/' + path self.vprint('checking timestamp of path:', path) if not os.path.isfile(path): continue # depends on [control=['if'], data=[]] #print('path:', path) _latest_timestamp = max(_latest_timestamp, get_last_modified_timestamp(path) or _latest_timestamp) if last_timestamp is not None and _latest_timestamp > last_timestamp: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['path']] self.vprint('latest_timestamp:', _latest_timestamp) return _latest_timestamp
def gen_slide_seg_list(mm_begin, mm_end, seg_duration, slide_step): """ η”Ÿζˆζ—Άι—΄η‰‡εΌ€ε§‹ζ—Άεˆ»εˆ—θ‘¨,既间片δ»₯slide_stepζ­₯ι•ΏθΏ›θ‘Œζ»‘εŠ¨ :param mm_begin: :param mm_end: :param seg_duration: :param slide_step: :return: """ seg_begin_list = [i for i in range(mm_begin, mm_end - seg_duration + 1, slide_step)] seg_list = list(map(time_util.minutes_to_time_str, seg_begin_list)) return seg_list
def function[gen_slide_seg_list, parameter[mm_begin, mm_end, seg_duration, slide_step]]: constant[ η”Ÿζˆζ—Άι—΄η‰‡εΌ€ε§‹ζ—Άεˆ»εˆ—θ‘¨,既间片δ»₯slide_stepζ­₯ι•ΏθΏ›θ‘Œζ»‘εŠ¨ :param mm_begin: :param mm_end: :param seg_duration: :param slide_step: :return: ] variable[seg_begin_list] assign[=] <ast.ListComp object at 0x7da1b26ad030> variable[seg_list] assign[=] call[name[list], parameter[call[name[map], parameter[name[time_util].minutes_to_time_str, name[seg_begin_list]]]]] return[name[seg_list]]
keyword[def] identifier[gen_slide_seg_list] ( identifier[mm_begin] , identifier[mm_end] , identifier[seg_duration] , identifier[slide_step] ): literal[string] identifier[seg_begin_list] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[mm_begin] , identifier[mm_end] - identifier[seg_duration] + literal[int] , identifier[slide_step] )] identifier[seg_list] = identifier[list] ( identifier[map] ( identifier[time_util] . identifier[minutes_to_time_str] , identifier[seg_begin_list] )) keyword[return] identifier[seg_list]
def gen_slide_seg_list(mm_begin, mm_end, seg_duration, slide_step): """ η”Ÿζˆζ—Άι—΄η‰‡εΌ€ε§‹ζ—Άεˆ»εˆ—θ‘¨,既间片δ»₯slide_stepζ­₯ι•ΏθΏ›θ‘Œζ»‘εŠ¨ :param mm_begin: :param mm_end: :param seg_duration: :param slide_step: :return: """ seg_begin_list = [i for i in range(mm_begin, mm_end - seg_duration + 1, slide_step)] seg_list = list(map(time_util.minutes_to_time_str, seg_begin_list)) return seg_list
def add_skip_connection(self, u, v, connection_type): """ Add a skip-connection to the descriptor. Args: u: Number of convolutional layers before the starting point. v: Number of convolutional layers before the ending point. connection_type: Must be either CONCAT_CONNECT or ADD_CONNECT. """ if connection_type not in [self.CONCAT_CONNECT, self.ADD_CONNECT]: raise ValueError( "connection_type should be NetworkDescriptor.CONCAT_CONNECT " "or NetworkDescriptor.ADD_CONNECT." ) self.skip_connections.append((u, v, connection_type))
def function[add_skip_connection, parameter[self, u, v, connection_type]]: constant[ Add a skip-connection to the descriptor. Args: u: Number of convolutional layers before the starting point. v: Number of convolutional layers before the ending point. connection_type: Must be either CONCAT_CONNECT or ADD_CONNECT. ] if compare[name[connection_type] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Attribute object at 0x7da18dc05f90>, <ast.Attribute object at 0x7da18dc076a0>]]] begin[:] <ast.Raise object at 0x7da18dc04160> call[name[self].skip_connections.append, parameter[tuple[[<ast.Name object at 0x7da18dc07400>, <ast.Name object at 0x7da18dc064a0>, <ast.Name object at 0x7da18dc06c50>]]]]
keyword[def] identifier[add_skip_connection] ( identifier[self] , identifier[u] , identifier[v] , identifier[connection_type] ): literal[string] keyword[if] identifier[connection_type] keyword[not] keyword[in] [ identifier[self] . identifier[CONCAT_CONNECT] , identifier[self] . identifier[ADD_CONNECT] ]: keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) identifier[self] . identifier[skip_connections] . identifier[append] (( identifier[u] , identifier[v] , identifier[connection_type] ))
def add_skip_connection(self, u, v, connection_type): """ Add a skip-connection to the descriptor. Args: u: Number of convolutional layers before the starting point. v: Number of convolutional layers before the ending point. connection_type: Must be either CONCAT_CONNECT or ADD_CONNECT. """ if connection_type not in [self.CONCAT_CONNECT, self.ADD_CONNECT]: raise ValueError('connection_type should be NetworkDescriptor.CONCAT_CONNECT or NetworkDescriptor.ADD_CONNECT.') # depends on [control=['if'], data=[]] self.skip_connections.append((u, v, connection_type))
def kma(inputfile_1, out_path, databases, db_path_kma, min_cov=0.6, threshold=0.9, kma_path="cge/kma/kma", sample_name="", inputfile_2=None, kma_mrs=None, kma_gapopen=None, kma_gapextend=None, kma_penalty=None, kma_reward=None, kma_pm=None, kma_fpm=None, kma_memmode=False, kma_nanopore=False, debug=False, kma_add_args=None): """ I expect that there will only be one hit pr gene, but if there are more, I assume that the sequence of the hits are the same in the res file and the aln file. """ threshold = threshold * 100 min_cov = min_cov * 100 kma_results = dict() kma_results["excluded"] = dict() if(sample_name): sample_name = "_" + sample_name # Initiate output dicts. gene_align_sbjct = {} gene_align_query = {} gene_align_homo = {} for db in databases: kma_db = db_path_kma + "/" + db kma_outfile = out_path + "/kma_" + db + sample_name kma_cmd = ("%s -t_db %s -o %s -e 1.0" % (kma_path, kma_db, kma_outfile)) if(inputfile_2 is not None): kma_cmd += " -ipe " + inputfile_1 + " " + inputfile_2 else: kma_cmd += " -i " + inputfile_1 if(kma_mrs is not None): kma_cmd += " -mrs " + str(kma_mrs) if(kma_gapopen is not None): kma_cmd += " -gapopen " + str(kma_gapopen) if(kma_gapextend is not None): kma_cmd += " -gapextend " + str(kma_gapextend) if(kma_penalty is not None): kma_cmd += " -penalty " + str(kma_penalty) if(kma_reward is not None): kma_cmd += " -reward " + str(kma_reward) if(kma_pm is not None): kma_cmd += " -pm " + kma_pm if(kma_fpm is not None): kma_cmd += " -fpm " + kma_fpm if (kma_memmode): kma_cmd += " -mem_mode " if (kma_nanopore): kma_cmd += " -bcNano " kma_cmd += " -mp 20 " if (kma_add_args is not None): kma_cmd += " " + kma_add_args + " " # kma output files align_filename = kma_outfile + ".aln" res_filename = kma_outfile + ".res" # If .res file exists then skip mapping if os.path.isfile(res_filename) and os.access(res_filename, os.R_OK): print("Found " + res_filename + " skipping DB.") else: # Call KMA if(debug): print("KMA cmd: " + kma_cmd) process = 
subprocess.Popen(kma_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE) out, err = process.communicate() kma_results[db] = 'No hit found' # Open res file try: res_file = open(res_filename, "r") header = res_file.readline() except IOError as error: sys.exit("Error: KMA did not run as expected.\n" + "KMA finished with the following response:" + "\n{}\n{}".format(out.decode("utf-8"), err.decode("utf-8"))) for line in res_file: if kma_results[db] == 'No hit found': kma_results[db] = dict() # kma_results[db]["excluded"] = dict() # continue data = [data.strip() for data in line.split("\t")] gene = data[0] sbjct_len = int(data[3]) sbjct_ident = float(data[4]) coverage = float(data[5]) depth = float(data[-3]) q_value = float(data[-2]) p_value = float(data[-1]) if gene not in kma_results[db]: hit = gene else: hit = gene + "_" + str(len(kma_results[db][gene]) + 1) exclude_reasons = [] if(coverage < min_cov or sbjct_ident < threshold): exclude_reasons.append(coverage) exclude_reasons.append(sbjct_ident) if(exclude_reasons): # kma_results[db]["excluded"][hit] = exclude_reasons kma_results["excluded"][hit] = exclude_reasons kma_results[db][hit] = dict() kma_results[db][hit]['sbjct_length'] = sbjct_len kma_results[db][hit]["perc_coverage"] = coverage kma_results[db][hit]["sbjct_string"] = [] kma_results[db][hit]["query_string"] = [] kma_results[db][hit]["homo_string"] = [] kma_results[db][hit]["sbjct_header"] = gene kma_results[db][hit]["perc_ident"] = sbjct_ident kma_results[db][hit]["query_start"] = "NA" kma_results[db][hit]["query_end"] = "NA" kma_results[db][hit]["contig_name"] = "NA" kma_results[db][hit]["HSP_length"] = "" kma_results[db][hit]["cal_score"] = q_value kma_results[db][hit]["depth"] = depth kma_results[db][hit]["p_value"] = p_value res_file.close() if kma_results[db] == 'No hit found': continue # Open align file with open(align_filename, "r") as align_file: hit_no = dict() gene = "" # Parse through alignments for line in align_file: # Skip empty 
lines if(not line.strip()): continue # Check when a new gene alignment start if line.startswith("#"): gene = line[1:].strip() if gene not in hit_no: hit_no[gene] = str(1) else: hit_no[gene] += str(int(hit_no[gene]) + 1) else: # Check if gene one of the user specified genes if hit_no[gene] == '1': hit = gene else: hit = gene + "_" + hit_no[gene] if hit in kma_results[db]: line_data = line.split("\t")[-1].strip() if line.startswith("template"): kma_results[db][hit]["sbjct_string"] += ( [line_data]) elif line.startswith("query"): kma_results[db][hit]["query_string"] += ( [line_data]) else: kma_results[db][hit]["homo_string"] += ( [line_data]) else: print(hit + " not in results: ", kma_results) # concatinate all sequences lists and find subject start # and subject end gene_align_sbjct[db] = {} gene_align_query[db] = {} gene_align_homo[db] = {} for hit in kma_results[db]: # if(hit == "excluded"): # continue align_sbjct = "".join(kma_results[db][hit]['sbjct_string']) align_query = "".join(kma_results[db][hit]['query_string']) align_homo = "".join(kma_results[db][hit]['homo_string']) # Extract only aligned sequences start = re.search("^-*(\w+)", align_query).start(1) end = re.search("\w+(-*)$", align_query).start(1) kma_results[db][hit]['sbjct_string'] = align_sbjct[start:end] kma_results[db][hit]['query_string'] = align_query[start:end] kma_results[db][hit]['homo_string'] = align_homo[start:end] # Save align start and stop positions relative to # subject sequence kma_results[db][hit]['sbjct_start'] = start + 1 kma_results[db][hit]["sbjct_end"] = end + 1 kma_results[db][hit]["HSP_length"] = end - start # Count gaps in the alignment kma_results[db][hit]["gaps"] = ( kma_results[db][hit]['sbjct_string'].count("-") + kma_results[db][hit]['query_string'].count("-")) # Save sequences covering the entire subject sequence # in seperate variables gene_align_sbjct[db][hit] = align_sbjct gene_align_query[db][hit] = align_query gene_align_homo[db][hit] = align_homo return 
FinderResult(kma_results, gene_align_sbjct, gene_align_query, gene_align_homo)
def function[kma, parameter[inputfile_1, out_path, databases, db_path_kma, min_cov, threshold, kma_path, sample_name, inputfile_2, kma_mrs, kma_gapopen, kma_gapextend, kma_penalty, kma_reward, kma_pm, kma_fpm, kma_memmode, kma_nanopore, debug, kma_add_args]]: constant[ I expect that there will only be one hit pr gene, but if there are more, I assume that the sequence of the hits are the same in the res file and the aln file. ] variable[threshold] assign[=] binary_operation[name[threshold] * constant[100]] variable[min_cov] assign[=] binary_operation[name[min_cov] * constant[100]] variable[kma_results] assign[=] call[name[dict], parameter[]] call[name[kma_results]][constant[excluded]] assign[=] call[name[dict], parameter[]] if name[sample_name] begin[:] variable[sample_name] assign[=] binary_operation[constant[_] + name[sample_name]] variable[gene_align_sbjct] assign[=] dictionary[[], []] variable[gene_align_query] assign[=] dictionary[[], []] variable[gene_align_homo] assign[=] dictionary[[], []] for taget[name[db]] in starred[name[databases]] begin[:] variable[kma_db] assign[=] binary_operation[binary_operation[name[db_path_kma] + constant[/]] + name[db]] variable[kma_outfile] assign[=] binary_operation[binary_operation[binary_operation[name[out_path] + constant[/kma_]] + name[db]] + name[sample_name]] variable[kma_cmd] assign[=] binary_operation[constant[%s -t_db %s -o %s -e 1.0] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0f02680>, <ast.Name object at 0x7da1b0f02650>, <ast.Name object at 0x7da1b0f02620>]]] if compare[name[inputfile_2] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f02530> if compare[name[kma_mrs] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f021d0> if compare[name[kma_gapopen] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f01fc0> if compare[name[kma_gapextend] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f01db0> if compare[name[kma_penalty] 
is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f01ba0> if compare[name[kma_reward] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f01990> if compare[name[kma_pm] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f01780> if compare[name[kma_fpm] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f015d0> if name[kma_memmode] begin[:] <ast.AugAssign object at 0x7da1b0f01480> if name[kma_nanopore] begin[:] <ast.AugAssign object at 0x7da1b0f01390> <ast.AugAssign object at 0x7da1b0f01300> if compare[name[kma_add_args] is_not constant[None]] begin[:] <ast.AugAssign object at 0x7da1b0f011b0> variable[align_filename] assign[=] binary_operation[name[kma_outfile] + constant[.aln]] variable[res_filename] assign[=] binary_operation[name[kma_outfile] + constant[.res]] if <ast.BoolOp object at 0x7da1b0f00e50> begin[:] call[name[print], parameter[binary_operation[binary_operation[constant[Found ] + name[res_filename]] + constant[ skipping DB.]]]] call[name[kma_results]][name[db]] assign[=] constant[No hit found] <ast.Try object at 0x7da1b0f003d0> for taget[name[line]] in starred[name[res_file]] begin[:] if compare[call[name[kma_results]][name[db]] equal[==] constant[No hit found]] begin[:] call[name[kma_results]][name[db]] assign[=] call[name[dict], parameter[]] variable[data] assign[=] <ast.ListComp object at 0x7da1b0f3ba00> variable[gene] assign[=] call[name[data]][constant[0]] variable[sbjct_len] assign[=] call[name[int], parameter[call[name[data]][constant[3]]]] variable[sbjct_ident] assign[=] call[name[float], parameter[call[name[data]][constant[4]]]] variable[coverage] assign[=] call[name[float], parameter[call[name[data]][constant[5]]]] variable[depth] assign[=] call[name[float], parameter[call[name[data]][<ast.UnaryOp object at 0x7da1b0f38e50>]]] variable[q_value] assign[=] call[name[float], parameter[call[name[data]][<ast.UnaryOp object at 0x7da1b0f38cd0>]]] variable[p_value] assign[=] call[name[float], 
parameter[call[name[data]][<ast.UnaryOp object at 0x7da1b0f38b50>]]] if compare[name[gene] <ast.NotIn object at 0x7da2590d7190> call[name[kma_results]][name[db]]] begin[:] variable[hit] assign[=] name[gene] variable[exclude_reasons] assign[=] list[[]] if <ast.BoolOp object at 0x7da1b0f38550> begin[:] call[name[exclude_reasons].append, parameter[name[coverage]]] call[name[exclude_reasons].append, parameter[name[sbjct_ident]]] if name[exclude_reasons] begin[:] call[call[name[kma_results]][constant[excluded]]][name[hit]] assign[=] name[exclude_reasons] call[call[name[kma_results]][name[db]]][name[hit]] assign[=] call[name[dict], parameter[]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_length]] assign[=] name[sbjct_len] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[perc_coverage]] assign[=] name[coverage] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_string]] assign[=] list[[]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_string]] assign[=] list[[]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[homo_string]] assign[=] list[[]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_header]] assign[=] name[gene] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[perc_ident]] assign[=] name[sbjct_ident] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_start]] assign[=] constant[NA] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_end]] assign[=] constant[NA] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[contig_name]] assign[=] constant[NA] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[HSP_length]] assign[=] constant[] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[cal_score]] assign[=] name[q_value] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[depth]] assign[=] name[depth] 
call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[p_value]] assign[=] name[p_value] call[name[res_file].close, parameter[]] if compare[call[name[kma_results]][name[db]] equal[==] constant[No hit found]] begin[:] continue with call[name[open], parameter[name[align_filename], constant[r]]] begin[:] variable[hit_no] assign[=] call[name[dict], parameter[]] variable[gene] assign[=] constant[] for taget[name[line]] in starred[name[align_file]] begin[:] if <ast.UnaryOp object at 0x7da1b0f129b0> begin[:] continue if call[name[line].startswith, parameter[constant[#]]] begin[:] variable[gene] assign[=] call[call[name[line]][<ast.Slice object at 0x7da1b0f11600>].strip, parameter[]] if compare[name[gene] <ast.NotIn object at 0x7da2590d7190> name[hit_no]] begin[:] call[name[hit_no]][name[gene]] assign[=] call[name[str], parameter[constant[1]]] call[name[gene_align_sbjct]][name[db]] assign[=] dictionary[[], []] call[name[gene_align_query]][name[db]] assign[=] dictionary[[], []] call[name[gene_align_homo]][name[db]] assign[=] dictionary[[], []] for taget[name[hit]] in starred[call[name[kma_results]][name[db]]] begin[:] variable[align_sbjct] assign[=] call[constant[].join, parameter[call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_string]]]] variable[align_query] assign[=] call[constant[].join, parameter[call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_string]]]] variable[align_homo] assign[=] call[constant[].join, parameter[call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[homo_string]]]] variable[start] assign[=] call[call[name[re].search, parameter[constant[^-*(\w+)], name[align_query]]].start, parameter[constant[1]]] variable[end] assign[=] call[call[name[re].search, parameter[constant[\w+(-*)$], name[align_query]]].start, parameter[constant[1]]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_string]] assign[=] call[name[align_sbjct]][<ast.Slice object at 0x7da1b0f43eb0>] 
call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_string]] assign[=] call[name[align_query]][<ast.Slice object at 0x7da1b0f42860>] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[homo_string]] assign[=] call[name[align_homo]][<ast.Slice object at 0x7da1b0f41b70>] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_start]] assign[=] binary_operation[name[start] + constant[1]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_end]] assign[=] binary_operation[name[end] + constant[1]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[HSP_length]] assign[=] binary_operation[name[end] - name[start]] call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[gaps]] assign[=] binary_operation[call[call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[sbjct_string]].count, parameter[constant[-]]] + call[call[call[call[name[kma_results]][name[db]]][name[hit]]][constant[query_string]].count, parameter[constant[-]]]] call[call[name[gene_align_sbjct]][name[db]]][name[hit]] assign[=] name[align_sbjct] call[call[name[gene_align_query]][name[db]]][name[hit]] assign[=] name[align_query] call[call[name[gene_align_homo]][name[db]]][name[hit]] assign[=] name[align_homo] return[call[name[FinderResult], parameter[name[kma_results], name[gene_align_sbjct], name[gene_align_query], name[gene_align_homo]]]]
keyword[def] identifier[kma] ( identifier[inputfile_1] , identifier[out_path] , identifier[databases] , identifier[db_path_kma] , identifier[min_cov] = literal[int] , identifier[threshold] = literal[int] , identifier[kma_path] = literal[string] , identifier[sample_name] = literal[string] , identifier[inputfile_2] = keyword[None] , identifier[kma_mrs] = keyword[None] , identifier[kma_gapopen] = keyword[None] , identifier[kma_gapextend] = keyword[None] , identifier[kma_penalty] = keyword[None] , identifier[kma_reward] = keyword[None] , identifier[kma_pm] = keyword[None] , identifier[kma_fpm] = keyword[None] , identifier[kma_memmode] = keyword[False] , identifier[kma_nanopore] = keyword[False] , identifier[debug] = keyword[False] , identifier[kma_add_args] = keyword[None] ): literal[string] identifier[threshold] = identifier[threshold] * literal[int] identifier[min_cov] = identifier[min_cov] * literal[int] identifier[kma_results] = identifier[dict] () identifier[kma_results] [ literal[string] ]= identifier[dict] () keyword[if] ( identifier[sample_name] ): identifier[sample_name] = literal[string] + identifier[sample_name] identifier[gene_align_sbjct] ={} identifier[gene_align_query] ={} identifier[gene_align_homo] ={} keyword[for] identifier[db] keyword[in] identifier[databases] : identifier[kma_db] = identifier[db_path_kma] + literal[string] + identifier[db] identifier[kma_outfile] = identifier[out_path] + literal[string] + identifier[db] + identifier[sample_name] identifier[kma_cmd] =( literal[string] %( identifier[kma_path] , identifier[kma_db] , identifier[kma_outfile] )) keyword[if] ( identifier[inputfile_2] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[inputfile_1] + literal[string] + identifier[inputfile_2] keyword[else] : identifier[kma_cmd] += literal[string] + identifier[inputfile_1] keyword[if] ( identifier[kma_mrs] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + 
identifier[str] ( identifier[kma_mrs] ) keyword[if] ( identifier[kma_gapopen] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[str] ( identifier[kma_gapopen] ) keyword[if] ( identifier[kma_gapextend] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[str] ( identifier[kma_gapextend] ) keyword[if] ( identifier[kma_penalty] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[str] ( identifier[kma_penalty] ) keyword[if] ( identifier[kma_reward] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[str] ( identifier[kma_reward] ) keyword[if] ( identifier[kma_pm] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[kma_pm] keyword[if] ( identifier[kma_fpm] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[kma_fpm] keyword[if] ( identifier[kma_memmode] ): identifier[kma_cmd] += literal[string] keyword[if] ( identifier[kma_nanopore] ): identifier[kma_cmd] += literal[string] identifier[kma_cmd] += literal[string] keyword[if] ( identifier[kma_add_args] keyword[is] keyword[not] keyword[None] ): identifier[kma_cmd] += literal[string] + identifier[kma_add_args] + literal[string] identifier[align_filename] = identifier[kma_outfile] + literal[string] identifier[res_filename] = identifier[kma_outfile] + literal[string] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[res_filename] ) keyword[and] identifier[os] . identifier[access] ( identifier[res_filename] , identifier[os] . identifier[R_OK] ): identifier[print] ( literal[string] + identifier[res_filename] + literal[string] ) keyword[else] : keyword[if] ( identifier[debug] ): identifier[print] ( literal[string] + identifier[kma_cmd] ) identifier[process] = identifier[subprocess] . 
identifier[Popen] ( identifier[kma_cmd] , identifier[shell] = keyword[True] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[PIPE] ) identifier[out] , identifier[err] = identifier[process] . identifier[communicate] () identifier[kma_results] [ identifier[db] ]= literal[string] keyword[try] : identifier[res_file] = identifier[open] ( identifier[res_filename] , literal[string] ) identifier[header] = identifier[res_file] . identifier[readline] () keyword[except] identifier[IOError] keyword[as] identifier[error] : identifier[sys] . identifier[exit] ( literal[string] + literal[string] + literal[string] . identifier[format] ( identifier[out] . identifier[decode] ( literal[string] ), identifier[err] . identifier[decode] ( literal[string] ))) keyword[for] identifier[line] keyword[in] identifier[res_file] : keyword[if] identifier[kma_results] [ identifier[db] ]== literal[string] : identifier[kma_results] [ identifier[db] ]= identifier[dict] () identifier[data] =[ identifier[data] . identifier[strip] () keyword[for] identifier[data] keyword[in] identifier[line] . 
identifier[split] ( literal[string] )] identifier[gene] = identifier[data] [ literal[int] ] identifier[sbjct_len] = identifier[int] ( identifier[data] [ literal[int] ]) identifier[sbjct_ident] = identifier[float] ( identifier[data] [ literal[int] ]) identifier[coverage] = identifier[float] ( identifier[data] [ literal[int] ]) identifier[depth] = identifier[float] ( identifier[data] [- literal[int] ]) identifier[q_value] = identifier[float] ( identifier[data] [- literal[int] ]) identifier[p_value] = identifier[float] ( identifier[data] [- literal[int] ]) keyword[if] identifier[gene] keyword[not] keyword[in] identifier[kma_results] [ identifier[db] ]: identifier[hit] = identifier[gene] keyword[else] : identifier[hit] = identifier[gene] + literal[string] + identifier[str] ( identifier[len] ( identifier[kma_results] [ identifier[db] ][ identifier[gene] ])+ literal[int] ) identifier[exclude_reasons] =[] keyword[if] ( identifier[coverage] < identifier[min_cov] keyword[or] identifier[sbjct_ident] < identifier[threshold] ): identifier[exclude_reasons] . identifier[append] ( identifier[coverage] ) identifier[exclude_reasons] . 
identifier[append] ( identifier[sbjct_ident] ) keyword[if] ( identifier[exclude_reasons] ): identifier[kma_results] [ literal[string] ][ identifier[hit] ]= identifier[exclude_reasons] identifier[kma_results] [ identifier[db] ][ identifier[hit] ]= identifier[dict] () identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[sbjct_len] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[coverage] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]=[] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]=[] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]=[] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[gene] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[sbjct_ident] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= literal[string] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= literal[string] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= literal[string] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= literal[string] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[q_value] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[depth] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[p_value] identifier[res_file] . 
identifier[close] () keyword[if] identifier[kma_results] [ identifier[db] ]== literal[string] : keyword[continue] keyword[with] identifier[open] ( identifier[align_filename] , literal[string] ) keyword[as] identifier[align_file] : identifier[hit_no] = identifier[dict] () identifier[gene] = literal[string] keyword[for] identifier[line] keyword[in] identifier[align_file] : keyword[if] ( keyword[not] identifier[line] . identifier[strip] ()): keyword[continue] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[gene] = identifier[line] [ literal[int] :]. identifier[strip] () keyword[if] identifier[gene] keyword[not] keyword[in] identifier[hit_no] : identifier[hit_no] [ identifier[gene] ]= identifier[str] ( literal[int] ) keyword[else] : identifier[hit_no] [ identifier[gene] ]+= identifier[str] ( identifier[int] ( identifier[hit_no] [ identifier[gene] ])+ literal[int] ) keyword[else] : keyword[if] identifier[hit_no] [ identifier[gene] ]== literal[string] : identifier[hit] = identifier[gene] keyword[else] : identifier[hit] = identifier[gene] + literal[string] + identifier[hit_no] [ identifier[gene] ] keyword[if] identifier[hit] keyword[in] identifier[kma_results] [ identifier[db] ]: identifier[line_data] = identifier[line] . identifier[split] ( literal[string] )[- literal[int] ]. identifier[strip] () keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]+=( [ identifier[line_data] ]) keyword[elif] identifier[line] . 
identifier[startswith] ( literal[string] ): identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]+=( [ identifier[line_data] ]) keyword[else] : identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]+=( [ identifier[line_data] ]) keyword[else] : identifier[print] ( identifier[hit] + literal[string] , identifier[kma_results] ) identifier[gene_align_sbjct] [ identifier[db] ]={} identifier[gene_align_query] [ identifier[db] ]={} identifier[gene_align_homo] [ identifier[db] ]={} keyword[for] identifier[hit] keyword[in] identifier[kma_results] [ identifier[db] ]: identifier[align_sbjct] = literal[string] . identifier[join] ( identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]) identifier[align_query] = literal[string] . identifier[join] ( identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]) identifier[align_homo] = literal[string] . identifier[join] ( identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]) identifier[start] = identifier[re] . identifier[search] ( literal[string] , identifier[align_query] ). identifier[start] ( literal[int] ) identifier[end] = identifier[re] . identifier[search] ( literal[string] , identifier[align_query] ). 
identifier[start] ( literal[int] ) identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[align_sbjct] [ identifier[start] : identifier[end] ] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[align_query] [ identifier[start] : identifier[end] ] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[align_homo] [ identifier[start] : identifier[end] ] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[start] + literal[int] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[end] + literal[int] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]= identifier[end] - identifier[start] identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]=( identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]. identifier[count] ( literal[string] )+ identifier[kma_results] [ identifier[db] ][ identifier[hit] ][ literal[string] ]. identifier[count] ( literal[string] )) identifier[gene_align_sbjct] [ identifier[db] ][ identifier[hit] ]= identifier[align_sbjct] identifier[gene_align_query] [ identifier[db] ][ identifier[hit] ]= identifier[align_query] identifier[gene_align_homo] [ identifier[db] ][ identifier[hit] ]= identifier[align_homo] keyword[return] identifier[FinderResult] ( identifier[kma_results] , identifier[gene_align_sbjct] , identifier[gene_align_query] , identifier[gene_align_homo] )
def kma(inputfile_1, out_path, databases, db_path_kma, min_cov=0.6, threshold=0.9, kma_path='cge/kma/kma', sample_name='', inputfile_2=None, kma_mrs=None, kma_gapopen=None, kma_gapextend=None, kma_penalty=None, kma_reward=None, kma_pm=None, kma_fpm=None, kma_memmode=False, kma_nanopore=False, debug=False, kma_add_args=None):
    """Map reads against one or more KMA databases and parse the output.

    For every database name in *databases* this assembles a ``kma`` command
    line, runs it (unless a matching ``.res`` file already exists in
    *out_path*), then parses the ``.res`` file (per-template statistics) and
    the ``.aln`` file (alignment strings) into nested dicts.

    Original author note: only one hit per gene is expected; if there are
    more, the hits are assumed to occur in the same order in the res file
    and the aln file.

    Parameters:
        inputfile_1 / inputfile_2: read file(s). With *inputfile_2* the pair
            is passed via ``-ipe``; otherwise *inputfile_1* alone via ``-i``.
        out_path: directory receiving ``kma_<db><sample_name>.res/.aln``.
        databases: iterable of database names resolved under *db_path_kma*.
        min_cov / threshold: fractions (0-1); scaled to percent below. Hits
            under either limit are recorded in ``kma_results['excluded']``
            (but are still added to the per-db results as well).
        kma_path: path to the kma binary.
        sample_name: optional suffix for the output file names.
        kma_mrs .. kma_add_args: optional pass-through options for kma.
        debug: when True, prints the assembled command line.

    Returns:
        FinderResult wrapping the per-db hit dicts plus the full-length
        subject/query/homology alignment strings per db and hit.
    """
    # The .res file reports percentages; scale the 0-1 inputs to match.
    threshold = threshold * 100
    min_cov = min_cov * 100
    kma_results = dict()
    kma_results['excluded'] = dict()
    if sample_name:
        sample_name = '_' + sample_name
    # Initiate output dicts (full-length alignment strings, per db per hit).
    gene_align_sbjct = {}
    gene_align_query = {}
    gene_align_homo = {}
    for db in databases:
        kma_db = db_path_kma + '/' + db
        kma_outfile = out_path + '/kma_' + db + sample_name
        kma_cmd = '%s -t_db %s -o %s -e 1.0' % (kma_path, kma_db, kma_outfile)
        if inputfile_2 is not None:
            # Paired-end input.
            kma_cmd += ' -ipe ' + inputfile_1 + ' ' + inputfile_2
        else:
            kma_cmd += ' -i ' + inputfile_1
        # Optional pass-through flags, appended only when set by the caller.
        if kma_mrs is not None:
            kma_cmd += ' -mrs ' + str(kma_mrs)
        if kma_gapopen is not None:
            kma_cmd += ' -gapopen ' + str(kma_gapopen)
        if kma_gapextend is not None:
            kma_cmd += ' -gapextend ' + str(kma_gapextend)
        if kma_penalty is not None:
            kma_cmd += ' -penalty ' + str(kma_penalty)
        if kma_reward is not None:
            kma_cmd += ' -reward ' + str(kma_reward)
        if kma_pm is not None:
            kma_cmd += ' -pm ' + kma_pm
        if kma_fpm is not None:
            kma_cmd += ' -fpm ' + kma_fpm
        if kma_memmode:
            kma_cmd += ' -mem_mode '
        if kma_nanopore:
            # Nanopore preset: basecall-aware mode plus a minimum phred of 20.
            kma_cmd += ' -bcNano '
            kma_cmd += ' -mp 20 '
        if kma_add_args is not None:
            kma_cmd += ' ' + kma_add_args + ' '

        # kma output files
        align_filename = kma_outfile + '.aln'
        res_filename = kma_outfile + '.res'

        # If .res file exists then skip mapping
        if os.path.isfile(res_filename) and os.access(res_filename, os.R_OK):
            print('Found ' + res_filename + ' skipping DB.')
        else:
            # Call KMA
            if debug:
                print('KMA cmd: ' + kma_cmd)
            process = subprocess.Popen(kma_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            (out, err) = process.communicate()
            # NOTE(review): out/err are only bound on this branch, but the
            # except clause below formats them unconditionally -- if a stale
            # .res file exists yet fails to open, that would raise NameError
            # instead of the intended error message. Confirm and fix upstream.

        kma_results[db] = 'No hit found'

        # Open res file
        try:
            res_file = open(res_filename, 'r')
            header = res_file.readline()  # skip the column-header line
        except IOError as error:
            sys.exit('Error: KMA did not run as expected.\n' + 'KMA finished with the following response:' + '\n{}\n{}'.format(out.decode('utf-8'), err.decode('utf-8')))

        for line in res_file:
            # First data line: switch the sentinel string for a real dict.
            if kma_results[db] == 'No hit found':
                kma_results[db] = dict()

            data = [data.strip() for data in line.split('\t')]
            gene = data[0]
            # Positional .res columns: template length, identity, coverage,
            # then depth / q-value / p-value taken from the line's tail.
            sbjct_len = int(data[3])
            sbjct_ident = float(data[4])
            coverage = float(data[5])
            depth = float(data[-3])
            q_value = float(data[-2])
            p_value = float(data[-1])

            # Name duplicate hits "<gene>_<n>".
            if gene not in kma_results[db]:
                hit = gene
            else:
                # NOTE(review): len(kma_results[db][gene]) counts the keys of
                # the first hit's stats dict, not the number of earlier hits
                # for this gene -- presumably a duplicate counter was
                # intended; verify this matches the aln hit naming below.
                hit = gene + '_' + str(len(kma_results[db][gene]) + 1)

            exclude_reasons = []
            if coverage < min_cov or sbjct_ident < threshold:
                exclude_reasons.append(coverage)
                exclude_reasons.append(sbjct_ident)
            if exclude_reasons:
                # Below-threshold hit: recorded here, but note it is still
                # added to kma_results[db] just below.
                kma_results['excluded'][hit] = exclude_reasons

            # Per-hit statistics; the three *_string lists are filled from
            # the .aln file in the next pass.
            kma_results[db][hit] = dict()
            kma_results[db][hit]['sbjct_length'] = sbjct_len
            kma_results[db][hit]['perc_coverage'] = coverage
            kma_results[db][hit]['sbjct_string'] = []
            kma_results[db][hit]['query_string'] = []
            kma_results[db][hit]['homo_string'] = []
            kma_results[db][hit]['sbjct_header'] = gene
            kma_results[db][hit]['perc_ident'] = sbjct_ident
            kma_results[db][hit]['query_start'] = 'NA'
            kma_results[db][hit]['query_end'] = 'NA'
            kma_results[db][hit]['contig_name'] = 'NA'
            kma_results[db][hit]['HSP_length'] = ''
            kma_results[db][hit]['cal_score'] = q_value
            kma_results[db][hit]['depth'] = depth
            kma_results[db][hit]['p_value'] = p_value
        res_file.close()

        # Nothing mapped for this db: skip alignment parsing.
        if kma_results[db] == 'No hit found':
            continue

        # Open align file
        with open(align_filename, 'r') as align_file:
            hit_no = dict()  # gene -> occurrence counter (stored as string)
            gene = ''
            # Parse through alignments
            for line in align_file:
                # Skip empty lines
                if not line.strip():
                    continue
                # A '#' line marks the start of a new gene alignment.
                if line.startswith('#'):
                    gene = line[1:].strip()
                    if gene not in hit_no:
                        hit_no[gene] = str(1)
                    else:
                        # NOTE(review): string concatenation -- '1' becomes
                        # '12' rather than '2'. It looks like
                        # hit_no[gene] = str(int(hit_no[gene]) + 1) was
                        # intended; confirm against the res-file hit naming.
                        hit_no[gene] += str(int(hit_no[gene]) + 1)
                else:
                    # Map the alignment back to the hit name used above.
                    if hit_no[gene] == '1':
                        hit = gene
                    else:
                        hit = gene + '_' + hit_no[gene]
                    if hit in kma_results[db]:
                        # Last tab-separated field holds the sequence chunk;
                        # the line prefix says which of the three strings
                        # (template/query/consensus) it belongs to.
                        line_data = line.split('\t')[-1].strip()
                        if line.startswith('template'):
                            kma_results[db][hit]['sbjct_string'] += (
                                [line_data])
                        elif line.startswith('query'):
                            kma_results[db][hit]['query_string'] += (
                                [line_data])
                        else:
                            kma_results[db][hit]['homo_string'] += (
                                [line_data])
                    else:
                        print(hit + ' not in results: ', kma_results)

        # concatinate all sequences lists and find subject start
        # and subject end
        gene_align_sbjct[db] = {}
        gene_align_query[db] = {}
        gene_align_homo[db] = {}
        for hit in kma_results[db]:
            align_sbjct = ''.join(kma_results[db][hit]['sbjct_string'])
            align_query = ''.join(kma_results[db][hit]['query_string'])
            align_homo = ''.join(kma_results[db][hit]['homo_string'])

            # Extract only aligned sequences: trim leading/trailing gap runs
            # of the query off all three strings.
            start = re.search('^-*(\\w+)', align_query).start(1)
            end = re.search('\\w+(-*)$', align_query).start(1)

            kma_results[db][hit]['sbjct_string'] = align_sbjct[start:end]
            kma_results[db][hit]['query_string'] = align_query[start:end]
            kma_results[db][hit]['homo_string'] = align_homo[start:end]

            # Save align start and stop positions relative to
            # subject sequence (1-based).
            kma_results[db][hit]['sbjct_start'] = start + 1
            kma_results[db][hit]['sbjct_end'] = end + 1
            kma_results[db][hit]['HSP_length'] = end - start

            # Count gaps in the alignment
            kma_results[db][hit]['gaps'] = (
                kma_results[db][hit]['sbjct_string'].count('-') +
                kma_results[db][hit]['query_string'].count('-'))

            # Save sequences covering the entire subject sequence
            # in seperate variables
            gene_align_sbjct[db][hit] = align_sbjct
            gene_align_query[db][hit] = align_query
            gene_align_homo[db][hit] = align_homo

    return FinderResult(kma_results, gene_align_sbjct, gene_align_query, gene_align_homo)
def read_sha1( file_path, buf_size = None, start_byte = 0, read_size = None, extra_hashers = [], # update(data) will be called on all of these ): ''' Determines the sha1 hash of a file in chunks, to prevent loading the entire file at once into memory ''' read_size = read_size or os.stat(file_path).st_size buf_size = buf_size or DEFAULT_BUFFER_SIZE data_read = 0 total_sha1 = hashlib.sha1() while data_read < read_size: with open( file_path, 'rb', buffering = 0 ) as f: f.seek( start_byte ) data = f.read( min(buf_size, read_size - data_read) ) assert( len(data) > 0 ) total_sha1.update( data ) for hasher in extra_hashers: hasher.update( data ) data_read += len(data) start_byte += len(data) assert( data_read == read_size ) return total_sha1
def function[read_sha1, parameter[file_path, buf_size, start_byte, read_size, extra_hashers]]: constant[ Determines the sha1 hash of a file in chunks, to prevent loading the entire file at once into memory ] variable[read_size] assign[=] <ast.BoolOp object at 0x7da204567820> variable[buf_size] assign[=] <ast.BoolOp object at 0x7da2045647c0> variable[data_read] assign[=] constant[0] variable[total_sha1] assign[=] call[name[hashlib].sha1, parameter[]] while compare[name[data_read] less[<] name[read_size]] begin[:] with call[name[open], parameter[name[file_path], constant[rb]]] begin[:] call[name[f].seek, parameter[name[start_byte]]] variable[data] assign[=] call[name[f].read, parameter[call[name[min], parameter[name[buf_size], binary_operation[name[read_size] - name[data_read]]]]]] assert[compare[call[name[len], parameter[name[data]]] greater[>] constant[0]]] call[name[total_sha1].update, parameter[name[data]]] for taget[name[hasher]] in starred[name[extra_hashers]] begin[:] call[name[hasher].update, parameter[name[data]]] <ast.AugAssign object at 0x7da204620e20> <ast.AugAssign object at 0x7da2046226e0> assert[compare[name[data_read] equal[==] name[read_size]]] return[name[total_sha1]]
keyword[def] identifier[read_sha1] ( identifier[file_path] , identifier[buf_size] = keyword[None] , identifier[start_byte] = literal[int] , identifier[read_size] = keyword[None] , identifier[extra_hashers] =[], ): literal[string] identifier[read_size] = identifier[read_size] keyword[or] identifier[os] . identifier[stat] ( identifier[file_path] ). identifier[st_size] identifier[buf_size] = identifier[buf_size] keyword[or] identifier[DEFAULT_BUFFER_SIZE] identifier[data_read] = literal[int] identifier[total_sha1] = identifier[hashlib] . identifier[sha1] () keyword[while] identifier[data_read] < identifier[read_size] : keyword[with] identifier[open] ( identifier[file_path] , literal[string] , identifier[buffering] = literal[int] ) keyword[as] identifier[f] : identifier[f] . identifier[seek] ( identifier[start_byte] ) identifier[data] = identifier[f] . identifier[read] ( identifier[min] ( identifier[buf_size] , identifier[read_size] - identifier[data_read] )) keyword[assert] ( identifier[len] ( identifier[data] )> literal[int] ) identifier[total_sha1] . identifier[update] ( identifier[data] ) keyword[for] identifier[hasher] keyword[in] identifier[extra_hashers] : identifier[hasher] . identifier[update] ( identifier[data] ) identifier[data_read] += identifier[len] ( identifier[data] ) identifier[start_byte] += identifier[len] ( identifier[data] ) keyword[assert] ( identifier[data_read] == identifier[read_size] ) keyword[return] identifier[total_sha1]
def read_sha1(file_path, buf_size=None, start_byte=0, read_size=None, extra_hashers=[]): # update(data) will be called on all of these '\n Determines the sha1 hash of a file in chunks, to prevent loading the entire file at once into memory\n ' read_size = read_size or os.stat(file_path).st_size buf_size = buf_size or DEFAULT_BUFFER_SIZE data_read = 0 total_sha1 = hashlib.sha1() while data_read < read_size: with open(file_path, 'rb', buffering=0) as f: f.seek(start_byte) data = f.read(min(buf_size, read_size - data_read)) assert len(data) > 0 total_sha1.update(data) for hasher in extra_hashers: hasher.update(data) # depends on [control=['for'], data=['hasher']] data_read += len(data) start_byte += len(data) # depends on [control=['with'], data=['f']] # depends on [control=['while'], data=['data_read', 'read_size']] assert data_read == read_size return total_sha1
def addContinuousSet(self, continuousSet): """ Adds the specified continuousSet to this dataset. """ id_ = continuousSet.getId() self._continuousSetIdMap[id_] = continuousSet self._continuousSetIds.append(id_) name = continuousSet.getLocalId() self._continuousSetNameMap[name] = continuousSet
def function[addContinuousSet, parameter[self, continuousSet]]: constant[ Adds the specified continuousSet to this dataset. ] variable[id_] assign[=] call[name[continuousSet].getId, parameter[]] call[name[self]._continuousSetIdMap][name[id_]] assign[=] name[continuousSet] call[name[self]._continuousSetIds.append, parameter[name[id_]]] variable[name] assign[=] call[name[continuousSet].getLocalId, parameter[]] call[name[self]._continuousSetNameMap][name[name]] assign[=] name[continuousSet]
keyword[def] identifier[addContinuousSet] ( identifier[self] , identifier[continuousSet] ): literal[string] identifier[id_] = identifier[continuousSet] . identifier[getId] () identifier[self] . identifier[_continuousSetIdMap] [ identifier[id_] ]= identifier[continuousSet] identifier[self] . identifier[_continuousSetIds] . identifier[append] ( identifier[id_] ) identifier[name] = identifier[continuousSet] . identifier[getLocalId] () identifier[self] . identifier[_continuousSetNameMap] [ identifier[name] ]= identifier[continuousSet]
def addContinuousSet(self, continuousSet): """ Adds the specified continuousSet to this dataset. """ id_ = continuousSet.getId() self._continuousSetIdMap[id_] = continuousSet self._continuousSetIds.append(id_) name = continuousSet.getLocalId() self._continuousSetNameMap[name] = continuousSet
def update_payload_in_draft_for_edit_extension(self, upload_stream, publisher_name, extension_name, draft_id, file_name=None, **kwargs): """UpdatePayloadInDraftForEditExtension. [Preview API] :param object upload_stream: Stream to upload :param str publisher_name: :param str extension_name: :param str draft_id: :param String file_name: Header to pass the filename of the uploaded data :rtype: :class:`<ExtensionDraft> <azure.devops.v5_0.gallery.models.ExtensionDraft>` """ route_values = {} if publisher_name is not None: route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str') if extension_name is not None: route_values['extensionName'] = self._serialize.url('extension_name', extension_name, 'str') if draft_id is not None: route_values['draftId'] = self._serialize.url('draft_id', draft_id, 'str') if "callback" in kwargs: callback = kwargs["callback"] else: callback = None content = self._client.stream_upload(upload_stream, callback=callback) response = self._send(http_method='PUT', location_id='02b33873-4e61-496e-83a2-59d1df46b7d8', version='5.0-preview.1', route_values=route_values, content=content, media_type='application/octet-stream') return self._deserialize('ExtensionDraft', response)
def function[update_payload_in_draft_for_edit_extension, parameter[self, upload_stream, publisher_name, extension_name, draft_id, file_name]]: constant[UpdatePayloadInDraftForEditExtension. [Preview API] :param object upload_stream: Stream to upload :param str publisher_name: :param str extension_name: :param str draft_id: :param String file_name: Header to pass the filename of the uploaded data :rtype: :class:`<ExtensionDraft> <azure.devops.v5_0.gallery.models.ExtensionDraft>` ] variable[route_values] assign[=] dictionary[[], []] if compare[name[publisher_name] is_not constant[None]] begin[:] call[name[route_values]][constant[publisherName]] assign[=] call[name[self]._serialize.url, parameter[constant[publisher_name], name[publisher_name], constant[str]]] if compare[name[extension_name] is_not constant[None]] begin[:] call[name[route_values]][constant[extensionName]] assign[=] call[name[self]._serialize.url, parameter[constant[extension_name], name[extension_name], constant[str]]] if compare[name[draft_id] is_not constant[None]] begin[:] call[name[route_values]][constant[draftId]] assign[=] call[name[self]._serialize.url, parameter[constant[draft_id], name[draft_id], constant[str]]] if compare[constant[callback] in name[kwargs]] begin[:] variable[callback] assign[=] call[name[kwargs]][constant[callback]] variable[content] assign[=] call[name[self]._client.stream_upload, parameter[name[upload_stream]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[ExtensionDraft], name[response]]]]
keyword[def] identifier[update_payload_in_draft_for_edit_extension] ( identifier[self] , identifier[upload_stream] , identifier[publisher_name] , identifier[extension_name] , identifier[draft_id] , identifier[file_name] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[route_values] ={} keyword[if] identifier[publisher_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[publisher_name] , literal[string] ) keyword[if] identifier[extension_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[extension_name] , literal[string] ) keyword[if] identifier[draft_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[draft_id] , literal[string] ) keyword[if] literal[string] keyword[in] identifier[kwargs] : identifier[callback] = identifier[kwargs] [ literal[string] ] keyword[else] : identifier[callback] = keyword[None] identifier[content] = identifier[self] . identifier[_client] . identifier[stream_upload] ( identifier[upload_stream] , identifier[callback] = identifier[callback] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[content] = identifier[content] , identifier[media_type] = literal[string] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def update_payload_in_draft_for_edit_extension(self, upload_stream, publisher_name, extension_name, draft_id, file_name=None, **kwargs): """UpdatePayloadInDraftForEditExtension. [Preview API] :param object upload_stream: Stream to upload :param str publisher_name: :param str extension_name: :param str draft_id: :param String file_name: Header to pass the filename of the uploaded data :rtype: :class:`<ExtensionDraft> <azure.devops.v5_0.gallery.models.ExtensionDraft>` """ route_values = {} if publisher_name is not None: route_values['publisherName'] = self._serialize.url('publisher_name', publisher_name, 'str') # depends on [control=['if'], data=['publisher_name']] if extension_name is not None: route_values['extensionName'] = self._serialize.url('extension_name', extension_name, 'str') # depends on [control=['if'], data=['extension_name']] if draft_id is not None: route_values['draftId'] = self._serialize.url('draft_id', draft_id, 'str') # depends on [control=['if'], data=['draft_id']] if 'callback' in kwargs: callback = kwargs['callback'] # depends on [control=['if'], data=['kwargs']] else: callback = None content = self._client.stream_upload(upload_stream, callback=callback) response = self._send(http_method='PUT', location_id='02b33873-4e61-496e-83a2-59d1df46b7d8', version='5.0-preview.1', route_values=route_values, content=content, media_type='application/octet-stream') return self._deserialize('ExtensionDraft', response)
def user_config_file(self): """Get the absolute path to the user config file.""" return os.path.join( get_user_config_dir(self.app_name, self.app_author), self.filename)
def function[user_config_file, parameter[self]]: constant[Get the absolute path to the user config file.] return[call[name[os].path.join, parameter[call[name[get_user_config_dir], parameter[name[self].app_name, name[self].app_author]], name[self].filename]]]
keyword[def] identifier[user_config_file] ( identifier[self] ): literal[string] keyword[return] identifier[os] . identifier[path] . identifier[join] ( identifier[get_user_config_dir] ( identifier[self] . identifier[app_name] , identifier[self] . identifier[app_author] ), identifier[self] . identifier[filename] )
def user_config_file(self): """Get the absolute path to the user config file.""" return os.path.join(get_user_config_dir(self.app_name, self.app_author), self.filename)
def process_item(self, item, spider): """ Use the Pa11y command line tool to get an a11y report. """ config_file = write_pa11y_config(item) args = [ self.pa11y_path, item["url"], '--config={file}'.format(file=config_file.name), ] for flag, value in self.cli_flags.items(): args.append("--{flag}={value}".format(flag=flag, value=value)) retries_remaining = 3 while retries_remaining: logline = " ".join(args) if retries_remaining != 3: logline += u" # (retry {num})".format(num=3-retries_remaining) spider.logger.info(logline) proc = sp.Popen( args, shell=False, stdout=sp.PIPE, stderr=sp.PIPE, ) stdout, stderr = proc.communicate() if proc.returncode in (0, 2): # `pa11y` ran successfully! # Return code 0 means no a11y errors. # Return code 2 means `pa11y` identified a11y errors. # Either way, we're done, so break out of the `while` loop break else: # `pa11y` did _not_ run successfully! # We sometimes get the error "Truffler timed out": # truffler is what accesses the web page for `pa11y1`. # https://www.npmjs.com/package/truffler # If this is the error, we can resolve it just by trying again, # so decrement the retries_remaining and start over. retries_remaining -= 1 if retries_remaining == 0: raise DropItem( u"Couldn't get pa11y results for {url}. Error:\n{err}".format( url=item['url'], err=stderr, ) ) pa11y_results = load_pa11y_results(stdout, spider, item['url']) check_title_match(item['page_title'], pa11y_results, spider.logger) track_pa11y_stats(pa11y_results, spider) os.remove(config_file.name) write_pa11y_results(item, pa11y_results, Path(spider.data_dir)) return item
def function[process_item, parameter[self, item, spider]]: constant[ Use the Pa11y command line tool to get an a11y report. ] variable[config_file] assign[=] call[name[write_pa11y_config], parameter[name[item]]] variable[args] assign[=] list[[<ast.Attribute object at 0x7da18ede4700>, <ast.Subscript object at 0x7da18ede72e0>, <ast.Call object at 0x7da18ede6d70>]] for taget[tuple[[<ast.Name object at 0x7da18ede4730>, <ast.Name object at 0x7da18ede7430>]]] in starred[call[name[self].cli_flags.items, parameter[]]] begin[:] call[name[args].append, parameter[call[constant[--{flag}={value}].format, parameter[]]]] variable[retries_remaining] assign[=] constant[3] while name[retries_remaining] begin[:] variable[logline] assign[=] call[constant[ ].join, parameter[name[args]]] if compare[name[retries_remaining] not_equal[!=] constant[3]] begin[:] <ast.AugAssign object at 0x7da1b2345d20> call[name[spider].logger.info, parameter[name[logline]]] variable[proc] assign[=] call[name[sp].Popen, parameter[name[args]]] <ast.Tuple object at 0x7da1b23452a0> assign[=] call[name[proc].communicate, parameter[]] if compare[name[proc].returncode in tuple[[<ast.Constant object at 0x7da18bc72f20>, <ast.Constant object at 0x7da18bc71600>]]] begin[:] break if compare[name[retries_remaining] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da18bc72d40> variable[pa11y_results] assign[=] call[name[load_pa11y_results], parameter[name[stdout], name[spider], call[name[item]][constant[url]]]] call[name[check_title_match], parameter[call[name[item]][constant[page_title]], name[pa11y_results], name[spider].logger]] call[name[track_pa11y_stats], parameter[name[pa11y_results], name[spider]]] call[name[os].remove, parameter[name[config_file].name]] call[name[write_pa11y_results], parameter[name[item], name[pa11y_results], call[name[Path], parameter[name[spider].data_dir]]]] return[name[item]]
keyword[def] identifier[process_item] ( identifier[self] , identifier[item] , identifier[spider] ): literal[string] identifier[config_file] = identifier[write_pa11y_config] ( identifier[item] ) identifier[args] =[ identifier[self] . identifier[pa11y_path] , identifier[item] [ literal[string] ], literal[string] . identifier[format] ( identifier[file] = identifier[config_file] . identifier[name] ), ] keyword[for] identifier[flag] , identifier[value] keyword[in] identifier[self] . identifier[cli_flags] . identifier[items] (): identifier[args] . identifier[append] ( literal[string] . identifier[format] ( identifier[flag] = identifier[flag] , identifier[value] = identifier[value] )) identifier[retries_remaining] = literal[int] keyword[while] identifier[retries_remaining] : identifier[logline] = literal[string] . identifier[join] ( identifier[args] ) keyword[if] identifier[retries_remaining] != literal[int] : identifier[logline] += literal[string] . identifier[format] ( identifier[num] = literal[int] - identifier[retries_remaining] ) identifier[spider] . identifier[logger] . identifier[info] ( identifier[logline] ) identifier[proc] = identifier[sp] . identifier[Popen] ( identifier[args] , identifier[shell] = keyword[False] , identifier[stdout] = identifier[sp] . identifier[PIPE] , identifier[stderr] = identifier[sp] . identifier[PIPE] , ) identifier[stdout] , identifier[stderr] = identifier[proc] . identifier[communicate] () keyword[if] identifier[proc] . identifier[returncode] keyword[in] ( literal[int] , literal[int] ): keyword[break] keyword[else] : identifier[retries_remaining] -= literal[int] keyword[if] identifier[retries_remaining] == literal[int] : keyword[raise] identifier[DropItem] ( literal[string] . 
identifier[format] ( identifier[url] = identifier[item] [ literal[string] ], identifier[err] = identifier[stderr] , ) ) identifier[pa11y_results] = identifier[load_pa11y_results] ( identifier[stdout] , identifier[spider] , identifier[item] [ literal[string] ]) identifier[check_title_match] ( identifier[item] [ literal[string] ], identifier[pa11y_results] , identifier[spider] . identifier[logger] ) identifier[track_pa11y_stats] ( identifier[pa11y_results] , identifier[spider] ) identifier[os] . identifier[remove] ( identifier[config_file] . identifier[name] ) identifier[write_pa11y_results] ( identifier[item] , identifier[pa11y_results] , identifier[Path] ( identifier[spider] . identifier[data_dir] )) keyword[return] identifier[item]
def process_item(self, item, spider): """ Use the Pa11y command line tool to get an a11y report. """ config_file = write_pa11y_config(item) args = [self.pa11y_path, item['url'], '--config={file}'.format(file=config_file.name)] for (flag, value) in self.cli_flags.items(): args.append('--{flag}={value}'.format(flag=flag, value=value)) # depends on [control=['for'], data=[]] retries_remaining = 3 while retries_remaining: logline = ' '.join(args) if retries_remaining != 3: logline += u' # (retry {num})'.format(num=3 - retries_remaining) # depends on [control=['if'], data=['retries_remaining']] spider.logger.info(logline) proc = sp.Popen(args, shell=False, stdout=sp.PIPE, stderr=sp.PIPE) (stdout, stderr) = proc.communicate() if proc.returncode in (0, 2): # `pa11y` ran successfully! # Return code 0 means no a11y errors. # Return code 2 means `pa11y` identified a11y errors. # Either way, we're done, so break out of the `while` loop break # depends on [control=['if'], data=[]] else: # `pa11y` did _not_ run successfully! # We sometimes get the error "Truffler timed out": # truffler is what accesses the web page for `pa11y1`. # https://www.npmjs.com/package/truffler # If this is the error, we can resolve it just by trying again, # so decrement the retries_remaining and start over. retries_remaining -= 1 # depends on [control=['while'], data=[]] if retries_remaining == 0: raise DropItem(u"Couldn't get pa11y results for {url}. Error:\n{err}".format(url=item['url'], err=stderr)) # depends on [control=['if'], data=[]] pa11y_results = load_pa11y_results(stdout, spider, item['url']) check_title_match(item['page_title'], pa11y_results, spider.logger) track_pa11y_stats(pa11y_results, spider) os.remove(config_file.name) write_pa11y_results(item, pa11y_results, Path(spider.data_dir)) return item
def azm(self): """Corrected azimuth, taking into account backsight, declination, and compass corrections.""" azm1 = self.get('BEARING', None) azm2 = self.get('AZM2', None) if azm1 is None and azm2 is None: return None if azm2 is None: return azm1 + self.declination if azm1 is None: return (azm2 + 180) % 360 + self.declination return (azm1 + (azm2 + 180) % 360) / 2.0 + self.declination
def function[azm, parameter[self]]: constant[Corrected azimuth, taking into account backsight, declination, and compass corrections.] variable[azm1] assign[=] call[name[self].get, parameter[constant[BEARING], constant[None]]] variable[azm2] assign[=] call[name[self].get, parameter[constant[AZM2], constant[None]]] if <ast.BoolOp object at 0x7da1b0946530> begin[:] return[constant[None]] if compare[name[azm2] is constant[None]] begin[:] return[binary_operation[name[azm1] + name[self].declination]] if compare[name[azm1] is constant[None]] begin[:] return[binary_operation[binary_operation[binary_operation[name[azm2] + constant[180]] <ast.Mod object at 0x7da2590d6920> constant[360]] + name[self].declination]] return[binary_operation[binary_operation[binary_operation[name[azm1] + binary_operation[binary_operation[name[azm2] + constant[180]] <ast.Mod object at 0x7da2590d6920> constant[360]]] / constant[2.0]] + name[self].declination]]
keyword[def] identifier[azm] ( identifier[self] ): literal[string] identifier[azm1] = identifier[self] . identifier[get] ( literal[string] , keyword[None] ) identifier[azm2] = identifier[self] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[azm1] keyword[is] keyword[None] keyword[and] identifier[azm2] keyword[is] keyword[None] : keyword[return] keyword[None] keyword[if] identifier[azm2] keyword[is] keyword[None] : keyword[return] identifier[azm1] + identifier[self] . identifier[declination] keyword[if] identifier[azm1] keyword[is] keyword[None] : keyword[return] ( identifier[azm2] + literal[int] )% literal[int] + identifier[self] . identifier[declination] keyword[return] ( identifier[azm1] +( identifier[azm2] + literal[int] )% literal[int] )/ literal[int] + identifier[self] . identifier[declination]
def azm(self): """Corrected azimuth, taking into account backsight, declination, and compass corrections.""" azm1 = self.get('BEARING', None) azm2 = self.get('AZM2', None) if azm1 is None and azm2 is None: return None # depends on [control=['if'], data=[]] if azm2 is None: return azm1 + self.declination # depends on [control=['if'], data=[]] if azm1 is None: return (azm2 + 180) % 360 + self.declination # depends on [control=['if'], data=[]] return (azm1 + (azm2 + 180) % 360) / 2.0 + self.declination
def pm(client, event, channel, nick, rest): 'Arggh matey' if rest: rest = rest.strip() Karma.store.change(rest, 2) rcpt = rest else: rcpt = channel if random.random() > 0.95: return f"Arrggh ye be doin' great, grand work, {rcpt}!" return f"Arrggh ye be doin' good work, {rcpt}!"
def function[pm, parameter[client, event, channel, nick, rest]]: constant[Arggh matey] if name[rest] begin[:] variable[rest] assign[=] call[name[rest].strip, parameter[]] call[name[Karma].store.change, parameter[name[rest], constant[2]]] variable[rcpt] assign[=] name[rest] if compare[call[name[random].random, parameter[]] greater[>] constant[0.95]] begin[:] return[<ast.JoinedStr object at 0x7da20c6e51b0>] return[<ast.JoinedStr object at 0x7da20c6e5e10>]
keyword[def] identifier[pm] ( identifier[client] , identifier[event] , identifier[channel] , identifier[nick] , identifier[rest] ): literal[string] keyword[if] identifier[rest] : identifier[rest] = identifier[rest] . identifier[strip] () identifier[Karma] . identifier[store] . identifier[change] ( identifier[rest] , literal[int] ) identifier[rcpt] = identifier[rest] keyword[else] : identifier[rcpt] = identifier[channel] keyword[if] identifier[random] . identifier[random] ()> literal[int] : keyword[return] literal[string] keyword[return] literal[string]
def pm(client, event, channel, nick, rest): """Arggh matey""" if rest: rest = rest.strip() Karma.store.change(rest, 2) rcpt = rest # depends on [control=['if'], data=[]] else: rcpt = channel if random.random() > 0.95: return f"Arrggh ye be doin' great, grand work, {rcpt}!" # depends on [control=['if'], data=[]] return f"Arrggh ye be doin' good work, {rcpt}!"
def _apply_callables(self, acl, obj=None): """ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL """ new_acl = [] for i, ace in enumerate(acl): principal = ace[1] if six.callable(principal): ace = principal(ace=ace, request=self.request, obj=obj) if not ace: continue if not isinstance(ace[0], (list, tuple)): ace = [ace] ace = [(a, b, validate_permissions(c)) for a, b, c in ace] else: ace = [ace] new_acl += ace return tuple(new_acl)
def function[_apply_callables, parameter[self, acl, obj]]: constant[ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL ] variable[new_acl] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da18f720c70>, <ast.Name object at 0x7da18f722b30>]]] in starred[call[name[enumerate], parameter[name[acl]]]] begin[:] variable[principal] assign[=] call[name[ace]][constant[1]] if call[name[six].callable, parameter[name[principal]]] begin[:] variable[ace] assign[=] call[name[principal], parameter[]] if <ast.UnaryOp object at 0x7da18f720820> begin[:] continue if <ast.UnaryOp object at 0x7da18f7223b0> begin[:] variable[ace] assign[=] list[[<ast.Name object at 0x7da18f723490>]] variable[ace] assign[=] <ast.ListComp object at 0x7da18f722ec0> <ast.AugAssign object at 0x7da18f723820> return[call[name[tuple], parameter[name[new_acl]]]]
keyword[def] identifier[_apply_callables] ( identifier[self] , identifier[acl] , identifier[obj] = keyword[None] ): literal[string] identifier[new_acl] =[] keyword[for] identifier[i] , identifier[ace] keyword[in] identifier[enumerate] ( identifier[acl] ): identifier[principal] = identifier[ace] [ literal[int] ] keyword[if] identifier[six] . identifier[callable] ( identifier[principal] ): identifier[ace] = identifier[principal] ( identifier[ace] = identifier[ace] , identifier[request] = identifier[self] . identifier[request] , identifier[obj] = identifier[obj] ) keyword[if] keyword[not] identifier[ace] : keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[ace] [ literal[int] ],( identifier[list] , identifier[tuple] )): identifier[ace] =[ identifier[ace] ] identifier[ace] =[( identifier[a] , identifier[b] , identifier[validate_permissions] ( identifier[c] )) keyword[for] identifier[a] , identifier[b] , identifier[c] keyword[in] identifier[ace] ] keyword[else] : identifier[ace] =[ identifier[ace] ] identifier[new_acl] += identifier[ace] keyword[return] identifier[tuple] ( identifier[new_acl] )
def _apply_callables(self, acl, obj=None): """ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL """ new_acl = [] for (i, ace) in enumerate(acl): principal = ace[1] if six.callable(principal): ace = principal(ace=ace, request=self.request, obj=obj) if not ace: continue # depends on [control=['if'], data=[]] if not isinstance(ace[0], (list, tuple)): ace = [ace] # depends on [control=['if'], data=[]] ace = [(a, b, validate_permissions(c)) for (a, b, c) in ace] # depends on [control=['if'], data=[]] else: ace = [ace] new_acl += ace # depends on [control=['for'], data=[]] return tuple(new_acl)
def getVentRequiredEnabled(self): """Returns True if enabled, False if disabled""" command = '$GE' settings = self.sendCommand(command) flags = int(settings[2], 16) return not (flags & 0x0004)
def function[getVentRequiredEnabled, parameter[self]]: constant[Returns True if enabled, False if disabled] variable[command] assign[=] constant[$GE] variable[settings] assign[=] call[name[self].sendCommand, parameter[name[command]]] variable[flags] assign[=] call[name[int], parameter[call[name[settings]][constant[2]], constant[16]]] return[<ast.UnaryOp object at 0x7da1b0ab8640>]
keyword[def] identifier[getVentRequiredEnabled] ( identifier[self] ): literal[string] identifier[command] = literal[string] identifier[settings] = identifier[self] . identifier[sendCommand] ( identifier[command] ) identifier[flags] = identifier[int] ( identifier[settings] [ literal[int] ], literal[int] ) keyword[return] keyword[not] ( identifier[flags] & literal[int] )
def getVentRequiredEnabled(self): """Returns True if enabled, False if disabled""" command = '$GE' settings = self.sendCommand(command) flags = int(settings[2], 16) return not flags & 4
def _check_yaml_and_paths(ymlfilepath, yamldefs): """ Checks YAML for errors and resolves all paths """ relpath = os.path.relpath(ymlfilepath) if '/' not in relpath: relpath = './%s' % relpath pathroot = os.path.abspath(os.path.dirname(ymlfilepath)) for imagename, defn in iteritems(yamldefs): if imagename == '_SOURCES_': yamldefs['_SOURCES_'] = [os.path.relpath(_get_abspath(pathroot, p)) for p in yamldefs['_SOURCES_']] continue elif imagename in SPECIAL_FIELDS: continue for key in ('build_directory', 'FROM_DOCKERFILE', 'ignorefile'): if key in defn: defn[key] = _get_abspath(pathroot, defn[key]) if 'copy_from' in defn: if not isinstance(defn['copy_from'], dict): raise errors.ParsingFailure(( 'Syntax error in file "%s": \n' + 'The "copy_from" field in image definition "%s" is not \n' 'a key:value list.') % (ymlfilepath, imagename)) for otherimg, value in defn.get('copy_from', {}).items(): if not isinstance(value, dict): raise errors.ParsingFailure(( 'Syntax error in field:\n' ' %s . copy_from . %s\nin file "%s". \n' 'All entries must be of the form "sourcepath: destpath"')% (imagename, otherimg, ymlfilepath)) # save the file path for logging defn['_sourcefile'] = relpath if 'ignore' in defn and 'ignorefile' in defn: raise errors.MultipleIgnoreError( 'Image "%s" has both "ignore" AND "ignorefile" fields.' % imagename + ' At most ONE of these should be defined') if 'secret_files' in defn and not defn.get('squash', True): raise errors.ParsingFailure( "Step '%s' defines secret_files, so 'squash' cannot be set to 'false'" % imagename) if defn.get('secret_files', None) and defn.get('copy_from', False): raise errors.ParsingFailure( '`secret_files` currently is not implmemented to handle `copy_from`' ' (step %s)' % imagename) for key in defn: if key not in RECOGNIZED_KEYS: raise errors.UnrecognizedKeyError( 'Field "%s" in image "%s" in file "%s" not recognized' % (key, imagename, relpath))
def function[_check_yaml_and_paths, parameter[ymlfilepath, yamldefs]]: constant[ Checks YAML for errors and resolves all paths ] variable[relpath] assign[=] call[name[os].path.relpath, parameter[name[ymlfilepath]]] if compare[constant[/] <ast.NotIn object at 0x7da2590d7190> name[relpath]] begin[:] variable[relpath] assign[=] binary_operation[constant[./%s] <ast.Mod object at 0x7da2590d6920> name[relpath]] variable[pathroot] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.dirname, parameter[name[ymlfilepath]]]]] for taget[tuple[[<ast.Name object at 0x7da20c992290>, <ast.Name object at 0x7da20c990b80>]]] in starred[call[name[iteritems], parameter[name[yamldefs]]]] begin[:] if compare[name[imagename] equal[==] constant[_SOURCES_]] begin[:] call[name[yamldefs]][constant[_SOURCES_]] assign[=] <ast.ListComp object at 0x7da20c9907f0> continue for taget[name[key]] in starred[tuple[[<ast.Constant object at 0x7da20c9909d0>, <ast.Constant object at 0x7da20c992e60>, <ast.Constant object at 0x7da20c993640>]]] begin[:] if compare[name[key] in name[defn]] begin[:] call[name[defn]][name[key]] assign[=] call[name[_get_abspath], parameter[name[pathroot], call[name[defn]][name[key]]]] if compare[constant[copy_from] in name[defn]] begin[:] if <ast.UnaryOp object at 0x7da20c990c40> begin[:] <ast.Raise object at 0x7da20c993610> for taget[tuple[[<ast.Name object at 0x7da20c993b20>, <ast.Name object at 0x7da20c992980>]]] in starred[call[call[name[defn].get, parameter[constant[copy_from], dictionary[[], []]]].items, parameter[]]] begin[:] if <ast.UnaryOp object at 0x7da20c993010> begin[:] <ast.Raise object at 0x7da20c9914e0> call[name[defn]][constant[_sourcefile]] assign[=] name[relpath] if <ast.BoolOp object at 0x7da1b26acd90> begin[:] <ast.Raise object at 0x7da1b26ac1f0> if <ast.BoolOp object at 0x7da1b26ad270> begin[:] <ast.Raise object at 0x7da1b26aeaa0> if <ast.BoolOp object at 0x7da1b26ac910> begin[:] <ast.Raise object at 0x7da1b26af400> for taget[name[key]] in 
starred[name[defn]] begin[:] if compare[name[key] <ast.NotIn object at 0x7da2590d7190> name[RECOGNIZED_KEYS]] begin[:] <ast.Raise object at 0x7da1b26acca0>
keyword[def] identifier[_check_yaml_and_paths] ( identifier[ymlfilepath] , identifier[yamldefs] ): literal[string] identifier[relpath] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[ymlfilepath] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[relpath] : identifier[relpath] = literal[string] % identifier[relpath] identifier[pathroot] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[ymlfilepath] )) keyword[for] identifier[imagename] , identifier[defn] keyword[in] identifier[iteritems] ( identifier[yamldefs] ): keyword[if] identifier[imagename] == literal[string] : identifier[yamldefs] [ literal[string] ]=[ identifier[os] . identifier[path] . identifier[relpath] ( identifier[_get_abspath] ( identifier[pathroot] , identifier[p] )) keyword[for] identifier[p] keyword[in] identifier[yamldefs] [ literal[string] ]] keyword[continue] keyword[elif] identifier[imagename] keyword[in] identifier[SPECIAL_FIELDS] : keyword[continue] keyword[for] identifier[key] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[if] identifier[key] keyword[in] identifier[defn] : identifier[defn] [ identifier[key] ]= identifier[_get_abspath] ( identifier[pathroot] , identifier[defn] [ identifier[key] ]) keyword[if] literal[string] keyword[in] identifier[defn] : keyword[if] keyword[not] identifier[isinstance] ( identifier[defn] [ literal[string] ], identifier[dict] ): keyword[raise] identifier[errors] . identifier[ParsingFailure] (( literal[string] + literal[string] literal[string] )%( identifier[ymlfilepath] , identifier[imagename] )) keyword[for] identifier[otherimg] , identifier[value] keyword[in] identifier[defn] . identifier[get] ( literal[string] ,{}). identifier[items] (): keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[dict] ): keyword[raise] identifier[errors] . 
identifier[ParsingFailure] (( literal[string] literal[string] literal[string] )% ( identifier[imagename] , identifier[otherimg] , identifier[ymlfilepath] )) identifier[defn] [ literal[string] ]= identifier[relpath] keyword[if] literal[string] keyword[in] identifier[defn] keyword[and] literal[string] keyword[in] identifier[defn] : keyword[raise] identifier[errors] . identifier[MultipleIgnoreError] ( literal[string] % identifier[imagename] + literal[string] ) keyword[if] literal[string] keyword[in] identifier[defn] keyword[and] keyword[not] identifier[defn] . identifier[get] ( literal[string] , keyword[True] ): keyword[raise] identifier[errors] . identifier[ParsingFailure] ( literal[string] % identifier[imagename] ) keyword[if] identifier[defn] . identifier[get] ( literal[string] , keyword[None] ) keyword[and] identifier[defn] . identifier[get] ( literal[string] , keyword[False] ): keyword[raise] identifier[errors] . identifier[ParsingFailure] ( literal[string] literal[string] % identifier[imagename] ) keyword[for] identifier[key] keyword[in] identifier[defn] : keyword[if] identifier[key] keyword[not] keyword[in] identifier[RECOGNIZED_KEYS] : keyword[raise] identifier[errors] . identifier[UnrecognizedKeyError] ( literal[string] % ( identifier[key] , identifier[imagename] , identifier[relpath] ))
def _check_yaml_and_paths(ymlfilepath, yamldefs): """ Checks YAML for errors and resolves all paths """ relpath = os.path.relpath(ymlfilepath) if '/' not in relpath: relpath = './%s' % relpath # depends on [control=['if'], data=['relpath']] pathroot = os.path.abspath(os.path.dirname(ymlfilepath)) for (imagename, defn) in iteritems(yamldefs): if imagename == '_SOURCES_': yamldefs['_SOURCES_'] = [os.path.relpath(_get_abspath(pathroot, p)) for p in yamldefs['_SOURCES_']] continue # depends on [control=['if'], data=[]] elif imagename in SPECIAL_FIELDS: continue # depends on [control=['if'], data=[]] for key in ('build_directory', 'FROM_DOCKERFILE', 'ignorefile'): if key in defn: defn[key] = _get_abspath(pathroot, defn[key]) # depends on [control=['if'], data=['key', 'defn']] # depends on [control=['for'], data=['key']] if 'copy_from' in defn: if not isinstance(defn['copy_from'], dict): raise errors.ParsingFailure(('Syntax error in file "%s": \n' + 'The "copy_from" field in image definition "%s" is not \na key:value list.') % (ymlfilepath, imagename)) # depends on [control=['if'], data=[]] for (otherimg, value) in defn.get('copy_from', {}).items(): if not isinstance(value, dict): raise errors.ParsingFailure('Syntax error in field:\n %s . copy_from . %s\nin file "%s". \nAll entries must be of the form "sourcepath: destpath"' % (imagename, otherimg, ymlfilepath)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['defn']] # save the file path for logging defn['_sourcefile'] = relpath if 'ignore' in defn and 'ignorefile' in defn: raise errors.MultipleIgnoreError('Image "%s" has both "ignore" AND "ignorefile" fields.' 
% imagename + ' At most ONE of these should be defined') # depends on [control=['if'], data=[]] if 'secret_files' in defn and (not defn.get('squash', True)): raise errors.ParsingFailure("Step '%s' defines secret_files, so 'squash' cannot be set to 'false'" % imagename) # depends on [control=['if'], data=[]] if defn.get('secret_files', None) and defn.get('copy_from', False): raise errors.ParsingFailure('`secret_files` currently is not implmemented to handle `copy_from` (step %s)' % imagename) # depends on [control=['if'], data=[]] for key in defn: if key not in RECOGNIZED_KEYS: raise errors.UnrecognizedKeyError('Field "%s" in image "%s" in file "%s" not recognized' % (key, imagename, relpath)) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=[]]
def find(self, value): """ returns a dictionary of items based on the a lowercase search args: value: the value to search by """ value = str(value).lower() rtn_dict = RegistryDictionary() for key, item in self.items(): if value in key.lower(): rtn_dict[key] = item return rtn_dict
def function[find, parameter[self, value]]: constant[ returns a dictionary of items based on the a lowercase search args: value: the value to search by ] variable[value] assign[=] call[call[name[str], parameter[name[value]]].lower, parameter[]] variable[rtn_dict] assign[=] call[name[RegistryDictionary], parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b15f4370>, <ast.Name object at 0x7da1b15f49d0>]]] in starred[call[name[self].items, parameter[]]] begin[:] if compare[name[value] in call[name[key].lower, parameter[]]] begin[:] call[name[rtn_dict]][name[key]] assign[=] name[item] return[name[rtn_dict]]
keyword[def] identifier[find] ( identifier[self] , identifier[value] ): literal[string] identifier[value] = identifier[str] ( identifier[value] ). identifier[lower] () identifier[rtn_dict] = identifier[RegistryDictionary] () keyword[for] identifier[key] , identifier[item] keyword[in] identifier[self] . identifier[items] (): keyword[if] identifier[value] keyword[in] identifier[key] . identifier[lower] (): identifier[rtn_dict] [ identifier[key] ]= identifier[item] keyword[return] identifier[rtn_dict]
def find(self, value): """ returns a dictionary of items based on the a lowercase search args: value: the value to search by """ value = str(value).lower() rtn_dict = RegistryDictionary() for (key, item) in self.items(): if value in key.lower(): rtn_dict[key] = item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return rtn_dict
async def update_read_timestamp(self, read_timestamp=None): """Update the timestamp of the latest event which has been read. This method will avoid making an API request if it will have no effect. Args: read_timestamp (datetime.datetime): (optional) Timestamp to set. Defaults to the timestamp of the newest event. Raises: .NetworkError: If the timestamp cannot be updated. """ if read_timestamp is None: read_timestamp = (self.events[-1].timestamp if self.events else datetime.datetime.now(datetime.timezone.utc)) if read_timestamp > self.latest_read_timestamp: logger.info( 'Setting {} latest_read_timestamp from {} to {}' .format(self.id_, self.latest_read_timestamp, read_timestamp) ) # Prevent duplicate requests by updating the conversation now. state = self._conversation.self_conversation_state state.self_read_state.latest_read_timestamp = ( parsers.to_timestamp(read_timestamp) ) try: await self._client.update_watermark( hangouts_pb2.UpdateWatermarkRequest( request_header=self._client.get_request_header(), conversation_id=hangouts_pb2.ConversationId( id=self.id_ ), last_read_timestamp=parsers.to_timestamp( read_timestamp ), ) ) except exceptions.NetworkError as e: logger.warning('Failed to update read timestamp: {}'.format(e)) raise
<ast.AsyncFunctionDef object at 0x7da20e74bca0>
keyword[async] keyword[def] identifier[update_read_timestamp] ( identifier[self] , identifier[read_timestamp] = keyword[None] ): literal[string] keyword[if] identifier[read_timestamp] keyword[is] keyword[None] : identifier[read_timestamp] =( identifier[self] . identifier[events] [- literal[int] ]. identifier[timestamp] keyword[if] identifier[self] . identifier[events] keyword[else] identifier[datetime] . identifier[datetime] . identifier[now] ( identifier[datetime] . identifier[timezone] . identifier[utc] )) keyword[if] identifier[read_timestamp] > identifier[self] . identifier[latest_read_timestamp] : identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[self] . identifier[id_] , identifier[self] . identifier[latest_read_timestamp] , identifier[read_timestamp] ) ) identifier[state] = identifier[self] . identifier[_conversation] . identifier[self_conversation_state] identifier[state] . identifier[self_read_state] . identifier[latest_read_timestamp] =( identifier[parsers] . identifier[to_timestamp] ( identifier[read_timestamp] ) ) keyword[try] : keyword[await] identifier[self] . identifier[_client] . identifier[update_watermark] ( identifier[hangouts_pb2] . identifier[UpdateWatermarkRequest] ( identifier[request_header] = identifier[self] . identifier[_client] . identifier[get_request_header] (), identifier[conversation_id] = identifier[hangouts_pb2] . identifier[ConversationId] ( identifier[id] = identifier[self] . identifier[id_] ), identifier[last_read_timestamp] = identifier[parsers] . identifier[to_timestamp] ( identifier[read_timestamp] ), ) ) keyword[except] identifier[exceptions] . identifier[NetworkError] keyword[as] identifier[e] : identifier[logger] . identifier[warning] ( literal[string] . identifier[format] ( identifier[e] )) keyword[raise]
async def update_read_timestamp(self, read_timestamp=None): """Update the timestamp of the latest event which has been read. This method will avoid making an API request if it will have no effect. Args: read_timestamp (datetime.datetime): (optional) Timestamp to set. Defaults to the timestamp of the newest event. Raises: .NetworkError: If the timestamp cannot be updated. """ if read_timestamp is None: read_timestamp = self.events[-1].timestamp if self.events else datetime.datetime.now(datetime.timezone.utc) # depends on [control=['if'], data=['read_timestamp']] if read_timestamp > self.latest_read_timestamp: logger.info('Setting {} latest_read_timestamp from {} to {}'.format(self.id_, self.latest_read_timestamp, read_timestamp)) # Prevent duplicate requests by updating the conversation now. state = self._conversation.self_conversation_state state.self_read_state.latest_read_timestamp = parsers.to_timestamp(read_timestamp) try: await self._client.update_watermark(hangouts_pb2.UpdateWatermarkRequest(request_header=self._client.get_request_header(), conversation_id=hangouts_pb2.ConversationId(id=self.id_), last_read_timestamp=parsers.to_timestamp(read_timestamp))) # depends on [control=['try'], data=[]] except exceptions.NetworkError as e: logger.warning('Failed to update read timestamp: {}'.format(e)) raise # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=['read_timestamp']]
def wait_for_server(host, port, cancel_event=None): """ Blocks until a local server is listening on the specified host and port. Set cancel_event to cancel the wait. This is intended to be used in conjunction with running the Flask server. """ while not is_server_running(host, port): # Stop waiting if shutting down if cancel_event and cancel_event.is_set(): return False time.sleep(0.1) return True
def function[wait_for_server, parameter[host, port, cancel_event]]: constant[ Blocks until a local server is listening on the specified host and port. Set cancel_event to cancel the wait. This is intended to be used in conjunction with running the Flask server. ] while <ast.UnaryOp object at 0x7da1b1d368c0> begin[:] if <ast.BoolOp object at 0x7da1b1d342e0> begin[:] return[constant[False]] call[name[time].sleep, parameter[constant[0.1]]] return[constant[True]]
keyword[def] identifier[wait_for_server] ( identifier[host] , identifier[port] , identifier[cancel_event] = keyword[None] ): literal[string] keyword[while] keyword[not] identifier[is_server_running] ( identifier[host] , identifier[port] ): keyword[if] identifier[cancel_event] keyword[and] identifier[cancel_event] . identifier[is_set] (): keyword[return] keyword[False] identifier[time] . identifier[sleep] ( literal[int] ) keyword[return] keyword[True]
def wait_for_server(host, port, cancel_event=None): """ Blocks until a local server is listening on the specified host and port. Set cancel_event to cancel the wait. This is intended to be used in conjunction with running the Flask server. """ while not is_server_running(host, port): # Stop waiting if shutting down if cancel_event and cancel_event.is_set(): return False # depends on [control=['if'], data=[]] time.sleep(0.1) # depends on [control=['while'], data=[]] return True
def stats_observer(population, num_generations, num_evaluations, args): """Print the statistics of the evolutionary computation to the screen. This function displays the statistics of the evolutionary computation to the screen. The output includes the generation number, the current number of evaluations, the maximum fitness, the minimum fitness, the average fitness, and the standard deviation. .. note:: This function makes use of the ``inspyred.ec.analysis.fitness_statistics`` function, so it is subject to the same requirements. .. Arguments: population -- the population of Individuals num_generations -- the number of elapsed generations num_evaluations -- the number of candidate solution evaluations args -- a dictionary of keyword arguments """ stats = inspyred.ec.analysis.fitness_statistics(population) worst_fit = '{0:>10}'.format(stats['worst'])[:10] best_fit = '{0:>10}'.format(stats['best'])[:10] avg_fit = '{0:>10}'.format(stats['mean'])[:10] med_fit = '{0:>10}'.format(stats['median'])[:10] std_fit = '{0:>10}'.format(stats['std'])[:10] print('Generation Evaluation Worst Best Median Average Std Dev') print('---------- ---------- ---------- ---------- ---------- ---------- ----------') print('{0:>10} {1:>10} {2:>10} {3:>10} {4:>10} {5:>10} {6:>10}\n'.format(num_generations, num_evaluations, worst_fit, best_fit, med_fit, avg_fit, std_fit))
def function[stats_observer, parameter[population, num_generations, num_evaluations, args]]: constant[Print the statistics of the evolutionary computation to the screen. This function displays the statistics of the evolutionary computation to the screen. The output includes the generation number, the current number of evaluations, the maximum fitness, the minimum fitness, the average fitness, and the standard deviation. .. note:: This function makes use of the ``inspyred.ec.analysis.fitness_statistics`` function, so it is subject to the same requirements. .. Arguments: population -- the population of Individuals num_generations -- the number of elapsed generations num_evaluations -- the number of candidate solution evaluations args -- a dictionary of keyword arguments ] variable[stats] assign[=] call[name[inspyred].ec.analysis.fitness_statistics, parameter[name[population]]] variable[worst_fit] assign[=] call[call[constant[{0:>10}].format, parameter[call[name[stats]][constant[worst]]]]][<ast.Slice object at 0x7da1b1231bd0>] variable[best_fit] assign[=] call[call[constant[{0:>10}].format, parameter[call[name[stats]][constant[best]]]]][<ast.Slice object at 0x7da1b13457b0>] variable[avg_fit] assign[=] call[call[constant[{0:>10}].format, parameter[call[name[stats]][constant[mean]]]]][<ast.Slice object at 0x7da1b13478e0>] variable[med_fit] assign[=] call[call[constant[{0:>10}].format, parameter[call[name[stats]][constant[median]]]]][<ast.Slice object at 0x7da1b1344ee0>] variable[std_fit] assign[=] call[call[constant[{0:>10}].format, parameter[call[name[stats]][constant[std]]]]][<ast.Slice object at 0x7da1b1346f50>] call[name[print], parameter[constant[Generation Evaluation Worst Best Median Average Std Dev]]] call[name[print], parameter[constant[---------- ---------- ---------- ---------- ---------- ---------- ----------]]] call[name[print], parameter[call[constant[{0:>10} {1:>10} {2:>10} {3:>10} {4:>10} {5:>10} {6:>10} ].format, parameter[name[num_generations], 
name[num_evaluations], name[worst_fit], name[best_fit], name[med_fit], name[avg_fit], name[std_fit]]]]]
keyword[def] identifier[stats_observer] ( identifier[population] , identifier[num_generations] , identifier[num_evaluations] , identifier[args] ): literal[string] identifier[stats] = identifier[inspyred] . identifier[ec] . identifier[analysis] . identifier[fitness_statistics] ( identifier[population] ) identifier[worst_fit] = literal[string] . identifier[format] ( identifier[stats] [ literal[string] ])[: literal[int] ] identifier[best_fit] = literal[string] . identifier[format] ( identifier[stats] [ literal[string] ])[: literal[int] ] identifier[avg_fit] = literal[string] . identifier[format] ( identifier[stats] [ literal[string] ])[: literal[int] ] identifier[med_fit] = literal[string] . identifier[format] ( identifier[stats] [ literal[string] ])[: literal[int] ] identifier[std_fit] = literal[string] . identifier[format] ( identifier[stats] [ literal[string] ])[: literal[int] ] identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) identifier[print] ( literal[string] . identifier[format] ( identifier[num_generations] , identifier[num_evaluations] , identifier[worst_fit] , identifier[best_fit] , identifier[med_fit] , identifier[avg_fit] , identifier[std_fit] ))
def stats_observer(population, num_generations, num_evaluations, args): """Print the statistics of the evolutionary computation to the screen. This function displays the statistics of the evolutionary computation to the screen. The output includes the generation number, the current number of evaluations, the maximum fitness, the minimum fitness, the average fitness, and the standard deviation. .. note:: This function makes use of the ``inspyred.ec.analysis.fitness_statistics`` function, so it is subject to the same requirements. .. Arguments: population -- the population of Individuals num_generations -- the number of elapsed generations num_evaluations -- the number of candidate solution evaluations args -- a dictionary of keyword arguments """ stats = inspyred.ec.analysis.fitness_statistics(population) worst_fit = '{0:>10}'.format(stats['worst'])[:10] best_fit = '{0:>10}'.format(stats['best'])[:10] avg_fit = '{0:>10}'.format(stats['mean'])[:10] med_fit = '{0:>10}'.format(stats['median'])[:10] std_fit = '{0:>10}'.format(stats['std'])[:10] print('Generation Evaluation Worst Best Median Average Std Dev') print('---------- ---------- ---------- ---------- ---------- ---------- ----------') print('{0:>10} {1:>10} {2:>10} {3:>10} {4:>10} {5:>10} {6:>10}\n'.format(num_generations, num_evaluations, worst_fit, best_fit, med_fit, avg_fit, std_fit))
def model_argmax(sess, x, predictions, samples, feed=None): """ Helper function that computes the current class prediction :param sess: TF session :param x: the input placeholder :param predictions: the model's symbolic output :param samples: numpy array with input samples (dims must match x) :param feed: An optional dictionary that is appended to the feeding dictionary before the session runs. Can be used to feed the learning phase of a Keras model for instance. :return: the argmax output of predictions, i.e. the current predicted class """ feed_dict = {x: samples} if feed is not None: feed_dict.update(feed) probabilities = sess.run(predictions, feed_dict) if samples.shape[0] == 1: return np.argmax(probabilities) else: return np.argmax(probabilities, axis=1)
def function[model_argmax, parameter[sess, x, predictions, samples, feed]]: constant[ Helper function that computes the current class prediction :param sess: TF session :param x: the input placeholder :param predictions: the model's symbolic output :param samples: numpy array with input samples (dims must match x) :param feed: An optional dictionary that is appended to the feeding dictionary before the session runs. Can be used to feed the learning phase of a Keras model for instance. :return: the argmax output of predictions, i.e. the current predicted class ] variable[feed_dict] assign[=] dictionary[[<ast.Name object at 0x7da18f810a90>], [<ast.Name object at 0x7da18f810e80>]] if compare[name[feed] is_not constant[None]] begin[:] call[name[feed_dict].update, parameter[name[feed]]] variable[probabilities] assign[=] call[name[sess].run, parameter[name[predictions], name[feed_dict]]] if compare[call[name[samples].shape][constant[0]] equal[==] constant[1]] begin[:] return[call[name[np].argmax, parameter[name[probabilities]]]]
keyword[def] identifier[model_argmax] ( identifier[sess] , identifier[x] , identifier[predictions] , identifier[samples] , identifier[feed] = keyword[None] ): literal[string] identifier[feed_dict] ={ identifier[x] : identifier[samples] } keyword[if] identifier[feed] keyword[is] keyword[not] keyword[None] : identifier[feed_dict] . identifier[update] ( identifier[feed] ) identifier[probabilities] = identifier[sess] . identifier[run] ( identifier[predictions] , identifier[feed_dict] ) keyword[if] identifier[samples] . identifier[shape] [ literal[int] ]== literal[int] : keyword[return] identifier[np] . identifier[argmax] ( identifier[probabilities] ) keyword[else] : keyword[return] identifier[np] . identifier[argmax] ( identifier[probabilities] , identifier[axis] = literal[int] )
def model_argmax(sess, x, predictions, samples, feed=None): """ Helper function that computes the current class prediction :param sess: TF session :param x: the input placeholder :param predictions: the model's symbolic output :param samples: numpy array with input samples (dims must match x) :param feed: An optional dictionary that is appended to the feeding dictionary before the session runs. Can be used to feed the learning phase of a Keras model for instance. :return: the argmax output of predictions, i.e. the current predicted class """ feed_dict = {x: samples} if feed is not None: feed_dict.update(feed) # depends on [control=['if'], data=['feed']] probabilities = sess.run(predictions, feed_dict) if samples.shape[0] == 1: return np.argmax(probabilities) # depends on [control=['if'], data=[]] else: return np.argmax(probabilities, axis=1)
def improvise(oracle, seq_len, k=1, LRS=0, weight=None, continuity=1): """ Given an oracle and length, generate an improvised sequence of the given length. :param oracle: an indexed vmo object :param seq_len: the length of the returned improvisation sequence :param k: the starting improvisation time step in oracle :param LRS: the length of minimum longest repeated suffixes allowed to jump :param weight: if None, jump to possible candidate time step uniformly, if "lrs", the probability is proportional to the LRS of each candidate time step :param continuity: the number of time steps guaranteed to continue before next jump is executed :return: the improvised sequence """ s = [] if k + continuity < oracle.n_states - 1: s.extend(range(k, k + continuity)) k = s[-1] seq_len -= continuity while seq_len > 0: s.append(improvise_step(oracle, k, LRS, weight)) k = s[-1] if k + 1 < oracle.n_states - 1: k += 1 else: k = 1 if k + continuity < oracle.n_states - 1: s.extend(range(k, k + continuity)) seq_len -= continuity k = s[-1] seq_len -= 1 return s
def function[improvise, parameter[oracle, seq_len, k, LRS, weight, continuity]]: constant[ Given an oracle and length, generate an improvised sequence of the given length. :param oracle: an indexed vmo object :param seq_len: the length of the returned improvisation sequence :param k: the starting improvisation time step in oracle :param LRS: the length of minimum longest repeated suffixes allowed to jump :param weight: if None, jump to possible candidate time step uniformly, if "lrs", the probability is proportional to the LRS of each candidate time step :param continuity: the number of time steps guaranteed to continue before next jump is executed :return: the improvised sequence ] variable[s] assign[=] list[[]] if compare[binary_operation[name[k] + name[continuity]] less[<] binary_operation[name[oracle].n_states - constant[1]]] begin[:] call[name[s].extend, parameter[call[name[range], parameter[name[k], binary_operation[name[k] + name[continuity]]]]]] variable[k] assign[=] call[name[s]][<ast.UnaryOp object at 0x7da20c6c50c0>] <ast.AugAssign object at 0x7da20c6c7160> while compare[name[seq_len] greater[>] constant[0]] begin[:] call[name[s].append, parameter[call[name[improvise_step], parameter[name[oracle], name[k], name[LRS], name[weight]]]]] variable[k] assign[=] call[name[s]][<ast.UnaryOp object at 0x7da20c6c6920>] if compare[binary_operation[name[k] + constant[1]] less[<] binary_operation[name[oracle].n_states - constant[1]]] begin[:] <ast.AugAssign object at 0x7da20e74b7f0> if compare[binary_operation[name[k] + name[continuity]] less[<] binary_operation[name[oracle].n_states - constant[1]]] begin[:] call[name[s].extend, parameter[call[name[range], parameter[name[k], binary_operation[name[k] + name[continuity]]]]]] <ast.AugAssign object at 0x7da20e74ba30> variable[k] assign[=] call[name[s]][<ast.UnaryOp object at 0x7da20e74b6a0>] <ast.AugAssign object at 0x7da20e74b370> return[name[s]]
keyword[def] identifier[improvise] ( identifier[oracle] , identifier[seq_len] , identifier[k] = literal[int] , identifier[LRS] = literal[int] , identifier[weight] = keyword[None] , identifier[continuity] = literal[int] ): literal[string] identifier[s] =[] keyword[if] identifier[k] + identifier[continuity] < identifier[oracle] . identifier[n_states] - literal[int] : identifier[s] . identifier[extend] ( identifier[range] ( identifier[k] , identifier[k] + identifier[continuity] )) identifier[k] = identifier[s] [- literal[int] ] identifier[seq_len] -= identifier[continuity] keyword[while] identifier[seq_len] > literal[int] : identifier[s] . identifier[append] ( identifier[improvise_step] ( identifier[oracle] , identifier[k] , identifier[LRS] , identifier[weight] )) identifier[k] = identifier[s] [- literal[int] ] keyword[if] identifier[k] + literal[int] < identifier[oracle] . identifier[n_states] - literal[int] : identifier[k] += literal[int] keyword[else] : identifier[k] = literal[int] keyword[if] identifier[k] + identifier[continuity] < identifier[oracle] . identifier[n_states] - literal[int] : identifier[s] . identifier[extend] ( identifier[range] ( identifier[k] , identifier[k] + identifier[continuity] )) identifier[seq_len] -= identifier[continuity] identifier[k] = identifier[s] [- literal[int] ] identifier[seq_len] -= literal[int] keyword[return] identifier[s]
def improvise(oracle, seq_len, k=1, LRS=0, weight=None, continuity=1): """ Given an oracle and length, generate an improvised sequence of the given length. :param oracle: an indexed vmo object :param seq_len: the length of the returned improvisation sequence :param k: the starting improvisation time step in oracle :param LRS: the length of minimum longest repeated suffixes allowed to jump :param weight: if None, jump to possible candidate time step uniformly, if "lrs", the probability is proportional to the LRS of each candidate time step :param continuity: the number of time steps guaranteed to continue before next jump is executed :return: the improvised sequence """ s = [] if k + continuity < oracle.n_states - 1: s.extend(range(k, k + continuity)) k = s[-1] seq_len -= continuity # depends on [control=['if'], data=[]] while seq_len > 0: s.append(improvise_step(oracle, k, LRS, weight)) k = s[-1] if k + 1 < oracle.n_states - 1: k += 1 # depends on [control=['if'], data=[]] else: k = 1 if k + continuity < oracle.n_states - 1: s.extend(range(k, k + continuity)) seq_len -= continuity # depends on [control=['if'], data=[]] k = s[-1] seq_len -= 1 # depends on [control=['while'], data=['seq_len']] return s
def __field_to_subfields(self, field): """Fully describes data represented by field, including the nested case. In the case that the field is not a message field, we have no fields nested within a message definition, so we can simply return that field. However, in the nested case, we can't simply describe the data with one field or even with one chain of fields. For example, if we have a message field m_field = messages.MessageField(RefClass, 1) which references a class with two fields: class RefClass(messages.Message): one = messages.StringField(1) two = messages.IntegerField(2) then we would need to include both one and two to represent all the data contained. Calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">], ] If the second field was instead a message field class RefClass(messages.Message): one = messages.StringField(1) two = messages.MessageField(OtherRefClass, 2) referencing another class with two fields class OtherRefClass(messages.Message): three = messages.BooleanField(1) four = messages.FloatField(2) then we would need to recurse one level deeper for two. With this change, calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">, <StringField "three">], [<MessageField "m_field">, <StringField "two">, <StringField "four">], ] Args: field: An instance of a subclass of messages.Field. Returns: A list of lists, where each sublist is a list of fields. """ # Termination condition if not isinstance(field, messages.MessageField): return [[field]] result = [] for subfield in sorted(field.message_type.all_fields(), key=lambda f: f.number): subfield_results = self.__field_to_subfields(subfield) for subfields_list in subfield_results: subfields_list.insert(0, field) result.append(subfields_list) return result
def function[__field_to_subfields, parameter[self, field]]: constant[Fully describes data represented by field, including the nested case. In the case that the field is not a message field, we have no fields nested within a message definition, so we can simply return that field. However, in the nested case, we can't simply describe the data with one field or even with one chain of fields. For example, if we have a message field m_field = messages.MessageField(RefClass, 1) which references a class with two fields: class RefClass(messages.Message): one = messages.StringField(1) two = messages.IntegerField(2) then we would need to include both one and two to represent all the data contained. Calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">], ] If the second field was instead a message field class RefClass(messages.Message): one = messages.StringField(1) two = messages.MessageField(OtherRefClass, 2) referencing another class with two fields class OtherRefClass(messages.Message): three = messages.BooleanField(1) four = messages.FloatField(2) then we would need to recurse one level deeper for two. With this change, calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">, <StringField "three">], [<MessageField "m_field">, <StringField "two">, <StringField "four">], ] Args: field: An instance of a subclass of messages.Field. Returns: A list of lists, where each sublist is a list of fields. 
] if <ast.UnaryOp object at 0x7da1b0ef0190> begin[:] return[list[[<ast.List object at 0x7da1b0ef1090>]]] variable[result] assign[=] list[[]] for taget[name[subfield]] in starred[call[name[sorted], parameter[call[name[field].message_type.all_fields, parameter[]]]]] begin[:] variable[subfield_results] assign[=] call[name[self].__field_to_subfields, parameter[name[subfield]]] for taget[name[subfields_list]] in starred[name[subfield_results]] begin[:] call[name[subfields_list].insert, parameter[constant[0], name[field]]] call[name[result].append, parameter[name[subfields_list]]] return[name[result]]
keyword[def] identifier[__field_to_subfields] ( identifier[self] , identifier[field] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[field] , identifier[messages] . identifier[MessageField] ): keyword[return] [[ identifier[field] ]] identifier[result] =[] keyword[for] identifier[subfield] keyword[in] identifier[sorted] ( identifier[field] . identifier[message_type] . identifier[all_fields] (), identifier[key] = keyword[lambda] identifier[f] : identifier[f] . identifier[number] ): identifier[subfield_results] = identifier[self] . identifier[__field_to_subfields] ( identifier[subfield] ) keyword[for] identifier[subfields_list] keyword[in] identifier[subfield_results] : identifier[subfields_list] . identifier[insert] ( literal[int] , identifier[field] ) identifier[result] . identifier[append] ( identifier[subfields_list] ) keyword[return] identifier[result]
def __field_to_subfields(self, field): """Fully describes data represented by field, including the nested case. In the case that the field is not a message field, we have no fields nested within a message definition, so we can simply return that field. However, in the nested case, we can't simply describe the data with one field or even with one chain of fields. For example, if we have a message field m_field = messages.MessageField(RefClass, 1) which references a class with two fields: class RefClass(messages.Message): one = messages.StringField(1) two = messages.IntegerField(2) then we would need to include both one and two to represent all the data contained. Calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">], ] If the second field was instead a message field class RefClass(messages.Message): one = messages.StringField(1) two = messages.MessageField(OtherRefClass, 2) referencing another class with two fields class OtherRefClass(messages.Message): three = messages.BooleanField(1) four = messages.FloatField(2) then we would need to recurse one level deeper for two. With this change, calling __field_to_subfields(m_field) would return: [ [<MessageField "m_field">, <StringField "one">], [<MessageField "m_field">, <StringField "two">, <StringField "three">], [<MessageField "m_field">, <StringField "two">, <StringField "four">], ] Args: field: An instance of a subclass of messages.Field. Returns: A list of lists, where each sublist is a list of fields. 
""" # Termination condition if not isinstance(field, messages.MessageField): return [[field]] # depends on [control=['if'], data=[]] result = [] for subfield in sorted(field.message_type.all_fields(), key=lambda f: f.number): subfield_results = self.__field_to_subfields(subfield) for subfields_list in subfield_results: subfields_list.insert(0, field) result.append(subfields_list) # depends on [control=['for'], data=['subfields_list']] # depends on [control=['for'], data=['subfield']] return result
def contract_creation_exceptions(): """Special migration for the blueprint to support Keystone V3. We drop all tenant_id columns and create project_id columns instead. """ return { sa.Column: ['.'.join([table, 'project_id']) for table in get_tables()], sa.Index: get_tables() }
def function[contract_creation_exceptions, parameter[]]: constant[Special migration for the blueprint to support Keystone V3. We drop all tenant_id columns and create project_id columns instead. ] return[dictionary[[<ast.Attribute object at 0x7da1b1b14a00>, <ast.Attribute object at 0x7da1b1b16d10>], [<ast.ListComp object at 0x7da1b1b16cb0>, <ast.Call object at 0x7da1b1b16140>]]]
keyword[def] identifier[contract_creation_exceptions] (): literal[string] keyword[return] { identifier[sa] . identifier[Column] :[ literal[string] . identifier[join] ([ identifier[table] , literal[string] ]) keyword[for] identifier[table] keyword[in] identifier[get_tables] ()], identifier[sa] . identifier[Index] : identifier[get_tables] () }
def contract_creation_exceptions(): """Special migration for the blueprint to support Keystone V3. We drop all tenant_id columns and create project_id columns instead. """ return {sa.Column: ['.'.join([table, 'project_id']) for table in get_tables()], sa.Index: get_tables()}
def host_info(host=None): ''' Return information about the host connected to this master ''' data = query(host, quiet=True) for id_ in data: if 'vm_info' in data[id_]: data[id_].pop('vm_info') __jid_event__.fire_event({'data': data, 'outputter': 'nested'}, 'progress') return data
def function[host_info, parameter[host]]: constant[ Return information about the host connected to this master ] variable[data] assign[=] call[name[query], parameter[name[host]]] for taget[name[id_]] in starred[name[data]] begin[:] if compare[constant[vm_info] in call[name[data]][name[id_]]] begin[:] call[call[name[data]][name[id_]].pop, parameter[constant[vm_info]]] call[name[__jid_event__].fire_event, parameter[dictionary[[<ast.Constant object at 0x7da1b200a1d0>, <ast.Constant object at 0x7da1b2008250>], [<ast.Name object at 0x7da1b2009630>, <ast.Constant object at 0x7da1b200ad70>]], constant[progress]]] return[name[data]]
keyword[def] identifier[host_info] ( identifier[host] = keyword[None] ): literal[string] identifier[data] = identifier[query] ( identifier[host] , identifier[quiet] = keyword[True] ) keyword[for] identifier[id_] keyword[in] identifier[data] : keyword[if] literal[string] keyword[in] identifier[data] [ identifier[id_] ]: identifier[data] [ identifier[id_] ]. identifier[pop] ( literal[string] ) identifier[__jid_event__] . identifier[fire_event] ({ literal[string] : identifier[data] , literal[string] : literal[string] }, literal[string] ) keyword[return] identifier[data]
def host_info(host=None): """ Return information about the host connected to this master """ data = query(host, quiet=True) for id_ in data: if 'vm_info' in data[id_]: data[id_].pop('vm_info') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['id_']] __jid_event__.fire_event({'data': data, 'outputter': 'nested'}, 'progress') return data
def fermat_potential(self, x_image, y_image, x_source, y_source, kwargs_lens, k=None): """ fermat potential (negative sign means earlier arrival time) :param x_image: image position :param y_image: image position :param x_source: source position :param y_source: source position :param kwargs_lens: list of keyword arguments of lens model parameters matching the lens model classes :return: fermat potential in arcsec**2 without geometry term (second part of Eqn 1 in Suyu et al. 2013) as a list """ potential = self.potential(x_image, y_image, kwargs_lens, k=k) geometry = ((x_image - x_source)**2 + (y_image - y_source)**2) / 2. return geometry - potential
def function[fermat_potential, parameter[self, x_image, y_image, x_source, y_source, kwargs_lens, k]]: constant[ fermat potential (negative sign means earlier arrival time) :param x_image: image position :param y_image: image position :param x_source: source position :param y_source: source position :param kwargs_lens: list of keyword arguments of lens model parameters matching the lens model classes :return: fermat potential in arcsec**2 without geometry term (second part of Eqn 1 in Suyu et al. 2013) as a list ] variable[potential] assign[=] call[name[self].potential, parameter[name[x_image], name[y_image], name[kwargs_lens]]] variable[geometry] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[x_image] - name[x_source]] ** constant[2]] + binary_operation[binary_operation[name[y_image] - name[y_source]] ** constant[2]]] / constant[2.0]] return[binary_operation[name[geometry] - name[potential]]]
keyword[def] identifier[fermat_potential] ( identifier[self] , identifier[x_image] , identifier[y_image] , identifier[x_source] , identifier[y_source] , identifier[kwargs_lens] , identifier[k] = keyword[None] ): literal[string] identifier[potential] = identifier[self] . identifier[potential] ( identifier[x_image] , identifier[y_image] , identifier[kwargs_lens] , identifier[k] = identifier[k] ) identifier[geometry] =(( identifier[x_image] - identifier[x_source] )** literal[int] +( identifier[y_image] - identifier[y_source] )** literal[int] )/ literal[int] keyword[return] identifier[geometry] - identifier[potential]
def fermat_potential(self, x_image, y_image, x_source, y_source, kwargs_lens, k=None): """ fermat potential (negative sign means earlier arrival time) :param x_image: image position :param y_image: image position :param x_source: source position :param y_source: source position :param kwargs_lens: list of keyword arguments of lens model parameters matching the lens model classes :return: fermat potential in arcsec**2 without geometry term (second part of Eqn 1 in Suyu et al. 2013) as a list """ potential = self.potential(x_image, y_image, kwargs_lens, k=k) geometry = ((x_image - x_source) ** 2 + (y_image - y_source) ** 2) / 2.0 return geometry - potential
def drawWeibull(N, scale=1.0, shape=1.0, seed=0): ''' Generate arrays of Weibull draws. The scale and shape inputs can be numbers or list-likes. If a number, output is a length N array of draws from the Weibull distribution with the given scale and shape. If a list, output is a length T list whose t-th entry is a length N array with draws from the Weibull distribution with scale scale[t] and shape shape[t]. Note: When shape=1, the Weibull distribution is simply the exponential dist. Mean: scale*Gamma(1 + 1/shape) Parameters ---------- N : int Number of draws in each row. scale : float or [float] One or more scales. Number of elements T in scale determines number of rows of output. shape : float or [float] One or more shape parameters. Number of elements T in scale determines number of rows of output. seed : int Seed for random number generator. Returns: ------------ draws : np.array or [np.array] T-length list of arrays of Weibull draws each of size N, or a single array of size N (if sigma is a scalar). ''' # Set up the RNG RNG = np.random.RandomState(seed) if scale == 1: scale = float(scale) if isinstance(scale,float): # Return a single array of length N draws = scale*(-np.log(1.0-RNG.rand(N)))**(1.0/shape) else: # Set up empty list to populate, then loop and populate list with draws draws=[] for t in range(len(scale)): draws.append(scale[t]*(-np.log(1.0-RNG.rand(N)))**(1.0/shape[t])) return draws
def function[drawWeibull, parameter[N, scale, shape, seed]]: constant[ Generate arrays of Weibull draws. The scale and shape inputs can be numbers or list-likes. If a number, output is a length N array of draws from the Weibull distribution with the given scale and shape. If a list, output is a length T list whose t-th entry is a length N array with draws from the Weibull distribution with scale scale[t] and shape shape[t]. Note: When shape=1, the Weibull distribution is simply the exponential dist. Mean: scale*Gamma(1 + 1/shape) Parameters ---------- N : int Number of draws in each row. scale : float or [float] One or more scales. Number of elements T in scale determines number of rows of output. shape : float or [float] One or more shape parameters. Number of elements T in scale determines number of rows of output. seed : int Seed for random number generator. Returns: ------------ draws : np.array or [np.array] T-length list of arrays of Weibull draws each of size N, or a single array of size N (if sigma is a scalar). ] variable[RNG] assign[=] call[name[np].random.RandomState, parameter[name[seed]]] if compare[name[scale] equal[==] constant[1]] begin[:] variable[scale] assign[=] call[name[float], parameter[name[scale]]] if call[name[isinstance], parameter[name[scale], name[float]]] begin[:] variable[draws] assign[=] binary_operation[name[scale] * binary_operation[<ast.UnaryOp object at 0x7da20e9b07f0> ** binary_operation[constant[1.0] / name[shape]]]] return[name[draws]]
keyword[def] identifier[drawWeibull] ( identifier[N] , identifier[scale] = literal[int] , identifier[shape] = literal[int] , identifier[seed] = literal[int] ): literal[string] identifier[RNG] = identifier[np] . identifier[random] . identifier[RandomState] ( identifier[seed] ) keyword[if] identifier[scale] == literal[int] : identifier[scale] = identifier[float] ( identifier[scale] ) keyword[if] identifier[isinstance] ( identifier[scale] , identifier[float] ): identifier[draws] = identifier[scale] *(- identifier[np] . identifier[log] ( literal[int] - identifier[RNG] . identifier[rand] ( identifier[N] )))**( literal[int] / identifier[shape] ) keyword[else] : identifier[draws] =[] keyword[for] identifier[t] keyword[in] identifier[range] ( identifier[len] ( identifier[scale] )): identifier[draws] . identifier[append] ( identifier[scale] [ identifier[t] ]*(- identifier[np] . identifier[log] ( literal[int] - identifier[RNG] . identifier[rand] ( identifier[N] )))**( literal[int] / identifier[shape] [ identifier[t] ])) keyword[return] identifier[draws]
def drawWeibull(N, scale=1.0, shape=1.0, seed=0): """ Generate arrays of Weibull draws. The scale and shape inputs can be numbers or list-likes. If a number, output is a length N array of draws from the Weibull distribution with the given scale and shape. If a list, output is a length T list whose t-th entry is a length N array with draws from the Weibull distribution with scale scale[t] and shape shape[t]. Note: When shape=1, the Weibull distribution is simply the exponential dist. Mean: scale*Gamma(1 + 1/shape) Parameters ---------- N : int Number of draws in each row. scale : float or [float] One or more scales. Number of elements T in scale determines number of rows of output. shape : float or [float] One or more shape parameters. Number of elements T in scale determines number of rows of output. seed : int Seed for random number generator. Returns: ------------ draws : np.array or [np.array] T-length list of arrays of Weibull draws each of size N, or a single array of size N (if sigma is a scalar). """ # Set up the RNG RNG = np.random.RandomState(seed) if scale == 1: scale = float(scale) # depends on [control=['if'], data=['scale']] if isinstance(scale, float): # Return a single array of length N draws = scale * (-np.log(1.0 - RNG.rand(N))) ** (1.0 / shape) # depends on [control=['if'], data=[]] else: # Set up empty list to populate, then loop and populate list with draws draws = [] for t in range(len(scale)): draws.append(scale[t] * (-np.log(1.0 - RNG.rand(N))) ** (1.0 / shape[t])) # depends on [control=['for'], data=['t']] return draws
def _check_and_update_params(self, required, params): """ Ensure all required parameters were passed to the API call and format them correctly. """ for r in required: if r not in params: raise PayPalError("Missing required param: %s" % r) # Upper case all the parameters for PayPal. return (dict((k.upper(), v) for k, v in params.items()))
def function[_check_and_update_params, parameter[self, required, params]]: constant[ Ensure all required parameters were passed to the API call and format them correctly. ] for taget[name[r]] in starred[name[required]] begin[:] if compare[name[r] <ast.NotIn object at 0x7da2590d7190> name[params]] begin[:] <ast.Raise object at 0x7da18f00eb00> return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18f00f670>]]]
keyword[def] identifier[_check_and_update_params] ( identifier[self] , identifier[required] , identifier[params] ): literal[string] keyword[for] identifier[r] keyword[in] identifier[required] : keyword[if] identifier[r] keyword[not] keyword[in] identifier[params] : keyword[raise] identifier[PayPalError] ( literal[string] % identifier[r] ) keyword[return] ( identifier[dict] (( identifier[k] . identifier[upper] (), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[params] . identifier[items] ()))
def _check_and_update_params(self, required, params): """ Ensure all required parameters were passed to the API call and format them correctly. """ for r in required: if r not in params: raise PayPalError('Missing required param: %s' % r) # depends on [control=['if'], data=['r']] # depends on [control=['for'], data=['r']] # Upper case all the parameters for PayPal. return dict(((k.upper(), v) for (k, v) in params.items()))
def _init_kws(self, **kws_usr): """Return a dict containing user-specified plotting options.""" kws_self = {} user_keys = set(kws_usr) for objname, expset in self.exp_keys.items(): usrkeys_curr = user_keys.intersection(expset) kws_self[objname] = get_kwargs(kws_usr, usrkeys_curr, usrkeys_curr) dpi = str(kws_self['dag'].get('dpi', self.dflts['dpi'])) kws_self['dag']['dpi'] = dpi return kws_self
def function[_init_kws, parameter[self]]: constant[Return a dict containing user-specified plotting options.] variable[kws_self] assign[=] dictionary[[], []] variable[user_keys] assign[=] call[name[set], parameter[name[kws_usr]]] for taget[tuple[[<ast.Name object at 0x7da18f812d40>, <ast.Name object at 0x7da18f813f10>]]] in starred[call[name[self].exp_keys.items, parameter[]]] begin[:] variable[usrkeys_curr] assign[=] call[name[user_keys].intersection, parameter[name[expset]]] call[name[kws_self]][name[objname]] assign[=] call[name[get_kwargs], parameter[name[kws_usr], name[usrkeys_curr], name[usrkeys_curr]]] variable[dpi] assign[=] call[name[str], parameter[call[call[name[kws_self]][constant[dag]].get, parameter[constant[dpi], call[name[self].dflts][constant[dpi]]]]]] call[call[name[kws_self]][constant[dag]]][constant[dpi]] assign[=] name[dpi] return[name[kws_self]]
keyword[def] identifier[_init_kws] ( identifier[self] ,** identifier[kws_usr] ): literal[string] identifier[kws_self] ={} identifier[user_keys] = identifier[set] ( identifier[kws_usr] ) keyword[for] identifier[objname] , identifier[expset] keyword[in] identifier[self] . identifier[exp_keys] . identifier[items] (): identifier[usrkeys_curr] = identifier[user_keys] . identifier[intersection] ( identifier[expset] ) identifier[kws_self] [ identifier[objname] ]= identifier[get_kwargs] ( identifier[kws_usr] , identifier[usrkeys_curr] , identifier[usrkeys_curr] ) identifier[dpi] = identifier[str] ( identifier[kws_self] [ literal[string] ]. identifier[get] ( literal[string] , identifier[self] . identifier[dflts] [ literal[string] ])) identifier[kws_self] [ literal[string] ][ literal[string] ]= identifier[dpi] keyword[return] identifier[kws_self]
def _init_kws(self, **kws_usr): """Return a dict containing user-specified plotting options.""" kws_self = {} user_keys = set(kws_usr) for (objname, expset) in self.exp_keys.items(): usrkeys_curr = user_keys.intersection(expset) kws_self[objname] = get_kwargs(kws_usr, usrkeys_curr, usrkeys_curr) # depends on [control=['for'], data=[]] dpi = str(kws_self['dag'].get('dpi', self.dflts['dpi'])) kws_self['dag']['dpi'] = dpi return kws_self
def template(page=None, layout=None, **kwargs): """ Decorator to change the view template and layout. It works on both View class and view methods on class only $layout is applied, everything else will be passed to the kwargs Using as first argument, it will be the layout. :first arg or $layout: The layout to use for that view :param layout: The layout to use for that view :param kwargs: get pass to the TEMPLATE_CONTEXT ** on method that return a dict page or layout are optional :param page: The html page :param layout: The layout to use for that view :param kwargs: get pass to the view as k/V ** on other methods that return other type, it doesn't apply :return: """ pkey = "_template_extends__" def decorator(f): if inspect.isclass(f): layout_ = layout or page extends = kwargs.pop("extends", None) if extends and hasattr(extends, pkey): items = getattr(extends, pkey).items() if "layout" in items: layout_ = items.pop("layout") for k, v in items: kwargs.setdefault(k, v) if not layout_: layout_ = "layout.html" kwargs.setdefault("brand_name", "") kwargs["layout"] = layout_ setattr(f, pkey, kwargs) setattr(f, "base_layout", kwargs.get("layout")) f.g(TEMPLATE_CONTEXT=kwargs) return f else: @functools.wraps(f) def wrap(*args2, **kwargs2): response = f(*args2, **kwargs2) if isinstance(response, dict) or response is None: response = response or {} if page: response.setdefault("template_", page) if layout: response.setdefault("layout_", layout) for k, v in kwargs.items(): response.setdefault(k, v) return response return wrap return decorator
def function[template, parameter[page, layout]]: constant[ Decorator to change the view template and layout. It works on both View class and view methods on class only $layout is applied, everything else will be passed to the kwargs Using as first argument, it will be the layout. :first arg or $layout: The layout to use for that view :param layout: The layout to use for that view :param kwargs: get pass to the TEMPLATE_CONTEXT ** on method that return a dict page or layout are optional :param page: The html page :param layout: The layout to use for that view :param kwargs: get pass to the view as k/V ** on other methods that return other type, it doesn't apply :return: ] variable[pkey] assign[=] constant[_template_extends__] def function[decorator, parameter[f]]: if call[name[inspect].isclass, parameter[name[f]]] begin[:] variable[layout_] assign[=] <ast.BoolOp object at 0x7da1b20d46d0> variable[extends] assign[=] call[name[kwargs].pop, parameter[constant[extends], constant[None]]] if <ast.BoolOp object at 0x7da1b20d5360> begin[:] variable[items] assign[=] call[call[name[getattr], parameter[name[extends], name[pkey]]].items, parameter[]] if compare[constant[layout] in name[items]] begin[:] variable[layout_] assign[=] call[name[items].pop, parameter[constant[layout]]] for taget[tuple[[<ast.Name object at 0x7da1b1ff5de0>, <ast.Name object at 0x7da1b1ff7ca0>]]] in starred[name[items]] begin[:] call[name[kwargs].setdefault, parameter[name[k], name[v]]] if <ast.UnaryOp object at 0x7da1b1ff7250> begin[:] variable[layout_] assign[=] constant[layout.html] call[name[kwargs].setdefault, parameter[constant[brand_name], constant[]]] call[name[kwargs]][constant[layout]] assign[=] name[layout_] call[name[setattr], parameter[name[f], name[pkey], name[kwargs]]] call[name[setattr], parameter[name[f], constant[base_layout], call[name[kwargs].get, parameter[constant[layout]]]]] call[name[f].g, parameter[]] return[name[f]] return[name[decorator]]
keyword[def] identifier[template] ( identifier[page] = keyword[None] , identifier[layout] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[pkey] = literal[string] keyword[def] identifier[decorator] ( identifier[f] ): keyword[if] identifier[inspect] . identifier[isclass] ( identifier[f] ): identifier[layout_] = identifier[layout] keyword[or] identifier[page] identifier[extends] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[if] identifier[extends] keyword[and] identifier[hasattr] ( identifier[extends] , identifier[pkey] ): identifier[items] = identifier[getattr] ( identifier[extends] , identifier[pkey] ). identifier[items] () keyword[if] literal[string] keyword[in] identifier[items] : identifier[layout_] = identifier[items] . identifier[pop] ( literal[string] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[items] : identifier[kwargs] . identifier[setdefault] ( identifier[k] , identifier[v] ) keyword[if] keyword[not] identifier[layout_] : identifier[layout_] = literal[string] identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] ) identifier[kwargs] [ literal[string] ]= identifier[layout_] identifier[setattr] ( identifier[f] , identifier[pkey] , identifier[kwargs] ) identifier[setattr] ( identifier[f] , literal[string] , identifier[kwargs] . identifier[get] ( literal[string] )) identifier[f] . identifier[g] ( identifier[TEMPLATE_CONTEXT] = identifier[kwargs] ) keyword[return] identifier[f] keyword[else] : @ identifier[functools] . 
identifier[wraps] ( identifier[f] ) keyword[def] identifier[wrap] (* identifier[args2] ,** identifier[kwargs2] ): identifier[response] = identifier[f] (* identifier[args2] ,** identifier[kwargs2] ) keyword[if] identifier[isinstance] ( identifier[response] , identifier[dict] ) keyword[or] identifier[response] keyword[is] keyword[None] : identifier[response] = identifier[response] keyword[or] {} keyword[if] identifier[page] : identifier[response] . identifier[setdefault] ( literal[string] , identifier[page] ) keyword[if] identifier[layout] : identifier[response] . identifier[setdefault] ( literal[string] , identifier[layout] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] (): identifier[response] . identifier[setdefault] ( identifier[k] , identifier[v] ) keyword[return] identifier[response] keyword[return] identifier[wrap] keyword[return] identifier[decorator]
def template(page=None, layout=None, **kwargs): """ Decorator to change the view template and layout. It works on both View class and view methods on class only $layout is applied, everything else will be passed to the kwargs Using as first argument, it will be the layout. :first arg or $layout: The layout to use for that view :param layout: The layout to use for that view :param kwargs: get pass to the TEMPLATE_CONTEXT ** on method that return a dict page or layout are optional :param page: The html page :param layout: The layout to use for that view :param kwargs: get pass to the view as k/V ** on other methods that return other type, it doesn't apply :return: """ pkey = '_template_extends__' def decorator(f): if inspect.isclass(f): layout_ = layout or page extends = kwargs.pop('extends', None) if extends and hasattr(extends, pkey): items = getattr(extends, pkey).items() if 'layout' in items: layout_ = items.pop('layout') # depends on [control=['if'], data=['items']] for (k, v) in items: kwargs.setdefault(k, v) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] if not layout_: layout_ = 'layout.html' # depends on [control=['if'], data=[]] kwargs.setdefault('brand_name', '') kwargs['layout'] = layout_ setattr(f, pkey, kwargs) setattr(f, 'base_layout', kwargs.get('layout')) f.g(TEMPLATE_CONTEXT=kwargs) return f # depends on [control=['if'], data=[]] else: @functools.wraps(f) def wrap(*args2, **kwargs2): response = f(*args2, **kwargs2) if isinstance(response, dict) or response is None: response = response or {} if page: response.setdefault('template_', page) # depends on [control=['if'], data=[]] if layout: response.setdefault('layout_', layout) # depends on [control=['if'], data=[]] for (k, v) in kwargs.items(): response.setdefault(k, v) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return response return wrap return decorator
def function_completions( completion_text: str, bel_spec: BELSpec, function_list: list, bel_fmt: str, size: int, ) -> list: """Filter BEL functions by prefix Args: prefix: completion string bel_fmt: short, medium, long BEL formats spec: BEL specification Returns: list: list of BEL functions that match prefix """ # Convert provided function list to correct bel_fmt if isinstance(function_list, list): if bel_fmt in ["short", "medium"]: function_list = [ bel_spec["functions"]["to_short"][fn] for fn in function_list ] else: function_list = [ bel_spec["functions"]["to_long"][fn] for fn in function_list ] elif bel_fmt in ["short", "medium"]: function_list = bel_spec["functions"]["primary"]["list_short"] else: function_list = bel_spec["functions"]["primary"]["list_long"] matches = [] for f in function_list: escaped_completion_text = completion_text.replace(r"(", r"\(").replace( r")", r"\)" ) log.debug(f"Completion match: {escaped_completion_text} F: {f}") if re.match(escaped_completion_text, f): matches.append(f) replace_list = [] for match in matches: if completion_text: highlight = match.replace(completion_text, f"<em>{completion_text}</em>") else: highlight = completion_text replace_list.append( { "replacement": match, "label": f"{match}()", "highlight": highlight, "type": "Function", } ) return replace_list[:size]
def function[function_completions, parameter[completion_text, bel_spec, function_list, bel_fmt, size]]: constant[Filter BEL functions by prefix Args: prefix: completion string bel_fmt: short, medium, long BEL formats spec: BEL specification Returns: list: list of BEL functions that match prefix ] if call[name[isinstance], parameter[name[function_list], name[list]]] begin[:] if compare[name[bel_fmt] in list[[<ast.Constant object at 0x7da1b18fd2a0>, <ast.Constant object at 0x7da1b18fd2d0>]]] begin[:] variable[function_list] assign[=] <ast.ListComp object at 0x7da1b18fd360> variable[matches] assign[=] list[[]] for taget[name[f]] in starred[name[function_list]] begin[:] variable[escaped_completion_text] assign[=] call[call[name[completion_text].replace, parameter[constant[(], constant[\(]]].replace, parameter[constant[)], constant[\)]]] call[name[log].debug, parameter[<ast.JoinedStr object at 0x7da1b18fe0b0>]] if call[name[re].match, parameter[name[escaped_completion_text], name[f]]] begin[:] call[name[matches].append, parameter[name[f]]] variable[replace_list] assign[=] list[[]] for taget[name[match]] in starred[name[matches]] begin[:] if name[completion_text] begin[:] variable[highlight] assign[=] call[name[match].replace, parameter[name[completion_text], <ast.JoinedStr object at 0x7da1b18feb30>]] call[name[replace_list].append, parameter[dictionary[[<ast.Constant object at 0x7da1b18fe890>, <ast.Constant object at 0x7da1b18fe860>, <ast.Constant object at 0x7da1b18fe350>, <ast.Constant object at 0x7da1b18fe320>], [<ast.Name object at 0x7da1b18fe2f0>, <ast.JoinedStr object at 0x7da1b18fe2c0>, <ast.Name object at 0x7da1b18fe1d0>, <ast.Constant object at 0x7da1b18fe1a0>]]]] return[call[name[replace_list]][<ast.Slice object at 0x7da1b18fc0d0>]]
keyword[def] identifier[function_completions] ( identifier[completion_text] : identifier[str] , identifier[bel_spec] : identifier[BELSpec] , identifier[function_list] : identifier[list] , identifier[bel_fmt] : identifier[str] , identifier[size] : identifier[int] , )-> identifier[list] : literal[string] keyword[if] identifier[isinstance] ( identifier[function_list] , identifier[list] ): keyword[if] identifier[bel_fmt] keyword[in] [ literal[string] , literal[string] ]: identifier[function_list] =[ identifier[bel_spec] [ literal[string] ][ literal[string] ][ identifier[fn] ] keyword[for] identifier[fn] keyword[in] identifier[function_list] ] keyword[else] : identifier[function_list] =[ identifier[bel_spec] [ literal[string] ][ literal[string] ][ identifier[fn] ] keyword[for] identifier[fn] keyword[in] identifier[function_list] ] keyword[elif] identifier[bel_fmt] keyword[in] [ literal[string] , literal[string] ]: identifier[function_list] = identifier[bel_spec] [ literal[string] ][ literal[string] ][ literal[string] ] keyword[else] : identifier[function_list] = identifier[bel_spec] [ literal[string] ][ literal[string] ][ literal[string] ] identifier[matches] =[] keyword[for] identifier[f] keyword[in] identifier[function_list] : identifier[escaped_completion_text] = identifier[completion_text] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) identifier[log] . identifier[debug] ( literal[string] ) keyword[if] identifier[re] . identifier[match] ( identifier[escaped_completion_text] , identifier[f] ): identifier[matches] . identifier[append] ( identifier[f] ) identifier[replace_list] =[] keyword[for] identifier[match] keyword[in] identifier[matches] : keyword[if] identifier[completion_text] : identifier[highlight] = identifier[match] . identifier[replace] ( identifier[completion_text] , literal[string] ) keyword[else] : identifier[highlight] = identifier[completion_text] identifier[replace_list] . 
identifier[append] ( { literal[string] : identifier[match] , literal[string] : literal[string] , literal[string] : identifier[highlight] , literal[string] : literal[string] , } ) keyword[return] identifier[replace_list] [: identifier[size] ]
def function_completions(completion_text: str, bel_spec: BELSpec, function_list: list, bel_fmt: str, size: int) -> list: """Filter BEL functions by prefix Args: prefix: completion string bel_fmt: short, medium, long BEL formats spec: BEL specification Returns: list: list of BEL functions that match prefix """ # Convert provided function list to correct bel_fmt if isinstance(function_list, list): if bel_fmt in ['short', 'medium']: function_list = [bel_spec['functions']['to_short'][fn] for fn in function_list] # depends on [control=['if'], data=[]] else: function_list = [bel_spec['functions']['to_long'][fn] for fn in function_list] # depends on [control=['if'], data=[]] elif bel_fmt in ['short', 'medium']: function_list = bel_spec['functions']['primary']['list_short'] # depends on [control=['if'], data=[]] else: function_list = bel_spec['functions']['primary']['list_long'] matches = [] for f in function_list: escaped_completion_text = completion_text.replace('(', '\\(').replace(')', '\\)') log.debug(f'Completion match: {escaped_completion_text} F: {f}') if re.match(escaped_completion_text, f): matches.append(f) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['f']] replace_list = [] for match in matches: if completion_text: highlight = match.replace(completion_text, f'<em>{completion_text}</em>') # depends on [control=['if'], data=[]] else: highlight = completion_text replace_list.append({'replacement': match, 'label': f'{match}()', 'highlight': highlight, 'type': 'Function'}) # depends on [control=['for'], data=['match']] return replace_list[:size]
def display_files(self, pcs_files): '''ι‡ζ–°ζ ΌεΌεŒ–δΈ€δΈ‹ζ–‡δ»Άεˆ—θ‘¨, εŽ»ι™€δΈιœ€θ¦ηš„δΏ‘ζ― θΏ™δΈ€ζ“δ½œδΈ»θ¦ζ˜―δΈΊδΊ†δΎΏδΊŽζŽ₯δΈ‹ζ₯ηš„ζŸ₯ζ‰Ύε·₯作. ζ–‡δ»Άηš„path都蒫提取出ζ₯, η„ΆεŽζ”Ύεˆ°δΊ†δΈ€δΈͺlistδΈ­. ''' tree_iters = [] for pcs_file in pcs_files: path = pcs_file['path'] pixbuf, type_ = self.app.mime.get(path, pcs_file['isdir'], icon_size=self.ICON_SIZE) name = os.path.split(path)[NAME_COL] tooltip = gutil.escape(name) size = pcs_file.get('size', 0) if pcs_file['isdir']: human_size = '--' else: human_size = util.get_human_size(pcs_file['size'])[0] mtime = pcs_file.get('server_mtime', 0) human_mtime = time.ctime(mtime) tree_iter = self.liststore.append([ pixbuf, name, path, tooltip, size, human_size, pcs_file['isdir'], mtime, human_mtime, type_, json.dumps(pcs_file) ]) tree_iters.append(tree_iter) cache_path = Config.get_cache_path(self.app.profile['username']) gutil.async_call(gutil.update_liststore_image, self.liststore, tree_iters, PIXBUF_COL, pcs_files, cache_path, self.ICON_SIZE)
def function[display_files, parameter[self, pcs_files]]: constant[ι‡ζ–°ζ ΌεΌεŒ–δΈ€δΈ‹ζ–‡δ»Άεˆ—θ‘¨, εŽ»ι™€δΈιœ€θ¦ηš„δΏ‘ζ― θΏ™δΈ€ζ“δ½œδΈ»θ¦ζ˜―δΈΊδΊ†δΎΏδΊŽζŽ₯δΈ‹ζ₯ηš„ζŸ₯ζ‰Ύε·₯作. ζ–‡δ»Άηš„path都蒫提取出ζ₯, η„ΆεŽζ”Ύεˆ°δΊ†δΈ€δΈͺlistδΈ­. ] variable[tree_iters] assign[=] list[[]] for taget[name[pcs_file]] in starred[name[pcs_files]] begin[:] variable[path] assign[=] call[name[pcs_file]][constant[path]] <ast.Tuple object at 0x7da1b1d51840> assign[=] call[name[self].app.mime.get, parameter[name[path], call[name[pcs_file]][constant[isdir]]]] variable[name] assign[=] call[call[name[os].path.split, parameter[name[path]]]][name[NAME_COL]] variable[tooltip] assign[=] call[name[gutil].escape, parameter[name[name]]] variable[size] assign[=] call[name[pcs_file].get, parameter[constant[size], constant[0]]] if call[name[pcs_file]][constant[isdir]] begin[:] variable[human_size] assign[=] constant[--] variable[mtime] assign[=] call[name[pcs_file].get, parameter[constant[server_mtime], constant[0]]] variable[human_mtime] assign[=] call[name[time].ctime, parameter[name[mtime]]] variable[tree_iter] assign[=] call[name[self].liststore.append, parameter[list[[<ast.Name object at 0x7da1b1d51b10>, <ast.Name object at 0x7da1b1d51a50>, <ast.Name object at 0x7da1b1d51d50>, <ast.Name object at 0x7da1b1d51d80>, <ast.Name object at 0x7da1b1d51d20>, <ast.Name object at 0x7da1b1d51cf0>, <ast.Subscript object at 0x7da1b1d51c90>, <ast.Name object at 0x7da1b1d52aa0>, <ast.Name object at 0x7da1b1d504f0>, <ast.Name object at 0x7da1b1d50520>, <ast.Call object at 0x7da1b1d504c0>]]]] call[name[tree_iters].append, parameter[name[tree_iter]]] variable[cache_path] assign[=] call[name[Config].get_cache_path, parameter[call[name[self].app.profile][constant[username]]]] call[name[gutil].async_call, parameter[name[gutil].update_liststore_image, name[self].liststore, name[tree_iters], name[PIXBUF_COL], name[pcs_files], name[cache_path], name[self].ICON_SIZE]]
keyword[def] identifier[display_files] ( identifier[self] , identifier[pcs_files] ): literal[string] identifier[tree_iters] =[] keyword[for] identifier[pcs_file] keyword[in] identifier[pcs_files] : identifier[path] = identifier[pcs_file] [ literal[string] ] identifier[pixbuf] , identifier[type_] = identifier[self] . identifier[app] . identifier[mime] . identifier[get] ( identifier[path] , identifier[pcs_file] [ literal[string] ], identifier[icon_size] = identifier[self] . identifier[ICON_SIZE] ) identifier[name] = identifier[os] . identifier[path] . identifier[split] ( identifier[path] )[ identifier[NAME_COL] ] identifier[tooltip] = identifier[gutil] . identifier[escape] ( identifier[name] ) identifier[size] = identifier[pcs_file] . identifier[get] ( literal[string] , literal[int] ) keyword[if] identifier[pcs_file] [ literal[string] ]: identifier[human_size] = literal[string] keyword[else] : identifier[human_size] = identifier[util] . identifier[get_human_size] ( identifier[pcs_file] [ literal[string] ])[ literal[int] ] identifier[mtime] = identifier[pcs_file] . identifier[get] ( literal[string] , literal[int] ) identifier[human_mtime] = identifier[time] . identifier[ctime] ( identifier[mtime] ) identifier[tree_iter] = identifier[self] . identifier[liststore] . identifier[append] ([ identifier[pixbuf] , identifier[name] , identifier[path] , identifier[tooltip] , identifier[size] , identifier[human_size] , identifier[pcs_file] [ literal[string] ], identifier[mtime] , identifier[human_mtime] , identifier[type_] , identifier[json] . identifier[dumps] ( identifier[pcs_file] ) ]) identifier[tree_iters] . identifier[append] ( identifier[tree_iter] ) identifier[cache_path] = identifier[Config] . identifier[get_cache_path] ( identifier[self] . identifier[app] . identifier[profile] [ literal[string] ]) identifier[gutil] . identifier[async_call] ( identifier[gutil] . identifier[update_liststore_image] , identifier[self] . 
identifier[liststore] , identifier[tree_iters] , identifier[PIXBUF_COL] , identifier[pcs_files] , identifier[cache_path] , identifier[self] . identifier[ICON_SIZE] )
def display_files(self, pcs_files): """ι‡ζ–°ζ ΌεΌεŒ–δΈ€δΈ‹ζ–‡δ»Άεˆ—θ‘¨, εŽ»ι™€δΈιœ€θ¦ηš„δΏ‘ζ― θΏ™δΈ€ζ“δ½œδΈ»θ¦ζ˜―δΈΊδΊ†δΎΏδΊŽζŽ₯δΈ‹ζ₯ηš„ζŸ₯ζ‰Ύε·₯作. ζ–‡δ»Άηš„path都蒫提取出ζ₯, η„ΆεŽζ”Ύεˆ°δΊ†δΈ€δΈͺlistδΈ­. """ tree_iters = [] for pcs_file in pcs_files: path = pcs_file['path'] (pixbuf, type_) = self.app.mime.get(path, pcs_file['isdir'], icon_size=self.ICON_SIZE) name = os.path.split(path)[NAME_COL] tooltip = gutil.escape(name) size = pcs_file.get('size', 0) if pcs_file['isdir']: human_size = '--' # depends on [control=['if'], data=[]] else: human_size = util.get_human_size(pcs_file['size'])[0] mtime = pcs_file.get('server_mtime', 0) human_mtime = time.ctime(mtime) tree_iter = self.liststore.append([pixbuf, name, path, tooltip, size, human_size, pcs_file['isdir'], mtime, human_mtime, type_, json.dumps(pcs_file)]) tree_iters.append(tree_iter) # depends on [control=['for'], data=['pcs_file']] cache_path = Config.get_cache_path(self.app.profile['username']) gutil.async_call(gutil.update_liststore_image, self.liststore, tree_iters, PIXBUF_COL, pcs_files, cache_path, self.ICON_SIZE)
def rpc_get_name_DID(self, name, **con_info): """ Given a name or subdomain, return its DID. """ did_info = None if check_name(name): did_info = self.get_name_DID_info(name) elif check_subdomain(name): did_info = self.get_subdomain_DID_info(name) else: return {'error': 'Invalid name or subdomain', 'http_status': 400} if did_info is None: return {'error': 'No DID for this name', 'http_status': 404} did = make_DID(did_info['name_type'], did_info['address'], did_info['index']) return self.success_response({'did': did})
def function[rpc_get_name_DID, parameter[self, name]]: constant[ Given a name or subdomain, return its DID. ] variable[did_info] assign[=] constant[None] if call[name[check_name], parameter[name[name]]] begin[:] variable[did_info] assign[=] call[name[self].get_name_DID_info, parameter[name[name]]] if compare[name[did_info] is constant[None]] begin[:] return[dictionary[[<ast.Constant object at 0x7da1b17d6080>, <ast.Constant object at 0x7da1b17d7580>], [<ast.Constant object at 0x7da1b17d5ab0>, <ast.Constant object at 0x7da1b17d6e90>]]] variable[did] assign[=] call[name[make_DID], parameter[call[name[did_info]][constant[name_type]], call[name[did_info]][constant[address]], call[name[did_info]][constant[index]]]] return[call[name[self].success_response, parameter[dictionary[[<ast.Constant object at 0x7da1b17d5d50>], [<ast.Name object at 0x7da1b17d6c80>]]]]]
keyword[def] identifier[rpc_get_name_DID] ( identifier[self] , identifier[name] ,** identifier[con_info] ): literal[string] identifier[did_info] = keyword[None] keyword[if] identifier[check_name] ( identifier[name] ): identifier[did_info] = identifier[self] . identifier[get_name_DID_info] ( identifier[name] ) keyword[elif] identifier[check_subdomain] ( identifier[name] ): identifier[did_info] = identifier[self] . identifier[get_subdomain_DID_info] ( identifier[name] ) keyword[else] : keyword[return] { literal[string] : literal[string] , literal[string] : literal[int] } keyword[if] identifier[did_info] keyword[is] keyword[None] : keyword[return] { literal[string] : literal[string] , literal[string] : literal[int] } identifier[did] = identifier[make_DID] ( identifier[did_info] [ literal[string] ], identifier[did_info] [ literal[string] ], identifier[did_info] [ literal[string] ]) keyword[return] identifier[self] . identifier[success_response] ({ literal[string] : identifier[did] })
def rpc_get_name_DID(self, name, **con_info): """ Given a name or subdomain, return its DID. """ did_info = None if check_name(name): did_info = self.get_name_DID_info(name) # depends on [control=['if'], data=[]] elif check_subdomain(name): did_info = self.get_subdomain_DID_info(name) # depends on [control=['if'], data=[]] else: return {'error': 'Invalid name or subdomain', 'http_status': 400} if did_info is None: return {'error': 'No DID for this name', 'http_status': 404} # depends on [control=['if'], data=[]] did = make_DID(did_info['name_type'], did_info['address'], did_info['index']) return self.success_response({'did': did})
def load_config(self, path='', in_candidate=False, empty_candidate=False, config_text=None): """ This method will load a block of config represented as a :py:class:`FortiConfig` object in the running config, in the candidate config or in both. Args: * **path** (str) -- This is the block of config you want to load. For example *system interface*\ or *router bgp* * **in_candidate** (bool): * If ``True`` the config will be loaded as *candidate* * If ``False`` the config will be loaded as *running* * **empty_candidate** (bool): * If ``True`` the *candidate* config will be left unmodified. * If ``False`` the *candidate* config will be loaded with a block of config containing\ the same information as the config loaded in the *running* config. * **config_text** (str) -- Instead of loading the config from the device itself (using the ``path``\ variable, you can specify here the config as text. """ logger.info('Loading config. path:%s, in_candidate:%s, empty_candidate:%s, config_text:%s' % ( path, in_candidate, empty_candidate, config_text is not None)) if config_text is None: if self.vdom is not None: if self.vdom == 'global': command = 'conf global\nshow %s\nend' % path else: command = 'conf vdom\nedit %s\nshow %s\nend' % (self.vdom, path) else: command = 'show %s' % path config_text = self.execute_command(command) if not in_candidate: self.running_config.parse_config_output(config_text) self.running_config.add_path(path) if not empty_candidate or in_candidate: self.candidate_config.parse_config_output(config_text) self.candidate_config.add_path(path)
def function[load_config, parameter[self, path, in_candidate, empty_candidate, config_text]]: constant[ This method will load a block of config represented as a :py:class:`FortiConfig` object in the running config, in the candidate config or in both. Args: * **path** (str) -- This is the block of config you want to load. For example *system interface* or *router bgp* * **in_candidate** (bool): * If ``True`` the config will be loaded as *candidate* * If ``False`` the config will be loaded as *running* * **empty_candidate** (bool): * If ``True`` the *candidate* config will be left unmodified. * If ``False`` the *candidate* config will be loaded with a block of config containing the same information as the config loaded in the *running* config. * **config_text** (str) -- Instead of loading the config from the device itself (using the ``path`` variable, you can specify here the config as text. ] call[name[logger].info, parameter[binary_operation[constant[Loading config. path:%s, in_candidate:%s, empty_candidate:%s, config_text:%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1278910>, <ast.Name object at 0x7da1b127b4f0>, <ast.Name object at 0x7da1b127b9a0>, <ast.Compare object at 0x7da1b127b160>]]]]] if compare[name[config_text] is constant[None]] begin[:] if compare[name[self].vdom is_not constant[None]] begin[:] if compare[name[self].vdom equal[==] constant[global]] begin[:] variable[command] assign[=] binary_operation[constant[conf global show %s end] <ast.Mod object at 0x7da2590d6920> name[path]] variable[config_text] assign[=] call[name[self].execute_command, parameter[name[command]]] if <ast.UnaryOp object at 0x7da1b127ac80> begin[:] call[name[self].running_config.parse_config_output, parameter[name[config_text]]] call[name[self].running_config.add_path, parameter[name[path]]] if <ast.BoolOp object at 0x7da1b12794b0> begin[:] call[name[self].candidate_config.parse_config_output, parameter[name[config_text]]] 
call[name[self].candidate_config.add_path, parameter[name[path]]]
keyword[def] identifier[load_config] ( identifier[self] , identifier[path] = literal[string] , identifier[in_candidate] = keyword[False] , identifier[empty_candidate] = keyword[False] , identifier[config_text] = keyword[None] ): literal[string] identifier[logger] . identifier[info] ( literal[string] %( identifier[path] , identifier[in_candidate] , identifier[empty_candidate] , identifier[config_text] keyword[is] keyword[not] keyword[None] )) keyword[if] identifier[config_text] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[vdom] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[vdom] == literal[string] : identifier[command] = literal[string] % identifier[path] keyword[else] : identifier[command] = literal[string] %( identifier[self] . identifier[vdom] , identifier[path] ) keyword[else] : identifier[command] = literal[string] % identifier[path] identifier[config_text] = identifier[self] . identifier[execute_command] ( identifier[command] ) keyword[if] keyword[not] identifier[in_candidate] : identifier[self] . identifier[running_config] . identifier[parse_config_output] ( identifier[config_text] ) identifier[self] . identifier[running_config] . identifier[add_path] ( identifier[path] ) keyword[if] keyword[not] identifier[empty_candidate] keyword[or] identifier[in_candidate] : identifier[self] . identifier[candidate_config] . identifier[parse_config_output] ( identifier[config_text] ) identifier[self] . identifier[candidate_config] . identifier[add_path] ( identifier[path] )
def load_config(self, path='', in_candidate=False, empty_candidate=False, config_text=None): """ This method will load a block of config represented as a :py:class:`FortiConfig` object in the running config, in the candidate config or in both. Args: * **path** (str) -- This is the block of config you want to load. For example *system interface* or *router bgp* * **in_candidate** (bool): * If ``True`` the config will be loaded as *candidate* * If ``False`` the config will be loaded as *running* * **empty_candidate** (bool): * If ``True`` the *candidate* config will be left unmodified. * If ``False`` the *candidate* config will be loaded with a block of config containing the same information as the config loaded in the *running* config. * **config_text** (str) -- Instead of loading the config from the device itself (using the ``path`` variable, you can specify here the config as text. """ logger.info('Loading config. path:%s, in_candidate:%s, empty_candidate:%s, config_text:%s' % (path, in_candidate, empty_candidate, config_text is not None)) if config_text is None: if self.vdom is not None: if self.vdom == 'global': command = 'conf global\nshow %s\nend' % path # depends on [control=['if'], data=[]] else: command = 'conf vdom\nedit %s\nshow %s\nend' % (self.vdom, path) # depends on [control=['if'], data=[]] else: command = 'show %s' % path config_text = self.execute_command(command) # depends on [control=['if'], data=['config_text']] if not in_candidate: self.running_config.parse_config_output(config_text) self.running_config.add_path(path) # depends on [control=['if'], data=[]] if not empty_candidate or in_candidate: self.candidate_config.parse_config_output(config_text) self.candidate_config.add_path(path) # depends on [control=['if'], data=[]]
def open(self): '''Initialises connection to q service. If the connection hasn't been initialised yet, invoking the :func:`.open` creates a new socket and performs a handshake with a q service. :raises: :class:`.QConnectionException`, :class:`.QAuthenticationException` ''' if not self._connection: if not self.host: raise QConnectionException('Host cannot be None') self._init_socket() self._initialize() self._writer = self._writer_class(self._connection, protocol_version = self._protocol_version, encoding = self._encoding) self._reader = self._reader_class(self._connection_file, encoding = self._encoding)
def function[open, parameter[self]]: constant[Initialises connection to q service. If the connection hasn't been initialised yet, invoking the :func:`.open` creates a new socket and performs a handshake with a q service. :raises: :class:`.QConnectionException`, :class:`.QAuthenticationException` ] if <ast.UnaryOp object at 0x7da1b0bdbca0> begin[:] if <ast.UnaryOp object at 0x7da1b0bdafb0> begin[:] <ast.Raise object at 0x7da1b0bdbd60> call[name[self]._init_socket, parameter[]] call[name[self]._initialize, parameter[]] name[self]._writer assign[=] call[name[self]._writer_class, parameter[name[self]._connection]] name[self]._reader assign[=] call[name[self]._reader_class, parameter[name[self]._connection_file]]
keyword[def] identifier[open] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_connection] : keyword[if] keyword[not] identifier[self] . identifier[host] : keyword[raise] identifier[QConnectionException] ( literal[string] ) identifier[self] . identifier[_init_socket] () identifier[self] . identifier[_initialize] () identifier[self] . identifier[_writer] = identifier[self] . identifier[_writer_class] ( identifier[self] . identifier[_connection] , identifier[protocol_version] = identifier[self] . identifier[_protocol_version] , identifier[encoding] = identifier[self] . identifier[_encoding] ) identifier[self] . identifier[_reader] = identifier[self] . identifier[_reader_class] ( identifier[self] . identifier[_connection_file] , identifier[encoding] = identifier[self] . identifier[_encoding] )
def open(self): """Initialises connection to q service. If the connection hasn't been initialised yet, invoking the :func:`.open` creates a new socket and performs a handshake with a q service. :raises: :class:`.QConnectionException`, :class:`.QAuthenticationException` """ if not self._connection: if not self.host: raise QConnectionException('Host cannot be None') # depends on [control=['if'], data=[]] self._init_socket() self._initialize() self._writer = self._writer_class(self._connection, protocol_version=self._protocol_version, encoding=self._encoding) self._reader = self._reader_class(self._connection_file, encoding=self._encoding) # depends on [control=['if'], data=[]]
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] else: [led.off() for led in leds] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break
def function[run_loop, parameter[leds]]: constant[ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects ] call[name[print], parameter[constant[Loop started. Press Ctrl+C to break out of the loop.]]] while constant[1] begin[:] <ast.Try object at 0x7da1b1f965f0>
keyword[def] identifier[run_loop] ( identifier[leds] = identifier[all_leds] ): literal[string] identifier[print] ( literal[string] ) keyword[while] literal[int] : keyword[try] : keyword[if] identifier[switch] (): [ identifier[led] . identifier[on] () keyword[for] identifier[led] keyword[in] identifier[leds] ] keyword[else] : [ identifier[led] . identifier[off] () keyword[for] identifier[led] keyword[in] identifier[leds] ] keyword[except] identifier[OSError] : keyword[break]
def run_loop(leds=all_leds): """ Start the loop. :param `leds`: Which LEDs to light up upon switch press. :type `leds`: sequence of LED objects """ print('Loop started.\nPress Ctrl+C to break out of the loop.') while 1: try: if switch(): [led.on() for led in leds] # depends on [control=['if'], data=[]] else: [led.off() for led in leds] # depends on [control=['try'], data=[]] except OSError: # VCPInterrupt # Ctrl+C in interpreter mode. break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]]
def process_backup_instance_faulty_msg(self, backup_faulty: BackupInstanceFaulty, frm: str) -> None: ''' The method for processing BackupInstanceFaulty from nodes and removing replicas with performance were degraded :param backup_faulty: BackupInstanceFaulty message with instances for removing :param frm: :return: ''' logger.debug("{} receive BackupInstanceFaulty " "from {}: {}".format(self.node, frm, backup_faulty)) instances = getattr(backup_faulty, f.INSTANCES.nm) if getattr(backup_faulty, f.VIEW_NO.nm) != self.node.viewNo or \ self.node.master_replica.instId in instances: return # Don't process BackupInstanceFaulty if strategy for this reason is not need quorum reason = Suspicions.get_by_code(getattr(backup_faulty, f.REASON.nm)) if ( reason == Suspicions.BACKUP_PRIMARY_DISCONNECTED and not self.is_quorum_strategy(self.node.config.REPLICAS_REMOVING_WITH_PRIMARY_DISCONNECTED) ) or ( reason == Suspicions.BACKUP_PRIMARY_DEGRADED and not self.is_quorum_strategy(self.node.config.REPLICAS_REMOVING_WITH_DEGRADATION) ): return for inst_id in getattr(backup_faulty, f.INSTANCES.nm): if inst_id not in self.node.replicas.keys(): continue self.backup_instances_faulty.setdefault(inst_id, dict()).setdefault(frm, 0) self.backup_instances_faulty[inst_id][frm] += 1 all_nodes_condition = self.node.quorums.backup_instance_faulty.is_reached( len(self.backup_instances_faulty[inst_id].keys())) this_node_condition = (self.node.name in self.backup_instances_faulty[inst_id] and self.node.quorums.backup_instance_faulty.is_reached( self.backup_instances_faulty[inst_id][self.node.name])) if all_nodes_condition or this_node_condition: self.node.replicas.remove_replica(inst_id) self.backup_instances_faulty.pop(inst_id)
def function[process_backup_instance_faulty_msg, parameter[self, backup_faulty, frm]]: constant[ The method for processing BackupInstanceFaulty from nodes and removing replicas with performance were degraded :param backup_faulty: BackupInstanceFaulty message with instances for removing :param frm: :return: ] call[name[logger].debug, parameter[call[constant[{} receive BackupInstanceFaulty from {}: {}].format, parameter[name[self].node, name[frm], name[backup_faulty]]]]] variable[instances] assign[=] call[name[getattr], parameter[name[backup_faulty], name[f].INSTANCES.nm]] if <ast.BoolOp object at 0x7da1b1726530> begin[:] return[None] variable[reason] assign[=] call[name[Suspicions].get_by_code, parameter[call[name[getattr], parameter[name[backup_faulty], name[f].REASON.nm]]]] if <ast.BoolOp object at 0x7da1b1726d70> begin[:] return[None] for taget[name[inst_id]] in starred[call[name[getattr], parameter[name[backup_faulty], name[f].INSTANCES.nm]]] begin[:] if compare[name[inst_id] <ast.NotIn object at 0x7da2590d7190> call[name[self].node.replicas.keys, parameter[]]] begin[:] continue call[call[name[self].backup_instances_faulty.setdefault, parameter[name[inst_id], call[name[dict], parameter[]]]].setdefault, parameter[name[frm], constant[0]]] <ast.AugAssign object at 0x7da18dc9ba00> variable[all_nodes_condition] assign[=] call[name[self].node.quorums.backup_instance_faulty.is_reached, parameter[call[name[len], parameter[call[call[name[self].backup_instances_faulty][name[inst_id]].keys, parameter[]]]]]] variable[this_node_condition] assign[=] <ast.BoolOp object at 0x7da18dc98af0> if <ast.BoolOp object at 0x7da18dc98430> begin[:] call[name[self].node.replicas.remove_replica, parameter[name[inst_id]]] call[name[self].backup_instances_faulty.pop, parameter[name[inst_id]]]
keyword[def] identifier[process_backup_instance_faulty_msg] ( identifier[self] , identifier[backup_faulty] : identifier[BackupInstanceFaulty] , identifier[frm] : identifier[str] )-> keyword[None] : literal[string] identifier[logger] . identifier[debug] ( literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[node] , identifier[frm] , identifier[backup_faulty] )) identifier[instances] = identifier[getattr] ( identifier[backup_faulty] , identifier[f] . identifier[INSTANCES] . identifier[nm] ) keyword[if] identifier[getattr] ( identifier[backup_faulty] , identifier[f] . identifier[VIEW_NO] . identifier[nm] )!= identifier[self] . identifier[node] . identifier[viewNo] keyword[or] identifier[self] . identifier[node] . identifier[master_replica] . identifier[instId] keyword[in] identifier[instances] : keyword[return] identifier[reason] = identifier[Suspicions] . identifier[get_by_code] ( identifier[getattr] ( identifier[backup_faulty] , identifier[f] . identifier[REASON] . identifier[nm] )) keyword[if] ( identifier[reason] == identifier[Suspicions] . identifier[BACKUP_PRIMARY_DISCONNECTED] keyword[and] keyword[not] identifier[self] . identifier[is_quorum_strategy] ( identifier[self] . identifier[node] . identifier[config] . identifier[REPLICAS_REMOVING_WITH_PRIMARY_DISCONNECTED] ) ) keyword[or] ( identifier[reason] == identifier[Suspicions] . identifier[BACKUP_PRIMARY_DEGRADED] keyword[and] keyword[not] identifier[self] . identifier[is_quorum_strategy] ( identifier[self] . identifier[node] . identifier[config] . identifier[REPLICAS_REMOVING_WITH_DEGRADATION] ) ): keyword[return] keyword[for] identifier[inst_id] keyword[in] identifier[getattr] ( identifier[backup_faulty] , identifier[f] . identifier[INSTANCES] . identifier[nm] ): keyword[if] identifier[inst_id] keyword[not] keyword[in] identifier[self] . identifier[node] . identifier[replicas] . identifier[keys] (): keyword[continue] identifier[self] . identifier[backup_instances_faulty] . 
identifier[setdefault] ( identifier[inst_id] , identifier[dict] ()). identifier[setdefault] ( identifier[frm] , literal[int] ) identifier[self] . identifier[backup_instances_faulty] [ identifier[inst_id] ][ identifier[frm] ]+= literal[int] identifier[all_nodes_condition] = identifier[self] . identifier[node] . identifier[quorums] . identifier[backup_instance_faulty] . identifier[is_reached] ( identifier[len] ( identifier[self] . identifier[backup_instances_faulty] [ identifier[inst_id] ]. identifier[keys] ())) identifier[this_node_condition] =( identifier[self] . identifier[node] . identifier[name] keyword[in] identifier[self] . identifier[backup_instances_faulty] [ identifier[inst_id] ] keyword[and] identifier[self] . identifier[node] . identifier[quorums] . identifier[backup_instance_faulty] . identifier[is_reached] ( identifier[self] . identifier[backup_instances_faulty] [ identifier[inst_id] ][ identifier[self] . identifier[node] . identifier[name] ])) keyword[if] identifier[all_nodes_condition] keyword[or] identifier[this_node_condition] : identifier[self] . identifier[node] . identifier[replicas] . identifier[remove_replica] ( identifier[inst_id] ) identifier[self] . identifier[backup_instances_faulty] . identifier[pop] ( identifier[inst_id] )
def process_backup_instance_faulty_msg(self, backup_faulty: BackupInstanceFaulty, frm: str) -> None: """ The method for processing BackupInstanceFaulty from nodes and removing replicas with performance were degraded :param backup_faulty: BackupInstanceFaulty message with instances for removing :param frm: :return: """ logger.debug('{} receive BackupInstanceFaulty from {}: {}'.format(self.node, frm, backup_faulty)) instances = getattr(backup_faulty, f.INSTANCES.nm) if getattr(backup_faulty, f.VIEW_NO.nm) != self.node.viewNo or self.node.master_replica.instId in instances: return # depends on [control=['if'], data=[]] # Don't process BackupInstanceFaulty if strategy for this reason is not need quorum reason = Suspicions.get_by_code(getattr(backup_faulty, f.REASON.nm)) if reason == Suspicions.BACKUP_PRIMARY_DISCONNECTED and (not self.is_quorum_strategy(self.node.config.REPLICAS_REMOVING_WITH_PRIMARY_DISCONNECTED)) or (reason == Suspicions.BACKUP_PRIMARY_DEGRADED and (not self.is_quorum_strategy(self.node.config.REPLICAS_REMOVING_WITH_DEGRADATION))): return # depends on [control=['if'], data=[]] for inst_id in getattr(backup_faulty, f.INSTANCES.nm): if inst_id not in self.node.replicas.keys(): continue # depends on [control=['if'], data=[]] self.backup_instances_faulty.setdefault(inst_id, dict()).setdefault(frm, 0) self.backup_instances_faulty[inst_id][frm] += 1 all_nodes_condition = self.node.quorums.backup_instance_faulty.is_reached(len(self.backup_instances_faulty[inst_id].keys())) this_node_condition = self.node.name in self.backup_instances_faulty[inst_id] and self.node.quorums.backup_instance_faulty.is_reached(self.backup_instances_faulty[inst_id][self.node.name]) if all_nodes_condition or this_node_condition: self.node.replicas.remove_replica(inst_id) self.backup_instances_faulty.pop(inst_id) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inst_id']]
def load_SUEWS_Forcing_met_df_pattern(path_input, forcingfile_met_pattern): """Short summary. Parameters ---------- forcingfile_met_pattern : type Description of parameter `forcingfile_met_pattern`. Returns ------- type Description of returned object. """ # list of met forcing files path_input = path_input.resolve() # forcingfile_met_pattern = os.path.abspath(forcingfile_met_pattern) list_file_MetForcing = sorted([ f for f in path_input.glob(forcingfile_met_pattern) if 'ESTM' not in f.name]) # print(forcingfile_met_pattern) # print(list_file_MetForcing) # load raw data # read in forcing with dask.dataframe in parallel dd_forcing_met = dd.read_csv( list_file_MetForcing, delim_whitespace=True, comment='!', error_bad_lines=True ) # convert to normal pandas dataframe df_forcing_met = dd_forcing_met.compute() # `drop_duplicates` in case some duplicates mixed df_forcing_met = df_forcing_met.drop_duplicates() col_suews_met_forcing = [ 'iy', 'id', 'it', 'imin', 'qn', 'qh', 'qe', 'qs', 'qf', 'U', 'RH', 'Tair', 'pres', 'rain', 'kdown', 'snow', 'ldown', 'fcld', 'Wuh', 'xsmd', 'lai', 'kdiff', 'kdir', 'wdir' ] # rename these columns to match variables via the driver interface df_forcing_met.columns = col_suews_met_forcing # convert unit from kPa to hPa df_forcing_met['pres'] *= 10 # add `isec` for WRF-SUEWS interface df_forcing_met['isec'] = 0 # set correct data types df_forcing_met[['iy', 'id', 'it', 'imin', 'isec']] = df_forcing_met[[ 'iy', 'id', 'it', 'imin', 'isec']].astype(np.int64) # set timestamp as index idx_dt = pd.date_range( *df_forcing_met.iloc[[0, -1], :4].astype(int).astype(str).apply( lambda ser: ser.str.cat(sep=' '), axis=1).map( lambda dt: pd.Timestamp.strptime(dt, '%Y %j %H %M')), periods=df_forcing_met.shape[0]) df_forcing_met = df_forcing_met.set_index(idx_dt) return df_forcing_met
def function[load_SUEWS_Forcing_met_df_pattern, parameter[path_input, forcingfile_met_pattern]]: constant[Short summary. Parameters ---------- forcingfile_met_pattern : type Description of parameter `forcingfile_met_pattern`. Returns ------- type Description of returned object. ] variable[path_input] assign[=] call[name[path_input].resolve, parameter[]] variable[list_file_MetForcing] assign[=] call[name[sorted], parameter[<ast.ListComp object at 0x7da18f723070>]] variable[dd_forcing_met] assign[=] call[name[dd].read_csv, parameter[name[list_file_MetForcing]]] variable[df_forcing_met] assign[=] call[name[dd_forcing_met].compute, parameter[]] variable[df_forcing_met] assign[=] call[name[df_forcing_met].drop_duplicates, parameter[]] variable[col_suews_met_forcing] assign[=] list[[<ast.Constant object at 0x7da18bc73010>, <ast.Constant object at 0x7da18bc711e0>, <ast.Constant object at 0x7da18bc71270>, <ast.Constant object at 0x7da18bc70fd0>, <ast.Constant object at 0x7da18bc72110>, <ast.Constant object at 0x7da18bc71210>, <ast.Constant object at 0x7da18bc73a00>, <ast.Constant object at 0x7da18bc710c0>, <ast.Constant object at 0x7da18bc73670>, <ast.Constant object at 0x7da18bc718d0>, <ast.Constant object at 0x7da1b0f60310>, <ast.Constant object at 0x7da1b0f603a0>, <ast.Constant object at 0x7da1b0f60df0>, <ast.Constant object at 0x7da1b0f61090>, <ast.Constant object at 0x7da1b0f60100>, <ast.Constant object at 0x7da1b0f62bc0>, <ast.Constant object at 0x7da1b0f60c10>, <ast.Constant object at 0x7da1b0f61510>, <ast.Constant object at 0x7da1b0f606d0>, <ast.Constant object at 0x7da1b0f61660>, <ast.Constant object at 0x7da1b0f60c40>, <ast.Constant object at 0x7da1b0f602b0>, <ast.Constant object at 0x7da1b0f60160>, <ast.Constant object at 0x7da1b0f609d0>]] name[df_forcing_met].columns assign[=] name[col_suews_met_forcing] <ast.AugAssign object at 0x7da1b0f61600> call[name[df_forcing_met]][constant[isec]] assign[=] constant[0] call[name[df_forcing_met]][list[[<ast.Constant object 
at 0x7da1b0f18eb0>, <ast.Constant object at 0x7da1b0f19e70>, <ast.Constant object at 0x7da1b0f185e0>, <ast.Constant object at 0x7da1b0f1afe0>, <ast.Constant object at 0x7da1b0f1b700>]]] assign[=] call[call[name[df_forcing_met]][list[[<ast.Constant object at 0x7da1b0f19360>, <ast.Constant object at 0x7da1b0f1a950>, <ast.Constant object at 0x7da1b0f183d0>, <ast.Constant object at 0x7da1b0f1abc0>, <ast.Constant object at 0x7da1b0f19780>]]].astype, parameter[name[np].int64]] variable[idx_dt] assign[=] call[name[pd].date_range, parameter[<ast.Starred object at 0x7da1b0f1bb50>]] variable[df_forcing_met] assign[=] call[name[df_forcing_met].set_index, parameter[name[idx_dt]]] return[name[df_forcing_met]]
keyword[def] identifier[load_SUEWS_Forcing_met_df_pattern] ( identifier[path_input] , identifier[forcingfile_met_pattern] ): literal[string] identifier[path_input] = identifier[path_input] . identifier[resolve] () identifier[list_file_MetForcing] = identifier[sorted] ([ identifier[f] keyword[for] identifier[f] keyword[in] identifier[path_input] . identifier[glob] ( identifier[forcingfile_met_pattern] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[f] . identifier[name] ]) identifier[dd_forcing_met] = identifier[dd] . identifier[read_csv] ( identifier[list_file_MetForcing] , identifier[delim_whitespace] = keyword[True] , identifier[comment] = literal[string] , identifier[error_bad_lines] = keyword[True] ) identifier[df_forcing_met] = identifier[dd_forcing_met] . identifier[compute] () identifier[df_forcing_met] = identifier[df_forcing_met] . identifier[drop_duplicates] () identifier[col_suews_met_forcing] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[df_forcing_met] . identifier[columns] = identifier[col_suews_met_forcing] identifier[df_forcing_met] [ literal[string] ]*= literal[int] identifier[df_forcing_met] [ literal[string] ]= literal[int] identifier[df_forcing_met] [[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]]= identifier[df_forcing_met] [[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]]. identifier[astype] ( identifier[np] . identifier[int64] ) identifier[idx_dt] = identifier[pd] . identifier[date_range] ( * identifier[df_forcing_met] . 
identifier[iloc] [[ literal[int] ,- literal[int] ],: literal[int] ]. identifier[astype] ( identifier[int] ). identifier[astype] ( identifier[str] ). identifier[apply] ( keyword[lambda] identifier[ser] : identifier[ser] . identifier[str] . identifier[cat] ( identifier[sep] = literal[string] ), identifier[axis] = literal[int] ). identifier[map] ( keyword[lambda] identifier[dt] : identifier[pd] . identifier[Timestamp] . identifier[strptime] ( identifier[dt] , literal[string] )), identifier[periods] = identifier[df_forcing_met] . identifier[shape] [ literal[int] ]) identifier[df_forcing_met] = identifier[df_forcing_met] . identifier[set_index] ( identifier[idx_dt] ) keyword[return] identifier[df_forcing_met]
def load_SUEWS_Forcing_met_df_pattern(path_input, forcingfile_met_pattern): """Short summary. Parameters ---------- forcingfile_met_pattern : type Description of parameter `forcingfile_met_pattern`. Returns ------- type Description of returned object. """ # list of met forcing files path_input = path_input.resolve() # forcingfile_met_pattern = os.path.abspath(forcingfile_met_pattern) list_file_MetForcing = sorted([f for f in path_input.glob(forcingfile_met_pattern) if 'ESTM' not in f.name]) # print(forcingfile_met_pattern) # print(list_file_MetForcing) # load raw data # read in forcing with dask.dataframe in parallel dd_forcing_met = dd.read_csv(list_file_MetForcing, delim_whitespace=True, comment='!', error_bad_lines=True) # convert to normal pandas dataframe df_forcing_met = dd_forcing_met.compute() # `drop_duplicates` in case some duplicates mixed df_forcing_met = df_forcing_met.drop_duplicates() col_suews_met_forcing = ['iy', 'id', 'it', 'imin', 'qn', 'qh', 'qe', 'qs', 'qf', 'U', 'RH', 'Tair', 'pres', 'rain', 'kdown', 'snow', 'ldown', 'fcld', 'Wuh', 'xsmd', 'lai', 'kdiff', 'kdir', 'wdir'] # rename these columns to match variables via the driver interface df_forcing_met.columns = col_suews_met_forcing # convert unit from kPa to hPa df_forcing_met['pres'] *= 10 # add `isec` for WRF-SUEWS interface df_forcing_met['isec'] = 0 # set correct data types df_forcing_met[['iy', 'id', 'it', 'imin', 'isec']] = df_forcing_met[['iy', 'id', 'it', 'imin', 'isec']].astype(np.int64) # set timestamp as index idx_dt = pd.date_range(*df_forcing_met.iloc[[0, -1], :4].astype(int).astype(str).apply(lambda ser: ser.str.cat(sep=' '), axis=1).map(lambda dt: pd.Timestamp.strptime(dt, '%Y %j %H %M')), periods=df_forcing_met.shape[0]) df_forcing_met = df_forcing_met.set_index(idx_dt) return df_forcing_met
def _get_coordinates_for_dataset_keys(self, dsids): """Get all coordinates.""" coordinates = {} for dsid in dsids: cids = self._get_coordinates_for_dataset_key(dsid) coordinates.setdefault(dsid, []).extend(cids) return coordinates
def function[_get_coordinates_for_dataset_keys, parameter[self, dsids]]: constant[Get all coordinates.] variable[coordinates] assign[=] dictionary[[], []] for taget[name[dsid]] in starred[name[dsids]] begin[:] variable[cids] assign[=] call[name[self]._get_coordinates_for_dataset_key, parameter[name[dsid]]] call[call[name[coordinates].setdefault, parameter[name[dsid], list[[]]]].extend, parameter[name[cids]]] return[name[coordinates]]
keyword[def] identifier[_get_coordinates_for_dataset_keys] ( identifier[self] , identifier[dsids] ): literal[string] identifier[coordinates] ={} keyword[for] identifier[dsid] keyword[in] identifier[dsids] : identifier[cids] = identifier[self] . identifier[_get_coordinates_for_dataset_key] ( identifier[dsid] ) identifier[coordinates] . identifier[setdefault] ( identifier[dsid] ,[]). identifier[extend] ( identifier[cids] ) keyword[return] identifier[coordinates]
def _get_coordinates_for_dataset_keys(self, dsids): """Get all coordinates.""" coordinates = {} for dsid in dsids: cids = self._get_coordinates_for_dataset_key(dsid) coordinates.setdefault(dsid, []).extend(cids) # depends on [control=['for'], data=['dsid']] return coordinates
def connect(self, **kwargs):
    """ Connect to the Redis Server

    :param kwargs: Parameters passed directly to redis library
    :return: True on success; raises on failure (never returns False)
    :raises QueueNotConnectedError: if the server cannot be reached

    :kwarg host: Hostname of the Redis server
    :kwarg port: Port of the Redis server
    :kwarg password: Auth key for the Redis server
    """
    self.__db = redis.Redis(**kwargs)

    try:
        # INFO round-trips to the server, proving the connection works.
        self.__db.info()
        self.connected = True
    except redis.ConnectionError as e:
        # BUG fix: the original passed `e` as a lazy %-style logging
        # argument with no placeholder in the format string, so the
        # exception text never appeared in the log.
        self.logger.error("Failed to connect to Redis server: %s", e)
        raise QueueNotConnectedError(e)

    return True
def function[connect, parameter[self]]: constant[ Connect to the Redis Server :param kwargs: Parameters passed directly to redis library :return: Boolean indicating if connection successful :kwarg host: Hostname of the Redis server :kwarg port: Port of the Redis server :kwarg password: Auth key for the Redis server ] name[self].__db assign[=] call[name[redis].Redis, parameter[]] <ast.Try object at 0x7da18fe924d0> return[constant[True]]
keyword[def] identifier[connect] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[__db] = identifier[redis] . identifier[Redis] (** identifier[kwargs] ) keyword[try] : identifier[self] . identifier[__db] . identifier[info] () identifier[self] . identifier[connected] = keyword[True] keyword[except] identifier[redis] . identifier[ConnectionError] keyword[as] identifier[e] : identifier[self] . identifier[logger] . identifier[error] ( literal[string] , identifier[e] ) keyword[raise] identifier[QueueNotConnectedError] ( identifier[e] ) keyword[return] keyword[True]
def connect(self, **kwargs): """ Connect to the Redis Server :param kwargs: Parameters passed directly to redis library :return: Boolean indicating if connection successful :kwarg host: Hostname of the Redis server :kwarg port: Port of the Redis server :kwarg password: Auth key for the Redis server """ self.__db = redis.Redis(**kwargs) try: self.__db.info() self.connected = True # depends on [control=['try'], data=[]] except redis.ConnectionError as e: self.logger.error('Failed to connect to Redis server: ', e) raise QueueNotConnectedError(e) # depends on [control=['except'], data=['e']] return True
def xypix_to_ipix(self, xypix, colwise=False):
    """Return the flattened pixel indices from an array
    multi-dimensional pixel indices.

    Parameters
    ----------
    xypix : list
        List of pixel indices in the order (LON,LAT,ENERGY).

    colwise : bool
        Use column-wise (Fortran-order) pixel indexing.
    """
    order = 'F' if colwise else 'C'
    return np.ravel_multi_index(xypix, self.npix, order=order,
                                mode='raise')
def function[xypix_to_ipix, parameter[self, xypix, colwise]]: constant[Return the flattened pixel indices from an array multi-dimensional pixel indices. Parameters ---------- xypix : list List of pixel indices in the order (LON,LAT,ENERGY). colwise : bool Use column-wise pixel indexing. ] return[call[name[np].ravel_multi_index, parameter[name[xypix], name[self].npix]]]
keyword[def] identifier[xypix_to_ipix] ( identifier[self] , identifier[xypix] , identifier[colwise] = keyword[False] ): literal[string] keyword[return] identifier[np] . identifier[ravel_multi_index] ( identifier[xypix] , identifier[self] . identifier[npix] , identifier[order] = literal[string] keyword[if] identifier[colwise] keyword[else] literal[string] , identifier[mode] = literal[string] )
def xypix_to_ipix(self, xypix, colwise=False): """Return the flattened pixel indices from an array multi-dimensional pixel indices. Parameters ---------- xypix : list List of pixel indices in the order (LON,LAT,ENERGY). colwise : bool Use column-wise pixel indexing. """ return np.ravel_multi_index(xypix, self.npix, order='F' if colwise else 'C', mode='raise')
def unpack_numeric(self, data, cimtype):
    """
    Unpack a string value of a numeric CIM type and return its CIM data
    type object, or None.

    data (unicode string): CIM-XML string value, or None (in which case
      None is returned).

    cimtype (string): CIM data type name (e.g. 'uint8'), or None (in which
      case the value is returned as a Python int/long or float).

    Raises CIMXMLParseError if the string is not a valid number or cannot
    be represented in the requested CIM type.
    """
    if data is None:
        return None

    # DSP0201 defines numeric values to be whitespace-tolerant
    data = data.strip()

    # Decode the CIM-XML string representation into a Python number
    #
    # Some notes:
    # * For integer numbers, only decimal and hexadecimal strings are
    #   allowed - no binary or octal.
    # * In Python 2, int() automatically returns a long, if needed.
    # * For real values, DSP0201 defines a subset of the syntax supported
    #   by Python float(), including the special states Inf, -Inf, NaN. The
    #   only known difference is that DSP0201 requires a digit after the
    #   decimal dot, while Python does not.
    if CIMXML_HEX_PATTERN.match(data):
        value = int(data, 16)
    else:
        try:
            value = int(data)
        except ValueError:
            try:
                value = float(data)
            except ValueError:
                raise CIMXMLParseError(
                    _format("Invalid numeric value {0!A}", data),
                    conn_id=self.conn_id)

    # Convert the Python number into a CIM data type
    if cimtype is None:
        return value  # int/long or float (used for keybindings)

    # The caller ensured a numeric type for cimtype
    CIMType = type_from_name(cimtype)
    try:
        value = CIMType(value)
    except ValueError as exc:
        # BUG fix: the original passed `exc` where the format string
        # expects the offending value, so the message showed the
        # exception text instead of the value being converted.
        raise CIMXMLParseError(
            _format("Cannot convert value {0!A} to numeric CIM type {1}",
                    value, CIMType),
            conn_id=self.conn_id)

    return value
def function[unpack_numeric, parameter[self, data, cimtype]]: constant[ Unpack a string value of a numeric CIM type and return its CIM data type object, or None. data (unicode string): CIM-XML string value, or None (in which case None is returned). cimtype (string): CIM data type name (e.g. 'uint8'), or None (in which case the value is returned as a Python int/long or float). ] if compare[name[data] is constant[None]] begin[:] return[constant[None]] variable[data] assign[=] call[name[data].strip, parameter[]] if call[name[CIMXML_HEX_PATTERN].match, parameter[name[data]]] begin[:] variable[value] assign[=] call[name[int], parameter[name[data], constant[16]]] if compare[name[cimtype] is constant[None]] begin[:] return[name[value]] variable[CIMType] assign[=] call[name[type_from_name], parameter[name[cimtype]]] <ast.Try object at 0x7da204347640> return[name[value]]
keyword[def] identifier[unpack_numeric] ( identifier[self] , identifier[data] , identifier[cimtype] ): literal[string] keyword[if] identifier[data] keyword[is] keyword[None] : keyword[return] keyword[None] identifier[data] = identifier[data] . identifier[strip] () keyword[if] identifier[CIMXML_HEX_PATTERN] . identifier[match] ( identifier[data] ): identifier[value] = identifier[int] ( identifier[data] , literal[int] ) keyword[else] : keyword[try] : identifier[value] = identifier[int] ( identifier[data] ) keyword[except] identifier[ValueError] : keyword[try] : identifier[value] = identifier[float] ( identifier[data] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[CIMXMLParseError] ( identifier[_format] ( literal[string] , identifier[data] ), identifier[conn_id] = identifier[self] . identifier[conn_id] ) keyword[if] identifier[cimtype] keyword[is] keyword[None] : keyword[return] identifier[value] identifier[CIMType] = identifier[type_from_name] ( identifier[cimtype] ) keyword[try] : identifier[value] = identifier[CIMType] ( identifier[value] ) keyword[except] identifier[ValueError] keyword[as] identifier[exc] : keyword[raise] identifier[CIMXMLParseError] ( identifier[_format] ( literal[string] , identifier[exc] , identifier[CIMType] ), identifier[conn_id] = identifier[self] . identifier[conn_id] ) keyword[return] identifier[value]
def unpack_numeric(self, data, cimtype): """ Unpack a string value of a numeric CIM type and return its CIM data type object, or None. data (unicode string): CIM-XML string value, or None (in which case None is returned). cimtype (string): CIM data type name (e.g. 'uint8'), or None (in which case the value is returned as a Python int/long or float). """ if data is None: return None # depends on [control=['if'], data=[]] # DSP0201 defines numeric values to be whitespace-tolerant data = data.strip() # Decode the CIM-XML string representation into a Python number # # Some notes: # * For integer numbers, only decimal and hexadecimal strings are # allowed - no binary or octal. # * In Python 2, int() automatically returns a long, if needed. # * For real values, DSP0201 defines a subset of the syntax supported # by Python float(), including the special states Inf, -Inf, NaN. The # only known difference is that DSP0201 requires a digit after the # decimal dot, while Python does not. if CIMXML_HEX_PATTERN.match(data): value = int(data, 16) # depends on [control=['if'], data=[]] else: try: value = int(data) # depends on [control=['try'], data=[]] except ValueError: try: value = float(data) # depends on [control=['try'], data=[]] except ValueError: raise CIMXMLParseError(_format('Invalid numeric value {0!A}', data), conn_id=self.conn_id) # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # Convert the Python number into a CIM data type if cimtype is None: return value # int/long or float (used for keybindings) # depends on [control=['if'], data=[]] # The caller ensured a numeric type for cimtype CIMType = type_from_name(cimtype) try: value = CIMType(value) # depends on [control=['try'], data=[]] except ValueError as exc: raise CIMXMLParseError(_format('Cannot convert value {0!A} to numeric CIM type {1}', exc, CIMType), conn_id=self.conn_id) # depends on [control=['except'], data=['exc']] return value
def tapered_gutenberg_richter_pdf(moment, moment_threshold, beta,
                                  corner_moment):
    '''
    Tapered Gutenberg-Richter Probability Density Function

    :param float or numpy.ndarray moment:
        Moment for calculation of rate
    :param float or numpy.ndarray moment_threshold:
        Threshold Moment of the distribution (moment rate essentially!)
    :param float beta:
        Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function
    :param float corner_moment:
        Corner moment of the Tapered Gutenberg-Richter Function
    :returns:
        Absolute probability of moment release > moment
    '''
    # PDF = hazard-rate term multiplied by the (complementary) CDF
    hazard_term = beta / moment + 1. / corner_moment
    survival = tapered_gutenberg_richter_cdf(moment, moment_threshold,
                                             beta, corner_moment)
    return hazard_term * survival
def function[tapered_gutenberg_richter_pdf, parameter[moment, moment_threshold, beta, corner_moment]]: constant[ Tapered Gutenberg-Richter Probability Density Function :param float or numpy.ndarray moment: Moment for calculation of rate :param float or numpy.ndarray moment_threshold: Threshold Moment of the distribution (moment rate essentially!) :param float beta: Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function :param float corner_momnet: Corner moment of the Tapered Gutenberg-Richter Function :returns: Absolute probability of moment release > moment ] return[binary_operation[binary_operation[binary_operation[name[beta] / name[moment]] + binary_operation[constant[1.0] / name[corner_moment]]] * call[name[tapered_gutenberg_richter_cdf], parameter[name[moment], name[moment_threshold], name[beta], name[corner_moment]]]]]
keyword[def] identifier[tapered_gutenberg_richter_pdf] ( identifier[moment] , identifier[moment_threshold] , identifier[beta] , identifier[corner_moment] ): literal[string] keyword[return] (( identifier[beta] / identifier[moment] + literal[int] / identifier[corner_moment] )* identifier[tapered_gutenberg_richter_cdf] ( identifier[moment] , identifier[moment_threshold] , identifier[beta] , identifier[corner_moment] ))
def tapered_gutenberg_richter_pdf(moment, moment_threshold, beta, corner_moment): """ Tapered Gutenberg-Richter Probability Density Function :param float or numpy.ndarray moment: Moment for calculation of rate :param float or numpy.ndarray moment_threshold: Threshold Moment of the distribution (moment rate essentially!) :param float beta: Beta value (b * ln(10.)) of the Tapered Gutenberg-Richter Function :param float corner_momnet: Corner moment of the Tapered Gutenberg-Richter Function :returns: Absolute probability of moment release > moment """ return (beta / moment + 1.0 / corner_moment) * tapered_gutenberg_richter_cdf(moment, moment_threshold, beta, corner_moment)
def ui_search_image(self, value):
    """
    Setter for **self.__ui_search_image** attribute.

    Validates that *value* is a ``unicode`` string naming an existing
    file before storing it; ``None`` is accepted unchecked (clears the
    attribute).

    :param value: Attribute value (path to the search image file, or None).
    :type value: unicode
    """
    if value is not None:
        # NOTE(review): `unicode` only exists on Python 2 -- presumably this
        # module targets Python 2; confirm before porting.  Also note that
        # `assert`-based validation is stripped under `python -O`.
        assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format(
            "ui_search_image", value)
        assert os.path.exists(value), "'{0}' attribute: '{1}' file doesn't exists!".format(
            "ui_search_image", value)
    self.__ui_search_image = value
def function[ui_search_image, parameter[self, value]]: constant[ Setter for **self.__ui_search_image** attribute. :param value: Attribute value. :type value: unicode ] if compare[name[value] is_not constant[None]] begin[:] assert[compare[call[name[type], parameter[name[value]]] is name[unicode]]] assert[call[name[os].path.exists, parameter[name[value]]]] name[self].__ui_search_image assign[=] name[value]
keyword[def] identifier[ui_search_image] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[unicode] , literal[string] . identifier[format] ( literal[string] , identifier[value] ) keyword[assert] identifier[os] . identifier[path] . identifier[exists] ( identifier[value] ), literal[string] . identifier[format] ( literal[string] , identifier[value] ) identifier[self] . identifier[__ui_search_image] = identifier[value]
def ui_search_image(self, value): """ Setter for **self.__ui_search_image** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format('ui_search_image', value) assert os.path.exists(value), "'{0}' attribute: '{1}' file doesn't exists!".format('ui_search_image', value) # depends on [control=['if'], data=['value']] self.__ui_search_image = value
def _iter_cache(cachefile, gpstype=LIGOTimeGPS):
    """Internal method that yields a `_CacheEntry` for each line in the file

    This method supports reading LAL- and (nested) FFL-format cache files.

    Parameters
    ----------
    cachefile : file object
        Open cache file to parse; its ``.name`` (if any) is used to avoid
        recursing into itself.
    gpstype : type
        GPS time type handed to `_CacheEntry.parse` for each entry.
    """
    try:
        path = os.path.abspath(cachefile.name)
    except AttributeError:
        path = None
    for line in cachefile:
        try:
            # BUG fix: the original ignored the `gpstype` parameter and
            # always passed LIGOTimeGPS here (and dropped it entirely in
            # the recursive call below).
            yield _CacheEntry.parse(line, gpstype=gpstype)
        except ValueError:
            # virgo FFL format (seemingly) supports nested FFL files;
            # the path comparison guards against infinite self-recursion
            parts = line.split()
            if len(parts) == 3 and os.path.abspath(parts[0]) != path:
                with open(parts[0], 'r') as cache2:
                    for entry in _iter_cache(cache2, gpstype=gpstype):
                        yield entry
            else:
                raise
def function[_iter_cache, parameter[cachefile, gpstype]]: constant[Internal method that yields a `_CacheEntry` for each line in the file This method supports reading LAL- and (nested) FFL-format cache files. ] <ast.Try object at 0x7da1b06082e0> for taget[name[line]] in starred[name[cachefile]] begin[:] <ast.Try object at 0x7da1b060a9b0>
keyword[def] identifier[_iter_cache] ( identifier[cachefile] , identifier[gpstype] = identifier[LIGOTimeGPS] ): literal[string] keyword[try] : identifier[path] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[cachefile] . identifier[name] ) keyword[except] identifier[AttributeError] : identifier[path] = keyword[None] keyword[for] identifier[line] keyword[in] identifier[cachefile] : keyword[try] : keyword[yield] identifier[_CacheEntry] . identifier[parse] ( identifier[line] , identifier[gpstype] = identifier[LIGOTimeGPS] ) keyword[except] identifier[ValueError] : identifier[parts] = identifier[line] . identifier[split] () keyword[if] identifier[len] ( identifier[parts] )== literal[int] keyword[and] identifier[os] . identifier[path] . identifier[abspath] ( identifier[parts] [ literal[int] ])!= identifier[path] : keyword[with] identifier[open] ( identifier[parts] [ literal[int] ], literal[string] ) keyword[as] identifier[cache2] : keyword[for] identifier[entry] keyword[in] identifier[_iter_cache] ( identifier[cache2] ): keyword[yield] identifier[entry] keyword[else] : keyword[raise]
def _iter_cache(cachefile, gpstype=LIGOTimeGPS): """Internal method that yields a `_CacheEntry` for each line in the file This method supports reading LAL- and (nested) FFL-format cache files. """ try: path = os.path.abspath(cachefile.name) # depends on [control=['try'], data=[]] except AttributeError: path = None # depends on [control=['except'], data=[]] for line in cachefile: try: yield _CacheEntry.parse(line, gpstype=LIGOTimeGPS) # depends on [control=['try'], data=[]] except ValueError: # virgo FFL format (seemingly) supports nested FFL files parts = line.split() if len(parts) == 3 and os.path.abspath(parts[0]) != path: with open(parts[0], 'r') as cache2: for entry in _iter_cache(cache2): yield entry # depends on [control=['for'], data=['entry']] # depends on [control=['with'], data=['cache2']] # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['line']]
def connect(
        self, login, password, authz_id=b"", starttls=False, authmech=None):
    """Establish a connection with the server.

    This function must be used. It read the server capabilities and
    wraps calls to STARTTLS and AUTHENTICATE commands.

    :param login: username
    :param password: clear password
    :param authz_id: authorization identity (defaults to empty)
    :param starttls: use a TLS connection or not
    :param authmech: prefered authenticate mechanism
    :rtype: boolean
    :raises Error: if the TCP connection or capability read fails
    """
    try:
        self.sock = socket.create_connection((self.srvaddr, self.srvport))
        self.sock.settimeout(Client.read_timeout)
    except socket.error as msg:
        raise Error("Connection to server failed: %s" % str(msg))

    if not self.__get_capabilities():
        # BUG fix: close the socket before raising so a failed
        # handshake does not leak the open connection.
        self.sock.close()
        raise Error("Failed to read capabilities from server")

    if starttls and not self.__starttls():
        return False

    # The authentication result is the connection result.
    return self.__authenticate(login, password, authz_id, authmech)
def function[connect, parameter[self, login, password, authz_id, starttls, authmech]]: constant[Establish a connection with the server. This function must be used. It read the server capabilities and wraps calls to STARTTLS and AUTHENTICATE commands. :param login: username :param password: clear password :param starttls: use a TLS connection or not :param authmech: prefered authenticate mechanism :rtype: boolean ] <ast.Try object at 0x7da18f00d4b0> if <ast.UnaryOp object at 0x7da2041dbd30> begin[:] <ast.Raise object at 0x7da2041dbdf0> if <ast.BoolOp object at 0x7da2041d8c10> begin[:] return[constant[False]] if call[name[self].__authenticate, parameter[name[login], name[password], name[authz_id], name[authmech]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[connect] ( identifier[self] , identifier[login] , identifier[password] , identifier[authz_id] = literal[string] , identifier[starttls] = keyword[False] , identifier[authmech] = keyword[None] ): literal[string] keyword[try] : identifier[self] . identifier[sock] = identifier[socket] . identifier[create_connection] (( identifier[self] . identifier[srvaddr] , identifier[self] . identifier[srvport] )) identifier[self] . identifier[sock] . identifier[settimeout] ( identifier[Client] . identifier[read_timeout] ) keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[msg] : keyword[raise] identifier[Error] ( literal[string] % identifier[str] ( identifier[msg] )) keyword[if] keyword[not] identifier[self] . identifier[__get_capabilities] (): keyword[raise] identifier[Error] ( literal[string] ) keyword[if] identifier[starttls] keyword[and] keyword[not] identifier[self] . identifier[__starttls] (): keyword[return] keyword[False] keyword[if] identifier[self] . identifier[__authenticate] ( identifier[login] , identifier[password] , identifier[authz_id] , identifier[authmech] ): keyword[return] keyword[True] keyword[return] keyword[False]
def connect(self, login, password, authz_id=b'', starttls=False, authmech=None): """Establish a connection with the server. This function must be used. It read the server capabilities and wraps calls to STARTTLS and AUTHENTICATE commands. :param login: username :param password: clear password :param starttls: use a TLS connection or not :param authmech: prefered authenticate mechanism :rtype: boolean """ try: self.sock = socket.create_connection((self.srvaddr, self.srvport)) self.sock.settimeout(Client.read_timeout) # depends on [control=['try'], data=[]] except socket.error as msg: raise Error('Connection to server failed: %s' % str(msg)) # depends on [control=['except'], data=['msg']] if not self.__get_capabilities(): raise Error('Failed to read capabilities from server') # depends on [control=['if'], data=[]] if starttls and (not self.__starttls()): return False # depends on [control=['if'], data=[]] if self.__authenticate(login, password, authz_id, authmech): return True # depends on [control=['if'], data=[]] return False
def close(self):
    """
    Shut the pool down by sending the special
    :class:`_close_pool_message` sentinel to every worker.  Only the
    master process performs the sends; workers do nothing.
    """
    if not self.is_master():
        return
    # Workers live on ranks 1..size (rank 0 is the master).
    for worker in range(1, self.size + 1):
        self.comm.isend(_close_pool_message(), dest=worker)
def function[close, parameter[self]]: constant[ Just send a message off to all the pool members which contains the special :class:`_close_pool_message` sentinel. ] if call[name[self].is_master, parameter[]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[self].size]]] begin[:] call[name[self].comm.isend, parameter[call[name[_close_pool_message], parameter[]]]]
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[is_master] (): keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[size] ): identifier[self] . identifier[comm] . identifier[isend] ( identifier[_close_pool_message] (), identifier[dest] = identifier[i] + literal[int] )
def close(self): """ Just send a message off to all the pool members which contains the special :class:`_close_pool_message` sentinel. """ if self.is_master(): for i in range(self.size): self.comm.isend(_close_pool_message(), dest=i + 1) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]]
def insert(self):
    """Persist ``self.fields`` via the interface and return its result.

    Resets ``self.default_val`` to 0 before delegating the actual insert
    of ``self.fields`` into ``self.schema`` to ``self.interface``.
    """
    self.default_val = 0
    # Cleanup: the original had a second, unreachable `return` and
    # several blocks of commented-out dead code after this line.
    return self.interface.insert(self.schema, self.fields)
def function[insert, parameter[self]]: constant[persist the .fields] name[self].default_val assign[=] constant[0] return[call[name[self].interface.insert, parameter[name[self].schema, name[self].fields]]] return[call[name[self].interface.insert, parameter[name[self].schema, name[self].fields]]]
keyword[def] identifier[insert] ( identifier[self] ): literal[string] identifier[self] . identifier[default_val] = literal[int] keyword[return] identifier[self] . identifier[interface] . identifier[insert] ( identifier[self] . identifier[schema] , identifier[self] . identifier[fields] ) keyword[return] identifier[self] . identifier[interface] . identifier[insert] ( identifier[self] . identifier[schema] , identifier[self] . identifier[fields] )
def insert(self): """persist the .fields""" self.default_val = 0 #fields = self.fields #fields = self.orm_class.depart(self.fields, is_update=False) #self.set_fields(fields) return self.interface.insert(self.schema, self.fields) return self.interface.insert(self.schema, self.fields)
def addLNT(LocalName, phenoId, predicate, g=None):  # XXX deprecated
    """Register a local name for the phenotype built from *phenoId* and
    *predicate*.

    When *g* is omitted, the caller's frame locals are used as the target
    namespace after verifying (via ``checkCalledInside``) that the call
    happens inside a ``LocalNameManager`` body.
    """
    if g is None:
        frames = inspect.stack(0)  # NOTE: walking the stack is expensive
        checkCalledInside('LocalNameManager', frames)
        g = frames[1][0].f_locals  # namespace of the calling scope
    phenotype = Phenotype(phenoId, predicate)
    addLN(LocalName, phenotype, g)
def function[addLNT, parameter[LocalName, phenoId, predicate, g]]: constant[ Add a local name for a phenotype from a pair of identifiers ] if compare[name[g] is constant[None]] begin[:] variable[s] assign[=] call[name[inspect].stack, parameter[constant[0]]] call[name[checkCalledInside], parameter[constant[LocalNameManager], name[s]]] variable[g] assign[=] call[call[name[s]][constant[1]]][constant[0]].f_locals call[name[addLN], parameter[name[LocalName], call[name[Phenotype], parameter[name[phenoId], name[predicate]]], name[g]]]
keyword[def] identifier[addLNT] ( identifier[LocalName] , identifier[phenoId] , identifier[predicate] , identifier[g] = keyword[None] ): literal[string] keyword[if] identifier[g] keyword[is] keyword[None] : identifier[s] = identifier[inspect] . identifier[stack] ( literal[int] ) identifier[checkCalledInside] ( literal[string] , identifier[s] ) identifier[g] = identifier[s] [ literal[int] ][ literal[int] ]. identifier[f_locals] identifier[addLN] ( identifier[LocalName] , identifier[Phenotype] ( identifier[phenoId] , identifier[predicate] ), identifier[g] )
def addLNT(LocalName, phenoId, predicate, g=None): # XXX deprecated ' Add a local name for a phenotype from a pair of identifiers ' if g is None: s = inspect.stack(0) # horribly inefficient checkCalledInside('LocalNameManager', s) g = s[1][0].f_locals # get globals of calling scope # depends on [control=['if'], data=['g']] addLN(LocalName, Phenotype(phenoId, predicate), g)
def bands(self, telescope):
    """Return a list of bands associated with the specified telescope.

    Unknown telescopes yield an empty list; known ones yield a fresh
    list copy of the recorded bands.
    """
    seen = self._seen_bands.get(telescope)
    return [] if seen is None else list(seen)
def function[bands, parameter[self, telescope]]: constant[Return a list of bands associated with the specified telescope.] variable[q] assign[=] call[name[self]._seen_bands.get, parameter[name[telescope]]] if compare[name[q] is constant[None]] begin[:] return[list[[]]] return[call[name[list], parameter[name[q]]]]
keyword[def] identifier[bands] ( identifier[self] , identifier[telescope] ): literal[string] identifier[q] = identifier[self] . identifier[_seen_bands] . identifier[get] ( identifier[telescope] ) keyword[if] identifier[q] keyword[is] keyword[None] : keyword[return] [] keyword[return] identifier[list] ( identifier[q] )
def bands(self, telescope): """Return a list of bands associated with the specified telescope.""" q = self._seen_bands.get(telescope) if q is None: return [] # depends on [control=['if'], data=[]] return list(q)
def add_team_repo(repo_name, team_name, profile="github", permission=None):
    '''
    Adds a repository to a team with team_name.

    repo_name
        The name of the repository to add.

    team_name
        The name of the team of which to add the repository.

    profile
        The name of the profile configuration to use. Defaults to ``github``.

    permission
        The permission for team members within the repository, can be 'pull',
        'push' or 'admin'. If not specified, the default permission specified
        on the team will be used.

        .. versionadded:: 2017.7.0

    CLI Example:

    .. code-block:: bash

        salt myminion github.add_team_repo 'my_repo' 'team_name'

    .. versionadded:: 2016.11.0
    '''
    team = get_team(team_name, profile=profile)
    if not team:
        log.error('Team %s does not exist', team_name)
        return False

    # Capture the id now: inside the try block ``team`` is rebound to a
    # PyGithub Team object, so if get_repo() raises, ``team['id']`` in the
    # handler would itself raise TypeError instead of logging cleanly.
    team_id = team['id']
    try:
        client = _get_client(profile)
        organization = client.get_organization(
            _get_config_value(profile, 'org_name')
        )
        team = organization.get_team(team_id)
        repo = organization.get_repo(repo_name)
    except UnknownObjectException:
        log.exception('Resource not found: %s', team_id)
        return False

    params = None
    if permission is not None:
        params = {'permission': permission}

    # PyGithub (at this API level) exposes no public helper that sets the
    # per-repo permission while adding, so issue the PUT directly through
    # the team's requester.
    headers, data = team._requester.requestJsonAndCheck(
        "PUT",
        team.url + "/repos/" + repo._identity,
        input=params
    )

    # Try to refresh cache
    list_team_repos(team_name, profile=profile, ignore_cache=True)
    return True
def function[add_team_repo, parameter[repo_name, team_name, profile, permission]]: constant[ Adds a repository to a team with team_name. repo_name The name of the repository to add. team_name The name of the team of which to add the repository. profile The name of the profile configuration to use. Defaults to ``github``. permission The permission for team members within the repository, can be 'pull', 'push' or 'admin'. If not specified, the default permission specified on the team will be used. .. versionadded:: 2017.7.0 CLI Example: .. code-block:: bash salt myminion github.add_team_repo 'my_repo' 'team_name' .. versionadded:: 2016.11.0 ] variable[team] assign[=] call[name[get_team], parameter[name[team_name]]] if <ast.UnaryOp object at 0x7da1b1c610c0> begin[:] call[name[log].error, parameter[constant[Team %s does not exist], name[team_name]]] return[constant[False]] <ast.Try object at 0x7da1b1c635e0> variable[params] assign[=] constant[None] if compare[name[permission] is_not constant[None]] begin[:] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c62710>], [<ast.Name object at 0x7da1b1c627a0>]] <ast.Tuple object at 0x7da1b1c62500> assign[=] call[name[team]._requester.requestJsonAndCheck, parameter[constant[PUT], binary_operation[binary_operation[name[team].url + constant[/repos/]] + name[repo]._identity]]] call[name[list_team_repos], parameter[name[team_name]]] return[constant[True]]
keyword[def] identifier[add_team_repo] ( identifier[repo_name] , identifier[team_name] , identifier[profile] = literal[string] , identifier[permission] = keyword[None] ): literal[string] identifier[team] = identifier[get_team] ( identifier[team_name] , identifier[profile] = identifier[profile] ) keyword[if] keyword[not] identifier[team] : identifier[log] . identifier[error] ( literal[string] , identifier[team_name] ) keyword[return] keyword[False] keyword[try] : identifier[client] = identifier[_get_client] ( identifier[profile] ) identifier[organization] = identifier[client] . identifier[get_organization] ( identifier[_get_config_value] ( identifier[profile] , literal[string] ) ) identifier[team] = identifier[organization] . identifier[get_team] ( identifier[team] [ literal[string] ]) identifier[repo] = identifier[organization] . identifier[get_repo] ( identifier[repo_name] ) keyword[except] identifier[UnknownObjectException] : identifier[log] . identifier[exception] ( literal[string] , identifier[team] [ literal[string] ]) keyword[return] keyword[False] identifier[params] = keyword[None] keyword[if] identifier[permission] keyword[is] keyword[not] keyword[None] : identifier[params] ={ literal[string] : identifier[permission] } identifier[headers] , identifier[data] = identifier[team] . identifier[_requester] . identifier[requestJsonAndCheck] ( literal[string] , identifier[team] . identifier[url] + literal[string] + identifier[repo] . identifier[_identity] , identifier[input] = identifier[params] ) identifier[list_team_repos] ( identifier[team_name] , identifier[profile] = identifier[profile] , identifier[ignore_cache] = keyword[True] ) keyword[return] keyword[True]
def add_team_repo(repo_name, team_name, profile='github', permission=None): """ Adds a repository to a team with team_name. repo_name The name of the repository to add. team_name The name of the team of which to add the repository. profile The name of the profile configuration to use. Defaults to ``github``. permission The permission for team members within the repository, can be 'pull', 'push' or 'admin'. If not specified, the default permission specified on the team will be used. .. versionadded:: 2017.7.0 CLI Example: .. code-block:: bash salt myminion github.add_team_repo 'my_repo' 'team_name' .. versionadded:: 2016.11.0 """ team = get_team(team_name, profile=profile) if not team: log.error('Team %s does not exist', team_name) return False # depends on [control=['if'], data=[]] try: client = _get_client(profile) organization = client.get_organization(_get_config_value(profile, 'org_name')) team = organization.get_team(team['id']) repo = organization.get_repo(repo_name) # depends on [control=['try'], data=[]] except UnknownObjectException: log.exception('Resource not found: %s', team['id']) return False # depends on [control=['except'], data=[]] params = None if permission is not None: params = {'permission': permission} # depends on [control=['if'], data=['permission']] (headers, data) = team._requester.requestJsonAndCheck('PUT', team.url + '/repos/' + repo._identity, input=params) # Try to refresh cache list_team_repos(team_name, profile=profile, ignore_cache=True) return True
def find_by_id(self, repoid):
    """Return the repo info for the row whose ``repoid`` matches.

    Returns ``None`` when no row in ``self.jsondata`` has the id.
    """
    return next(
        (self._infofromdict(entry)
         for entry in self.jsondata
         if entry["repoid"] == repoid),
        None,
    )
def function[find_by_id, parameter[self, repoid]]: constant[ Returns the repo with the specified <repoid> ] for taget[name[row]] in starred[name[self].jsondata] begin[:] if compare[name[repoid] equal[==] call[name[row]][constant[repoid]]] begin[:] return[call[name[self]._infofromdict, parameter[name[row]]]]
keyword[def] identifier[find_by_id] ( identifier[self] , identifier[repoid] ): literal[string] keyword[for] identifier[row] keyword[in] identifier[self] . identifier[jsondata] : keyword[if] identifier[repoid] == identifier[row] [ literal[string] ]: keyword[return] identifier[self] . identifier[_infofromdict] ( identifier[row] )
def find_by_id(self, repoid): """ Returns the repo with the specified <repoid> """ for row in self.jsondata: if repoid == row['repoid']: return self._infofromdict(row) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']]
def _self_pipe(self): """ This sets up a self-pipe so we can hand back an fd to the caller allowing the object to manage event triggers. The ends of the pipe are set non-blocking so it doesn't really matter if a bunch of events fill the pipe buffer. """ import fcntl self._poll_fd, self._poll_send = os.pipe() for fd in [self._poll_fd, self._poll_send]: fl = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK)
def function[_self_pipe, parameter[self]]: constant[ This sets up a self-pipe so we can hand back an fd to the caller allowing the object to manage event triggers. The ends of the pipe are set non-blocking so it doesn't really matter if a bunch of events fill the pipe buffer. ] import module[fcntl] <ast.Tuple object at 0x7da204961a50> assign[=] call[name[os].pipe, parameter[]] for taget[name[fd]] in starred[list[[<ast.Attribute object at 0x7da204960ee0>, <ast.Attribute object at 0x7da2049609a0>]]] begin[:] variable[fl] assign[=] call[name[fcntl].fcntl, parameter[name[fd], name[fcntl].F_GETFL]] call[name[fcntl].fcntl, parameter[name[fd], name[fcntl].F_SETFL, binary_operation[name[fl] <ast.BitOr object at 0x7da2590d6aa0> name[os].O_NONBLOCK]]]
keyword[def] identifier[_self_pipe] ( identifier[self] ): literal[string] keyword[import] identifier[fcntl] identifier[self] . identifier[_poll_fd] , identifier[self] . identifier[_poll_send] = identifier[os] . identifier[pipe] () keyword[for] identifier[fd] keyword[in] [ identifier[self] . identifier[_poll_fd] , identifier[self] . identifier[_poll_send] ]: identifier[fl] = identifier[fcntl] . identifier[fcntl] ( identifier[fd] , identifier[fcntl] . identifier[F_GETFL] ) identifier[fcntl] . identifier[fcntl] ( identifier[fd] , identifier[fcntl] . identifier[F_SETFL] , identifier[fl] | identifier[os] . identifier[O_NONBLOCK] )
def _self_pipe(self): """ This sets up a self-pipe so we can hand back an fd to the caller allowing the object to manage event triggers. The ends of the pipe are set non-blocking so it doesn't really matter if a bunch of events fill the pipe buffer. """ import fcntl (self._poll_fd, self._poll_send) = os.pipe() for fd in [self._poll_fd, self._poll_send]: fl = fcntl.fcntl(fd, fcntl.F_GETFL) fcntl.fcntl(fd, fcntl.F_SETFL, fl | os.O_NONBLOCK) # depends on [control=['for'], data=['fd']]
def evaluate(self, verbose=False, decode=True, passes=None, num_threads=1,
             apply_experimental=True):
    """Evaluate each lazy column and wrap the results in a new MultiIndex.

    See `LazyResult`.

    Returns
    -------
    MultiIndex
        MultiIndex with evaluated data.
    """
    def _materialize(lazy_value):
        # One-line helper: force a single lazy column with the shared options.
        return lazy_value.evaluate(verbose, decode, passes, num_threads,
                                   apply_experimental)

    return MultiIndex([_materialize(v) for v in self.values], self.names)
def function[evaluate, parameter[self, verbose, decode, passes, num_threads, apply_experimental]]: constant[Evaluates by creating a MultiIndex containing evaluated data and index. See `LazyResult` Returns ------- MultiIndex MultiIndex with evaluated data. ] variable[evaluated_data] assign[=] <ast.ListComp object at 0x7da1b0ada710> return[call[name[MultiIndex], parameter[name[evaluated_data], name[self].names]]]
keyword[def] identifier[evaluate] ( identifier[self] , identifier[verbose] = keyword[False] , identifier[decode] = keyword[True] , identifier[passes] = keyword[None] , identifier[num_threads] = literal[int] , identifier[apply_experimental] = keyword[True] ): literal[string] identifier[evaluated_data] =[ identifier[v] . identifier[evaluate] ( identifier[verbose] , identifier[decode] , identifier[passes] , identifier[num_threads] , identifier[apply_experimental] ) keyword[for] identifier[v] keyword[in] identifier[self] . identifier[values] ] keyword[return] identifier[MultiIndex] ( identifier[evaluated_data] , identifier[self] . identifier[names] )
def evaluate(self, verbose=False, decode=True, passes=None, num_threads=1, apply_experimental=True): """Evaluates by creating a MultiIndex containing evaluated data and index. See `LazyResult` Returns ------- MultiIndex MultiIndex with evaluated data. """ evaluated_data = [v.evaluate(verbose, decode, passes, num_threads, apply_experimental) for v in self.values] return MultiIndex(evaluated_data, self.names)
def timer_expired(self):
    """Handle expiry of the operation timeout.

    Runs in the timer thread, where raising would deliver the exception
    to the wrong thread — so instead the connection's socket is shut
    down, which makes the blocked HTTP operation fail in its own thread.
    If connection setup has not yet produced a socket, the timer is
    re-armed for the retry duration and we try again, indefinitely; in
    that case the overall timeout is not strictly guaranteed.
    """
    conn_sock = self._http_conn.sock
    if conn_sock is None:
        # No socket yet (early connection setup): retry later.
        self._timer.cancel()
        self._timer = threading.Timer(self._retrytime,
                                      HTTPTimeout.timer_expired, [self])
        self._timer.start()
    else:
        self._shutdown = True
        conn_sock.shutdown(socket.SHUT_RDWR)
def function[timer_expired, parameter[self]]: constant[ This method is invoked in context of the timer thread, so we cannot directly throw exceptions (we can, but they would be in the wrong thread), so instead we shut down the socket of the connection. When the timeout happens in early phases of the connection setup, there is no socket object on the HTTP connection yet, in that case we retry after the retry duration, indefinitely. So we do not guarantee in all cases that the overall operation times out after the specified timeout. ] if compare[name[self]._http_conn.sock is_not constant[None]] begin[:] name[self]._shutdown assign[=] constant[True] call[name[self]._http_conn.sock.shutdown, parameter[name[socket].SHUT_RDWR]]
keyword[def] identifier[timer_expired] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_http_conn] . identifier[sock] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_shutdown] = keyword[True] identifier[self] . identifier[_http_conn] . identifier[sock] . identifier[shutdown] ( identifier[socket] . identifier[SHUT_RDWR] ) keyword[else] : identifier[self] . identifier[_timer] . identifier[cancel] () identifier[self] . identifier[_timer] = identifier[threading] . identifier[Timer] ( identifier[self] . identifier[_retrytime] , identifier[HTTPTimeout] . identifier[timer_expired] ,[ identifier[self] ]) identifier[self] . identifier[_timer] . identifier[start] ()
def timer_expired(self): """ This method is invoked in context of the timer thread, so we cannot directly throw exceptions (we can, but they would be in the wrong thread), so instead we shut down the socket of the connection. When the timeout happens in early phases of the connection setup, there is no socket object on the HTTP connection yet, in that case we retry after the retry duration, indefinitely. So we do not guarantee in all cases that the overall operation times out after the specified timeout. """ if self._http_conn.sock is not None: self._shutdown = True self._http_conn.sock.shutdown(socket.SHUT_RDWR) # depends on [control=['if'], data=[]] else: # Retry after the retry duration self._timer.cancel() self._timer = threading.Timer(self._retrytime, HTTPTimeout.timer_expired, [self]) self._timer.start()
def _construct_surface(lons, lats, upper_depth, lower_depth):
    """
    Build a simple fault surface whose top edge is given by ``lons`` /
    ``lats`` and that extends vertically from ``upper_depth`` down to
    ``lower_depth``.  The mesh repeats the same horizontal coordinates
    at the two depth levels (a two-row rectangular mesh).
    """
    # zeros_like(...) + depth keeps numpy's usual dtype promotion
    # (e.g. int coordinates + float depth -> float rows).
    top_row = np.zeros_like(lons) + upper_depth
    bottom_row = np.zeros_like(lats) + lower_depth
    mesh_lons = np.tile(lons, (2, 1))
    mesh_lats = np.tile(lats, (2, 1))
    mesh = RectangularMesh(mesh_lons, mesh_lats,
                           np.array([top_row, bottom_row]))
    return SimpleFaultSurface(mesh)
def function[_construct_surface, parameter[lons, lats, upper_depth, lower_depth]]: constant[ Utility method that constructs and return a simple fault surface with top edge specified by `lons` and `lats` and extending vertically from `upper_depth` to `lower_depth`. The underlying mesh is built by repeating the same coordinates (`lons` and `lats`) at the two specified depth levels. ] variable[depths] assign[=] call[name[np].array, parameter[list[[<ast.BinOp object at 0x7da2054a4df0>, <ast.BinOp object at 0x7da18f09f130>]]]] variable[mesh] assign[=] call[name[RectangularMesh], parameter[call[name[np].tile, parameter[name[lons], tuple[[<ast.Constant object at 0x7da18f09c850>, <ast.Constant object at 0x7da18f09ffa0>]]]], call[name[np].tile, parameter[name[lats], tuple[[<ast.Constant object at 0x7da18f09e980>, <ast.Constant object at 0x7da18f09cac0>]]]], name[depths]]] return[call[name[SimpleFaultSurface], parameter[name[mesh]]]]
keyword[def] identifier[_construct_surface] ( identifier[lons] , identifier[lats] , identifier[upper_depth] , identifier[lower_depth] ): literal[string] identifier[depths] = identifier[np] . identifier[array] ([ identifier[np] . identifier[zeros_like] ( identifier[lons] )+ identifier[upper_depth] , identifier[np] . identifier[zeros_like] ( identifier[lats] )+ identifier[lower_depth] ]) identifier[mesh] = identifier[RectangularMesh] ( identifier[np] . identifier[tile] ( identifier[lons] ,( literal[int] , literal[int] )), identifier[np] . identifier[tile] ( identifier[lats] ,( literal[int] , literal[int] )), identifier[depths] ) keyword[return] identifier[SimpleFaultSurface] ( identifier[mesh] )
def _construct_surface(lons, lats, upper_depth, lower_depth): """ Utility method that constructs and return a simple fault surface with top edge specified by `lons` and `lats` and extending vertically from `upper_depth` to `lower_depth`. The underlying mesh is built by repeating the same coordinates (`lons` and `lats`) at the two specified depth levels. """ depths = np.array([np.zeros_like(lons) + upper_depth, np.zeros_like(lats) + lower_depth]) mesh = RectangularMesh(np.tile(lons, (2, 1)), np.tile(lats, (2, 1)), depths) return SimpleFaultSurface(mesh)
def _get_salt_params():
    '''
    Try to gather server parameters for the Server Density server-info call.

    NOTE: Missing publicDNS and publicIPs parameters. There might be a way of
    getting them with salt-cloud.
    '''
    all_stats = __salt__['status.all_status']()
    all_grains = __salt__['grains.items']()
    params = {}
    # Best-effort population: the whole mapping is built inside one try
    # block, so the first missing key aborts the remaining assignments and
    # whatever was set so far is returned as-is.  Do not reorder these
    # statements without preserving that semantics.
    try:
        params['name'] = all_grains['id']
        params['hostname'] = all_grains['host']
        # Server Density's OS descriptor: Darwin maps to its 'mac' code,
        # every other kernel name is lower-cased for the code field.
        if all_grains['kernel'] == 'Darwin':
            sd_os = {'code': 'mac', 'name': 'Mac'}
        else:
            sd_os = {'code': all_grains['kernel'].lower(), 'name': all_grains['kernel']}
        params['os'] = salt.utils.json.dumps(sd_os)
        params['cpuCores'] = all_stats['cpuinfo']['cpu cores']
        # KiB -> MiB.  NOTE(review): '/' is true division on Python 3, so
        # these become strings like '1024.0' rather than '1024' — confirm
        # the Server Density API accepts that.
        params['installedRAM'] = six.text_type(int(all_stats['meminfo']['MemTotal']['value']) / 1024)
        params['swapSpace'] = six.text_type(int(all_stats['meminfo']['SwapTotal']['value']) / 1024)
        params['privateIPs'] = salt.utils.json.dumps(all_grains['fqdn_ip4'])
        params['privateDNS'] = salt.utils.json.dumps(all_grains['fqdn'])
    except KeyError:
        pass
    return params
def function[_get_salt_params, parameter[]]: constant[ Try to get all sort of parameters for Server Density server info. NOTE: Missing publicDNS and publicIPs parameters. There might be way of getting them with salt-cloud. ] variable[all_stats] assign[=] call[call[name[__salt__]][constant[status.all_status]], parameter[]] variable[all_grains] assign[=] call[call[name[__salt__]][constant[grains.items]], parameter[]] variable[params] assign[=] dictionary[[], []] <ast.Try object at 0x7da20e9546a0> return[name[params]]
keyword[def] identifier[_get_salt_params] (): literal[string] identifier[all_stats] = identifier[__salt__] [ literal[string] ]() identifier[all_grains] = identifier[__salt__] [ literal[string] ]() identifier[params] ={} keyword[try] : identifier[params] [ literal[string] ]= identifier[all_grains] [ literal[string] ] identifier[params] [ literal[string] ]= identifier[all_grains] [ literal[string] ] keyword[if] identifier[all_grains] [ literal[string] ]== literal[string] : identifier[sd_os] ={ literal[string] : literal[string] , literal[string] : literal[string] } keyword[else] : identifier[sd_os] ={ literal[string] : identifier[all_grains] [ literal[string] ]. identifier[lower] (), literal[string] : identifier[all_grains] [ literal[string] ]} identifier[params] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[sd_os] ) identifier[params] [ literal[string] ]= identifier[all_stats] [ literal[string] ][ literal[string] ] identifier[params] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[int] ( identifier[all_stats] [ literal[string] ][ literal[string] ][ literal[string] ])/ literal[int] ) identifier[params] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[int] ( identifier[all_stats] [ literal[string] ][ literal[string] ][ literal[string] ])/ literal[int] ) identifier[params] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[all_grains] [ literal[string] ]) identifier[params] [ literal[string] ]= identifier[salt] . identifier[utils] . identifier[json] . identifier[dumps] ( identifier[all_grains] [ literal[string] ]) keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[params]
def _get_salt_params(): """ Try to get all sort of parameters for Server Density server info. NOTE: Missing publicDNS and publicIPs parameters. There might be way of getting them with salt-cloud. """ all_stats = __salt__['status.all_status']() all_grains = __salt__['grains.items']() params = {} try: params['name'] = all_grains['id'] params['hostname'] = all_grains['host'] if all_grains['kernel'] == 'Darwin': sd_os = {'code': 'mac', 'name': 'Mac'} # depends on [control=['if'], data=[]] else: sd_os = {'code': all_grains['kernel'].lower(), 'name': all_grains['kernel']} params['os'] = salt.utils.json.dumps(sd_os) params['cpuCores'] = all_stats['cpuinfo']['cpu cores'] params['installedRAM'] = six.text_type(int(all_stats['meminfo']['MemTotal']['value']) / 1024) params['swapSpace'] = six.text_type(int(all_stats['meminfo']['SwapTotal']['value']) / 1024) params['privateIPs'] = salt.utils.json.dumps(all_grains['fqdn_ip4']) params['privateDNS'] = salt.utils.json.dumps(all_grains['fqdn']) # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] return params
def image_convert(fname,saveAs=True,showToo=False): """ Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.1%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format """ # load the image #im = Image.open(fname) #PIL can't handle 12-bit TIFs well im=scipy.ndimage.imread(fname) #scipy does better with it im=np.array(im,dtype=float) # now it's a numpy array # do all image enhancement here cutoffLow=np.percentile(im,.01) cutoffHigh=np.percentile(im,99.99) im[np.where(im<cutoffLow)]=cutoffLow im[np.where(im>cutoffHigh)]=cutoffHigh # IMAGE FORMATTING im-=np.min(im) #auto contrast im/=np.max(im) #normalize im*=255 #stretch contrast (8-bit) im = Image.fromarray(im) # IMAGE DRAWING msg="Filename: %s\n"%os.path.basename(fname) timestamp = datetime.datetime.fromtimestamp(os.path.getctime(fname)) msg+="Created: %s\n"%timestamp.strftime('%Y-%m-%d %H:%M:%S') d = ImageDraw.Draw(im) fnt = ImageFont.truetype("arial.ttf", 20) d.text((6,6),msg,font=fnt,fill=0) d.text((4,4),msg,font=fnt,fill=255) if showToo: im.show() if saveAs is False: return if saveAs is True: saveAs=fname+".png" im.convert('RGB').save(saveAs)
def function[image_convert, parameter[fname, saveAs, showToo]]: constant[ Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.1%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format ] variable[im] assign[=] call[name[scipy].ndimage.imread, parameter[name[fname]]] variable[im] assign[=] call[name[np].array, parameter[name[im]]] variable[cutoffLow] assign[=] call[name[np].percentile, parameter[name[im], constant[0.01]]] variable[cutoffHigh] assign[=] call[name[np].percentile, parameter[name[im], constant[99.99]]] call[name[im]][call[name[np].where, parameter[compare[name[im] less[<] name[cutoffLow]]]]] assign[=] name[cutoffLow] call[name[im]][call[name[np].where, parameter[compare[name[im] greater[>] name[cutoffHigh]]]]] assign[=] name[cutoffHigh] <ast.AugAssign object at 0x7da1afe44cd0> <ast.AugAssign object at 0x7da1afe44df0> <ast.AugAssign object at 0x7da1afe44f10> variable[im] assign[=] call[name[Image].fromarray, parameter[name[im]]] variable[msg] assign[=] binary_operation[constant[Filename: %s ] <ast.Mod object at 0x7da2590d6920> call[name[os].path.basename, parameter[name[fname]]]] variable[timestamp] assign[=] call[name[datetime].datetime.fromtimestamp, parameter[call[name[os].path.getctime, parameter[name[fname]]]]] <ast.AugAssign object at 0x7da1afe45480> variable[d] assign[=] call[name[ImageDraw].Draw, parameter[name[im]]] variable[fnt] assign[=] call[name[ImageFont].truetype, parameter[constant[arial.ttf], constant[20]]] call[name[d].text, parameter[tuple[[<ast.Constant object at 0x7da1afe45960>, <ast.Constant object at 0x7da1afe45990>]], name[msg]]] call[name[d].text, parameter[tuple[[<ast.Constant object at 0x7da1afe45bd0>, <ast.Constant object at 0x7da1afe45c00>]], name[msg]]] if name[showToo] begin[:] call[name[im].show, parameter[]] if compare[name[saveAs] is constant[False]] begin[:] return[None] if 
compare[name[saveAs] is constant[True]] begin[:] variable[saveAs] assign[=] binary_operation[name[fname] + constant[.png]] call[call[name[im].convert, parameter[constant[RGB]]].save, parameter[name[saveAs]]]
keyword[def] identifier[image_convert] ( identifier[fname] , identifier[saveAs] = keyword[True] , identifier[showToo] = keyword[False] ): literal[string] identifier[im] = identifier[scipy] . identifier[ndimage] . identifier[imread] ( identifier[fname] ) identifier[im] = identifier[np] . identifier[array] ( identifier[im] , identifier[dtype] = identifier[float] ) identifier[cutoffLow] = identifier[np] . identifier[percentile] ( identifier[im] , literal[int] ) identifier[cutoffHigh] = identifier[np] . identifier[percentile] ( identifier[im] , literal[int] ) identifier[im] [ identifier[np] . identifier[where] ( identifier[im] < identifier[cutoffLow] )]= identifier[cutoffLow] identifier[im] [ identifier[np] . identifier[where] ( identifier[im] > identifier[cutoffHigh] )]= identifier[cutoffHigh] identifier[im] -= identifier[np] . identifier[min] ( identifier[im] ) identifier[im] /= identifier[np] . identifier[max] ( identifier[im] ) identifier[im] *= literal[int] identifier[im] = identifier[Image] . identifier[fromarray] ( identifier[im] ) identifier[msg] = literal[string] % identifier[os] . identifier[path] . identifier[basename] ( identifier[fname] ) identifier[timestamp] = identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[os] . identifier[path] . identifier[getctime] ( identifier[fname] )) identifier[msg] += literal[string] % identifier[timestamp] . identifier[strftime] ( literal[string] ) identifier[d] = identifier[ImageDraw] . identifier[Draw] ( identifier[im] ) identifier[fnt] = identifier[ImageFont] . identifier[truetype] ( literal[string] , literal[int] ) identifier[d] . identifier[text] (( literal[int] , literal[int] ), identifier[msg] , identifier[font] = identifier[fnt] , identifier[fill] = literal[int] ) identifier[d] . identifier[text] (( literal[int] , literal[int] ), identifier[msg] , identifier[font] = identifier[fnt] , identifier[fill] = literal[int] ) keyword[if] identifier[showToo] : identifier[im] . 
identifier[show] () keyword[if] identifier[saveAs] keyword[is] keyword[False] : keyword[return] keyword[if] identifier[saveAs] keyword[is] keyword[True] : identifier[saveAs] = identifier[fname] + literal[string] identifier[im] . identifier[convert] ( literal[string] ). identifier[save] ( identifier[saveAs] )
def image_convert(fname, saveAs=True, showToo=False): """ Convert weird TIF files into web-friendly versions. Auto contrast is applied (saturating lower and upper 0.1%). make saveAs True to save as .TIF.png make saveAs False and it won't save at all make saveAs "someFile.jpg" to save it as a different path/format """ # load the image #im = Image.open(fname) #PIL can't handle 12-bit TIFs well im = scipy.ndimage.imread(fname) #scipy does better with it im = np.array(im, dtype=float) # now it's a numpy array # do all image enhancement here cutoffLow = np.percentile(im, 0.01) cutoffHigh = np.percentile(im, 99.99) im[np.where(im < cutoffLow)] = cutoffLow im[np.where(im > cutoffHigh)] = cutoffHigh # IMAGE FORMATTING im -= np.min(im) #auto contrast im /= np.max(im) #normalize im *= 255 #stretch contrast (8-bit) im = Image.fromarray(im) # IMAGE DRAWING msg = 'Filename: %s\n' % os.path.basename(fname) timestamp = datetime.datetime.fromtimestamp(os.path.getctime(fname)) msg += 'Created: %s\n' % timestamp.strftime('%Y-%m-%d %H:%M:%S') d = ImageDraw.Draw(im) fnt = ImageFont.truetype('arial.ttf', 20) d.text((6, 6), msg, font=fnt, fill=0) d.text((4, 4), msg, font=fnt, fill=255) if showToo: im.show() # depends on [control=['if'], data=[]] if saveAs is False: return # depends on [control=['if'], data=[]] if saveAs is True: saveAs = fname + '.png' # depends on [control=['if'], data=['saveAs']] im.convert('RGB').save(saveAs)
def fillna(self, value, subset=None):
    """Replace null values, alias for ``na.fill()``.
    :func:`DataFrame.fillna` and :func:`DataFrameNaFunctions.fill` are
    aliases of each other.

    :param value: int, long, float, string, bool or dict.
        Value to replace null values with.
        If the value is a dict, then `subset` is ignored and `value` must be a mapping
        from column name (string) to replacement value. The replacement value must be
        an int, long, float, boolean, or string.
    :param subset: optional list of column names to consider.
        Columns specified in subset that do not have matching data type are ignored.
        For example, if `value` is a string, and subset contains a non-string column,
        then the non-string column is simply ignored.

    >>> df4.na.fill(50).show()
    +---+------+-----+
    |age|height| name|
    +---+------+-----+
    | 10|    80|Alice|
    |  5|    50|  Bob|
    | 50|    50|  Tom|
    | 50|    50| null|
    +---+------+-----+

    >>> df5.na.fill(False).show()
    +----+-------+-----+
    | age|   name|  spy|
    +----+-------+-----+
    |  10|  Alice|false|
    |   5|    Bob|false|
    |null|Mallory| true|
    +----+-------+-----+

    >>> df4.na.fill({'age': 50, 'name': 'unknown'}).show()
    +---+------+-------+
    |age|height|   name|
    +---+------+-------+
    | 10|    80|  Alice|
    |  5|  null|    Bob|
    | 50|  null|    Tom|
    | 50|  null|unknown|
    +---+------+-------+
    """
    if not isinstance(value, (float, int, long, basestring, bool, dict)):
        raise ValueError("value should be a float, int, long, string, bool or dict")

    # bool is a subclass of int, but booleans must not be coerced to floats
    if isinstance(value, (int, long)) and not isinstance(value, bool):
        value = float(value)

    # A dict mapping ignores any subset; without a subset every column is
    # considered — both cases go straight to the JVM-side fill.
    if isinstance(value, dict) or subset is None:
        return DataFrame(self._jdf.na().fill(value), self.sql_ctx)

    if isinstance(subset, basestring):
        subset = [subset]
    elif not isinstance(subset, (list, tuple)):
        raise ValueError("subset should be a list or tuple of column names")
    return DataFrame(self._jdf.na().fill(value, self._jseq(subset)), self.sql_ctx)
def function[fillna, parameter[self, value, subset]]: constant[Replace null values, alias for ``na.fill()``. :func:`DataFrame.fillna` and :func:`DataFrameNaFunctions.fill` are aliases of each other. :param value: int, long, float, string, bool or dict. Value to replace null values with. If the value is a dict, then `subset` is ignored and `value` must be a mapping from column name (string) to replacement value. The replacement value must be an int, long, float, boolean, or string. :param subset: optional list of column names to consider. Columns specified in subset that do not have matching data type are ignored. For example, if `value` is a string, and subset contains a non-string column, then the non-string column is simply ignored. >>> df4.na.fill(50).show() +---+------+-----+ |age|height| name| +---+------+-----+ | 10| 80|Alice| | 5| 50| Bob| | 50| 50| Tom| | 50| 50| null| +---+------+-----+ >>> df5.na.fill(False).show() +----+-------+-----+ | age| name| spy| +----+-------+-----+ | 10| Alice|false| | 5| Bob|false| |null|Mallory| true| +----+-------+-----+ >>> df4.na.fill({'age': 50, 'name': 'unknown'}).show() +---+------+-------+ |age|height| name| +---+------+-------+ | 10| 80| Alice| | 5| null| Bob| | 50| null| Tom| | 50| null|unknown| +---+------+-------+ ] if <ast.UnaryOp object at 0x7da1b20a9330> begin[:] <ast.Raise object at 0x7da1b20ab820> if <ast.BoolOp object at 0x7da1b20ab970> begin[:] variable[value] assign[=] call[name[float], parameter[name[value]]] if call[name[isinstance], parameter[name[value], name[dict]]] begin[:] return[call[name[DataFrame], parameter[call[call[name[self]._jdf.na, parameter[]].fill, parameter[name[value]]], name[self].sql_ctx]]]
keyword[def] identifier[fillna] ( identifier[self] , identifier[value] , identifier[subset] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[float] , identifier[int] , identifier[long] , identifier[basestring] , identifier[bool] , identifier[dict] )): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[bool] ) keyword[and] identifier[isinstance] ( identifier[value] ,( identifier[int] , identifier[long] )): identifier[value] = identifier[float] ( identifier[value] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[dict] ): keyword[return] identifier[DataFrame] ( identifier[self] . identifier[_jdf] . identifier[na] (). identifier[fill] ( identifier[value] ), identifier[self] . identifier[sql_ctx] ) keyword[elif] identifier[subset] keyword[is] keyword[None] : keyword[return] identifier[DataFrame] ( identifier[self] . identifier[_jdf] . identifier[na] (). identifier[fill] ( identifier[value] ), identifier[self] . identifier[sql_ctx] ) keyword[else] : keyword[if] identifier[isinstance] ( identifier[subset] , identifier[basestring] ): identifier[subset] =[ identifier[subset] ] keyword[elif] keyword[not] identifier[isinstance] ( identifier[subset] ,( identifier[list] , identifier[tuple] )): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[DataFrame] ( identifier[self] . identifier[_jdf] . identifier[na] (). identifier[fill] ( identifier[value] , identifier[self] . identifier[_jseq] ( identifier[subset] )), identifier[self] . identifier[sql_ctx] )
def fillna(self, value, subset=None): """Replace null values, alias for ``na.fill()``. :func:`DataFrame.fillna` and :func:`DataFrameNaFunctions.fill` are aliases of each other. :param value: int, long, float, string, bool or dict. Value to replace null values with. If the value is a dict, then `subset` is ignored and `value` must be a mapping from column name (string) to replacement value. The replacement value must be an int, long, float, boolean, or string. :param subset: optional list of column names to consider. Columns specified in subset that do not have matching data type are ignored. For example, if `value` is a string, and subset contains a non-string column, then the non-string column is simply ignored. >>> df4.na.fill(50).show() +---+------+-----+ |age|height| name| +---+------+-----+ | 10| 80|Alice| | 5| 50| Bob| | 50| 50| Tom| | 50| 50| null| +---+------+-----+ >>> df5.na.fill(False).show() +----+-------+-----+ | age| name| spy| +----+-------+-----+ | 10| Alice|false| | 5| Bob|false| |null|Mallory| true| +----+-------+-----+ >>> df4.na.fill({'age': 50, 'name': 'unknown'}).show() +---+------+-------+ |age|height| name| +---+------+-------+ | 10| 80| Alice| | 5| null| Bob| | 50| null| Tom| | 50| null|unknown| +---+------+-------+ """ if not isinstance(value, (float, int, long, basestring, bool, dict)): raise ValueError('value should be a float, int, long, string, bool or dict') # depends on [control=['if'], data=[]] # Note that bool validates isinstance(int), but we don't want to # convert bools to floats if not isinstance(value, bool) and isinstance(value, (int, long)): value = float(value) # depends on [control=['if'], data=[]] if isinstance(value, dict): return DataFrame(self._jdf.na().fill(value), self.sql_ctx) # depends on [control=['if'], data=[]] elif subset is None: return DataFrame(self._jdf.na().fill(value), self.sql_ctx) # depends on [control=['if'], data=[]] else: if isinstance(subset, basestring): subset = [subset] # depends on 
[control=['if'], data=[]] elif not isinstance(subset, (list, tuple)): raise ValueError('subset should be a list or tuple of column names') # depends on [control=['if'], data=[]] return DataFrame(self._jdf.na().fill(value, self._jseq(subset)), self.sql_ctx)
async def close_websession(self):
    """Close web session if not already closed and created by us."""
    # Sessions supplied by the caller are theirs to close, not ours.
    if self._supplied_websession:
        return
    session = self._websession
    if session is None:
        return

    _LOGGER.debug('Closing connections')
    # Clear the attribute *before* awaiting so no other task can race us
    # into closing the same session twice.
    self._websession = None
    await session.close()
    # Yield once so the underlying transports can finish shutting down.
    await asyncio.sleep(0)
    _LOGGER.debug('Connections closed')
<ast.AsyncFunctionDef object at 0x7da1b1529570>
keyword[async] keyword[def] identifier[close_websession] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_supplied_websession] keyword[or] identifier[self] . identifier[_websession] keyword[is] keyword[None] : keyword[return] identifier[_LOGGER] . identifier[debug] ( literal[string] ) identifier[temp_websession] = identifier[self] . identifier[_websession] identifier[self] . identifier[_websession] = keyword[None] keyword[await] identifier[temp_websession] . identifier[close] () keyword[await] identifier[asyncio] . identifier[sleep] ( literal[int] ) identifier[_LOGGER] . identifier[debug] ( literal[string] )
async def close_websession(self): """Close web session if not already closed and created by us.""" # We do not close the web session if it was provided. if self._supplied_websession or self._websession is None: return # depends on [control=['if'], data=[]] _LOGGER.debug('Closing connections') # Need to set _websession to none first to prevent any other task # from closing it as well. temp_websession = self._websession self._websession = None await temp_websession.close() await asyncio.sleep(0) _LOGGER.debug('Connections closed')
def slugify(text, length_limit=0, delimiter=u'-'):
    """Generates an ASCII-only slug of a string."""
    handler = _available_unicode_handlers[0]
    # Split on punctuation, transliterate each piece, and drop the empties.
    pieces = (handler(word) for word in _punctuation_regex.split(text.lower()))
    slug = delimiter.join(piece for piece in pieces if piece)
    # Truncate only when a positive limit was requested.
    return slug[:length_limit] if length_limit > 0 else slug
def function[slugify, parameter[text, length_limit, delimiter]]: constant[Generates an ASCII-only slug of a string.] variable[result] assign[=] list[[]] for taget[name[word]] in starred[call[name[_punctuation_regex].split, parameter[call[name[text].lower, parameter[]]]]] begin[:] variable[word] assign[=] call[call[name[_available_unicode_handlers]][constant[0]], parameter[name[word]]] if name[word] begin[:] call[name[result].append, parameter[name[word]]] variable[slug] assign[=] call[name[delimiter].join, parameter[name[result]]] if compare[name[length_limit] greater[>] constant[0]] begin[:] return[call[name[slug]][<ast.Slice object at 0x7da1b1364ee0>]] return[name[slug]]
keyword[def] identifier[slugify] ( identifier[text] , identifier[length_limit] = literal[int] , identifier[delimiter] = literal[string] ): literal[string] identifier[result] =[] keyword[for] identifier[word] keyword[in] identifier[_punctuation_regex] . identifier[split] ( identifier[text] . identifier[lower] ()): identifier[word] = identifier[_available_unicode_handlers] [ literal[int] ]( identifier[word] ) keyword[if] identifier[word] : identifier[result] . identifier[append] ( identifier[word] ) identifier[slug] = identifier[delimiter] . identifier[join] ( identifier[result] ) keyword[if] identifier[length_limit] > literal[int] : keyword[return] identifier[slug] [ literal[int] : identifier[length_limit] ] keyword[return] identifier[slug]
def slugify(text, length_limit=0, delimiter=u'-'): """Generates an ASCII-only slug of a string.""" result = [] for word in _punctuation_regex.split(text.lower()): word = _available_unicode_handlers[0](word) if word: result.append(word) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['word']] slug = delimiter.join(result) if length_limit > 0: return slug[0:length_limit] # depends on [control=['if'], data=['length_limit']] return slug
def markdown(self, md, base_url='..'):
    """
    Process the documentation with Markdown to produce HTML.
    """
    print("\nProcessing documentation comments...")
    ford.sourceform.set_base_url(base_url)
    # Emit a separating blank line when warnings are enabled.
    warn_setting = self.settings['warn'].lower()
    if warn_setting == 'true':
        print()
    for source_file in self.allfiles:
        source_file.markdown(md, self)
def function[markdown, parameter[self, md, base_url]]: constant[ Process the documentation with Markdown to produce HTML. ] call[name[print], parameter[constant[ Processing documentation comments...]]] call[name[ford].sourceform.set_base_url, parameter[name[base_url]]] if compare[call[call[name[self].settings][constant[warn]].lower, parameter[]] equal[==] constant[true]] begin[:] call[name[print], parameter[]] for taget[name[src]] in starred[name[self].allfiles] begin[:] call[name[src].markdown, parameter[name[md], name[self]]]
keyword[def] identifier[markdown] ( identifier[self] , identifier[md] , identifier[base_url] = literal[string] ): literal[string] identifier[print] ( literal[string] ) identifier[ford] . identifier[sourceform] . identifier[set_base_url] ( identifier[base_url] ) keyword[if] identifier[self] . identifier[settings] [ literal[string] ]. identifier[lower] ()== literal[string] : identifier[print] () keyword[for] identifier[src] keyword[in] identifier[self] . identifier[allfiles] : identifier[src] . identifier[markdown] ( identifier[md] , identifier[self] )
def markdown(self, md, base_url='..'): """ Process the documentation with Markdown to produce HTML. """ print('\nProcessing documentation comments...') ford.sourceform.set_base_url(base_url) if self.settings['warn'].lower() == 'true': print() # depends on [control=['if'], data=[]] for src in self.allfiles: src.markdown(md, self) # depends on [control=['for'], data=['src']]
def show_domain(self, domain_id):
    """
    This method returns the specified domain.

    Required parameters

        domain_id: Integer or Domain Name (e.g. domain.com),
                   specifies the domain to display.

    :raises DOPException: when the API status is anything other than 'OK'.
    """
    response = self.request('/domains/%s' % domain_id, method='GET')
    status = response.get('status')
    # Fail fast on any non-OK status, surfacing the API's own message.
    if status != 'OK':
        raise DOPException('[%s]: %s' % (status, response.get('message')))
    return Domain.from_json(response.get('domain'))
def function[show_domain, parameter[self, domain_id]]: constant[ This method returns the specified domain. Required parameters domain_id: Integer or Domain Name (e.g. domain.com), specifies the domain to display. ] variable[json] assign[=] call[name[self].request, parameter[binary_operation[constant[/domains/%s] <ast.Mod object at 0x7da2590d6920> name[domain_id]]]] variable[status] assign[=] call[name[json].get, parameter[constant[status]]] if compare[name[status] equal[==] constant[OK]] begin[:] variable[domain_json] assign[=] call[name[json].get, parameter[constant[domain]]] variable[domain] assign[=] call[name[Domain].from_json, parameter[name[domain_json]]] return[name[domain]]
keyword[def] identifier[show_domain] ( identifier[self] , identifier[domain_id] ): literal[string] identifier[json] = identifier[self] . identifier[request] ( literal[string] % identifier[domain_id] , identifier[method] = literal[string] ) identifier[status] = identifier[json] . identifier[get] ( literal[string] ) keyword[if] identifier[status] == literal[string] : identifier[domain_json] = identifier[json] . identifier[get] ( literal[string] ) identifier[domain] = identifier[Domain] . identifier[from_json] ( identifier[domain_json] ) keyword[return] identifier[domain] keyword[else] : identifier[message] = identifier[json] . identifier[get] ( literal[string] ) keyword[raise] identifier[DOPException] ( literal[string] %( identifier[status] , identifier[message] ))
def show_domain(self, domain_id): """ This method returns the specified domain. Required parameters domain_id: Integer or Domain Name (e.g. domain.com), specifies the domain to display. """ json = self.request('/domains/%s' % domain_id, method='GET') status = json.get('status') if status == 'OK': domain_json = json.get('domain') domain = Domain.from_json(domain_json) return domain # depends on [control=['if'], data=[]] else: message = json.get('message') raise DOPException('[%s]: %s' % (status, message))
def element(cls, name, parent=None, interleave=None, occur=0):
    """Create an "element" node carrying *name* and *occur*."""
    elem = cls("element", parent, interleave=interleave)
    # Record the occurrence count and the element's name attribute.
    elem.occur = occur
    elem.attr["name"] = name
    return elem
def function[element, parameter[cls, name, parent, interleave, occur]]: constant[Create an element node.] variable[node] assign[=] call[name[cls], parameter[constant[element], name[parent]]] call[name[node].attr][constant[name]] assign[=] name[name] name[node].occur assign[=] name[occur] return[name[node]]
keyword[def] identifier[element] ( identifier[cls] , identifier[name] , identifier[parent] = keyword[None] , identifier[interleave] = keyword[None] , identifier[occur] = literal[int] ): literal[string] identifier[node] = identifier[cls] ( literal[string] , identifier[parent] , identifier[interleave] = identifier[interleave] ) identifier[node] . identifier[attr] [ literal[string] ]= identifier[name] identifier[node] . identifier[occur] = identifier[occur] keyword[return] identifier[node]
def element(cls, name, parent=None, interleave=None, occur=0): """Create an element node.""" node = cls('element', parent, interleave=interleave) node.attr['name'] = name node.occur = occur return node
def plot_cable_length(stats, plotpath):
    """
    Cable length per MV grid district
    """
    # One histogram per cable/line category on a shared 2x2 figure.
    fig, axes = plt.subplots(2, 2, sharex=True)
    panels = (
        ('Length of MV overhead lines', (0, 0)),
        ('Length of MV underground cables', (0, 1)),
        ('Length of LV overhead lines', (1, 0)),
        ('Length of LV underground cables', (1, 1)),
    )
    for column_name, position in panels:
        stats.hist(column=[column_name], bins=5, alpha=0.5, ax=axes[position])
    plt.savefig(os.path.join(plotpath, 'Histogram_cable_line_length.pdf'))
def function[plot_cable_length, parameter[stats, plotpath]]: constant[ Cable length per MV grid district ] <ast.Tuple object at 0x7da20c6a8c10> assign[=] call[name[plt].subplots, parameter[constant[2], constant[2]]] call[name[stats].hist, parameter[]] call[name[stats].hist, parameter[]] call[name[stats].hist, parameter[]] call[name[stats].hist, parameter[]] call[name[plt].savefig, parameter[call[name[os].path.join, parameter[name[plotpath], constant[Histogram_cable_line_length.pdf]]]]]
keyword[def] identifier[plot_cable_length] ( identifier[stats] , identifier[plotpath] ): literal[string] identifier[f] , identifier[axarr] = identifier[plt] . identifier[subplots] ( literal[int] , literal[int] , identifier[sharex] = keyword[True] ) identifier[stats] . identifier[hist] ( identifier[column] =[ literal[string] ], identifier[bins] = literal[int] , identifier[alpha] = literal[int] , identifier[ax] = identifier[axarr] [ literal[int] , literal[int] ]) identifier[stats] . identifier[hist] ( identifier[column] =[ literal[string] ], identifier[bins] = literal[int] , identifier[alpha] = literal[int] , identifier[ax] = identifier[axarr] [ literal[int] , literal[int] ]) identifier[stats] . identifier[hist] ( identifier[column] =[ literal[string] ], identifier[bins] = literal[int] , identifier[alpha] = literal[int] , identifier[ax] = identifier[axarr] [ literal[int] , literal[int] ]) identifier[stats] . identifier[hist] ( identifier[column] =[ literal[string] ], identifier[bins] = literal[int] , identifier[alpha] = literal[int] , identifier[ax] = identifier[axarr] [ literal[int] , literal[int] ]) identifier[plt] . identifier[savefig] ( identifier[os] . identifier[path] . identifier[join] ( identifier[plotpath] , literal[string] ))
def plot_cable_length(stats, plotpath): """ Cable length per MV grid district """ # cable and line kilometer distribution (f, axarr) = plt.subplots(2, 2, sharex=True) stats.hist(column=['Length of MV overhead lines'], bins=5, alpha=0.5, ax=axarr[0, 0]) stats.hist(column=['Length of MV underground cables'], bins=5, alpha=0.5, ax=axarr[0, 1]) stats.hist(column=['Length of LV overhead lines'], bins=5, alpha=0.5, ax=axarr[1, 0]) stats.hist(column=['Length of LV underground cables'], bins=5, alpha=0.5, ax=axarr[1, 1]) plt.savefig(os.path.join(plotpath, 'Histogram_cable_line_length.pdf'))
def _convert_to_config(self):
    """self.parsed_data->self.config, parse unrecognized extra args via KVLoader."""
    # Pull the subconfig list out of the namespace before iterating it,
    # so the '_flags' entry is not treated as a key/value assignment.
    if '_flags' not in self.parsed_data:
        subcs = []
    else:
        subcs = self.parsed_data._flags
        del self.parsed_data._flags

    for key, val in vars(self.parsed_data).iteritems():
        if val is not None:
            # eval the KV assignment
            self._exec_config_str(key, val)
        else:
            # it was a flag that shares the name of an alias
            subcs.append(self.alias_flags[key])

    for subconfig in subcs:
        self._load_flag(subconfig)

    if self.extra_args:
        # Hand any leftover arguments to the key=value loader and merge
        # its config into ours.
        kv_loader = KeyValueConfigLoader()
        kv_loader.load_config(self.extra_args)
        self.config._merge(kv_loader.config)
        self.extra_args = kv_loader.extra_args
def function[_convert_to_config, parameter[self]]: constant[self.parsed_data->self.config, parse unrecognized extra args via KVLoader.] if compare[constant[_flags] in name[self].parsed_data] begin[:] variable[subcs] assign[=] name[self].parsed_data._flags <ast.Delete object at 0x7da18ede6e30> for taget[tuple[[<ast.Name object at 0x7da18ede58a0>, <ast.Name object at 0x7da18ede5810>]]] in starred[call[call[name[vars], parameter[name[self].parsed_data]].iteritems, parameter[]]] begin[:] if compare[name[v] is constant[None]] begin[:] call[name[subcs].append, parameter[call[name[self].alias_flags][name[k]]]] for taget[name[subc]] in starred[name[subcs]] begin[:] call[name[self]._load_flag, parameter[name[subc]]] if name[self].extra_args begin[:] variable[sub_parser] assign[=] call[name[KeyValueConfigLoader], parameter[]] call[name[sub_parser].load_config, parameter[name[self].extra_args]] call[name[self].config._merge, parameter[name[sub_parser].config]] name[self].extra_args assign[=] name[sub_parser].extra_args
keyword[def] identifier[_convert_to_config] ( identifier[self] ): literal[string] keyword[if] literal[string] keyword[in] identifier[self] . identifier[parsed_data] : identifier[subcs] = identifier[self] . identifier[parsed_data] . identifier[_flags] keyword[del] identifier[self] . identifier[parsed_data] . identifier[_flags] keyword[else] : identifier[subcs] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[vars] ( identifier[self] . identifier[parsed_data] ). identifier[iteritems] (): keyword[if] identifier[v] keyword[is] keyword[None] : identifier[subcs] . identifier[append] ( identifier[self] . identifier[alias_flags] [ identifier[k] ]) keyword[else] : identifier[self] . identifier[_exec_config_str] ( identifier[k] , identifier[v] ) keyword[for] identifier[subc] keyword[in] identifier[subcs] : identifier[self] . identifier[_load_flag] ( identifier[subc] ) keyword[if] identifier[self] . identifier[extra_args] : identifier[sub_parser] = identifier[KeyValueConfigLoader] () identifier[sub_parser] . identifier[load_config] ( identifier[self] . identifier[extra_args] ) identifier[self] . identifier[config] . identifier[_merge] ( identifier[sub_parser] . identifier[config] ) identifier[self] . identifier[extra_args] = identifier[sub_parser] . identifier[extra_args]
def _convert_to_config(self): """self.parsed_data->self.config, parse unrecognized extra args via KVLoader.""" # remove subconfigs list from namespace before transforming the Namespace if '_flags' in self.parsed_data: subcs = self.parsed_data._flags del self.parsed_data._flags # depends on [control=['if'], data=[]] else: subcs = [] for (k, v) in vars(self.parsed_data).iteritems(): if v is None: # it was a flag that shares the name of an alias subcs.append(self.alias_flags[k]) # depends on [control=['if'], data=[]] else: # eval the KV assignment self._exec_config_str(k, v) # depends on [control=['for'], data=[]] for subc in subcs: self._load_flag(subc) # depends on [control=['for'], data=['subc']] if self.extra_args: sub_parser = KeyValueConfigLoader() sub_parser.load_config(self.extra_args) self.config._merge(sub_parser.config) self.extra_args = sub_parser.extra_args # depends on [control=['if'], data=[]]
def _distribution_distance(simulated_trajectories, observed_trajectories_lookup, distribution):
    """
    Returns the distance between the simulated and observed trajectory,
    w.r.t. the assumed distribution

    :param simulated_trajectories: Simulated trajectories
    :type simulated_trajectories: list[:class:`means.simulation.Trajectory`]
    :param observed_trajectories_lookup: A dictionary of
        (trajectory.description: trajectory) of observed trajectories
    :type observed_trajectories_lookup: dict
    :param distribution: Distribution to use.
        See :func:`_eval_density` for the list of available distributions
    :return:
    """
    # get moment expansion result with current parameters
    mv_lookup = _compile_mean_variance_lookup(simulated_trajectories)

    total_log_likelihood = 0
    for observed in observed_trajectories_lookup.itervalues():
        moment = observed.description
        assert isinstance(moment, Moment)
        assert moment.order == 1

        # the single species this first-order moment refers to
        species_index = np.where(moment.n_vector == 1)[0][0]
        mv = mv_lookup[species_index]

        # negative means/variances make the density undefined -> infinite distance
        if (mv.mean < 0).any() or (mv.variance < 0).any():
            return float('inf')

        total_log_likelihood += _eval_density(mv.mean, mv.variance,
                                              observed.values, distribution)

    # distance is the negative log-likelihood
    return -total_log_likelihood
def function[_distribution_distance, parameter[simulated_trajectories, observed_trajectories_lookup, distribution]]: constant[ Returns the distance between the simulated and observed trajectory, w.r.t. the assumed distribution :param simulated_trajectories: Simulated trajectories :type simulated_trajectories: list[:class:`means.simulation.Trajectory`] :param observed_trajectories_lookup: A dictionary of (trajectory.description: trajectory) of observed trajectories :type observed_trajectories_lookup: dict :param distribution: Distribution to use. See :func:`_eval_density` for the list of available distributions :return: ] variable[mean_variance_lookup] assign[=] call[name[_compile_mean_variance_lookup], parameter[name[simulated_trajectories]]] variable[log_likelihood] assign[=] constant[0] for taget[name[trajectory]] in starred[call[name[observed_trajectories_lookup].itervalues, parameter[]]] begin[:] variable[moment] assign[=] name[trajectory].description assert[call[name[isinstance], parameter[name[moment], name[Moment]]]] assert[compare[name[moment].order equal[==] constant[1]]] variable[species] assign[=] call[call[call[name[np].where, parameter[compare[name[moment].n_vector equal[==] constant[1]]]]][constant[0]]][constant[0]] variable[mean_variance] assign[=] call[name[mean_variance_lookup]][name[species]] if <ast.BoolOp object at 0x7da20e957880> begin[:] return[call[name[float], parameter[constant[inf]]]] variable[term] assign[=] call[name[_eval_density], parameter[name[mean_variance].mean, name[mean_variance].variance, name[trajectory].values, name[distribution]]] <ast.AugAssign object at 0x7da20e9577f0> variable[dist] assign[=] <ast.UnaryOp object at 0x7da20e957340> return[name[dist]]
keyword[def] identifier[_distribution_distance] ( identifier[simulated_trajectories] , identifier[observed_trajectories_lookup] , identifier[distribution] ): literal[string] identifier[mean_variance_lookup] = identifier[_compile_mean_variance_lookup] ( identifier[simulated_trajectories] ) identifier[log_likelihood] = literal[int] keyword[for] identifier[trajectory] keyword[in] identifier[observed_trajectories_lookup] . identifier[itervalues] (): identifier[moment] = identifier[trajectory] . identifier[description] keyword[assert] ( identifier[isinstance] ( identifier[moment] , identifier[Moment] )) keyword[assert] ( identifier[moment] . identifier[order] == literal[int] ) identifier[species] = identifier[np] . identifier[where] ( identifier[moment] . identifier[n_vector] == literal[int] )[ literal[int] ][ literal[int] ] identifier[mean_variance] = identifier[mean_variance_lookup] [ identifier[species] ] keyword[if] ( identifier[mean_variance] . identifier[mean] < literal[int] ). identifier[any] () keyword[or] ( identifier[mean_variance] . identifier[variance] < literal[int] ). identifier[any] (): keyword[return] identifier[float] ( literal[string] ) identifier[term] = identifier[_eval_density] ( identifier[mean_variance] . identifier[mean] , identifier[mean_variance] . identifier[variance] , identifier[trajectory] . identifier[values] , identifier[distribution] ) identifier[log_likelihood] += identifier[term] identifier[dist] =- identifier[log_likelihood] keyword[return] identifier[dist]
def _distribution_distance(simulated_trajectories, observed_trajectories_lookup, distribution): """ Returns the distance between the simulated and observed trajectory, w.r.t. the assumed distribution :param simulated_trajectories: Simulated trajectories :type simulated_trajectories: list[:class:`means.simulation.Trajectory`] :param observed_trajectories_lookup: A dictionary of (trajectory.description: trajectory) of observed trajectories :type observed_trajectories_lookup: dict :param distribution: Distribution to use. See :func:`_eval_density` for the list of available distributions :return: """ mean_variance_lookup = _compile_mean_variance_lookup(simulated_trajectories) # get moment expansion result with current parameters log_likelihood = 0 for trajectory in observed_trajectories_lookup.itervalues(): moment = trajectory.description assert isinstance(moment, Moment) assert moment.order == 1 species = np.where(moment.n_vector == 1)[0][0] mean_variance = mean_variance_lookup[species] if (mean_variance.mean < 0).any() or (mean_variance.variance < 0).any(): return float('inf') # depends on [control=['if'], data=[]] term = _eval_density(mean_variance.mean, mean_variance.variance, trajectory.values, distribution) log_likelihood += term # depends on [control=['for'], data=['trajectory']] dist = -log_likelihood return dist
def check_signature(signature, private_key, full_path, payload):
    """
    Checks signature received and verifies that we are able to re-create
    it from the private key, path, and payload given.

    :param signature: Signature received from request.
    :param private_key: Base 64, url encoded private key.
    :full_path: Full path of request, including GET query string (excluding host)
    :payload: The request.POST data if present. None if not.
    :returns: Boolean of whether signature matched or not.
    """
    # Normalize bytes inputs to text before recomputing the signature.
    if isinstance(private_key, bytes):
        private_key = private_key.decode("ascii")
    if isinstance(payload, bytes):
        payload = payload.decode()

    unsigned_url = _strip_signature_from_url(signature, full_path)
    expected = apysigner.get_signature(private_key, unsigned_url, payload)
    # Constant-time comparison avoids leaking match position via timing.
    return constant_time_compare(signature, expected)
def function[check_signature, parameter[signature, private_key, full_path, payload]]: constant[ Checks signature received and verifies that we are able to re-create it from the private key, path, and payload given. :param signature: Signature received from request. :param private_key: Base 64, url encoded private key. :full_path: Full path of request, including GET query string (excluding host) :payload: The request.POST data if present. None if not. :returns: Boolean of whether signature matched or not. ] if call[name[isinstance], parameter[name[private_key], name[bytes]]] begin[:] variable[private_key] assign[=] call[name[private_key].decode, parameter[constant[ascii]]] if call[name[isinstance], parameter[name[payload], name[bytes]]] begin[:] variable[payload] assign[=] call[name[payload].decode, parameter[]] variable[url_to_check] assign[=] call[name[_strip_signature_from_url], parameter[name[signature], name[full_path]]] variable[computed_signature] assign[=] call[name[apysigner].get_signature, parameter[name[private_key], name[url_to_check], name[payload]]] return[call[name[constant_time_compare], parameter[name[signature], name[computed_signature]]]]
keyword[def] identifier[check_signature] ( identifier[signature] , identifier[private_key] , identifier[full_path] , identifier[payload] ): literal[string] keyword[if] identifier[isinstance] ( identifier[private_key] , identifier[bytes] ): identifier[private_key] = identifier[private_key] . identifier[decode] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[payload] , identifier[bytes] ): identifier[payload] = identifier[payload] . identifier[decode] () identifier[url_to_check] = identifier[_strip_signature_from_url] ( identifier[signature] , identifier[full_path] ) identifier[computed_signature] = identifier[apysigner] . identifier[get_signature] ( identifier[private_key] , identifier[url_to_check] , identifier[payload] ) keyword[return] identifier[constant_time_compare] ( identifier[signature] , identifier[computed_signature] )
def check_signature(signature, private_key, full_path, payload):
    """
    Verify that *signature* can be reproduced from the private key,
    request path, and payload.

    :param signature: Signature received from request.
    :param private_key: Base 64, url encoded private key.
    :param full_path: Full path of request, including GET query string
        (excluding host).
    :param payload: The request.POST data if present. None if not.
    :returns: Boolean of whether signature matched or not.
    """
    # Normalize byte inputs to text before recomputing the signature.
    key = private_key.decode('ascii') if isinstance(private_key, bytes) else private_key
    data = payload.decode() if isinstance(payload, bytes) else payload
    # Recompute over the URL with the signature parameter removed.
    stripped_url = _strip_signature_from_url(signature, full_path)
    expected = apysigner.get_signature(key, stripped_url, data)
    # Constant-time comparison avoids leaking match position via timing.
    return constant_time_compare(signature, expected)
def get(cls, key, default=None):
    """
    Resembles the :meth:`dict.get` method.

    :returns: A configuration dictionary for the provider named *key*,
        or *default* when no matching entity exists in the datastore.
    """
    # Look the provider up in the datastore by name.
    entity = cls.query(cls.provider_name == key).get()
    if not entity:
        return default

    config = entity.to_dict()
    # Use NDBOpenIDStore by default.
    config['store'] = NDBOpenIDStore
    # Convert comma-separated values to lists; currently only "scope" is csv.
    # Missing or empty values become None.
    for name in ('scope', ):
        raw = config.get(name)
        config[name] = [part.strip() for part in raw.split(',')] if raw else None
    return config
def function[get, parameter[cls, key, default]]: constant[ Resembles the :meth:`dict.get` method. :returns: A configuration dictionary for specified provider. ] variable[result] assign[=] call[call[name[cls].query, parameter[compare[name[cls].provider_name equal[==] name[key]]]].get, parameter[]] if name[result] begin[:] variable[result_dict] assign[=] call[name[result].to_dict, parameter[]] call[name[result_dict]][constant[store]] assign[=] name[NDBOpenIDStore] for taget[name[i]] in starred[tuple[[<ast.Constant object at 0x7da1b0532b60>]]] begin[:] variable[prop] assign[=] call[name[result_dict].get, parameter[name[i]]] if name[prop] begin[:] call[name[result_dict]][name[i]] assign[=] <ast.ListComp object at 0x7da1b0530e50> return[name[result_dict]]
keyword[def] identifier[get] ( identifier[cls] , identifier[key] , identifier[default] = keyword[None] ): literal[string] identifier[result] = identifier[cls] . identifier[query] ( identifier[cls] . identifier[provider_name] == identifier[key] ). identifier[get] () keyword[if] identifier[result] : identifier[result_dict] = identifier[result] . identifier[to_dict] () identifier[result_dict] [ literal[string] ]= identifier[NDBOpenIDStore] keyword[for] identifier[i] keyword[in] ( literal[string] ,): identifier[prop] = identifier[result_dict] . identifier[get] ( identifier[i] ) keyword[if] identifier[prop] : identifier[result_dict] [ identifier[i] ]=[ identifier[s] . identifier[strip] () keyword[for] identifier[s] keyword[in] identifier[prop] . identifier[split] ( literal[string] )] keyword[else] : identifier[result_dict] [ identifier[i] ]= keyword[None] keyword[return] identifier[result_dict] keyword[else] : keyword[return] identifier[default]
def get(cls, key, default=None): """ Resembles the :meth:`dict.get` method. :returns: A configuration dictionary for specified provider. """ # Query datastore. result = cls.query(cls.provider_name == key).get() if result: result_dict = result.to_dict() # Use NDBOpenIDStore by default result_dict['store'] = NDBOpenIDStore # Convert coma-separated values to list. Currently only scope is # csv. for i in ('scope',): prop = result_dict.get(i) if prop: result_dict[i] = [s.strip() for s in prop.split(',')] # depends on [control=['if'], data=[]] else: result_dict[i] = None # depends on [control=['for'], data=['i']] return result_dict # depends on [control=['if'], data=[]] else: return default
def saveOverlayToDicomCopy(input_dcmfilelist, output_dicom_dir, overlays, crinfo, orig_shape):
    """Save overlays into a copy of the given dicom files.

    :param input_dcmfilelist: dicom files to copy.
    :param output_dicom_dir: directory the copies are written to; created
        when it does not exist.
    :param overlays: dict of overlay arrays; every entry is replaced in
        place by its uncropped version before writing.
    :param crinfo: crop information passed to ``uncrop``.
    :param orig_shape: shape of the original (uncropped) data.
    """
    from . import datawriter as dwriter
    # import qmisc
    import imtools.image_manipulation

    if not os.path.exists(output_dicom_dir):
        os.makedirs(output_dicom_dir)

    # Restore every overlay to the original (uncropped) shape.
    for name, overlay in overlays.items():
        overlays[name] = imtools.image_manipulation.uncrop(overlay, crinfo, orig_shape)

    writer = dwriter.DataWriter()
    writer.DataCopyWithOverlay(input_dcmfilelist, output_dicom_dir, overlays)
def function[saveOverlayToDicomCopy, parameter[input_dcmfilelist, output_dicom_dir, overlays, crinfo, orig_shape]]: constant[ Save overlay to dicom. ] from relative_module[None] import module[datawriter] if <ast.UnaryOp object at 0x7da18dc99210> begin[:] call[name[os].makedirs, parameter[name[output_dicom_dir]]] import module[imtools.image_manipulation] for taget[name[key]] in starred[name[overlays]] begin[:] call[name[overlays]][name[key]] assign[=] call[name[imtools].image_manipulation.uncrop, parameter[call[name[overlays]][name[key]], name[crinfo], name[orig_shape]]] variable[dw] assign[=] call[name[dwriter].DataWriter, parameter[]] call[name[dw].DataCopyWithOverlay, parameter[name[input_dcmfilelist], name[output_dicom_dir], name[overlays]]]
keyword[def] identifier[saveOverlayToDicomCopy] ( identifier[input_dcmfilelist] , identifier[output_dicom_dir] , identifier[overlays] , identifier[crinfo] , identifier[orig_shape] ): literal[string] keyword[from] . keyword[import] identifier[datawriter] keyword[as] identifier[dwriter] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[output_dicom_dir] ): identifier[os] . identifier[makedirs] ( identifier[output_dicom_dir] ) keyword[import] identifier[imtools] . identifier[image_manipulation] keyword[for] identifier[key] keyword[in] identifier[overlays] : identifier[overlays] [ identifier[key] ]= identifier[imtools] . identifier[image_manipulation] . identifier[uncrop] ( identifier[overlays] [ identifier[key] ], identifier[crinfo] , identifier[orig_shape] ) identifier[dw] = identifier[dwriter] . identifier[DataWriter] () identifier[dw] . identifier[DataCopyWithOverlay] ( identifier[input_dcmfilelist] , identifier[output_dicom_dir] , identifier[overlays] )
def saveOverlayToDicomCopy(input_dcmfilelist, output_dicom_dir, overlays, crinfo, orig_shape): """ Save overlay to dicom. """ from . import datawriter as dwriter # import qmisc if not os.path.exists(output_dicom_dir): os.makedirs(output_dicom_dir) # depends on [control=['if'], data=[]] import imtools.image_manipulation # uncrop all overlays for key in overlays: overlays[key] = imtools.image_manipulation.uncrop(overlays[key], crinfo, orig_shape) # depends on [control=['for'], data=['key']] dw = dwriter.DataWriter() dw.DataCopyWithOverlay(input_dcmfilelist, output_dicom_dir, overlays)
def mission_request_list_send(self, target_system, target_component, force_mavlink1=False):
    """
    Request the overall list of mission items from the system/component.

    target_system             : System ID (uint8_t)
    target_component          : Component ID (uint8_t)
    """
    # Encode the MISSION_REQUEST_LIST message, then hand it to the
    # transport layer for sending.
    message = self.mission_request_list_encode(target_system, target_component)
    return self.send(message, force_mavlink1=force_mavlink1)
def function[mission_request_list_send, parameter[self, target_system, target_component, force_mavlink1]]: constant[ Request the overall list of mission items from the system/component. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ] return[call[name[self].send, parameter[call[name[self].mission_request_list_encode, parameter[name[target_system], name[target_component]]]]]]
keyword[def] identifier[mission_request_list_send] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[force_mavlink1] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[mission_request_list_encode] ( identifier[target_system] , identifier[target_component] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
def mission_request_list_send(self, target_system, target_component, force_mavlink1=False): """ Request the overall list of mission items from the system/component. target_system : System ID (uint8_t) target_component : Component ID (uint8_t) """ return self.send(self.mission_request_list_encode(target_system, target_component), force_mavlink1=force_mavlink1)
def open_webpage(self, url):
    """Launch the TV's web browser and open *url*.

    Launches the browser app via a SOAP request, then serves the URL to
    the TV over a short-lived local TCP socket that the TV connects back
    to using the address advertised in the X_ConnectApp request.
    """
    # Ask the TV to launch the browser app (resource_id 1063); the
    # response carries a session id needed for the connect step below.
    params = ('<X_AppType>vc_app</X_AppType>'
              '<X_LaunchKeyword>resource_id={resource_id}</X_LaunchKeyword>'
              ).format(resource_id=1063)
    res = self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL,
                            'X_LaunchApp', params, body_elem="s")
    root = ET.fromstring(res)
    el_sessionId = root.find('.//X_SessionId')

    # Set up a server socket where the URL will be served to the TV.
    server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    localip = self._get_local_ip()
    # NOTE(review): a randomly chosen port may already be in use, in which
    # case bind() raises -- confirm whether a retry loop is needed here.
    localport = random.randint(1025,65535)
    server_socket.bind((localip, localport))
    server_socket.listen(1)
    _LOGGER.debug("Listening on {}:{}".format(localip,localport))

    # Tell the TV which address to connect back to for fetching the URL.
    params = ('<X_AppType>vc_app</X_AppType>'
              '<X_SessionId>{sessionId}</X_SessionId>'
              '<X_ConnectKeyword>panasonic-viera 0.2</X_ConnectKeyword>'
              '<X_ConnectAddr>{localip}:{localport}</X_ConnectAddr>'
              ).format(sessionId=el_sessionId.text, localip=localip,
                       localport=localport)
    res = self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL,
                            'X_ConnectApp', params, body_elem="s")

    # Block until the TV connects, then send the URL as a framed packet:
    # 7 header bytes, a one-byte URL length, the URL characters, and a
    # trailing NUL (presumably the TV's expected framing -- TODO confirm).
    sockfd, addr = server_socket.accept()
    _LOGGER.debug("Client (%s, %s) connected" % addr)

    packet = bytearray([0xf4, 0x01, 0x01, 0x00, 0x00, 0x00, 0x00, len(url)])
    packet.extend(map(ord, url))
    packet.append(0x00)
    sockfd.send(packet)
    sockfd.close()
    server_socket.close()
def function[open_webpage, parameter[self, url]]: constant[Launch Web Browser and open url] variable[params] assign[=] call[constant[<X_AppType>vc_app</X_AppType><X_LaunchKeyword>resource_id={resource_id}</X_LaunchKeyword>].format, parameter[]] variable[res] assign[=] call[name[self].soap_request, parameter[name[URL_CONTROL_NRC], name[URN_REMOTE_CONTROL], constant[X_LaunchApp], name[params]]] variable[root] assign[=] call[name[ET].fromstring, parameter[name[res]]] variable[el_sessionId] assign[=] call[name[root].find, parameter[constant[.//X_SessionId]]] variable[server_socket] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_STREAM]] call[name[server_socket].setsockopt, parameter[name[socket].SOL_SOCKET, name[socket].SO_REUSEADDR, constant[1]]] variable[localip] assign[=] call[name[self]._get_local_ip, parameter[]] variable[localport] assign[=] call[name[random].randint, parameter[constant[1025], constant[65535]]] call[name[server_socket].bind, parameter[tuple[[<ast.Name object at 0x7da1b0c52020>, <ast.Name object at 0x7da1b0c52a10>]]]] call[name[server_socket].listen, parameter[constant[1]]] call[name[_LOGGER].debug, parameter[call[constant[Listening on {}:{}].format, parameter[name[localip], name[localport]]]]] variable[params] assign[=] call[constant[<X_AppType>vc_app</X_AppType><X_SessionId>{sessionId}</X_SessionId><X_ConnectKeyword>panasonic-viera 0.2</X_ConnectKeyword><X_ConnectAddr>{localip}:{localport}</X_ConnectAddr>].format, parameter[]] variable[res] assign[=] call[name[self].soap_request, parameter[name[URL_CONTROL_NRC], name[URN_REMOTE_CONTROL], constant[X_ConnectApp], name[params]]] <ast.Tuple object at 0x7da1b0c531f0> assign[=] call[name[server_socket].accept, parameter[]] call[name[_LOGGER].debug, parameter[binary_operation[constant[Client (%s, %s) connected] <ast.Mod object at 0x7da2590d6920> name[addr]]]] variable[packet] assign[=] call[name[bytearray], parameter[list[[<ast.Constant object at 0x7da1b0c50850>, 
<ast.Constant object at 0x7da1b0c500d0>, <ast.Constant object at 0x7da1b0c50b20>, <ast.Constant object at 0x7da1b0c51360>, <ast.Constant object at 0x7da1b0c510f0>, <ast.Constant object at 0x7da1b0c51d80>, <ast.Constant object at 0x7da1b0c51090>, <ast.Call object at 0x7da1b0c50d30>]]]] call[name[packet].extend, parameter[call[name[map], parameter[name[ord], name[url]]]]] call[name[packet].append, parameter[constant[0]]] call[name[sockfd].send, parameter[name[packet]]] call[name[sockfd].close, parameter[]] call[name[server_socket].close, parameter[]]
keyword[def] identifier[open_webpage] ( identifier[self] , identifier[url] ): literal[string] identifier[params] =( literal[string] literal[string] ). identifier[format] ( identifier[resource_id] = literal[int] ) identifier[res] = identifier[self] . identifier[soap_request] ( identifier[URL_CONTROL_NRC] , identifier[URN_REMOTE_CONTROL] , literal[string] , identifier[params] , identifier[body_elem] = literal[string] ) identifier[root] = identifier[ET] . identifier[fromstring] ( identifier[res] ) identifier[el_sessionId] = identifier[root] . identifier[find] ( literal[string] ) identifier[server_socket] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_STREAM] ) identifier[server_socket] . identifier[setsockopt] ( identifier[socket] . identifier[SOL_SOCKET] , identifier[socket] . identifier[SO_REUSEADDR] , literal[int] ) identifier[localip] = identifier[self] . identifier[_get_local_ip] () identifier[localport] = identifier[random] . identifier[randint] ( literal[int] , literal[int] ) identifier[server_socket] . identifier[bind] (( identifier[localip] , identifier[localport] )) identifier[server_socket] . identifier[listen] ( literal[int] ) identifier[_LOGGER] . identifier[debug] ( literal[string] . identifier[format] ( identifier[localip] , identifier[localport] )) identifier[params] =( literal[string] literal[string] literal[string] literal[string] ). identifier[format] ( identifier[sessionId] = identifier[el_sessionId] . identifier[text] , identifier[localip] = identifier[localip] , identifier[localport] = identifier[localport] ) identifier[res] = identifier[self] . identifier[soap_request] ( identifier[URL_CONTROL_NRC] , identifier[URN_REMOTE_CONTROL] , literal[string] , identifier[params] , identifier[body_elem] = literal[string] ) identifier[sockfd] , identifier[addr] = identifier[server_socket] . identifier[accept] () identifier[_LOGGER] . 
identifier[debug] ( literal[string] % identifier[addr] ) identifier[packet] = identifier[bytearray] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , identifier[len] ( identifier[url] )]) identifier[packet] . identifier[extend] ( identifier[map] ( identifier[ord] , identifier[url] )) identifier[packet] . identifier[append] ( literal[int] ) identifier[sockfd] . identifier[send] ( identifier[packet] ) identifier[sockfd] . identifier[close] () identifier[server_socket] . identifier[close] ()
def open_webpage(self, url): """Launch Web Browser and open url""" params = '<X_AppType>vc_app</X_AppType><X_LaunchKeyword>resource_id={resource_id}</X_LaunchKeyword>'.format(resource_id=1063) res = self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL, 'X_LaunchApp', params, body_elem='s') root = ET.fromstring(res) el_sessionId = root.find('.//X_SessionId') #setup a server socket where URL will be served server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) localip = self._get_local_ip() localport = random.randint(1025, 65535) server_socket.bind((localip, localport)) server_socket.listen(1) _LOGGER.debug('Listening on {}:{}'.format(localip, localport)) params = '<X_AppType>vc_app</X_AppType><X_SessionId>{sessionId}</X_SessionId><X_ConnectKeyword>panasonic-viera 0.2</X_ConnectKeyword><X_ConnectAddr>{localip}:{localport}</X_ConnectAddr>'.format(sessionId=el_sessionId.text, localip=localip, localport=localport) res = self.soap_request(URL_CONTROL_NRC, URN_REMOTE_CONTROL, 'X_ConnectApp', params, body_elem='s') (sockfd, addr) = server_socket.accept() _LOGGER.debug('Client (%s, %s) connected' % addr) packet = bytearray([244, 1, 1, 0, 0, 0, 0, len(url)]) packet.extend(map(ord, url)) packet.append(0) sockfd.send(packet) sockfd.close() server_socket.close()
def save(self, form, name, composite_form, commit):
    """
    Called by :meth:`django_superform.forms.SuperModelForm.save` in order
    to save the modelform that this field takes care of. Delegates to the
    nested form's ``save()`` method, but only when
    :meth:`~django_superform.fields.ModelFormField.shall_save` returns
    ``True``; otherwise nothing is saved and ``None`` is returned.
    """
    # Guard clause: skip saving entirely when the field opts out.
    if not self.shall_save(form, name, composite_form):
        return None
    return composite_form.save(commit=commit)
def function[save, parameter[self, form, name, composite_form, commit]]: constant[ This method is called by :meth:`django_superform.forms.SuperModelForm.save` in order to save the modelform that this field takes care of and calls on the nested form's ``save()`` method. But only if :meth:`~django_superform.fields.ModelFormField.shall_save` returns ``True``. ] if call[name[self].shall_save, parameter[name[form], name[name], name[composite_form]]] begin[:] return[call[name[composite_form].save, parameter[]]] return[constant[None]]
keyword[def] identifier[save] ( identifier[self] , identifier[form] , identifier[name] , identifier[composite_form] , identifier[commit] ): literal[string] keyword[if] identifier[self] . identifier[shall_save] ( identifier[form] , identifier[name] , identifier[composite_form] ): keyword[return] identifier[composite_form] . identifier[save] ( identifier[commit] = identifier[commit] ) keyword[return] keyword[None]
def save(self, form, name, composite_form, commit): """ This method is called by :meth:`django_superform.forms.SuperModelForm.save` in order to save the modelform that this field takes care of and calls on the nested form's ``save()`` method. But only if :meth:`~django_superform.fields.ModelFormField.shall_save` returns ``True``. """ if self.shall_save(form, name, composite_form): return composite_form.save(commit=commit) # depends on [control=['if'], data=[]] return None
def _StartProfiling(self, configuration):
    """Starts profiling.

    Creates and starts one profiler per enabled profiling option; every
    profiler instance is kept on the object so it can be stopped later.

    Args:
      configuration (ProfilingConfiguration): profiling configuration.
    """
    if not configuration:
        return

    if configuration.HaveProfileMemoryGuppy():
        profiler = profilers.GuppyMemoryProfiler(self._name, configuration)
        self._guppy_memory_profiler = profiler
        profiler.Start()

    if configuration.HaveProfileMemory():
        profiler = profilers.MemoryProfiler(self._name, configuration)
        self._memory_profiler = profiler
        profiler.Start()

    if configuration.HaveProfileProcessing():
        # Processing profiles are named after the owner with a suffix.
        profiler_name = '{0:s}-processing'.format(self._name)
        profiler = profilers.ProcessingProfiler(profiler_name, configuration)
        self._processing_profiler = profiler
        profiler.Start()

    if configuration.HaveProfileSerializers():
        profiler_name = '{0:s}-serializers'.format(self._name)
        profiler = profilers.SerializersProfiler(profiler_name, configuration)
        self._serializers_profiler = profiler
        profiler.Start()

    if configuration.HaveProfileStorage():
        profiler = profilers.StorageProfiler(self._name, configuration)
        self._storage_profiler = profiler
        profiler.Start()

    if configuration.HaveProfileTasks():
        profiler = profilers.TasksProfiler(self._name, configuration)
        self._tasks_profiler = profiler
        profiler.Start()
def function[_StartProfiling, parameter[self, configuration]]: constant[Starts profiling. Args: configuration (ProfilingConfiguration): profiling configuration. ] if <ast.UnaryOp object at 0x7da18bc70580> begin[:] return[None] if call[name[configuration].HaveProfileMemoryGuppy, parameter[]] begin[:] name[self]._guppy_memory_profiler assign[=] call[name[profilers].GuppyMemoryProfiler, parameter[name[self]._name, name[configuration]]] call[name[self]._guppy_memory_profiler.Start, parameter[]] if call[name[configuration].HaveProfileMemory, parameter[]] begin[:] name[self]._memory_profiler assign[=] call[name[profilers].MemoryProfiler, parameter[name[self]._name, name[configuration]]] call[name[self]._memory_profiler.Start, parameter[]] if call[name[configuration].HaveProfileProcessing, parameter[]] begin[:] variable[identifier] assign[=] call[constant[{0:s}-processing].format, parameter[name[self]._name]] name[self]._processing_profiler assign[=] call[name[profilers].ProcessingProfiler, parameter[name[identifier], name[configuration]]] call[name[self]._processing_profiler.Start, parameter[]] if call[name[configuration].HaveProfileSerializers, parameter[]] begin[:] variable[identifier] assign[=] call[constant[{0:s}-serializers].format, parameter[name[self]._name]] name[self]._serializers_profiler assign[=] call[name[profilers].SerializersProfiler, parameter[name[identifier], name[configuration]]] call[name[self]._serializers_profiler.Start, parameter[]] if call[name[configuration].HaveProfileStorage, parameter[]] begin[:] name[self]._storage_profiler assign[=] call[name[profilers].StorageProfiler, parameter[name[self]._name, name[configuration]]] call[name[self]._storage_profiler.Start, parameter[]] if call[name[configuration].HaveProfileTasks, parameter[]] begin[:] name[self]._tasks_profiler assign[=] call[name[profilers].TasksProfiler, parameter[name[self]._name, name[configuration]]] call[name[self]._tasks_profiler.Start, parameter[]]
keyword[def] identifier[_StartProfiling] ( identifier[self] , identifier[configuration] ): literal[string] keyword[if] keyword[not] identifier[configuration] : keyword[return] keyword[if] identifier[configuration] . identifier[HaveProfileMemoryGuppy] (): identifier[self] . identifier[_guppy_memory_profiler] = identifier[profilers] . identifier[GuppyMemoryProfiler] ( identifier[self] . identifier[_name] , identifier[configuration] ) identifier[self] . identifier[_guppy_memory_profiler] . identifier[Start] () keyword[if] identifier[configuration] . identifier[HaveProfileMemory] (): identifier[self] . identifier[_memory_profiler] = identifier[profilers] . identifier[MemoryProfiler] ( identifier[self] . identifier[_name] , identifier[configuration] ) identifier[self] . identifier[_memory_profiler] . identifier[Start] () keyword[if] identifier[configuration] . identifier[HaveProfileProcessing] (): identifier[identifier] = literal[string] . identifier[format] ( identifier[self] . identifier[_name] ) identifier[self] . identifier[_processing_profiler] = identifier[profilers] . identifier[ProcessingProfiler] ( identifier[identifier] , identifier[configuration] ) identifier[self] . identifier[_processing_profiler] . identifier[Start] () keyword[if] identifier[configuration] . identifier[HaveProfileSerializers] (): identifier[identifier] = literal[string] . identifier[format] ( identifier[self] . identifier[_name] ) identifier[self] . identifier[_serializers_profiler] = identifier[profilers] . identifier[SerializersProfiler] ( identifier[identifier] , identifier[configuration] ) identifier[self] . identifier[_serializers_profiler] . identifier[Start] () keyword[if] identifier[configuration] . identifier[HaveProfileStorage] (): identifier[self] . identifier[_storage_profiler] = identifier[profilers] . identifier[StorageProfiler] ( identifier[self] . identifier[_name] , identifier[configuration] ) identifier[self] . identifier[_storage_profiler] . 
identifier[Start] () keyword[if] identifier[configuration] . identifier[HaveProfileTasks] (): identifier[self] . identifier[_tasks_profiler] = identifier[profilers] . identifier[TasksProfiler] ( identifier[self] . identifier[_name] , identifier[configuration] ) identifier[self] . identifier[_tasks_profiler] . identifier[Start] ()
def _StartProfiling(self, configuration): """Starts profiling. Args: configuration (ProfilingConfiguration): profiling configuration. """ if not configuration: return # depends on [control=['if'], data=[]] if configuration.HaveProfileMemoryGuppy(): self._guppy_memory_profiler = profilers.GuppyMemoryProfiler(self._name, configuration) self._guppy_memory_profiler.Start() # depends on [control=['if'], data=[]] if configuration.HaveProfileMemory(): self._memory_profiler = profilers.MemoryProfiler(self._name, configuration) self._memory_profiler.Start() # depends on [control=['if'], data=[]] if configuration.HaveProfileProcessing(): identifier = '{0:s}-processing'.format(self._name) self._processing_profiler = profilers.ProcessingProfiler(identifier, configuration) self._processing_profiler.Start() # depends on [control=['if'], data=[]] if configuration.HaveProfileSerializers(): identifier = '{0:s}-serializers'.format(self._name) self._serializers_profiler = profilers.SerializersProfiler(identifier, configuration) self._serializers_profiler.Start() # depends on [control=['if'], data=[]] if configuration.HaveProfileStorage(): self._storage_profiler = profilers.StorageProfiler(self._name, configuration) self._storage_profiler.Start() # depends on [control=['if'], data=[]] if configuration.HaveProfileTasks(): self._tasks_profiler = profilers.TasksProfiler(self._name, configuration) self._tasks_profiler.Start() # depends on [control=['if'], data=[]]
def exec(self, *command_tokens, command_context=None, **command_env):
    """ Execute command

    :param command_tokens: command tokens to execute
    :param command_context: command context
    :param command_env: command environment
    :return: WCommandResultProto
    """
    # Refuse to run when the adapter does not match this context; build a
    # readable error showing the command and the (reversed) context spec.
    if self.adapter().match(command_context, **command_env) is False:
        command_str = WCommandProto.join_tokens(*command_tokens)
        spec = self.adapter().specification()
        if spec is not None:
            names = [entry.context_name() for entry in spec]
            names.reverse()
            spec = ','.join(names)
        raise RuntimeError('Command mismatch: %s (context: %s)' % (command_str, spec))
    # Let the adapter rewrite the tokens, then delegate to the wrapped command.
    adapted_tokens = self.adapter().adapt(*command_tokens, command_context=command_context, **command_env)
    return self.original_command().exec(*adapted_tokens, command_context=command_context, **command_env)
def function[exec, parameter[self]]: constant[ Execute command :param command_tokens: command tokens to execute :param command_context: command context :param command_env: command environment :return: WCommandResultProto ] if compare[call[call[name[self].adapter, parameter[]].match, parameter[name[command_context]]] is constant[False]] begin[:] variable[cmd] assign[=] call[name[WCommandProto].join_tokens, parameter[<ast.Starred object at 0x7da2044c0fa0>]] variable[spec] assign[=] call[call[name[self].adapter, parameter[]].specification, parameter[]] if compare[name[spec] is_not constant[None]] begin[:] variable[spec] assign[=] <ast.ListComp object at 0x7da2044c1de0> call[name[spec].reverse, parameter[]] variable[spec] assign[=] call[constant[,].join, parameter[name[spec]]] <ast.Raise object at 0x7da2044c2680> variable[command_tokens] assign[=] call[call[name[self].adapter, parameter[]].adapt, parameter[<ast.Starred object at 0x7da2044c2650>]] return[call[call[name[self].original_command, parameter[]].exec, parameter[<ast.Starred object at 0x7da2044c1930>]]]
keyword[def] identifier[exec] ( identifier[self] ,* identifier[command_tokens] , identifier[command_context] = keyword[None] ,** identifier[command_env] ): literal[string] keyword[if] identifier[self] . identifier[adapter] (). identifier[match] ( identifier[command_context] ,** identifier[command_env] ) keyword[is] keyword[False] : identifier[cmd] = identifier[WCommandProto] . identifier[join_tokens] (* identifier[command_tokens] ) identifier[spec] = identifier[self] . identifier[adapter] (). identifier[specification] () keyword[if] identifier[spec] keyword[is] keyword[not] keyword[None] : identifier[spec] =[ identifier[x] . identifier[context_name] () keyword[for] identifier[x] keyword[in] identifier[spec] ] identifier[spec] . identifier[reverse] () identifier[spec] = literal[string] . identifier[join] ( identifier[spec] ) keyword[raise] identifier[RuntimeError] ( literal[string] %( identifier[cmd] , identifier[spec] )) identifier[command_tokens] = identifier[self] . identifier[adapter] (). identifier[adapt] (* identifier[command_tokens] , identifier[command_context] = identifier[command_context] ,** identifier[command_env] ) keyword[return] identifier[self] . identifier[original_command] (). identifier[exec] (* identifier[command_tokens] , identifier[command_context] = identifier[command_context] ,** identifier[command_env] )
def exec(self, *command_tokens, command_context=None, **command_env): """ Execute command :param command_tokens: command tokens to execute :param command_context: command context :param command_env: command environment :return: WCommandResultProto """ if self.adapter().match(command_context, **command_env) is False: cmd = WCommandProto.join_tokens(*command_tokens) spec = self.adapter().specification() if spec is not None: spec = [x.context_name() for x in spec] spec.reverse() spec = ','.join(spec) # depends on [control=['if'], data=['spec']] raise RuntimeError('Command mismatch: %s (context: %s)' % (cmd, spec)) # depends on [control=['if'], data=[]] command_tokens = self.adapter().adapt(*command_tokens, command_context=command_context, **command_env) return self.original_command().exec(*command_tokens, command_context=command_context, **command_env)
def get_user(self, id, **kwargs):  # noqa: E501
    """Retrieves a user by identifier (email addr)  # noqa: E501

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_user(id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str id: (required)
    :return: UserModel
             If the method is called asynchronously,
             returns the request thread.
    """
    # This wrapper always wants only the deserialized data, never the
    # full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # The *_with_http_info variant already returns a thread when
    # async_req is set and the data otherwise, so a single call covers
    # both the synchronous and asynchronous paths.
    return self.get_user_with_http_info(id, **kwargs)  # noqa: E501
def function[get_user, parameter[self, id]]: constant[Retrieves a user by identifier (email addr) # noqa: E501 # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.get_user(id, async_req=True) >>> result = thread.get() :param async_req bool :param str id: (required) :return: UserModel If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].get_user_with_http_info, parameter[name[id]]]]
keyword[def] identifier[get_user] ( identifier[self] , identifier[id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[get_user_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[get_user_with_http_info] ( identifier[id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def get_user(self, id, **kwargs): # noqa: E501 'Retrieves a user by identifier (email addr) # noqa: E501\n\n # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.get_user(id, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str id: (required)\n :return: UserModel\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.get_user_with_http_info(id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.get_user_with_http_info(id, **kwargs) # noqa: E501 return data
def object_upload(self, bucket, key, content, content_type):
    """Writes text content to the object.

    Args:
      bucket: the name of the bucket containing the object.
      key: the key of the object to be written.
      content: the text content to be written.
      content_type: the type of text content.
    Raises:
      Exception if the object could not be written to.
    """
    # Media upload goes to the upload endpoint; the object name is passed
    # as a query argument rather than in the path.
    upload_url = Api._UPLOAD_ENDPOINT + Api._OBJECT_PATH % (bucket, '')
    query_args = {'uploadType': 'media', 'name': key}
    request_headers = {'Content-Type': content_type}
    return google.datalab.utils.Http.request(
        upload_url, args=query_args, data=content, headers=request_headers,
        credentials=self._credentials, raw_response=True)
def function[object_upload, parameter[self, bucket, key, content, content_type]]: constant[Writes text content to the object. Args: bucket: the name of the bucket containing the object. key: the key of the object to be written. content: the text content to be written. content_type: the type of text content. Raises: Exception if the object could not be written to. ] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da18c4cd900>, <ast.Constant object at 0x7da18c4cfb80>], [<ast.Constant object at 0x7da18c4ce4d0>, <ast.Name object at 0x7da18c4cf550>]] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da18c4ccf40>], [<ast.Name object at 0x7da18c4cf6d0>]] variable[url] assign[=] binary_operation[name[Api]._UPLOAD_ENDPOINT + binary_operation[name[Api]._OBJECT_PATH <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18c4cc4c0>, <ast.Constant object at 0x7da18c4cdcf0>]]]] return[call[name[google].datalab.utils.Http.request, parameter[name[url]]]]
keyword[def] identifier[object_upload] ( identifier[self] , identifier[bucket] , identifier[key] , identifier[content] , identifier[content_type] ): literal[string] identifier[args] ={ literal[string] : literal[string] , literal[string] : identifier[key] } identifier[headers] ={ literal[string] : identifier[content_type] } identifier[url] = identifier[Api] . identifier[_UPLOAD_ENDPOINT] +( identifier[Api] . identifier[_OBJECT_PATH] %( identifier[bucket] , literal[string] )) keyword[return] identifier[google] . identifier[datalab] . identifier[utils] . identifier[Http] . identifier[request] ( identifier[url] , identifier[args] = identifier[args] , identifier[data] = identifier[content] , identifier[headers] = identifier[headers] , identifier[credentials] = identifier[self] . identifier[_credentials] , identifier[raw_response] = keyword[True] )
def object_upload(self, bucket, key, content, content_type): """Writes text content to the object. Args: bucket: the name of the bucket containing the object. key: the key of the object to be written. content: the text content to be written. content_type: the type of text content. Raises: Exception if the object could not be written to. """ args = {'uploadType': 'media', 'name': key} headers = {'Content-Type': content_type} url = Api._UPLOAD_ENDPOINT + Api._OBJECT_PATH % (bucket, '') return google.datalab.utils.Http.request(url, args=args, data=content, headers=headers, credentials=self._credentials, raw_response=True)
def scaleToBoxParam(quad, shape): ''' so, you have a [quad] ((x0,y0),,,) inside a box [shape](width,height) this function gives you center [x,y], and scale factor [x,y] you would need to apply to [quad] in order to scale it to the same shape as the box !quad corners needs to be sorted like in example ''' #get edge middle points x0 = 0.5*(quad[0][0]+quad[1][0]) x1 = 0.5*(quad[2][0]+quad[3][0]) y0 = 0.5*(quad[0][1]+quad[3][1]) y1 = 0.5*(quad[1][1]+quad[2][1]) cx = (x0*shape[0]) / (x0-x1+shape[0]) cy = (y0*shape[1]) / (y0-y1+shape[1]) fx = abs(cx/(x0-cx)) fy = abs(cy/(y0-cy)) return (fx,fy), (cx,cy)
def function[scaleToBoxParam, parameter[quad, shape]]: constant[ so, you have a [quad] ((x0,y0),,,) inside a box [shape](width,height) this function gives you center [x,y], and scale factor [x,y] you would need to apply to [quad] in order to scale it to the same shape as the box !quad corners needs to be sorted like in example ] variable[x0] assign[=] binary_operation[constant[0.5] * binary_operation[call[call[name[quad]][constant[0]]][constant[0]] + call[call[name[quad]][constant[1]]][constant[0]]]] variable[x1] assign[=] binary_operation[constant[0.5] * binary_operation[call[call[name[quad]][constant[2]]][constant[0]] + call[call[name[quad]][constant[3]]][constant[0]]]] variable[y0] assign[=] binary_operation[constant[0.5] * binary_operation[call[call[name[quad]][constant[0]]][constant[1]] + call[call[name[quad]][constant[3]]][constant[1]]]] variable[y1] assign[=] binary_operation[constant[0.5] * binary_operation[call[call[name[quad]][constant[1]]][constant[1]] + call[call[name[quad]][constant[2]]][constant[1]]]] variable[cx] assign[=] binary_operation[binary_operation[name[x0] * call[name[shape]][constant[0]]] / binary_operation[binary_operation[name[x0] - name[x1]] + call[name[shape]][constant[0]]]] variable[cy] assign[=] binary_operation[binary_operation[name[y0] * call[name[shape]][constant[1]]] / binary_operation[binary_operation[name[y0] - name[y1]] + call[name[shape]][constant[1]]]] variable[fx] assign[=] call[name[abs], parameter[binary_operation[name[cx] / binary_operation[name[x0] - name[cx]]]]] variable[fy] assign[=] call[name[abs], parameter[binary_operation[name[cy] / binary_operation[name[y0] - name[cy]]]]] return[tuple[[<ast.Tuple object at 0x7da18f00ec20>, <ast.Tuple object at 0x7da18f00cfa0>]]]
keyword[def] identifier[scaleToBoxParam] ( identifier[quad] , identifier[shape] ): literal[string] identifier[x0] = literal[int] *( identifier[quad] [ literal[int] ][ literal[int] ]+ identifier[quad] [ literal[int] ][ literal[int] ]) identifier[x1] = literal[int] *( identifier[quad] [ literal[int] ][ literal[int] ]+ identifier[quad] [ literal[int] ][ literal[int] ]) identifier[y0] = literal[int] *( identifier[quad] [ literal[int] ][ literal[int] ]+ identifier[quad] [ literal[int] ][ literal[int] ]) identifier[y1] = literal[int] *( identifier[quad] [ literal[int] ][ literal[int] ]+ identifier[quad] [ literal[int] ][ literal[int] ]) identifier[cx] =( identifier[x0] * identifier[shape] [ literal[int] ])/( identifier[x0] - identifier[x1] + identifier[shape] [ literal[int] ]) identifier[cy] =( identifier[y0] * identifier[shape] [ literal[int] ])/( identifier[y0] - identifier[y1] + identifier[shape] [ literal[int] ]) identifier[fx] = identifier[abs] ( identifier[cx] /( identifier[x0] - identifier[cx] )) identifier[fy] = identifier[abs] ( identifier[cy] /( identifier[y0] - identifier[cy] )) keyword[return] ( identifier[fx] , identifier[fy] ),( identifier[cx] , identifier[cy] )
def scaleToBoxParam(quad, shape): """ so, you have a [quad] ((x0,y0),,,) inside a box [shape](width,height) this function gives you center [x,y], and scale factor [x,y] you would need to apply to [quad] in order to scale it to the same shape as the box !quad corners needs to be sorted like in example """ #get edge middle points x0 = 0.5 * (quad[0][0] + quad[1][0]) x1 = 0.5 * (quad[2][0] + quad[3][0]) y0 = 0.5 * (quad[0][1] + quad[3][1]) y1 = 0.5 * (quad[1][1] + quad[2][1]) cx = x0 * shape[0] / (x0 - x1 + shape[0]) cy = y0 * shape[1] / (y0 - y1 + shape[1]) fx = abs(cx / (x0 - cx)) fy = abs(cy / (y0 - cy)) return ((fx, fy), (cx, cy))
def linkify_one_command_with_commands(self, commands, prop): """ Link a command :param commands: object commands :type commands: object :param prop: property name :type prop: str :return: None """ if not hasattr(self, prop): return command = getattr(self, prop).strip() if not command: setattr(self, prop, None) return data = {"commands": commands, "call": command} if hasattr(self, 'poller_tag'): data.update({"poller_tag": self.poller_tag}) if hasattr(self, 'reactionner_tag'): data.update({"reactionner_tag": self.reactionner_tag}) setattr(self, prop, CommandCall(data))
def function[linkify_one_command_with_commands, parameter[self, commands, prop]]: constant[ Link a command :param commands: object commands :type commands: object :param prop: property name :type prop: str :return: None ] if <ast.UnaryOp object at 0x7da18fe91180> begin[:] return[None] variable[command] assign[=] call[call[name[getattr], parameter[name[self], name[prop]]].strip, parameter[]] if <ast.UnaryOp object at 0x7da18fe93b80> begin[:] call[name[setattr], parameter[name[self], name[prop], constant[None]]] return[None] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da18fe92920>, <ast.Constant object at 0x7da18fe92110>], [<ast.Name object at 0x7da18fe90f40>, <ast.Name object at 0x7da18fe907c0>]] if call[name[hasattr], parameter[name[self], constant[poller_tag]]] begin[:] call[name[data].update, parameter[dictionary[[<ast.Constant object at 0x7da18fe931f0>], [<ast.Attribute object at 0x7da18fe920e0>]]]] if call[name[hasattr], parameter[name[self], constant[reactionner_tag]]] begin[:] call[name[data].update, parameter[dictionary[[<ast.Constant object at 0x7da1b26ac580>], [<ast.Attribute object at 0x7da1b26ae200>]]]] call[name[setattr], parameter[name[self], name[prop], call[name[CommandCall], parameter[name[data]]]]]
keyword[def] identifier[linkify_one_command_with_commands] ( identifier[self] , identifier[commands] , identifier[prop] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , identifier[prop] ): keyword[return] identifier[command] = identifier[getattr] ( identifier[self] , identifier[prop] ). identifier[strip] () keyword[if] keyword[not] identifier[command] : identifier[setattr] ( identifier[self] , identifier[prop] , keyword[None] ) keyword[return] identifier[data] ={ literal[string] : identifier[commands] , literal[string] : identifier[command] } keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[data] . identifier[update] ({ literal[string] : identifier[self] . identifier[poller_tag] }) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[data] . identifier[update] ({ literal[string] : identifier[self] . identifier[reactionner_tag] }) identifier[setattr] ( identifier[self] , identifier[prop] , identifier[CommandCall] ( identifier[data] ))
def linkify_one_command_with_commands(self, commands, prop): """ Link a command :param commands: object commands :type commands: object :param prop: property name :type prop: str :return: None """ if not hasattr(self, prop): return # depends on [control=['if'], data=[]] command = getattr(self, prop).strip() if not command: setattr(self, prop, None) return # depends on [control=['if'], data=[]] data = {'commands': commands, 'call': command} if hasattr(self, 'poller_tag'): data.update({'poller_tag': self.poller_tag}) # depends on [control=['if'], data=[]] if hasattr(self, 'reactionner_tag'): data.update({'reactionner_tag': self.reactionner_tag}) # depends on [control=['if'], data=[]] setattr(self, prop, CommandCall(data))
def reduce_by_flightmodes(self, flightmode_selections): '''reduce data using flightmode selections''' if len(flightmode_selections) == 0: return all_false = True for s in flightmode_selections: if s: all_false = False if all_false: # treat all false as all modes wanted''' return new_msgs = [] idx = 0 for m in self._msgs: while idx < len(self._flightmodes) and m._timestamp >= self._flightmodes[idx][2]: idx += 1 if idx < len(flightmode_selections) and flightmode_selections[idx]: new_msgs.append(m) self._msgs = new_msgs self._count = len(new_msgs) self.rewind()
def function[reduce_by_flightmodes, parameter[self, flightmode_selections]]: constant[reduce data using flightmode selections] if compare[call[name[len], parameter[name[flightmode_selections]]] equal[==] constant[0]] begin[:] return[None] variable[all_false] assign[=] constant[True] for taget[name[s]] in starred[name[flightmode_selections]] begin[:] if name[s] begin[:] variable[all_false] assign[=] constant[False] if name[all_false] begin[:] return[None] variable[new_msgs] assign[=] list[[]] variable[idx] assign[=] constant[0] for taget[name[m]] in starred[name[self]._msgs] begin[:] while <ast.BoolOp object at 0x7da20c76fee0> begin[:] <ast.AugAssign object at 0x7da20c76c0d0> if <ast.BoolOp object at 0x7da1b2347be0> begin[:] call[name[new_msgs].append, parameter[name[m]]] name[self]._msgs assign[=] name[new_msgs] name[self]._count assign[=] call[name[len], parameter[name[new_msgs]]] call[name[self].rewind, parameter[]]
keyword[def] identifier[reduce_by_flightmodes] ( identifier[self] , identifier[flightmode_selections] ): literal[string] keyword[if] identifier[len] ( identifier[flightmode_selections] )== literal[int] : keyword[return] identifier[all_false] = keyword[True] keyword[for] identifier[s] keyword[in] identifier[flightmode_selections] : keyword[if] identifier[s] : identifier[all_false] = keyword[False] keyword[if] identifier[all_false] : keyword[return] identifier[new_msgs] =[] identifier[idx] = literal[int] keyword[for] identifier[m] keyword[in] identifier[self] . identifier[_msgs] : keyword[while] identifier[idx] < identifier[len] ( identifier[self] . identifier[_flightmodes] ) keyword[and] identifier[m] . identifier[_timestamp] >= identifier[self] . identifier[_flightmodes] [ identifier[idx] ][ literal[int] ]: identifier[idx] += literal[int] keyword[if] identifier[idx] < identifier[len] ( identifier[flightmode_selections] ) keyword[and] identifier[flightmode_selections] [ identifier[idx] ]: identifier[new_msgs] . identifier[append] ( identifier[m] ) identifier[self] . identifier[_msgs] = identifier[new_msgs] identifier[self] . identifier[_count] = identifier[len] ( identifier[new_msgs] ) identifier[self] . identifier[rewind] ()
def reduce_by_flightmodes(self, flightmode_selections): """reduce data using flightmode selections""" if len(flightmode_selections) == 0: return # depends on [control=['if'], data=[]] all_false = True for s in flightmode_selections: if s: all_false = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['s']] if all_false: # treat all false as all modes wanted''' return # depends on [control=['if'], data=[]] new_msgs = [] idx = 0 for m in self._msgs: while idx < len(self._flightmodes) and m._timestamp >= self._flightmodes[idx][2]: idx += 1 # depends on [control=['while'], data=[]] if idx < len(flightmode_selections) and flightmode_selections[idx]: new_msgs.append(m) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] self._msgs = new_msgs self._count = len(new_msgs) self.rewind()