code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def dbname(self, value):
    """Update the database-name property of this connection.

    The new name is mirrored into the backing connection XML element's
    ``dbname`` attribute and stored on the object.

    Args:
        value: New database name. String.

    Returns:
        None.
    """
    self._connectionXML.set('dbname', value)
    self._dbname = value
def function[dbname, parameter[self, value]]: constant[ Set the connection's database name property. Args: value: New name of the database. String. Returns: Nothing. ] name[self]._dbname assign[=] name[value] call[name[self]._connectionXML.set, parameter[constant[dbname], name[value]]]
keyword[def] identifier[dbname] ( identifier[self] , identifier[value] ): literal[string] identifier[self] . identifier[_dbname] = identifier[value] identifier[self] . identifier[_connectionXML] . identifier[set] ( literal[string] , identifier[value] )
def dbname(self, value): """ Set the connection's database name property. Args: value: New name of the database. String. Returns: Nothing. """ self._dbname = value self._connectionXML.set('dbname', value)
def get(cls, group_id, db_session=None):
    """Fetch a single row by its primary key.

    An object already present in the session is reused when available.

    :param group_id: primary key of the row to fetch
    :param db_session: optional explicit session; resolved via
        ``get_db_session`` when omitted
    :return: the matching model instance, or ``None`` when not found
    """
    session = get_db_session(db_session)
    query = session.query(cls.model)
    return query.get(group_id)
def function[get, parameter[cls, group_id, db_session]]: constant[ Fetch row using primary key - will use existing object in session if already present :param group_id: :param db_session: :return: ] variable[db_session] assign[=] call[name[get_db_session], parameter[name[db_session]]] return[call[call[name[db_session].query, parameter[name[cls].model]].get, parameter[name[group_id]]]]
keyword[def] identifier[get] ( identifier[cls] , identifier[group_id] , identifier[db_session] = keyword[None] ): literal[string] identifier[db_session] = identifier[get_db_session] ( identifier[db_session] ) keyword[return] identifier[db_session] . identifier[query] ( identifier[cls] . identifier[model] ). identifier[get] ( identifier[group_id] )
def get(cls, group_id, db_session=None): """ Fetch row using primary key - will use existing object in session if already present :param group_id: :param db_session: :return: """ db_session = get_db_session(db_session) return db_session.query(cls.model).get(group_id)
def get_vulnerability_functions_04(fname):
    """
    Parse the vulnerability model in NRML 0.4 format.

    :param fname: path of the vulnerability file
    :returns: a dictionary imt, taxonomy -> vulnerability function + vset
    """
    # attributes collected over every vulnerability set; sets collapse
    # duplicates across the file
    categories = dict(assetCategory=set(), lossCategory=set(),
                      vulnerabilitySetID=set())
    imts = set()
    taxonomies = set()
    vf_dict = {}  # imt, taxonomy -> vulnerability function
    for vset in nrml.read(fname).vulnerabilityModel:
        categories['assetCategory'].add(vset['assetCategory'])
        categories['lossCategory'].add(vset['lossCategory'])
        categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID'])
        IML = vset.IML
        imt_str = IML['IMT']
        # NOTE(review): `~node` is the nrml Node "extract values" operator —
        # presumably yields the node's text as a list of floats; confirm
        # against the nrml module documentation.
        imls = ~IML
        imts.add(imt_str)
        for vfun in vset.getnodes('discreteVulnerability'):
            taxonomy = vfun['vulnerabilityFunctionID']
            if taxonomy in taxonomies:
                # taxonomies must be unique across *all* vulnerability sets,
                # not just within one
                raise InvalidFile(
                    'Duplicated vulnerabilityFunctionID: %s: %s, line %d' %
                    (taxonomy, fname, vfun.lineno))
            taxonomies.add(taxonomy)
            with context(fname, vfun):
                loss_ratios = ~vfun.lossRatio
                coefficients = ~vfun.coefficientsVariation
            # both arrays must align element-wise with the intensity levels
            if len(loss_ratios) != len(imls):
                raise InvalidFile(
                    'There are %d loss ratios, but %d imls: %s, line %d' %
                    (len(loss_ratios), len(imls), fname,
                     vfun.lossRatio.lineno))
            if len(coefficients) != len(imls):
                raise InvalidFile(
                    'There are %d coefficients, but %d imls: %s, line %d' %
                    (len(coefficients), len(imls), fname,
                     vfun.coefficientsVariation.lineno))
            with context(fname, vfun):
                vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(
                    taxonomy, imt_str, imls, loss_ratios, coefficients,
                    vfun['probabilisticDistribution'])
    # synthesize a single deterministic id out of all set IDs seen
    categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID']))
    del categories['vulnerabilitySetID']
    return vf_dict, categories
def function[get_vulnerability_functions_04, parameter[fname]]: constant[ Parse the vulnerability model in NRML 0.4 format. :param fname: path of the vulnerability file :returns: a dictionary imt, taxonomy -> vulnerability function + vset ] variable[categories] assign[=] call[name[dict], parameter[]] variable[imts] assign[=] call[name[set], parameter[]] variable[taxonomies] assign[=] call[name[set], parameter[]] variable[vf_dict] assign[=] dictionary[[], []] for taget[name[vset]] in starred[call[name[nrml].read, parameter[name[fname]]].vulnerabilityModel] begin[:] call[call[name[categories]][constant[assetCategory]].add, parameter[call[name[vset]][constant[assetCategory]]]] call[call[name[categories]][constant[lossCategory]].add, parameter[call[name[vset]][constant[lossCategory]]]] call[call[name[categories]][constant[vulnerabilitySetID]].add, parameter[call[name[vset]][constant[vulnerabilitySetID]]]] variable[IML] assign[=] name[vset].IML variable[imt_str] assign[=] call[name[IML]][constant[IMT]] variable[imls] assign[=] <ast.UnaryOp object at 0x7da207f039a0> call[name[imts].add, parameter[name[imt_str]]] for taget[name[vfun]] in starred[call[name[vset].getnodes, parameter[constant[discreteVulnerability]]]] begin[:] variable[taxonomy] assign[=] call[name[vfun]][constant[vulnerabilityFunctionID]] if compare[name[taxonomy] in name[taxonomies]] begin[:] <ast.Raise object at 0x7da207f00310> call[name[taxonomies].add, parameter[name[taxonomy]]] with call[name[context], parameter[name[fname], name[vfun]]] begin[:] variable[loss_ratios] assign[=] <ast.UnaryOp object at 0x7da207f02e30> variable[coefficients] assign[=] <ast.UnaryOp object at 0x7da207f005e0> if compare[call[name[len], parameter[name[loss_ratios]]] not_equal[!=] call[name[len], parameter[name[imls]]]] begin[:] <ast.Raise object at 0x7da207f01300> if compare[call[name[len], parameter[name[coefficients]]] not_equal[!=] call[name[len], parameter[name[imls]]]] begin[:] <ast.Raise object at 0x7da207f00640> with 
call[name[context], parameter[name[fname], name[vfun]]] begin[:] call[name[vf_dict]][tuple[[<ast.Name object at 0x7da207f03a30>, <ast.Name object at 0x7da207f01de0>]]] assign[=] call[name[scientific].VulnerabilityFunction, parameter[name[taxonomy], name[imt_str], name[imls], name[loss_ratios], name[coefficients], call[name[vfun]][constant[probabilisticDistribution]]]] call[name[categories]][constant[id]] assign[=] call[constant[_].join, parameter[call[name[sorted], parameter[call[name[categories]][constant[vulnerabilitySetID]]]]]] <ast.Delete object at 0x7da207f01ba0> return[tuple[[<ast.Name object at 0x7da207f03670>, <ast.Name object at 0x7da207f00610>]]]
keyword[def] identifier[get_vulnerability_functions_04] ( identifier[fname] ): literal[string] identifier[categories] = identifier[dict] ( identifier[assetCategory] = identifier[set] (), identifier[lossCategory] = identifier[set] (), identifier[vulnerabilitySetID] = identifier[set] ()) identifier[imts] = identifier[set] () identifier[taxonomies] = identifier[set] () identifier[vf_dict] ={} keyword[for] identifier[vset] keyword[in] identifier[nrml] . identifier[read] ( identifier[fname] ). identifier[vulnerabilityModel] : identifier[categories] [ literal[string] ]. identifier[add] ( identifier[vset] [ literal[string] ]) identifier[categories] [ literal[string] ]. identifier[add] ( identifier[vset] [ literal[string] ]) identifier[categories] [ literal[string] ]. identifier[add] ( identifier[vset] [ literal[string] ]) identifier[IML] = identifier[vset] . identifier[IML] identifier[imt_str] = identifier[IML] [ literal[string] ] identifier[imls] =~ identifier[IML] identifier[imts] . identifier[add] ( identifier[imt_str] ) keyword[for] identifier[vfun] keyword[in] identifier[vset] . identifier[getnodes] ( literal[string] ): identifier[taxonomy] = identifier[vfun] [ literal[string] ] keyword[if] identifier[taxonomy] keyword[in] identifier[taxonomies] : keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[taxonomy] , identifier[fname] , identifier[vfun] . identifier[lineno] )) identifier[taxonomies] . identifier[add] ( identifier[taxonomy] ) keyword[with] identifier[context] ( identifier[fname] , identifier[vfun] ): identifier[loss_ratios] =~ identifier[vfun] . identifier[lossRatio] identifier[coefficients] =~ identifier[vfun] . identifier[coefficientsVariation] keyword[if] identifier[len] ( identifier[loss_ratios] )!= identifier[len] ( identifier[imls] ): keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[len] ( identifier[loss_ratios] ), identifier[len] ( identifier[imls] ), identifier[fname] , identifier[vfun] . 
identifier[lossRatio] . identifier[lineno] )) keyword[if] identifier[len] ( identifier[coefficients] )!= identifier[len] ( identifier[imls] ): keyword[raise] identifier[InvalidFile] ( literal[string] % ( identifier[len] ( identifier[coefficients] ), identifier[len] ( identifier[imls] ), identifier[fname] , identifier[vfun] . identifier[coefficientsVariation] . identifier[lineno] )) keyword[with] identifier[context] ( identifier[fname] , identifier[vfun] ): identifier[vf_dict] [ identifier[imt_str] , identifier[taxonomy] ]= identifier[scientific] . identifier[VulnerabilityFunction] ( identifier[taxonomy] , identifier[imt_str] , identifier[imls] , identifier[loss_ratios] , identifier[coefficients] , identifier[vfun] [ literal[string] ]) identifier[categories] [ literal[string] ]= literal[string] . identifier[join] ( identifier[sorted] ( identifier[categories] [ literal[string] ])) keyword[del] identifier[categories] [ literal[string] ] keyword[return] identifier[vf_dict] , identifier[categories]
def get_vulnerability_functions_04(fname): """ Parse the vulnerability model in NRML 0.4 format. :param fname: path of the vulnerability file :returns: a dictionary imt, taxonomy -> vulnerability function + vset """ categories = dict(assetCategory=set(), lossCategory=set(), vulnerabilitySetID=set()) imts = set() taxonomies = set() vf_dict = {} # imt, taxonomy -> vulnerability function for vset in nrml.read(fname).vulnerabilityModel: categories['assetCategory'].add(vset['assetCategory']) categories['lossCategory'].add(vset['lossCategory']) categories['vulnerabilitySetID'].add(vset['vulnerabilitySetID']) IML = vset.IML imt_str = IML['IMT'] imls = ~IML imts.add(imt_str) for vfun in vset.getnodes('discreteVulnerability'): taxonomy = vfun['vulnerabilityFunctionID'] if taxonomy in taxonomies: raise InvalidFile('Duplicated vulnerabilityFunctionID: %s: %s, line %d' % (taxonomy, fname, vfun.lineno)) # depends on [control=['if'], data=['taxonomy']] taxonomies.add(taxonomy) with context(fname, vfun): loss_ratios = ~vfun.lossRatio coefficients = ~vfun.coefficientsVariation # depends on [control=['with'], data=[]] if len(loss_ratios) != len(imls): raise InvalidFile('There are %d loss ratios, but %d imls: %s, line %d' % (len(loss_ratios), len(imls), fname, vfun.lossRatio.lineno)) # depends on [control=['if'], data=[]] if len(coefficients) != len(imls): raise InvalidFile('There are %d coefficients, but %d imls: %s, line %d' % (len(coefficients), len(imls), fname, vfun.coefficientsVariation.lineno)) # depends on [control=['if'], data=[]] with context(fname, vfun): vf_dict[imt_str, taxonomy] = scientific.VulnerabilityFunction(taxonomy, imt_str, imls, loss_ratios, coefficients, vfun['probabilisticDistribution']) # depends on [control=['with'], data=[]] # depends on [control=['for'], data=['vfun']] # depends on [control=['for'], data=['vset']] categories['id'] = '_'.join(sorted(categories['vulnerabilitySetID'])) del categories['vulnerabilitySetID'] return (vf_dict, categories)
def _confused_state(self, request: Request) -> Type[BaseState]:
    """
    If we're confused, find which state to call.
    """
    candidate = request.register.get(Register.STATE)
    # anything outside the allow-list immediately falls back to the default
    if candidate not in self._allowed_states:
        return import_class(settings.DEFAULT_STATE)
    try:
        return import_class(candidate)
    except (AttributeError, ImportError):
        # allowed but unimportable: fall back as well
        return import_class(settings.DEFAULT_STATE)
def function[_confused_state, parameter[self, request]]: constant[ If we're confused, find which state to call. ] variable[origin] assign[=] call[name[request].register.get, parameter[name[Register].STATE]] if compare[name[origin] in name[self]._allowed_states] begin[:] <ast.Try object at 0x7da18dc04700> return[call[name[import_class], parameter[name[settings].DEFAULT_STATE]]]
keyword[def] identifier[_confused_state] ( identifier[self] , identifier[request] : identifier[Request] )-> identifier[Type] [ identifier[BaseState] ]: literal[string] identifier[origin] = identifier[request] . identifier[register] . identifier[get] ( identifier[Register] . identifier[STATE] ) keyword[if] identifier[origin] keyword[in] identifier[self] . identifier[_allowed_states] : keyword[try] : keyword[return] identifier[import_class] ( identifier[origin] ) keyword[except] ( identifier[AttributeError] , identifier[ImportError] ): keyword[pass] keyword[return] identifier[import_class] ( identifier[settings] . identifier[DEFAULT_STATE] )
def _confused_state(self, request: Request) -> Type[BaseState]: """ If we're confused, find which state to call. """ origin = request.register.get(Register.STATE) if origin in self._allowed_states: try: return import_class(origin) # depends on [control=['try'], data=[]] except (AttributeError, ImportError): pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['origin']] return import_class(settings.DEFAULT_STATE)
def getTopRight(self):
    """
    Retrieves a tuple with the x,y coordinates of the upper right point
    of the rect.
    Requires the coordinates, width, height to be numbers
    """
    right_edge = float(self.get_x()) + float(self.get_width())
    top_edge = float(self.get_y()) + float(self.get_height())
    return (right_edge, top_edge)
def function[getTopRight, parameter[self]]: constant[ Retrieves a tuple with the x,y coordinates of the upper right point of the rect. Requires the coordinates, width, height to be numbers ] return[tuple[[<ast.BinOp object at 0x7da18c4ccfd0>, <ast.BinOp object at 0x7da18c4cd6c0>]]]
keyword[def] identifier[getTopRight] ( identifier[self] ): literal[string] keyword[return] ( identifier[float] ( identifier[self] . identifier[get_x] ())+ identifier[float] ( identifier[self] . identifier[get_width] ()), identifier[float] ( identifier[self] . identifier[get_y] ())+ identifier[float] ( identifier[self] . identifier[get_height] ()))
def getTopRight(self): """ Retrieves a tuple with the x,y coordinates of the upper right point of the rect. Requires the coordinates, width, height to be numbers """ return (float(self.get_x()) + float(self.get_width()), float(self.get_y()) + float(self.get_height()))
def __create_grid_four_connections(self):
    """!
    @brief Creates network with connections that make up four grid structure.
    @details Each oscillator may be connected with four neighbors in line with 'grid' structure: right, upper, left, lower.
    """
    side_size = self.__width

    # allocate the connection storage in the requested representation
    if self._conn_represent == conn_represent.MATRIX:
        self._osc_conn = [[0] * self._num_osc for _ in range(self._num_osc)]
    elif self._conn_represent == conn_represent.LIST:
        self._osc_conn = [[] for _ in range(self._num_osc)]
    else:
        raise NameError("Unknown type of representation of connections")

    for index in range(self._num_osc):
        upper_index = index - side_size
        lower_index = index + side_size
        left_index = index - 1
        right_index = index + 1

        # BUG FIX: the original computed math.ceil(index / side_size).
        # Under Python 3 true division that assigns e.g. indices
        # 1..side_size to the same "row", so the last cell of one row was
        # wrongly linked to the first cell of the next while 0 and 1 were
        # not linked at all.  The row of a cell is its floor division by
        # the row length (matching the Python 2 integer-division origin
        # of this code).
        node_row_index = index // side_size

        # vertical neighbours: bounds check only
        if upper_index >= 0:
            self.__create_connection(index, upper_index)
        if lower_index < self._num_osc:
            self.__create_connection(index, lower_index)
        # horizontal neighbours: must stay within the same row
        if left_index >= 0 and left_index // side_size == node_row_index:
            self.__create_connection(index, left_index)
        if right_index < self._num_osc and right_index // side_size == node_row_index:
            self.__create_connection(index, right_index)
def function[__create_grid_four_connections, parameter[self]]: constant[! @brief Creates network with connections that make up four grid structure. @details Each oscillator may be connected with four neighbors in line with 'grid' structure: right, upper, left, lower. ] variable[side_size] assign[=] name[self].__width if compare[name[self]._conn_represent equal[==] name[conn_represent].MATRIX] begin[:] name[self]._osc_conn assign[=] <ast.ListComp object at 0x7da1b016ef80> for taget[name[index]] in starred[call[name[range], parameter[constant[0], name[self]._num_osc, constant[1]]]] begin[:] variable[upper_index] assign[=] binary_operation[name[index] - name[side_size]] variable[lower_index] assign[=] binary_operation[name[index] + name[side_size]] variable[left_index] assign[=] binary_operation[name[index] - constant[1]] variable[right_index] assign[=] binary_operation[name[index] + constant[1]] variable[node_row_index] assign[=] call[name[math].ceil, parameter[binary_operation[name[index] / name[side_size]]]] if compare[name[upper_index] greater_or_equal[>=] constant[0]] begin[:] call[name[self].__create_connection, parameter[name[index], name[upper_index]]] if compare[name[lower_index] less[<] name[self]._num_osc] begin[:] call[name[self].__create_connection, parameter[name[index], name[lower_index]]] if <ast.BoolOp object at 0x7da1b016fbe0> begin[:] call[name[self].__create_connection, parameter[name[index], name[left_index]]] if <ast.BoolOp object at 0x7da1b016c610> begin[:] call[name[self].__create_connection, parameter[name[index], name[right_index]]]
keyword[def] identifier[__create_grid_four_connections] ( identifier[self] ): literal[string] identifier[side_size] = identifier[self] . identifier[__width] ; keyword[if] ( identifier[self] . identifier[_conn_represent] == identifier[conn_represent] . identifier[MATRIX] ): identifier[self] . identifier[_osc_conn] =[[ literal[int] ]* identifier[self] . identifier[_num_osc] keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[_num_osc] , literal[int] )]; keyword[elif] ( identifier[self] . identifier[_conn_represent] == identifier[conn_represent] . identifier[LIST] ): identifier[self] . identifier[_osc_conn] =[[] keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[_num_osc] , literal[int] )]; keyword[else] : keyword[raise] identifier[NameError] ( literal[string] ); keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[_num_osc] , literal[int] ): identifier[upper_index] = identifier[index] - identifier[side_size] ; identifier[lower_index] = identifier[index] + identifier[side_size] ; identifier[left_index] = identifier[index] - literal[int] ; identifier[right_index] = identifier[index] + literal[int] ; identifier[node_row_index] = identifier[math] . identifier[ceil] ( identifier[index] / identifier[side_size] ); keyword[if] ( identifier[upper_index] >= literal[int] ): identifier[self] . identifier[__create_connection] ( identifier[index] , identifier[upper_index] ); keyword[if] ( identifier[lower_index] < identifier[self] . identifier[_num_osc] ): identifier[self] . identifier[__create_connection] ( identifier[index] , identifier[lower_index] ); keyword[if] (( identifier[left_index] >= literal[int] ) keyword[and] ( identifier[math] . identifier[ceil] ( identifier[left_index] / identifier[side_size] )== identifier[node_row_index] )): identifier[self] . 
identifier[__create_connection] ( identifier[index] , identifier[left_index] ); keyword[if] (( identifier[right_index] < identifier[self] . identifier[_num_osc] ) keyword[and] ( identifier[math] . identifier[ceil] ( identifier[right_index] / identifier[side_size] )== identifier[node_row_index] )): identifier[self] . identifier[__create_connection] ( identifier[index] , identifier[right_index] );
def __create_grid_four_connections(self): """! @brief Creates network with connections that make up four grid structure. @details Each oscillator may be connected with four neighbors in line with 'grid' structure: right, upper, left, lower. """ side_size = self.__width if self._conn_represent == conn_represent.MATRIX: self._osc_conn = [[0] * self._num_osc for index in range(0, self._num_osc, 1)] # depends on [control=['if'], data=[]] elif self._conn_represent == conn_represent.LIST: self._osc_conn = [[] for index in range(0, self._num_osc, 1)] # depends on [control=['if'], data=[]] else: raise NameError('Unknown type of representation of connections') for index in range(0, self._num_osc, 1): upper_index = index - side_size lower_index = index + side_size left_index = index - 1 right_index = index + 1 node_row_index = math.ceil(index / side_size) if upper_index >= 0: self.__create_connection(index, upper_index) # depends on [control=['if'], data=['upper_index']] if lower_index < self._num_osc: self.__create_connection(index, lower_index) # depends on [control=['if'], data=['lower_index']] if left_index >= 0 and math.ceil(left_index / side_size) == node_row_index: self.__create_connection(index, left_index) # depends on [control=['if'], data=[]] if right_index < self._num_osc and math.ceil(right_index / side_size) == node_row_index: self.__create_connection(index, right_index) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['index']]
def display_lookback_returns(self):
    """
    Displays the current lookback returns for each series.
    """
    # render every value as a percentage with two decimals, e.g. '12.34%'
    as_pct = '{:,.2%}'.format
    return self.lookback_returns.apply(lambda row: row.map(as_pct), axis=1)
def function[display_lookback_returns, parameter[self]]: constant[ Displays the current lookback returns for each series. ] return[call[name[self].lookback_returns.apply, parameter[<ast.Lambda object at 0x7da204567d30>]]]
keyword[def] identifier[display_lookback_returns] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[lookback_returns] . identifier[apply] ( keyword[lambda] identifier[x] : identifier[x] . identifier[map] ( literal[string] . identifier[format] ), identifier[axis] = literal[int] )
def display_lookback_returns(self): """ Displays the current lookback returns for each series. """ return self.lookback_returns.apply(lambda x: x.map('{:,.2%}'.format), axis=1)
def do_GET(self):
    """Handle GET requests

    If the path is '/', a site which extracts the token will be
    generated. This will redirect the user to the '/sucess' page,
    which shows a success message.

    :returns: None
    :rtype: None
    :raises: None
    """
    sites = {
        self.extract_site_url: 'extract_token_site.html',
        self.success_site_url: 'success_site.html',
    }
    site = sites.get(self.path)
    # unknown path -> plain 404
    if site is None:
        log.debug("Requesting false url on login server.")
        self.send_error(404)
        return
    log.debug('Requesting the login server. Responding with %s.', sites)
    self._set_headers()
    self._write_html(site)
def function[do_GET, parameter[self]]: constant[Handle GET requests If the path is '/', a site which extracts the token will be generated. This will redirect the user to the '/sucess' page, which shows a success message. :returns: None :rtype: None :raises: None ] variable[urld] assign[=] dictionary[[<ast.Attribute object at 0x7da1b0a325f0>, <ast.Attribute object at 0x7da1b0a33520>], [<ast.Constant object at 0x7da1b0a333d0>, <ast.Constant object at 0x7da1b0a337c0>]] variable[site] assign[=] call[name[urld].get, parameter[name[self].path]] if <ast.UnaryOp object at 0x7da1b0a31f60> begin[:] call[name[log].debug, parameter[constant[Requesting false url on login server.]]] call[name[self].send_error, parameter[constant[404]]] return[None] call[name[log].debug, parameter[constant[Requesting the login server. Responding with %s.], name[urld]]] call[name[self]._set_headers, parameter[]] call[name[self]._write_html, parameter[name[site]]]
keyword[def] identifier[do_GET] ( identifier[self] ,): literal[string] identifier[urld] ={ identifier[self] . identifier[extract_site_url] : literal[string] , identifier[self] . identifier[success_site_url] : literal[string] } identifier[site] = identifier[urld] . identifier[get] ( identifier[self] . identifier[path] ) keyword[if] keyword[not] identifier[site] : identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[send_error] ( literal[int] ) keyword[return] identifier[log] . identifier[debug] ( literal[string] , identifier[urld] ) identifier[self] . identifier[_set_headers] () identifier[self] . identifier[_write_html] ( identifier[site] )
def do_GET(self): """Handle GET requests If the path is '/', a site which extracts the token will be generated. This will redirect the user to the '/sucess' page, which shows a success message. :returns: None :rtype: None :raises: None """ urld = {self.extract_site_url: 'extract_token_site.html', self.success_site_url: 'success_site.html'} site = urld.get(self.path) if not site: log.debug('Requesting false url on login server.') self.send_error(404) return # depends on [control=['if'], data=[]] log.debug('Requesting the login server. Responding with %s.', urld) self._set_headers() self._write_html(site)
def delete(self):
    """Deletes the local marker file and also any data in the Fuseki server.
    """
    MetadataCache.delete(self)
    wipe_all = 'DELETE WHERE { ?s ?p ?o . }'
    try:
        self.graph.query(wipe_all)
    except ResultException:
        # Jena Fuseki returns no tuples for a deletion query, so this is
        # often just a false positive; log it and carry on.
        logging.exception('error when deleting graph')
def function[delete, parameter[self]]: constant[Deletes the local marker file and also any data in the Fuseki server. ] call[name[MetadataCache].delete, parameter[name[self]]] <ast.Try object at 0x7da18f09e2c0>
keyword[def] identifier[delete] ( identifier[self] ): literal[string] identifier[MetadataCache] . identifier[delete] ( identifier[self] ) keyword[try] : identifier[self] . identifier[graph] . identifier[query] ( literal[string] ) keyword[except] identifier[ResultException] : identifier[logging] . identifier[exception] ( literal[string] )
def delete(self): """Deletes the local marker file and also any data in the Fuseki server. """ MetadataCache.delete(self) try: self.graph.query('DELETE WHERE { ?s ?p ?o . }') # depends on [control=['try'], data=[]] except ResultException: # this is often just a false positive since Jena Fuseki does not # return tuples for a deletion query, so swallowing the exception # here is fine logging.exception('error when deleting graph') # depends on [control=['except'], data=[]]
def get_command_response_from_cache(self, device_id, command, command2):
    """Return the cached response for a device command, refreshing in background.

    Looks up the response stored under the (command, command2) key for
    ``device_id``. On a cache miss ``False`` is returned. When no other
    process holds the lock file, a child process is forked to rebuild the
    cache in the background while the (possibly stale) cached response is
    returned immediately to the caller.

    Args:
        device_id: identifier of the device whose cache is consulted.
        command: primary command value used to build the cache key.
        command2: secondary command value used to build the cache key.

    Returns:
        The cached response payload, or False on a cache miss.
    """
    key = self.create_key_from_command(command, command2)
    command_cache = self.get_cache_from_file(device_id)

    # cache miss: unknown device, or known device without this command
    if device_id not in command_cache:
        command_cache[device_id] = {}
        return False
    elif key not in command_cache[device_id]:
        return False

    response = command_cache[device_id][key]

    expired = False
    if response['ttl'] < int(time()):
        self.logger.info("cache expired for device %s", device_id)
        expired = True

    # NOTE(review): the rebuild below runs whenever the lock is free, even
    # for unexpired entries — looks intentional (keep-warm), but confirm.
    if os.path.exists(LOCK_FILE):
        self.logger.info("cache locked - will wait to rebuild %s", device_id)
    else:
        self.logger.info("cache unlocked - will rebuild %s", device_id)
        newpid = os.fork()
        if newpid == 0:
            # BUG FIX: the child must terminate after the rebuild.
            # Previously it fell through, returned the response, and kept
            # executing the caller's request-handling path alongside the
            # parent.  os._exit avoids running the parent's cleanup
            # handlers in the child.
            self.rebuild_cache(device_id, command, command2)
            os._exit(0)

    if expired:
        self.logger.info("returning expired cached device status %s",
                         device_id)
    else:
        self.logger.info("returning unexpired cached device status %s",
                         device_id)
    return response['response']
def function[get_command_response_from_cache, parameter[self, device_id, command, command2]]: constant[Gets response] variable[key] assign[=] call[name[self].create_key_from_command, parameter[name[command], name[command2]]] variable[command_cache] assign[=] call[name[self].get_cache_from_file, parameter[name[device_id]]] if compare[name[device_id] <ast.NotIn object at 0x7da2590d7190> name[command_cache]] begin[:] call[name[command_cache]][name[device_id]] assign[=] dictionary[[], []] return[constant[False]] variable[response] assign[=] call[call[name[command_cache]][name[device_id]]][name[key]] variable[expired] assign[=] constant[False] if compare[call[name[response]][constant[ttl]] less[<] call[name[int], parameter[call[name[time], parameter[]]]]] begin[:] call[name[self].logger.info, parameter[constant[cache expired for device %s], name[device_id]]] variable[expired] assign[=] constant[True] if call[name[os].path.exists, parameter[name[LOCK_FILE]]] begin[:] call[name[self].logger.info, parameter[constant[cache locked - will wait to rebuild %s], name[device_id]]] if name[expired] begin[:] call[name[self].logger.info, parameter[constant[returning expired cached device status %s], name[device_id]]] return[call[name[response]][constant[response]]]
keyword[def] identifier[get_command_response_from_cache] ( identifier[self] , identifier[device_id] , identifier[command] , identifier[command2] ): literal[string] identifier[key] = identifier[self] . identifier[create_key_from_command] ( identifier[command] , identifier[command2] ) identifier[command_cache] = identifier[self] . identifier[get_cache_from_file] ( identifier[device_id] ) keyword[if] identifier[device_id] keyword[not] keyword[in] identifier[command_cache] : identifier[command_cache] [ identifier[device_id] ]={} keyword[return] keyword[False] keyword[elif] identifier[key] keyword[not] keyword[in] identifier[command_cache] [ identifier[device_id] ]: keyword[return] keyword[False] identifier[response] = identifier[command_cache] [ identifier[device_id] ][ identifier[key] ] identifier[expired] = keyword[False] keyword[if] identifier[response] [ literal[string] ]< identifier[int] ( identifier[time] ()): identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[device_id] ) identifier[expired] = keyword[True] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[LOCK_FILE] ): identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[device_id] ) keyword[else] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[device_id] ) identifier[newpid] = identifier[os] . identifier[fork] () keyword[if] identifier[newpid] == literal[int] : identifier[self] . identifier[rebuild_cache] ( identifier[device_id] , identifier[command] , identifier[command2] ) keyword[if] identifier[expired] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[device_id] ) keyword[else] : identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[device_id] ) keyword[return] identifier[response] [ literal[string] ]
def get_command_response_from_cache(self, device_id, command, command2): """Gets response""" key = self.create_key_from_command(command, command2) command_cache = self.get_cache_from_file(device_id) if device_id not in command_cache: command_cache[device_id] = {} return False # depends on [control=['if'], data=['device_id', 'command_cache']] elif key not in command_cache[device_id]: return False # depends on [control=['if'], data=[]] response = command_cache[device_id][key] expired = False if response['ttl'] < int(time()): self.logger.info('cache expired for device %s', device_id) expired = True if os.path.exists(LOCK_FILE): self.logger.info('cache locked - will wait to rebuild %s', device_id) # depends on [control=['if'], data=[]] else: self.logger.info('cache unlocked - will rebuild %s', device_id) newpid = os.fork() if newpid == 0: self.rebuild_cache(device_id, command, command2) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if expired: self.logger.info('returning expired cached device status %s', device_id) # depends on [control=['if'], data=[]] else: self.logger.info('returning unexpired cached device status %s', device_id) return response['response']
def create_group_groups(self, description=None, is_public=None, join_level=None, name=None, storage_quota_mb=None):
    """
    Create a group.

    Creates a new group. Groups created using the "/api/v1/groups/"
    endpoint will be community groups.
    """
    path = {}
    data = {}
    params = {}

    # Optional form fields that require no validation, inserted in the
    # order the API lists them: name, description, is_public.
    for field, value in (('name', name),
                         ('description', description),
                         ('is_public', is_public)):
        if value is not None:
            data[field] = value

    # OPTIONAL - join_level: restricted to a fixed set of values.
    if join_level is not None:
        self._validate_enum(join_level, ["parent_context_auto_join", "parent_context_request", "invitation_only"])
        data["join_level"] = join_level

    # OPTIONAL - storage_quota_mb: ignored unless the caller has the
    # manage_storage_quotas permission.
    if storage_quota_mb is not None:
        data["storage_quota_mb"] = storage_quota_mb

    self.logger.debug("POST /api/v1/groups with query params: {params} and form data: {data}".format(params=params, data=data, **path))
    return self.generic_request("POST", "/api/v1/groups".format(**path), data=data, params=params, single_item=True)
def function[create_group_groups, parameter[self, description, is_public, join_level, name, storage_quota_mb]]: constant[ Create a group. Creates a new group. Groups created using the "/api/v1/groups/" endpoint will be community groups. ] variable[path] assign[=] dictionary[[], []] variable[data] assign[=] dictionary[[], []] variable[params] assign[=] dictionary[[], []] constant[The name of the group] if compare[name[name] is_not constant[None]] begin[:] call[name[data]][constant[name]] assign[=] name[name] constant[A description of the group] if compare[name[description] is_not constant[None]] begin[:] call[name[data]][constant[description]] assign[=] name[description] constant[whether the group is public (applies only to community groups)] if compare[name[is_public] is_not constant[None]] begin[:] call[name[data]][constant[is_public]] assign[=] name[is_public] constant[no description] if compare[name[join_level] is_not constant[None]] begin[:] call[name[self]._validate_enum, parameter[name[join_level], list[[<ast.Constant object at 0x7da1b0a86f80>, <ast.Constant object at 0x7da1b0a86f50>, <ast.Constant object at 0x7da1b0a86ef0>]]]] call[name[data]][constant[join_level]] assign[=] name[join_level] constant[The allowed file storage for the group, in megabytes. This parameter is ignored if the caller does not have the manage_storage_quotas permission.] if compare[name[storage_quota_mb] is_not constant[None]] begin[:] call[name[data]][constant[storage_quota_mb]] assign[=] name[storage_quota_mb] call[name[self].logger.debug, parameter[call[constant[POST /api/v1/groups with query params: {params} and form data: {data}].format, parameter[]]]] return[call[name[self].generic_request, parameter[constant[POST], call[constant[/api/v1/groups].format, parameter[]]]]]
keyword[def] identifier[create_group_groups] ( identifier[self] , identifier[description] = keyword[None] , identifier[is_public] = keyword[None] , identifier[join_level] = keyword[None] , identifier[name] = keyword[None] , identifier[storage_quota_mb] = keyword[None] ): literal[string] identifier[path] ={} identifier[data] ={} identifier[params] ={} literal[string] keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[name] literal[string] keyword[if] identifier[description] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[description] literal[string] keyword[if] identifier[is_public] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[is_public] literal[string] keyword[if] identifier[join_level] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[_validate_enum] ( identifier[join_level] ,[ literal[string] , literal[string] , literal[string] ]) identifier[data] [ literal[string] ]= identifier[join_level] literal[string] keyword[if] identifier[storage_quota_mb] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ]= identifier[storage_quota_mb] identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[params] = identifier[params] , identifier[data] = identifier[data] ,** identifier[path] )) keyword[return] identifier[self] . identifier[generic_request] ( literal[string] , literal[string] . identifier[format] (** identifier[path] ), identifier[data] = identifier[data] , identifier[params] = identifier[params] , identifier[single_item] = keyword[True] )
def create_group_groups(self, description=None, is_public=None, join_level=None, name=None, storage_quota_mb=None): """ Create a group. Creates a new group. Groups created using the "/api/v1/groups/" endpoint will be community groups. """ path = {} data = {} params = {} # OPTIONAL - name 'The name of the group' if name is not None: data['name'] = name # depends on [control=['if'], data=['name']] # OPTIONAL - description 'A description of the group' if description is not None: data['description'] = description # depends on [control=['if'], data=['description']] # OPTIONAL - is_public 'whether the group is public (applies only to community groups)' if is_public is not None: data['is_public'] = is_public # depends on [control=['if'], data=['is_public']] # OPTIONAL - join_level 'no description' if join_level is not None: self._validate_enum(join_level, ['parent_context_auto_join', 'parent_context_request', 'invitation_only']) data['join_level'] = join_level # depends on [control=['if'], data=['join_level']] # OPTIONAL - storage_quota_mb 'The allowed file storage for the group, in megabytes. This parameter is\n ignored if the caller does not have the manage_storage_quotas permission.' if storage_quota_mb is not None: data['storage_quota_mb'] = storage_quota_mb # depends on [control=['if'], data=['storage_quota_mb']] self.logger.debug('POST /api/v1/groups with query params: {params} and form data: {data}'.format(params=params, data=data, **path)) return self.generic_request('POST', '/api/v1/groups'.format(**path), data=data, params=params, single_item=True)
def convert_all(folder, dest_path='.', force_all=False):
    "Convert modified notebooks in `folder` to html pages in `dest_path`."
    src_dir = Path(folder)
    n_converted = 0
    for nb_path in src_dir.glob("*.ipynb"):
        out_path = Path(dest_path)/nb_path.with_suffix('.html').name
        # Skip a notebook when a rebuild was not forced, its html page
        # already exists, and the notebook has not been touched since the
        # page was written.
        if not force_all and out_path.exists():
            if os.path.getmtime(nb_path) < os.path.getmtime(out_path):
                continue
        print(f"converting: {nb_path} => {out_path}")
        n_converted += 1
        convert_nb(nb_path, dest_path=dest_path)
    if n_converted == 0:
        print("No notebooks were modified")
def function[convert_all, parameter[folder, dest_path, force_all]]: constant[Convert modified notebooks in `folder` to html pages in `dest_path`.] variable[path] assign[=] call[name[Path], parameter[name[folder]]] variable[changed_cnt] assign[=] constant[0] for taget[name[fname]] in starred[call[name[path].glob, parameter[constant[*.ipynb]]]] begin[:] variable[fname_out] assign[=] binary_operation[call[name[Path], parameter[name[dest_path]]] / call[name[fname].with_suffix, parameter[constant[.html]]].name] if <ast.BoolOp object at 0x7da20cabdfc0> begin[:] variable[in_mod] assign[=] call[name[os].path.getmtime, parameter[name[fname]]] variable[out_mod] assign[=] call[name[os].path.getmtime, parameter[name[fname_out]]] if compare[name[in_mod] less[<] name[out_mod]] begin[:] continue call[name[print], parameter[<ast.JoinedStr object at 0x7da20cabff70>]] <ast.AugAssign object at 0x7da20cabe980> call[name[convert_nb], parameter[name[fname]]] if <ast.UnaryOp object at 0x7da20cabf640> begin[:] call[name[print], parameter[constant[No notebooks were modified]]]
keyword[def] identifier[convert_all] ( identifier[folder] , identifier[dest_path] = literal[string] , identifier[force_all] = keyword[False] ): literal[string] identifier[path] = identifier[Path] ( identifier[folder] ) identifier[changed_cnt] = literal[int] keyword[for] identifier[fname] keyword[in] identifier[path] . identifier[glob] ( literal[string] ): identifier[fname_out] = identifier[Path] ( identifier[dest_path] )/ identifier[fname] . identifier[with_suffix] ( literal[string] ). identifier[name] keyword[if] keyword[not] identifier[force_all] keyword[and] identifier[fname_out] . identifier[exists] (): identifier[in_mod] = identifier[os] . identifier[path] . identifier[getmtime] ( identifier[fname] ) identifier[out_mod] = identifier[os] . identifier[path] . identifier[getmtime] ( identifier[fname_out] ) keyword[if] identifier[in_mod] < identifier[out_mod] : keyword[continue] identifier[print] ( literal[string] ) identifier[changed_cnt] += literal[int] identifier[convert_nb] ( identifier[fname] , identifier[dest_path] = identifier[dest_path] ) keyword[if] keyword[not] identifier[changed_cnt] : identifier[print] ( literal[string] )
def convert_all(folder, dest_path='.', force_all=False): """Convert modified notebooks in `folder` to html pages in `dest_path`.""" path = Path(folder) changed_cnt = 0 for fname in path.glob('*.ipynb'): # only rebuild modified files fname_out = Path(dest_path) / fname.with_suffix('.html').name if not force_all and fname_out.exists(): in_mod = os.path.getmtime(fname) out_mod = os.path.getmtime(fname_out) if in_mod < out_mod: continue # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] print(f'converting: {fname} => {fname_out}') changed_cnt += 1 convert_nb(fname, dest_path=dest_path) # depends on [control=['for'], data=['fname']] if not changed_cnt: print('No notebooks were modified') # depends on [control=['if'], data=[]]
def _ellipsoid_phantom_3d(space, ellipsoids):
    """Create an ellipsoid phantom in 3d space.

    Parameters
    ----------
    space : `DiscreteLp`
        Space in which the phantom should be generated. If ``space.shape``
        is 1 in an axis, a corresponding slice of the phantom is created
        (instead of squashing the whole phantom into the slice).
    ellipsoids : list of lists
        Each row should contain the entries ::

            'value',
            'axis_1', 'axis_2', 'axis_3',
            'center_x', 'center_y', 'center_z',
            'rotation_phi', 'rotation_theta', 'rotation_psi'

        The provided ellipsoids need to be specified relative to the
        reference cube ``[-1, -1, -1] x [1, 1, 1]``. Angles are to be given
        in radians.

    Returns
    -------
    phantom : ``space`` element
        3D ellipsoid phantom in ``space``.

    See Also
    --------
    shepp_logan : The typical use-case for this function.
    """
    # Blank volume
    p = np.zeros(space.shape, dtype=space.dtype)

    minp = space.grid.min_pt
    maxp = space.grid.max_pt

    # Create the pixel grid
    grid_in = space.grid.meshgrid

    # Move points to [-1, 1]
    grid = []
    for i in range(3):
        mean_i = (minp[i] + maxp[i]) / 2.0
        # Where space.shape = 1, we have minp = maxp, so we set diff_i = 1
        # to avoid division by zero. Effectively, this allows constructing
        # a slice of a 3D phantom.
        diff_i = (maxp[i] - minp[i]) / 2.0 or 1.0
        grid.append((grid_in[i] - mean_i) / diff_i)

    for ellip in ellipsoids:
        # Each ellipsoid row must have exactly the 10 entries listed in the
        # docstring: value, 3 semi-axes, 3 center coords, 3 rotation angles.
        assert len(ellip) == 10

        intensity = ellip[0]
        a_squared = ellip[1] ** 2
        b_squared = ellip[2] ** 2
        c_squared = ellip[3] ** 2
        x0 = ellip[4]
        y0 = ellip[5]
        z0 = ellip[6]
        phi = ellip[7]
        theta = ellip[8]
        psi = ellip[9]

        # Inverse squared semi-axes; used to evaluate the ellipsoid equation
        # sum(scale_i * d_i^2) <= 1.
        scales = [1 / a_squared, 1 / b_squared, 1 / c_squared]
        # Center mapped from [-1, 1] coordinates to relative [0, 1] position.
        center = (np.array([x0, y0, z0]) + 1.0) / 2.0

        # Create the offset x,y and z values for the grid
        if any([phi, theta, psi]):
            # Rotate the points to the expected coordinate system.
            cphi = np.cos(phi)
            sphi = np.sin(phi)
            ctheta = np.cos(theta)
            stheta = np.sin(theta)
            cpsi = np.cos(psi)
            spsi = np.sin(psi)
            # Rotation matrix built from the three angles; presumably a
            # z-x-z Euler-angle convention — confirm against the phantom
            # definitions that use this function.
            mat = np.array([[cpsi * cphi - ctheta * sphi * spsi,
                             cpsi * sphi + ctheta * cphi * spsi,
                             spsi * stheta],
                            [-spsi * cphi - ctheta * sphi * cpsi,
                             -spsi * sphi + ctheta * cphi * cpsi,
                             cpsi * stheta],
                            [stheta * sphi,
                             -stheta * cphi,
                             ctheta]])

            # Calculate the points that could possibly be inside the volume
            # Since the points are rotated, we cannot do anything directional
            # without more logic
            max_radius = np.sqrt(
                np.abs(mat).dot([a_squared, b_squared, c_squared]))
            idx, shapes = _getshapes_3d(center, max_radius, space.shape)

            subgrid = [g[idi] for g, idi in zip(grid, shapes)]
            # Project the centered coordinates onto the ellipsoid's rotated
            # axes; trailing None adds an axis for broadcasting.
            offset_points = [vec * (xi - x0i)[..., None]
                             for xi, vec, x0i in zip(subgrid,
                                                     mat.T,
                                                     [x0, y0, z0])]
            rotated = offset_points[0] + offset_points[1] + offset_points[2]
            np.square(rotated, out=rotated)
            radius = np.dot(rotated, scales)
        else:
            # Calculate the points that could possibly be inside the volume
            max_radius = np.sqrt([a_squared, b_squared, c_squared])
            idx, shapes = _getshapes_3d(center, max_radius, space.shape)

            subgrid = [g[idi] for g, idi in zip(grid, shapes)]
            squared_dist = [ai * (xi - x0i) ** 2
                            for xi, ai, x0i in zip(subgrid,
                                                   scales,
                                                   [x0, y0, z0])]

            # Parentheses to get best order for broadcasting
            radius = squared_dist[0] + (squared_dist[1] + squared_dist[2])

        # Find the points within the ellipse
        inside = radius <= 1

        # Add the ellipse intensity to those points
        # NOTE(review): assumes `idx` from _getshapes_3d consists of basic
        # slices, so `p[idx]` is a view and the in-place add writes through
        # to `p` — confirm against _getshapes_3d.
        p[idx][inside] += intensity

    return space.element(p)
def function[_ellipsoid_phantom_3d, parameter[space, ellipsoids]]: constant[Create an ellipsoid phantom in 3d space. Parameters ---------- space : `DiscreteLp` Space in which the phantom should be generated. If ``space.shape`` is 1 in an axis, a corresponding slice of the phantom is created (instead of squashing the whole phantom into the slice). ellipsoids : list of lists Each row should contain the entries :: 'value', 'axis_1', 'axis_2', 'axis_3', 'center_x', 'center_y', 'center_z', 'rotation_phi', 'rotation_theta', 'rotation_psi' The provided ellipsoids need to be specified relative to the reference cube ``[-1, -1, -1] x [1, 1, 1]``. Angles are to be given in radians. Returns ------- phantom : ``space`` element 3D ellipsoid phantom in ``space``. See Also -------- shepp_logan : The typical use-case for this function. ] variable[p] assign[=] call[name[np].zeros, parameter[name[space].shape]] variable[minp] assign[=] name[space].grid.min_pt variable[maxp] assign[=] name[space].grid.max_pt variable[grid_in] assign[=] name[space].grid.meshgrid variable[grid] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[constant[3]]]] begin[:] variable[mean_i] assign[=] binary_operation[binary_operation[call[name[minp]][name[i]] + call[name[maxp]][name[i]]] / constant[2.0]] variable[diff_i] assign[=] <ast.BoolOp object at 0x7da1b1e5b0d0> call[name[grid].append, parameter[binary_operation[binary_operation[call[name[grid_in]][name[i]] - name[mean_i]] / name[diff_i]]]] for taget[name[ellip]] in starred[name[ellipsoids]] begin[:] assert[compare[call[name[len], parameter[name[ellip]]] equal[==] constant[10]]] variable[intensity] assign[=] call[name[ellip]][constant[0]] variable[a_squared] assign[=] binary_operation[call[name[ellip]][constant[1]] ** constant[2]] variable[b_squared] assign[=] binary_operation[call[name[ellip]][constant[2]] ** constant[2]] variable[c_squared] assign[=] binary_operation[call[name[ellip]][constant[3]] ** constant[2]] variable[x0] 
assign[=] call[name[ellip]][constant[4]] variable[y0] assign[=] call[name[ellip]][constant[5]] variable[z0] assign[=] call[name[ellip]][constant[6]] variable[phi] assign[=] call[name[ellip]][constant[7]] variable[theta] assign[=] call[name[ellip]][constant[8]] variable[psi] assign[=] call[name[ellip]][constant[9]] variable[scales] assign[=] list[[<ast.BinOp object at 0x7da1b1e59fc0>, <ast.BinOp object at 0x7da1b1e59f30>, <ast.BinOp object at 0x7da1b1e59ea0>]] variable[center] assign[=] binary_operation[binary_operation[call[name[np].array, parameter[list[[<ast.Name object at 0x7da1b1e59c90>, <ast.Name object at 0x7da1b1e59c60>, <ast.Name object at 0x7da1b1e59c30>]]]] + constant[1.0]] / constant[2.0]] if call[name[any], parameter[list[[<ast.Name object at 0x7da1b1e59ae0>, <ast.Name object at 0x7da1b1e59ab0>, <ast.Name object at 0x7da1b1e59a80>]]]] begin[:] variable[cphi] assign[=] call[name[np].cos, parameter[name[phi]]] variable[sphi] assign[=] call[name[np].sin, parameter[name[phi]]] variable[ctheta] assign[=] call[name[np].cos, parameter[name[theta]]] variable[stheta] assign[=] call[name[np].sin, parameter[name[theta]]] variable[cpsi] assign[=] call[name[np].cos, parameter[name[psi]]] variable[spsi] assign[=] call[name[np].sin, parameter[name[psi]]] variable[mat] assign[=] call[name[np].array, parameter[list[[<ast.List object at 0x7da1b1e59270>, <ast.List object at 0x7da1b1e58e50>, <ast.List object at 0x7da1b1e58220>]]]] variable[max_radius] assign[=] call[name[np].sqrt, parameter[call[call[name[np].abs, parameter[name[mat]]].dot, parameter[list[[<ast.Name object at 0x7da1b1e58610>, <ast.Name object at 0x7da1b1e58640>, <ast.Name object at 0x7da1b1e58670>]]]]]] <ast.Tuple object at 0x7da1b1e586d0> assign[=] call[name[_getshapes_3d], parameter[name[center], name[max_radius], name[space].shape]] variable[subgrid] assign[=] <ast.ListComp object at 0x7da1b1e588e0> variable[offset_points] assign[=] <ast.ListComp object at 0x7da1b1e58bb0> variable[rotated] assign[=] 
binary_operation[binary_operation[call[name[offset_points]][constant[0]] + call[name[offset_points]][constant[1]]] + call[name[offset_points]][constant[2]]] call[name[np].square, parameter[name[rotated]]] variable[radius] assign[=] call[name[np].dot, parameter[name[rotated], name[scales]]] variable[inside] assign[=] compare[name[radius] less_or_equal[<=] constant[1]] <ast.AugAssign object at 0x7da18f58c190> return[call[name[space].element, parameter[name[p]]]]
keyword[def] identifier[_ellipsoid_phantom_3d] ( identifier[space] , identifier[ellipsoids] ): literal[string] identifier[p] = identifier[np] . identifier[zeros] ( identifier[space] . identifier[shape] , identifier[dtype] = identifier[space] . identifier[dtype] ) identifier[minp] = identifier[space] . identifier[grid] . identifier[min_pt] identifier[maxp] = identifier[space] . identifier[grid] . identifier[max_pt] identifier[grid_in] = identifier[space] . identifier[grid] . identifier[meshgrid] identifier[grid] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[mean_i] =( identifier[minp] [ identifier[i] ]+ identifier[maxp] [ identifier[i] ])/ literal[int] identifier[diff_i] =( identifier[maxp] [ identifier[i] ]- identifier[minp] [ identifier[i] ])/ literal[int] keyword[or] literal[int] identifier[grid] . identifier[append] (( identifier[grid_in] [ identifier[i] ]- identifier[mean_i] )/ identifier[diff_i] ) keyword[for] identifier[ellip] keyword[in] identifier[ellipsoids] : keyword[assert] identifier[len] ( identifier[ellip] )== literal[int] identifier[intensity] = identifier[ellip] [ literal[int] ] identifier[a_squared] = identifier[ellip] [ literal[int] ]** literal[int] identifier[b_squared] = identifier[ellip] [ literal[int] ]** literal[int] identifier[c_squared] = identifier[ellip] [ literal[int] ]** literal[int] identifier[x0] = identifier[ellip] [ literal[int] ] identifier[y0] = identifier[ellip] [ literal[int] ] identifier[z0] = identifier[ellip] [ literal[int] ] identifier[phi] = identifier[ellip] [ literal[int] ] identifier[theta] = identifier[ellip] [ literal[int] ] identifier[psi] = identifier[ellip] [ literal[int] ] identifier[scales] =[ literal[int] / identifier[a_squared] , literal[int] / identifier[b_squared] , literal[int] / identifier[c_squared] ] identifier[center] =( identifier[np] . 
identifier[array] ([ identifier[x0] , identifier[y0] , identifier[z0] ])+ literal[int] )/ literal[int] keyword[if] identifier[any] ([ identifier[phi] , identifier[theta] , identifier[psi] ]): identifier[cphi] = identifier[np] . identifier[cos] ( identifier[phi] ) identifier[sphi] = identifier[np] . identifier[sin] ( identifier[phi] ) identifier[ctheta] = identifier[np] . identifier[cos] ( identifier[theta] ) identifier[stheta] = identifier[np] . identifier[sin] ( identifier[theta] ) identifier[cpsi] = identifier[np] . identifier[cos] ( identifier[psi] ) identifier[spsi] = identifier[np] . identifier[sin] ( identifier[psi] ) identifier[mat] = identifier[np] . identifier[array] ([[ identifier[cpsi] * identifier[cphi] - identifier[ctheta] * identifier[sphi] * identifier[spsi] , identifier[cpsi] * identifier[sphi] + identifier[ctheta] * identifier[cphi] * identifier[spsi] , identifier[spsi] * identifier[stheta] ], [- identifier[spsi] * identifier[cphi] - identifier[ctheta] * identifier[sphi] * identifier[cpsi] , - identifier[spsi] * identifier[sphi] + identifier[ctheta] * identifier[cphi] * identifier[cpsi] , identifier[cpsi] * identifier[stheta] ], [ identifier[stheta] * identifier[sphi] , - identifier[stheta] * identifier[cphi] , identifier[ctheta] ]]) identifier[max_radius] = identifier[np] . identifier[sqrt] ( identifier[np] . identifier[abs] ( identifier[mat] ). identifier[dot] ([ identifier[a_squared] , identifier[b_squared] , identifier[c_squared] ])) identifier[idx] , identifier[shapes] = identifier[_getshapes_3d] ( identifier[center] , identifier[max_radius] , identifier[space] . 
identifier[shape] ) identifier[subgrid] =[ identifier[g] [ identifier[idi] ] keyword[for] identifier[g] , identifier[idi] keyword[in] identifier[zip] ( identifier[grid] , identifier[shapes] )] identifier[offset_points] =[ identifier[vec] *( identifier[xi] - identifier[x0i] )[..., keyword[None] ] keyword[for] identifier[xi] , identifier[vec] , identifier[x0i] keyword[in] identifier[zip] ( identifier[subgrid] , identifier[mat] . identifier[T] , [ identifier[x0] , identifier[y0] , identifier[z0] ])] identifier[rotated] = identifier[offset_points] [ literal[int] ]+ identifier[offset_points] [ literal[int] ]+ identifier[offset_points] [ literal[int] ] identifier[np] . identifier[square] ( identifier[rotated] , identifier[out] = identifier[rotated] ) identifier[radius] = identifier[np] . identifier[dot] ( identifier[rotated] , identifier[scales] ) keyword[else] : identifier[max_radius] = identifier[np] . identifier[sqrt] ([ identifier[a_squared] , identifier[b_squared] , identifier[c_squared] ]) identifier[idx] , identifier[shapes] = identifier[_getshapes_3d] ( identifier[center] , identifier[max_radius] , identifier[space] . identifier[shape] ) identifier[subgrid] =[ identifier[g] [ identifier[idi] ] keyword[for] identifier[g] , identifier[idi] keyword[in] identifier[zip] ( identifier[grid] , identifier[shapes] )] identifier[squared_dist] =[ identifier[ai] *( identifier[xi] - identifier[x0i] )** literal[int] keyword[for] identifier[xi] , identifier[ai] , identifier[x0i] keyword[in] identifier[zip] ( identifier[subgrid] , identifier[scales] , [ identifier[x0] , identifier[y0] , identifier[z0] ])] identifier[radius] = identifier[squared_dist] [ literal[int] ]+( identifier[squared_dist] [ literal[int] ]+ identifier[squared_dist] [ literal[int] ]) identifier[inside] = identifier[radius] <= literal[int] identifier[p] [ identifier[idx] ][ identifier[inside] ]+= identifier[intensity] keyword[return] identifier[space] . identifier[element] ( identifier[p] )
def _ellipsoid_phantom_3d(space, ellipsoids): """Create an ellipsoid phantom in 3d space. Parameters ---------- space : `DiscreteLp` Space in which the phantom should be generated. If ``space.shape`` is 1 in an axis, a corresponding slice of the phantom is created (instead of squashing the whole phantom into the slice). ellipsoids : list of lists Each row should contain the entries :: 'value', 'axis_1', 'axis_2', 'axis_3', 'center_x', 'center_y', 'center_z', 'rotation_phi', 'rotation_theta', 'rotation_psi' The provided ellipsoids need to be specified relative to the reference cube ``[-1, -1, -1] x [1, 1, 1]``. Angles are to be given in radians. Returns ------- phantom : ``space`` element 3D ellipsoid phantom in ``space``. See Also -------- shepp_logan : The typical use-case for this function. """ # Blank volume p = np.zeros(space.shape, dtype=space.dtype) minp = space.grid.min_pt maxp = space.grid.max_pt # Create the pixel grid grid_in = space.grid.meshgrid # Move points to [-1, 1] grid = [] for i in range(3): mean_i = (minp[i] + maxp[i]) / 2.0 # Where space.shape = 1, we have minp = maxp, so we set diff_i = 1 # to avoid division by zero. Effectively, this allows constructing # a slice of a 3D phantom. diff_i = (maxp[i] - minp[i]) / 2.0 or 1.0 grid.append((grid_in[i] - mean_i) / diff_i) # depends on [control=['for'], data=['i']] for ellip in ellipsoids: assert len(ellip) == 10 intensity = ellip[0] a_squared = ellip[1] ** 2 b_squared = ellip[2] ** 2 c_squared = ellip[3] ** 2 x0 = ellip[4] y0 = ellip[5] z0 = ellip[6] phi = ellip[7] theta = ellip[8] psi = ellip[9] scales = [1 / a_squared, 1 / b_squared, 1 / c_squared] center = (np.array([x0, y0, z0]) + 1.0) / 2.0 # Create the offset x,y and z values for the grid if any([phi, theta, psi]): # Rotate the points to the expected coordinate system. 
cphi = np.cos(phi) sphi = np.sin(phi) ctheta = np.cos(theta) stheta = np.sin(theta) cpsi = np.cos(psi) spsi = np.sin(psi) mat = np.array([[cpsi * cphi - ctheta * sphi * spsi, cpsi * sphi + ctheta * cphi * spsi, spsi * stheta], [-spsi * cphi - ctheta * sphi * cpsi, -spsi * sphi + ctheta * cphi * cpsi, cpsi * stheta], [stheta * sphi, -stheta * cphi, ctheta]]) # Calculate the points that could possibly be inside the volume # Since the points are rotated, we cannot do anything directional # without more logic max_radius = np.sqrt(np.abs(mat).dot([a_squared, b_squared, c_squared])) (idx, shapes) = _getshapes_3d(center, max_radius, space.shape) subgrid = [g[idi] for (g, idi) in zip(grid, shapes)] offset_points = [vec * (xi - x0i)[..., None] for (xi, vec, x0i) in zip(subgrid, mat.T, [x0, y0, z0])] rotated = offset_points[0] + offset_points[1] + offset_points[2] np.square(rotated, out=rotated) radius = np.dot(rotated, scales) # depends on [control=['if'], data=[]] else: # Calculate the points that could possibly be inside the volume max_radius = np.sqrt([a_squared, b_squared, c_squared]) (idx, shapes) = _getshapes_3d(center, max_radius, space.shape) subgrid = [g[idi] for (g, idi) in zip(grid, shapes)] squared_dist = [ai * (xi - x0i) ** 2 for (xi, ai, x0i) in zip(subgrid, scales, [x0, y0, z0])] # Parentheses to get best order for broadcasting radius = squared_dist[0] + (squared_dist[1] + squared_dist[2]) # Find the points within the ellipse inside = radius <= 1 # Add the ellipse intensity to those points p[idx][inside] += intensity # depends on [control=['for'], data=['ellip']] return space.element(p)
def select(self, *column_or_columns):
    """Return a new table containing only ``column_or_columns``.

    Args:
        ``column_or_columns``: Columns to keep, given either as column
            labels (``str``) or as column indices (``int``).

    Returns:
        A new instance of ``Table`` holding copies of just the selected
        columns, in the order given in ``column_or_columns``. The original
        table is left unchanged.

    Raises:
        ``KeyError`` if any of ``column_or_columns`` are not in the table.

    >>> flowers = Table().with_columns(
    ...     'Number of petals', make_array(8, 34, 5),
    ...     'Name', make_array('lotus', 'sunflower', 'rose'),
    ...     'Weight', make_array(10, 5, 6)
    ... )
    >>> flowers
    Number of petals | Name      | Weight
    8                | lotus     | 10
    34               | sunflower | 5
    5                | rose      | 6
    >>> flowers.select('Number of petals', 'Weight')
    Number of petals | Weight
    8                | 10
    34               | 5
    5                | 6
    >>> flowers # original table unchanged
    Number of petals | Name      | Weight
    8                | lotus     | 10
    34               | sunflower | 5
    5                | rose      | 6
    >>> flowers.select(0, 2)
    Number of petals | Weight
    8                | 10
    34               | 5
    5                | 6
    """
    selected_labels = self._varargs_as_labels(column_or_columns)
    result = type(self)()
    # Copy each column so mutating the new table cannot affect this one.
    for column_label in selected_labels:
        self._add_column_and_format(result, column_label,
                                    np.copy(self[column_label]))
    return result
def function[select, parameter[self]]: constant[Return a table with only the columns in ``column_or_columns``. Args: ``column_or_columns``: Columns to select from the ``Table`` as either column labels (``str``) or column indices (``int``). Returns: A new instance of ``Table`` containing only selected columns. The columns of the new ``Table`` are in the order given in ``column_or_columns``. Raises: ``KeyError`` if any of ``column_or_columns`` are not in the table. >>> flowers = Table().with_columns( ... 'Number of petals', make_array(8, 34, 5), ... 'Name', make_array('lotus', 'sunflower', 'rose'), ... 'Weight', make_array(10, 5, 6) ... ) >>> flowers Number of petals | Name | Weight 8 | lotus | 10 34 | sunflower | 5 5 | rose | 6 >>> flowers.select('Number of petals', 'Weight') Number of petals | Weight 8 | 10 34 | 5 5 | 6 >>> flowers # original table unchanged Number of petals | Name | Weight 8 | lotus | 10 34 | sunflower | 5 5 | rose | 6 >>> flowers.select(0, 2) Number of petals | Weight 8 | 10 34 | 5 5 | 6 ] variable[labels] assign[=] call[name[self]._varargs_as_labels, parameter[name[column_or_columns]]] variable[table] assign[=] call[call[name[type], parameter[name[self]]], parameter[]] for taget[name[label]] in starred[name[labels]] begin[:] call[name[self]._add_column_and_format, parameter[name[table], name[label], call[name[np].copy, parameter[call[name[self]][name[label]]]]]] return[name[table]]
keyword[def] identifier[select] ( identifier[self] ,* identifier[column_or_columns] ): literal[string] identifier[labels] = identifier[self] . identifier[_varargs_as_labels] ( identifier[column_or_columns] ) identifier[table] = identifier[type] ( identifier[self] )() keyword[for] identifier[label] keyword[in] identifier[labels] : identifier[self] . identifier[_add_column_and_format] ( identifier[table] , identifier[label] , identifier[np] . identifier[copy] ( identifier[self] [ identifier[label] ])) keyword[return] identifier[table]
def select(self, *column_or_columns): """Return a table with only the columns in ``column_or_columns``. Args: ``column_or_columns``: Columns to select from the ``Table`` as either column labels (``str``) or column indices (``int``). Returns: A new instance of ``Table`` containing only selected columns. The columns of the new ``Table`` are in the order given in ``column_or_columns``. Raises: ``KeyError`` if any of ``column_or_columns`` are not in the table. >>> flowers = Table().with_columns( ... 'Number of petals', make_array(8, 34, 5), ... 'Name', make_array('lotus', 'sunflower', 'rose'), ... 'Weight', make_array(10, 5, 6) ... ) >>> flowers Number of petals | Name | Weight 8 | lotus | 10 34 | sunflower | 5 5 | rose | 6 >>> flowers.select('Number of petals', 'Weight') Number of petals | Weight 8 | 10 34 | 5 5 | 6 >>> flowers # original table unchanged Number of petals | Name | Weight 8 | lotus | 10 34 | sunflower | 5 5 | rose | 6 >>> flowers.select(0, 2) Number of petals | Weight 8 | 10 34 | 5 5 | 6 """ labels = self._varargs_as_labels(column_or_columns) table = type(self)() for label in labels: self._add_column_and_format(table, label, np.copy(self[label])) # depends on [control=['for'], data=['label']] return table
def context(self):
    """ Provides request context """
    # No key yet means this is a first-time registration; a key means we
    # are updating an existing client record.
    if self.key is None:
        payload = {
            "type": "client_associate",
            "application_type": self.type,
        }
    else:
        payload = {
            "type": "client_update",
            "application_type": self.type,
        }

    # Updates must carry the credentials identifying the client record.
    if self.key:
        payload["client_id"] = self.key
        payload["client_secret"] = self.secret

    # Optional metadata, included only when set.
    if self.name:
        payload["application_name"] = self.name
    if self.logo:
        payload["logo_url"] = self.logo
    if self.contacts:
        # space separated list
        payload["contacts"] = " ".join(self.contacts)
    if self.redirect:
        payload["redirect_uri"] = " ".join(self.redirect)

    # Serialize for transmission.
    return json.dumps(payload)
def function[context, parameter[self]]: constant[ Provides request context ] variable[type] assign[=] <ast.IfExp object at 0x7da1b2851630> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b2850220>, <ast.Constant object at 0x7da1b28517e0>], [<ast.Name object at 0x7da1b28506d0>, <ast.Attribute object at 0x7da1b28502b0>]] if name[self].key begin[:] call[name[data]][constant[client_id]] assign[=] name[self].key call[name[data]][constant[client_secret]] assign[=] name[self].secret if name[self].name begin[:] call[name[data]][constant[application_name]] assign[=] name[self].name if name[self].logo begin[:] call[name[data]][constant[logo_url]] assign[=] name[self].logo if name[self].contacts begin[:] call[name[data]][constant[contacts]] assign[=] call[constant[ ].join, parameter[name[self].contacts]] if name[self].redirect begin[:] call[name[data]][constant[redirect_uri]] assign[=] call[constant[ ].join, parameter[name[self].redirect]] return[call[name[json].dumps, parameter[name[data]]]]
keyword[def] identifier[context] ( identifier[self] ): literal[string] identifier[type] = literal[string] keyword[if] identifier[self] . identifier[key] keyword[is] keyword[None] keyword[else] literal[string] identifier[data] ={ literal[string] : identifier[type] , literal[string] : identifier[self] . identifier[type] , } keyword[if] identifier[self] . identifier[key] : identifier[data] [ literal[string] ]= identifier[self] . identifier[key] identifier[data] [ literal[string] ]= identifier[self] . identifier[secret] keyword[if] identifier[self] . identifier[name] : identifier[data] [ literal[string] ]= identifier[self] . identifier[name] keyword[if] identifier[self] . identifier[logo] : identifier[data] [ literal[string] ]= identifier[self] . identifier[logo] keyword[if] identifier[self] . identifier[contacts] : identifier[data] [ literal[string] ]= literal[string] . identifier[join] ( identifier[self] . identifier[contacts] ) keyword[if] identifier[self] . identifier[redirect] : identifier[data] [ literal[string] ]= literal[string] . identifier[join] ( identifier[self] . identifier[redirect] ) keyword[return] identifier[json] . identifier[dumps] ( identifier[data] )
def context(self): """ Provides request context """ type = 'client_associate' if self.key is None else 'client_update' data = {'type': type, 'application_type': self.type} # is this an update? if self.key: data['client_id'] = self.key data['client_secret'] = self.secret # depends on [control=['if'], data=[]] # Add optional params if self.name: data['application_name'] = self.name # depends on [control=['if'], data=[]] if self.logo: data['logo_url'] = self.logo # depends on [control=['if'], data=[]] if self.contacts: # space seporated list data['contacts'] = ' '.join(self.contacts) # depends on [control=['if'], data=[]] if self.redirect: data['redirect_uri'] = ' '.join(self.redirect) # depends on [control=['if'], data=[]] # Convert to JSON and send return json.dumps(data)
def _get_field_method(self, tp): """Returns a reference to the form element's constructor method.""" method = self.field_constructor.get(tp) if method and hasattr(self, method.__name__): return getattr(self, method.__name__) return method
def function[_get_field_method, parameter[self, tp]]: constant[Returns a reference to the form element's constructor method.] variable[method] assign[=] call[name[self].field_constructor.get, parameter[name[tp]]] if <ast.BoolOp object at 0x7da18f813d60> begin[:] return[call[name[getattr], parameter[name[self], name[method].__name__]]] return[name[method]]
keyword[def] identifier[_get_field_method] ( identifier[self] , identifier[tp] ): literal[string] identifier[method] = identifier[self] . identifier[field_constructor] . identifier[get] ( identifier[tp] ) keyword[if] identifier[method] keyword[and] identifier[hasattr] ( identifier[self] , identifier[method] . identifier[__name__] ): keyword[return] identifier[getattr] ( identifier[self] , identifier[method] . identifier[__name__] ) keyword[return] identifier[method]
def _get_field_method(self, tp): """Returns a reference to the form element's constructor method.""" method = self.field_constructor.get(tp) if method and hasattr(self, method.__name__): return getattr(self, method.__name__) # depends on [control=['if'], data=[]] return method
def get_collection(self, **kwargs): """ Establish a connection with the database. Returns MongoDb collection """ from pymongo import MongoClient if self.host and self.port: client = MongoClient(host=config.host, port=config.port) else: client = MongoClient() db = client[self.dbname] # Authenticate if needed if self.user and self.password: db.autenticate(self.user, password=self.password) return db[self.collection]
def function[get_collection, parameter[self]]: constant[ Establish a connection with the database. Returns MongoDb collection ] from relative_module[pymongo] import module[MongoClient] if <ast.BoolOp object at 0x7da18eb56440> begin[:] variable[client] assign[=] call[name[MongoClient], parameter[]] variable[db] assign[=] call[name[client]][name[self].dbname] if <ast.BoolOp object at 0x7da18eb551e0> begin[:] call[name[db].autenticate, parameter[name[self].user]] return[call[name[db]][name[self].collection]]
keyword[def] identifier[get_collection] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[from] identifier[pymongo] keyword[import] identifier[MongoClient] keyword[if] identifier[self] . identifier[host] keyword[and] identifier[self] . identifier[port] : identifier[client] = identifier[MongoClient] ( identifier[host] = identifier[config] . identifier[host] , identifier[port] = identifier[config] . identifier[port] ) keyword[else] : identifier[client] = identifier[MongoClient] () identifier[db] = identifier[client] [ identifier[self] . identifier[dbname] ] keyword[if] identifier[self] . identifier[user] keyword[and] identifier[self] . identifier[password] : identifier[db] . identifier[autenticate] ( identifier[self] . identifier[user] , identifier[password] = identifier[self] . identifier[password] ) keyword[return] identifier[db] [ identifier[self] . identifier[collection] ]
def get_collection(self, **kwargs): """ Establish a connection with the database. Returns MongoDb collection """ from pymongo import MongoClient if self.host and self.port: client = MongoClient(host=config.host, port=config.port) # depends on [control=['if'], data=[]] else: client = MongoClient() db = client[self.dbname] # Authenticate if needed if self.user and self.password: db.autenticate(self.user, password=self.password) # depends on [control=['if'], data=[]] return db[self.collection]
def is_deleted(self, record=None): """Check if record is deleted.""" record = record or self.revisions[-1][1] return any( col == 'deleted' for col in record.get('collections', []) )
def function[is_deleted, parameter[self, record]]: constant[Check if record is deleted.] variable[record] assign[=] <ast.BoolOp object at 0x7da1b016a9b0> return[call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b01695a0>]]]
keyword[def] identifier[is_deleted] ( identifier[self] , identifier[record] = keyword[None] ): literal[string] identifier[record] = identifier[record] keyword[or] identifier[self] . identifier[revisions] [- literal[int] ][ literal[int] ] keyword[return] identifier[any] ( identifier[col] == literal[string] keyword[for] identifier[col] keyword[in] identifier[record] . identifier[get] ( literal[string] ,[]) )
def is_deleted(self, record=None): """Check if record is deleted.""" record = record or self.revisions[-1][1] return any((col == 'deleted' for col in record.get('collections', [])))
def detach_popen(**kwargs): """ Use :class:`subprocess.Popen` to construct a child process, then hack the Popen so that it forgets the child it created, allowing it to survive a call to Popen.__del__. If the child process is not detached, there is a race between it exitting and __del__ being called. If it exits before __del__ runs, then __del__'s call to :func:`os.waitpid` will capture the one and only exit event delivered to this process, causing later 'legitimate' calls to fail with ECHILD. :param list close_on_error: Array of integer file descriptors to close on exception. :returns: Process ID of the new child. """ # This allows Popen() to be used for e.g. graceful post-fork error # handling, without tying the surrounding code into managing a Popen # object, which isn't possible for at least :mod:`mitogen.fork`. This # should be replaced by a swappable helper class in a future version. real_preexec_fn = kwargs.pop('preexec_fn', None) def preexec_fn(): if _preexec_hook: _preexec_hook() if real_preexec_fn: real_preexec_fn() proc = subprocess.Popen(preexec_fn=preexec_fn, **kwargs) proc._child_created = False return proc.pid
def function[detach_popen, parameter[]]: constant[ Use :class:`subprocess.Popen` to construct a child process, then hack the Popen so that it forgets the child it created, allowing it to survive a call to Popen.__del__. If the child process is not detached, there is a race between it exitting and __del__ being called. If it exits before __del__ runs, then __del__'s call to :func:`os.waitpid` will capture the one and only exit event delivered to this process, causing later 'legitimate' calls to fail with ECHILD. :param list close_on_error: Array of integer file descriptors to close on exception. :returns: Process ID of the new child. ] variable[real_preexec_fn] assign[=] call[name[kwargs].pop, parameter[constant[preexec_fn], constant[None]]] def function[preexec_fn, parameter[]]: if name[_preexec_hook] begin[:] call[name[_preexec_hook], parameter[]] if name[real_preexec_fn] begin[:] call[name[real_preexec_fn], parameter[]] variable[proc] assign[=] call[name[subprocess].Popen, parameter[]] name[proc]._child_created assign[=] constant[False] return[name[proc].pid]
keyword[def] identifier[detach_popen] (** identifier[kwargs] ): literal[string] identifier[real_preexec_fn] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[def] identifier[preexec_fn] (): keyword[if] identifier[_preexec_hook] : identifier[_preexec_hook] () keyword[if] identifier[real_preexec_fn] : identifier[real_preexec_fn] () identifier[proc] = identifier[subprocess] . identifier[Popen] ( identifier[preexec_fn] = identifier[preexec_fn] ,** identifier[kwargs] ) identifier[proc] . identifier[_child_created] = keyword[False] keyword[return] identifier[proc] . identifier[pid]
def detach_popen(**kwargs): """ Use :class:`subprocess.Popen` to construct a child process, then hack the Popen so that it forgets the child it created, allowing it to survive a call to Popen.__del__. If the child process is not detached, there is a race between it exitting and __del__ being called. If it exits before __del__ runs, then __del__'s call to :func:`os.waitpid` will capture the one and only exit event delivered to this process, causing later 'legitimate' calls to fail with ECHILD. :param list close_on_error: Array of integer file descriptors to close on exception. :returns: Process ID of the new child. """ # This allows Popen() to be used for e.g. graceful post-fork error # handling, without tying the surrounding code into managing a Popen # object, which isn't possible for at least :mod:`mitogen.fork`. This # should be replaced by a swappable helper class in a future version. real_preexec_fn = kwargs.pop('preexec_fn', None) def preexec_fn(): if _preexec_hook: _preexec_hook() # depends on [control=['if'], data=[]] if real_preexec_fn: real_preexec_fn() # depends on [control=['if'], data=[]] proc = subprocess.Popen(preexec_fn=preexec_fn, **kwargs) proc._child_created = False return proc.pid
def bans_list(self, limit=None, max_id=None, since_id=None, **kwargs): "https://developer.zendesk.com/rest_api/docs/chat/bans#get-all-bans" api_path = "/api/v2/bans" api_query = {} if "query" in kwargs.keys(): api_query.update(kwargs["query"]) del kwargs["query"] if limit: api_query.update({ "limit": limit, }) if max_id: api_query.update({ "max_id": max_id, }) if since_id: api_query.update({ "since_id": since_id, }) return self.call(api_path, query=api_query, **kwargs)
def function[bans_list, parameter[self, limit, max_id, since_id]]: constant[https://developer.zendesk.com/rest_api/docs/chat/bans#get-all-bans] variable[api_path] assign[=] constant[/api/v2/bans] variable[api_query] assign[=] dictionary[[], []] if compare[constant[query] in call[name[kwargs].keys, parameter[]]] begin[:] call[name[api_query].update, parameter[call[name[kwargs]][constant[query]]]] <ast.Delete object at 0x7da1b0f38fa0> if name[limit] begin[:] call[name[api_query].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0f3a0b0>], [<ast.Name object at 0x7da1b0f38cd0>]]]] if name[max_id] begin[:] call[name[api_query].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0f3a830>], [<ast.Name object at 0x7da1b0f39390>]]]] if name[since_id] begin[:] call[name[api_query].update, parameter[dictionary[[<ast.Constant object at 0x7da1b0f3a440>], [<ast.Name object at 0x7da1b0f389d0>]]]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[bans_list] ( identifier[self] , identifier[limit] = keyword[None] , identifier[max_id] = keyword[None] , identifier[since_id] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_query] ={} keyword[if] literal[string] keyword[in] identifier[kwargs] . identifier[keys] (): identifier[api_query] . identifier[update] ( identifier[kwargs] [ literal[string] ]) keyword[del] identifier[kwargs] [ literal[string] ] keyword[if] identifier[limit] : identifier[api_query] . identifier[update] ({ literal[string] : identifier[limit] , }) keyword[if] identifier[max_id] : identifier[api_query] . identifier[update] ({ literal[string] : identifier[max_id] , }) keyword[if] identifier[since_id] : identifier[api_query] . identifier[update] ({ literal[string] : identifier[since_id] , }) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[query] = identifier[api_query] ,** identifier[kwargs] )
def bans_list(self, limit=None, max_id=None, since_id=None, **kwargs): """https://developer.zendesk.com/rest_api/docs/chat/bans#get-all-bans""" api_path = '/api/v2/bans' api_query = {} if 'query' in kwargs.keys(): api_query.update(kwargs['query']) del kwargs['query'] # depends on [control=['if'], data=[]] if limit: api_query.update({'limit': limit}) # depends on [control=['if'], data=[]] if max_id: api_query.update({'max_id': max_id}) # depends on [control=['if'], data=[]] if since_id: api_query.update({'since_id': since_id}) # depends on [control=['if'], data=[]] return self.call(api_path, query=api_query, **kwargs)
def print_config(): # pragma: no cover """Print config entry function.""" description = """\ Print the deployment settings for a Pyramid application. Example: 'psettings deployment.ini' """ parser = argparse.ArgumentParser( description=textwrap.dedent(description) ) parser.add_argument( 'config_uri', type=str, help='an integer for the accumulator' ) parser.add_argument( '-k', '--key', dest='key', metavar='PREFIX', type=str, action='store', help=( "Tells script to print only specified" " config tree provided by dotted name" ) ) args = parser.parse_args(sys.argv[1:]) config_uri = args.config_uri env = bootstrap(config_uri) config, closer = env['registry']['config'], env['closer'] try: print(printer(slice_config(config, args.key))) except KeyError: print( 'Sorry, but the key path {0}, does not exists in Your config!' .format(args.key) ) finally: closer()
def function[print_config, parameter[]]: constant[Print config entry function.] variable[description] assign[=] constant[ Print the deployment settings for a Pyramid application. Example: 'psettings deployment.ini' ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[config_uri]]] call[name[parser].add_argument, parameter[constant[-k], constant[--key]]] variable[args] assign[=] call[name[parser].parse_args, parameter[call[name[sys].argv][<ast.Slice object at 0x7da1b14345e0>]]] variable[config_uri] assign[=] name[args].config_uri variable[env] assign[=] call[name[bootstrap], parameter[name[config_uri]]] <ast.Tuple object at 0x7da1b1435240> assign[=] tuple[[<ast.Subscript object at 0x7da1b1434b50>, <ast.Subscript object at 0x7da1b14358a0>]] <ast.Try object at 0x7da1b1435ff0>
keyword[def] identifier[print_config] (): literal[string] identifier[description] = literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[textwrap] . identifier[dedent] ( identifier[description] ) ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[dest] = literal[string] , identifier[metavar] = literal[string] , identifier[type] = identifier[str] , identifier[action] = literal[string] , identifier[help] =( literal[string] literal[string] ) ) identifier[args] = identifier[parser] . identifier[parse_args] ( identifier[sys] . identifier[argv] [ literal[int] :]) identifier[config_uri] = identifier[args] . identifier[config_uri] identifier[env] = identifier[bootstrap] ( identifier[config_uri] ) identifier[config] , identifier[closer] = identifier[env] [ literal[string] ][ literal[string] ], identifier[env] [ literal[string] ] keyword[try] : identifier[print] ( identifier[printer] ( identifier[slice_config] ( identifier[config] , identifier[args] . identifier[key] ))) keyword[except] identifier[KeyError] : identifier[print] ( literal[string] . identifier[format] ( identifier[args] . identifier[key] ) ) keyword[finally] : identifier[closer] ()
def print_config(): # pragma: no cover 'Print config entry function.' description = " Print the deployment settings for a Pyramid application. Example:\n 'psettings deployment.ini'\n " parser = argparse.ArgumentParser(description=textwrap.dedent(description)) parser.add_argument('config_uri', type=str, help='an integer for the accumulator') parser.add_argument('-k', '--key', dest='key', metavar='PREFIX', type=str, action='store', help='Tells script to print only specified config tree provided by dotted name') args = parser.parse_args(sys.argv[1:]) config_uri = args.config_uri env = bootstrap(config_uri) (config, closer) = (env['registry']['config'], env['closer']) try: print(printer(slice_config(config, args.key))) # depends on [control=['try'], data=[]] except KeyError: print('Sorry, but the key path {0}, does not exists in Your config!'.format(args.key)) # depends on [control=['except'], data=[]] finally: closer()
def get_submodules(mod): """Get all submodules of a given module""" def catch_exceptions(module): pass try: m = __import__(mod) submodules = [mod] submods = pkgutil.walk_packages(m.__path__, m.__name__ + '.', catch_exceptions) for sm in submods: sm_name = sm[1] submodules.append(sm_name) except ImportError: return [] except: return [mod] return submodules
def function[get_submodules, parameter[mod]]: constant[Get all submodules of a given module] def function[catch_exceptions, parameter[module]]: pass <ast.Try object at 0x7da20e9b2cb0> return[name[submodules]]
keyword[def] identifier[get_submodules] ( identifier[mod] ): literal[string] keyword[def] identifier[catch_exceptions] ( identifier[module] ): keyword[pass] keyword[try] : identifier[m] = identifier[__import__] ( identifier[mod] ) identifier[submodules] =[ identifier[mod] ] identifier[submods] = identifier[pkgutil] . identifier[walk_packages] ( identifier[m] . identifier[__path__] , identifier[m] . identifier[__name__] + literal[string] , identifier[catch_exceptions] ) keyword[for] identifier[sm] keyword[in] identifier[submods] : identifier[sm_name] = identifier[sm] [ literal[int] ] identifier[submodules] . identifier[append] ( identifier[sm_name] ) keyword[except] identifier[ImportError] : keyword[return] [] keyword[except] : keyword[return] [ identifier[mod] ] keyword[return] identifier[submodules]
def get_submodules(mod): """Get all submodules of a given module""" def catch_exceptions(module): pass try: m = __import__(mod) submodules = [mod] submods = pkgutil.walk_packages(m.__path__, m.__name__ + '.', catch_exceptions) for sm in submods: sm_name = sm[1] submodules.append(sm_name) # depends on [control=['for'], data=['sm']] # depends on [control=['try'], data=[]] except ImportError: return [] # depends on [control=['except'], data=[]] except: return [mod] # depends on [control=['except'], data=[]] return submodules
def create_class(self, name: str) -> ConstantClass: """ Creates a new :class:`ConstantClass`, adding it to the pool and returning it. :param name: The name of the new class. """ self.append(( 7, self.create_utf8(name).index )) return self.get(self.raw_count - 1)
def function[create_class, parameter[self, name]]: constant[ Creates a new :class:`ConstantClass`, adding it to the pool and returning it. :param name: The name of the new class. ] call[name[self].append, parameter[tuple[[<ast.Constant object at 0x7da1b259d510>, <ast.Attribute object at 0x7da1b259d0f0>]]]] return[call[name[self].get, parameter[binary_operation[name[self].raw_count - constant[1]]]]]
keyword[def] identifier[create_class] ( identifier[self] , identifier[name] : identifier[str] )-> identifier[ConstantClass] : literal[string] identifier[self] . identifier[append] (( literal[int] , identifier[self] . identifier[create_utf8] ( identifier[name] ). identifier[index] )) keyword[return] identifier[self] . identifier[get] ( identifier[self] . identifier[raw_count] - literal[int] )
def create_class(self, name: str) -> ConstantClass: """ Creates a new :class:`ConstantClass`, adding it to the pool and returning it. :param name: The name of the new class. """ self.append((7, self.create_utf8(name).index)) return self.get(self.raw_count - 1)
def _maybe_extract(compressed_filename, directory, extension=None): """ Extract a compressed file to ``directory``. Args: compressed_filename (str): Compressed file. directory (str): Extract to directory. extension (str, optional): Extension of the file; Otherwise, attempts to extract extension from the filename. """ logger.info('Extracting {}'.format(compressed_filename)) if extension is None: basename = os.path.basename(compressed_filename) extension = basename.split('.', 1)[1] if 'zip' in extension: with zipfile.ZipFile(compressed_filename, "r") as zip_: zip_.extractall(directory) elif 'tar' in extension or 'tgz' in extension: with tarfile.open(compressed_filename, mode='r') as tar: tar.extractall(path=directory) logger.info('Extracted {}'.format(compressed_filename))
def function[_maybe_extract, parameter[compressed_filename, directory, extension]]: constant[ Extract a compressed file to ``directory``. Args: compressed_filename (str): Compressed file. directory (str): Extract to directory. extension (str, optional): Extension of the file; Otherwise, attempts to extract extension from the filename. ] call[name[logger].info, parameter[call[constant[Extracting {}].format, parameter[name[compressed_filename]]]]] if compare[name[extension] is constant[None]] begin[:] variable[basename] assign[=] call[name[os].path.basename, parameter[name[compressed_filename]]] variable[extension] assign[=] call[call[name[basename].split, parameter[constant[.], constant[1]]]][constant[1]] if compare[constant[zip] in name[extension]] begin[:] with call[name[zipfile].ZipFile, parameter[name[compressed_filename], constant[r]]] begin[:] call[name[zip_].extractall, parameter[name[directory]]] call[name[logger].info, parameter[call[constant[Extracted {}].format, parameter[name[compressed_filename]]]]]
keyword[def] identifier[_maybe_extract] ( identifier[compressed_filename] , identifier[directory] , identifier[extension] = keyword[None] ): literal[string] identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[compressed_filename] )) keyword[if] identifier[extension] keyword[is] keyword[None] : identifier[basename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[compressed_filename] ) identifier[extension] = identifier[basename] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ] keyword[if] literal[string] keyword[in] identifier[extension] : keyword[with] identifier[zipfile] . identifier[ZipFile] ( identifier[compressed_filename] , literal[string] ) keyword[as] identifier[zip_] : identifier[zip_] . identifier[extractall] ( identifier[directory] ) keyword[elif] literal[string] keyword[in] identifier[extension] keyword[or] literal[string] keyword[in] identifier[extension] : keyword[with] identifier[tarfile] . identifier[open] ( identifier[compressed_filename] , identifier[mode] = literal[string] ) keyword[as] identifier[tar] : identifier[tar] . identifier[extractall] ( identifier[path] = identifier[directory] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[compressed_filename] ))
def _maybe_extract(compressed_filename, directory, extension=None): """ Extract a compressed file to ``directory``. Args: compressed_filename (str): Compressed file. directory (str): Extract to directory. extension (str, optional): Extension of the file; Otherwise, attempts to extract extension from the filename. """ logger.info('Extracting {}'.format(compressed_filename)) if extension is None: basename = os.path.basename(compressed_filename) extension = basename.split('.', 1)[1] # depends on [control=['if'], data=['extension']] if 'zip' in extension: with zipfile.ZipFile(compressed_filename, 'r') as zip_: zip_.extractall(directory) # depends on [control=['with'], data=['zip_']] # depends on [control=['if'], data=[]] elif 'tar' in extension or 'tgz' in extension: with tarfile.open(compressed_filename, mode='r') as tar: tar.extractall(path=directory) # depends on [control=['with'], data=['tar']] # depends on [control=['if'], data=[]] logger.info('Extracted {}'.format(compressed_filename))
def compare_registries(fs0, fs1, concurrent=False): """Compares the Windows Registry contained within the two File Systems. If the concurrent flag is True, two processes will be used speeding up the comparison on multiple CPUs. Returns a dictionary. {'created_keys': {'\\Reg\\Key': (('Key', 'Type', 'Value'), ...)} 'deleted_keys': ['\\Reg\\Key', ...], 'created_values': {'\\Reg\\Key': (('Key', 'Type', 'NewValue'), ...)}, 'deleted_values': {'\\Reg\\Key': (('Key', 'Type', 'OldValue'), ...)}, 'modified_values': {'\\Reg\\Key': (('Key', 'Type', 'NewValue'), ...)}} """ hives = compare_hives(fs0, fs1) if concurrent: future0 = concurrent_parse_registries(fs0, hives) future1 = concurrent_parse_registries(fs1, hives) registry0 = future0.result() registry1 = future1.result() else: registry0 = parse_registries(fs0, hives) registry1 = parse_registries(fs1, hives) return registry_comparison(registry0, registry1)
def function[compare_registries, parameter[fs0, fs1, concurrent]]: constant[Compares the Windows Registry contained within the two File Systems. If the concurrent flag is True, two processes will be used speeding up the comparison on multiple CPUs. Returns a dictionary. {'created_keys': {'\Reg\Key': (('Key', 'Type', 'Value'), ...)} 'deleted_keys': ['\Reg\Key', ...], 'created_values': {'\Reg\Key': (('Key', 'Type', 'NewValue'), ...)}, 'deleted_values': {'\Reg\Key': (('Key', 'Type', 'OldValue'), ...)}, 'modified_values': {'\Reg\Key': (('Key', 'Type', 'NewValue'), ...)}} ] variable[hives] assign[=] call[name[compare_hives], parameter[name[fs0], name[fs1]]] if name[concurrent] begin[:] variable[future0] assign[=] call[name[concurrent_parse_registries], parameter[name[fs0], name[hives]]] variable[future1] assign[=] call[name[concurrent_parse_registries], parameter[name[fs1], name[hives]]] variable[registry0] assign[=] call[name[future0].result, parameter[]] variable[registry1] assign[=] call[name[future1].result, parameter[]] return[call[name[registry_comparison], parameter[name[registry0], name[registry1]]]]
keyword[def] identifier[compare_registries] ( identifier[fs0] , identifier[fs1] , identifier[concurrent] = keyword[False] ): literal[string] identifier[hives] = identifier[compare_hives] ( identifier[fs0] , identifier[fs1] ) keyword[if] identifier[concurrent] : identifier[future0] = identifier[concurrent_parse_registries] ( identifier[fs0] , identifier[hives] ) identifier[future1] = identifier[concurrent_parse_registries] ( identifier[fs1] , identifier[hives] ) identifier[registry0] = identifier[future0] . identifier[result] () identifier[registry1] = identifier[future1] . identifier[result] () keyword[else] : identifier[registry0] = identifier[parse_registries] ( identifier[fs0] , identifier[hives] ) identifier[registry1] = identifier[parse_registries] ( identifier[fs1] , identifier[hives] ) keyword[return] identifier[registry_comparison] ( identifier[registry0] , identifier[registry1] )
def compare_registries(fs0, fs1, concurrent=False): """Compares the Windows Registry contained within the two File Systems. If the concurrent flag is True, two processes will be used speeding up the comparison on multiple CPUs. Returns a dictionary. {'created_keys': {'\\Reg\\Key': (('Key', 'Type', 'Value'), ...)} 'deleted_keys': ['\\Reg\\Key', ...], 'created_values': {'\\Reg\\Key': (('Key', 'Type', 'NewValue'), ...)}, 'deleted_values': {'\\Reg\\Key': (('Key', 'Type', 'OldValue'), ...)}, 'modified_values': {'\\Reg\\Key': (('Key', 'Type', 'NewValue'), ...)}} """ hives = compare_hives(fs0, fs1) if concurrent: future0 = concurrent_parse_registries(fs0, hives) future1 = concurrent_parse_registries(fs1, hives) registry0 = future0.result() registry1 = future1.result() # depends on [control=['if'], data=[]] else: registry0 = parse_registries(fs0, hives) registry1 = parse_registries(fs1, hives) return registry_comparison(registry0, registry1)
def main(): """Ideally we shouldn't lose the first second of events""" with Input() as input_generator: def extra_bytes_callback(string): print('got extra bytes', repr(string)) print('type:', type(string)) input_generator.unget_bytes(string) time.sleep(1) with CursorAwareWindow(extra_bytes_callback=extra_bytes_callback) as window: window.get_cursor_position() for e in input_generator: print(repr(e))
def function[main, parameter[]]: constant[Ideally we shouldn't lose the first second of events] with call[name[Input], parameter[]] begin[:] def function[extra_bytes_callback, parameter[string]]: call[name[print], parameter[constant[got extra bytes], call[name[repr], parameter[name[string]]]]] call[name[print], parameter[constant[type:], call[name[type], parameter[name[string]]]]] call[name[input_generator].unget_bytes, parameter[name[string]]] call[name[time].sleep, parameter[constant[1]]] with call[name[CursorAwareWindow], parameter[]] begin[:] call[name[window].get_cursor_position, parameter[]] for taget[name[e]] in starred[name[input_generator]] begin[:] call[name[print], parameter[call[name[repr], parameter[name[e]]]]]
keyword[def] identifier[main] (): literal[string] keyword[with] identifier[Input] () keyword[as] identifier[input_generator] : keyword[def] identifier[extra_bytes_callback] ( identifier[string] ): identifier[print] ( literal[string] , identifier[repr] ( identifier[string] )) identifier[print] ( literal[string] , identifier[type] ( identifier[string] )) identifier[input_generator] . identifier[unget_bytes] ( identifier[string] ) identifier[time] . identifier[sleep] ( literal[int] ) keyword[with] identifier[CursorAwareWindow] ( identifier[extra_bytes_callback] = identifier[extra_bytes_callback] ) keyword[as] identifier[window] : identifier[window] . identifier[get_cursor_position] () keyword[for] identifier[e] keyword[in] identifier[input_generator] : identifier[print] ( identifier[repr] ( identifier[e] ))
def main(): """Ideally we shouldn't lose the first second of events""" with Input() as input_generator: def extra_bytes_callback(string): print('got extra bytes', repr(string)) print('type:', type(string)) input_generator.unget_bytes(string) time.sleep(1) with CursorAwareWindow(extra_bytes_callback=extra_bytes_callback) as window: window.get_cursor_position() for e in input_generator: print(repr(e)) # depends on [control=['for'], data=['e']] # depends on [control=['with'], data=['window']] # depends on [control=['with'], data=['input_generator']]
def _set_bgp_state(self, v, load=False): """ Setter method for bgp_state, mapped from YANG variable /bgp_state (container) If this variable is read-only (config: false) in the source YANG file, then _set_bgp_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_bgp_state() directly. YANG Description: BGP operational information """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=bgp_state.bgp_state, is_container='container', presence=False, yang_name="bgp-state", rest_name="bgp-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'bgp-bgp', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """bgp_state must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=bgp_state.bgp_state, is_container='container', presence=False, yang_name="bgp-state", rest_name="bgp-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'bgp-bgp', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='container', is_config=True)""", }) self.__bgp_state = t if hasattr(self, '_set'): self._set()
def function[_set_bgp_state, parameter[self, v, load]]: constant[ Setter method for bgp_state, mapped from YANG variable /bgp_state (container) If this variable is read-only (config: false) in the source YANG file, then _set_bgp_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_bgp_state() directly. YANG Description: BGP operational information ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da204623d90> name[self].__bgp_state assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_bgp_state] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[bgp_state] . identifier[bgp_state] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__bgp_state] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_bgp_state(self, v, load=False): """ Setter method for bgp_state, mapped from YANG variable /bgp_state (container) If this variable is read-only (config: false) in the source YANG file, then _set_bgp_state is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_bgp_state() directly. YANG Description: BGP operational information """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=bgp_state.bgp_state, is_container='container', presence=False, yang_name='bgp-state', rest_name='bgp-state', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'bgp-bgp', u'cli-suppress-show-path': None}}, namespace='urn:brocade.com:mgmt:brocade-bgp-operational', defining_module='brocade-bgp-operational', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'bgp_state must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=bgp_state.bgp_state, is_container=\'container\', presence=False, yang_name="bgp-state", rest_name="bgp-state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'callpoint\': u\'bgp-bgp\', u\'cli-suppress-show-path\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-bgp-operational\', defining_module=\'brocade-bgp-operational\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__bgp_state = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def cor(y_true, y_pred): """Compute Pearson correlation coefficient. """ y_true, y_pred = _mask_nan(y_true, y_pred) return np.corrcoef(y_true, y_pred)[0, 1]
def function[cor, parameter[y_true, y_pred]]: constant[Compute Pearson correlation coefficient. ] <ast.Tuple object at 0x7da20e955120> assign[=] call[name[_mask_nan], parameter[name[y_true], name[y_pred]]] return[call[call[name[np].corrcoef, parameter[name[y_true], name[y_pred]]]][tuple[[<ast.Constant object at 0x7da20e957ca0>, <ast.Constant object at 0x7da20e956c80>]]]]
keyword[def] identifier[cor] ( identifier[y_true] , identifier[y_pred] ): literal[string] identifier[y_true] , identifier[y_pred] = identifier[_mask_nan] ( identifier[y_true] , identifier[y_pred] ) keyword[return] identifier[np] . identifier[corrcoef] ( identifier[y_true] , identifier[y_pred] )[ literal[int] , literal[int] ]
def cor(y_true, y_pred): """Compute Pearson correlation coefficient. """ (y_true, y_pred) = _mask_nan(y_true, y_pred) return np.corrcoef(y_true, y_pred)[0, 1]
def LogContrast(gain=1, per_channel=False, name=None, deterministic=False, random_state=None): """ Adjust contrast by scaling each pixel value to ``255 * gain * log_2(1 + I_ij/255)``. dtype support:: See :func:`imgaug.augmenters.contrast.adjust_contrast_log`. Parameters ---------- gain : number or tuple of number or list of number or imgaug.parameters.StochasticParameter, optional Multiplier for the logarithm result. Values around 1.0 lead to a contrast-adjusted images. Values above 1.0 quickly lead to partially broken images due to exceeding the datatype's value range. * If a number, then that value will be used for all images. * If a tuple ``(a, b)``, then a value from the range ``[a, b]`` will be used per image. * If a list, then a random value will be sampled from that list per image. * If a StochasticParameter, then a value will be sampled per image from that parameter. per_channel : bool or float, optional Whether to use the same value for all channels (False) or to sample a new value for each channel (True). If this value is a float ``p``, then for ``p`` percent of all images `per_channel` will be treated as True, otherwise as False. name : None or str, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. deterministic : bool, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. random_state : None or int or numpy.random.RandomState, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. Returns ------- _ContrastFuncWrapper Augmenter to perform logarithmic contrast adjustment. """ # TODO add inv parameter? 
params1d = [iap.handle_continuous_param(gain, "gain", value_range=(0, None), tuple_to_uniform=True, list_to_choice=True)] func = adjust_contrast_log return _ContrastFuncWrapper( func, params1d, per_channel, dtypes_allowed=["uint8", "uint16", "uint32", "uint64", "int8", "int16", "int32", "int64", "float16", "float32", "float64"], dtypes_disallowed=["float96", "float128", "float256", "bool"], name=name if name is not None else ia.caller_name(), deterministic=deterministic, random_state=random_state )
def function[LogContrast, parameter[gain, per_channel, name, deterministic, random_state]]: constant[ Adjust contrast by scaling each pixel value to ``255 * gain * log_2(1 + I_ij/255)``. dtype support:: See :func:`imgaug.augmenters.contrast.adjust_contrast_log`. Parameters ---------- gain : number or tuple of number or list of number or imgaug.parameters.StochasticParameter, optional Multiplier for the logarithm result. Values around 1.0 lead to a contrast-adjusted images. Values above 1.0 quickly lead to partially broken images due to exceeding the datatype's value range. * If a number, then that value will be used for all images. * If a tuple ``(a, b)``, then a value from the range ``[a, b]`` will be used per image. * If a list, then a random value will be sampled from that list per image. * If a StochasticParameter, then a value will be sampled per image from that parameter. per_channel : bool or float, optional Whether to use the same value for all channels (False) or to sample a new value for each channel (True). If this value is a float ``p``, then for ``p`` percent of all images `per_channel` will be treated as True, otherwise as False. name : None or str, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. deterministic : bool, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. random_state : None or int or numpy.random.RandomState, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. Returns ------- _ContrastFuncWrapper Augmenter to perform logarithmic contrast adjustment. ] variable[params1d] assign[=] list[[<ast.Call object at 0x7da1b0265f00>]] variable[func] assign[=] name[adjust_contrast_log] return[call[name[_ContrastFuncWrapper], parameter[name[func], name[params1d], name[per_channel]]]]
keyword[def] identifier[LogContrast] ( identifier[gain] = literal[int] , identifier[per_channel] = keyword[False] , identifier[name] = keyword[None] , identifier[deterministic] = keyword[False] , identifier[random_state] = keyword[None] ): literal[string] identifier[params1d] =[ identifier[iap] . identifier[handle_continuous_param] ( identifier[gain] , literal[string] , identifier[value_range] =( literal[int] , keyword[None] ), identifier[tuple_to_uniform] = keyword[True] , identifier[list_to_choice] = keyword[True] )] identifier[func] = identifier[adjust_contrast_log] keyword[return] identifier[_ContrastFuncWrapper] ( identifier[func] , identifier[params1d] , identifier[per_channel] , identifier[dtypes_allowed] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ], identifier[dtypes_disallowed] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[name] = identifier[name] keyword[if] identifier[name] keyword[is] keyword[not] keyword[None] keyword[else] identifier[ia] . identifier[caller_name] (), identifier[deterministic] = identifier[deterministic] , identifier[random_state] = identifier[random_state] )
def LogContrast(gain=1, per_channel=False, name=None, deterministic=False, random_state=None): """ Adjust contrast by scaling each pixel value to ``255 * gain * log_2(1 + I_ij/255)``. dtype support:: See :func:`imgaug.augmenters.contrast.adjust_contrast_log`. Parameters ---------- gain : number or tuple of number or list of number or imgaug.parameters.StochasticParameter, optional Multiplier for the logarithm result. Values around 1.0 lead to a contrast-adjusted images. Values above 1.0 quickly lead to partially broken images due to exceeding the datatype's value range. * If a number, then that value will be used for all images. * If a tuple ``(a, b)``, then a value from the range ``[a, b]`` will be used per image. * If a list, then a random value will be sampled from that list per image. * If a StochasticParameter, then a value will be sampled per image from that parameter. per_channel : bool or float, optional Whether to use the same value for all channels (False) or to sample a new value for each channel (True). If this value is a float ``p``, then for ``p`` percent of all images `per_channel` will be treated as True, otherwise as False. name : None or str, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. deterministic : bool, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. random_state : None or int or numpy.random.RandomState, optional See :func:`imgaug.augmenters.meta.Augmenter.__init__`. Returns ------- _ContrastFuncWrapper Augmenter to perform logarithmic contrast adjustment. """ # TODO add inv parameter? 
params1d = [iap.handle_continuous_param(gain, 'gain', value_range=(0, None), tuple_to_uniform=True, list_to_choice=True)] func = adjust_contrast_log return _ContrastFuncWrapper(func, params1d, per_channel, dtypes_allowed=['uint8', 'uint16', 'uint32', 'uint64', 'int8', 'int16', 'int32', 'int64', 'float16', 'float32', 'float64'], dtypes_disallowed=['float96', 'float128', 'float256', 'bool'], name=name if name is not None else ia.caller_name(), deterministic=deterministic, random_state=random_state)
def variance_inflation_factors(df): ''' Computes the variance inflation factor (VIF) for each column in the df. Returns a pandas Series of VIFs Args: df: pandas DataFrame with columns to run diagnostics on ''' corr = np.corrcoef(df, rowvar=0) corr_inv = np.linalg.inv(corr) vifs = np.diagonal(corr_inv) return pd.Series(vifs, df.columns, name='VIF')
def function[variance_inflation_factors, parameter[df]]: constant[ Computes the variance inflation factor (VIF) for each column in the df. Returns a pandas Series of VIFs Args: df: pandas DataFrame with columns to run diagnostics on ] variable[corr] assign[=] call[name[np].corrcoef, parameter[name[df]]] variable[corr_inv] assign[=] call[name[np].linalg.inv, parameter[name[corr]]] variable[vifs] assign[=] call[name[np].diagonal, parameter[name[corr_inv]]] return[call[name[pd].Series, parameter[name[vifs], name[df].columns]]]
keyword[def] identifier[variance_inflation_factors] ( identifier[df] ): literal[string] identifier[corr] = identifier[np] . identifier[corrcoef] ( identifier[df] , identifier[rowvar] = literal[int] ) identifier[corr_inv] = identifier[np] . identifier[linalg] . identifier[inv] ( identifier[corr] ) identifier[vifs] = identifier[np] . identifier[diagonal] ( identifier[corr_inv] ) keyword[return] identifier[pd] . identifier[Series] ( identifier[vifs] , identifier[df] . identifier[columns] , identifier[name] = literal[string] )
def variance_inflation_factors(df): """ Computes the variance inflation factor (VIF) for each column in the df. Returns a pandas Series of VIFs Args: df: pandas DataFrame with columns to run diagnostics on """ corr = np.corrcoef(df, rowvar=0) corr_inv = np.linalg.inv(corr) vifs = np.diagonal(corr_inv) return pd.Series(vifs, df.columns, name='VIF')
def watch_log_for(self, exprs, from_mark=None, timeout=600, process=None, verbose=False, filename='system.log'): """ Watch the log until one or more (regular) expression are found. This methods when all the expressions have been found or the method timeouts (a TimeoutError is then raised). On successful completion, a list of pair (line matched, match object) is returned. """ start = time.time() tofind = [exprs] if isinstance(exprs, string_types) else exprs tofind = [re.compile(e) for e in tofind] matchings = [] reads = "" if len(tofind) == 0: return None log_file = os.path.join(self.get_path(), 'logs', filename) output_read = False while not os.path.exists(log_file): time.sleep(.5) if start + timeout < time.time(): raise TimeoutError(time.strftime("%d %b %Y %H:%M:%S", time.gmtime()) + " [" + self.name + "] Timed out waiting for {} to be created.".format(log_file)) if process and not output_read: process.poll() if process.returncode is not None: self.print_process_output(self.name, process, verbose) output_read = True if process.returncode != 0: raise RuntimeError() # Shouldn't reuse RuntimeError but I'm lazy with open(log_file) as f: if from_mark: f.seek(from_mark) while True: # First, if we have a process to check, then check it. 
# Skip on Windows - stdout/stderr is cassandra.bat if not common.is_win() and not output_read: if process: process.poll() if process.returncode is not None: self.print_process_output(self.name, process, verbose) output_read = True if process.returncode != 0: raise RuntimeError() # Shouldn't reuse RuntimeError but I'm lazy line = f.readline() if line: reads = reads + line for e in tofind: m = e.search(line) if m: matchings.append((line, m)) tofind.remove(e) if len(tofind) == 0: return matchings[0] if isinstance(exprs, string_types) else matchings else: # yep, it's ugly time.sleep(1) if start + timeout < time.time(): raise TimeoutError(time.strftime("%d %b %Y %H:%M:%S", time.gmtime()) + " [" + self.name + "] Missing: " + str([e.pattern for e in tofind]) + ":\n" + reads[:50] + ".....\nSee {} for remainder".format(filename)) if process: if common.is_win(): if not self.is_running(): return None else: process.poll() if process.returncode == 0: return None
def function[watch_log_for, parameter[self, exprs, from_mark, timeout, process, verbose, filename]]: constant[ Watch the log until one or more (regular) expression are found. This methods when all the expressions have been found or the method timeouts (a TimeoutError is then raised). On successful completion, a list of pair (line matched, match object) is returned. ] variable[start] assign[=] call[name[time].time, parameter[]] variable[tofind] assign[=] <ast.IfExp object at 0x7da1b11bbaf0> variable[tofind] assign[=] <ast.ListComp object at 0x7da1b11bb910> variable[matchings] assign[=] list[[]] variable[reads] assign[=] constant[] if compare[call[name[len], parameter[name[tofind]]] equal[==] constant[0]] begin[:] return[constant[None]] variable[log_file] assign[=] call[name[os].path.join, parameter[call[name[self].get_path, parameter[]], constant[logs], name[filename]]] variable[output_read] assign[=] constant[False] while <ast.UnaryOp object at 0x7da1b11bb1c0> begin[:] call[name[time].sleep, parameter[constant[0.5]]] if compare[binary_operation[name[start] + name[timeout]] less[<] call[name[time].time, parameter[]]] begin[:] <ast.Raise object at 0x7da1b11bae00> if <ast.BoolOp object at 0x7da1b11baa10> begin[:] call[name[process].poll, parameter[]] if compare[name[process].returncode is_not constant[None]] begin[:] call[name[self].print_process_output, parameter[name[self].name, name[process], name[verbose]]] variable[output_read] assign[=] constant[True] if compare[name[process].returncode not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b11b9210> with call[name[open], parameter[name[log_file]]] begin[:] if name[from_mark] begin[:] call[name[f].seek, parameter[name[from_mark]]] while constant[True] begin[:] if <ast.BoolOp object at 0x7da1b11b8e20> begin[:] if name[process] begin[:] call[name[process].poll, parameter[]] if compare[name[process].returncode is_not constant[None]] begin[:] call[name[self].print_process_output, parameter[name[self].name, 
name[process], name[verbose]]] variable[output_read] assign[=] constant[True] if compare[name[process].returncode not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da1b11b8760> variable[line] assign[=] call[name[f].readline, parameter[]] if name[line] begin[:] variable[reads] assign[=] binary_operation[name[reads] + name[line]] for taget[name[e]] in starred[name[tofind]] begin[:] variable[m] assign[=] call[name[e].search, parameter[name[line]]] if name[m] begin[:] call[name[matchings].append, parameter[tuple[[<ast.Name object at 0x7da1b11b8190>, <ast.Name object at 0x7da1b11b8160>]]]] call[name[tofind].remove, parameter[name[e]]] if compare[call[name[len], parameter[name[tofind]]] equal[==] constant[0]] begin[:] return[<ast.IfExp object at 0x7da1b113fe50>] if name[process] begin[:] if call[name[common].is_win, parameter[]] begin[:] if <ast.UnaryOp object at 0x7da1b113d660> begin[:] return[constant[None]]
keyword[def] identifier[watch_log_for] ( identifier[self] , identifier[exprs] , identifier[from_mark] = keyword[None] , identifier[timeout] = literal[int] , identifier[process] = keyword[None] , identifier[verbose] = keyword[False] , identifier[filename] = literal[string] ): literal[string] identifier[start] = identifier[time] . identifier[time] () identifier[tofind] =[ identifier[exprs] ] keyword[if] identifier[isinstance] ( identifier[exprs] , identifier[string_types] ) keyword[else] identifier[exprs] identifier[tofind] =[ identifier[re] . identifier[compile] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[tofind] ] identifier[matchings] =[] identifier[reads] = literal[string] keyword[if] identifier[len] ( identifier[tofind] )== literal[int] : keyword[return] keyword[None] identifier[log_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[get_path] (), literal[string] , identifier[filename] ) identifier[output_read] = keyword[False] keyword[while] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[log_file] ): identifier[time] . identifier[sleep] ( literal[int] ) keyword[if] identifier[start] + identifier[timeout] < identifier[time] . identifier[time] (): keyword[raise] identifier[TimeoutError] ( identifier[time] . identifier[strftime] ( literal[string] , identifier[time] . identifier[gmtime] ())+ literal[string] + identifier[self] . identifier[name] + literal[string] . identifier[format] ( identifier[log_file] )) keyword[if] identifier[process] keyword[and] keyword[not] identifier[output_read] : identifier[process] . identifier[poll] () keyword[if] identifier[process] . identifier[returncode] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[print_process_output] ( identifier[self] . identifier[name] , identifier[process] , identifier[verbose] ) identifier[output_read] = keyword[True] keyword[if] identifier[process] . 
identifier[returncode] != literal[int] : keyword[raise] identifier[RuntimeError] () keyword[with] identifier[open] ( identifier[log_file] ) keyword[as] identifier[f] : keyword[if] identifier[from_mark] : identifier[f] . identifier[seek] ( identifier[from_mark] ) keyword[while] keyword[True] : keyword[if] keyword[not] identifier[common] . identifier[is_win] () keyword[and] keyword[not] identifier[output_read] : keyword[if] identifier[process] : identifier[process] . identifier[poll] () keyword[if] identifier[process] . identifier[returncode] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[print_process_output] ( identifier[self] . identifier[name] , identifier[process] , identifier[verbose] ) identifier[output_read] = keyword[True] keyword[if] identifier[process] . identifier[returncode] != literal[int] : keyword[raise] identifier[RuntimeError] () identifier[line] = identifier[f] . identifier[readline] () keyword[if] identifier[line] : identifier[reads] = identifier[reads] + identifier[line] keyword[for] identifier[e] keyword[in] identifier[tofind] : identifier[m] = identifier[e] . identifier[search] ( identifier[line] ) keyword[if] identifier[m] : identifier[matchings] . identifier[append] (( identifier[line] , identifier[m] )) identifier[tofind] . identifier[remove] ( identifier[e] ) keyword[if] identifier[len] ( identifier[tofind] )== literal[int] : keyword[return] identifier[matchings] [ literal[int] ] keyword[if] identifier[isinstance] ( identifier[exprs] , identifier[string_types] ) keyword[else] identifier[matchings] keyword[else] : identifier[time] . identifier[sleep] ( literal[int] ) keyword[if] identifier[start] + identifier[timeout] < identifier[time] . identifier[time] (): keyword[raise] identifier[TimeoutError] ( identifier[time] . identifier[strftime] ( literal[string] , identifier[time] . identifier[gmtime] ())+ literal[string] + identifier[self] . identifier[name] + literal[string] + identifier[str] ([ identifier[e] . 
identifier[pattern] keyword[for] identifier[e] keyword[in] identifier[tofind] ])+ literal[string] + identifier[reads] [: literal[int] ]+ literal[string] . identifier[format] ( identifier[filename] )) keyword[if] identifier[process] : keyword[if] identifier[common] . identifier[is_win] (): keyword[if] keyword[not] identifier[self] . identifier[is_running] (): keyword[return] keyword[None] keyword[else] : identifier[process] . identifier[poll] () keyword[if] identifier[process] . identifier[returncode] == literal[int] : keyword[return] keyword[None]
def watch_log_for(self, exprs, from_mark=None, timeout=600, process=None, verbose=False, filename='system.log'): """ Watch the log until one or more (regular) expression are found. This methods when all the expressions have been found or the method timeouts (a TimeoutError is then raised). On successful completion, a list of pair (line matched, match object) is returned. """ start = time.time() tofind = [exprs] if isinstance(exprs, string_types) else exprs tofind = [re.compile(e) for e in tofind] matchings = [] reads = '' if len(tofind) == 0: return None # depends on [control=['if'], data=[]] log_file = os.path.join(self.get_path(), 'logs', filename) output_read = False while not os.path.exists(log_file): time.sleep(0.5) if start + timeout < time.time(): raise TimeoutError(time.strftime('%d %b %Y %H:%M:%S', time.gmtime()) + ' [' + self.name + '] Timed out waiting for {} to be created.'.format(log_file)) # depends on [control=['if'], data=[]] if process and (not output_read): process.poll() if process.returncode is not None: self.print_process_output(self.name, process, verbose) output_read = True if process.returncode != 0: raise RuntimeError() # Shouldn't reuse RuntimeError but I'm lazy # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] with open(log_file) as f: if from_mark: f.seek(from_mark) # depends on [control=['if'], data=[]] while True: # First, if we have a process to check, then check it. 
# Skip on Windows - stdout/stderr is cassandra.bat if not common.is_win() and (not output_read): if process: process.poll() if process.returncode is not None: self.print_process_output(self.name, process, verbose) output_read = True if process.returncode != 0: raise RuntimeError() # Shouldn't reuse RuntimeError but I'm lazy # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] line = f.readline() if line: reads = reads + line for e in tofind: m = e.search(line) if m: matchings.append((line, m)) tofind.remove(e) if len(tofind) == 0: return matchings[0] if isinstance(exprs, string_types) else matchings # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['e']] # depends on [control=['if'], data=[]] else: # yep, it's ugly time.sleep(1) if start + timeout < time.time(): raise TimeoutError(time.strftime('%d %b %Y %H:%M:%S', time.gmtime()) + ' [' + self.name + '] Missing: ' + str([e.pattern for e in tofind]) + ':\n' + reads[:50] + '.....\nSee {} for remainder'.format(filename)) # depends on [control=['if'], data=[]] if process: if common.is_win(): if not self.is_running(): return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: process.poll() if process.returncode == 0: return None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['f']]
def getField(self, fld_name): """ Return :class:`~ekmmeters.Field` content, scaled and formatted. Args: fld_name (str): A :class:`~ekmmeters.Field` value which is on your meter. Returns: str: String value (scaled if numeric) for the field. """ result = "" if fld_name in self.m_req: result = self.m_req[fld_name][MeterData.StringValue] else: ekm_log("Requested nonexistent field: " + fld_name) return result
def function[getField, parameter[self, fld_name]]: constant[ Return :class:`~ekmmeters.Field` content, scaled and formatted. Args: fld_name (str): A :class:`~ekmmeters.Field` value which is on your meter. Returns: str: String value (scaled if numeric) for the field. ] variable[result] assign[=] constant[] if compare[name[fld_name] in name[self].m_req] begin[:] variable[result] assign[=] call[call[name[self].m_req][name[fld_name]]][name[MeterData].StringValue] return[name[result]]
keyword[def] identifier[getField] ( identifier[self] , identifier[fld_name] ): literal[string] identifier[result] = literal[string] keyword[if] identifier[fld_name] keyword[in] identifier[self] . identifier[m_req] : identifier[result] = identifier[self] . identifier[m_req] [ identifier[fld_name] ][ identifier[MeterData] . identifier[StringValue] ] keyword[else] : identifier[ekm_log] ( literal[string] + identifier[fld_name] ) keyword[return] identifier[result]
def getField(self, fld_name): """ Return :class:`~ekmmeters.Field` content, scaled and formatted. Args: fld_name (str): A :class:`~ekmmeters.Field` value which is on your meter. Returns: str: String value (scaled if numeric) for the field. """ result = '' if fld_name in self.m_req: result = self.m_req[fld_name][MeterData.StringValue] # depends on [control=['if'], data=['fld_name']] else: ekm_log('Requested nonexistent field: ' + fld_name) return result
def walk_files_info(self, relativePath=""): """ Walk the repository and yield tuples as the following:\n (relative path to relativePath joined with file name, file info dict). :parameters: #. relativePath (str): The relative path from which start the walk. """ def walk_files(directory, relativePath): directories = dict.__getitem__(directory, 'directories') files = dict.__getitem__(directory, 'files') for fname in sorted(files): info = dict.__getitem__(files,fname) yield os.path.join(relativePath, fname), info for k in sorted(dict.keys(directories)): path = os.path.join(relativePath, k) dir = dict.__getitem__(directories, k) for e in walk_files(dir, path): yield e dir, errorMessage = self.get_directory_info(relativePath) assert dir is not None, errorMessage return walk_files(dir, relativePath='')
def function[walk_files_info, parameter[self, relativePath]]: constant[ Walk the repository and yield tuples as the following: (relative path to relativePath joined with file name, file info dict). :parameters: #. relativePath (str): The relative path from which start the walk. ] def function[walk_files, parameter[directory, relativePath]]: variable[directories] assign[=] call[name[dict].__getitem__, parameter[name[directory], constant[directories]]] variable[files] assign[=] call[name[dict].__getitem__, parameter[name[directory], constant[files]]] for taget[name[fname]] in starred[call[name[sorted], parameter[name[files]]]] begin[:] variable[info] assign[=] call[name[dict].__getitem__, parameter[name[files], name[fname]]] <ast.Yield object at 0x7da207f03520> for taget[name[k]] in starred[call[name[sorted], parameter[call[name[dict].keys, parameter[name[directories]]]]]] begin[:] variable[path] assign[=] call[name[os].path.join, parameter[name[relativePath], name[k]]] variable[dir] assign[=] call[name[dict].__getitem__, parameter[name[directories], name[k]]] for taget[name[e]] in starred[call[name[walk_files], parameter[name[dir], name[path]]]] begin[:] <ast.Yield object at 0x7da207f02740> <ast.Tuple object at 0x7da207f03a60> assign[=] call[name[self].get_directory_info, parameter[name[relativePath]]] assert[compare[name[dir] is_not constant[None]]] return[call[name[walk_files], parameter[name[dir]]]]
keyword[def] identifier[walk_files_info] ( identifier[self] , identifier[relativePath] = literal[string] ): literal[string] keyword[def] identifier[walk_files] ( identifier[directory] , identifier[relativePath] ): identifier[directories] = identifier[dict] . identifier[__getitem__] ( identifier[directory] , literal[string] ) identifier[files] = identifier[dict] . identifier[__getitem__] ( identifier[directory] , literal[string] ) keyword[for] identifier[fname] keyword[in] identifier[sorted] ( identifier[files] ): identifier[info] = identifier[dict] . identifier[__getitem__] ( identifier[files] , identifier[fname] ) keyword[yield] identifier[os] . identifier[path] . identifier[join] ( identifier[relativePath] , identifier[fname] ), identifier[info] keyword[for] identifier[k] keyword[in] identifier[sorted] ( identifier[dict] . identifier[keys] ( identifier[directories] )): identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[relativePath] , identifier[k] ) identifier[dir] = identifier[dict] . identifier[__getitem__] ( identifier[directories] , identifier[k] ) keyword[for] identifier[e] keyword[in] identifier[walk_files] ( identifier[dir] , identifier[path] ): keyword[yield] identifier[e] identifier[dir] , identifier[errorMessage] = identifier[self] . identifier[get_directory_info] ( identifier[relativePath] ) keyword[assert] identifier[dir] keyword[is] keyword[not] keyword[None] , identifier[errorMessage] keyword[return] identifier[walk_files] ( identifier[dir] , identifier[relativePath] = literal[string] )
def walk_files_info(self, relativePath=''): """ Walk the repository and yield tuples as the following: (relative path to relativePath joined with file name, file info dict). :parameters: #. relativePath (str): The relative path from which start the walk. """ def walk_files(directory, relativePath): directories = dict.__getitem__(directory, 'directories') files = dict.__getitem__(directory, 'files') for fname in sorted(files): info = dict.__getitem__(files, fname) yield (os.path.join(relativePath, fname), info) # depends on [control=['for'], data=['fname']] for k in sorted(dict.keys(directories)): path = os.path.join(relativePath, k) dir = dict.__getitem__(directories, k) for e in walk_files(dir, path): yield e # depends on [control=['for'], data=['e']] # depends on [control=['for'], data=['k']] (dir, errorMessage) = self.get_directory_info(relativePath) assert dir is not None, errorMessage return walk_files(dir, relativePath='')
def extract_endpoints(api_module): """Return the endpoints from an API implementation module. The results returned by this are used to populate your HTTP layer's route handler, as well as by the documentation generator. """ if not hasattr(api_module, 'endpoints'): raise ValueError(("pale.extract_endpoints expected the passed in " "api_module to have an `endpoints` attribute, but it didn't!")) endpoints = api_module.endpoints if isinstance(endpoints, types.ModuleType): classes = [v for (k,v) in inspect.getmembers(endpoints, inspect.isclass)] elif isinstance(endpoints, (list, tuple)): classes = endpoints else: raise ValueError("Endpoints is not a module or list type!") instances = [] for cls in classes: if cls not in (Endpoint, PatchEndpoint, PutResourceEndpoint) and \ Endpoint in inspect.getmro(cls): source_code = inspect.getsource(cls) if "@requires_permission" in source_code: permission_match = re.search(r"@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+", source_code) if permission_match != None: cls._requires_permission = permission_match.group(1) instances.append(cls()) return instances
def function[extract_endpoints, parameter[api_module]]: constant[Return the endpoints from an API implementation module. The results returned by this are used to populate your HTTP layer's route handler, as well as by the documentation generator. ] if <ast.UnaryOp object at 0x7da204962bf0> begin[:] <ast.Raise object at 0x7da204960400> variable[endpoints] assign[=] name[api_module].endpoints if call[name[isinstance], parameter[name[endpoints], name[types].ModuleType]] begin[:] variable[classes] assign[=] <ast.ListComp object at 0x7da2049621d0> variable[instances] assign[=] list[[]] for taget[name[cls]] in starred[name[classes]] begin[:] if <ast.BoolOp object at 0x7da204963850> begin[:] variable[source_code] assign[=] call[name[inspect].getsource, parameter[name[cls]]] if compare[constant[@requires_permission] in name[source_code]] begin[:] variable[permission_match] assign[=] call[name[re].search, parameter[constant[@requires_permission\(\[?[\'\"]+(\w+)[\'\"]+], name[source_code]]] if compare[name[permission_match] not_equal[!=] constant[None]] begin[:] name[cls]._requires_permission assign[=] call[name[permission_match].group, parameter[constant[1]]] call[name[instances].append, parameter[call[name[cls], parameter[]]]] return[name[instances]]
keyword[def] identifier[extract_endpoints] ( identifier[api_module] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[api_module] , literal[string] ): keyword[raise] identifier[ValueError] (( literal[string] literal[string] )) identifier[endpoints] = identifier[api_module] . identifier[endpoints] keyword[if] identifier[isinstance] ( identifier[endpoints] , identifier[types] . identifier[ModuleType] ): identifier[classes] =[ identifier[v] keyword[for] ( identifier[k] , identifier[v] ) keyword[in] identifier[inspect] . identifier[getmembers] ( identifier[endpoints] , identifier[inspect] . identifier[isclass] )] keyword[elif] identifier[isinstance] ( identifier[endpoints] ,( identifier[list] , identifier[tuple] )): identifier[classes] = identifier[endpoints] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[instances] =[] keyword[for] identifier[cls] keyword[in] identifier[classes] : keyword[if] identifier[cls] keyword[not] keyword[in] ( identifier[Endpoint] , identifier[PatchEndpoint] , identifier[PutResourceEndpoint] ) keyword[and] identifier[Endpoint] keyword[in] identifier[inspect] . identifier[getmro] ( identifier[cls] ): identifier[source_code] = identifier[inspect] . identifier[getsource] ( identifier[cls] ) keyword[if] literal[string] keyword[in] identifier[source_code] : identifier[permission_match] = identifier[re] . identifier[search] ( literal[string] , identifier[source_code] ) keyword[if] identifier[permission_match] != keyword[None] : identifier[cls] . identifier[_requires_permission] = identifier[permission_match] . identifier[group] ( literal[int] ) identifier[instances] . identifier[append] ( identifier[cls] ()) keyword[return] identifier[instances]
def extract_endpoints(api_module): """Return the endpoints from an API implementation module. The results returned by this are used to populate your HTTP layer's route handler, as well as by the documentation generator. """ if not hasattr(api_module, 'endpoints'): raise ValueError("pale.extract_endpoints expected the passed in api_module to have an `endpoints` attribute, but it didn't!") # depends on [control=['if'], data=[]] endpoints = api_module.endpoints if isinstance(endpoints, types.ModuleType): classes = [v for (k, v) in inspect.getmembers(endpoints, inspect.isclass)] # depends on [control=['if'], data=[]] elif isinstance(endpoints, (list, tuple)): classes = endpoints # depends on [control=['if'], data=[]] else: raise ValueError('Endpoints is not a module or list type!') instances = [] for cls in classes: if cls not in (Endpoint, PatchEndpoint, PutResourceEndpoint) and Endpoint in inspect.getmro(cls): source_code = inspect.getsource(cls) if '@requires_permission' in source_code: permission_match = re.search('@requires_permission\\(\\[?[\\\'\\"]+(\\w+)[\\\'\\"]+', source_code) if permission_match != None: cls._requires_permission = permission_match.group(1) # depends on [control=['if'], data=['permission_match']] # depends on [control=['if'], data=['source_code']] instances.append(cls()) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['cls']] return instances
def _parseSymbols(self, sections): """Sets a list of symbols in each DYNSYM and SYMTAB section""" for section in sections: strtab = sections[section.header.sh_link] if section.header.sh_type in (int(SHT.DYNSYM), int(SHT.SYMTAB)): section.symbols = self.__parseSymbolEntriesForSection(section, strtab)
def function[_parseSymbols, parameter[self, sections]]: constant[Sets a list of symbols in each DYNSYM and SYMTAB section] for taget[name[section]] in starred[name[sections]] begin[:] variable[strtab] assign[=] call[name[sections]][name[section].header.sh_link] if compare[name[section].header.sh_type in tuple[[<ast.Call object at 0x7da18f09cdf0>, <ast.Call object at 0x7da18f09f1c0>]]] begin[:] name[section].symbols assign[=] call[name[self].__parseSymbolEntriesForSection, parameter[name[section], name[strtab]]]
keyword[def] identifier[_parseSymbols] ( identifier[self] , identifier[sections] ): literal[string] keyword[for] identifier[section] keyword[in] identifier[sections] : identifier[strtab] = identifier[sections] [ identifier[section] . identifier[header] . identifier[sh_link] ] keyword[if] identifier[section] . identifier[header] . identifier[sh_type] keyword[in] ( identifier[int] ( identifier[SHT] . identifier[DYNSYM] ), identifier[int] ( identifier[SHT] . identifier[SYMTAB] )): identifier[section] . identifier[symbols] = identifier[self] . identifier[__parseSymbolEntriesForSection] ( identifier[section] , identifier[strtab] )
def _parseSymbols(self, sections): """Sets a list of symbols in each DYNSYM and SYMTAB section""" for section in sections: strtab = sections[section.header.sh_link] if section.header.sh_type in (int(SHT.DYNSYM), int(SHT.SYMTAB)): section.symbols = self.__parseSymbolEntriesForSection(section, strtab) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['section']]
def trainable_params(m:nn.Module)->ParamList: "Return list of trainable params in `m`." res = filter(lambda p: p.requires_grad, m.parameters()) return res
def function[trainable_params, parameter[m]]: constant[Return list of trainable params in `m`.] variable[res] assign[=] call[name[filter], parameter[<ast.Lambda object at 0x7da1b1e99f60>, call[name[m].parameters, parameter[]]]] return[name[res]]
keyword[def] identifier[trainable_params] ( identifier[m] : identifier[nn] . identifier[Module] )-> identifier[ParamList] : literal[string] identifier[res] = identifier[filter] ( keyword[lambda] identifier[p] : identifier[p] . identifier[requires_grad] , identifier[m] . identifier[parameters] ()) keyword[return] identifier[res]
def trainable_params(m: nn.Module) -> ParamList: """Return list of trainable params in `m`.""" res = filter(lambda p: p.requires_grad, m.parameters()) return res
def restore_package_version_from_recycle_bin(self, package_version_details, feed_id, package_name, package_version): """RestorePackageVersionFromRecycleBin. [Preview API] Restore a package version from the recycle bin to its associated feed. :param :class:`<PyPiRecycleBinPackageVersionDetails> <azure.devops.v5_0.py_pi_api.models.PyPiRecycleBinPackageVersionDetails>` package_version_details: Set the 'Deleted' state to 'false' to restore the package to its feed. :param str feed_id: Name or ID of the feed. :param str package_name: Name of the package. :param str package_version: Version of the package. """ route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') if package_name is not None: route_values['packageName'] = self._serialize.url('package_name', package_name, 'str') if package_version is not None: route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str') content = self._serialize.body(package_version_details, 'PyPiRecycleBinPackageVersionDetails') self._send(http_method='PATCH', location_id='07143752-3d94-45fd-86c2-0c77ed87847b', version='5.0-preview.1', route_values=route_values, content=content)
def function[restore_package_version_from_recycle_bin, parameter[self, package_version_details, feed_id, package_name, package_version]]: constant[RestorePackageVersionFromRecycleBin. [Preview API] Restore a package version from the recycle bin to its associated feed. :param :class:`<PyPiRecycleBinPackageVersionDetails> <azure.devops.v5_0.py_pi_api.models.PyPiRecycleBinPackageVersionDetails>` package_version_details: Set the 'Deleted' state to 'false' to restore the package to its feed. :param str feed_id: Name or ID of the feed. :param str package_name: Name of the package. :param str package_version: Version of the package. ] variable[route_values] assign[=] dictionary[[], []] if compare[name[feed_id] is_not constant[None]] begin[:] call[name[route_values]][constant[feedId]] assign[=] call[name[self]._serialize.url, parameter[constant[feed_id], name[feed_id], constant[str]]] if compare[name[package_name] is_not constant[None]] begin[:] call[name[route_values]][constant[packageName]] assign[=] call[name[self]._serialize.url, parameter[constant[package_name], name[package_name], constant[str]]] if compare[name[package_version] is_not constant[None]] begin[:] call[name[route_values]][constant[packageVersion]] assign[=] call[name[self]._serialize.url, parameter[constant[package_version], name[package_version], constant[str]]] variable[content] assign[=] call[name[self]._serialize.body, parameter[name[package_version_details], constant[PyPiRecycleBinPackageVersionDetails]]] call[name[self]._send, parameter[]]
keyword[def] identifier[restore_package_version_from_recycle_bin] ( identifier[self] , identifier[package_version_details] , identifier[feed_id] , identifier[package_name] , identifier[package_version] ): literal[string] identifier[route_values] ={} keyword[if] identifier[feed_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[feed_id] , literal[string] ) keyword[if] identifier[package_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[package_name] , literal[string] ) keyword[if] identifier[package_version] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[package_version] , literal[string] ) identifier[content] = identifier[self] . identifier[_serialize] . identifier[body] ( identifier[package_version_details] , literal[string] ) identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] , identifier[content] = identifier[content] )
def restore_package_version_from_recycle_bin(self, package_version_details, feed_id, package_name, package_version): """RestorePackageVersionFromRecycleBin. [Preview API] Restore a package version from the recycle bin to its associated feed. :param :class:`<PyPiRecycleBinPackageVersionDetails> <azure.devops.v5_0.py_pi_api.models.PyPiRecycleBinPackageVersionDetails>` package_version_details: Set the 'Deleted' state to 'false' to restore the package to its feed. :param str feed_id: Name or ID of the feed. :param str package_name: Name of the package. :param str package_version: Version of the package. """ route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') # depends on [control=['if'], data=['feed_id']] if package_name is not None: route_values['packageName'] = self._serialize.url('package_name', package_name, 'str') # depends on [control=['if'], data=['package_name']] if package_version is not None: route_values['packageVersion'] = self._serialize.url('package_version', package_version, 'str') # depends on [control=['if'], data=['package_version']] content = self._serialize.body(package_version_details, 'PyPiRecycleBinPackageVersionDetails') self._send(http_method='PATCH', location_id='07143752-3d94-45fd-86c2-0c77ed87847b', version='5.0-preview.1', route_values=route_values, content=content)
def owner_type(self, value): """Set ``owner_type`` to the given value. In addition: * Update the internal type of the ``owner`` field. * Update the value of the ``owner`` field if a value is already set. """ self._owner_type = value if value == 'User': self._fields['owner'] = entity_fields.OneToOneField(User) if hasattr(self, 'owner'): # pylint:disable=no-member self.owner = User( self._server_config, id=self.owner.id if isinstance(self.owner, Entity) else self.owner ) elif value == 'Usergroup': self._fields['owner'] = entity_fields.OneToOneField(UserGroup) if hasattr(self, 'owner'): # pylint:disable=no-member self.owner = UserGroup( self._server_config, id=self.owner.id if isinstance(self.owner, Entity) else self.owner )
def function[owner_type, parameter[self, value]]: constant[Set ``owner_type`` to the given value. In addition: * Update the internal type of the ``owner`` field. * Update the value of the ``owner`` field if a value is already set. ] name[self]._owner_type assign[=] name[value] if compare[name[value] equal[==] constant[User]] begin[:] call[name[self]._fields][constant[owner]] assign[=] call[name[entity_fields].OneToOneField, parameter[name[User]]] if call[name[hasattr], parameter[name[self], constant[owner]]] begin[:] name[self].owner assign[=] call[name[User], parameter[name[self]._server_config]]
keyword[def] identifier[owner_type] ( identifier[self] , identifier[value] ): literal[string] identifier[self] . identifier[_owner_type] = identifier[value] keyword[if] identifier[value] == literal[string] : identifier[self] . identifier[_fields] [ literal[string] ]= identifier[entity_fields] . identifier[OneToOneField] ( identifier[User] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[owner] = identifier[User] ( identifier[self] . identifier[_server_config] , identifier[id] = identifier[self] . identifier[owner] . identifier[id] keyword[if] identifier[isinstance] ( identifier[self] . identifier[owner] , identifier[Entity] ) keyword[else] identifier[self] . identifier[owner] ) keyword[elif] identifier[value] == literal[string] : identifier[self] . identifier[_fields] [ literal[string] ]= identifier[entity_fields] . identifier[OneToOneField] ( identifier[UserGroup] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[owner] = identifier[UserGroup] ( identifier[self] . identifier[_server_config] , identifier[id] = identifier[self] . identifier[owner] . identifier[id] keyword[if] identifier[isinstance] ( identifier[self] . identifier[owner] , identifier[Entity] ) keyword[else] identifier[self] . identifier[owner] )
def owner_type(self, value): """Set ``owner_type`` to the given value. In addition: * Update the internal type of the ``owner`` field. * Update the value of the ``owner`` field if a value is already set. """ self._owner_type = value if value == 'User': self._fields['owner'] = entity_fields.OneToOneField(User) if hasattr(self, 'owner'): # pylint:disable=no-member self.owner = User(self._server_config, id=self.owner.id if isinstance(self.owner, Entity) else self.owner) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif value == 'Usergroup': self._fields['owner'] = entity_fields.OneToOneField(UserGroup) if hasattr(self, 'owner'): # pylint:disable=no-member self.owner = UserGroup(self._server_config, id=self.owner.id if isinstance(self.owner, Entity) else self.owner) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def _ige(message, key, iv, operation="decrypt"): """Given a key, given an iv, and message do whatever operation asked in the operation field. Operation will be checked for: "decrypt" and "encrypt" strings. Returns the message encrypted/decrypted. message must be a multiple by 16 bytes (for division in 16 byte blocks) key must be 32 byte iv must be 32 byte (it's not internally used in AES 256 ECB, but it's needed for IGE)""" message = bytes(message) if len(key) != 32: raise ValueError("key must be 32 bytes long (was " + str(len(key)) + " bytes)") if len(iv) != 32: raise ValueError("iv must be 32 bytes long (was " + str(len(iv)) + " bytes)") cipher = AES.new(key, AES.MODE_ECB, iv) blocksize = cipher.block_size if len(message) % blocksize != 0: raise ValueError("message must be a multiple of 16 bytes (try adding " + str(16 - len(message) % 16) + " bytes of padding)") ivp = iv[0:blocksize] ivp2 = iv[blocksize:] ciphered = bytes() for i in range(0, len(message), blocksize): indata = message[i:i+blocksize] if operation == "decrypt": xored = strxor(indata, ivp2) decrypt_xored = cipher.decrypt(xored) outdata = strxor(decrypt_xored, ivp) ivp = indata ivp2 = outdata elif operation == "encrypt": xored = strxor(indata, ivp) encrypt_xored = cipher.encrypt(xored) outdata = strxor(encrypt_xored, ivp2) ivp = outdata ivp2 = indata else: raise ValueError("operation must be either 'decrypt' or 'encrypt'") ciphered += outdata return ciphered
def function[_ige, parameter[message, key, iv, operation]]: constant[Given a key, given an iv, and message do whatever operation asked in the operation field. Operation will be checked for: "decrypt" and "encrypt" strings. Returns the message encrypted/decrypted. message must be a multiple by 16 bytes (for division in 16 byte blocks) key must be 32 byte iv must be 32 byte (it's not internally used in AES 256 ECB, but it's needed for IGE)] variable[message] assign[=] call[name[bytes], parameter[name[message]]] if compare[call[name[len], parameter[name[key]]] not_equal[!=] constant[32]] begin[:] <ast.Raise object at 0x7da18f00f070> if compare[call[name[len], parameter[name[iv]]] not_equal[!=] constant[32]] begin[:] <ast.Raise object at 0x7da18f00f700> variable[cipher] assign[=] call[name[AES].new, parameter[name[key], name[AES].MODE_ECB, name[iv]]] variable[blocksize] assign[=] name[cipher].block_size if compare[binary_operation[call[name[len], parameter[name[message]]] <ast.Mod object at 0x7da2590d6920> name[blocksize]] not_equal[!=] constant[0]] begin[:] <ast.Raise object at 0x7da18f00f550> variable[ivp] assign[=] call[name[iv]][<ast.Slice object at 0x7da18f00de70>] variable[ivp2] assign[=] call[name[iv]][<ast.Slice object at 0x7da18f00e5c0>] variable[ciphered] assign[=] call[name[bytes], parameter[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[message]]], name[blocksize]]]] begin[:] variable[indata] assign[=] call[name[message]][<ast.Slice object at 0x7da18f00d000>] if compare[name[operation] equal[==] constant[decrypt]] begin[:] variable[xored] assign[=] call[name[strxor], parameter[name[indata], name[ivp2]]] variable[decrypt_xored] assign[=] call[name[cipher].decrypt, parameter[name[xored]]] variable[outdata] assign[=] call[name[strxor], parameter[name[decrypt_xored], name[ivp]]] variable[ivp] assign[=] name[indata] variable[ivp2] assign[=] name[outdata] <ast.AugAssign object at 0x7da18f00c8b0> 
return[name[ciphered]]
keyword[def] identifier[_ige] ( identifier[message] , identifier[key] , identifier[iv] , identifier[operation] = literal[string] ): literal[string] identifier[message] = identifier[bytes] ( identifier[message] ) keyword[if] identifier[len] ( identifier[key] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[len] ( identifier[key] ))+ literal[string] ) keyword[if] identifier[len] ( identifier[iv] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( identifier[len] ( identifier[iv] ))+ literal[string] ) identifier[cipher] = identifier[AES] . identifier[new] ( identifier[key] , identifier[AES] . identifier[MODE_ECB] , identifier[iv] ) identifier[blocksize] = identifier[cipher] . identifier[block_size] keyword[if] identifier[len] ( identifier[message] )% identifier[blocksize] != literal[int] : keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ( literal[int] - identifier[len] ( identifier[message] )% literal[int] )+ literal[string] ) identifier[ivp] = identifier[iv] [ literal[int] : identifier[blocksize] ] identifier[ivp2] = identifier[iv] [ identifier[blocksize] :] identifier[ciphered] = identifier[bytes] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[message] ), identifier[blocksize] ): identifier[indata] = identifier[message] [ identifier[i] : identifier[i] + identifier[blocksize] ] keyword[if] identifier[operation] == literal[string] : identifier[xored] = identifier[strxor] ( identifier[indata] , identifier[ivp2] ) identifier[decrypt_xored] = identifier[cipher] . 
identifier[decrypt] ( identifier[xored] ) identifier[outdata] = identifier[strxor] ( identifier[decrypt_xored] , identifier[ivp] ) identifier[ivp] = identifier[indata] identifier[ivp2] = identifier[outdata] keyword[elif] identifier[operation] == literal[string] : identifier[xored] = identifier[strxor] ( identifier[indata] , identifier[ivp] ) identifier[encrypt_xored] = identifier[cipher] . identifier[encrypt] ( identifier[xored] ) identifier[outdata] = identifier[strxor] ( identifier[encrypt_xored] , identifier[ivp2] ) identifier[ivp] = identifier[outdata] identifier[ivp2] = identifier[indata] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[ciphered] += identifier[outdata] keyword[return] identifier[ciphered]
def _ige(message, key, iv, operation='decrypt'): """Given a key, given an iv, and message do whatever operation asked in the operation field. Operation will be checked for: "decrypt" and "encrypt" strings. Returns the message encrypted/decrypted. message must be a multiple by 16 bytes (for division in 16 byte blocks) key must be 32 byte iv must be 32 byte (it's not internally used in AES 256 ECB, but it's needed for IGE)""" message = bytes(message) if len(key) != 32: raise ValueError('key must be 32 bytes long (was ' + str(len(key)) + ' bytes)') # depends on [control=['if'], data=[]] if len(iv) != 32: raise ValueError('iv must be 32 bytes long (was ' + str(len(iv)) + ' bytes)') # depends on [control=['if'], data=[]] cipher = AES.new(key, AES.MODE_ECB, iv) blocksize = cipher.block_size if len(message) % blocksize != 0: raise ValueError('message must be a multiple of 16 bytes (try adding ' + str(16 - len(message) % 16) + ' bytes of padding)') # depends on [control=['if'], data=[]] ivp = iv[0:blocksize] ivp2 = iv[blocksize:] ciphered = bytes() for i in range(0, len(message), blocksize): indata = message[i:i + blocksize] if operation == 'decrypt': xored = strxor(indata, ivp2) decrypt_xored = cipher.decrypt(xored) outdata = strxor(decrypt_xored, ivp) ivp = indata ivp2 = outdata # depends on [control=['if'], data=[]] elif operation == 'encrypt': xored = strxor(indata, ivp) encrypt_xored = cipher.encrypt(xored) outdata = strxor(encrypt_xored, ivp2) ivp = outdata ivp2 = indata # depends on [control=['if'], data=[]] else: raise ValueError("operation must be either 'decrypt' or 'encrypt'") ciphered += outdata # depends on [control=['for'], data=['i']] return ciphered
def mutex(): ''' Tests the implementation of mutex CLI Examples: .. code-block:: bash salt '*' sysbench.mutex ''' # Test options and the values they take # --mutex-num = [50,500,1000] # --mutex-locks = [10000,25000,50000] # --mutex-loops = [2500,5000,10000] # Test data (Orthogonal test cases) mutex_num = [50, 50, 50, 500, 500, 500, 1000, 1000, 1000] locks = [10000, 25000, 50000, 10000, 25000, 50000, 10000, 25000, 50000] mutex_locks = [] mutex_locks.extend(locks) mutex_loops = [2500, 5000, 10000, 10000, 2500, 5000, 5000, 10000, 2500] # Initializing the test variables test_command = 'sysbench --num-threads=250 --test=mutex ' test_command += '--mutex-num={0} --mutex-locks={1} --mutex-loops={2} run ' result = None ret_val = {} # Test begins! for num, locks, loops in zip(mutex_num, mutex_locks, mutex_loops): key = 'Mutex: {0} Locks: {1} Loops: {2}'.format(num, locks, loops) run_command = test_command.format(num, locks, loops) result = __salt__['cmd.run'](run_command) ret_val[key] = _parser(result) return ret_val
def function[mutex, parameter[]]: constant[ Tests the implementation of mutex CLI Examples: .. code-block:: bash salt '*' sysbench.mutex ] variable[mutex_num] assign[=] list[[<ast.Constant object at 0x7da1b216bca0>, <ast.Constant object at 0x7da1b2168040>, <ast.Constant object at 0x7da1b2168700>, <ast.Constant object at 0x7da1b216bb50>, <ast.Constant object at 0x7da1b216bdc0>, <ast.Constant object at 0x7da1b2168640>, <ast.Constant object at 0x7da1b216b0a0>, <ast.Constant object at 0x7da1b2168850>, <ast.Constant object at 0x7da1b216bd90>]] variable[locks] assign[=] list[[<ast.Constant object at 0x7da1b216b520>, <ast.Constant object at 0x7da1b216bc10>, <ast.Constant object at 0x7da1b2169c00>, <ast.Constant object at 0x7da1b216b820>, <ast.Constant object at 0x7da1b2169030>, <ast.Constant object at 0x7da1b2168730>, <ast.Constant object at 0x7da1b216a560>, <ast.Constant object at 0x7da1b2169750>, <ast.Constant object at 0x7da1b21692a0>]] variable[mutex_locks] assign[=] list[[]] call[name[mutex_locks].extend, parameter[name[locks]]] variable[mutex_loops] assign[=] list[[<ast.Constant object at 0x7da1b216a6b0>, <ast.Constant object at 0x7da1b2169f60>, <ast.Constant object at 0x7da1b2168d30>, <ast.Constant object at 0x7da1b216be50>, <ast.Constant object at 0x7da1b2169ff0>, <ast.Constant object at 0x7da1b216b610>, <ast.Constant object at 0x7da1b2169e10>, <ast.Constant object at 0x7da1b2169690>, <ast.Constant object at 0x7da1b216bbb0>]] variable[test_command] assign[=] constant[sysbench --num-threads=250 --test=mutex ] <ast.AugAssign object at 0x7da1b216b7f0> variable[result] assign[=] constant[None] variable[ret_val] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b216b4c0>, <ast.Name object at 0x7da1b2168280>, <ast.Name object at 0x7da1b2169db0>]]] in starred[call[name[zip], parameter[name[mutex_num], name[mutex_locks], name[mutex_loops]]]] begin[:] variable[key] assign[=] call[constant[Mutex: {0} Locks: {1} Loops: {2}].format, parameter[name[num], 
name[locks], name[loops]]] variable[run_command] assign[=] call[name[test_command].format, parameter[name[num], name[locks], name[loops]]] variable[result] assign[=] call[call[name[__salt__]][constant[cmd.run]], parameter[name[run_command]]] call[name[ret_val]][name[key]] assign[=] call[name[_parser], parameter[name[result]]] return[name[ret_val]]
keyword[def] identifier[mutex] (): literal[string] identifier[mutex_num] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] identifier[locks] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] identifier[mutex_locks] =[] identifier[mutex_locks] . identifier[extend] ( identifier[locks] ) identifier[mutex_loops] =[ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ] identifier[test_command] = literal[string] identifier[test_command] += literal[string] identifier[result] = keyword[None] identifier[ret_val] ={} keyword[for] identifier[num] , identifier[locks] , identifier[loops] keyword[in] identifier[zip] ( identifier[mutex_num] , identifier[mutex_locks] , identifier[mutex_loops] ): identifier[key] = literal[string] . identifier[format] ( identifier[num] , identifier[locks] , identifier[loops] ) identifier[run_command] = identifier[test_command] . identifier[format] ( identifier[num] , identifier[locks] , identifier[loops] ) identifier[result] = identifier[__salt__] [ literal[string] ]( identifier[run_command] ) identifier[ret_val] [ identifier[key] ]= identifier[_parser] ( identifier[result] ) keyword[return] identifier[ret_val]
def mutex(): """ Tests the implementation of mutex CLI Examples: .. code-block:: bash salt '*' sysbench.mutex """ # Test options and the values they take # --mutex-num = [50,500,1000] # --mutex-locks = [10000,25000,50000] # --mutex-loops = [2500,5000,10000] # Test data (Orthogonal test cases) mutex_num = [50, 50, 50, 500, 500, 500, 1000, 1000, 1000] locks = [10000, 25000, 50000, 10000, 25000, 50000, 10000, 25000, 50000] mutex_locks = [] mutex_locks.extend(locks) mutex_loops = [2500, 5000, 10000, 10000, 2500, 5000, 5000, 10000, 2500] # Initializing the test variables test_command = 'sysbench --num-threads=250 --test=mutex ' test_command += '--mutex-num={0} --mutex-locks={1} --mutex-loops={2} run ' result = None ret_val = {} # Test begins! for (num, locks, loops) in zip(mutex_num, mutex_locks, mutex_loops): key = 'Mutex: {0} Locks: {1} Loops: {2}'.format(num, locks, loops) run_command = test_command.format(num, locks, loops) result = __salt__['cmd.run'](run_command) ret_val[key] = _parser(result) # depends on [control=['for'], data=[]] return ret_val
def encode(signer, payload, header=None, key_id=None): """Make a signed JWT. Args: signer (google.auth.crypt.Signer): The signer used to sign the JWT. payload (Mapping[str, str]): The JWT payload. header (Mapping[str, str]): Additional JWT header payload. key_id (str): The key id to add to the JWT header. If the signer has a key id it will be used as the default. If this is specified it will override the signer's key id. Returns: bytes: The encoded JWT. """ if header is None: header = {} if key_id is None: key_id = signer.key_id header.update({'typ': 'JWT', 'alg': 'RS256'}) if key_id is not None: header['kid'] = key_id segments = [ _helpers.unpadded_urlsafe_b64encode( json.dumps(header).encode('utf-8') ), _helpers.unpadded_urlsafe_b64encode( json.dumps(payload).encode('utf-8') ), ] signing_input = b'.'.join(segments) signature = signer.sign(signing_input) segments.append( _helpers.unpadded_urlsafe_b64encode(signature) ) return b'.'.join(segments)
def function[encode, parameter[signer, payload, header, key_id]]: constant[Make a signed JWT. Args: signer (google.auth.crypt.Signer): The signer used to sign the JWT. payload (Mapping[str, str]): The JWT payload. header (Mapping[str, str]): Additional JWT header payload. key_id (str): The key id to add to the JWT header. If the signer has a key id it will be used as the default. If this is specified it will override the signer's key id. Returns: bytes: The encoded JWT. ] if compare[name[header] is constant[None]] begin[:] variable[header] assign[=] dictionary[[], []] if compare[name[key_id] is constant[None]] begin[:] variable[key_id] assign[=] name[signer].key_id call[name[header].update, parameter[dictionary[[<ast.Constant object at 0x7da20e9b0220>, <ast.Constant object at 0x7da20e9b0790>], [<ast.Constant object at 0x7da20e9b3310>, <ast.Constant object at 0x7da20e9b1bd0>]]]] if compare[name[key_id] is_not constant[None]] begin[:] call[name[header]][constant[kid]] assign[=] name[key_id] variable[segments] assign[=] list[[<ast.Call object at 0x7da20e9b2ce0>, <ast.Call object at 0x7da20e9b1870>]] variable[signing_input] assign[=] call[constant[b'.'].join, parameter[name[segments]]] variable[signature] assign[=] call[name[signer].sign, parameter[name[signing_input]]] call[name[segments].append, parameter[call[name[_helpers].unpadded_urlsafe_b64encode, parameter[name[signature]]]]] return[call[constant[b'.'].join, parameter[name[segments]]]]
keyword[def] identifier[encode] ( identifier[signer] , identifier[payload] , identifier[header] = keyword[None] , identifier[key_id] = keyword[None] ): literal[string] keyword[if] identifier[header] keyword[is] keyword[None] : identifier[header] ={} keyword[if] identifier[key_id] keyword[is] keyword[None] : identifier[key_id] = identifier[signer] . identifier[key_id] identifier[header] . identifier[update] ({ literal[string] : literal[string] , literal[string] : literal[string] }) keyword[if] identifier[key_id] keyword[is] keyword[not] keyword[None] : identifier[header] [ literal[string] ]= identifier[key_id] identifier[segments] =[ identifier[_helpers] . identifier[unpadded_urlsafe_b64encode] ( identifier[json] . identifier[dumps] ( identifier[header] ). identifier[encode] ( literal[string] ) ), identifier[_helpers] . identifier[unpadded_urlsafe_b64encode] ( identifier[json] . identifier[dumps] ( identifier[payload] ). identifier[encode] ( literal[string] ) ), ] identifier[signing_input] = literal[string] . identifier[join] ( identifier[segments] ) identifier[signature] = identifier[signer] . identifier[sign] ( identifier[signing_input] ) identifier[segments] . identifier[append] ( identifier[_helpers] . identifier[unpadded_urlsafe_b64encode] ( identifier[signature] ) ) keyword[return] literal[string] . identifier[join] ( identifier[segments] )
def encode(signer, payload, header=None, key_id=None): """Make a signed JWT. Args: signer (google.auth.crypt.Signer): The signer used to sign the JWT. payload (Mapping[str, str]): The JWT payload. header (Mapping[str, str]): Additional JWT header payload. key_id (str): The key id to add to the JWT header. If the signer has a key id it will be used as the default. If this is specified it will override the signer's key id. Returns: bytes: The encoded JWT. """ if header is None: header = {} # depends on [control=['if'], data=['header']] if key_id is None: key_id = signer.key_id # depends on [control=['if'], data=['key_id']] header.update({'typ': 'JWT', 'alg': 'RS256'}) if key_id is not None: header['kid'] = key_id # depends on [control=['if'], data=['key_id']] segments = [_helpers.unpadded_urlsafe_b64encode(json.dumps(header).encode('utf-8')), _helpers.unpadded_urlsafe_b64encode(json.dumps(payload).encode('utf-8'))] signing_input = b'.'.join(segments) signature = signer.sign(signing_input) segments.append(_helpers.unpadded_urlsafe_b64encode(signature)) return b'.'.join(segments)
def make_password(length, chars=string.letters + string.digits + '#$%&!'): """ Generate and return a random password :param length: Desired length :param chars: Character set to use """ return get_random_string(length, chars)
def function[make_password, parameter[length, chars]]: constant[ Generate and return a random password :param length: Desired length :param chars: Character set to use ] return[call[name[get_random_string], parameter[name[length], name[chars]]]]
keyword[def] identifier[make_password] ( identifier[length] , identifier[chars] = identifier[string] . identifier[letters] + identifier[string] . identifier[digits] + literal[string] ): literal[string] keyword[return] identifier[get_random_string] ( identifier[length] , identifier[chars] )
def make_password(length, chars=string.letters + string.digits + '#$%&!'): """ Generate and return a random password :param length: Desired length :param chars: Character set to use """ return get_random_string(length, chars)
def transFringe(beta=None, rho=None): """ Transport matrix of fringe field :param beta: angle of rotation of pole-face in [RAD] :param rho: bending radius in [m] :return: 6x6 numpy array """ m = np.eye(6, 6, dtype=np.float64) if None in (beta, rho): print("warning: 'theta', 'rho' should be positive float numbers.") return m else: m[1, 0] = np.tan(beta) / rho m[3, 2] = -np.tan(beta) / rho return m
def function[transFringe, parameter[beta, rho]]: constant[ Transport matrix of fringe field :param beta: angle of rotation of pole-face in [RAD] :param rho: bending radius in [m] :return: 6x6 numpy array ] variable[m] assign[=] call[name[np].eye, parameter[constant[6], constant[6]]] if compare[constant[None] in tuple[[<ast.Name object at 0x7da1b09d2aa0>, <ast.Name object at 0x7da1b09d0400>]]] begin[:] call[name[print], parameter[constant[warning: 'theta', 'rho' should be positive float numbers.]]] return[name[m]]
keyword[def] identifier[transFringe] ( identifier[beta] = keyword[None] , identifier[rho] = keyword[None] ): literal[string] identifier[m] = identifier[np] . identifier[eye] ( literal[int] , literal[int] , identifier[dtype] = identifier[np] . identifier[float64] ) keyword[if] keyword[None] keyword[in] ( identifier[beta] , identifier[rho] ): identifier[print] ( literal[string] ) keyword[return] identifier[m] keyword[else] : identifier[m] [ literal[int] , literal[int] ]= identifier[np] . identifier[tan] ( identifier[beta] )/ identifier[rho] identifier[m] [ literal[int] , literal[int] ]=- identifier[np] . identifier[tan] ( identifier[beta] )/ identifier[rho] keyword[return] identifier[m]
def transFringe(beta=None, rho=None): """ Transport matrix of fringe field :param beta: angle of rotation of pole-face in [RAD] :param rho: bending radius in [m] :return: 6x6 numpy array """ m = np.eye(6, 6, dtype=np.float64) if None in (beta, rho): print("warning: 'theta', 'rho' should be positive float numbers.") return m # depends on [control=['if'], data=[]] else: m[1, 0] = np.tan(beta) / rho m[3, 2] = -np.tan(beta) / rho return m
def save_map(dsp, path): """ Write Dispatcher graph object in Python pickle format. Pickles are a serialized byte stream of a Python object. This format will preserve Python objects used as nodes or edges. :param dsp: A dispatcher that identifies the model adopted. :type dsp: schedula.Dispatcher :param path: File or filename to write. File names ending in .gz or .bz2 will be compressed. :type path: str, file .. testsetup:: >>> from tempfile import mkstemp >>> file_name = mkstemp()[1] Example:: >>> from schedula import Dispatcher >>> dsp = Dispatcher() >>> dsp.add_function(function=max, inputs=['a', 'b'], outputs=['c']) 'max' >>> save_map(dsp, file_name) """ import dill with open(path, 'wb') as f: dill.dump(dsp.dmap, f)
def function[save_map, parameter[dsp, path]]: constant[ Write Dispatcher graph object in Python pickle format. Pickles are a serialized byte stream of a Python object. This format will preserve Python objects used as nodes or edges. :param dsp: A dispatcher that identifies the model adopted. :type dsp: schedula.Dispatcher :param path: File or filename to write. File names ending in .gz or .bz2 will be compressed. :type path: str, file .. testsetup:: >>> from tempfile import mkstemp >>> file_name = mkstemp()[1] Example:: >>> from schedula import Dispatcher >>> dsp = Dispatcher() >>> dsp.add_function(function=max, inputs=['a', 'b'], outputs=['c']) 'max' >>> save_map(dsp, file_name) ] import module[dill] with call[name[open], parameter[name[path], constant[wb]]] begin[:] call[name[dill].dump, parameter[name[dsp].dmap, name[f]]]
keyword[def] identifier[save_map] ( identifier[dsp] , identifier[path] ): literal[string] keyword[import] identifier[dill] keyword[with] identifier[open] ( identifier[path] , literal[string] ) keyword[as] identifier[f] : identifier[dill] . identifier[dump] ( identifier[dsp] . identifier[dmap] , identifier[f] )
def save_map(dsp, path): """ Write Dispatcher graph object in Python pickle format. Pickles are a serialized byte stream of a Python object. This format will preserve Python objects used as nodes or edges. :param dsp: A dispatcher that identifies the model adopted. :type dsp: schedula.Dispatcher :param path: File or filename to write. File names ending in .gz or .bz2 will be compressed. :type path: str, file .. testsetup:: >>> from tempfile import mkstemp >>> file_name = mkstemp()[1] Example:: >>> from schedula import Dispatcher >>> dsp = Dispatcher() >>> dsp.add_function(function=max, inputs=['a', 'b'], outputs=['c']) 'max' >>> save_map(dsp, file_name) """ import dill with open(path, 'wb') as f: dill.dump(dsp.dmap, f) # depends on [control=['with'], data=['f']]
def _create_run_ini(self, port, production, output='development.ini', source='development.ini', override_site_url=True): """ Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted """ cp = SafeConfigParser() try: cp.read([self.target + '/' + source]) except ConfigParserError: raise DatacatsError('Error reading development.ini') cp.set('DEFAULT', 'debug', 'false' if production else 'true') if self.site_url: site_url = self.site_url else: if is_boot2docker(): web_address = socket.gethostbyname(docker_host()) else: web_address = self.address site_url = 'http://{}:{}'.format(web_address, port) if override_site_url: cp.set('app:main', 'ckan.site_url', site_url) cp.set('app:main', 'sqlalchemy.url', 'postgresql://ckan:{0}@db:5432/ckan' .format(self.passwords['CKAN_PASSWORD'])) cp.set('app:main', 'ckan.datastore.read_url', 'postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RO_PASSWORD'])) cp.set('app:main', 'ckan.datastore.write_url', 'postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore' .format(self.passwords['DATASTORE_RW_PASSWORD'])) cp.set('app:main', 'solr_url', 'http://solr:8080/solr') cp.set('app:main', 'ckan.redis.url', 'http://redis:6379') cp.set('app:main', 'beaker.session.secret', self.passwords['BEAKER_SESSION_SECRET']) if not isdir(self.sitedir + '/run'): makedirs(self.sitedir + '/run') # upgrade old datadir with open(self.sitedir + '/run/' + output, 'w') as runini: cp.write(runini)
def function[_create_run_ini, parameter[self, port, production, output, source, override_site_url]]: constant[ Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted ] variable[cp] assign[=] call[name[SafeConfigParser], parameter[]] <ast.Try object at 0x7da20e9578e0> call[name[cp].set, parameter[constant[DEFAULT], constant[debug], <ast.IfExp object at 0x7da20e957880>]] if name[self].site_url begin[:] variable[site_url] assign[=] name[self].site_url if name[override_site_url] begin[:] call[name[cp].set, parameter[constant[app:main], constant[ckan.site_url], name[site_url]]] call[name[cp].set, parameter[constant[app:main], constant[sqlalchemy.url], call[constant[postgresql://ckan:{0}@db:5432/ckan].format, parameter[call[name[self].passwords][constant[CKAN_PASSWORD]]]]]] call[name[cp].set, parameter[constant[app:main], constant[ckan.datastore.read_url], call[constant[postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore].format, parameter[call[name[self].passwords][constant[DATASTORE_RO_PASSWORD]]]]]] call[name[cp].set, parameter[constant[app:main], constant[ckan.datastore.write_url], call[constant[postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore].format, parameter[call[name[self].passwords][constant[DATASTORE_RW_PASSWORD]]]]]] call[name[cp].set, parameter[constant[app:main], constant[solr_url], constant[http://solr:8080/solr]]] call[name[cp].set, parameter[constant[app:main], constant[ckan.redis.url], constant[http://redis:6379]]] call[name[cp].set, parameter[constant[app:main], constant[beaker.session.secret], call[name[self].passwords][constant[BEAKER_SESSION_SECRET]]]] if <ast.UnaryOp object at 0x7da204961a80> begin[:] call[name[makedirs], parameter[binary_operation[name[self].sitedir + constant[/run]]]] with call[name[open], parameter[binary_operation[binary_operation[name[self].sitedir + constant[/run/]] + name[output]], constant[w]]] begin[:] call[name[cp].write, 
parameter[name[runini]]]
keyword[def] identifier[_create_run_ini] ( identifier[self] , identifier[port] , identifier[production] , identifier[output] = literal[string] , identifier[source] = literal[string] , identifier[override_site_url] = keyword[True] ): literal[string] identifier[cp] = identifier[SafeConfigParser] () keyword[try] : identifier[cp] . identifier[read] ([ identifier[self] . identifier[target] + literal[string] + identifier[source] ]) keyword[except] identifier[ConfigParserError] : keyword[raise] identifier[DatacatsError] ( literal[string] ) identifier[cp] . identifier[set] ( literal[string] , literal[string] , literal[string] keyword[if] identifier[production] keyword[else] literal[string] ) keyword[if] identifier[self] . identifier[site_url] : identifier[site_url] = identifier[self] . identifier[site_url] keyword[else] : keyword[if] identifier[is_boot2docker] (): identifier[web_address] = identifier[socket] . identifier[gethostbyname] ( identifier[docker_host] ()) keyword[else] : identifier[web_address] = identifier[self] . identifier[address] identifier[site_url] = literal[string] . identifier[format] ( identifier[web_address] , identifier[port] ) keyword[if] identifier[override_site_url] : identifier[cp] . identifier[set] ( literal[string] , literal[string] , identifier[site_url] ) identifier[cp] . identifier[set] ( literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[passwords] [ literal[string] ])) identifier[cp] . identifier[set] ( literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[passwords] [ literal[string] ])) identifier[cp] . identifier[set] ( literal[string] , literal[string] , literal[string] . identifier[format] ( identifier[self] . identifier[passwords] [ literal[string] ])) identifier[cp] . identifier[set] ( literal[string] , literal[string] , literal[string] ) identifier[cp] . 
identifier[set] ( literal[string] , literal[string] , literal[string] ) identifier[cp] . identifier[set] ( literal[string] , literal[string] , identifier[self] . identifier[passwords] [ literal[string] ]) keyword[if] keyword[not] identifier[isdir] ( identifier[self] . identifier[sitedir] + literal[string] ): identifier[makedirs] ( identifier[self] . identifier[sitedir] + literal[string] ) keyword[with] identifier[open] ( identifier[self] . identifier[sitedir] + literal[string] + identifier[output] , literal[string] ) keyword[as] identifier[runini] : identifier[cp] . identifier[write] ( identifier[runini] )
def _create_run_ini(self, port, production, output='development.ini', source='development.ini', override_site_url=True): """ Create run/development.ini in datadir with debug and site_url overridden and with correct db passwords inserted """ cp = SafeConfigParser() try: cp.read([self.target + '/' + source]) # depends on [control=['try'], data=[]] except ConfigParserError: raise DatacatsError('Error reading development.ini') # depends on [control=['except'], data=[]] cp.set('DEFAULT', 'debug', 'false' if production else 'true') if self.site_url: site_url = self.site_url # depends on [control=['if'], data=[]] else: if is_boot2docker(): web_address = socket.gethostbyname(docker_host()) # depends on [control=['if'], data=[]] else: web_address = self.address site_url = 'http://{}:{}'.format(web_address, port) if override_site_url: cp.set('app:main', 'ckan.site_url', site_url) # depends on [control=['if'], data=[]] cp.set('app:main', 'sqlalchemy.url', 'postgresql://ckan:{0}@db:5432/ckan'.format(self.passwords['CKAN_PASSWORD'])) cp.set('app:main', 'ckan.datastore.read_url', 'postgresql://ckan_datastore_readonly:{0}@db:5432/ckan_datastore'.format(self.passwords['DATASTORE_RO_PASSWORD'])) cp.set('app:main', 'ckan.datastore.write_url', 'postgresql://ckan_datastore_readwrite:{0}@db:5432/ckan_datastore'.format(self.passwords['DATASTORE_RW_PASSWORD'])) cp.set('app:main', 'solr_url', 'http://solr:8080/solr') cp.set('app:main', 'ckan.redis.url', 'http://redis:6379') cp.set('app:main', 'beaker.session.secret', self.passwords['BEAKER_SESSION_SECRET']) if not isdir(self.sitedir + '/run'): makedirs(self.sitedir + '/run') # upgrade old datadir # depends on [control=['if'], data=[]] with open(self.sitedir + '/run/' + output, 'w') as runini: cp.write(runini) # depends on [control=['with'], data=['runini']]
def isometric(script, targetAbstractMinFaceNum=140, targetAbstractMaxFaceNum=180, stopCriteria=1, convergenceSpeed=1, DoubleStep=True): """Isometric parameterization """ filter_xml = ''.join([ ' <filter name="Iso Parametrization">\n', ' <Param name="targetAbstractMinFaceNum"', 'value="%d"' % targetAbstractMinFaceNum, 'description="Abstract Min Mesh Size"', 'type="RichInt"', 'tooltip="This number and the following one indicate the range face number of the abstract mesh that is used for the parametrization process. The algorithm will choose the best abstract mesh with the number of triangles within the specified interval. If the mesh has a very simple structure this range can be very low and strict; for a roughly spherical object if you can specify a range of [8,8] faces you get a octahedral abstract mesh, e.g. a geometry image. &lt;br>Large numbers (greater than 400) are usually not of practical use."', '/>\n', ' <Param name="targetAbstractMaxFaceNum"', 'value="%d"' % targetAbstractMaxFaceNum, 'description="Abstract Max Mesh Size"', 'type="RichInt"', 'tooltip="Please notice that a large interval requires huge amount of memory to be allocated, in order save the intermediate results. An interval of 40 should be fine."', '/>\n', ' <Param name="stopCriteria"', 'value="%d"' % stopCriteria, 'description="Optimization Criteria"', 'enum_val0="Best Heuristic"', 'enum_val1="Area + Angle"', 'enum_val2="Regularity"', 'enum_val3="L2"', 'enum_cardinality="4"', 'type="RichEnum"', 'tooltip="Choose a metric to stop the parametrization within the interval. 1: Best Heuristic : stop considering both isometry and number of faces of base domain. 2: Area + Angle : stop at minimum area and angle distorsion. 3: Regularity : stop at minimum number of irregular vertices. 
4: L2 : stop at minimum OneWay L2 Stretch Eff"', '/>\n', ' <Param name="convergenceSpeed"', 'value="%d"' % convergenceSpeed, 'description="Convergence Precision"', 'type="RichInt"', 'tooltip="This parameter controls the convergence speed/precision of the optimization of the texture coordinates. Larger the number slower the processing and, eventually, slightly better results"', '/>\n', ' <Param name="DoubleStep"', 'value="%s"' % str(DoubleStep).lower(), 'description="Double Step"', 'type="RichBool"', 'tooltip="Use this bool to divide the parameterization in 2 steps. Double step makes the overall process faster and robust. Consider to disable this bool in case the object has topologycal noise or small handles."', '/>\n', ' </filter>\n']) util.write_filter(script, filter_xml) return None
def function[isometric, parameter[script, targetAbstractMinFaceNum, targetAbstractMaxFaceNum, stopCriteria, convergenceSpeed, DoubleStep]]: constant[Isometric parameterization ] variable[filter_xml] assign[=] call[constant[].join, parameter[list[[<ast.Constant object at 0x7da1b024edd0>, <ast.Constant object at 0x7da1b024f490>, <ast.BinOp object at 0x7da1b024ded0>, <ast.Constant object at 0x7da2047e8c70>, <ast.Constant object at 0x7da2047e91e0>, <ast.Constant object at 0x7da2047ebe20>, <ast.Constant object at 0x7da2047ea560>, <ast.Constant object at 0x7da2047eb850>, <ast.BinOp object at 0x7da2047e8370>, <ast.Constant object at 0x7da2047ea1d0>, <ast.Constant object at 0x7da2047eacb0>, <ast.Constant object at 0x7da2047eb070>, <ast.Constant object at 0x7da2047ea230>, <ast.Constant object at 0x7da2047ead70>, <ast.BinOp object at 0x7da2047ebd30>, <ast.Constant object at 0x7da2047e87f0>, <ast.Constant object at 0x7da2047eb190>, <ast.Constant object at 0x7da1b02940a0>, <ast.Constant object at 0x7da1b0295300>, <ast.Constant object at 0x7da1b0295cf0>, <ast.Constant object at 0x7da1b0294fd0>, <ast.Constant object at 0x7da1b0297460>, <ast.Constant object at 0x7da1b0297c10>, <ast.Constant object at 0x7da1b02975b0>, <ast.Constant object at 0x7da1b02954e0>, <ast.BinOp object at 0x7da1b02957b0>, <ast.Constant object at 0x7da1b0297790>, <ast.Constant object at 0x7da1b0297a00>, <ast.Constant object at 0x7da1b0294cd0>, <ast.Constant object at 0x7da1b0296140>, <ast.Constant object at 0x7da1b0294610>, <ast.BinOp object at 0x7da1b0295030>, <ast.Constant object at 0x7da1b0295390>, <ast.Constant object at 0x7da1b0294580>, <ast.Constant object at 0x7da1b0297100>, <ast.Constant object at 0x7da1b0297730>, <ast.Constant object at 0x7da1b02979a0>]]]] call[name[util].write_filter, parameter[name[script], name[filter_xml]]] return[constant[None]]
keyword[def] identifier[isometric] ( identifier[script] , identifier[targetAbstractMinFaceNum] = literal[int] , identifier[targetAbstractMaxFaceNum] = literal[int] , identifier[stopCriteria] = literal[int] , identifier[convergenceSpeed] = literal[int] , identifier[DoubleStep] = keyword[True] ): literal[string] identifier[filter_xml] = literal[string] . identifier[join] ([ literal[string] , literal[string] , literal[string] % identifier[targetAbstractMinFaceNum] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] % identifier[targetAbstractMaxFaceNum] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] % identifier[stopCriteria] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] % identifier[convergenceSpeed] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] % identifier[str] ( identifier[DoubleStep] ). identifier[lower] (), literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[util] . identifier[write_filter] ( identifier[script] , identifier[filter_xml] ) keyword[return] keyword[None]
def isometric(script, targetAbstractMinFaceNum=140, targetAbstractMaxFaceNum=180, stopCriteria=1, convergenceSpeed=1, DoubleStep=True): """Isometric parameterization """ filter_xml = ''.join([' <filter name="Iso Parametrization">\n', ' <Param name="targetAbstractMinFaceNum"', 'value="%d"' % targetAbstractMinFaceNum, 'description="Abstract Min Mesh Size"', 'type="RichInt"', 'tooltip="This number and the following one indicate the range face number of the abstract mesh that is used for the parametrization process. The algorithm will choose the best abstract mesh with the number of triangles within the specified interval. If the mesh has a very simple structure this range can be very low and strict; for a roughly spherical object if you can specify a range of [8,8] faces you get a octahedral abstract mesh, e.g. a geometry image. &lt;br>Large numbers (greater than 400) are usually not of practical use."', '/>\n', ' <Param name="targetAbstractMaxFaceNum"', 'value="%d"' % targetAbstractMaxFaceNum, 'description="Abstract Max Mesh Size"', 'type="RichInt"', 'tooltip="Please notice that a large interval requires huge amount of memory to be allocated, in order save the intermediate results. An interval of 40 should be fine."', '/>\n', ' <Param name="stopCriteria"', 'value="%d"' % stopCriteria, 'description="Optimization Criteria"', 'enum_val0="Best Heuristic"', 'enum_val1="Area + Angle"', 'enum_val2="Regularity"', 'enum_val3="L2"', 'enum_cardinality="4"', 'type="RichEnum"', 'tooltip="Choose a metric to stop the parametrization within the interval. 1: Best Heuristic : stop considering both isometry and number of faces of base domain. 2: Area + Angle : stop at minimum area and angle distorsion. 3: Regularity : stop at minimum number of irregular vertices. 
4: L2 : stop at minimum OneWay L2 Stretch Eff"', '/>\n', ' <Param name="convergenceSpeed"', 'value="%d"' % convergenceSpeed, 'description="Convergence Precision"', 'type="RichInt"', 'tooltip="This parameter controls the convergence speed/precision of the optimization of the texture coordinates. Larger the number slower the processing and, eventually, slightly better results"', '/>\n', ' <Param name="DoubleStep"', 'value="%s"' % str(DoubleStep).lower(), 'description="Double Step"', 'type="RichBool"', 'tooltip="Use this bool to divide the parameterization in 2 steps. Double step makes the overall process faster and robust. Consider to disable this bool in case the object has topologycal noise or small handles."', '/>\n', ' </filter>\n']) util.write_filter(script, filter_xml) return None
def log_setup(debug_bool): """Set up logging. We output only to stdout. Instead of also writing to a log file, redirect stdout to a log file when the script is executed from cron. """ level = logging.DEBUG if debug_bool else logging.INFO logging.config.dictConfig( { "version": 1, "disable_existing_loggers": False, "formatters": { "verbose": { "format": "%(asctime)s %(levelname)-8s %(name)s %(module)s " "%(process)d %(thread)d %(message)s", "datefmt": "%Y-%m-%d %H:%M:%S", } }, "handlers": { "console": { "class": "logging.StreamHandler", "formatter": "verbose", "level": level, "stream": "ext://sys.stdout", } }, "loggers": { "": { "handlers": ["console"], "level": level, "class": "logging.StreamHandler", } }, } )
def function[log_setup, parameter[debug_bool]]: constant[Set up logging. We output only to stdout. Instead of also writing to a log file, redirect stdout to a log file when the script is executed from cron. ] variable[level] assign[=] <ast.IfExp object at 0x7da1b1b6bfa0> call[name[logging].config.dictConfig, parameter[dictionary[[<ast.Constant object at 0x7da1b1b69cc0>, <ast.Constant object at 0x7da1b1b6ac50>, <ast.Constant object at 0x7da1b1b6a440>, <ast.Constant object at 0x7da1b1b6ada0>, <ast.Constant object at 0x7da1b1b6ad10>], [<ast.Constant object at 0x7da1b1b691b0>, <ast.Constant object at 0x7da1b1b69240>, <ast.Dict object at 0x7da1b1b6b910>, <ast.Dict object at 0x7da1b1b68f10>, <ast.Dict object at 0x7da1b1b69330>]]]]
keyword[def] identifier[log_setup] ( identifier[debug_bool] ): literal[string] identifier[level] = identifier[logging] . identifier[DEBUG] keyword[if] identifier[debug_bool] keyword[else] identifier[logging] . identifier[INFO] identifier[logging] . identifier[config] . identifier[dictConfig] ( { literal[string] : literal[int] , literal[string] : keyword[False] , literal[string] :{ literal[string] :{ literal[string] : literal[string] literal[string] , literal[string] : literal[string] , } }, literal[string] :{ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[level] , literal[string] : literal[string] , } }, literal[string] :{ literal[string] :{ literal[string] :[ literal[string] ], literal[string] : identifier[level] , literal[string] : literal[string] , } }, } )
def log_setup(debug_bool): """Set up logging. We output only to stdout. Instead of also writing to a log file, redirect stdout to a log file when the script is executed from cron. """ level = logging.DEBUG if debug_bool else logging.INFO logging.config.dictConfig({'version': 1, 'disable_existing_loggers': False, 'formatters': {'verbose': {'format': '%(asctime)s %(levelname)-8s %(name)s %(module)s %(process)d %(thread)d %(message)s', 'datefmt': '%Y-%m-%d %H:%M:%S'}}, 'handlers': {'console': {'class': 'logging.StreamHandler', 'formatter': 'verbose', 'level': level, 'stream': 'ext://sys.stdout'}}, 'loggers': {'': {'handlers': ['console'], 'level': level, 'class': 'logging.StreamHandler'}}})
def with_reactor(*dec_args, **dec_kwargs): """ Decorator for test functions that require a running reactor. Can be used like this:: @with_reactor def test_connect_to_server(self): ... Or like this:: @with_reactor(timeout=10) def test_connect_to_server(self): ... If the test function returns a deferred then the test will be successful if the deferred resolves to a value or unsuccessful if the deferred errbacks. The test must not leave any connections or a like open. This will otherwise result in a reactor-unclean failure of the test. If there is a function called `twisted_setup()` in the same class as the test function is defined, then this function will be invoked before the test, but already in the context of the reactor. Note that the regular setup function provided by the testing framework will be executed too, but not in the reactor context. Accordingly, if there is a `twisted_teardown()` it executes after the test function, even if the test failed. If the test, including `twisted_setup` and `twisted_teardown`, has not completed within the timout, the test fails. The timeout defaults to two minutes. A timeout duration of zero disables the timeout. """ # This method takes care of the decorator protocol, it # distinguishes between using the decorator with brackets # and without brackets. It then calls `_twisted_test_sync()`. if len(dec_args) == 1 and callable(dec_args[0]) and not dec_kwargs: # decorator used without brackets: # @twisted_test # def test_xxx(): # .... callee = dec_args[0] dec_args = () dec_kwargs = {} @functools.wraps(callee) def wrapper(*call_args, **call_kwargs): return _twisted_test_sync(callee, call_args, call_kwargs) return wrapper else: # decorator used with brackets: # @twisted_test(*dec_args, **dec_args) # def test_xxx(): # .... def decorator(callee): @functools.wraps(callee) def wrapper(*call_args, **call_kwargs): return _twisted_test_sync(callee, call_args, call_kwargs, *dec_args, **dec_kwargs) return wrapper return decorator
def function[with_reactor, parameter[]]: constant[ Decorator for test functions that require a running reactor. Can be used like this:: @with_reactor def test_connect_to_server(self): ... Or like this:: @with_reactor(timeout=10) def test_connect_to_server(self): ... If the test function returns a deferred then the test will be successful if the deferred resolves to a value or unsuccessful if the deferred errbacks. The test must not leave any connections or a like open. This will otherwise result in a reactor-unclean failure of the test. If there is a function called `twisted_setup()` in the same class as the test function is defined, then this function will be invoked before the test, but already in the context of the reactor. Note that the regular setup function provided by the testing framework will be executed too, but not in the reactor context. Accordingly, if there is a `twisted_teardown()` it executes after the test function, even if the test failed. If the test, including `twisted_setup` and `twisted_teardown`, has not completed within the timout, the test fails. The timeout defaults to two minutes. A timeout duration of zero disables the timeout. ] if <ast.BoolOp object at 0x7da20c795630> begin[:] variable[callee] assign[=] call[name[dec_args]][constant[0]] variable[dec_args] assign[=] tuple[[]] variable[dec_kwargs] assign[=] dictionary[[], []] def function[wrapper, parameter[]]: return[call[name[_twisted_test_sync], parameter[name[callee], name[call_args], name[call_kwargs]]]] return[name[wrapper]]
keyword[def] identifier[with_reactor] (* identifier[dec_args] ,** identifier[dec_kwargs] ): literal[string] keyword[if] identifier[len] ( identifier[dec_args] )== literal[int] keyword[and] identifier[callable] ( identifier[dec_args] [ literal[int] ]) keyword[and] keyword[not] identifier[dec_kwargs] : identifier[callee] = identifier[dec_args] [ literal[int] ] identifier[dec_args] =() identifier[dec_kwargs] ={} @ identifier[functools] . identifier[wraps] ( identifier[callee] ) keyword[def] identifier[wrapper] (* identifier[call_args] ,** identifier[call_kwargs] ): keyword[return] identifier[_twisted_test_sync] ( identifier[callee] , identifier[call_args] , identifier[call_kwargs] ) keyword[return] identifier[wrapper] keyword[else] : keyword[def] identifier[decorator] ( identifier[callee] ): @ identifier[functools] . identifier[wraps] ( identifier[callee] ) keyword[def] identifier[wrapper] (* identifier[call_args] ,** identifier[call_kwargs] ): keyword[return] identifier[_twisted_test_sync] ( identifier[callee] , identifier[call_args] , identifier[call_kwargs] ,* identifier[dec_args] ,** identifier[dec_kwargs] ) keyword[return] identifier[wrapper] keyword[return] identifier[decorator]
def with_reactor(*dec_args, **dec_kwargs): """ Decorator for test functions that require a running reactor. Can be used like this:: @with_reactor def test_connect_to_server(self): ... Or like this:: @with_reactor(timeout=10) def test_connect_to_server(self): ... If the test function returns a deferred then the test will be successful if the deferred resolves to a value or unsuccessful if the deferred errbacks. The test must not leave any connections or a like open. This will otherwise result in a reactor-unclean failure of the test. If there is a function called `twisted_setup()` in the same class as the test function is defined, then this function will be invoked before the test, but already in the context of the reactor. Note that the regular setup function provided by the testing framework will be executed too, but not in the reactor context. Accordingly, if there is a `twisted_teardown()` it executes after the test function, even if the test failed. If the test, including `twisted_setup` and `twisted_teardown`, has not completed within the timout, the test fails. The timeout defaults to two minutes. A timeout duration of zero disables the timeout. """ # This method takes care of the decorator protocol, it # distinguishes between using the decorator with brackets # and without brackets. It then calls `_twisted_test_sync()`. if len(dec_args) == 1 and callable(dec_args[0]) and (not dec_kwargs): # decorator used without brackets: # @twisted_test # def test_xxx(): # .... callee = dec_args[0] dec_args = () dec_kwargs = {} @functools.wraps(callee) def wrapper(*call_args, **call_kwargs): return _twisted_test_sync(callee, call_args, call_kwargs) return wrapper # depends on [control=['if'], data=[]] else: # decorator used with brackets: # @twisted_test(*dec_args, **dec_args) # def test_xxx(): # .... 
def decorator(callee): @functools.wraps(callee) def wrapper(*call_args, **call_kwargs): return _twisted_test_sync(callee, call_args, call_kwargs, *dec_args, **dec_kwargs) return wrapper return decorator
def print_name(self, indent=0, end='\n'):
    """Print name with optional indent and end."""
    # Prefix the bright-style escape and the requested indentation.
    padding = ' ' * indent
    print(Style.BRIGHT + padding + self.name, end=end)
def function[print_name, parameter[self, indent, end]]: constant[Print name with optional indent and end.] call[name[print], parameter[binary_operation[binary_operation[name[Style].BRIGHT + binary_operation[constant[ ] * name[indent]]] + name[self].name]]]
keyword[def] identifier[print_name] ( identifier[self] , identifier[indent] = literal[int] , identifier[end] = literal[string] ): literal[string] identifier[print] ( identifier[Style] . identifier[BRIGHT] + literal[string] * identifier[indent] + identifier[self] . identifier[name] , identifier[end] = identifier[end] )
def print_name(self, indent=0, end='\n'): """Print name with optional indent and end.""" print(Style.BRIGHT + ' ' * indent + self.name, end=end)
def extract_execution_state(self, topology):
    """
    Returns the repesentation of execution state that will be
    returned from Tracker.
    """
    estate = topology.execution_state
    release = estate.release_state
    result = {
        "cluster": estate.cluster,
        "environ": estate.environ,
        "role": estate.role,
        "jobname": topology.name,
        "submission_time": estate.submission_time,
        "submission_user": estate.submission_user,
        "release_username": release.release_username,
        "release_tag": release.release_tag,
        "release_version": release.release_version,
        "has_physical_plan": None,
        "has_tmaster_location": None,
        "has_scheduler_location": None,
        "extra_links": [],
    }
    # Expand each configured extra link, formatting its URL template
    # against the execution-state values collected above.
    for link_template in self.config.extra_links:
        link = link_template.copy()
        link["url"] = self.config.get_formatted_url(
            result, link[EXTRA_LINK_FORMATTER_KEY])
        result["extra_links"].append(link)
    return result
def function[extract_execution_state, parameter[self, topology]]: constant[ Returns the repesentation of execution state that will be returned from Tracker. ] variable[execution_state] assign[=] name[topology].execution_state variable[executionState] assign[=] dictionary[[<ast.Constant object at 0x7da20c76de10>, <ast.Constant object at 0x7da20c76da20>, <ast.Constant object at 0x7da20c76ef20>, <ast.Constant object at 0x7da20c76d9f0>, <ast.Constant object at 0x7da20c76c7c0>, <ast.Constant object at 0x7da20c76d420>, <ast.Constant object at 0x7da20c76c880>, <ast.Constant object at 0x7da20c76f910>, <ast.Constant object at 0x7da20c76caf0>, <ast.Constant object at 0x7da20c76e410>, <ast.Constant object at 0x7da20c76eaa0>, <ast.Constant object at 0x7da20c76c5e0>, <ast.Constant object at 0x7da20c76d4b0>], [<ast.Attribute object at 0x7da20c76cfa0>, <ast.Attribute object at 0x7da20c76e6b0>, <ast.Attribute object at 0x7da20c76f5b0>, <ast.Attribute object at 0x7da20c76dcf0>, <ast.Attribute object at 0x7da20c76da50>, <ast.Attribute object at 0x7da20c76e980>, <ast.Attribute object at 0x7da20c76d750>, <ast.Attribute object at 0x7da20c76ef50>, <ast.Attribute object at 0x7da20c76f310>, <ast.Constant object at 0x7da20c76fd00>, <ast.Constant object at 0x7da20c76f5e0>, <ast.Constant object at 0x7da20c76e500>, <ast.List object at 0x7da20c76cc40>]] for taget[name[extra_link]] in starred[name[self].config.extra_links] begin[:] variable[link] assign[=] call[name[extra_link].copy, parameter[]] call[name[link]][constant[url]] assign[=] call[name[self].config.get_formatted_url, parameter[name[executionState], call[name[link]][name[EXTRA_LINK_FORMATTER_KEY]]]] call[call[name[executionState]][constant[extra_links]].append, parameter[name[link]]] return[name[executionState]]
keyword[def] identifier[extract_execution_state] ( identifier[self] , identifier[topology] ): literal[string] identifier[execution_state] = identifier[topology] . identifier[execution_state] identifier[executionState] ={ literal[string] : identifier[execution_state] . identifier[cluster] , literal[string] : identifier[execution_state] . identifier[environ] , literal[string] : identifier[execution_state] . identifier[role] , literal[string] : identifier[topology] . identifier[name] , literal[string] : identifier[execution_state] . identifier[submission_time] , literal[string] : identifier[execution_state] . identifier[submission_user] , literal[string] : identifier[execution_state] . identifier[release_state] . identifier[release_username] , literal[string] : identifier[execution_state] . identifier[release_state] . identifier[release_tag] , literal[string] : identifier[execution_state] . identifier[release_state] . identifier[release_version] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] : keyword[None] , literal[string] :[], } keyword[for] identifier[extra_link] keyword[in] identifier[self] . identifier[config] . identifier[extra_links] : identifier[link] = identifier[extra_link] . identifier[copy] () identifier[link] [ literal[string] ]= identifier[self] . identifier[config] . identifier[get_formatted_url] ( identifier[executionState] , identifier[link] [ identifier[EXTRA_LINK_FORMATTER_KEY] ]) identifier[executionState] [ literal[string] ]. identifier[append] ( identifier[link] ) keyword[return] identifier[executionState]
def extract_execution_state(self, topology): """ Returns the repesentation of execution state that will be returned from Tracker. """ execution_state = topology.execution_state executionState = {'cluster': execution_state.cluster, 'environ': execution_state.environ, 'role': execution_state.role, 'jobname': topology.name, 'submission_time': execution_state.submission_time, 'submission_user': execution_state.submission_user, 'release_username': execution_state.release_state.release_username, 'release_tag': execution_state.release_state.release_tag, 'release_version': execution_state.release_state.release_version, 'has_physical_plan': None, 'has_tmaster_location': None, 'has_scheduler_location': None, 'extra_links': []} for extra_link in self.config.extra_links: link = extra_link.copy() link['url'] = self.config.get_formatted_url(executionState, link[EXTRA_LINK_FORMATTER_KEY]) executionState['extra_links'].append(link) # depends on [control=['for'], data=['extra_link']] return executionState
def _TransmissionThreadProc(self): """Entry point for the transmission worker thread.""" reconnect = True while not self._shutdown: self._new_updates.clear() if reconnect: service = self._BuildService() reconnect = False reconnect, delay = self._TransmitBreakpointUpdates(service) self._new_updates.wait(delay)
def function[_TransmissionThreadProc, parameter[self]]: constant[Entry point for the transmission worker thread.] variable[reconnect] assign[=] constant[True] while <ast.UnaryOp object at 0x7da204962110> begin[:] call[name[self]._new_updates.clear, parameter[]] if name[reconnect] begin[:] variable[service] assign[=] call[name[self]._BuildService, parameter[]] variable[reconnect] assign[=] constant[False] <ast.Tuple object at 0x7da20c6a9000> assign[=] call[name[self]._TransmitBreakpointUpdates, parameter[name[service]]] call[name[self]._new_updates.wait, parameter[name[delay]]]
keyword[def] identifier[_TransmissionThreadProc] ( identifier[self] ): literal[string] identifier[reconnect] = keyword[True] keyword[while] keyword[not] identifier[self] . identifier[_shutdown] : identifier[self] . identifier[_new_updates] . identifier[clear] () keyword[if] identifier[reconnect] : identifier[service] = identifier[self] . identifier[_BuildService] () identifier[reconnect] = keyword[False] identifier[reconnect] , identifier[delay] = identifier[self] . identifier[_TransmitBreakpointUpdates] ( identifier[service] ) identifier[self] . identifier[_new_updates] . identifier[wait] ( identifier[delay] )
def _TransmissionThreadProc(self): """Entry point for the transmission worker thread.""" reconnect = True while not self._shutdown: self._new_updates.clear() if reconnect: service = self._BuildService() reconnect = False # depends on [control=['if'], data=[]] (reconnect, delay) = self._TransmitBreakpointUpdates(service) self._new_updates.wait(delay) # depends on [control=['while'], data=[]]
def create_discrete_action_masking_layer(all_logits, action_masks, action_size):
    """
    Creates a masking layer for the discrete actions
    :param all_logits: The concatenated unnormalized action probabilities for
                       all branches
    :param action_masks: The mask for the logits. Must be of dimension
                         [None x total_number_of_action]
    :param action_size: A list containing the number of possible actions for
                        each branch
    :return: The action output dimension [batch_size, num_branches] and the
             concatenated normalized logits
    """
    num_branches = len(action_size)
    # Offsets of each branch inside the concatenated logits/masks tensors.
    branch_offsets = [0] + list(np.cumsum(action_size))
    branches_logits = [
        all_logits[:, branch_offsets[i]:branch_offsets[i + 1]]
        for i in range(num_branches)]
    branch_masks = [
        action_masks[:, branch_offsets[i]:branch_offsets[i + 1]]
        for i in range(num_branches)]
    # Mask the per-branch softmax probabilities; the epsilon keeps the
    # subsequent log() finite for fully-masked entries.
    raw_probs = [
        tf.multiply(tf.nn.softmax(branches_logits[i]) + 1.0e-10,
                    branch_masks[i])
        for i in range(num_branches)]
    normalized_probs = [
        tf.divide(raw_probs[i],
                  tf.reduce_sum(raw_probs[i], axis=1, keepdims=True))
        for i in range(num_branches)]
    # Sample one action per branch from the renormalized distributions.
    output = tf.concat(
        [tf.multinomial(tf.log(normalized_probs[i]), 1)
         for i in range(num_branches)],
        axis=1)
    normalized_logits = tf.concat(
        [tf.log(normalized_probs[i] + 1.0e-10) for i in range(num_branches)],
        axis=1)
    return output, normalized_logits
def function[create_discrete_action_masking_layer, parameter[all_logits, action_masks, action_size]]: constant[ Creates a masking layer for the discrete actions :param all_logits: The concatenated unnormalized action probabilities for all branches :param action_masks: The mask for the logits. Must be of dimension [None x total_number_of_action] :param action_size: A list containing the number of possible actions for each branch :return: The action output dimension [batch_size, num_branches] and the concatenated normalized logits ] variable[action_idx] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b1ef81f0>]] + call[name[list], parameter[call[name[np].cumsum, parameter[name[action_size]]]]]] variable[branches_logits] assign[=] <ast.ListComp object at 0x7da1b1efa110> variable[branch_masks] assign[=] <ast.ListComp object at 0x7da1b1efab30> variable[raw_probs] assign[=] <ast.ListComp object at 0x7da1b1ef8190> variable[normalized_probs] assign[=] <ast.ListComp object at 0x7da1b1e10a90> variable[output] assign[=] call[name[tf].concat, parameter[<ast.ListComp object at 0x7da1b1e10730>]] return[tuple[[<ast.Name object at 0x7da1b1eeba90>, <ast.Call object at 0x7da1b1eea1a0>]]]
keyword[def] identifier[create_discrete_action_masking_layer] ( identifier[all_logits] , identifier[action_masks] , identifier[action_size] ): literal[string] identifier[action_idx] =[ literal[int] ]+ identifier[list] ( identifier[np] . identifier[cumsum] ( identifier[action_size] )) identifier[branches_logits] =[ identifier[all_logits] [:, identifier[action_idx] [ identifier[i] ]: identifier[action_idx] [ identifier[i] + literal[int] ]] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))] identifier[branch_masks] =[ identifier[action_masks] [:, identifier[action_idx] [ identifier[i] ]: identifier[action_idx] [ identifier[i] + literal[int] ]] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))] identifier[raw_probs] =[ identifier[tf] . identifier[multiply] ( identifier[tf] . identifier[nn] . identifier[softmax] ( identifier[branches_logits] [ identifier[k] ])+ literal[int] , identifier[branch_masks] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))] identifier[normalized_probs] =[ identifier[tf] . identifier[divide] ( identifier[raw_probs] [ identifier[k] ], identifier[tf] . identifier[reduce_sum] ( identifier[raw_probs] [ identifier[k] ], identifier[axis] = literal[int] , identifier[keepdims] = keyword[True] )) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))] identifier[output] = identifier[tf] . identifier[concat] ([ identifier[tf] . identifier[multinomial] ( identifier[tf] . identifier[log] ( identifier[normalized_probs] [ identifier[k] ]), literal[int] ) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))], identifier[axis] = literal[int] ) keyword[return] identifier[output] , identifier[tf] . identifier[concat] ([ identifier[tf] . 
identifier[log] ( identifier[normalized_probs] [ identifier[k] ]+ literal[int] ) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[len] ( identifier[action_size] ))], identifier[axis] = literal[int] )
def create_discrete_action_masking_layer(all_logits, action_masks, action_size): """ Creates a masking layer for the discrete actions :param all_logits: The concatenated unnormalized action probabilities for all branches :param action_masks: The mask for the logits. Must be of dimension [None x total_number_of_action] :param action_size: A list containing the number of possible actions for each branch :return: The action output dimension [batch_size, num_branches] and the concatenated normalized logits """ action_idx = [0] + list(np.cumsum(action_size)) branches_logits = [all_logits[:, action_idx[i]:action_idx[i + 1]] for i in range(len(action_size))] branch_masks = [action_masks[:, action_idx[i]:action_idx[i + 1]] for i in range(len(action_size))] raw_probs = [tf.multiply(tf.nn.softmax(branches_logits[k]) + 1e-10, branch_masks[k]) for k in range(len(action_size))] normalized_probs = [tf.divide(raw_probs[k], tf.reduce_sum(raw_probs[k], axis=1, keepdims=True)) for k in range(len(action_size))] output = tf.concat([tf.multinomial(tf.log(normalized_probs[k]), 1) for k in range(len(action_size))], axis=1) return (output, tf.concat([tf.log(normalized_probs[k] + 1e-10) for k in range(len(action_size))], axis=1))
def format_legacy_trace_json(span_datas):
    """Formats a list of SpanData tuples into the legacy 'trace'
    dictionary format for backwards compatibility

    :type span_datas: list of :class:
        `~opencensus.trace.span_data.SpanData`
    :param list of opencensus.trace.span_data.SpanData span_datas:
        SpanData tuples to emit

    :rtype: dict
    :return: Legacy 'trace' dictionary representing given SpanData tuples
    """
    # Nothing to format — the legacy representation of an empty trace
    # is an empty dict.
    if not span_datas:
        return {}

    first_span = span_datas[0]
    assert isinstance(first_span, SpanData)

    # The trace id is taken from the first span's context.
    trace_id = None if first_span.context is None \
        else first_span.context.trace_id
    assert trace_id is not None

    legacy_spans = [_format_legacy_span_json(sd) for sd in span_datas]
    return {'traceId': trace_id, 'spans': legacy_spans}
def function[format_legacy_trace_json, parameter[span_datas]]: constant[Formats a list of SpanData tuples into the legacy 'trace' dictionary format for backwards compatibility :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit :rtype: dict :return: Legacy 'trace' dictionary representing given SpanData tuples ] if <ast.UnaryOp object at 0x7da20cabd7e0> begin[:] return[dictionary[[], []]] variable[top_span] assign[=] call[name[span_datas]][constant[0]] assert[call[name[isinstance], parameter[name[top_span], name[SpanData]]]] variable[trace_id] assign[=] <ast.IfExp object at 0x7da20cabc250> assert[compare[name[trace_id] is_not constant[None]]] return[dictionary[[<ast.Constant object at 0x7da2047e9810>, <ast.Constant object at 0x7da2047e9db0>], [<ast.Name object at 0x7da2047eaa70>, <ast.ListComp object at 0x7da2047e9d20>]]]
keyword[def] identifier[format_legacy_trace_json] ( identifier[span_datas] ): literal[string] keyword[if] keyword[not] identifier[span_datas] : keyword[return] {} identifier[top_span] = identifier[span_datas] [ literal[int] ] keyword[assert] identifier[isinstance] ( identifier[top_span] , identifier[SpanData] ) identifier[trace_id] = identifier[top_span] . identifier[context] . identifier[trace_id] keyword[if] identifier[top_span] . identifier[context] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] keyword[assert] identifier[trace_id] keyword[is] keyword[not] keyword[None] keyword[return] { literal[string] : identifier[trace_id] , literal[string] :[ identifier[_format_legacy_span_json] ( identifier[sd] ) keyword[for] identifier[sd] keyword[in] identifier[span_datas] ], }
def format_legacy_trace_json(span_datas): """Formats a list of SpanData tuples into the legacy 'trace' dictionary format for backwards compatibility :type span_datas: list of :class: `~opencensus.trace.span_data.SpanData` :param list of opencensus.trace.span_data.SpanData span_datas: SpanData tuples to emit :rtype: dict :return: Legacy 'trace' dictionary representing given SpanData tuples """ if not span_datas: return {} # depends on [control=['if'], data=[]] top_span = span_datas[0] assert isinstance(top_span, SpanData) trace_id = top_span.context.trace_id if top_span.context is not None else None assert trace_id is not None return {'traceId': trace_id, 'spans': [_format_legacy_span_json(sd) for sd in span_datas]}
def expand_focussed(self):
    """
    Expand currently focussed position; works only if the underlying
    tree allows it.
    """
    # Only trees implementing the collapse API can be expanded.
    if implementsCollapseAPI(self._tree):
        _widget, position = self.get_focus()
        self._tree.expand(position)
        # Cached walker entries are stale after the structural change.
        self._walker.clear_cache()
        self.refresh()
def function[expand_focussed, parameter[self]]: constant[ Expand currently focussed position; works only if the underlying tree allows it. ] if call[name[implementsCollapseAPI], parameter[name[self]._tree]] begin[:] <ast.Tuple object at 0x7da20c991570> assign[=] call[name[self].get_focus, parameter[]] call[name[self]._tree.expand, parameter[name[focuspos]]] call[name[self]._walker.clear_cache, parameter[]] call[name[self].refresh, parameter[]]
keyword[def] identifier[expand_focussed] ( identifier[self] ): literal[string] keyword[if] identifier[implementsCollapseAPI] ( identifier[self] . identifier[_tree] ): identifier[w] , identifier[focuspos] = identifier[self] . identifier[get_focus] () identifier[self] . identifier[_tree] . identifier[expand] ( identifier[focuspos] ) identifier[self] . identifier[_walker] . identifier[clear_cache] () identifier[self] . identifier[refresh] ()
def expand_focussed(self): """ Expand currently focussed position; works only if the underlying tree allows it. """ if implementsCollapseAPI(self._tree): (w, focuspos) = self.get_focus() self._tree.expand(focuspos) self._walker.clear_cache() self.refresh() # depends on [control=['if'], data=[]]
def register_aliases(self, aliases):
    """Registers the given aliases to be exposed in parsed BUILD files.

    :param aliases: The BuildFileAliases to register.
    :type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases`
    """
    if not isinstance(aliases, BuildFileAliases):
        raise TypeError('The aliases must be a BuildFileAliases, given {}'.format(aliases))

    # Pair each alias mapping with the registration routine that handles it,
    # then register every entry, group by group.
    registrations = (
        (aliases.target_types, self._register_target_alias),
        (aliases.target_macro_factories,
         self._register_target_macro_factory_alias),
        (aliases.objects, self._register_exposed_object),
        (aliases.context_aware_object_factories,
         self._register_exposed_context_aware_object_factory),
    )
    for mapping, register in registrations:
        for alias, value in mapping.items():
            register(alias, value)
def function[register_aliases, parameter[self, aliases]]: constant[Registers the given aliases to be exposed in parsed BUILD files. :param aliases: The BuildFileAliases to register. :type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases` ] if <ast.UnaryOp object at 0x7da1b227b310> begin[:] <ast.Raise object at 0x7da1b227aef0> for taget[tuple[[<ast.Name object at 0x7da1b1e8f2e0>, <ast.Name object at 0x7da1b1e8c610>]]] in starred[call[name[aliases].target_types.items, parameter[]]] begin[:] call[name[self]._register_target_alias, parameter[name[alias], name[target_type]]] for taget[tuple[[<ast.Name object at 0x7da1b1e8cc40>, <ast.Name object at 0x7da1b1e8ec50>]]] in starred[call[name[aliases].target_macro_factories.items, parameter[]]] begin[:] call[name[self]._register_target_macro_factory_alias, parameter[name[alias], name[target_macro_factory]]] for taget[tuple[[<ast.Name object at 0x7da1b1e8e110>, <ast.Name object at 0x7da1b1e8eb60>]]] in starred[call[name[aliases].objects.items, parameter[]]] begin[:] call[name[self]._register_exposed_object, parameter[name[alias], name[obj]]] for taget[tuple[[<ast.Name object at 0x7da1b1e8c970>, <ast.Name object at 0x7da1b1e8e410>]]] in starred[call[name[aliases].context_aware_object_factories.items, parameter[]]] begin[:] call[name[self]._register_exposed_context_aware_object_factory, parameter[name[alias], name[context_aware_object_factory]]]
keyword[def] identifier[register_aliases] ( identifier[self] , identifier[aliases] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[aliases] , identifier[BuildFileAliases] ): keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[aliases] )) keyword[for] identifier[alias] , identifier[target_type] keyword[in] identifier[aliases] . identifier[target_types] . identifier[items] (): identifier[self] . identifier[_register_target_alias] ( identifier[alias] , identifier[target_type] ) keyword[for] identifier[alias] , identifier[target_macro_factory] keyword[in] identifier[aliases] . identifier[target_macro_factories] . identifier[items] (): identifier[self] . identifier[_register_target_macro_factory_alias] ( identifier[alias] , identifier[target_macro_factory] ) keyword[for] identifier[alias] , identifier[obj] keyword[in] identifier[aliases] . identifier[objects] . identifier[items] (): identifier[self] . identifier[_register_exposed_object] ( identifier[alias] , identifier[obj] ) keyword[for] identifier[alias] , identifier[context_aware_object_factory] keyword[in] identifier[aliases] . identifier[context_aware_object_factories] . identifier[items] (): identifier[self] . identifier[_register_exposed_context_aware_object_factory] ( identifier[alias] , identifier[context_aware_object_factory] )
def register_aliases(self, aliases): """Registers the given aliases to be exposed in parsed BUILD files. :param aliases: The BuildFileAliases to register. :type aliases: :class:`pants.build_graph.build_file_aliases.BuildFileAliases` """ if not isinstance(aliases, BuildFileAliases): raise TypeError('The aliases must be a BuildFileAliases, given {}'.format(aliases)) # depends on [control=['if'], data=[]] for (alias, target_type) in aliases.target_types.items(): self._register_target_alias(alias, target_type) # depends on [control=['for'], data=[]] for (alias, target_macro_factory) in aliases.target_macro_factories.items(): self._register_target_macro_factory_alias(alias, target_macro_factory) # depends on [control=['for'], data=[]] for (alias, obj) in aliases.objects.items(): self._register_exposed_object(alias, obj) # depends on [control=['for'], data=[]] for (alias, context_aware_object_factory) in aliases.context_aware_object_factories.items(): self._register_exposed_context_aware_object_factory(alias, context_aware_object_factory) # depends on [control=['for'], data=[]]
def get_selected_text(self):
    """
    Return text selected by current text cursor, converted in unicode

    Replace the unicode paragraph separator character (U+2029), which Qt
    uses between selected lines, by the line separator characters
    returned by get_line_separator
    """
    selection = self.textCursor().selectedText()
    text = to_text_string(selection)
    return text.replace(u"\u2029", self.get_line_separator())
def function[get_selected_text, parameter[self]]: constant[ Return text selected by current text cursor, converted in unicode Replace the unicode line separator character 
 by the line separator characters returned by get_line_separator ] return[call[call[name[to_text_string], parameter[call[call[name[self].textCursor, parameter[]].selectedText, parameter[]]]].replace, parameter[constant[
], call[name[self].get_line_separator, parameter[]]]]]
keyword[def] identifier[get_selected_text] ( identifier[self] ): literal[string] keyword[return] identifier[to_text_string] ( identifier[self] . identifier[textCursor] (). identifier[selectedText] ()). identifier[replace] ( literal[string] , identifier[self] . identifier[get_line_separator] ())
def get_selected_text(self): """ Return text selected by current text cursor, converted in unicode Replace the unicode line separator character \u2029 by the line separator characters returned by get_line_separator """ return to_text_string(self.textCursor().selectedText()).replace(u'\u2029', self.get_line_separator())
def get_club_members(self, club_id, limit=None):
    """
    Gets the member objects for specified club ID.

    http://strava.github.io/api/v3/clubs/#get-members

    :param club_id: The numeric ID for the club.
    :type club_id: int

    :param limit: Maximum number of athletes to return. (default unlimited)
    :type limit: int

    :return: An iterator of :class:`stravalib.model.Athlete` objects.
    :rtype: :class:`BatchedResultsIterator`
    """
    # Bind the endpoint and club id now; the iterator supplies paging
    # arguments when it invokes the fetcher.
    fetch_members = functools.partial(
        self.protocol.get, '/clubs/{id}/members', id=club_id)
    return BatchedResultsIterator(
        entity=model.Athlete,
        bind_client=self,
        result_fetcher=fetch_members,
        limit=limit)
def function[get_club_members, parameter[self, club_id, limit]]: constant[ Gets the member objects for specified club ID. http://strava.github.io/api/v3/clubs/#get-members :param club_id: The numeric ID for the club. :type club_id: int :param limit: Maximum number of athletes to return. (default unlimited) :type limit: int :return: An iterator of :class:`stravalib.model.Athlete` objects. :rtype: :class:`BatchedResultsIterator` ] variable[result_fetcher] assign[=] call[name[functools].partial, parameter[name[self].protocol.get, constant[/clubs/{id}/members]]] return[call[name[BatchedResultsIterator], parameter[]]]
keyword[def] identifier[get_club_members] ( identifier[self] , identifier[club_id] , identifier[limit] = keyword[None] ): literal[string] identifier[result_fetcher] = identifier[functools] . identifier[partial] ( identifier[self] . identifier[protocol] . identifier[get] , literal[string] , identifier[id] = identifier[club_id] ) keyword[return] identifier[BatchedResultsIterator] ( identifier[entity] = identifier[model] . identifier[Athlete] , identifier[bind_client] = identifier[self] , identifier[result_fetcher] = identifier[result_fetcher] , identifier[limit] = identifier[limit] )
def get_club_members(self, club_id, limit=None): """ Gets the member objects for specified club ID. http://strava.github.io/api/v3/clubs/#get-members :param club_id: The numeric ID for the club. :type club_id: int :param limit: Maximum number of athletes to return. (default unlimited) :type limit: int :return: An iterator of :class:`stravalib.model.Athlete` objects. :rtype: :class:`BatchedResultsIterator` """ result_fetcher = functools.partial(self.protocol.get, '/clubs/{id}/members', id=club_id) return BatchedResultsIterator(entity=model.Athlete, bind_client=self, result_fetcher=result_fetcher, limit=limit)
def _delete_port_channel_resources(self, host_id, switch_ip,
                                   intf_type, nexus_port, port_id):
    '''This determines if port channel id needs to be freed.

    Tears down a driver-managed port-channel on a Nexus switch once the
    last neutron port binding that used it is gone: removes the channel
    group from the underlying ethernet interface, deletes the
    port-channel, and releases the vpc id back to the pool.

    :param host_id: host name used to look up the switch/host mapping.
    :param switch_ip: IP address of the Nexus switch.
    :param intf_type: interface type; anything other than
        'port-channel' is ignored.
    :param nexus_port: port-channel number (doubles as the vpc id).
    :param port_id: neutron port id used to check for remaining bindings.
    '''

    # if this connection is not a port-channel, nothing to do.
    if intf_type != 'port-channel':
        return

    # Check if this driver created it and its no longer needed.
    try:
        vpc = nxos_db.get_switch_vpc_alloc(
            switch_ip, nexus_port)
    except excep.NexusVPCAllocNotFound:
        # This can occur for non-baremetal configured
        # port-channels. Nothing more to do.
        LOG.debug("Switch %s portchannel %s vpc entry not "
                  "found in vpcid alloc table.",
                  switch_ip, nexus_port)
        return

    # if this isn't one which was allocated or learned,
    # don't do any further processing.
    if not vpc.active:
        LOG.debug("Switch %s portchannel %s vpc entry not "
                  "active.",
                  switch_ip, nexus_port)
        return

    # Is this port-channel still in use?
    # If so, nothing more to do.
    try:
        nxos_db.get_nexus_switchport_binding(port_id, switch_ip)
        LOG.debug("Switch %s portchannel %s port entries "
                  "in use. Skipping port-channel clean-up.",
                  switch_ip, nexus_port)
        return
    except excep.NexusPortBindingNotFound:
        # No remaining bindings: safe to tear the port-channel down.
        pass

    # need to get ethernet interface name
    try:
        mapping = nxos_db.get_switch_and_host_mappings(
            host_id, switch_ip)
        # mapping[0].if_id is e.g. 'ethernet:1/10'; split into type/port.
        eth_type, eth_port = nexus_help.split_interface_name(
            mapping[0].if_id)
    except excep.NexusHostMappingNotFound:
        LOG.warning("Switch %s hostid %s host_mapping not "
                    "found. Skipping port-channel clean-up.",
                    switch_ip, host_id)
        return

    # Remove the channel group from ethernet interface
    # and remove port channel from this switch.
    # Learned port-channels were pre-configured by the admin, so only
    # driver-created ones are deconfigured on the switch itself.
    if not vpc.learned:
        self.driver.delete_ch_grp_to_interface(
            switch_ip, eth_type, eth_port,
            nexus_port)
        self.driver.delete_port_channel(switch_ip, nexus_port)
    try:
        nxos_db.free_vpcid_for_switch(nexus_port, switch_ip)
        LOG.info("Released portchannel %s resources for "
                 "switch %s", nexus_port, switch_ip)
    except excep.NexusVPCAllocNotFound:
        # Not all learned port channels will be in this db when
        # they're outside the configured vpc_pool so
        # this exception may be possible.
        LOG.warning("Failed to free vpcid %s for switch %s "
                    "since it did not exist in table.",
                    nexus_port, switch_ip)
def function[_delete_port_channel_resources, parameter[self, host_id, switch_ip, intf_type, nexus_port, port_id]]: constant[This determines if port channel id needs to be freed.] if compare[name[intf_type] not_equal[!=] constant[port-channel]] begin[:] return[None] <ast.Try object at 0x7da18ede78e0> if <ast.UnaryOp object at 0x7da18ede4160> begin[:] call[name[LOG].debug, parameter[constant[Switch %s portchannel %s vpc entry not active.], name[switch_ip], name[nexus_port]]] return[None] <ast.Try object at 0x7da18ede7ca0> <ast.Try object at 0x7da2041db2b0> if <ast.UnaryOp object at 0x7da2041da890> begin[:] call[name[self].driver.delete_ch_grp_to_interface, parameter[name[switch_ip], name[eth_type], name[eth_port], name[nexus_port]]] call[name[self].driver.delete_port_channel, parameter[name[switch_ip], name[nexus_port]]] <ast.Try object at 0x7da2041da3b0>
keyword[def] identifier[_delete_port_channel_resources] ( identifier[self] , identifier[host_id] , identifier[switch_ip] , identifier[intf_type] , identifier[nexus_port] , identifier[port_id] ): literal[string] keyword[if] identifier[intf_type] != literal[string] : keyword[return] keyword[try] : identifier[vpc] = identifier[nxos_db] . identifier[get_switch_vpc_alloc] ( identifier[switch_ip] , identifier[nexus_port] ) keyword[except] identifier[excep] . identifier[NexusVPCAllocNotFound] : identifier[LOG] . identifier[debug] ( literal[string] literal[string] , identifier[switch_ip] , identifier[nexus_port] ) keyword[return] keyword[if] keyword[not] identifier[vpc] . identifier[active] : identifier[LOG] . identifier[debug] ( literal[string] literal[string] , identifier[switch_ip] , identifier[nexus_port] ) keyword[return] keyword[try] : identifier[nxos_db] . identifier[get_nexus_switchport_binding] ( identifier[port_id] , identifier[switch_ip] ) identifier[LOG] . identifier[debug] ( literal[string] literal[string] , identifier[switch_ip] , identifier[nexus_port] ) keyword[return] keyword[except] identifier[excep] . identifier[NexusPortBindingNotFound] : keyword[pass] keyword[try] : identifier[mapping] = identifier[nxos_db] . identifier[get_switch_and_host_mappings] ( identifier[host_id] , identifier[switch_ip] ) identifier[eth_type] , identifier[eth_port] = identifier[nexus_help] . identifier[split_interface_name] ( identifier[mapping] [ literal[int] ]. identifier[if_id] ) keyword[except] identifier[excep] . identifier[NexusHostMappingNotFound] : identifier[LOG] . identifier[warning] ( literal[string] literal[string] , identifier[switch_ip] , identifier[host_id] ) keyword[return] keyword[if] keyword[not] identifier[vpc] . identifier[learned] : identifier[self] . identifier[driver] . identifier[delete_ch_grp_to_interface] ( identifier[switch_ip] , identifier[eth_type] , identifier[eth_port] , identifier[nexus_port] ) identifier[self] . identifier[driver] . 
identifier[delete_port_channel] ( identifier[switch_ip] , identifier[nexus_port] ) keyword[try] : identifier[nxos_db] . identifier[free_vpcid_for_switch] ( identifier[nexus_port] , identifier[switch_ip] ) identifier[LOG] . identifier[info] ( literal[string] literal[string] , identifier[nexus_port] , identifier[switch_ip] ) keyword[except] identifier[excep] . identifier[NexusVPCAllocNotFound] : identifier[LOG] . identifier[warning] ( literal[string] literal[string] , identifier[nexus_port] , identifier[switch_ip] )
def _delete_port_channel_resources(self, host_id, switch_ip, intf_type, nexus_port, port_id): """This determines if port channel id needs to be freed.""" # if this connection is not a port-channel, nothing to do. if intf_type != 'port-channel': return # depends on [control=['if'], data=[]] # Check if this driver created it and its no longer needed. try: vpc = nxos_db.get_switch_vpc_alloc(switch_ip, nexus_port) # depends on [control=['try'], data=[]] except excep.NexusVPCAllocNotFound: # This can occur for non-baremetal configured # port-channels. Nothing more to do. LOG.debug('Switch %s portchannel %s vpc entry not found in vpcid alloc table.', switch_ip, nexus_port) return # depends on [control=['except'], data=[]] # if this isn't one which was allocated or learned, # don't do any further processing. if not vpc.active: LOG.debug('Switch %s portchannel %s vpc entry not active.', switch_ip, nexus_port) return # depends on [control=['if'], data=[]] # Is this port-channel still in use? # If so, nothing more to do. try: nxos_db.get_nexus_switchport_binding(port_id, switch_ip) LOG.debug('Switch %s portchannel %s port entries in use. Skipping port-channel clean-up.', switch_ip, nexus_port) return # depends on [control=['try'], data=[]] except excep.NexusPortBindingNotFound: pass # depends on [control=['except'], data=[]] # need to get ethernet interface name try: mapping = nxos_db.get_switch_and_host_mappings(host_id, switch_ip) (eth_type, eth_port) = nexus_help.split_interface_name(mapping[0].if_id) # depends on [control=['try'], data=[]] except excep.NexusHostMappingNotFound: LOG.warning('Switch %s hostid %s host_mapping not found. Skipping port-channel clean-up.', switch_ip, host_id) return # depends on [control=['except'], data=[]] # Remove the channel group from ethernet interface # and remove port channel from this switch. 
if not vpc.learned: self.driver.delete_ch_grp_to_interface(switch_ip, eth_type, eth_port, nexus_port) self.driver.delete_port_channel(switch_ip, nexus_port) # depends on [control=['if'], data=[]] try: nxos_db.free_vpcid_for_switch(nexus_port, switch_ip) LOG.info('Released portchannel %s resources for switch %s', nexus_port, switch_ip) # depends on [control=['try'], data=[]] except excep.NexusVPCAllocNotFound: # Not all learned port channels will be in this db when # they're outside the configured vpc_pool so # this exception may be possible. LOG.warning('Failed to free vpcid %s for switch %s since it did not exist in table.', nexus_port, switch_ip) # depends on [control=['except'], data=[]]
def _migrate_subresource(subresource, parent, migrations):
    """
    Migrate a resource's subresource

    Runs each migration over every subresource document stored on the
    parent and writes the migrated document back onto the parent.

    :param subresource: the perch.SubResource instance
    :param parent: the parent perch.Document instance
    :param migrations: the migrations for a resource
    """
    for key, doc in getattr(parent, subresource.parent_key, {}).items():
        for migration in migrations['migrations']:
            # NOTE(review): each migration re-creates the instance from the
            # same `doc`, so only the last migration's output (plus its
            # recursive sub-subresource migrations) is persisted below —
            # confirm this is the intended chaining behavior.
            instance = migration(subresource(id=key, **doc))
            # Record on the parent the version of the migration applied.
            parent._resource['doc_version'] = unicode(migration.version)
            # Recurse into this subresource's own subresources.
            instance = _migrate_subresources(
                instance,
                migrations['subresources']
            )
        doc = instance._resource
        # The id fields are implied by the parent's mapping key; strip
        # them before storing the document back on the parent.
        doc.pop('id', None)
        doc.pop(instance.resource_type + '_id', None)
        getattr(parent, subresource.parent_key)[key] = doc
    return parent
def function[_migrate_subresource, parameter[subresource, parent, migrations]]: constant[ Migrate a resource's subresource :param subresource: the perch.SubResource instance :param parent: the parent perch.Document instance :param migrations: the migrations for a resource ] for taget[tuple[[<ast.Name object at 0x7da1b141b880>, <ast.Name object at 0x7da1b1419d20>]]] in starred[call[call[name[getattr], parameter[name[parent], name[subresource].parent_key, dictionary[[], []]]].items, parameter[]]] begin[:] for taget[name[migration]] in starred[call[name[migrations]][constant[migrations]]] begin[:] variable[instance] assign[=] call[name[migration], parameter[call[name[subresource], parameter[]]]] call[name[parent]._resource][constant[doc_version]] assign[=] call[name[unicode], parameter[name[migration].version]] variable[instance] assign[=] call[name[_migrate_subresources], parameter[name[instance], call[name[migrations]][constant[subresources]]]] variable[doc] assign[=] name[instance]._resource call[name[doc].pop, parameter[constant[id], constant[None]]] call[name[doc].pop, parameter[binary_operation[name[instance].resource_type + constant[_id]], constant[None]]] call[call[name[getattr], parameter[name[parent], name[subresource].parent_key]]][name[key]] assign[=] name[doc] return[name[parent]]
keyword[def] identifier[_migrate_subresource] ( identifier[subresource] , identifier[parent] , identifier[migrations] ): literal[string] keyword[for] identifier[key] , identifier[doc] keyword[in] identifier[getattr] ( identifier[parent] , identifier[subresource] . identifier[parent_key] ,{}). identifier[items] (): keyword[for] identifier[migration] keyword[in] identifier[migrations] [ literal[string] ]: identifier[instance] = identifier[migration] ( identifier[subresource] ( identifier[id] = identifier[key] ,** identifier[doc] )) identifier[parent] . identifier[_resource] [ literal[string] ]= identifier[unicode] ( identifier[migration] . identifier[version] ) identifier[instance] = identifier[_migrate_subresources] ( identifier[instance] , identifier[migrations] [ literal[string] ] ) identifier[doc] = identifier[instance] . identifier[_resource] identifier[doc] . identifier[pop] ( literal[string] , keyword[None] ) identifier[doc] . identifier[pop] ( identifier[instance] . identifier[resource_type] + literal[string] , keyword[None] ) identifier[getattr] ( identifier[parent] , identifier[subresource] . identifier[parent_key] )[ identifier[key] ]= identifier[doc] keyword[return] identifier[parent]
def _migrate_subresource(subresource, parent, migrations): """ Migrate a resource's subresource :param subresource: the perch.SubResource instance :param parent: the parent perch.Document instance :param migrations: the migrations for a resource """ for (key, doc) in getattr(parent, subresource.parent_key, {}).items(): for migration in migrations['migrations']: instance = migration(subresource(id=key, **doc)) parent._resource['doc_version'] = unicode(migration.version) instance = _migrate_subresources(instance, migrations['subresources']) # depends on [control=['for'], data=['migration']] doc = instance._resource doc.pop('id', None) doc.pop(instance.resource_type + '_id', None) getattr(parent, subresource.parent_key)[key] = doc # depends on [control=['for'], data=[]] return parent
def term_counts(self):
    """
    Returns:
        OrderedDict: An ordered dictionary of term counts.
    """
    # Map each term to the number of entries recorded under it, then
    # hand the mapping to the shared sorting helper.
    counts = OrderedDict(
        (term, len(self.terms[term])) for term in self.terms
    )
    return utils.sort_dict(counts)
def function[term_counts, parameter[self]]: constant[ Returns: OrderedDict: An ordered dictionary of term counts. ] variable[counts] assign[=] call[name[OrderedDict], parameter[]] for taget[name[term]] in starred[name[self].terms] begin[:] call[name[counts]][name[term]] assign[=] call[name[len], parameter[call[name[self].terms][name[term]]]] return[call[name[utils].sort_dict, parameter[name[counts]]]]
keyword[def] identifier[term_counts] ( identifier[self] ): literal[string] identifier[counts] = identifier[OrderedDict] () keyword[for] identifier[term] keyword[in] identifier[self] . identifier[terms] : identifier[counts] [ identifier[term] ]= identifier[len] ( identifier[self] . identifier[terms] [ identifier[term] ]) keyword[return] identifier[utils] . identifier[sort_dict] ( identifier[counts] )
def term_counts(self): """ Returns: OrderedDict: An ordered dictionary of term counts. """ counts = OrderedDict() for term in self.terms: counts[term] = len(self.terms[term]) # depends on [control=['for'], data=['term']] return utils.sort_dict(counts)
def get_full_path(request, remove_querystrings=()):
    """Gets the current path, removing specified querystrings.

    Args:
        request: An HTTP request object exposing ``get_full_path()``
            (e.g. a Django ``HttpRequest``).
        remove_querystrings: Iterable of querystring parameter names to
            strip from the path. Defaults to removing nothing.

    Returns:
        str: The full path without the named parameters; the ``?`` is
        dropped entirely when no parameters remain.
    """
    path = request.get_full_path()
    # Nothing to strip, or no querystring present: return unchanged.
    if not remove_querystrings or '?' not in path:
        return path
    base, _, query = path.partition('?')
    # Keep every "name=value" (or bare "name") pair whose name is not in
    # the removal list. The previous regex approach neither escaped the
    # parameter name nor bounded the value match, so removing a parameter
    # also swallowed every parameter that followed it.
    removed = set(remove_querystrings)
    kept = [pair for pair in query.split('&')
            if pair.split('=', 1)[0] not in removed]
    return base + '?' + '&'.join(kept) if kept else base
def function[get_full_path, parameter[request, remove_querystrings]]: constant[Gets the current path, removing specified querstrings] variable[path] assign[=] call[name[request].get_full_path, parameter[]] for taget[name[qs]] in starred[name[remove_querystrings]] begin[:] variable[path] assign[=] call[name[re].sub, parameter[binary_operation[binary_operation[constant[&?] + name[qs]] + constant[=?(.+)?&?]], constant[], name[path]]] return[name[path]]
keyword[def] identifier[get_full_path] ( identifier[request] , identifier[remove_querystrings] =[]): literal[string] identifier[path] = identifier[request] . identifier[get_full_path] () keyword[for] identifier[qs] keyword[in] identifier[remove_querystrings] : identifier[path] = identifier[re] . identifier[sub] ( literal[string] + identifier[qs] + literal[string] , literal[string] , identifier[path] ) keyword[return] identifier[path]
def get_full_path(request, remove_querystrings=[]): """Gets the current path, removing specified querstrings""" path = request.get_full_path() for qs in remove_querystrings: path = re.sub('&?' + qs + '=?(.+)?&?', '', path) # depends on [control=['for'], data=['qs']] return path
def remove_adapter(widget_class, flavour=None):
    """Removes the given widget class information from the default set
    of adapters.

    If widget_class had been previously added by using add_adapter, the
    added adapter will be removed, restoring possibly previusly existing
    adapter(s). Notice that this function will remove only *one* adapter
    about given wiget_class (the first found in order), even if many are
    currently stored.

    @param flavour has to be used when the entry was added with a
    particular flavour.

    Returns True if one adapter was removed, False if no adapter was
    removed."""
    # Locate the first matching entry; None means no match was found.
    position = next(
        (idx for idx, entry in enumerate(__def_adapter)
         if widget_class == entry[WIDGET] and flavour == entry[FLAVOUR]),
        None)
    if position is None:
        return False
    del __def_adapter[position]
    return True
def function[remove_adapter, parameter[widget_class, flavour]]: constant[Removes the given widget class information from the default set of adapters. If widget_class had been previously added by using add_adapter, the added adapter will be removed, restoring possibly previusly existing adapter(s). Notice that this function will remove only *one* adapter about given wiget_class (the first found in order), even if many are currently stored. @param flavour has to be used when the entry was added with a particular flavour. Returns True if one adapter was removed, False if no adapter was removed.] for taget[tuple[[<ast.Name object at 0x7da1b16d5450>, <ast.Name object at 0x7da1b16d72e0>]]] in starred[call[name[enumerate], parameter[name[__def_adapter]]]] begin[:] if <ast.BoolOp object at 0x7da1b16d4880> begin[:] <ast.Delete object at 0x7da1b16d67d0> return[constant[True]] return[constant[False]]
keyword[def] identifier[remove_adapter] ( identifier[widget_class] , identifier[flavour] = keyword[None] ): literal[string] keyword[for] identifier[it] , identifier[tu] keyword[in] identifier[enumerate] ( identifier[__def_adapter] ): keyword[if] ( identifier[widget_class] == identifier[tu] [ identifier[WIDGET] ] keyword[and] identifier[flavour] == identifier[tu] [ identifier[FLAVOUR] ]): keyword[del] identifier[__def_adapter] [ identifier[it] ] keyword[return] keyword[True] keyword[return] keyword[False]
def remove_adapter(widget_class, flavour=None): """Removes the given widget class information from the default set of adapters. If widget_class had been previously added by using add_adapter, the added adapter will be removed, restoring possibly previusly existing adapter(s). Notice that this function will remove only *one* adapter about given wiget_class (the first found in order), even if many are currently stored. @param flavour has to be used when the entry was added with a particular flavour. Returns True if one adapter was removed, False if no adapter was removed.""" for (it, tu) in enumerate(__def_adapter): if widget_class == tu[WIDGET] and flavour == tu[FLAVOUR]: del __def_adapter[it] return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return False
def safe_zip(*args):
    """like zip but with these properties: - returns a list, rather than an
    iterator. This is the old Python2 zip behavior. - a guarantee that all
    arguments are the same length. (normal zip silently drops entries to
    make them the same length)
    """
    # Compare every argument's length against the first one's; raising
    # beats zip's default behavior of silently truncating to the shortest.
    expected = len(args[0])
    lengths = [len(arg) for arg in args]
    if lengths != [expected] * len(args):
        raise ValueError("Lengths of arguments do not match: "
                         + str(lengths))
    return list(zip(*args))
def function[safe_zip, parameter[]]: constant[like zip but with these properties: - returns a list, rather than an iterator. This is the old Python2 zip behavior. - a guarantee that all arguments are the same length. (normal zip silently drops entries to make them the same length) ] variable[length] assign[=] call[name[len], parameter[call[name[args]][constant[0]]]] if <ast.UnaryOp object at 0x7da204961a50> begin[:] <ast.Raise object at 0x7da204961120> return[call[name[list], parameter[call[name[zip], parameter[<ast.Starred object at 0x7da204963160>]]]]]
keyword[def] identifier[safe_zip] (* identifier[args] ): literal[string] identifier[length] = identifier[len] ( identifier[args] [ literal[int] ]) keyword[if] keyword[not] identifier[all] ( identifier[len] ( identifier[arg] )== identifier[length] keyword[for] identifier[arg] keyword[in] identifier[args] ): keyword[raise] identifier[ValueError] ( literal[string] + identifier[str] ([ identifier[len] ( identifier[arg] ) keyword[for] identifier[arg] keyword[in] identifier[args] ])) keyword[return] identifier[list] ( identifier[zip] (* identifier[args] ))
def safe_zip(*args): """like zip but with these properties: - returns a list, rather than an iterator. This is the old Python2 zip behavior. - a guarantee that all arguments are the same length. (normal zip silently drops entries to make them the same length) """ length = len(args[0]) if not all((len(arg) == length for arg in args)): raise ValueError('Lengths of arguments do not match: ' + str([len(arg) for arg in args])) # depends on [control=['if'], data=[]] return list(zip(*args))
def validate_username_for_new_person(username):
    """ Validate the new username for a new person.

    If the username is invalid or in use, raises
    :py:exc:`UsernameInvalid` or :py:exc:`UsernameTaken`.

    :param username: Username to validate.
    """
    # Syntactic validation first; raises UsernameInvalid on failure.
    validate_username(username)

    # Reject usernames already held by a person or an account record.
    for model in (Person, Account):
        if model.objects.filter(username__exact=username).count() >= 1:
            raise UsernameTaken(six.u(
                'The username is already taken. Please choose another. '
                'If this was the name of your old account please email %s')
                % settings.ACCOUNTS_EMAIL)

    # Check account datastore, in case username created outside Karaage.
    if account_exists(username):
        raise UsernameTaken(
            six.u('Username is already in external account datastore.'))

    return username
def function[validate_username_for_new_person, parameter[username]]: constant[ Validate the new username for a new person. If the username is invalid or in use, raises :py:exc:`UsernameInvalid` or :py:exc:`UsernameTaken`. :param username: Username to validate. ] call[name[validate_username], parameter[name[username]]] variable[count] assign[=] call[call[name[Person].objects.filter, parameter[]].count, parameter[]] if compare[name[count] greater_or_equal[>=] constant[1]] begin[:] <ast.Raise object at 0x7da1b0240b20> variable[count] assign[=] call[call[name[Account].objects.filter, parameter[]].count, parameter[]] if compare[name[count] greater_or_equal[>=] constant[1]] begin[:] <ast.Raise object at 0x7da1b02d9720> if call[name[account_exists], parameter[name[username]]] begin[:] <ast.Raise object at 0x7da20c991c30> return[name[username]]
keyword[def] identifier[validate_username_for_new_person] ( identifier[username] ): literal[string] identifier[validate_username] ( identifier[username] ) identifier[count] = identifier[Person] . identifier[objects] . identifier[filter] ( identifier[username__exact] = identifier[username] ). identifier[count] () keyword[if] identifier[count] >= literal[int] : keyword[raise] identifier[UsernameTaken] ( identifier[six] . identifier[u] ( literal[string] literal[string] ) % identifier[settings] . identifier[ACCOUNTS_EMAIL] ) identifier[count] = identifier[Account] . identifier[objects] . identifier[filter] ( identifier[username__exact] = identifier[username] ). identifier[count] () keyword[if] identifier[count] >= literal[int] : keyword[raise] identifier[UsernameTaken] ( identifier[six] . identifier[u] ( literal[string] literal[string] ) % identifier[settings] . identifier[ACCOUNTS_EMAIL] ) keyword[if] identifier[account_exists] ( identifier[username] ): keyword[raise] identifier[UsernameTaken] ( identifier[six] . identifier[u] ( literal[string] )) keyword[return] identifier[username]
def validate_username_for_new_person(username): """ Validate the new username for a new person. If the username is invalid or in use, raises :py:exc:`UsernameInvalid` or :py:exc:`UsernameTaken`. :param username: Username to validate. """ # is the username valid? validate_username(username) # Check for existing people count = Person.objects.filter(username__exact=username).count() if count >= 1: raise UsernameTaken(six.u('The username is already taken. Please choose another. If this was the name of your old account please email %s') % settings.ACCOUNTS_EMAIL) # depends on [control=['if'], data=[]] # Check for existing accounts count = Account.objects.filter(username__exact=username).count() if count >= 1: raise UsernameTaken(six.u('The username is already taken. Please choose another. If this was the name of your old account please email %s') % settings.ACCOUNTS_EMAIL) # depends on [control=['if'], data=[]] # Check account datastore, in case username created outside Karaage. if account_exists(username): raise UsernameTaken(six.u('Username is already in external account datastore.')) # depends on [control=['if'], data=[]] return username
def node_query(self, node):
    """ Return the query for the gql call node """
    if not isinstance(node, ast.Call):
        raise TypeError(type(node))
    # gql() must have been invoked with at least one argument.
    assert node.args
    query_arg = node.args[0]
    # Only a literal string argument yields a query; anything else
    # (names, formatted strings, ...) is skipped by returning None.
    if not isinstance(query_arg, ast.Str):
        return None
    return query_arg.s
def function[node_query, parameter[self, node]]: constant[ Return the query for the gql call node ] if call[name[isinstance], parameter[name[node], name[ast].Call]] begin[:] assert[name[node].args] variable[arg] assign[=] call[name[node].args][constant[0]] if <ast.UnaryOp object at 0x7da20c6c7be0> begin[:] return[None] return[name[arg].s]
keyword[def] identifier[node_query] ( identifier[self] , identifier[node] ): literal[string] keyword[if] identifier[isinstance] ( identifier[node] , identifier[ast] . identifier[Call] ): keyword[assert] identifier[node] . identifier[args] identifier[arg] = identifier[node] . identifier[args] [ literal[int] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[arg] , identifier[ast] . identifier[Str] ): keyword[return] keyword[else] : keyword[raise] identifier[TypeError] ( identifier[type] ( identifier[node] )) keyword[return] identifier[arg] . identifier[s]
def node_query(self, node): """ Return the query for the gql call node """ if isinstance(node, ast.Call): assert node.args arg = node.args[0] if not isinstance(arg, ast.Str): return # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise TypeError(type(node)) return arg.s
def intervals(annotation, **kwargs):
    '''Plotting wrapper for labeled intervals'''
    # Unpack the annotation into (interval_times, labels) and forward
    # everything, plus any plotting kwargs, to mir_eval's renderer.
    interval_times, interval_labels = annotation.to_interval_values()
    return mir_eval.display.labeled_intervals(interval_times,
                                              interval_labels,
                                              **kwargs)
def function[intervals, parameter[annotation]]: constant[Plotting wrapper for labeled intervals] <ast.Tuple object at 0x7da1b004c580> assign[=] call[name[annotation].to_interval_values, parameter[]] return[call[name[mir_eval].display.labeled_intervals, parameter[name[times], name[labels]]]]
keyword[def] identifier[intervals] ( identifier[annotation] ,** identifier[kwargs] ): literal[string] identifier[times] , identifier[labels] = identifier[annotation] . identifier[to_interval_values] () keyword[return] identifier[mir_eval] . identifier[display] . identifier[labeled_intervals] ( identifier[times] , identifier[labels] ,** identifier[kwargs] )
def intervals(annotation, **kwargs): """Plotting wrapper for labeled intervals""" (times, labels) = annotation.to_interval_values() return mir_eval.display.labeled_intervals(times, labels, **kwargs)
def provider(function):
    """Decorator for :class:`Module` methods, registering a provider of a type.

    >>> class MyModule(Module):
    ...     @provider
    ...     def provide_name(self) -> str:
    ...             return 'Bob'

    @provider-decoration implies @inject so you can omit it and things will
    work just the same:

    >>> class MyModule2(Module):
    ...     def configure(self, binder):
    ...         binder.bind(int, to=654)
    ...
    ...     @provider
    ...     def provide_str(self, i: int) -> str:
    ...         return str(i)
    ...
    >>> injector = Injector(MyModule2)
    >>> injector.get(str)
    '654'
    """
    # The provided interface is whatever the return annotation names; any
    # scope attached earlier by a scope decorator is carried over as-is.
    scope = getattr(function, '__scope__', None)
    return_annotation = inspect.getfullargspec(function).annotations['return']
    function.__binding__ = Binding(return_annotation, inject(function), scope)
    return function
def function[provider, parameter[function]]: constant[Decorator for :class:`Module` methods, registering a provider of a type. >>> class MyModule(Module): ... @provider ... def provide_name(self) -> str: ... return 'Bob' @provider-decoration implies @inject so you can omit it and things will work just the same: >>> class MyModule2(Module): ... def configure(self, binder): ... binder.bind(int, to=654) ... ... @provider ... def provide_str(self, i: int) -> str: ... return str(i) ... >>> injector = Injector(MyModule2) >>> injector.get(str) '654' ] variable[scope_] assign[=] call[name[getattr], parameter[name[function], constant[__scope__], constant[None]]] variable[annotations] assign[=] call[name[inspect].getfullargspec, parameter[name[function]]].annotations variable[return_type] assign[=] call[name[annotations]][constant[return]] name[function].__binding__ assign[=] call[name[Binding], parameter[name[return_type], call[name[inject], parameter[name[function]]], name[scope_]]] return[name[function]]
keyword[def] identifier[provider] ( identifier[function] ): literal[string] identifier[scope_] = identifier[getattr] ( identifier[function] , literal[string] , keyword[None] ) identifier[annotations] = identifier[inspect] . identifier[getfullargspec] ( identifier[function] ). identifier[annotations] identifier[return_type] = identifier[annotations] [ literal[string] ] identifier[function] . identifier[__binding__] = identifier[Binding] ( identifier[return_type] , identifier[inject] ( identifier[function] ), identifier[scope_] ) keyword[return] identifier[function]
def provider(function): """Decorator for :class:`Module` methods, registering a provider of a type. >>> class MyModule(Module): ... @provider ... def provide_name(self) -> str: ... return 'Bob' @provider-decoration implies @inject so you can omit it and things will work just the same: >>> class MyModule2(Module): ... def configure(self, binder): ... binder.bind(int, to=654) ... ... @provider ... def provide_str(self, i: int) -> str: ... return str(i) ... >>> injector = Injector(MyModule2) >>> injector.get(str) '654' """ scope_ = getattr(function, '__scope__', None) annotations = inspect.getfullargspec(function).annotations return_type = annotations['return'] function.__binding__ = Binding(return_type, inject(function), scope_) return function
def create_ramp_plan(err, ramp):
    """
    Formulate and execute on a plan to slowly add heat or cooling to the system

    `err` initial error (PV - SP)
    `ramp` the size of the ramp

    A ramp plan might yield MVs in this order at every timestep:

        [5, 0, 4, 0, 3, 0, 2, 0, 1]

    where err == 5 + 4 + 3 + 2 + 1
    """
    if ramp == 1:
        # basecase: apply the whole correction in a single step
        yield int(err)
        while True:
            yield 0

    # Choose n such that np.arange(n).sum() == |err|, i.e. the error can be
    # split into shrinking chunks (n-1) + (n-2) + ... + 1.
    # arange(n).sum() == n*(n-1)/2  -->  solve 0.5*n**2 - 0.5*n - |err| == 0
    # via the quadratic formula (the previous np.roots([.5, -.5, 0]) call
    # dropped the |err| term entirely, so n was always 1 and the plan
    # degenerated to a single +/-1 step).
    n = int(round((1 + np.sqrt(1 + 8 * abs(err))) / 2))
    niter = int(ramp // (2 * n))  # 2 means add all MV in first half of ramp
    MV = n - 1  # largest chunk; chunks shrink by 1 so they sum to ~|err|
    log.info('Initializing a ramp plan', extra=dict(
        ramp_size=ramp, err=err, niter=niter))
    for _ in range(n):
        # Spread this chunk over niter steps, then flush any integer
        # remainder so every chunk contributes exactly MV (sign-adjusted).
        budget = MV
        for _ in range(niter):
            budget -= MV // niter
            yield int(np.sign(err) * (MV // niter))
        yield int(budget * np.sign(err))
        MV -= 1
    while True:
        yield 0
def function[create_ramp_plan, parameter[err, ramp]]: constant[ Formulate and execute on a plan to slowly add heat or cooling to the system `err` initial error (PV - SP) `ramp` the size of the ramp A ramp plan might yield MVs in this order at every timestep: [5, 0, 4, 0, 3, 0, 2, 0, 1] where err == 5 + 4 + 3 + 2 + 1 ] if compare[name[ramp] equal[==] constant[1]] begin[:] <ast.Yield object at 0x7da20c7cb130> while constant[True] begin[:] <ast.Yield object at 0x7da20c7960e0> variable[n] assign[=] call[name[np].abs, parameter[call[call[name[np].roots, parameter[list[[<ast.Constant object at 0x7da20c7941f0>, <ast.UnaryOp object at 0x7da20c795f00>, <ast.Constant object at 0x7da20c7950f0>]]]].max, parameter[]]]] variable[niter] assign[=] call[name[int], parameter[binary_operation[name[ramp] <ast.FloorDiv object at 0x7da2590d6bc0> binary_operation[constant[2] * name[n]]]]] variable[MV] assign[=] name[n] call[name[log].info, parameter[constant[Initializing a ramp plan]]] for taget[name[x]] in starred[call[name[range], parameter[call[name[int], parameter[name[n]]]]]] begin[:] variable[budget] assign[=] name[MV] for taget[name[x]] in starred[call[name[range], parameter[name[niter]]]] begin[:] <ast.AugAssign object at 0x7da2043456f0> <ast.Yield object at 0x7da204345ba0> <ast.Yield object at 0x7da2043467a0> <ast.AugAssign object at 0x7da204344e20> while constant[True] begin[:] <ast.Yield object at 0x7da204347b50>
keyword[def] identifier[create_ramp_plan] ( identifier[err] , identifier[ramp] ): literal[string] keyword[if] identifier[ramp] == literal[int] : keyword[yield] identifier[int] ( identifier[err] ) keyword[while] keyword[True] : keyword[yield] literal[int] identifier[n] = identifier[np] . identifier[abs] ( identifier[np] . identifier[roots] ([ literal[int] ,- literal[int] , literal[int] ]). identifier[max] ()) identifier[niter] = identifier[int] ( identifier[ramp] //( literal[int] * identifier[n] )) identifier[MV] = identifier[n] identifier[log] . identifier[info] ( literal[string] , identifier[extra] = identifier[dict] ( identifier[ramp_size] = identifier[ramp] , identifier[err] = identifier[err] , identifier[niter] = identifier[niter] )) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[int] ( identifier[n] )): identifier[budget] = identifier[MV] keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[niter] ): identifier[budget] -= identifier[MV] // identifier[niter] keyword[yield] identifier[int] ( identifier[np] . identifier[sign] ( identifier[err] )*( identifier[MV] // identifier[niter] )) keyword[yield] identifier[int] ( identifier[budget] * identifier[np] . identifier[sign] ( identifier[err] )) identifier[MV] -= literal[int] keyword[while] keyword[True] : keyword[yield] literal[int]
def create_ramp_plan(err, ramp): """ Formulate and execute on a plan to slowly add heat or cooling to the system `err` initial error (PV - SP) `ramp` the size of the ramp A ramp plan might yield MVs in this order at every timestep: [5, 0, 4, 0, 3, 0, 2, 0, 1] where err == 5 + 4 + 3 + 2 + 1 """ if ramp == 1: # basecase yield int(err) while True: yield 0 # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] # np.arange(n).sum() == err # --> solve for n # err = (n - 1) * (n // 2) == .5 * n**2 - .5 * n # 0 = n**2 - n --> solve for n n = np.abs(np.roots([0.5, -0.5, 0]).max()) niter = int(ramp // (2 * n)) # 2 means add all MV in first half of ramp MV = n log.info('Initializing a ramp plan', extra=dict(ramp_size=ramp, err=err, niter=niter)) for x in range(int(n)): budget = MV for x in range(niter): budget -= MV // niter yield int(np.sign(err) * (MV // niter)) # depends on [control=['for'], data=[]] yield int(budget * np.sign(err)) MV -= 1 # depends on [control=['for'], data=['x']] while True: yield 0 # depends on [control=['while'], data=[]]
def get_instance(name, provider=None): ''' Return details on an instance. Similar to the cloud action show_instance but returns only the instance details. CLI Example: .. code-block:: bash salt minionname cloud.get_instance myinstance SLS Example: .. code-block:: bash {{ salt['cloud.get_instance']('myinstance')['mac_address'] }} ''' data = action(fun='show_instance', names=[name], provider=provider) info = salt.utils.data.simple_types_filter(data) try: # get the first: [alias][driver][vm_name] info = next(six.itervalues(next(six.itervalues(next(six.itervalues(info)))))) except AttributeError: return None return info
def function[get_instance, parameter[name, provider]]: constant[ Return details on an instance. Similar to the cloud action show_instance but returns only the instance details. CLI Example: .. code-block:: bash salt minionname cloud.get_instance myinstance SLS Example: .. code-block:: bash {{ salt['cloud.get_instance']('myinstance')['mac_address'] }} ] variable[data] assign[=] call[name[action], parameter[]] variable[info] assign[=] call[name[salt].utils.data.simple_types_filter, parameter[name[data]]] <ast.Try object at 0x7da18dc049a0> return[name[info]]
keyword[def] identifier[get_instance] ( identifier[name] , identifier[provider] = keyword[None] ): literal[string] identifier[data] = identifier[action] ( identifier[fun] = literal[string] , identifier[names] =[ identifier[name] ], identifier[provider] = identifier[provider] ) identifier[info] = identifier[salt] . identifier[utils] . identifier[data] . identifier[simple_types_filter] ( identifier[data] ) keyword[try] : identifier[info] = identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[next] ( identifier[six] . identifier[itervalues] ( identifier[info] )))))) keyword[except] identifier[AttributeError] : keyword[return] keyword[None] keyword[return] identifier[info]
def get_instance(name, provider=None): """ Return details on an instance. Similar to the cloud action show_instance but returns only the instance details. CLI Example: .. code-block:: bash salt minionname cloud.get_instance myinstance SLS Example: .. code-block:: bash {{ salt['cloud.get_instance']('myinstance')['mac_address'] }} """ data = action(fun='show_instance', names=[name], provider=provider) info = salt.utils.data.simple_types_filter(data) try: # get the first: [alias][driver][vm_name] info = next(six.itervalues(next(six.itervalues(next(six.itervalues(info)))))) # depends on [control=['try'], data=[]] except AttributeError: return None # depends on [control=['except'], data=[]] return info
def fetch(self): """ Fetch a DocumentPermissionInstance :returns: Fetched DocumentPermissionInstance :rtype: twilio.rest.sync.v1.service.document.document_permission.DocumentPermissionInstance """ params = values.of({}) payload = self._version.fetch( 'GET', self._uri, params=params, ) return DocumentPermissionInstance( self._version, payload, service_sid=self._solution['service_sid'], document_sid=self._solution['document_sid'], identity=self._solution['identity'], )
def function[fetch, parameter[self]]: constant[ Fetch a DocumentPermissionInstance :returns: Fetched DocumentPermissionInstance :rtype: twilio.rest.sync.v1.service.document.document_permission.DocumentPermissionInstance ] variable[params] assign[=] call[name[values].of, parameter[dictionary[[], []]]] variable[payload] assign[=] call[name[self]._version.fetch, parameter[constant[GET], name[self]._uri]] return[call[name[DocumentPermissionInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[fetch] ( identifier[self] ): literal[string] identifier[params] = identifier[values] . identifier[of] ({}) identifier[payload] = identifier[self] . identifier[_version] . identifier[fetch] ( literal[string] , identifier[self] . identifier[_uri] , identifier[params] = identifier[params] , ) keyword[return] identifier[DocumentPermissionInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[service_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[document_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[identity] = identifier[self] . identifier[_solution] [ literal[string] ], )
def fetch(self): """ Fetch a DocumentPermissionInstance :returns: Fetched DocumentPermissionInstance :rtype: twilio.rest.sync.v1.service.document.document_permission.DocumentPermissionInstance """ params = values.of({}) payload = self._version.fetch('GET', self._uri, params=params) return DocumentPermissionInstance(self._version, payload, service_sid=self._solution['service_sid'], document_sid=self._solution['document_sid'], identity=self._solution['identity'])
def md5_for_file(f, block_size=2 ** 20): """Generate an MD5 has for a possibly large file by breaking it into chunks.""" md5 = hashlib.md5() try: # Guess that f is a FLO. f.seek(0) return md5_for_stream(f, block_size=block_size) except AttributeError: # Nope, not a FLO. Maybe string? file_name = f with open(file_name, 'rb') as f: return md5_for_file(f, block_size)
def function[md5_for_file, parameter[f, block_size]]: constant[Generate an MD5 has for a possibly large file by breaking it into chunks.] variable[md5] assign[=] call[name[hashlib].md5, parameter[]] <ast.Try object at 0x7da18f58fcd0>
keyword[def] identifier[md5_for_file] ( identifier[f] , identifier[block_size] = literal[int] ** literal[int] ): literal[string] identifier[md5] = identifier[hashlib] . identifier[md5] () keyword[try] : identifier[f] . identifier[seek] ( literal[int] ) keyword[return] identifier[md5_for_stream] ( identifier[f] , identifier[block_size] = identifier[block_size] ) keyword[except] identifier[AttributeError] : identifier[file_name] = identifier[f] keyword[with] identifier[open] ( identifier[file_name] , literal[string] ) keyword[as] identifier[f] : keyword[return] identifier[md5_for_file] ( identifier[f] , identifier[block_size] )
def md5_for_file(f, block_size=2 ** 20): """Generate an MD5 has for a possibly large file by breaking it into chunks.""" md5 = hashlib.md5() try: # Guess that f is a FLO. f.seek(0) return md5_for_stream(f, block_size=block_size) # depends on [control=['try'], data=[]] except AttributeError: # Nope, not a FLO. Maybe string? file_name = f with open(file_name, 'rb') as f: return md5_for_file(f, block_size) # depends on [control=['with'], data=['f']] # depends on [control=['except'], data=[]]
def _GetParsersFromPresetCategory(cls, category): """Retrieves the parser names of specific preset category. Args: category (str): parser preset categories. Returns: list[str]: parser names in alphabetical order. """ preset_definition = cls._presets.GetPresetByName(category) if preset_definition is None: return [] preset_names = cls._presets.GetNames() parser_names = set() for element_name in preset_definition.parsers: if element_name in preset_names: category_parser_names = cls._GetParsersFromPresetCategory(element_name) parser_names.update(category_parser_names) else: parser_names.add(element_name) return sorted(parser_names)
def function[_GetParsersFromPresetCategory, parameter[cls, category]]: constant[Retrieves the parser names of specific preset category. Args: category (str): parser preset categories. Returns: list[str]: parser names in alphabetical order. ] variable[preset_definition] assign[=] call[name[cls]._presets.GetPresetByName, parameter[name[category]]] if compare[name[preset_definition] is constant[None]] begin[:] return[list[[]]] variable[preset_names] assign[=] call[name[cls]._presets.GetNames, parameter[]] variable[parser_names] assign[=] call[name[set], parameter[]] for taget[name[element_name]] in starred[name[preset_definition].parsers] begin[:] if compare[name[element_name] in name[preset_names]] begin[:] variable[category_parser_names] assign[=] call[name[cls]._GetParsersFromPresetCategory, parameter[name[element_name]]] call[name[parser_names].update, parameter[name[category_parser_names]]] return[call[name[sorted], parameter[name[parser_names]]]]
keyword[def] identifier[_GetParsersFromPresetCategory] ( identifier[cls] , identifier[category] ): literal[string] identifier[preset_definition] = identifier[cls] . identifier[_presets] . identifier[GetPresetByName] ( identifier[category] ) keyword[if] identifier[preset_definition] keyword[is] keyword[None] : keyword[return] [] identifier[preset_names] = identifier[cls] . identifier[_presets] . identifier[GetNames] () identifier[parser_names] = identifier[set] () keyword[for] identifier[element_name] keyword[in] identifier[preset_definition] . identifier[parsers] : keyword[if] identifier[element_name] keyword[in] identifier[preset_names] : identifier[category_parser_names] = identifier[cls] . identifier[_GetParsersFromPresetCategory] ( identifier[element_name] ) identifier[parser_names] . identifier[update] ( identifier[category_parser_names] ) keyword[else] : identifier[parser_names] . identifier[add] ( identifier[element_name] ) keyword[return] identifier[sorted] ( identifier[parser_names] )
def _GetParsersFromPresetCategory(cls, category): """Retrieves the parser names of specific preset category. Args: category (str): parser preset categories. Returns: list[str]: parser names in alphabetical order. """ preset_definition = cls._presets.GetPresetByName(category) if preset_definition is None: return [] # depends on [control=['if'], data=[]] preset_names = cls._presets.GetNames() parser_names = set() for element_name in preset_definition.parsers: if element_name in preset_names: category_parser_names = cls._GetParsersFromPresetCategory(element_name) parser_names.update(category_parser_names) # depends on [control=['if'], data=['element_name']] else: parser_names.add(element_name) # depends on [control=['for'], data=['element_name']] return sorted(parser_names)
def calculate_single_terms(self): """Apply all methods stored in the hidden attribute `PART_ODE_METHODS`. >>> from hydpy.models.test_v1 import * >>> parameterstep() >>> k(0.25) >>> states.s = 1.0 >>> model.calculate_single_terms() >>> fluxes.q q(0.25) """ self.numvars.nmb_calls = self.numvars.nmb_calls+1 for method in self.PART_ODE_METHODS: method(self)
def function[calculate_single_terms, parameter[self]]: constant[Apply all methods stored in the hidden attribute `PART_ODE_METHODS`. >>> from hydpy.models.test_v1 import * >>> parameterstep() >>> k(0.25) >>> states.s = 1.0 >>> model.calculate_single_terms() >>> fluxes.q q(0.25) ] name[self].numvars.nmb_calls assign[=] binary_operation[name[self].numvars.nmb_calls + constant[1]] for taget[name[method]] in starred[name[self].PART_ODE_METHODS] begin[:] call[name[method], parameter[name[self]]]
keyword[def] identifier[calculate_single_terms] ( identifier[self] ): literal[string] identifier[self] . identifier[numvars] . identifier[nmb_calls] = identifier[self] . identifier[numvars] . identifier[nmb_calls] + literal[int] keyword[for] identifier[method] keyword[in] identifier[self] . identifier[PART_ODE_METHODS] : identifier[method] ( identifier[self] )
def calculate_single_terms(self): """Apply all methods stored in the hidden attribute `PART_ODE_METHODS`. >>> from hydpy.models.test_v1 import * >>> parameterstep() >>> k(0.25) >>> states.s = 1.0 >>> model.calculate_single_terms() >>> fluxes.q q(0.25) """ self.numvars.nmb_calls = self.numvars.nmb_calls + 1 for method in self.PART_ODE_METHODS: method(self) # depends on [control=['for'], data=['method']]
def _jog(self, axis, direction, step): """ Move the pipette on `axis` in `direction` by `step` and update the position tracker """ jog(axis, direction, step, self.hardware, self._current_mount) self.current_position = self._position() return 'Jog: {}'.format([axis, str(direction), str(step)])
def function[_jog, parameter[self, axis, direction, step]]: constant[ Move the pipette on `axis` in `direction` by `step` and update the position tracker ] call[name[jog], parameter[name[axis], name[direction], name[step], name[self].hardware, name[self]._current_mount]] name[self].current_position assign[=] call[name[self]._position, parameter[]] return[call[constant[Jog: {}].format, parameter[list[[<ast.Name object at 0x7da204344700>, <ast.Call object at 0x7da204346d40>, <ast.Call object at 0x7da204345f00>]]]]]
keyword[def] identifier[_jog] ( identifier[self] , identifier[axis] , identifier[direction] , identifier[step] ): literal[string] identifier[jog] ( identifier[axis] , identifier[direction] , identifier[step] , identifier[self] . identifier[hardware] , identifier[self] . identifier[_current_mount] ) identifier[self] . identifier[current_position] = identifier[self] . identifier[_position] () keyword[return] literal[string] . identifier[format] ([ identifier[axis] , identifier[str] ( identifier[direction] ), identifier[str] ( identifier[step] )])
def _jog(self, axis, direction, step): """ Move the pipette on `axis` in `direction` by `step` and update the position tracker """ jog(axis, direction, step, self.hardware, self._current_mount) self.current_position = self._position() return 'Jog: {}'.format([axis, str(direction), str(step)])
def require_auth_captcha(self, response, query_params, login_form_data, http_session): """Resolve auth captcha case :param response: http response :param query_params: dict: response query params, for example: {'s': '0', 'email': 'my@email', 'dif': '1', 'role': 'fast', 'sid': '1'} :param login_form_data: dict :param http_session: requests.Session :return: :raise VkAuthError: """ logger.info('Captcha is needed. Query params: %s', query_params) form_text = response.text action_url = parse_form_action_url(form_text) logger.debug('form action url: %s', action_url) if not action_url: raise VkAuthError('Cannot find form action url') captcha_sid, captcha_url = parse_captcha_html( html=response.text, response_url=response.url) logger.info('Captcha url %s', captcha_url) login_form_data['captcha_sid'] = captcha_sid login_form_data['captcha_key'] = self.get_captcha_key(captcha_url) response = http_session.post(action_url, login_form_data) return response
def function[require_auth_captcha, parameter[self, response, query_params, login_form_data, http_session]]: constant[Resolve auth captcha case :param response: http response :param query_params: dict: response query params, for example: {'s': '0', 'email': 'my@email', 'dif': '1', 'role': 'fast', 'sid': '1'} :param login_form_data: dict :param http_session: requests.Session :return: :raise VkAuthError: ] call[name[logger].info, parameter[constant[Captcha is needed. Query params: %s], name[query_params]]] variable[form_text] assign[=] name[response].text variable[action_url] assign[=] call[name[parse_form_action_url], parameter[name[form_text]]] call[name[logger].debug, parameter[constant[form action url: %s], name[action_url]]] if <ast.UnaryOp object at 0x7da20c6e6b90> begin[:] <ast.Raise object at 0x7da20c6e7670> <ast.Tuple object at 0x7da20c6e6c80> assign[=] call[name[parse_captcha_html], parameter[]] call[name[logger].info, parameter[constant[Captcha url %s], name[captcha_url]]] call[name[login_form_data]][constant[captcha_sid]] assign[=] name[captcha_sid] call[name[login_form_data]][constant[captcha_key]] assign[=] call[name[self].get_captcha_key, parameter[name[captcha_url]]] variable[response] assign[=] call[name[http_session].post, parameter[name[action_url], name[login_form_data]]] return[name[response]]
keyword[def] identifier[require_auth_captcha] ( identifier[self] , identifier[response] , identifier[query_params] , identifier[login_form_data] , identifier[http_session] ): literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[query_params] ) identifier[form_text] = identifier[response] . identifier[text] identifier[action_url] = identifier[parse_form_action_url] ( identifier[form_text] ) identifier[logger] . identifier[debug] ( literal[string] , identifier[action_url] ) keyword[if] keyword[not] identifier[action_url] : keyword[raise] identifier[VkAuthError] ( literal[string] ) identifier[captcha_sid] , identifier[captcha_url] = identifier[parse_captcha_html] ( identifier[html] = identifier[response] . identifier[text] , identifier[response_url] = identifier[response] . identifier[url] ) identifier[logger] . identifier[info] ( literal[string] , identifier[captcha_url] ) identifier[login_form_data] [ literal[string] ]= identifier[captcha_sid] identifier[login_form_data] [ literal[string] ]= identifier[self] . identifier[get_captcha_key] ( identifier[captcha_url] ) identifier[response] = identifier[http_session] . identifier[post] ( identifier[action_url] , identifier[login_form_data] ) keyword[return] identifier[response]
def require_auth_captcha(self, response, query_params, login_form_data, http_session): """Resolve auth captcha case :param response: http response :param query_params: dict: response query params, for example: {'s': '0', 'email': 'my@email', 'dif': '1', 'role': 'fast', 'sid': '1'} :param login_form_data: dict :param http_session: requests.Session :return: :raise VkAuthError: """ logger.info('Captcha is needed. Query params: %s', query_params) form_text = response.text action_url = parse_form_action_url(form_text) logger.debug('form action url: %s', action_url) if not action_url: raise VkAuthError('Cannot find form action url') # depends on [control=['if'], data=[]] (captcha_sid, captcha_url) = parse_captcha_html(html=response.text, response_url=response.url) logger.info('Captcha url %s', captcha_url) login_form_data['captcha_sid'] = captcha_sid login_form_data['captcha_key'] = self.get_captcha_key(captcha_url) response = http_session.post(action_url, login_form_data) return response
def validate_bagit_file(bagit_path): """Check if a BagIt file is valid. Raises: ServiceFailure If the BagIt zip archive file fails any of the following checks: - Is a valid zip file. - The tag and manifest files are correctly formatted. - Contains all the files listed in the manifests. - The file checksums match the manifests. """ _assert_zip_file(bagit_path) bagit_zip = zipfile.ZipFile(bagit_path) manifest_info_list = _get_manifest_info_list(bagit_zip) _validate_checksums(bagit_zip, manifest_info_list) return True
def function[validate_bagit_file, parameter[bagit_path]]: constant[Check if a BagIt file is valid. Raises: ServiceFailure If the BagIt zip archive file fails any of the following checks: - Is a valid zip file. - The tag and manifest files are correctly formatted. - Contains all the files listed in the manifests. - The file checksums match the manifests. ] call[name[_assert_zip_file], parameter[name[bagit_path]]] variable[bagit_zip] assign[=] call[name[zipfile].ZipFile, parameter[name[bagit_path]]] variable[manifest_info_list] assign[=] call[name[_get_manifest_info_list], parameter[name[bagit_zip]]] call[name[_validate_checksums], parameter[name[bagit_zip], name[manifest_info_list]]] return[constant[True]]
keyword[def] identifier[validate_bagit_file] ( identifier[bagit_path] ): literal[string] identifier[_assert_zip_file] ( identifier[bagit_path] ) identifier[bagit_zip] = identifier[zipfile] . identifier[ZipFile] ( identifier[bagit_path] ) identifier[manifest_info_list] = identifier[_get_manifest_info_list] ( identifier[bagit_zip] ) identifier[_validate_checksums] ( identifier[bagit_zip] , identifier[manifest_info_list] ) keyword[return] keyword[True]
def validate_bagit_file(bagit_path): """Check if a BagIt file is valid. Raises: ServiceFailure If the BagIt zip archive file fails any of the following checks: - Is a valid zip file. - The tag and manifest files are correctly formatted. - Contains all the files listed in the manifests. - The file checksums match the manifests. """ _assert_zip_file(bagit_path) bagit_zip = zipfile.ZipFile(bagit_path) manifest_info_list = _get_manifest_info_list(bagit_zip) _validate_checksums(bagit_zip, manifest_info_list) return True
def _af_filter(data, in_file, out_file): """Soft-filter variants with AF below min_allele_fraction (appends "MinAF" to FILTER) """ min_freq = float(utils.get_in(data["config"], ("algorithm", "min_allele_fraction"), 10)) / 100.0 logger.debug("Filtering MuTect2 calls with allele fraction threshold of %s" % min_freq) ungz_out_file = "%s.vcf" % utils.splitext_plus(out_file)[0] if not utils.file_exists(ungz_out_file) and not utils.file_exists(ungz_out_file + ".gz"): with file_transaction(data, ungz_out_file) as tx_out_file: vcf = cyvcf2.VCF(in_file) vcf.add_filter_to_header({ 'ID': 'MinAF', 'Description': 'Allele frequency is lower than %s%% ' % (min_freq*100) + ( '(configured in bcbio as min_allele_fraction)' if utils.get_in(data["config"], ("algorithm", "min_allele_fraction")) else '(default threshold in bcbio; override with min_allele_fraction in the algorithm section)')}) w = cyvcf2.Writer(tx_out_file, vcf) # GATK 3.x can produce VCFs without sample names for empty VCFs try: tumor_index = vcf.samples.index(dd.get_sample_name(data)) except ValueError: tumor_index = None for rec in vcf: if tumor_index is not None and np.all(rec.format('AF')[tumor_index] < min_freq): vcfutils.cyvcf_add_filter(rec, 'MinAF') w.write_record(rec) w.close() return vcfutils.bgzip_and_index(ungz_out_file, data["config"])
def function[_af_filter, parameter[data, in_file, out_file]]: constant[Soft-filter variants with AF below min_allele_fraction (appends "MinAF" to FILTER) ] variable[min_freq] assign[=] binary_operation[call[name[float], parameter[call[name[utils].get_in, parameter[call[name[data]][constant[config]], tuple[[<ast.Constant object at 0x7da18bcc91b0>, <ast.Constant object at 0x7da18bcc9720>]], constant[10]]]]] / constant[100.0]] call[name[logger].debug, parameter[binary_operation[constant[Filtering MuTect2 calls with allele fraction threshold of %s] <ast.Mod object at 0x7da2590d6920> name[min_freq]]]] variable[ungz_out_file] assign[=] binary_operation[constant[%s.vcf] <ast.Mod object at 0x7da2590d6920> call[call[name[utils].splitext_plus, parameter[name[out_file]]]][constant[0]]] if <ast.BoolOp object at 0x7da18f09e5c0> begin[:] with call[name[file_transaction], parameter[name[data], name[ungz_out_file]]] begin[:] variable[vcf] assign[=] call[name[cyvcf2].VCF, parameter[name[in_file]]] call[name[vcf].add_filter_to_header, parameter[dictionary[[<ast.Constant object at 0x7da18f09cb80>, <ast.Constant object at 0x7da18f09ca30>], [<ast.Constant object at 0x7da18f09ccd0>, <ast.BinOp object at 0x7da18f09ed40>]]]] variable[w] assign[=] call[name[cyvcf2].Writer, parameter[name[tx_out_file], name[vcf]]] <ast.Try object at 0x7da18f09ffa0> for taget[name[rec]] in starred[name[vcf]] begin[:] if <ast.BoolOp object at 0x7da1b17125c0> begin[:] call[name[vcfutils].cyvcf_add_filter, parameter[name[rec], constant[MinAF]]] call[name[w].write_record, parameter[name[rec]]] call[name[w].close, parameter[]] return[call[name[vcfutils].bgzip_and_index, parameter[name[ungz_out_file], call[name[data]][constant[config]]]]]
keyword[def] identifier[_af_filter] ( identifier[data] , identifier[in_file] , identifier[out_file] ): literal[string] identifier[min_freq] = identifier[float] ( identifier[utils] . identifier[get_in] ( identifier[data] [ literal[string] ],( literal[string] , literal[string] ), literal[int] ))/ literal[int] identifier[logger] . identifier[debug] ( literal[string] % identifier[min_freq] ) identifier[ungz_out_file] = literal[string] % identifier[utils] . identifier[splitext_plus] ( identifier[out_file] )[ literal[int] ] keyword[if] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[ungz_out_file] ) keyword[and] keyword[not] identifier[utils] . identifier[file_exists] ( identifier[ungz_out_file] + literal[string] ): keyword[with] identifier[file_transaction] ( identifier[data] , identifier[ungz_out_file] ) keyword[as] identifier[tx_out_file] : identifier[vcf] = identifier[cyvcf2] . identifier[VCF] ( identifier[in_file] ) identifier[vcf] . identifier[add_filter_to_header] ({ literal[string] : literal[string] , literal[string] : literal[string] %( identifier[min_freq] * literal[int] )+( literal[string] keyword[if] identifier[utils] . identifier[get_in] ( identifier[data] [ literal[string] ],( literal[string] , literal[string] )) keyword[else] literal[string] )}) identifier[w] = identifier[cyvcf2] . identifier[Writer] ( identifier[tx_out_file] , identifier[vcf] ) keyword[try] : identifier[tumor_index] = identifier[vcf] . identifier[samples] . identifier[index] ( identifier[dd] . identifier[get_sample_name] ( identifier[data] )) keyword[except] identifier[ValueError] : identifier[tumor_index] = keyword[None] keyword[for] identifier[rec] keyword[in] identifier[vcf] : keyword[if] identifier[tumor_index] keyword[is] keyword[not] keyword[None] keyword[and] identifier[np] . identifier[all] ( identifier[rec] . identifier[format] ( literal[string] )[ identifier[tumor_index] ]< identifier[min_freq] ): identifier[vcfutils] . 
identifier[cyvcf_add_filter] ( identifier[rec] , literal[string] ) identifier[w] . identifier[write_record] ( identifier[rec] ) identifier[w] . identifier[close] () keyword[return] identifier[vcfutils] . identifier[bgzip_and_index] ( identifier[ungz_out_file] , identifier[data] [ literal[string] ])
def _af_filter(data, in_file, out_file): """Soft-filter variants with AF below min_allele_fraction (appends "MinAF" to FILTER) """ min_freq = float(utils.get_in(data['config'], ('algorithm', 'min_allele_fraction'), 10)) / 100.0 logger.debug('Filtering MuTect2 calls with allele fraction threshold of %s' % min_freq) ungz_out_file = '%s.vcf' % utils.splitext_plus(out_file)[0] if not utils.file_exists(ungz_out_file) and (not utils.file_exists(ungz_out_file + '.gz')): with file_transaction(data, ungz_out_file) as tx_out_file: vcf = cyvcf2.VCF(in_file) vcf.add_filter_to_header({'ID': 'MinAF', 'Description': 'Allele frequency is lower than %s%% ' % (min_freq * 100) + ('(configured in bcbio as min_allele_fraction)' if utils.get_in(data['config'], ('algorithm', 'min_allele_fraction')) else '(default threshold in bcbio; override with min_allele_fraction in the algorithm section)')}) w = cyvcf2.Writer(tx_out_file, vcf) # GATK 3.x can produce VCFs without sample names for empty VCFs try: tumor_index = vcf.samples.index(dd.get_sample_name(data)) # depends on [control=['try'], data=[]] except ValueError: tumor_index = None # depends on [control=['except'], data=[]] for rec in vcf: if tumor_index is not None and np.all(rec.format('AF')[tumor_index] < min_freq): vcfutils.cyvcf_add_filter(rec, 'MinAF') # depends on [control=['if'], data=[]] w.write_record(rec) # depends on [control=['for'], data=['rec']] w.close() # depends on [control=['with'], data=['tx_out_file']] # depends on [control=['if'], data=[]] return vcfutils.bgzip_and_index(ungz_out_file, data['config'])
def timetopythonvalue(time_val): "Convert a time or time range from ArcGIS REST server format to Python" if isinstance(time_val, sequence): return map(timetopythonvalue, time_val) elif isinstance(time_val, numeric): return datetime.datetime(*(time.gmtime(time_val))[:6]) elif isinstance(time_val, numeric): values = [] try: values = map(long, time_val.split(",")) except: pass if values: return map(timetopythonvalue, values) raise ValueError(repr(time_val))
def function[timetopythonvalue, parameter[time_val]]: constant[Convert a time or time range from ArcGIS REST server format to Python] if call[name[isinstance], parameter[name[time_val], name[sequence]]] begin[:] return[call[name[map], parameter[name[timetopythonvalue], name[time_val]]]] <ast.Raise object at 0x7da204565ae0>
keyword[def] identifier[timetopythonvalue] ( identifier[time_val] ): literal[string] keyword[if] identifier[isinstance] ( identifier[time_val] , identifier[sequence] ): keyword[return] identifier[map] ( identifier[timetopythonvalue] , identifier[time_val] ) keyword[elif] identifier[isinstance] ( identifier[time_val] , identifier[numeric] ): keyword[return] identifier[datetime] . identifier[datetime] (*( identifier[time] . identifier[gmtime] ( identifier[time_val] ))[: literal[int] ]) keyword[elif] identifier[isinstance] ( identifier[time_val] , identifier[numeric] ): identifier[values] =[] keyword[try] : identifier[values] = identifier[map] ( identifier[long] , identifier[time_val] . identifier[split] ( literal[string] )) keyword[except] : keyword[pass] keyword[if] identifier[values] : keyword[return] identifier[map] ( identifier[timetopythonvalue] , identifier[values] ) keyword[raise] identifier[ValueError] ( identifier[repr] ( identifier[time_val] ))
def timetopythonvalue(time_val): """Convert a time or time range from ArcGIS REST server format to Python""" if isinstance(time_val, sequence): return map(timetopythonvalue, time_val) # depends on [control=['if'], data=[]] elif isinstance(time_val, numeric): return datetime.datetime(*time.gmtime(time_val)[:6]) # depends on [control=['if'], data=[]] elif isinstance(time_val, numeric): values = [] try: values = map(long, time_val.split(',')) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] if values: return map(timetopythonvalue, values) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] raise ValueError(repr(time_val))
def as_odict(self): """ returns an odict version of the object, based on it's attributes """ if hasattr(self, 'cust_odict'): return self.cust_odict if hasattr(self, 'attr_check'): self.attr_check() odc = odict() for attr in self.attrorder: odc[attr] = getattr(self, attr) return odc
def function[as_odict, parameter[self]]: constant[ returns an odict version of the object, based on it's attributes ] if call[name[hasattr], parameter[name[self], constant[cust_odict]]] begin[:] return[name[self].cust_odict] if call[name[hasattr], parameter[name[self], constant[attr_check]]] begin[:] call[name[self].attr_check, parameter[]] variable[odc] assign[=] call[name[odict], parameter[]] for taget[name[attr]] in starred[name[self].attrorder] begin[:] call[name[odc]][name[attr]] assign[=] call[name[getattr], parameter[name[self], name[attr]]] return[name[odc]]
keyword[def] identifier[as_odict] ( identifier[self] ): literal[string] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[return] identifier[self] . identifier[cust_odict] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[attr_check] () identifier[odc] = identifier[odict] () keyword[for] identifier[attr] keyword[in] identifier[self] . identifier[attrorder] : identifier[odc] [ identifier[attr] ]= identifier[getattr] ( identifier[self] , identifier[attr] ) keyword[return] identifier[odc]
def as_odict(self): """ returns an odict version of the object, based on it's attributes """ if hasattr(self, 'cust_odict'): return self.cust_odict # depends on [control=['if'], data=[]] if hasattr(self, 'attr_check'): self.attr_check() # depends on [control=['if'], data=[]] odc = odict() for attr in self.attrorder: odc[attr] = getattr(self, attr) # depends on [control=['for'], data=['attr']] return odc
def np_to_list(elem):
    """Returns list from list, tuple or ndarray."""
    if isinstance(elem, list):
        # Already a list: hand it back unchanged (no copy).
        return elem
    # Tuples and numpy arrays both convert via the list() constructor.
    if isinstance(elem, (tuple, np.ndarray)):
        return list(elem)
    raise ValueError(
        'Input elements of a sequence should be either a numpy array, a '
        'python list or tuple. Got {}'.format(type(elem)))
def function[np_to_list, parameter[elem]]: constant[Returns list from list, tuple or ndarray.] if call[name[isinstance], parameter[name[elem], name[list]]] begin[:] return[name[elem]]
keyword[def] identifier[np_to_list] ( identifier[elem] ): literal[string] keyword[if] identifier[isinstance] ( identifier[elem] , identifier[list] ): keyword[return] identifier[elem] keyword[elif] identifier[isinstance] ( identifier[elem] , identifier[tuple] ): keyword[return] identifier[list] ( identifier[elem] ) keyword[elif] identifier[isinstance] ( identifier[elem] , identifier[np] . identifier[ndarray] ): keyword[return] identifier[list] ( identifier[elem] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[elem] )))
def np_to_list(elem): """Returns list from list, tuple or ndarray.""" if isinstance(elem, list): return elem # depends on [control=['if'], data=[]] elif isinstance(elem, tuple): return list(elem) # depends on [control=['if'], data=[]] elif isinstance(elem, np.ndarray): return list(elem) # depends on [control=['if'], data=[]] else: raise ValueError('Input elements of a sequence should be either a numpy array, a python list or tuple. Got {}'.format(type(elem)))
def normalize_names(self):
    """
    It is internally used to normalize the name of the widgets.
    It means a widget named foo:vbox-dialog in glade is refered
    self.vbox_dialog in the code.

    It also sets a data "prefixes" with the list of
    prefixes a widget has for each widget.
    """
    for widget in self.get_widgets():
        if isinstance(widget, Gtk.Buildable):
            # Glade name, e.g. "foo:vbox-dialog"; colon-separated parts
            # before the last one are treated as prefixes.
            widget_name = Gtk.Buildable.get_name(widget)
            prefixes_name_l = widget_name.split(":")
            prefixes = prefixes_name_l[:-1]
            widget_api_name = prefixes_name_l[-1]
            # Keep only identifier-like tokens and join them with "_",
            # turning e.g. "vbox-dialog" into "vbox_dialog".
            widget_api_name = "_".join(re.findall(tokenize.Name, widget_api_name))
            # NOTE(review): Gtk.Buildable.set_name() returns None, so this
            # rebinding of widget_name is dead — candidate for cleanup.
            widget_name = Gtk.Buildable.set_name(widget, widget_api_name)
            if hasattr(self, widget_api_name):
                # Refuse to clobber an existing attribute on self.
                raise AttributeError(
                    "instance %s already has an attribute %s" % (self, widget_api_name)
                )
            else:
                setattr(self, widget_api_name, widget)
                if prefixes:
                    # TODO: this is a guess — confirm Gtk.Buildable exposes
                    # set_data in the PyGObject bindings in use here.
                    Gtk.Buildable.set_data(widget, "prefixes", prefixes)
def function[normalize_names, parameter[self]]: constant[ It is internally used to normalize the name of the widgets. It means a widget named foo:vbox-dialog in glade is refered self.vbox_dialog in the code. It also sets a data "prefixes" with the list of prefixes a widget has for each widget. ] for taget[name[widget]] in starred[call[name[self].get_widgets, parameter[]]] begin[:] if call[name[isinstance], parameter[name[widget], name[Gtk].Buildable]] begin[:] variable[widget_name] assign[=] call[name[Gtk].Buildable.get_name, parameter[name[widget]]] variable[prefixes_name_l] assign[=] call[name[widget_name].split, parameter[constant[:]]] variable[prefixes] assign[=] call[name[prefixes_name_l]][<ast.Slice object at 0x7da1b26aff70>] variable[widget_api_name] assign[=] call[name[prefixes_name_l]][<ast.UnaryOp object at 0x7da1b26ad2d0>] variable[widget_api_name] assign[=] call[constant[_].join, parameter[call[name[re].findall, parameter[name[tokenize].Name, name[widget_api_name]]]]] variable[widget_name] assign[=] call[name[Gtk].Buildable.set_name, parameter[name[widget], name[widget_api_name]]] if call[name[hasattr], parameter[name[self], name[widget_api_name]]] begin[:] <ast.Raise object at 0x7da1b26ac5e0>
keyword[def] identifier[normalize_names] ( identifier[self] ): literal[string] keyword[for] identifier[widget] keyword[in] identifier[self] . identifier[get_widgets] (): keyword[if] identifier[isinstance] ( identifier[widget] , identifier[Gtk] . identifier[Buildable] ): identifier[widget_name] = identifier[Gtk] . identifier[Buildable] . identifier[get_name] ( identifier[widget] ) identifier[prefixes_name_l] = identifier[widget_name] . identifier[split] ( literal[string] ) identifier[prefixes] = identifier[prefixes_name_l] [:- literal[int] ] identifier[widget_api_name] = identifier[prefixes_name_l] [- literal[int] ] identifier[widget_api_name] = literal[string] . identifier[join] ( identifier[re] . identifier[findall] ( identifier[tokenize] . identifier[Name] , identifier[widget_api_name] )) identifier[widget_name] = identifier[Gtk] . identifier[Buildable] . identifier[set_name] ( identifier[widget] , identifier[widget_api_name] ) keyword[if] identifier[hasattr] ( identifier[self] , identifier[widget_api_name] ): keyword[raise] identifier[AttributeError] ( literal[string] %( identifier[self] , identifier[widget_api_name] ) ) keyword[else] : identifier[setattr] ( identifier[self] , identifier[widget_api_name] , identifier[widget] ) keyword[if] identifier[prefixes] : identifier[Gtk] . identifier[Buildable] . identifier[set_data] ( identifier[widget] , literal[string] , identifier[prefixes] )
def normalize_names(self): """ It is internally used to normalize the name of the widgets. It means a widget named foo:vbox-dialog in glade is refered self.vbox_dialog in the code. It also sets a data "prefixes" with the list of prefixes a widget has for each widget. """ for widget in self.get_widgets(): if isinstance(widget, Gtk.Buildable): widget_name = Gtk.Buildable.get_name(widget) prefixes_name_l = widget_name.split(':') prefixes = prefixes_name_l[:-1] widget_api_name = prefixes_name_l[-1] widget_api_name = '_'.join(re.findall(tokenize.Name, widget_api_name)) widget_name = Gtk.Buildable.set_name(widget, widget_api_name) if hasattr(self, widget_api_name): raise AttributeError('instance %s already has an attribute %s' % (self, widget_api_name)) # depends on [control=['if'], data=[]] else: setattr(self, widget_api_name, widget) if prefixes: # TODO is is a guess Gtk.Buildable.set_data(widget, 'prefixes', prefixes) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['widget']]
def _lim_moment(self, u, order=1): """ This method calculates the kth order limiting moment of the distribution. It is given by - E(u) = Integral (-inf to u) [ (x^k)*pdf(x) dx ] + (u^k)(1-cdf(u)) where, pdf is the probability density function and cdf is the cumulative density function of the distribution. Reference --------- Klugman, S. A., Panjer, H. H. and Willmot, G. E., Loss Models, From Data to Decisions, Fourth Edition, Wiley, definition 3.5 and equation 3.8. Parameters ---------- u: float The point at which the moment is to be calculated. order: int The order of the moment, default is first order. """ def fun(x): return np.power(x, order) * self.factor.pdf(x) return (integrate.quad(fun, -np.inf, u)[0] + np.power(u, order)*(1 - self.factor.cdf(u)))
def function[_lim_moment, parameter[self, u, order]]: constant[ This method calculates the kth order limiting moment of the distribution. It is given by - E(u) = Integral (-inf to u) [ (x^k)*pdf(x) dx ] + (u^k)(1-cdf(u)) where, pdf is the probability density function and cdf is the cumulative density function of the distribution. Reference --------- Klugman, S. A., Panjer, H. H. and Willmot, G. E., Loss Models, From Data to Decisions, Fourth Edition, Wiley, definition 3.5 and equation 3.8. Parameters ---------- u: float The point at which the moment is to be calculated. order: int The order of the moment, default is first order. ] def function[fun, parameter[x]]: return[binary_operation[call[name[np].power, parameter[name[x], name[order]]] * call[name[self].factor.pdf, parameter[name[x]]]]] return[binary_operation[call[call[name[integrate].quad, parameter[name[fun], <ast.UnaryOp object at 0x7da18ede4190>, name[u]]]][constant[0]] + binary_operation[call[name[np].power, parameter[name[u], name[order]]] * binary_operation[constant[1] - call[name[self].factor.cdf, parameter[name[u]]]]]]]
keyword[def] identifier[_lim_moment] ( identifier[self] , identifier[u] , identifier[order] = literal[int] ): literal[string] keyword[def] identifier[fun] ( identifier[x] ): keyword[return] identifier[np] . identifier[power] ( identifier[x] , identifier[order] )* identifier[self] . identifier[factor] . identifier[pdf] ( identifier[x] ) keyword[return] ( identifier[integrate] . identifier[quad] ( identifier[fun] ,- identifier[np] . identifier[inf] , identifier[u] )[ literal[int] ]+ identifier[np] . identifier[power] ( identifier[u] , identifier[order] )*( literal[int] - identifier[self] . identifier[factor] . identifier[cdf] ( identifier[u] )))
def _lim_moment(self, u, order=1): """ This method calculates the kth order limiting moment of the distribution. It is given by - E(u) = Integral (-inf to u) [ (x^k)*pdf(x) dx ] + (u^k)(1-cdf(u)) where, pdf is the probability density function and cdf is the cumulative density function of the distribution. Reference --------- Klugman, S. A., Panjer, H. H. and Willmot, G. E., Loss Models, From Data to Decisions, Fourth Edition, Wiley, definition 3.5 and equation 3.8. Parameters ---------- u: float The point at which the moment is to be calculated. order: int The order of the moment, default is first order. """ def fun(x): return np.power(x, order) * self.factor.pdf(x) return integrate.quad(fun, -np.inf, u)[0] + np.power(u, order) * (1 - self.factor.cdf(u))
def to_cloudformation(self, **kwargs):
    """Returns the CloudWatch Logs Subscription Filter and Lambda Permission
    to which this CloudWatch Logs event source corresponds.

    :param dict kwargs: no existing resources need to be modified
    :returns: a list of vanilla CloudFormation Resources, to which this
        push event expands
    :rtype: list
    """
    function = kwargs.get('function')
    if not function:
        raise TypeError("Missing required keyword argument: function")

    # Grant CloudWatch Logs permission to invoke the function, then wire
    # up the subscription filter that depends on that permission.
    arn = self.get_source_arn()
    lambda_permission = self._construct_permission(function, source_arn=arn)
    log_filter = self.get_subscription_filter(function, lambda_permission)
    return [lambda_permission, log_filter]
def function[to_cloudformation, parameter[self]]: constant[Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list ] variable[function] assign[=] call[name[kwargs].get, parameter[constant[function]]] if <ast.UnaryOp object at 0x7da20c7c9690> begin[:] <ast.Raise object at 0x7da20c7c85e0> variable[source_arn] assign[=] call[name[self].get_source_arn, parameter[]] variable[permission] assign[=] call[name[self]._construct_permission, parameter[name[function]]] variable[subscription_filter] assign[=] call[name[self].get_subscription_filter, parameter[name[function], name[permission]]] variable[resources] assign[=] list[[<ast.Name object at 0x7da20c7cb640>, <ast.Name object at 0x7da20c7cb790>]] return[name[resources]]
keyword[def] identifier[to_cloudformation] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[function] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[function] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[source_arn] = identifier[self] . identifier[get_source_arn] () identifier[permission] = identifier[self] . identifier[_construct_permission] ( identifier[function] , identifier[source_arn] = identifier[source_arn] ) identifier[subscription_filter] = identifier[self] . identifier[get_subscription_filter] ( identifier[function] , identifier[permission] ) identifier[resources] =[ identifier[permission] , identifier[subscription_filter] ] keyword[return] identifier[resources]
def to_cloudformation(self, **kwargs): """Returns the CloudWatch Logs Subscription Filter and Lambda Permission to which this CloudWatch Logs event source corresponds. :param dict kwargs: no existing resources need to be modified :returns: a list of vanilla CloudFormation Resources, to which this push event expands :rtype: list """ function = kwargs.get('function') if not function: raise TypeError('Missing required keyword argument: function') # depends on [control=['if'], data=[]] source_arn = self.get_source_arn() permission = self._construct_permission(function, source_arn=source_arn) subscription_filter = self.get_subscription_filter(function, permission) resources = [permission, subscription_filter] return resources
def H_iso(x, params):
    """Isochrone Hamiltonian = -GM/(b+sqrt(b**2+(r-r0)**2))

    ``x`` holds position in the first three components and velocity in
    the rest; ``params[0]`` is the mass M and ``params[1]`` the scale b.

    NOTE(review): the docstring formula mentions an offset r0
    (``params[2]``) but the active code does not use it — confirm
    which form is intended.
    """
    r_sq = np.sum(x[:3] ** 2)
    kinetic = 0.5 * np.sum(x[3:] ** 2)
    potential = -Grav * params[0] / (params[1] + np.sqrt(params[1] ** 2 + r_sq))
    return kinetic + potential
def function[H_iso, parameter[x, params]]: constant[ Isochrone Hamiltonian = -GM/(b+sqrt(b**2+(r-r0)**2))] variable[r] assign[=] call[name[np].sum, parameter[binary_operation[call[name[x]][<ast.Slice object at 0x7da1b0efd5a0>] ** constant[2]]]] return[binary_operation[binary_operation[constant[0.5] * call[name[np].sum, parameter[binary_operation[call[name[x]][<ast.Slice object at 0x7da1b0dc2170>] ** constant[2]]]]] - binary_operation[binary_operation[name[Grav] * call[name[params]][constant[0]]] / binary_operation[call[name[params]][constant[1]] + call[name[np].sqrt, parameter[binary_operation[binary_operation[call[name[params]][constant[1]] ** constant[2]] + name[r]]]]]]]]
keyword[def] identifier[H_iso] ( identifier[x] , identifier[params] ): literal[string] identifier[r] = identifier[np] . identifier[sum] ( identifier[x] [: literal[int] ]** literal[int] ) keyword[return] literal[int] * identifier[np] . identifier[sum] ( identifier[x] [ literal[int] :]** literal[int] )- identifier[Grav] * identifier[params] [ literal[int] ]/( identifier[params] [ literal[int] ]+ identifier[np] . identifier[sqrt] ( identifier[params] [ literal[int] ]** literal[int] + identifier[r] ))
def H_iso(x, params): """ Isochrone Hamiltonian = -GM/(b+sqrt(b**2+(r-r0)**2))""" #r = (np.sqrt(np.sum(x[:3]**2))-params[2])**2 r = np.sum(x[:3] ** 2) return 0.5 * np.sum(x[3:] ** 2) - Grav * params[0] / (params[1] + np.sqrt(params[1] ** 2 + r))
def verify(self, type_):
    """
    Check whether a type implements ``self``.

    Parameters
    ----------
    type_ : type
        The type to check.

    Raises
    ------
    TypeError
        If ``type_`` doesn't conform to our interface.

    Returns
    -------
    dict
        Mapping from method name to the registered default implementation
        for each required method that ``type_`` does not define itself.
    """
    raw_missing, mistyped, mismatched = self._diff_signatures(type_)

    # Split the raw misses: names with a registered default are satisfied
    # by that default; the rest are genuinely missing.
    missing = []
    defaults_to_use = {}
    for name in raw_missing:
        if name in self._defaults:
            defaults_to_use[name] = self._defaults[name].implementation
        else:
            missing.append(name)

    if missing or mistyped or mismatched:
        raise self._invalid_implementation(type_, missing, mistyped, mismatched)
    return defaults_to_use
def function[verify, parameter[self, type_]]: constant[ Check whether a type implements ``self``. Parameters ---------- type_ : type The type to check. Raises ------ TypeError If ``type_`` doesn't conform to our interface. Returns ------- None ] <ast.Tuple object at 0x7da207f02620> assign[=] call[name[self]._diff_signatures, parameter[name[type_]]] variable[missing] assign[=] list[[]] variable[defaults_to_use] assign[=] dictionary[[], []] for taget[name[name]] in starred[name[raw_missing]] begin[:] <ast.Try object at 0x7da1b06de410> if <ast.UnaryOp object at 0x7da1b06de9e0> begin[:] return[name[defaults_to_use]] <ast.Raise object at 0x7da1b06debc0>
keyword[def] identifier[verify] ( identifier[self] , identifier[type_] ): literal[string] identifier[raw_missing] , identifier[mistyped] , identifier[mismatched] = identifier[self] . identifier[_diff_signatures] ( identifier[type_] ) identifier[missing] =[] identifier[defaults_to_use] ={} keyword[for] identifier[name] keyword[in] identifier[raw_missing] : keyword[try] : identifier[defaults_to_use] [ identifier[name] ]= identifier[self] . identifier[_defaults] [ identifier[name] ]. identifier[implementation] keyword[except] identifier[KeyError] : identifier[missing] . identifier[append] ( identifier[name] ) keyword[if] keyword[not] identifier[any] (( identifier[missing] , identifier[mistyped] , identifier[mismatched] )): keyword[return] identifier[defaults_to_use] keyword[raise] identifier[self] . identifier[_invalid_implementation] ( identifier[type_] , identifier[missing] , identifier[mistyped] , identifier[mismatched] )
def verify(self, type_): """ Check whether a type implements ``self``. Parameters ---------- type_ : type The type to check. Raises ------ TypeError If ``type_`` doesn't conform to our interface. Returns ------- None """ (raw_missing, mistyped, mismatched) = self._diff_signatures(type_) # See if we have defaults for missing methods. missing = [] defaults_to_use = {} for name in raw_missing: try: defaults_to_use[name] = self._defaults[name].implementation # depends on [control=['try'], data=[]] except KeyError: missing.append(name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['name']] if not any((missing, mistyped, mismatched)): return defaults_to_use # depends on [control=['if'], data=[]] raise self._invalid_implementation(type_, missing, mistyped, mismatched)
def _googleauth(key_file=None, scopes=None, user_agent=None):
    """
    Google http_auth helper.

    If key_file is not specified, application-default credentials are
    used (and *scopes* is ignored). If key_file is given, service-account
    credentials are built from it using *scopes*, falling back to
    DEFAULT_SCOPES when *scopes* is empty or None.

    :param key_file: path to the JSON service-account key file to use.
        Default is None
    :type key_file: ``str`` or None
    :param scopes: OAuth scopes to request. Default is DEFAULT_SCOPES
    :type scopes: ``list`` or None
    :param user_agent: User-Agent string to use in requests. Default is None.
    :type user_agent: ``str`` or None
    :return: HTTPLib2 authorized client.
    :rtype: :class: `HTTPLib2`
    """
    if key_file:
        # `scopes=None` instead of a mutable `[]` default; both are falsy,
        # so callers relying on the old default see identical behavior.
        if not scopes:
            scopes = DEFAULT_SCOPES
        creds = ServiceAccountCredentials.from_json_keyfile_name(key_file,
                                                                 scopes=scopes)
    else:
        creds = GoogleCredentials.get_application_default()
    http = Http()
    if user_agent:
        http = set_user_agent(http, user_agent)
    http_auth = creds.authorize(http)
    return http_auth
def function[_googleauth, parameter[key_file, scopes, user_agent]]: constant[ Google http_auth helper. If key_file is not specified, default credentials will be used. If scopes is specified (and key_file), will be used instead of DEFAULT_SCOPES :param key_file: path to key file to use. Default is None :type key_file: ``str`` :param scopes: scopes to set. Default is DEFAUL_SCOPES :type scopes: ``list`` :param user_agent: User Agent string to use in requests. Default is None. :type http_auth: ``str`` or None :return: HTTPLib2 authorized client. :rtype: :class: `HTTPLib2` ] if name[key_file] begin[:] if <ast.UnaryOp object at 0x7da1b0190280> begin[:] variable[scopes] assign[=] name[DEFAULT_SCOPES] variable[creds] assign[=] call[name[ServiceAccountCredentials].from_json_keyfile_name, parameter[name[key_file]]] variable[http] assign[=] call[name[Http], parameter[]] if name[user_agent] begin[:] variable[http] assign[=] call[name[set_user_agent], parameter[name[http], name[user_agent]]] variable[http_auth] assign[=] call[name[creds].authorize, parameter[name[http]]] return[name[http_auth]]
keyword[def] identifier[_googleauth] ( identifier[key_file] = keyword[None] , identifier[scopes] =[], identifier[user_agent] = keyword[None] ): literal[string] keyword[if] identifier[key_file] : keyword[if] keyword[not] identifier[scopes] : identifier[scopes] = identifier[DEFAULT_SCOPES] identifier[creds] = identifier[ServiceAccountCredentials] . identifier[from_json_keyfile_name] ( identifier[key_file] , identifier[scopes] = identifier[scopes] ) keyword[else] : identifier[creds] = identifier[GoogleCredentials] . identifier[get_application_default] () identifier[http] = identifier[Http] () keyword[if] identifier[user_agent] : identifier[http] = identifier[set_user_agent] ( identifier[http] , identifier[user_agent] ) identifier[http_auth] = identifier[creds] . identifier[authorize] ( identifier[http] ) keyword[return] identifier[http_auth]
def _googleauth(key_file=None, scopes=[], user_agent=None): """ Google http_auth helper. If key_file is not specified, default credentials will be used. If scopes is specified (and key_file), will be used instead of DEFAULT_SCOPES :param key_file: path to key file to use. Default is None :type key_file: ``str`` :param scopes: scopes to set. Default is DEFAUL_SCOPES :type scopes: ``list`` :param user_agent: User Agent string to use in requests. Default is None. :type http_auth: ``str`` or None :return: HTTPLib2 authorized client. :rtype: :class: `HTTPLib2` """ if key_file: if not scopes: scopes = DEFAULT_SCOPES # depends on [control=['if'], data=[]] creds = ServiceAccountCredentials.from_json_keyfile_name(key_file, scopes=scopes) # depends on [control=['if'], data=[]] else: creds = GoogleCredentials.get_application_default() http = Http() if user_agent: http = set_user_agent(http, user_agent) # depends on [control=['if'], data=[]] http_auth = creds.authorize(http) return http_auth
def _meet(intervals_hier, labels_hier, frame_size):
    '''Compute the (sparse) least-common-ancestor (LCA) matrix for a
    hierarchical segmentation.

    For any pair of frames ``(s, t)``, the LCA is the deepest level in
    the hierarchy such that ``(s, t)`` are contained within a single
    segment at that level.

    Parameters
    ----------
    intervals_hier : list of ndarray
        An ordered list of segment interval arrays.
        The list is assumed to be ordered by increasing specificity (depth).

    labels_hier : list of list of str
        ``labels_hier[i]`` contains the segment labels for the
        ``i``th layer of the annotations

    frame_size : number
        The length of the sample frames (in seconds)

    Returns
    -------
    meet_matrix : scipy.sparse.csr_matrix
        A sparse matrix such that ``meet_matrix[i, j]`` contains the depth
        of the deepest segment label containing both ``i`` and ``j``.
    '''
    frame_size = float(frame_size)

    # Figure out how many frames we need to cover the full annotation span.
    n_start, n_end = _hierarchy_bounds(intervals_hier)
    n = int((_round(n_end, frame_size) - _round(n_start, frame_size)) / frame_size)

    # Initialize the meet matrix; lil_matrix supports cheap incremental
    # slice assignment, converted to csr only at the end.
    meet_matrix = scipy.sparse.lil_matrix((n, n), dtype=np.uint8)

    # Levels are numbered from 1 (coarsest) so that deeper layers overwrite
    # shallower ones as we iterate in order of increasing specificity.
    for level, (intervals, labels) in enumerate(zip(intervals_hier, labels_hier), 1):
        # Encode the labels at this level as integer ids.
        lab_enc = util.index_labels(labels)[0]

        # Find unique agreements: upper triangle of the pairwise
        # label-equality matrix (each agreeing pair counted once).
        int_agree = np.triu(np.equal.outer(lab_enc, lab_enc))

        # Map interval boundaries (seconds) to frame indices.
        int_frames = (_round(intervals, frame_size) / frame_size).astype(int)

        # For each intervals i, j where labels agree, update the meet matrix
        for (seg_i, seg_j) in zip(*np.where(int_agree)):
            idx_i = slice(*list(int_frames[seg_i]))
            idx_j = slice(*list(int_frames[seg_j]))

            meet_matrix[idx_i, idx_j] = level

            # Mirror the off-diagonal block to keep the matrix symmetric.
            if seg_i != seg_j:
                meet_matrix[idx_j, idx_i] = level

    return scipy.sparse.csr_matrix(meet_matrix)
def function[_meet, parameter[intervals_hier, labels_hier, frame_size]]: constant[Compute the (sparse) least-common-ancestor (LCA) matrix for a hierarchical segmentation. For any pair of frames ``(s, t)``, the LCA is the deepest level in the hierarchy such that ``(s, t)`` are contained within a single segment at that level. Parameters ---------- intervals_hier : list of ndarray An ordered list of segment interval arrays. The list is assumed to be ordered by increasing specificity (depth). labels_hier : list of list of str ``labels_hier[i]`` contains the segment labels for the ``i``th layer of the annotations frame_size : number The length of the sample frames (in seconds) Returns ------- meet_matrix : scipy.sparse.csr_matrix A sparse matrix such that ``meet_matrix[i, j]`` contains the depth of the deepest segment label containing both ``i`` and ``j``. ] variable[frame_size] assign[=] call[name[float], parameter[name[frame_size]]] <ast.Tuple object at 0x7da1b0fccd30> assign[=] call[name[_hierarchy_bounds], parameter[name[intervals_hier]]] variable[n] assign[=] call[name[int], parameter[binary_operation[binary_operation[call[name[_round], parameter[name[n_end], name[frame_size]]] - call[name[_round], parameter[name[n_start], name[frame_size]]]] / name[frame_size]]]] variable[meet_matrix] assign[=] call[name[scipy].sparse.lil_matrix, parameter[tuple[[<ast.Name object at 0x7da1b0fcf3d0>, <ast.Name object at 0x7da1b0fcd5d0>]]]] for taget[tuple[[<ast.Name object at 0x7da1b0fcca00>, <ast.Tuple object at 0x7da1b0fcd720>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[intervals_hier], name[labels_hier]]], constant[1]]]] begin[:] variable[lab_enc] assign[=] call[call[name[util].index_labels, parameter[name[labels]]]][constant[0]] variable[int_agree] assign[=] call[name[np].triu, parameter[call[name[np].equal.outer, parameter[name[lab_enc], name[lab_enc]]]]] variable[int_frames] assign[=] call[binary_operation[call[name[_round], 
parameter[name[intervals], name[frame_size]]] / name[frame_size]].astype, parameter[name[int]]] for taget[tuple[[<ast.Name object at 0x7da1b0fcfcd0>, <ast.Name object at 0x7da1b0fcd210>]]] in starred[call[name[zip], parameter[<ast.Starred object at 0x7da1b0fce560>]]] begin[:] variable[idx_i] assign[=] call[name[slice], parameter[<ast.Starred object at 0x7da1b0fcded0>]] variable[idx_j] assign[=] call[name[slice], parameter[<ast.Starred object at 0x7da1b0fcd3f0>]] call[name[meet_matrix]][tuple[[<ast.Name object at 0x7da1b0fcf880>, <ast.Name object at 0x7da1b0fcd150>]]] assign[=] name[level] if compare[name[seg_i] not_equal[!=] name[seg_j]] begin[:] call[name[meet_matrix]][tuple[[<ast.Name object at 0x7da1b0fce620>, <ast.Name object at 0x7da1b0fcde40>]]] assign[=] name[level] return[call[name[scipy].sparse.csr_matrix, parameter[name[meet_matrix]]]]
keyword[def] identifier[_meet] ( identifier[intervals_hier] , identifier[labels_hier] , identifier[frame_size] ): literal[string] identifier[frame_size] = identifier[float] ( identifier[frame_size] ) identifier[n_start] , identifier[n_end] = identifier[_hierarchy_bounds] ( identifier[intervals_hier] ) identifier[n] = identifier[int] (( identifier[_round] ( identifier[n_end] , identifier[frame_size] )- identifier[_round] ( identifier[n_start] , identifier[frame_size] ))/ identifier[frame_size] ) identifier[meet_matrix] = identifier[scipy] . identifier[sparse] . identifier[lil_matrix] (( identifier[n] , identifier[n] ), identifier[dtype] = identifier[np] . identifier[uint8] ) keyword[for] identifier[level] ,( identifier[intervals] , identifier[labels] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[intervals_hier] , identifier[labels_hier] ), literal[int] ): identifier[lab_enc] = identifier[util] . identifier[index_labels] ( identifier[labels] )[ literal[int] ] identifier[int_agree] = identifier[np] . identifier[triu] ( identifier[np] . identifier[equal] . identifier[outer] ( identifier[lab_enc] , identifier[lab_enc] )) identifier[int_frames] =( identifier[_round] ( identifier[intervals] , identifier[frame_size] )/ identifier[frame_size] ). identifier[astype] ( identifier[int] ) keyword[for] ( identifier[seg_i] , identifier[seg_j] ) keyword[in] identifier[zip] (* identifier[np] . identifier[where] ( identifier[int_agree] )): identifier[idx_i] = identifier[slice] (* identifier[list] ( identifier[int_frames] [ identifier[seg_i] ])) identifier[idx_j] = identifier[slice] (* identifier[list] ( identifier[int_frames] [ identifier[seg_j] ])) identifier[meet_matrix] [ identifier[idx_i] , identifier[idx_j] ]= identifier[level] keyword[if] identifier[seg_i] != identifier[seg_j] : identifier[meet_matrix] [ identifier[idx_j] , identifier[idx_i] ]= identifier[level] keyword[return] identifier[scipy] . identifier[sparse] . 
identifier[csr_matrix] ( identifier[meet_matrix] )
def _meet(intervals_hier, labels_hier, frame_size): """Compute the (sparse) least-common-ancestor (LCA) matrix for a hierarchical segmentation. For any pair of frames ``(s, t)``, the LCA is the deepest level in the hierarchy such that ``(s, t)`` are contained within a single segment at that level. Parameters ---------- intervals_hier : list of ndarray An ordered list of segment interval arrays. The list is assumed to be ordered by increasing specificity (depth). labels_hier : list of list of str ``labels_hier[i]`` contains the segment labels for the ``i``th layer of the annotations frame_size : number The length of the sample frames (in seconds) Returns ------- meet_matrix : scipy.sparse.csr_matrix A sparse matrix such that ``meet_matrix[i, j]`` contains the depth of the deepest segment label containing both ``i`` and ``j``. """ frame_size = float(frame_size) # Figure out how many frames we need (n_start, n_end) = _hierarchy_bounds(intervals_hier) n = int((_round(n_end, frame_size) - _round(n_start, frame_size)) / frame_size) # Initialize the meet matrix meet_matrix = scipy.sparse.lil_matrix((n, n), dtype=np.uint8) for (level, (intervals, labels)) in enumerate(zip(intervals_hier, labels_hier), 1): # Encode the labels at this level lab_enc = util.index_labels(labels)[0] # Find unique agreements int_agree = np.triu(np.equal.outer(lab_enc, lab_enc)) # Map intervals to frame indices int_frames = (_round(intervals, frame_size) / frame_size).astype(int) # For each intervals i, j where labels agree, update the meet matrix for (seg_i, seg_j) in zip(*np.where(int_agree)): idx_i = slice(*list(int_frames[seg_i])) idx_j = slice(*list(int_frames[seg_j])) meet_matrix[idx_i, idx_j] = level if seg_i != seg_j: meet_matrix[idx_j, idx_i] = level # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return scipy.sparse.csr_matrix(meet_matrix)
def verify(self) -> None:
    """Raises a |RuntimeError| if at least one of the required values
    of a |Variable| object is |None| or |numpy.nan|.

    The descriptor `mask` defines, which values are considered to be
    necessary.

    Example on a 0-dimensional |Variable|:

    >>> from hydpy.core.variabletools import Variable
    >>> class Var(Variable):
    ...     NDIM = 0
    ...     TYPE = float
    ...     __hydpy__connect_variable2subgroup__ = None
    ...     initinfo = 0.0, False
    >>> var = Var(None)
    >>> import numpy
    >>> var.shape = ()
    >>> var.value = 1.0
    >>> var.verify()
    >>> var.value = numpy.nan
    >>> var.verify()
    Traceback (most recent call last):
    ...
    RuntimeError: For variable `var`, 1 required value has not been set yet.

    Example on a 2-dimensional |Variable|:

    >>> Var.NDIM = 2
    >>> var = Var(None)
    >>> var.shape = (2, 3)
    >>> var.value = numpy.ones((2,3))
    >>> var.value[:, 1] = numpy.nan
    >>> var.verify()
    Traceback (most recent call last):
    ...
    RuntimeError: For variable `var`, 2 required values \
have not been set yet.

    >>> Var.mask = var.mask
    >>> Var.mask[0, 1] = False
    >>> var.verify()
    Traceback (most recent call last):
    ...
    RuntimeError: For variable `var`, 1 required value has not been set yet.

    >>> Var.mask[1, 1] = False
    >>> var.verify()
    """
    # Count NaN entries among the values selected by the mask; only
    # masked-in positions count as "required".
    nmbnan: int = numpy.sum(numpy.isnan(
        numpy.array(self.value)[self.mask]))
    if nmbnan:
        # Choose singular/plural phrasing for the error message.
        if nmbnan == 1:
            text = 'value has'
        else:
            text = 'values have'
        raise RuntimeError(
            f'For variable {objecttools.devicephrase(self)}, '
            f'{nmbnan} required {text} not been set yet.')
def function[verify, parameter[self]]: constant[Raises a |RuntimeError| if at least one of the required values of a |Variable| object is |None| or |numpy.nan|. The descriptor `mask` defines, which values are considered to be necessary. Example on a 0-dimensional |Variable|: >>> from hydpy.core.variabletools import Variable >>> class Var(Variable): ... NDIM = 0 ... TYPE = float ... __hydpy__connect_variable2subgroup__ = None ... initinfo = 0.0, False >>> var = Var(None) >>> import numpy >>> var.shape = () >>> var.value = 1.0 >>> var.verify() >>> var.value = numpy.nan >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 1 required value has not been set yet. Example on a 2-dimensional |Variable|: >>> Var.NDIM = 2 >>> var = Var(None) >>> var.shape = (2, 3) >>> var.value = numpy.ones((2,3)) >>> var.value[:, 1] = numpy.nan >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 2 required values have not been set yet. >>> Var.mask = var.mask >>> Var.mask[0, 1] = False >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 1 required value has not been set yet. >>> Var.mask[1, 1] = False >>> var.verify() ] <ast.AnnAssign object at 0x7da18f09f2e0> if name[nmbnan] begin[:] if compare[name[nmbnan] equal[==] constant[1]] begin[:] variable[text] assign[=] constant[value has] <ast.Raise object at 0x7da18fe90190>
keyword[def] identifier[verify] ( identifier[self] )-> keyword[None] : literal[string] identifier[nmbnan] : identifier[int] = identifier[numpy] . identifier[sum] ( identifier[numpy] . identifier[isnan] ( identifier[numpy] . identifier[array] ( identifier[self] . identifier[value] )[ identifier[self] . identifier[mask] ])) keyword[if] identifier[nmbnan] : keyword[if] identifier[nmbnan] == literal[int] : identifier[text] = literal[string] keyword[else] : identifier[text] = literal[string] keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] )
def verify(self) -> None: """Raises a |RuntimeError| if at least one of the required values of a |Variable| object is |None| or |numpy.nan|. The descriptor `mask` defines, which values are considered to be necessary. Example on a 0-dimensional |Variable|: >>> from hydpy.core.variabletools import Variable >>> class Var(Variable): ... NDIM = 0 ... TYPE = float ... __hydpy__connect_variable2subgroup__ = None ... initinfo = 0.0, False >>> var = Var(None) >>> import numpy >>> var.shape = () >>> var.value = 1.0 >>> var.verify() >>> var.value = numpy.nan >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 1 required value has not been set yet. Example on a 2-dimensional |Variable|: >>> Var.NDIM = 2 >>> var = Var(None) >>> var.shape = (2, 3) >>> var.value = numpy.ones((2,3)) >>> var.value[:, 1] = numpy.nan >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 2 required values have not been set yet. >>> Var.mask = var.mask >>> Var.mask[0, 1] = False >>> var.verify() Traceback (most recent call last): ... RuntimeError: For variable `var`, 1 required value has not been set yet. >>> Var.mask[1, 1] = False >>> var.verify() """ nmbnan: int = numpy.sum(numpy.isnan(numpy.array(self.value)[self.mask])) if nmbnan: if nmbnan == 1: text = 'value has' # depends on [control=['if'], data=[]] else: text = 'values have' raise RuntimeError(f'For variable {objecttools.devicephrase(self)}, {nmbnan} required {text} not been set yet.') # depends on [control=['if'], data=[]]
def plot_composition(df, intervals, axes=None): """ Plot time series of generics and label underlying instruments which these series are composed of. Parameters: ----------- df: pd.DataFrame DataFrame of time series to be plotted. Each column is a generic time series. intervals: pd.DataFrame A DataFrame including information for when each contract is used in the generic series. Columns are['contract', 'generic', 'start_date', 'end_date'] axes: list List of matplotlib.axes.Axes Example ------- >>> import mapping.plot as mplot >>> import pandas as pd >>> from pandas import Timestamp as TS >>> idx = pd.date_range("2017-01-01", "2017-01-15") >>> rets_data = pd.np.random.randn(len(idx)) >>> rets = pd.DataFrame({"CL1": rets_data, "CL2": rets_data}, index=idx) >>> intervals = pd.DataFrame( ... [(TS("2017-01-01"), TS("2017-01-05"), "2017_CL_F", "CL1"), ... (TS("2017-01-05"), TS("2017-01-15"), "2017_CL_G", "CL1"), ... (TS("2017-01-01"), TS("2017-01-12"), "2017_CL_G", "CL2"), ... (TS("2017-01-10"), TS("2017-01-15"), "2017_CL_H", "CL2")], ... 
columns=["start_date", "end_date", "contract", "generic"]) >>> mplot.plot_composition(rets, intervals) """ generics = df.columns if (axes is not None) and (len(axes) != len(generics)): raise ValueError("If 'axes' is not None then it must be the same " "length as 'df.columns'") if axes is None: _, axes = plt.subplots(nrows=len(generics), ncols=1) if len(generics) == 1: axes = [axes] for ax, generic in zip(axes, generics): ax.plot(df.loc[:, generic], label=generic) # no legend line to avoid clutter ax.legend(loc='center right', handlelength=0) dates = intervals.loc[intervals.loc[:, "generic"] == generic, ["start_date", "end_date", "contract"]] date_ticks = set( dates.loc[:, "start_date"].tolist() + dates.loc[:, "end_date"].tolist() ) xticks = [ts.toordinal() for ts in date_ticks] xlabels = [ts.strftime("%Y-%m-%d") for ts in date_ticks] ax.set_xticks(xticks) ax.set_xticklabels(xlabels) y_top = ax.get_ylim()[1] count = 0 # label and colour each underlying for _, dt1, dt2, instr in dates.itertuples(): if count % 2: fc = "b" else: fc = "r" count += 1 ax.axvspan(dt1, dt2, facecolor=fc, alpha=0.2) x_mid = dt1 + (dt2 - dt1) / 2 ax.text(x_mid, y_top, instr, rotation=45) return axes
def function[plot_composition, parameter[df, intervals, axes]]: constant[ Plot time series of generics and label underlying instruments which these series are composed of. Parameters: ----------- df: pd.DataFrame DataFrame of time series to be plotted. Each column is a generic time series. intervals: pd.DataFrame A DataFrame including information for when each contract is used in the generic series. Columns are['contract', 'generic', 'start_date', 'end_date'] axes: list List of matplotlib.axes.Axes Example ------- >>> import mapping.plot as mplot >>> import pandas as pd >>> from pandas import Timestamp as TS >>> idx = pd.date_range("2017-01-01", "2017-01-15") >>> rets_data = pd.np.random.randn(len(idx)) >>> rets = pd.DataFrame({"CL1": rets_data, "CL2": rets_data}, index=idx) >>> intervals = pd.DataFrame( ... [(TS("2017-01-01"), TS("2017-01-05"), "2017_CL_F", "CL1"), ... (TS("2017-01-05"), TS("2017-01-15"), "2017_CL_G", "CL1"), ... (TS("2017-01-01"), TS("2017-01-12"), "2017_CL_G", "CL2"), ... (TS("2017-01-10"), TS("2017-01-15"), "2017_CL_H", "CL2")], ... 
columns=["start_date", "end_date", "contract", "generic"]) >>> mplot.plot_composition(rets, intervals) ] variable[generics] assign[=] name[df].columns if <ast.BoolOp object at 0x7da1b1c7ceb0> begin[:] <ast.Raise object at 0x7da1b1c7ff10> if compare[name[axes] is constant[None]] begin[:] <ast.Tuple object at 0x7da1b1c7e4a0> assign[=] call[name[plt].subplots, parameter[]] if compare[call[name[len], parameter[name[generics]]] equal[==] constant[1]] begin[:] variable[axes] assign[=] list[[<ast.Name object at 0x7da1b197fc70>]] for taget[tuple[[<ast.Name object at 0x7da1b197f0a0>, <ast.Name object at 0x7da1b197dd50>]]] in starred[call[name[zip], parameter[name[axes], name[generics]]]] begin[:] call[name[ax].plot, parameter[call[name[df].loc][tuple[[<ast.Slice object at 0x7da1b193efe0>, <ast.Name object at 0x7da1b193fc40>]]]]] call[name[ax].legend, parameter[]] variable[dates] assign[=] call[name[intervals].loc][tuple[[<ast.Compare object at 0x7da1b193e410>, <ast.List object at 0x7da1b197f820>]]] variable[date_ticks] assign[=] call[name[set], parameter[binary_operation[call[call[name[dates].loc][tuple[[<ast.Slice object at 0x7da1b197eb30>, <ast.Constant object at 0x7da1b197f1c0>]]].tolist, parameter[]] + call[call[name[dates].loc][tuple[[<ast.Slice object at 0x7da1b197fe50>, <ast.Constant object at 0x7da1b197d930>]]].tolist, parameter[]]]]] variable[xticks] assign[=] <ast.ListComp object at 0x7da1b197fd00> variable[xlabels] assign[=] <ast.ListComp object at 0x7da1b197e6b0> call[name[ax].set_xticks, parameter[name[xticks]]] call[name[ax].set_xticklabels, parameter[name[xlabels]]] variable[y_top] assign[=] call[call[name[ax].get_ylim, parameter[]]][constant[1]] variable[count] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b1951f30>, <ast.Name object at 0x7da1b1951f60>, <ast.Name object at 0x7da1b1951360>, <ast.Name object at 0x7da1b1952d10>]]] in starred[call[name[dates].itertuples, parameter[]]] begin[:] if binary_operation[name[count] <ast.Mod object at 
0x7da2590d6920> constant[2]] begin[:] variable[fc] assign[=] constant[b] <ast.AugAssign object at 0x7da1b1950f70> call[name[ax].axvspan, parameter[name[dt1], name[dt2]]] variable[x_mid] assign[=] binary_operation[name[dt1] + binary_operation[binary_operation[name[dt2] - name[dt1]] / constant[2]]] call[name[ax].text, parameter[name[x_mid], name[y_top], name[instr]]] return[name[axes]]
keyword[def] identifier[plot_composition] ( identifier[df] , identifier[intervals] , identifier[axes] = keyword[None] ): literal[string] identifier[generics] = identifier[df] . identifier[columns] keyword[if] ( identifier[axes] keyword[is] keyword[not] keyword[None] ) keyword[and] ( identifier[len] ( identifier[axes] )!= identifier[len] ( identifier[generics] )): keyword[raise] identifier[ValueError] ( literal[string] literal[string] ) keyword[if] identifier[axes] keyword[is] keyword[None] : identifier[_] , identifier[axes] = identifier[plt] . identifier[subplots] ( identifier[nrows] = identifier[len] ( identifier[generics] ), identifier[ncols] = literal[int] ) keyword[if] identifier[len] ( identifier[generics] )== literal[int] : identifier[axes] =[ identifier[axes] ] keyword[for] identifier[ax] , identifier[generic] keyword[in] identifier[zip] ( identifier[axes] , identifier[generics] ): identifier[ax] . identifier[plot] ( identifier[df] . identifier[loc] [:, identifier[generic] ], identifier[label] = identifier[generic] ) identifier[ax] . identifier[legend] ( identifier[loc] = literal[string] , identifier[handlelength] = literal[int] ) identifier[dates] = identifier[intervals] . identifier[loc] [ identifier[intervals] . identifier[loc] [:, literal[string] ]== identifier[generic] , [ literal[string] , literal[string] , literal[string] ]] identifier[date_ticks] = identifier[set] ( identifier[dates] . identifier[loc] [:, literal[string] ]. identifier[tolist] ()+ identifier[dates] . identifier[loc] [:, literal[string] ]. identifier[tolist] () ) identifier[xticks] =[ identifier[ts] . identifier[toordinal] () keyword[for] identifier[ts] keyword[in] identifier[date_ticks] ] identifier[xlabels] =[ identifier[ts] . identifier[strftime] ( literal[string] ) keyword[for] identifier[ts] keyword[in] identifier[date_ticks] ] identifier[ax] . identifier[set_xticks] ( identifier[xticks] ) identifier[ax] . 
identifier[set_xticklabels] ( identifier[xlabels] ) identifier[y_top] = identifier[ax] . identifier[get_ylim] ()[ literal[int] ] identifier[count] = literal[int] keyword[for] identifier[_] , identifier[dt1] , identifier[dt2] , identifier[instr] keyword[in] identifier[dates] . identifier[itertuples] (): keyword[if] identifier[count] % literal[int] : identifier[fc] = literal[string] keyword[else] : identifier[fc] = literal[string] identifier[count] += literal[int] identifier[ax] . identifier[axvspan] ( identifier[dt1] , identifier[dt2] , identifier[facecolor] = identifier[fc] , identifier[alpha] = literal[int] ) identifier[x_mid] = identifier[dt1] +( identifier[dt2] - identifier[dt1] )/ literal[int] identifier[ax] . identifier[text] ( identifier[x_mid] , identifier[y_top] , identifier[instr] , identifier[rotation] = literal[int] ) keyword[return] identifier[axes]
def plot_composition(df, intervals, axes=None): """ Plot time series of generics and label underlying instruments which these series are composed of. Parameters: ----------- df: pd.DataFrame DataFrame of time series to be plotted. Each column is a generic time series. intervals: pd.DataFrame A DataFrame including information for when each contract is used in the generic series. Columns are['contract', 'generic', 'start_date', 'end_date'] axes: list List of matplotlib.axes.Axes Example ------- >>> import mapping.plot as mplot >>> import pandas as pd >>> from pandas import Timestamp as TS >>> idx = pd.date_range("2017-01-01", "2017-01-15") >>> rets_data = pd.np.random.randn(len(idx)) >>> rets = pd.DataFrame({"CL1": rets_data, "CL2": rets_data}, index=idx) >>> intervals = pd.DataFrame( ... [(TS("2017-01-01"), TS("2017-01-05"), "2017_CL_F", "CL1"), ... (TS("2017-01-05"), TS("2017-01-15"), "2017_CL_G", "CL1"), ... (TS("2017-01-01"), TS("2017-01-12"), "2017_CL_G", "CL2"), ... (TS("2017-01-10"), TS("2017-01-15"), "2017_CL_H", "CL2")], ... 
columns=["start_date", "end_date", "contract", "generic"]) >>> mplot.plot_composition(rets, intervals) """ generics = df.columns if axes is not None and len(axes) != len(generics): raise ValueError("If 'axes' is not None then it must be the same length as 'df.columns'") # depends on [control=['if'], data=[]] if axes is None: (_, axes) = plt.subplots(nrows=len(generics), ncols=1) if len(generics) == 1: axes = [axes] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['axes']] for (ax, generic) in zip(axes, generics): ax.plot(df.loc[:, generic], label=generic) # no legend line to avoid clutter ax.legend(loc='center right', handlelength=0) dates = intervals.loc[intervals.loc[:, 'generic'] == generic, ['start_date', 'end_date', 'contract']] date_ticks = set(dates.loc[:, 'start_date'].tolist() + dates.loc[:, 'end_date'].tolist()) xticks = [ts.toordinal() for ts in date_ticks] xlabels = [ts.strftime('%Y-%m-%d') for ts in date_ticks] ax.set_xticks(xticks) ax.set_xticklabels(xlabels) y_top = ax.get_ylim()[1] count = 0 # label and colour each underlying for (_, dt1, dt2, instr) in dates.itertuples(): if count % 2: fc = 'b' # depends on [control=['if'], data=[]] else: fc = 'r' count += 1 ax.axvspan(dt1, dt2, facecolor=fc, alpha=0.2) x_mid = dt1 + (dt2 - dt1) / 2 ax.text(x_mid, y_top, instr, rotation=45) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return axes
def _has_integer_used_as_pointers(self): """ Test if there is any (suspicious) pointer decryption in the code. :return: True if there is any pointer decryption, False otherwise. :rtype: bool """ # check all integer accesses and see if there is any integer being used as a pointer later, but it wasn't # classified as a pointer reference # we only care about unknown memory data that are 4 bytes long, and is directly referenced from an IRSB candidates = [ i for i in self.cfg.memory_data.values() if i.sort in ('unknown', 'integer') and i.size == self.project.arch.bytes and i.irsb_addr is not None ] if not candidates: return False for candidate in candidates: # if the candidate is in .bss, we don't care about it sec = self.cfg.project.loader.find_section_containing(candidate.address) if sec.name in ('.bss', '.got.plt'): continue # execute the single basic block and see how the value is used base_graph = networkx.DiGraph() candidate_node = self.cfg.get_any_node(candidate.irsb_addr) # type: angr.analyses.cfg_node.CFGNode if candidate_node is None: continue base_graph.add_node(candidate_node) tmp_kb = KnowledgeBase(self.project) cfg = self.project.analyses.CFGEmulated(kb=tmp_kb, starts=(candidate.irsb_addr,), keep_state=True, base_graph=base_graph ) candidate_irsb = cfg.get_any_irsb(candidate.irsb_addr) # type: SimIRSB ddg = self.project.analyses.DDG(kb=tmp_kb, cfg=cfg) mem_var_node = None for node in ddg.simplified_data_graph.nodes(): if isinstance(node.variable, SimMemoryVariable) and node.location.ins_addr == candidate.insn_addr: # found it! mem_var_node = node break else: # mem_var_node is not found continue # get a sub graph subgraph = ddg.data_sub_graph(mem_var_node, simplified=False, killing_edges=False, excluding_types={'mem_addr'}, ) # is it used as a memory address anywhere? # TODO: # is it used as a jump target? 
next_tmp = None if isinstance(candidate_irsb.irsb.next, pyvex.IRExpr.RdTmp): next_tmp = candidate_irsb.irsb.next.tmp if next_tmp is not None: next_tmp_node = next((node for node in subgraph.nodes() if isinstance(node.variable, SimTemporaryVariable) and node.variable.tmp_id == next_tmp), None ) if next_tmp_node is not None: # ouch it's used as a jump target return True return False
def function[_has_integer_used_as_pointers, parameter[self]]: constant[ Test if there is any (suspicious) pointer decryption in the code. :return: True if there is any pointer decryption, False otherwise. :rtype: bool ] variable[candidates] assign[=] <ast.ListComp object at 0x7da20c6c4820> if <ast.UnaryOp object at 0x7da20c6c5c60> begin[:] return[constant[False]] for taget[name[candidate]] in starred[name[candidates]] begin[:] variable[sec] assign[=] call[name[self].cfg.project.loader.find_section_containing, parameter[name[candidate].address]] if compare[name[sec].name in tuple[[<ast.Constant object at 0x7da20c6c7610>, <ast.Constant object at 0x7da20c6c5600>]]] begin[:] continue variable[base_graph] assign[=] call[name[networkx].DiGraph, parameter[]] variable[candidate_node] assign[=] call[name[self].cfg.get_any_node, parameter[name[candidate].irsb_addr]] if compare[name[candidate_node] is constant[None]] begin[:] continue call[name[base_graph].add_node, parameter[name[candidate_node]]] variable[tmp_kb] assign[=] call[name[KnowledgeBase], parameter[name[self].project]] variable[cfg] assign[=] call[name[self].project.analyses.CFGEmulated, parameter[]] variable[candidate_irsb] assign[=] call[name[cfg].get_any_irsb, parameter[name[candidate].irsb_addr]] variable[ddg] assign[=] call[name[self].project.analyses.DDG, parameter[]] variable[mem_var_node] assign[=] constant[None] for taget[name[node]] in starred[call[name[ddg].simplified_data_graph.nodes, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20c6c4ca0> begin[:] variable[mem_var_node] assign[=] name[node] break variable[subgraph] assign[=] call[name[ddg].data_sub_graph, parameter[name[mem_var_node]]] variable[next_tmp] assign[=] constant[None] if call[name[isinstance], parameter[name[candidate_irsb].irsb.next, name[pyvex].IRExpr.RdTmp]] begin[:] variable[next_tmp] assign[=] name[candidate_irsb].irsb.next.tmp if compare[name[next_tmp] is_not constant[None]] begin[:] variable[next_tmp_node] assign[=] 
call[name[next], parameter[<ast.GeneratorExp object at 0x7da20c6c4f70>, constant[None]]] if compare[name[next_tmp_node] is_not constant[None]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[_has_integer_used_as_pointers] ( identifier[self] ): literal[string] identifier[candidates] =[ identifier[i] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[cfg] . identifier[memory_data] . identifier[values] () keyword[if] identifier[i] . identifier[sort] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[i] . identifier[size] == identifier[self] . identifier[project] . identifier[arch] . identifier[bytes] keyword[and] identifier[i] . identifier[irsb_addr] keyword[is] keyword[not] keyword[None] ] keyword[if] keyword[not] identifier[candidates] : keyword[return] keyword[False] keyword[for] identifier[candidate] keyword[in] identifier[candidates] : identifier[sec] = identifier[self] . identifier[cfg] . identifier[project] . identifier[loader] . identifier[find_section_containing] ( identifier[candidate] . identifier[address] ) keyword[if] identifier[sec] . identifier[name] keyword[in] ( literal[string] , literal[string] ): keyword[continue] identifier[base_graph] = identifier[networkx] . identifier[DiGraph] () identifier[candidate_node] = identifier[self] . identifier[cfg] . identifier[get_any_node] ( identifier[candidate] . identifier[irsb_addr] ) keyword[if] identifier[candidate_node] keyword[is] keyword[None] : keyword[continue] identifier[base_graph] . identifier[add_node] ( identifier[candidate_node] ) identifier[tmp_kb] = identifier[KnowledgeBase] ( identifier[self] . identifier[project] ) identifier[cfg] = identifier[self] . identifier[project] . identifier[analyses] . identifier[CFGEmulated] ( identifier[kb] = identifier[tmp_kb] , identifier[starts] =( identifier[candidate] . identifier[irsb_addr] ,), identifier[keep_state] = keyword[True] , identifier[base_graph] = identifier[base_graph] ) identifier[candidate_irsb] = identifier[cfg] . identifier[get_any_irsb] ( identifier[candidate] . identifier[irsb_addr] ) identifier[ddg] = identifier[self] . identifier[project] . identifier[analyses] . 
identifier[DDG] ( identifier[kb] = identifier[tmp_kb] , identifier[cfg] = identifier[cfg] ) identifier[mem_var_node] = keyword[None] keyword[for] identifier[node] keyword[in] identifier[ddg] . identifier[simplified_data_graph] . identifier[nodes] (): keyword[if] identifier[isinstance] ( identifier[node] . identifier[variable] , identifier[SimMemoryVariable] ) keyword[and] identifier[node] . identifier[location] . identifier[ins_addr] == identifier[candidate] . identifier[insn_addr] : identifier[mem_var_node] = identifier[node] keyword[break] keyword[else] : keyword[continue] identifier[subgraph] = identifier[ddg] . identifier[data_sub_graph] ( identifier[mem_var_node] , identifier[simplified] = keyword[False] , identifier[killing_edges] = keyword[False] , identifier[excluding_types] ={ literal[string] }, ) identifier[next_tmp] = keyword[None] keyword[if] identifier[isinstance] ( identifier[candidate_irsb] . identifier[irsb] . identifier[next] , identifier[pyvex] . identifier[IRExpr] . identifier[RdTmp] ): identifier[next_tmp] = identifier[candidate_irsb] . identifier[irsb] . identifier[next] . identifier[tmp] keyword[if] identifier[next_tmp] keyword[is] keyword[not] keyword[None] : identifier[next_tmp_node] = identifier[next] (( identifier[node] keyword[for] identifier[node] keyword[in] identifier[subgraph] . identifier[nodes] () keyword[if] identifier[isinstance] ( identifier[node] . identifier[variable] , identifier[SimTemporaryVariable] ) keyword[and] identifier[node] . identifier[variable] . identifier[tmp_id] == identifier[next_tmp] ), keyword[None] ) keyword[if] identifier[next_tmp_node] keyword[is] keyword[not] keyword[None] : keyword[return] keyword[True] keyword[return] keyword[False]
def _has_integer_used_as_pointers(self): """ Test if there is any (suspicious) pointer decryption in the code. :return: True if there is any pointer decryption, False otherwise. :rtype: bool """ # check all integer accesses and see if there is any integer being used as a pointer later, but it wasn't # classified as a pointer reference # we only care about unknown memory data that are 4 bytes long, and is directly referenced from an IRSB candidates = [i for i in self.cfg.memory_data.values() if i.sort in ('unknown', 'integer') and i.size == self.project.arch.bytes and (i.irsb_addr is not None)] if not candidates: return False # depends on [control=['if'], data=[]] for candidate in candidates: # if the candidate is in .bss, we don't care about it sec = self.cfg.project.loader.find_section_containing(candidate.address) if sec.name in ('.bss', '.got.plt'): continue # depends on [control=['if'], data=[]] # execute the single basic block and see how the value is used base_graph = networkx.DiGraph() candidate_node = self.cfg.get_any_node(candidate.irsb_addr) # type: angr.analyses.cfg_node.CFGNode if candidate_node is None: continue # depends on [control=['if'], data=[]] base_graph.add_node(candidate_node) tmp_kb = KnowledgeBase(self.project) cfg = self.project.analyses.CFGEmulated(kb=tmp_kb, starts=(candidate.irsb_addr,), keep_state=True, base_graph=base_graph) candidate_irsb = cfg.get_any_irsb(candidate.irsb_addr) # type: SimIRSB ddg = self.project.analyses.DDG(kb=tmp_kb, cfg=cfg) mem_var_node = None for node in ddg.simplified_data_graph.nodes(): if isinstance(node.variable, SimMemoryVariable) and node.location.ins_addr == candidate.insn_addr: # found it! 
mem_var_node = node break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] else: # mem_var_node is not found continue # get a sub graph subgraph = ddg.data_sub_graph(mem_var_node, simplified=False, killing_edges=False, excluding_types={'mem_addr'}) # is it used as a memory address anywhere? # TODO: # is it used as a jump target? next_tmp = None if isinstance(candidate_irsb.irsb.next, pyvex.IRExpr.RdTmp): next_tmp = candidate_irsb.irsb.next.tmp # depends on [control=['if'], data=[]] if next_tmp is not None: next_tmp_node = next((node for node in subgraph.nodes() if isinstance(node.variable, SimTemporaryVariable) and node.variable.tmp_id == next_tmp), None) if next_tmp_node is not None: # ouch it's used as a jump target return True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['next_tmp']] # depends on [control=['for'], data=['candidate']] return False
def _read_map(ctx: ReaderContext) -> lmap.Map: """Return a map from the input stream.""" reader = ctx.reader start = reader.advance() assert start == "{" d: MutableMapping[Any, Any] = {} while True: if reader.peek() == "}": reader.next_token() break k = _read_next(ctx) if k is COMMENT: continue while True: if reader.peek() == "}": raise SyntaxError("Unexpected token '}'; expected map value") v = _read_next(ctx) if v is COMMENT: continue if k in d: raise SyntaxError(f"Duplicate key '{k}' in map literal") break d[k] = v return lmap.map(d)
def function[_read_map, parameter[ctx]]: constant[Return a map from the input stream.] variable[reader] assign[=] name[ctx].reader variable[start] assign[=] call[name[reader].advance, parameter[]] assert[compare[name[start] equal[==] constant[{]]] <ast.AnnAssign object at 0x7da1b0213820> while constant[True] begin[:] if compare[call[name[reader].peek, parameter[]] equal[==] constant[}]] begin[:] call[name[reader].next_token, parameter[]] break variable[k] assign[=] call[name[_read_next], parameter[name[ctx]]] if compare[name[k] is name[COMMENT]] begin[:] continue while constant[True] begin[:] if compare[call[name[reader].peek, parameter[]] equal[==] constant[}]] begin[:] <ast.Raise object at 0x7da1b0210c10> variable[v] assign[=] call[name[_read_next], parameter[name[ctx]]] if compare[name[v] is name[COMMENT]] begin[:] continue if compare[name[k] in name[d]] begin[:] <ast.Raise object at 0x7da1b0211660> break call[name[d]][name[k]] assign[=] name[v] return[call[name[lmap].map, parameter[name[d]]]]
keyword[def] identifier[_read_map] ( identifier[ctx] : identifier[ReaderContext] )-> identifier[lmap] . identifier[Map] : literal[string] identifier[reader] = identifier[ctx] . identifier[reader] identifier[start] = identifier[reader] . identifier[advance] () keyword[assert] identifier[start] == literal[string] identifier[d] : identifier[MutableMapping] [ identifier[Any] , identifier[Any] ]={} keyword[while] keyword[True] : keyword[if] identifier[reader] . identifier[peek] ()== literal[string] : identifier[reader] . identifier[next_token] () keyword[break] identifier[k] = identifier[_read_next] ( identifier[ctx] ) keyword[if] identifier[k] keyword[is] identifier[COMMENT] : keyword[continue] keyword[while] keyword[True] : keyword[if] identifier[reader] . identifier[peek] ()== literal[string] : keyword[raise] identifier[SyntaxError] ( literal[string] ) identifier[v] = identifier[_read_next] ( identifier[ctx] ) keyword[if] identifier[v] keyword[is] identifier[COMMENT] : keyword[continue] keyword[if] identifier[k] keyword[in] identifier[d] : keyword[raise] identifier[SyntaxError] ( literal[string] ) keyword[break] identifier[d] [ identifier[k] ]= identifier[v] keyword[return] identifier[lmap] . identifier[map] ( identifier[d] )
def _read_map(ctx: ReaderContext) -> lmap.Map: """Return a map from the input stream.""" reader = ctx.reader start = reader.advance() assert start == '{' d: MutableMapping[Any, Any] = {} while True: if reader.peek() == '}': reader.next_token() break # depends on [control=['if'], data=[]] k = _read_next(ctx) if k is COMMENT: continue # depends on [control=['if'], data=[]] while True: if reader.peek() == '}': raise SyntaxError("Unexpected token '}'; expected map value") # depends on [control=['if'], data=[]] v = _read_next(ctx) if v is COMMENT: continue # depends on [control=['if'], data=[]] if k in d: raise SyntaxError(f"Duplicate key '{k}' in map literal") # depends on [control=['if'], data=['k']] break # depends on [control=['while'], data=[]] d[k] = v # depends on [control=['while'], data=[]] return lmap.map(d)
def run(self, args): """Runs the firmware command. Args: self (FirmwareCommand): the ``FirmwareCommand`` instance args (Namespace): arguments to parse Returns: ``None`` """ jlink = self.create_jlink(args) if args.downgrade: if not jlink.firmware_newer(): print('DLL firmware is not older than J-Link firmware.') else: jlink.invalidate_firmware() try: # Change to the firmware of the connected DLL. jlink.update_firmware() except pylink.JLinkException as e: # On J-Link versions < 5.0.0, an exception will be thrown as # the connection will be lost, so we have to re-establish. jlink = self.create_jlink(args) print('Firmware Downgraded: %s' % jlink.firmware_version) elif args.upgrade: if not jlink.firmware_outdated(): print('DLL firmware is not newer than J-Link firmware.') else: try: # Upgrade the firmware. jlink.update_firmware() except pylink.JLinkException as e: # On J-Link versions < 5.0.0, an exception will be thrown as # the connection will be lost, so we have to re-establish. jlink = self.create_jlink(args) print('Firmware Updated: %s' % jlink.firmware_version) return None
def function[run, parameter[self, args]]: constant[Runs the firmware command. Args: self (FirmwareCommand): the ``FirmwareCommand`` instance args (Namespace): arguments to parse Returns: ``None`` ] variable[jlink] assign[=] call[name[self].create_jlink, parameter[name[args]]] if name[args].downgrade begin[:] if <ast.UnaryOp object at 0x7da204623400> begin[:] call[name[print], parameter[constant[DLL firmware is not older than J-Link firmware.]]] return[constant[None]]
keyword[def] identifier[run] ( identifier[self] , identifier[args] ): literal[string] identifier[jlink] = identifier[self] . identifier[create_jlink] ( identifier[args] ) keyword[if] identifier[args] . identifier[downgrade] : keyword[if] keyword[not] identifier[jlink] . identifier[firmware_newer] (): identifier[print] ( literal[string] ) keyword[else] : identifier[jlink] . identifier[invalidate_firmware] () keyword[try] : identifier[jlink] . identifier[update_firmware] () keyword[except] identifier[pylink] . identifier[JLinkException] keyword[as] identifier[e] : identifier[jlink] = identifier[self] . identifier[create_jlink] ( identifier[args] ) identifier[print] ( literal[string] % identifier[jlink] . identifier[firmware_version] ) keyword[elif] identifier[args] . identifier[upgrade] : keyword[if] keyword[not] identifier[jlink] . identifier[firmware_outdated] (): identifier[print] ( literal[string] ) keyword[else] : keyword[try] : identifier[jlink] . identifier[update_firmware] () keyword[except] identifier[pylink] . identifier[JLinkException] keyword[as] identifier[e] : identifier[jlink] = identifier[self] . identifier[create_jlink] ( identifier[args] ) identifier[print] ( literal[string] % identifier[jlink] . identifier[firmware_version] ) keyword[return] keyword[None]
def run(self, args): """Runs the firmware command. Args: self (FirmwareCommand): the ``FirmwareCommand`` instance args (Namespace): arguments to parse Returns: ``None`` """ jlink = self.create_jlink(args) if args.downgrade: if not jlink.firmware_newer(): print('DLL firmware is not older than J-Link firmware.') # depends on [control=['if'], data=[]] else: jlink.invalidate_firmware() try: # Change to the firmware of the connected DLL. jlink.update_firmware() # depends on [control=['try'], data=[]] except pylink.JLinkException as e: # On J-Link versions < 5.0.0, an exception will be thrown as # the connection will be lost, so we have to re-establish. jlink = self.create_jlink(args) # depends on [control=['except'], data=[]] print('Firmware Downgraded: %s' % jlink.firmware_version) # depends on [control=['if'], data=[]] elif args.upgrade: if not jlink.firmware_outdated(): print('DLL firmware is not newer than J-Link firmware.') # depends on [control=['if'], data=[]] else: try: # Upgrade the firmware. jlink.update_firmware() # depends on [control=['try'], data=[]] except pylink.JLinkException as e: # On J-Link versions < 5.0.0, an exception will be thrown as # the connection will be lost, so we have to re-establish. jlink = self.create_jlink(args) # depends on [control=['except'], data=[]] print('Firmware Updated: %s' % jlink.firmware_version) # depends on [control=['if'], data=[]] return None
def damping(self): """Strain-compatible damping.""" try: value = self._damping.value except AttributeError: value = self._damping return value
def function[damping, parameter[self]]: constant[Strain-compatible damping.] <ast.Try object at 0x7da1b2487580> return[name[value]]
keyword[def] identifier[damping] ( identifier[self] ): literal[string] keyword[try] : identifier[value] = identifier[self] . identifier[_damping] . identifier[value] keyword[except] identifier[AttributeError] : identifier[value] = identifier[self] . identifier[_damping] keyword[return] identifier[value]
def damping(self): """Strain-compatible damping.""" try: value = self._damping.value # depends on [control=['try'], data=[]] except AttributeError: value = self._damping # depends on [control=['except'], data=[]] return value
def find_records(self, check, keys=None): """Find records matching a query dict, optionally extracting subset of keys. Returns dict keyed by msg_id of matching records. Parameters ---------- check: dict mongodb-style query argument keys: list of strs [optional] if specified, the subset of keys to extract. msg_id will *always* be included. """ matches = self._match(check) if keys: return [ self._extract_subdict(rec, keys) for rec in matches ] else: return matches
def function[find_records, parameter[self, check, keys]]: constant[Find records matching a query dict, optionally extracting subset of keys. Returns dict keyed by msg_id of matching records. Parameters ---------- check: dict mongodb-style query argument keys: list of strs [optional] if specified, the subset of keys to extract. msg_id will *always* be included. ] variable[matches] assign[=] call[name[self]._match, parameter[name[check]]] if name[keys] begin[:] return[<ast.ListComp object at 0x7da18ede6c80>]
keyword[def] identifier[find_records] ( identifier[self] , identifier[check] , identifier[keys] = keyword[None] ): literal[string] identifier[matches] = identifier[self] . identifier[_match] ( identifier[check] ) keyword[if] identifier[keys] : keyword[return] [ identifier[self] . identifier[_extract_subdict] ( identifier[rec] , identifier[keys] ) keyword[for] identifier[rec] keyword[in] identifier[matches] ] keyword[else] : keyword[return] identifier[matches]
def find_records(self, check, keys=None): """Find records matching a query dict, optionally extracting subset of keys. Returns dict keyed by msg_id of matching records. Parameters ---------- check: dict mongodb-style query argument keys: list of strs [optional] if specified, the subset of keys to extract. msg_id will *always* be included. """ matches = self._match(check) if keys: return [self._extract_subdict(rec, keys) for rec in matches] # depends on [control=['if'], data=[]] else: return matches
def enums(): """ Return the dictionary of H₂O enums, retrieved from data in schemas(). For each entry in the dictionary its key is the name of the enum, and the value is the set of all enum values. """ enumset = defaultdict(set) for schema in schemas(): for field in schema["fields"]: if field["type"] == "enum": enumset[field["schema_name"]].update(field["values"]) return enumset
def function[enums, parameter[]]: constant[ Return the dictionary of H₂O enums, retrieved from data in schemas(). For each entry in the dictionary its key is the name of the enum, and the value is the set of all enum values. ] variable[enumset] assign[=] call[name[defaultdict], parameter[name[set]]] for taget[name[schema]] in starred[call[name[schemas], parameter[]]] begin[:] for taget[name[field]] in starred[call[name[schema]][constant[fields]]] begin[:] if compare[call[name[field]][constant[type]] equal[==] constant[enum]] begin[:] call[call[name[enumset]][call[name[field]][constant[schema_name]]].update, parameter[call[name[field]][constant[values]]]] return[name[enumset]]
keyword[def] identifier[enums] (): literal[string] identifier[enumset] = identifier[defaultdict] ( identifier[set] ) keyword[for] identifier[schema] keyword[in] identifier[schemas] (): keyword[for] identifier[field] keyword[in] identifier[schema] [ literal[string] ]: keyword[if] identifier[field] [ literal[string] ]== literal[string] : identifier[enumset] [ identifier[field] [ literal[string] ]]. identifier[update] ( identifier[field] [ literal[string] ]) keyword[return] identifier[enumset]
def enums(): """ Return the dictionary of H₂O enums, retrieved from data in schemas(). For each entry in the dictionary its key is the name of the enum, and the value is the set of all enum values. """ enumset = defaultdict(set) for schema in schemas(): for field in schema['fields']: if field['type'] == 'enum': enumset[field['schema_name']].update(field['values']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # depends on [control=['for'], data=['schema']] return enumset
def _maybe_cache(arg, format, cache, convert_listlike): """ Create a cache of unique dates from an array of dates Parameters ---------- arg : integer, float, string, datetime, list, tuple, 1-d array, Series format : string Strftime format to parse time cache : boolean True attempts to create a cache of converted values convert_listlike : function Conversion function to apply on dates Returns ------- cache_array : Series Cache of converted, unique dates. Can be empty """ from pandas import Series cache_array = Series() if cache: # Perform a quicker unique check from pandas import Index unique_dates = Index(arg).unique() if len(unique_dates) < len(arg): cache_dates = convert_listlike(unique_dates.to_numpy(), True, format) cache_array = Series(cache_dates, index=unique_dates) return cache_array
def function[_maybe_cache, parameter[arg, format, cache, convert_listlike]]: constant[ Create a cache of unique dates from an array of dates Parameters ---------- arg : integer, float, string, datetime, list, tuple, 1-d array, Series format : string Strftime format to parse time cache : boolean True attempts to create a cache of converted values convert_listlike : function Conversion function to apply on dates Returns ------- cache_array : Series Cache of converted, unique dates. Can be empty ] from relative_module[pandas] import module[Series] variable[cache_array] assign[=] call[name[Series], parameter[]] if name[cache] begin[:] from relative_module[pandas] import module[Index] variable[unique_dates] assign[=] call[call[name[Index], parameter[name[arg]]].unique, parameter[]] if compare[call[name[len], parameter[name[unique_dates]]] less[<] call[name[len], parameter[name[arg]]]] begin[:] variable[cache_dates] assign[=] call[name[convert_listlike], parameter[call[name[unique_dates].to_numpy, parameter[]], constant[True], name[format]]] variable[cache_array] assign[=] call[name[Series], parameter[name[cache_dates]]] return[name[cache_array]]
keyword[def] identifier[_maybe_cache] ( identifier[arg] , identifier[format] , identifier[cache] , identifier[convert_listlike] ): literal[string] keyword[from] identifier[pandas] keyword[import] identifier[Series] identifier[cache_array] = identifier[Series] () keyword[if] identifier[cache] : keyword[from] identifier[pandas] keyword[import] identifier[Index] identifier[unique_dates] = identifier[Index] ( identifier[arg] ). identifier[unique] () keyword[if] identifier[len] ( identifier[unique_dates] )< identifier[len] ( identifier[arg] ): identifier[cache_dates] = identifier[convert_listlike] ( identifier[unique_dates] . identifier[to_numpy] (), keyword[True] , identifier[format] ) identifier[cache_array] = identifier[Series] ( identifier[cache_dates] , identifier[index] = identifier[unique_dates] ) keyword[return] identifier[cache_array]
def _maybe_cache(arg, format, cache, convert_listlike): """ Create a cache of unique dates from an array of dates Parameters ---------- arg : integer, float, string, datetime, list, tuple, 1-d array, Series format : string Strftime format to parse time cache : boolean True attempts to create a cache of converted values convert_listlike : function Conversion function to apply on dates Returns ------- cache_array : Series Cache of converted, unique dates. Can be empty """ from pandas import Series cache_array = Series() if cache: # Perform a quicker unique check from pandas import Index unique_dates = Index(arg).unique() if len(unique_dates) < len(arg): cache_dates = convert_listlike(unique_dates.to_numpy(), True, format) cache_array = Series(cache_dates, index=unique_dates) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return cache_array
def p_state_action_constraint_section(self, p): '''state_action_constraint_section : STATE_ACTION_CONSTRAINTS LCURLY state_cons_list RCURLY SEMI | STATE_ACTION_CONSTRAINTS LCURLY RCURLY SEMI''' if len(p) == 6: p[0] = ('constraints', p[3]) elif len(p) == 5: p[0] = ('constraints', []) self._print_verbose('state-action-constraints')
def function[p_state_action_constraint_section, parameter[self, p]]: constant[state_action_constraint_section : STATE_ACTION_CONSTRAINTS LCURLY state_cons_list RCURLY SEMI | STATE_ACTION_CONSTRAINTS LCURLY RCURLY SEMI] if compare[call[name[len], parameter[name[p]]] equal[==] constant[6]] begin[:] call[name[p]][constant[0]] assign[=] tuple[[<ast.Constant object at 0x7da1b0966f20>, <ast.Subscript object at 0x7da1b0965e70>]] call[name[self]._print_verbose, parameter[constant[state-action-constraints]]]
keyword[def] identifier[p_state_action_constraint_section] ( identifier[self] , identifier[p] ): literal[string] keyword[if] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]=( literal[string] , identifier[p] [ literal[int] ]) keyword[elif] identifier[len] ( identifier[p] )== literal[int] : identifier[p] [ literal[int] ]=( literal[string] ,[]) identifier[self] . identifier[_print_verbose] ( literal[string] )
def p_state_action_constraint_section(self, p): """state_action_constraint_section : STATE_ACTION_CONSTRAINTS LCURLY state_cons_list RCURLY SEMI | STATE_ACTION_CONSTRAINTS LCURLY RCURLY SEMI""" if len(p) == 6: p[0] = ('constraints', p[3]) # depends on [control=['if'], data=[]] elif len(p) == 5: p[0] = ('constraints', []) # depends on [control=['if'], data=[]] self._print_verbose('state-action-constraints')
def remove_tmp_dir(self, directory): """ Remove the directory if it is located in /tmp """ if(not directory.startswith('/tmp/')): print('Directory not in /tmp') exit() print('Deleting directory: ' + directory) shutil.rmtree(directory)
def function[remove_tmp_dir, parameter[self, directory]]: constant[ Remove the directory if it is located in /tmp ] if <ast.UnaryOp object at 0x7da18fe91a20> begin[:] call[name[print], parameter[constant[Directory not in /tmp]]] call[name[exit], parameter[]] call[name[print], parameter[binary_operation[constant[Deleting directory: ] + name[directory]]]] call[name[shutil].rmtree, parameter[name[directory]]]
keyword[def] identifier[remove_tmp_dir] ( identifier[self] , identifier[directory] ): literal[string] keyword[if] ( keyword[not] identifier[directory] . identifier[startswith] ( literal[string] )): identifier[print] ( literal[string] ) identifier[exit] () identifier[print] ( literal[string] + identifier[directory] ) identifier[shutil] . identifier[rmtree] ( identifier[directory] )
def remove_tmp_dir(self, directory): """ Remove the directory if it is located in /tmp """ if not directory.startswith('/tmp/'): print('Directory not in /tmp') exit() # depends on [control=['if'], data=[]] print('Deleting directory: ' + directory) shutil.rmtree(directory)
def Latex( formula, pos=(0, 0, 0), normal=(0, 0, 1), c='k', s=1, bg=None, alpha=1, res=30, usetex=False, fromweb=False, ): """ Render Latex formulas. :param str formula: latex text string :param list pos: position coordinates in space :param list normal: normal to the plane of the image :param c: face color :param bg: background color box :param int res: dpi resolution :param bool usetex: use latex compiler of matplotlib :param fromweb: retrieve the latex image from online server (codecogs) .. hint:: |latex| |latex.py|_ """ try: #def _Latex(formula, pos, normal, c, s, bg, alpha, res, usetex, fromweb): def build_img_web(formula, tfile): import requests if c == 'k': ct = 'Black' else: ct = 'White' wsite = 'http://latex.codecogs.com/png.latex' try: r = requests.get(wsite+'?\dpi{100} \huge \color{'+ct+'} ' + formula) f = open(tfile, 'wb') f.write(r.content) f.close() except requests.exceptions.ConnectionError: colors.printc('Latex error. Web site unavailable?', wsite, c=1) return None def build_img_plt(formula, tfile): import matplotlib.pyplot as plt plt.rc('text', usetex=usetex) formula1 = '$'+formula+'$' plt.axis('off') col = colors.getColor(c) if bg: bx = dict(boxstyle="square", ec=col, fc=colors.getColor(bg)) else: bx = None plt.text(0.5, 0.5, formula1, size=res, color=col, alpha=alpha, ha="center", va="center", bbox=bx) plt.savefig('_lateximg.png', format='png', transparent=True, bbox_inches='tight', pad_inches=0) plt.close() if fromweb: build_img_web(formula, '_lateximg.png') else: build_img_plt(formula, '_lateximg.png') from vtkplotter.actors import ImageActor picr = vtk.vtkPNGReader() picr.SetFileName('_lateximg.png') picr.Update() vactor = ImageActor() vactor.SetInputData(picr.GetOutput()) vactor.alpha(alpha) b = vactor.GetBounds() xm, ym = (b[1]+b[0])/200*s, (b[3]+b[2])/200*s vactor.SetOrigin(-xm, -ym, 0) nax = np.linalg.norm(normal) if nax: normal = np.array(normal) / nax theta = np.arccos(normal[2]) phi = np.arctan2(normal[1], normal[0]) 
vactor.SetScale(0.25/res*s, 0.25/res*s, 0.25/res*s) vactor.RotateZ(phi * 57.3) vactor.RotateY(theta * 57.3) vactor.SetPosition(pos) try: import os os.unlink('_lateximg.png') except FileNotFoundError: pass return vactor except: colors.printc('Error in Latex()\n', formula, c=1) colors.printc(' latex or dvipng not installed?', c=1) colors.printc(' Try: usetex=False' , c=1) colors.printc(' Try: sudo apt install dvipng' , c=1) return None
def function[Latex, parameter[formula, pos, normal, c, s, bg, alpha, res, usetex, fromweb]]: constant[ Render Latex formulas. :param str formula: latex text string :param list pos: position coordinates in space :param list normal: normal to the plane of the image :param c: face color :param bg: background color box :param int res: dpi resolution :param bool usetex: use latex compiler of matplotlib :param fromweb: retrieve the latex image from online server (codecogs) .. hint:: |latex| |latex.py|_ ] <ast.Try object at 0x7da20e956590>
keyword[def] identifier[Latex] ( identifier[formula] , identifier[pos] =( literal[int] , literal[int] , literal[int] ), identifier[normal] =( literal[int] , literal[int] , literal[int] ), identifier[c] = literal[string] , identifier[s] = literal[int] , identifier[bg] = keyword[None] , identifier[alpha] = literal[int] , identifier[res] = literal[int] , identifier[usetex] = keyword[False] , identifier[fromweb] = keyword[False] , ): literal[string] keyword[try] : keyword[def] identifier[build_img_web] ( identifier[formula] , identifier[tfile] ): keyword[import] identifier[requests] keyword[if] identifier[c] == literal[string] : identifier[ct] = literal[string] keyword[else] : identifier[ct] = literal[string] identifier[wsite] = literal[string] keyword[try] : identifier[r] = identifier[requests] . identifier[get] ( identifier[wsite] + literal[string] + identifier[ct] + literal[string] + identifier[formula] ) identifier[f] = identifier[open] ( identifier[tfile] , literal[string] ) identifier[f] . identifier[write] ( identifier[r] . identifier[content] ) identifier[f] . identifier[close] () keyword[except] identifier[requests] . identifier[exceptions] . identifier[ConnectionError] : identifier[colors] . identifier[printc] ( literal[string] , identifier[wsite] , identifier[c] = literal[int] ) keyword[return] keyword[None] keyword[def] identifier[build_img_plt] ( identifier[formula] , identifier[tfile] ): keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt] identifier[plt] . identifier[rc] ( literal[string] , identifier[usetex] = identifier[usetex] ) identifier[formula1] = literal[string] + identifier[formula] + literal[string] identifier[plt] . identifier[axis] ( literal[string] ) identifier[col] = identifier[colors] . identifier[getColor] ( identifier[c] ) keyword[if] identifier[bg] : identifier[bx] = identifier[dict] ( identifier[boxstyle] = literal[string] , identifier[ec] = identifier[col] , identifier[fc] = identifier[colors] . 
identifier[getColor] ( identifier[bg] )) keyword[else] : identifier[bx] = keyword[None] identifier[plt] . identifier[text] ( literal[int] , literal[int] , identifier[formula1] , identifier[size] = identifier[res] , identifier[color] = identifier[col] , identifier[alpha] = identifier[alpha] , identifier[ha] = literal[string] , identifier[va] = literal[string] , identifier[bbox] = identifier[bx] ) identifier[plt] . identifier[savefig] ( literal[string] , identifier[format] = literal[string] , identifier[transparent] = keyword[True] , identifier[bbox_inches] = literal[string] , identifier[pad_inches] = literal[int] ) identifier[plt] . identifier[close] () keyword[if] identifier[fromweb] : identifier[build_img_web] ( identifier[formula] , literal[string] ) keyword[else] : identifier[build_img_plt] ( identifier[formula] , literal[string] ) keyword[from] identifier[vtkplotter] . identifier[actors] keyword[import] identifier[ImageActor] identifier[picr] = identifier[vtk] . identifier[vtkPNGReader] () identifier[picr] . identifier[SetFileName] ( literal[string] ) identifier[picr] . identifier[Update] () identifier[vactor] = identifier[ImageActor] () identifier[vactor] . identifier[SetInputData] ( identifier[picr] . identifier[GetOutput] ()) identifier[vactor] . identifier[alpha] ( identifier[alpha] ) identifier[b] = identifier[vactor] . identifier[GetBounds] () identifier[xm] , identifier[ym] =( identifier[b] [ literal[int] ]+ identifier[b] [ literal[int] ])/ literal[int] * identifier[s] ,( identifier[b] [ literal[int] ]+ identifier[b] [ literal[int] ])/ literal[int] * identifier[s] identifier[vactor] . identifier[SetOrigin] (- identifier[xm] ,- identifier[ym] , literal[int] ) identifier[nax] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[normal] ) keyword[if] identifier[nax] : identifier[normal] = identifier[np] . identifier[array] ( identifier[normal] )/ identifier[nax] identifier[theta] = identifier[np] . 
identifier[arccos] ( identifier[normal] [ literal[int] ]) identifier[phi] = identifier[np] . identifier[arctan2] ( identifier[normal] [ literal[int] ], identifier[normal] [ literal[int] ]) identifier[vactor] . identifier[SetScale] ( literal[int] / identifier[res] * identifier[s] , literal[int] / identifier[res] * identifier[s] , literal[int] / identifier[res] * identifier[s] ) identifier[vactor] . identifier[RotateZ] ( identifier[phi] * literal[int] ) identifier[vactor] . identifier[RotateY] ( identifier[theta] * literal[int] ) identifier[vactor] . identifier[SetPosition] ( identifier[pos] ) keyword[try] : keyword[import] identifier[os] identifier[os] . identifier[unlink] ( literal[string] ) keyword[except] identifier[FileNotFoundError] : keyword[pass] keyword[return] identifier[vactor] keyword[except] : identifier[colors] . identifier[printc] ( literal[string] , identifier[formula] , identifier[c] = literal[int] ) identifier[colors] . identifier[printc] ( literal[string] , identifier[c] = literal[int] ) identifier[colors] . identifier[printc] ( literal[string] , identifier[c] = literal[int] ) identifier[colors] . identifier[printc] ( literal[string] , identifier[c] = literal[int] ) keyword[return] keyword[None]
def Latex(formula, pos=(0, 0, 0), normal=(0, 0, 1), c='k', s=1, bg=None, alpha=1, res=30, usetex=False, fromweb=False): """ Render Latex formulas. :param str formula: latex text string :param list pos: position coordinates in space :param list normal: normal to the plane of the image :param c: face color :param bg: background color box :param int res: dpi resolution :param bool usetex: use latex compiler of matplotlib :param fromweb: retrieve the latex image from online server (codecogs) .. hint:: |latex| |latex.py|_ """ try: #def _Latex(formula, pos, normal, c, s, bg, alpha, res, usetex, fromweb): def build_img_web(formula, tfile): import requests if c == 'k': ct = 'Black' # depends on [control=['if'], data=[]] else: ct = 'White' wsite = 'http://latex.codecogs.com/png.latex' try: r = requests.get(wsite + '?\\dpi{100} \\huge \\color{' + ct + '} ' + formula) f = open(tfile, 'wb') f.write(r.content) f.close() # depends on [control=['try'], data=[]] except requests.exceptions.ConnectionError: colors.printc('Latex error. 
Web site unavailable?', wsite, c=1) return None # depends on [control=['except'], data=[]] def build_img_plt(formula, tfile): import matplotlib.pyplot as plt plt.rc('text', usetex=usetex) formula1 = '$' + formula + '$' plt.axis('off') col = colors.getColor(c) if bg: bx = dict(boxstyle='square', ec=col, fc=colors.getColor(bg)) # depends on [control=['if'], data=[]] else: bx = None plt.text(0.5, 0.5, formula1, size=res, color=col, alpha=alpha, ha='center', va='center', bbox=bx) plt.savefig('_lateximg.png', format='png', transparent=True, bbox_inches='tight', pad_inches=0) plt.close() if fromweb: build_img_web(formula, '_lateximg.png') # depends on [control=['if'], data=[]] else: build_img_plt(formula, '_lateximg.png') from vtkplotter.actors import ImageActor picr = vtk.vtkPNGReader() picr.SetFileName('_lateximg.png') picr.Update() vactor = ImageActor() vactor.SetInputData(picr.GetOutput()) vactor.alpha(alpha) b = vactor.GetBounds() (xm, ym) = ((b[1] + b[0]) / 200 * s, (b[3] + b[2]) / 200 * s) vactor.SetOrigin(-xm, -ym, 0) nax = np.linalg.norm(normal) if nax: normal = np.array(normal) / nax # depends on [control=['if'], data=[]] theta = np.arccos(normal[2]) phi = np.arctan2(normal[1], normal[0]) vactor.SetScale(0.25 / res * s, 0.25 / res * s, 0.25 / res * s) vactor.RotateZ(phi * 57.3) vactor.RotateY(theta * 57.3) vactor.SetPosition(pos) try: import os os.unlink('_lateximg.png') # depends on [control=['try'], data=[]] except FileNotFoundError: pass # depends on [control=['except'], data=[]] return vactor # depends on [control=['try'], data=[]] except: colors.printc('Error in Latex()\n', formula, c=1) colors.printc(' latex or dvipng not installed?', c=1) colors.printc(' Try: usetex=False', c=1) colors.printc(' Try: sudo apt install dvipng', c=1) return None # depends on [control=['except'], data=[]]
def insertRows(self, row, count, parent=QModelIndex()): """ Reimplements the :meth:`QAbstractItemModel.insertRows` method. :param row: Row. :type row: int :param count: Count. :type count: int :param parent: Parent. :type parent: QModelIndex :return: Method success. :rtype: bool """ parent_node = self.get_node(parent) self.beginInsertRows(parent, row, row + count - 1) success = True for i in range(count): childNode = self.__default_node() success *= True if parent_node.insert_child(childNode, row) else False self.endInsertRows() return success
def function[insertRows, parameter[self, row, count, parent]]: constant[ Reimplements the :meth:`QAbstractItemModel.insertRows` method. :param row: Row. :type row: int :param count: Count. :type count: int :param parent: Parent. :type parent: QModelIndex :return: Method success. :rtype: bool ] variable[parent_node] assign[=] call[name[self].get_node, parameter[name[parent]]] call[name[self].beginInsertRows, parameter[name[parent], name[row], binary_operation[binary_operation[name[row] + name[count]] - constant[1]]]] variable[success] assign[=] constant[True] for taget[name[i]] in starred[call[name[range], parameter[name[count]]]] begin[:] variable[childNode] assign[=] call[name[self].__default_node, parameter[]] <ast.AugAssign object at 0x7da1b09e8160> call[name[self].endInsertRows, parameter[]] return[name[success]]
keyword[def] identifier[insertRows] ( identifier[self] , identifier[row] , identifier[count] , identifier[parent] = identifier[QModelIndex] ()): literal[string] identifier[parent_node] = identifier[self] . identifier[get_node] ( identifier[parent] ) identifier[self] . identifier[beginInsertRows] ( identifier[parent] , identifier[row] , identifier[row] + identifier[count] - literal[int] ) identifier[success] = keyword[True] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[count] ): identifier[childNode] = identifier[self] . identifier[__default_node] () identifier[success] *= keyword[True] keyword[if] identifier[parent_node] . identifier[insert_child] ( identifier[childNode] , identifier[row] ) keyword[else] keyword[False] identifier[self] . identifier[endInsertRows] () keyword[return] identifier[success]
def insertRows(self, row, count, parent=QModelIndex()): """ Reimplements the :meth:`QAbstractItemModel.insertRows` method. :param row: Row. :type row: int :param count: Count. :type count: int :param parent: Parent. :type parent: QModelIndex :return: Method success. :rtype: bool """ parent_node = self.get_node(parent) self.beginInsertRows(parent, row, row + count - 1) success = True for i in range(count): childNode = self.__default_node() success *= True if parent_node.insert_child(childNode, row) else False # depends on [control=['for'], data=[]] self.endInsertRows() return success