code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def dm_soundex(word, max_length=6, zero_pad=True):
    """Return the Daitch-Mokotoff Soundex code for a word.

    This is a wrapper for :py:meth:`DaitchMokotoff.encode`.

    Parameters
    ----------
    word : str
        The word to transform
    max_length : int
        The length of the code returned (defaults to 6; must be between 6 and 64)
    zero_pad : bool
        Pad the end of the return value with 0s to achieve a max_length string

    Returns
    -------
    str
        The Daitch-Mokotoff Soundex value

    Examples
    --------
    >>> sorted(dm_soundex('Christopher'))
    ['494379', '594379']
    >>> dm_soundex('Niall')
    {'680000'}
    >>> dm_soundex('Smith')
    {'463000'}
    >>> dm_soundex('Schmidt')
    {'463000'}

    >>> sorted(dm_soundex('The quick brown fox', max_length=20,
    ... zero_pad=False))
    ['35457976754', '3557976754']

    """
    # Delegate to a throwaway encoder instance; all the real work lives
    # in DaitchMokotoff.encode.
    encoder = DaitchMokotoff()
    return encoder.encode(word, max_length, zero_pad)
def function[dm_soundex, parameter[word, max_length, zero_pad]]: constant[Return the Daitch-Mokotoff Soundex code for a word. This is a wrapper for :py:meth:`DaitchMokotoff.encode`. Parameters ---------- word : str The word to transform max_length : int The length of the code returned (defaults to 6; must be between 6 and 64) zero_pad : bool Pad the end of the return value with 0s to achieve a max_length string Returns ------- str The Daitch-Mokotoff Soundex value Examples -------- >>> sorted(dm_soundex('Christopher')) ['494379', '594379'] >>> dm_soundex('Niall') {'680000'} >>> dm_soundex('Smith') {'463000'} >>> dm_soundex('Schmidt') {'463000'} >>> sorted(dm_soundex('The quick brown fox', max_length=20, ... zero_pad=False)) ['35457976754', '3557976754'] ] return[call[call[name[DaitchMokotoff], parameter[]].encode, parameter[name[word], name[max_length], name[zero_pad]]]]
keyword[def] identifier[dm_soundex] ( identifier[word] , identifier[max_length] = literal[int] , identifier[zero_pad] = keyword[True] ): literal[string] keyword[return] identifier[DaitchMokotoff] (). identifier[encode] ( identifier[word] , identifier[max_length] , identifier[zero_pad] )
def dm_soundex(word, max_length=6, zero_pad=True):
    """Return the Daitch-Mokotoff Soundex code for a word.

    Thin convenience wrapper around :py:meth:`DaitchMokotoff.encode`.

    Parameters
    ----------
    word : str
        The word to transform
    max_length : int
        The length of the code returned (defaults to 6; must be between 6 and 64)
    zero_pad : bool
        Pad the end of the return value with 0s to achieve a max_length string

    Returns
    -------
    str
        The Daitch-Mokotoff Soundex value

    Examples
    --------
    >>> sorted(dm_soundex('Christopher'))
    ['494379', '594379']
    >>> dm_soundex('Niall')
    {'680000'}
    >>> dm_soundex('Smith')
    {'463000'}
    >>> dm_soundex('Schmidt')
    {'463000'}

    >>> sorted(dm_soundex('The quick brown fox', max_length=20,
    ... zero_pad=False))
    ['35457976754', '3557976754']

    """
    dm = DaitchMokotoff()
    # Forward all three arguments untouched.
    return dm.encode(word, max_length, zero_pad)
def assumption_list_string(assumptions, assumption_dict):
    '''
    Takes in a list of short forms of assumptions and an assumption
    dictionary, and returns a "list" form of the long form of the
    assumptions.

    Parameters
    ----------
    assumptions : iterable of str
        Short-form assumption names; must not be a bare string.
    assumption_dict : dict
        Maps each short form to its long-form description.

    Raises
    ------
    TypeError
        if assumptions is itself a string rather than an iterable of strings.
    ValueError
        if one of the assumptions is not in assumption_dict.
    '''
    # Guard against a lone string, which would otherwise be silently
    # iterated character-by-character below.
    if isinstance(assumptions, six.string_types):
        raise TypeError('assumptions must be an iterable of strings, not a '
                        'string itself')
    for a in assumptions:
        # Membership test directly on the dict: same semantics as
        # ``in assumption_dict.keys()`` but without building a key view.
        if a not in assumption_dict:
            raise ValueError('{} not present in assumption_dict'.format(a))
    assumption_strings = [assumption_dict[a] for a in assumptions]
    return strings_to_list_string(assumption_strings)
def function[assumption_list_string, parameter[assumptions, assumption_dict]]: constant[ Takes in a list of short forms of assumptions and an assumption dictionary, and returns a "list" form of the long form of the assumptions. Raises ------ ValueError if one of the assumptions is not in assumption_dict. ] if call[name[isinstance], parameter[name[assumptions], name[six].string_types]] begin[:] <ast.Raise object at 0x7da20c7cb820> for taget[name[a]] in starred[name[assumptions]] begin[:] if compare[name[a] <ast.NotIn object at 0x7da2590d7190> call[name[assumption_dict].keys, parameter[]]] begin[:] <ast.Raise object at 0x7da20c7c9690> variable[assumption_strings] assign[=] <ast.ListComp object at 0x7da20c7cb580> return[call[name[strings_to_list_string], parameter[name[assumption_strings]]]]
keyword[def] identifier[assumption_list_string] ( identifier[assumptions] , identifier[assumption_dict] ): literal[string] keyword[if] identifier[isinstance] ( identifier[assumptions] , identifier[six] . identifier[string_types] ): keyword[raise] identifier[TypeError] ( literal[string] literal[string] ) keyword[for] identifier[a] keyword[in] identifier[assumptions] : keyword[if] identifier[a] keyword[not] keyword[in] identifier[assumption_dict] . identifier[keys] (): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[a] )) identifier[assumption_strings] =[ identifier[assumption_dict] [ identifier[a] ] keyword[for] identifier[a] keyword[in] identifier[assumptions] ] keyword[return] identifier[strings_to_list_string] ( identifier[assumption_strings] )
def assumption_list_string(assumptions, assumption_dict):
    """
    Takes in a list of short forms of assumptions and an assumption
    dictionary, and returns a "list" form of the long form of the
    assumptions.

    Raises
    ------
    ValueError
        if one of the assumptions is not in assumption_dict.
    """
    # A bare string would iterate per-character; reject it up front.
    if isinstance(assumptions, six.string_types):
        raise TypeError('assumptions must be an iterable of strings, not a string itself')
    # Validate every short form before doing any lookups for the result.
    for short_form in assumptions:
        if short_form not in assumption_dict:
            raise ValueError('{} not present in assumption_dict'.format(short_form))
    long_forms = [assumption_dict[short_form] for short_form in assumptions]
    return strings_to_list_string(long_forms)
def F_value(ER, EF, dfnum, dfden):
    """
    Returns an F-statistic given the following:
        ER  = error associated with the null hypothesis (the Restricted model)
        EF  = error associated with the alternate hypothesis (the Full model)
        dfR-dfF = degrees of freedom of the numerator
        dfF = degrees of freedom associated with the denominator/Full model

    Usage:   lF_value(ER,EF,dfnum,dfden)
    """
    # F = ((ER - EF) / dfnum) / (EF / dfden); float() forces true
    # division of the integer degrees of freedom.
    numerator = (ER - EF) / float(dfnum)
    denominator = EF / float(dfden)
    return numerator / denominator
def function[F_value, parameter[ER, EF, dfnum, dfden]]: constant[ Returns an F-statistic given the following: ER = error associated with the null hypothesis (the Restricted model) EF = error associated with the alternate hypothesis (the Full model) dfR-dfF = degrees of freedom of the numerator dfF = degrees of freedom associated with the denominator/Full model Usage: lF_value(ER,EF,dfnum,dfden) ] return[binary_operation[binary_operation[binary_operation[name[ER] - name[EF]] / call[name[float], parameter[name[dfnum]]]] / binary_operation[name[EF] / call[name[float], parameter[name[dfden]]]]]]
keyword[def] identifier[F_value] ( identifier[ER] , identifier[EF] , identifier[dfnum] , identifier[dfden] ): literal[string] keyword[return] (( identifier[ER] - identifier[EF] )/ identifier[float] ( identifier[dfnum] )/( identifier[EF] / identifier[float] ( identifier[dfden] )))
def F_value(ER, EF, dfnum, dfden):
    """
    Returns an F-statistic given the following:
        ER  = error associated with the null hypothesis (the Restricted model)
        EF  = error associated with the alternate hypothesis (the Full model)
        dfR-dfF = degrees of freedom of the numerator
        dfF = degrees of freedom associated with the denominator/Full model

    Usage:   lF_value(ER,EF,dfnum,dfden)
    """
    # Gain in fit from relaxing the restriction, per numerator df,
    # scaled by the full model's mean error.
    restricted_gain = ER - EF
    return restricted_gain / float(dfnum) / (EF / float(dfden))
def _uniquify(_list): """Remove duplicates in a list.""" seen = set() result = [] for x in _list: if x not in seen: result.append(x) seen.add(x) return result
def function[_uniquify, parameter[_list]]: constant[Remove duplicates in a list.] variable[seen] assign[=] call[name[set], parameter[]] variable[result] assign[=] list[[]] for taget[name[x]] in starred[name[_list]] begin[:] if compare[name[x] <ast.NotIn object at 0x7da2590d7190> name[seen]] begin[:] call[name[result].append, parameter[name[x]]] call[name[seen].add, parameter[name[x]]] return[name[result]]
keyword[def] identifier[_uniquify] ( identifier[_list] ): literal[string] identifier[seen] = identifier[set] () identifier[result] =[] keyword[for] identifier[x] keyword[in] identifier[_list] : keyword[if] identifier[x] keyword[not] keyword[in] identifier[seen] : identifier[result] . identifier[append] ( identifier[x] ) identifier[seen] . identifier[add] ( identifier[x] ) keyword[return] identifier[result]
def _uniquify(_list): """Remove duplicates in a list.""" seen = set() result = [] for x in _list: if x not in seen: result.append(x) seen.add(x) # depends on [control=['if'], data=['x', 'seen']] # depends on [control=['for'], data=['x']] return result
def generichash_blake2b_update(state, data):
    """Update the blake2b hash state

    :param state: a initialized Blake2bState object as returned from
                  :py:func:`.crypto_generichash_blake2b_init`
    :type state: :py:class:`.Blake2State`
    :param data: bytes to absorb into the running hash
    :type data: bytes
    """
    # Validate both arguments before touching the C layer.
    ensure(isinstance(state, Blake2State),
           'State must be a Blake2State object',
           raising=exc.TypeError)
    ensure(isinstance(data, bytes),
           'Input data must be a bytes sequence',
           raising=exc.TypeError)

    status = lib.crypto_generichash_blake2b_update(
        state._statebuf, data, len(data))

    # libsodium signals success with 0; anything else is unexpected.
    ensure(status == 0,
           'Unexpected failure',
           raising=exc.RuntimeError)
def function[generichash_blake2b_update, parameter[state, data]]: constant[Update the blake2b hash state :param state: a initialized Blake2bState object as returned from :py:func:`.crypto_generichash_blake2b_init` :type state: :py:class:`.Blake2State` :param data: :type data: bytes ] call[name[ensure], parameter[call[name[isinstance], parameter[name[state], name[Blake2State]]], constant[State must be a Blake2State object]]] call[name[ensure], parameter[call[name[isinstance], parameter[name[data], name[bytes]]], constant[Input data must be a bytes sequence]]] variable[rc] assign[=] call[name[lib].crypto_generichash_blake2b_update, parameter[name[state]._statebuf, name[data], call[name[len], parameter[name[data]]]]] call[name[ensure], parameter[compare[name[rc] equal[==] constant[0]], constant[Unexpected failure]]]
keyword[def] identifier[generichash_blake2b_update] ( identifier[state] , identifier[data] ): literal[string] identifier[ensure] ( identifier[isinstance] ( identifier[state] , identifier[Blake2State] ), literal[string] , identifier[raising] = identifier[exc] . identifier[TypeError] ) identifier[ensure] ( identifier[isinstance] ( identifier[data] , identifier[bytes] ), literal[string] , identifier[raising] = identifier[exc] . identifier[TypeError] ) identifier[rc] = identifier[lib] . identifier[crypto_generichash_blake2b_update] ( identifier[state] . identifier[_statebuf] , identifier[data] , identifier[len] ( identifier[data] )) identifier[ensure] ( identifier[rc] == literal[int] , literal[string] , identifier[raising] = identifier[exc] . identifier[RuntimeError] )
def generichash_blake2b_update(state, data):
    """Update the blake2b hash state

    :param state: a initialized Blake2bState object as returned from
                  :py:func:`.crypto_generichash_blake2b_init`
    :type state: :py:class:`.Blake2State`
    :param data: bytes to absorb into the running hash
    :type data: bytes
    """
    # Type-check both inputs first; `ensure` raises the given exception
    # type when its condition is false.
    ensure(isinstance(state, Blake2State), 'State must be a Blake2State object', raising=exc.TypeError)
    ensure(isinstance(data, bytes), 'Input data must be a bytes sequence', raising=exc.TypeError)
    # Feed the buffer to the C implementation; a nonzero return code
    # means the update failed.
    rc = lib.crypto_generichash_blake2b_update(state._statebuf, data, len(data))
    ensure(rc == 0, 'Unexpected failure', raising=exc.RuntimeError)
def split_namespace(clarkName):
    """Return (namespace, localname) tuple for a property name in Clark Notation.

    Namespace defaults to ''.

    Example:
    '{DAV:}foo'  -> ('DAV:', 'foo')
    'bar'        -> ('', 'bar')
    """
    if clarkName.startswith("{") and "}" in clarkName:
        # partition splits on the first '}' only, mirroring split('}', 1).
        prefix, _, localname = clarkName.partition("}")
        return (prefix[1:], localname)
    # No namespace portion present.
    return ("", clarkName)
def function[split_namespace, parameter[clarkName]]: constant[Return (namespace, localname) tuple for a property name in Clark Notation. Namespace defaults to ''. Example: '{DAV:}foo' -> ('DAV:', 'foo') 'bar' -> ('', 'bar') ] if <ast.BoolOp object at 0x7da1b0052470> begin[:] <ast.Tuple object at 0x7da1b0051900> assign[=] call[name[clarkName].split, parameter[constant[}], constant[1]]] return[tuple[[<ast.Subscript object at 0x7da1b0051db0>, <ast.Name object at 0x7da1b0053940>]]] return[tuple[[<ast.Constant object at 0x7da1b00525f0>, <ast.Name object at 0x7da1b0051870>]]]
keyword[def] identifier[split_namespace] ( identifier[clarkName] ): literal[string] keyword[if] identifier[clarkName] . identifier[startswith] ( literal[string] ) keyword[and] literal[string] keyword[in] identifier[clarkName] : identifier[ns] , identifier[localname] = identifier[clarkName] . identifier[split] ( literal[string] , literal[int] ) keyword[return] ( identifier[ns] [ literal[int] :], identifier[localname] ) keyword[return] ( literal[string] , identifier[clarkName] )
def split_namespace(clarkName):
    """Return (namespace, localname) tuple for a property name in Clark Notation.

    Namespace defaults to ''.

    Example:
    '{DAV:}foo'  -> ('DAV:', 'foo')
    'bar'        -> ('', 'bar')
    """
    # Guard clause: names without a '{...}' prefix have no namespace.
    if not clarkName.startswith('{') or '}' not in clarkName:
        return ('', clarkName)
    ns, localname = clarkName.split('}', 1)
    # Drop the leading '{' from the namespace part.
    return (ns[1:], localname)
def attachRequest(PTmsiSignature_presence=0, GprsTimer_presence=0,
                  TmsiStatus_presence=0):
    """ATTACH REQUEST Section 9.4.1

    Builds the mandatory information elements and appends the optional
    ones whose *_presence flag equals 1.
    """
    a = TpPd(pd=0x3)
    b = MessageType(mesType=0x1)  # 0000001
    c = MsNetworkCapability()
    d = AttachTypeAndCiphKeySeqNr()
    f = DrxParameter()
    g = MobileId()
    h = RoutingAreaIdentification()
    i = MsRadioAccessCapability()
    packet = a / b / c / d / f / g / h / i
    # NOTE: `is 1` compared object identity with an int literal (an
    # implementation detail of CPython); use equality instead.
    if PTmsiSignature_presence == 1:
        j = PTmsiSignature(ieiPTS=0x19)
        packet = packet / j
    if GprsTimer_presence == 1:
        k = GprsTimer(ieiGT=0x17)
        packet = packet / k
    if TmsiStatus_presence == 1:
        l = TmsiStatus(ieiTS=0x9)
        packet = packet / l
    return packet
def function[attachRequest, parameter[PTmsiSignature_presence, GprsTimer_presence, TmsiStatus_presence]]: constant[ATTACH REQUEST Section 9.4.1] variable[a] assign[=] call[name[TpPd], parameter[]] variable[b] assign[=] call[name[MessageType], parameter[]] variable[c] assign[=] call[name[MsNetworkCapability], parameter[]] variable[d] assign[=] call[name[AttachTypeAndCiphKeySeqNr], parameter[]] variable[f] assign[=] call[name[DrxParameter], parameter[]] variable[g] assign[=] call[name[MobileId], parameter[]] variable[h] assign[=] call[name[RoutingAreaIdentification], parameter[]] variable[i] assign[=] call[name[MsRadioAccessCapability], parameter[]] variable[packet] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[binary_operation[name[a] / name[b]] / name[c]] / name[d]] / name[f]] / name[g]] / name[h]] / name[i]] if compare[name[PTmsiSignature_presence] is constant[1]] begin[:] variable[j] assign[=] call[name[PTmsiSignature], parameter[]] variable[packet] assign[=] binary_operation[name[packet] / name[j]] if compare[name[GprsTimer_presence] is constant[1]] begin[:] variable[k] assign[=] call[name[GprsTimer], parameter[]] variable[packet] assign[=] binary_operation[name[packet] / name[k]] if compare[name[TmsiStatus_presence] is constant[1]] begin[:] variable[l] assign[=] call[name[TmsiStatus], parameter[]] variable[packet] assign[=] binary_operation[name[packet] / name[l]] return[name[packet]]
keyword[def] identifier[attachRequest] ( identifier[PTmsiSignature_presence] = literal[int] , identifier[GprsTimer_presence] = literal[int] , identifier[TmsiStatus_presence] = literal[int] ): literal[string] identifier[a] = identifier[TpPd] ( identifier[pd] = literal[int] ) identifier[b] = identifier[MessageType] ( identifier[mesType] = literal[int] ) identifier[c] = identifier[MsNetworkCapability] () identifier[d] = identifier[AttachTypeAndCiphKeySeqNr] () identifier[f] = identifier[DrxParameter] () identifier[g] = identifier[MobileId] () identifier[h] = identifier[RoutingAreaIdentification] () identifier[i] = identifier[MsRadioAccessCapability] () identifier[packet] = identifier[a] / identifier[b] / identifier[c] / identifier[d] / identifier[f] / identifier[g] / identifier[h] / identifier[i] keyword[if] identifier[PTmsiSignature_presence] keyword[is] literal[int] : identifier[j] = identifier[PTmsiSignature] ( identifier[ieiPTS] = literal[int] ) identifier[packet] = identifier[packet] / identifier[j] keyword[if] identifier[GprsTimer_presence] keyword[is] literal[int] : identifier[k] = identifier[GprsTimer] ( identifier[ieiGT] = literal[int] ) identifier[packet] = identifier[packet] / identifier[k] keyword[if] identifier[TmsiStatus_presence] keyword[is] literal[int] : identifier[l] = identifier[TmsiStatus] ( identifier[ieiTS] = literal[int] ) identifier[packet] = identifier[packet] / identifier[l] keyword[return] identifier[packet]
def attachRequest(PTmsiSignature_presence=0, GprsTimer_presence=0,
                  TmsiStatus_presence=0):
    """ATTACH REQUEST Section 9.4.1

    Builds the mandatory information elements and appends the optional
    ones whose *_presence flag equals 1.
    """
    a = TpPd(pd=3)
    b = MessageType(mesType=1)  # 0000001
    c = MsNetworkCapability()
    d = AttachTypeAndCiphKeySeqNr()
    f = DrxParameter()
    g = MobileId()
    h = RoutingAreaIdentification()
    i = MsRadioAccessCapability()
    packet = a / b / c / d / f / g / h / i
    # Equality (not `is`) for comparing against an int literal: identity
    # of small ints is a CPython implementation detail.
    if PTmsiSignature_presence == 1:
        j = PTmsiSignature(ieiPTS=25)
        packet = packet / j
    if GprsTimer_presence == 1:
        k = GprsTimer(ieiGT=23)
        packet = packet / k
    if TmsiStatus_presence == 1:
        l = TmsiStatus(ieiTS=9)
        packet = packet / l
    return packet
def disable_active_checks(self, checks):
    """Disable active checks for this host/service

    Update check in progress with current object information

    :param checks: Checks object, to change all checks in progress
    :type checks: alignak.objects.check.Checks
    :return: None
    """
    self.active_checks_enabled = False
    for check_id in self.checks_in_progress:
        check = checks[check_id]
        # Mark the check so its result is consumed as-is, carrying the
        # current state of this object rather than a fresh poll result.
        check.status = ACT_STATUS_WAIT_CONSUME
        check.exit_status = self.state_id
        check.output = self.output
        check.perf_data = self.perf_data
        check.execution_time = 0
        check.check_time = time.time()
def function[disable_active_checks, parameter[self, checks]]: constant[Disable active checks for this host/service Update check in progress with current object information :param checks: Checks object, to change all checks in progress :type checks: alignak.objects.check.Checks :return: None ] name[self].active_checks_enabled assign[=] constant[False] for taget[name[chk_id]] in starred[name[self].checks_in_progress] begin[:] variable[chk] assign[=] call[name[checks]][name[chk_id]] name[chk].status assign[=] name[ACT_STATUS_WAIT_CONSUME] name[chk].exit_status assign[=] name[self].state_id name[chk].output assign[=] name[self].output name[chk].check_time assign[=] call[name[time].time, parameter[]] name[chk].execution_time assign[=] constant[0] name[chk].perf_data assign[=] name[self].perf_data
keyword[def] identifier[disable_active_checks] ( identifier[self] , identifier[checks] ): literal[string] identifier[self] . identifier[active_checks_enabled] = keyword[False] keyword[for] identifier[chk_id] keyword[in] identifier[self] . identifier[checks_in_progress] : identifier[chk] = identifier[checks] [ identifier[chk_id] ] identifier[chk] . identifier[status] = identifier[ACT_STATUS_WAIT_CONSUME] identifier[chk] . identifier[exit_status] = identifier[self] . identifier[state_id] identifier[chk] . identifier[output] = identifier[self] . identifier[output] identifier[chk] . identifier[check_time] = identifier[time] . identifier[time] () identifier[chk] . identifier[execution_time] = literal[int] identifier[chk] . identifier[perf_data] = identifier[self] . identifier[perf_data]
def disable_active_checks(self, checks):
    """Disable active checks for this host/service

    Update check in progress with current object information

    :param checks: Checks object, to change all checks in progress
    :type checks: alignak.objects.check.Checks
    :return: None
    """
    self.active_checks_enabled = False
    # Rewrite every in-flight check so its result reflects this object's
    # current state instead of a real poll.
    for chk_id in self.checks_in_progress:
        chk = checks[chk_id]
        chk.status = ACT_STATUS_WAIT_CONSUME
        chk.exit_status = self.state_id
        chk.output = self.output
        # Stamp the check with "now" and zero execution time, since no
        # actual check command ran.
        chk.check_time = time.time()
        chk.execution_time = 0
        chk.perf_data = self.perf_data
def confd_state_snmp_listen_udp_port(self, **kwargs):
    """Auto Generated Code

    Builds the confd-state/snmp/listen/udp/port XML subtree and hands the
    document to the callback (``kwargs['callback']`` or self._callback).
    """
    config = ET.Element("config")
    node = ET.SubElement(
        config, "confd-state",
        xmlns="http://tail-f.com/yang/confd-monitoring")
    # Descend through the fixed element chain one tag at a time.
    for tag in ("snmp", "listen", "udp", "port"):
        node = ET.SubElement(node, tag)
    node.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def function[confd_state_snmp_listen_udp_port, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[confd_state] assign[=] call[name[ET].SubElement, parameter[name[config], constant[confd-state]]] variable[snmp] assign[=] call[name[ET].SubElement, parameter[name[confd_state], constant[snmp]]] variable[listen] assign[=] call[name[ET].SubElement, parameter[name[snmp], constant[listen]]] variable[udp] assign[=] call[name[ET].SubElement, parameter[name[listen], constant[udp]]] variable[port] assign[=] call[name[ET].SubElement, parameter[name[udp], constant[port]]] name[port].text assign[=] call[name[kwargs].pop, parameter[constant[port]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[confd_state_snmp_listen_udp_port] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[confd_state] = identifier[ET] . identifier[SubElement] ( identifier[config] , literal[string] , identifier[xmlns] = literal[string] ) identifier[snmp] = identifier[ET] . identifier[SubElement] ( identifier[confd_state] , literal[string] ) identifier[listen] = identifier[ET] . identifier[SubElement] ( identifier[snmp] , literal[string] ) identifier[udp] = identifier[ET] . identifier[SubElement] ( identifier[listen] , literal[string] ) identifier[port] = identifier[ET] . identifier[SubElement] ( identifier[udp] , literal[string] ) identifier[port] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def confd_state_snmp_listen_udp_port(self, **kwargs):
    """Auto Generated Code

    Builds the confd-state/snmp/listen/udp/port XML subtree, sets the
    port text from ``kwargs['port']``, and passes the document to the
    callback (``kwargs['callback']`` if given, else self._callback).
    """
    config = ET.Element('config')
    confd_state = ET.SubElement(config, 'confd-state', xmlns='http://tail-f.com/yang/confd-monitoring')
    snmp = ET.SubElement(confd_state, 'snmp')
    listen = ET.SubElement(snmp, 'listen')
    udp = ET.SubElement(listen, 'udp')
    port = ET.SubElement(udp, 'port')
    # 'port' is required; pop() raises KeyError if the caller omits it.
    port.text = kwargs.pop('port')
    callback = kwargs.pop('callback', self._callback)
    return callback(config)
def get_timeline(self, auth_secret, max_cnt_tweets):
    """Get the general or user timeline.

    An empty authentication secret selects the general timeline. A
    non-empty secret must belong to a logged-in user, in which case that
    user's personal timeline is returned; an invalid secret yields an
    error.

    Parameters
    ----------
    auth_secret: str
        Either the authentication secret of the logged-in user or an
        empty string.
    max_cnt_tweets: int
        The maximum number of tweets included in the timeline. If it is
        set to -1, then all the available tweets will be included.

    Returns
    -------
    bool
        True if the timeline is successfully retrieved, False otherwise.
    result
        A dict containing a list of tweets with the key TWEETS_KEY if
        the timeline is successfully retrieved, a dict containing the
        error string with the key ERROR_KEY otherwise.

    Note
    ----
    Possible error strings are listed as below:

    - ERROR_NOT_LOGGED_IN
    """
    result = {pytwis_constants.ERROR_KEY: None}

    if auth_secret != '':
        # A secret was supplied: it must map to a logged-in user.
        loggedin, userid = self._is_loggedin(auth_secret)
        if not loggedin:
            result[pytwis_constants.ERROR_KEY] = \
                pytwis_constants.ERROR_NOT_LOGGED_IN
            return (False, result)
        timeline_key = pytwis_constants.USER_TIMELINE_KEY_FORMAT.format(userid)
    else:
        # No secret: serve the general timeline.
        timeline_key = pytwis_constants.GENERAL_TIMELINE_KEY

    result[pytwis_constants.TWEETS_KEY] = self._get_tweets(timeline_key,
                                                           max_cnt_tweets)
    return (True, result)
def function[get_timeline, parameter[self, auth_secret, max_cnt_tweets]]: constant[Get the general or user timeline. If an empty authentication secret is given, this method returns the general timeline. If an authentication secret is given and it is valid, this method returns the user timeline. If an authentication secret is given but it is invalid, this method returns an error. Parameters ---------- auth_secret: str Either the authentication secret of the logged-in user or an empty string. max_cnt_tweets: int The maximum number of tweets included in the timeline. If it is set to -1, then all the available tweets will be included. Returns ------- bool True if the timeline is successfully retrieved, False otherwise. result A dict containing a list of tweets with the key TWEETS_KEY if the timeline is successfully retrieved, a dict containing the error string with the key ERROR_KEY otherwise. Note ---- Possible error strings are listed as below: - ERROR_NOT_LOGGED_IN ] variable[result] assign[=] dictionary[[<ast.Attribute object at 0x7da18c4cc6d0>], [<ast.Constant object at 0x7da18c4cd060>]] if compare[name[auth_secret] equal[==] constant[]] begin[:] variable[timeline_key] assign[=] name[pytwis_constants].GENERAL_TIMELINE_KEY call[name[result]][name[pytwis_constants].TWEETS_KEY] assign[=] call[name[self]._get_tweets, parameter[name[timeline_key], name[max_cnt_tweets]]] return[tuple[[<ast.Constant object at 0x7da1b0a4c4f0>, <ast.Name object at 0x7da1b0a4d7b0>]]]
keyword[def] identifier[get_timeline] ( identifier[self] , identifier[auth_secret] , identifier[max_cnt_tweets] ): literal[string] identifier[result] ={ identifier[pytwis_constants] . identifier[ERROR_KEY] : keyword[None] } keyword[if] identifier[auth_secret] == literal[string] : identifier[timeline_key] = identifier[pytwis_constants] . identifier[GENERAL_TIMELINE_KEY] keyword[else] : identifier[loggedin] , identifier[userid] = identifier[self] . identifier[_is_loggedin] ( identifier[auth_secret] ) keyword[if] keyword[not] identifier[loggedin] : identifier[result] [ identifier[pytwis_constants] . identifier[ERROR_KEY] ]= identifier[pytwis_constants] . identifier[ERROR_NOT_LOGGED_IN] keyword[return] ( keyword[False] , identifier[result] ) identifier[timeline_key] = identifier[pytwis_constants] . identifier[USER_TIMELINE_KEY_FORMAT] . identifier[format] ( identifier[userid] ) identifier[result] [ identifier[pytwis_constants] . identifier[TWEETS_KEY] ]= identifier[self] . identifier[_get_tweets] ( identifier[timeline_key] , identifier[max_cnt_tweets] ) keyword[return] ( keyword[True] , identifier[result] )
def get_timeline(self, auth_secret, max_cnt_tweets):
    """Get the general or user timeline.

    If an empty authentication secret is given, this method returns the general timeline.
    If an authentication secret is given and it is valid, this method returns the user timeline.
    If an authentication secret is given but it is invalid, this method returns an error.

    Parameters
    ----------
    auth_secret: str
        Either the authentication secret of the logged-in user or an empty string.
    max_cnt_tweets: int
        The maximum number of tweets included in the timeline. If it is set to -1,
        then all the available tweets will be included.

    Returns
    -------
    bool
        True if the timeline is successfully retrieved, False otherwise.
    result
        A dict containing a list of tweets with the key TWEETS_KEY if the timeline is
        successfully retrieved, a dict containing the error string with the key ERROR_KEY
        otherwise.

    Note
    ----
    Possible error strings are listed as below:

    -  ERROR_NOT_LOGGED_IN
    """
    result = {pytwis_constants.ERROR_KEY: None}
    if auth_secret == '':
        # An empty authentication secret implies getting the general timeline.
        timeline_key = pytwis_constants.GENERAL_TIMELINE_KEY
    else:
        # Check if the user is logged in.
        (loggedin, userid) = self._is_loggedin(auth_secret)
        if not loggedin:
            result[pytwis_constants.ERROR_KEY] = pytwis_constants.ERROR_NOT_LOGGED_IN
            return (False, result)
        # Get the user timeline.
        timeline_key = pytwis_constants.USER_TIMELINE_KEY_FORMAT.format(userid)
    result[pytwis_constants.TWEETS_KEY] = self._get_tweets(timeline_key, max_cnt_tweets)
    return (True, result)
def get_dependency_graph(component):
    """
    Generate a component's graph of dependencies, which can be passed
    to :func:`run` or :func:`run_incremental`.

    Raises
    ------
    Exception
        if *component* was never registered in DEPENDENCIES.
    """
    if component not in DEPENDENCIES:
        raise Exception("%s is not a registered component." % get_name(component))

    if not DEPENDENCIES[component]:
        # Leaf component: it participates but depends on nothing.
        return {component: set()}

    graph = defaultdict(set)

    def visitor(c, parent):
        if parent is not None:
            graph[parent].add(c)

    walk_dependencies(component, visitor)

    graph = dict(graph)

    # Find all items that don't depend on anything: they appear as
    # values somewhere but never as keys. set().union(*...) folds all
    # value-sets together in one C-level call (replaces reduce()).
    extra_items_in_deps = set().union(*graph.values()) - set(graph.keys())

    # Add empty dependencies where needed.
    graph.update({item: set() for item in extra_items_in_deps})
    return graph
def function[get_dependency_graph, parameter[component]]: constant[ Generate a component's graph of dependencies, which can be passed to :func:`run` or :func:`run_incremental`. ] if compare[name[component] <ast.NotIn object at 0x7da2590d7190> name[DEPENDENCIES]] begin[:] <ast.Raise object at 0x7da20e956650> if <ast.UnaryOp object at 0x7da18dc9aec0> begin[:] return[dictionary[[<ast.Name object at 0x7da18dc99ba0>], [<ast.Call object at 0x7da18dc98c70>]]] variable[graph] assign[=] call[name[defaultdict], parameter[name[set]]] def function[visitor, parameter[c, parent]]: if compare[name[parent] is_not constant[None]] begin[:] call[call[name[graph]][name[parent]].add, parameter[name[c]]] call[name[walk_dependencies], parameter[name[component], name[visitor]]] variable[graph] assign[=] call[name[dict], parameter[name[graph]]] variable[extra_items_in_deps] assign[=] binary_operation[call[name[_reduce], parameter[name[set].union, call[name[graph].values, parameter[]], call[name[set], parameter[]]]] - call[name[set], parameter[call[name[graph].keys, parameter[]]]]] call[name[graph].update, parameter[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da18dc9a020>]]]] return[name[graph]]
keyword[def] identifier[get_dependency_graph] ( identifier[component] ): literal[string] keyword[if] identifier[component] keyword[not] keyword[in] identifier[DEPENDENCIES] : keyword[raise] identifier[Exception] ( literal[string] % identifier[get_name] ( identifier[component] )) keyword[if] keyword[not] identifier[DEPENDENCIES] [ identifier[component] ]: keyword[return] { identifier[component] : identifier[set] ()} identifier[graph] = identifier[defaultdict] ( identifier[set] ) keyword[def] identifier[visitor] ( identifier[c] , identifier[parent] ): keyword[if] identifier[parent] keyword[is] keyword[not] keyword[None] : identifier[graph] [ identifier[parent] ]. identifier[add] ( identifier[c] ) identifier[walk_dependencies] ( identifier[component] , identifier[visitor] ) identifier[graph] = identifier[dict] ( identifier[graph] ) identifier[extra_items_in_deps] = identifier[_reduce] ( identifier[set] . identifier[union] , identifier[graph] . identifier[values] (), identifier[set] ())- identifier[set] ( identifier[graph] . identifier[keys] ()) identifier[graph] . identifier[update] ( identifier[dict] (( identifier[item] , identifier[set] ()) keyword[for] identifier[item] keyword[in] identifier[extra_items_in_deps] )) keyword[return] identifier[graph]
def get_dependency_graph(component): """ Generate a component's graph of dependencies, which can be passed to :func:`run` or :func:`run_incremental`. """ if component not in DEPENDENCIES: raise Exception('%s is not a registered component.' % get_name(component)) # depends on [control=['if'], data=['component']] if not DEPENDENCIES[component]: return {component: set()} # depends on [control=['if'], data=[]] graph = defaultdict(set) def visitor(c, parent): if parent is not None: graph[parent].add(c) # depends on [control=['if'], data=['parent']] walk_dependencies(component, visitor) graph = dict(graph) # Find all items that don't depend on anything. extra_items_in_deps = _reduce(set.union, graph.values(), set()) - set(graph.keys()) # Add empty dependencies where needed. graph.update(dict(((item, set()) for item in extra_items_in_deps))) return graph
def _safe_write_to_file(self, file, message): """ Writes a string to a file safely (with file locks). """ target = file lock_name = make_lock_name(target, self.outfolder) lock_file = self._make_lock_path(lock_name) while True: if os.path.isfile(lock_file): self._wait_for_lock(lock_file) else: try: self.locks.append(lock_file) self._create_file_racefree(lock_file) except OSError as e: if e.errno == errno.EEXIST: print ("Lock file created after test! Looping again.") continue # Go back to start # Proceed with file writing with open(file, "a") as myfile: myfile.write(message + "\n") os.remove(lock_file) self.locks.remove(lock_file) # If you make it to the end of the while loop, you're done break
def function[_safe_write_to_file, parameter[self, file, message]]: constant[ Writes a string to a file safely (with file locks). ] variable[target] assign[=] name[file] variable[lock_name] assign[=] call[name[make_lock_name], parameter[name[target], name[self].outfolder]] variable[lock_file] assign[=] call[name[self]._make_lock_path, parameter[name[lock_name]]] while constant[True] begin[:] if call[name[os].path.isfile, parameter[name[lock_file]]] begin[:] call[name[self]._wait_for_lock, parameter[name[lock_file]]]
keyword[def] identifier[_safe_write_to_file] ( identifier[self] , identifier[file] , identifier[message] ): literal[string] identifier[target] = identifier[file] identifier[lock_name] = identifier[make_lock_name] ( identifier[target] , identifier[self] . identifier[outfolder] ) identifier[lock_file] = identifier[self] . identifier[_make_lock_path] ( identifier[lock_name] ) keyword[while] keyword[True] : keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[lock_file] ): identifier[self] . identifier[_wait_for_lock] ( identifier[lock_file] ) keyword[else] : keyword[try] : identifier[self] . identifier[locks] . identifier[append] ( identifier[lock_file] ) identifier[self] . identifier[_create_file_racefree] ( identifier[lock_file] ) keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[if] identifier[e] . identifier[errno] == identifier[errno] . identifier[EEXIST] : identifier[print] ( literal[string] ) keyword[continue] keyword[with] identifier[open] ( identifier[file] , literal[string] ) keyword[as] identifier[myfile] : identifier[myfile] . identifier[write] ( identifier[message] + literal[string] ) identifier[os] . identifier[remove] ( identifier[lock_file] ) identifier[self] . identifier[locks] . identifier[remove] ( identifier[lock_file] ) keyword[break]
def _safe_write_to_file(self, file, message): """ Writes a string to a file safely (with file locks). """ target = file lock_name = make_lock_name(target, self.outfolder) lock_file = self._make_lock_path(lock_name) while True: if os.path.isfile(lock_file): self._wait_for_lock(lock_file) # depends on [control=['if'], data=[]] else: try: self.locks.append(lock_file) self._create_file_racefree(lock_file) # depends on [control=['try'], data=[]] except OSError as e: if e.errno == errno.EEXIST: print('Lock file created after test! Looping again.') continue # Go back to start # depends on [control=['if'], data=[]] # depends on [control=['except'], data=['e']] # Proceed with file writing with open(file, 'a') as myfile: myfile.write(message + '\n') # depends on [control=['with'], data=['myfile']] os.remove(lock_file) self.locks.remove(lock_file) # If you make it to the end of the while loop, you're done break # depends on [control=['while'], data=[]]
def get_instance(self, payload): """ Build an instance of WorkflowStatisticsInstance :param dict payload: Payload response from the API :returns: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance """ return WorkflowStatisticsInstance( self._version, payload, workspace_sid=self._solution['workspace_sid'], workflow_sid=self._solution['workflow_sid'], )
def function[get_instance, parameter[self, payload]]: constant[ Build an instance of WorkflowStatisticsInstance :param dict payload: Payload response from the API :returns: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance ] return[call[name[WorkflowStatisticsInstance], parameter[name[self]._version, name[payload]]]]
keyword[def] identifier[get_instance] ( identifier[self] , identifier[payload] ): literal[string] keyword[return] identifier[WorkflowStatisticsInstance] ( identifier[self] . identifier[_version] , identifier[payload] , identifier[workspace_sid] = identifier[self] . identifier[_solution] [ literal[string] ], identifier[workflow_sid] = identifier[self] . identifier[_solution] [ literal[string] ], )
def get_instance(self, payload): """ Build an instance of WorkflowStatisticsInstance :param dict payload: Payload response from the API :returns: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.workflow.workflow_statistics.WorkflowStatisticsInstance """ return WorkflowStatisticsInstance(self._version, payload, workspace_sid=self._solution['workspace_sid'], workflow_sid=self._solution['workflow_sid'])
def is_jail(name): ''' Return True if jail exists False if not CLI Example: .. code-block:: bash salt '*' poudriere.is_jail <jail name> ''' jails = list_jails() for jail in jails: if jail.split()[0] == name: return True return False
def function[is_jail, parameter[name]]: constant[ Return True if jail exists False if not CLI Example: .. code-block:: bash salt '*' poudriere.is_jail <jail name> ] variable[jails] assign[=] call[name[list_jails], parameter[]] for taget[name[jail]] in starred[name[jails]] begin[:] if compare[call[call[name[jail].split, parameter[]]][constant[0]] equal[==] name[name]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[is_jail] ( identifier[name] ): literal[string] identifier[jails] = identifier[list_jails] () keyword[for] identifier[jail] keyword[in] identifier[jails] : keyword[if] identifier[jail] . identifier[split] ()[ literal[int] ]== identifier[name] : keyword[return] keyword[True] keyword[return] keyword[False]
def is_jail(name): """ Return True if jail exists False if not CLI Example: .. code-block:: bash salt '*' poudriere.is_jail <jail name> """ jails = list_jails() for jail in jails: if jail.split()[0] == name: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['jail']] return False
def write_extracted_licenses(lics, out): """ Write extracted licenses fields to out. """ write_value('LicenseID', lics.identifier, out) if lics.full_name is not None: write_value('LicenseName', lics.full_name, out) if lics.comment is not None: write_text_value('LicenseComment', lics.comment, out) for xref in sorted(lics.cross_ref): write_value('LicenseCrossReference', xref, out) write_text_value('ExtractedText', lics.text, out)
def function[write_extracted_licenses, parameter[lics, out]]: constant[ Write extracted licenses fields to out. ] call[name[write_value], parameter[constant[LicenseID], name[lics].identifier, name[out]]] if compare[name[lics].full_name is_not constant[None]] begin[:] call[name[write_value], parameter[constant[LicenseName], name[lics].full_name, name[out]]] if compare[name[lics].comment is_not constant[None]] begin[:] call[name[write_text_value], parameter[constant[LicenseComment], name[lics].comment, name[out]]] for taget[name[xref]] in starred[call[name[sorted], parameter[name[lics].cross_ref]]] begin[:] call[name[write_value], parameter[constant[LicenseCrossReference], name[xref], name[out]]] call[name[write_text_value], parameter[constant[ExtractedText], name[lics].text, name[out]]]
keyword[def] identifier[write_extracted_licenses] ( identifier[lics] , identifier[out] ): literal[string] identifier[write_value] ( literal[string] , identifier[lics] . identifier[identifier] , identifier[out] ) keyword[if] identifier[lics] . identifier[full_name] keyword[is] keyword[not] keyword[None] : identifier[write_value] ( literal[string] , identifier[lics] . identifier[full_name] , identifier[out] ) keyword[if] identifier[lics] . identifier[comment] keyword[is] keyword[not] keyword[None] : identifier[write_text_value] ( literal[string] , identifier[lics] . identifier[comment] , identifier[out] ) keyword[for] identifier[xref] keyword[in] identifier[sorted] ( identifier[lics] . identifier[cross_ref] ): identifier[write_value] ( literal[string] , identifier[xref] , identifier[out] ) identifier[write_text_value] ( literal[string] , identifier[lics] . identifier[text] , identifier[out] )
def write_extracted_licenses(lics, out): """ Write extracted licenses fields to out. """ write_value('LicenseID', lics.identifier, out) if lics.full_name is not None: write_value('LicenseName', lics.full_name, out) # depends on [control=['if'], data=[]] if lics.comment is not None: write_text_value('LicenseComment', lics.comment, out) # depends on [control=['if'], data=[]] for xref in sorted(lics.cross_ref): write_value('LicenseCrossReference', xref, out) # depends on [control=['for'], data=['xref']] write_text_value('ExtractedText', lics.text, out)
def create_jinja_env(template_path): """ Creates a Jinja2 environment with a specific template path. """ jinja_env = jinja2.Environment( loader=jinja2.FileSystemLoader(template_path), block_start_string='{%', block_end_string='%}', variable_start_string='${', variable_end_string='}', comment_start_string='{#', comment_end_string='#}', line_statement_prefix=None, line_comment_prefix=None, trim_blocks=True, lstrip_blocks=True, newline_sequence='\n' ) jinja_env.filters['regexreplace'] = regex_replace jinja_env.globals.update(uuidgen=uuidgen) return jinja_env
def function[create_jinja_env, parameter[template_path]]: constant[ Creates a Jinja2 environment with a specific template path. ] variable[jinja_env] assign[=] call[name[jinja2].Environment, parameter[]] call[name[jinja_env].filters][constant[regexreplace]] assign[=] name[regex_replace] call[name[jinja_env].globals.update, parameter[]] return[name[jinja_env]]
keyword[def] identifier[create_jinja_env] ( identifier[template_path] ): literal[string] identifier[jinja_env] = identifier[jinja2] . identifier[Environment] ( identifier[loader] = identifier[jinja2] . identifier[FileSystemLoader] ( identifier[template_path] ), identifier[block_start_string] = literal[string] , identifier[block_end_string] = literal[string] , identifier[variable_start_string] = literal[string] , identifier[variable_end_string] = literal[string] , identifier[comment_start_string] = literal[string] , identifier[comment_end_string] = literal[string] , identifier[line_statement_prefix] = keyword[None] , identifier[line_comment_prefix] = keyword[None] , identifier[trim_blocks] = keyword[True] , identifier[lstrip_blocks] = keyword[True] , identifier[newline_sequence] = literal[string] ) identifier[jinja_env] . identifier[filters] [ literal[string] ]= identifier[regex_replace] identifier[jinja_env] . identifier[globals] . identifier[update] ( identifier[uuidgen] = identifier[uuidgen] ) keyword[return] identifier[jinja_env]
def create_jinja_env(template_path): """ Creates a Jinja2 environment with a specific template path. """ jinja_env = jinja2.Environment(loader=jinja2.FileSystemLoader(template_path), block_start_string='{%', block_end_string='%}', variable_start_string='${', variable_end_string='}', comment_start_string='{#', comment_end_string='#}', line_statement_prefix=None, line_comment_prefix=None, trim_blocks=True, lstrip_blocks=True, newline_sequence='\n') jinja_env.filters['regexreplace'] = regex_replace jinja_env.globals.update(uuidgen=uuidgen) return jinja_env
def chunks(seq, chunk_size): # type: (Sequence[T], int) -> Iterable[Sequence[T]] """ Split seq into chunk_size-sized chunks. :param seq: A sequence to chunk. :param chunk_size: The size of chunk. """ return (seq[i:i + chunk_size] for i in range(0, len(seq), chunk_size))
def function[chunks, parameter[seq, chunk_size]]: constant[ Split seq into chunk_size-sized chunks. :param seq: A sequence to chunk. :param chunk_size: The size of chunk. ] return[<ast.GeneratorExp object at 0x7da207f028c0>]
keyword[def] identifier[chunks] ( identifier[seq] , identifier[chunk_size] ): literal[string] keyword[return] ( identifier[seq] [ identifier[i] : identifier[i] + identifier[chunk_size] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[seq] ), identifier[chunk_size] ))
def chunks(seq, chunk_size): # type: (Sequence[T], int) -> Iterable[Sequence[T]] ' Split seq into chunk_size-sized chunks.\n\n :param seq: A sequence to chunk.\n :param chunk_size: The size of chunk.\n ' return (seq[i:i + chunk_size] for i in range(0, len(seq), chunk_size))
def get_unique_id(element): """Returns a unique id for a given element""" this_id = make_id(element) dup = True while dup: if this_id not in ids: dup = False ids.append(this_id) else: this_id = make_id(element) return ids[-1]
def function[get_unique_id, parameter[element]]: constant[Returns a unique id for a given element] variable[this_id] assign[=] call[name[make_id], parameter[name[element]]] variable[dup] assign[=] constant[True] while name[dup] begin[:] if compare[name[this_id] <ast.NotIn object at 0x7da2590d7190> name[ids]] begin[:] variable[dup] assign[=] constant[False] call[name[ids].append, parameter[name[this_id]]] return[call[name[ids]][<ast.UnaryOp object at 0x7da1b1238580>]]
keyword[def] identifier[get_unique_id] ( identifier[element] ): literal[string] identifier[this_id] = identifier[make_id] ( identifier[element] ) identifier[dup] = keyword[True] keyword[while] identifier[dup] : keyword[if] identifier[this_id] keyword[not] keyword[in] identifier[ids] : identifier[dup] = keyword[False] identifier[ids] . identifier[append] ( identifier[this_id] ) keyword[else] : identifier[this_id] = identifier[make_id] ( identifier[element] ) keyword[return] identifier[ids] [- literal[int] ]
def get_unique_id(element): """Returns a unique id for a given element""" this_id = make_id(element) dup = True while dup: if this_id not in ids: dup = False ids.append(this_id) # depends on [control=['if'], data=['this_id', 'ids']] else: this_id = make_id(element) # depends on [control=['while'], data=[]] return ids[-1]
def _request_add_dns_record(self, record): """Sends Add_DNS_Record request""" return self._request_internal("Add_DNS_Record", domain=self.domain, record=record)
def function[_request_add_dns_record, parameter[self, record]]: constant[Sends Add_DNS_Record request] return[call[name[self]._request_internal, parameter[constant[Add_DNS_Record]]]]
keyword[def] identifier[_request_add_dns_record] ( identifier[self] , identifier[record] ): literal[string] keyword[return] identifier[self] . identifier[_request_internal] ( literal[string] , identifier[domain] = identifier[self] . identifier[domain] , identifier[record] = identifier[record] )
def _request_add_dns_record(self, record): """Sends Add_DNS_Record request""" return self._request_internal('Add_DNS_Record', domain=self.domain, record=record)
def apply_and_get_result(self, string): """ Perform the substitution represented by this object on string and return the result. """ if self.is_multiline: compiled_pattern = re.compile(self.pattern, re.MULTILINE) else: compiled_pattern = re.compile(self.pattern) result = re.sub(compiled_pattern, self.repl, string) return result
def function[apply_and_get_result, parameter[self, string]]: constant[ Perform the substitution represented by this object on string and return the result. ] if name[self].is_multiline begin[:] variable[compiled_pattern] assign[=] call[name[re].compile, parameter[name[self].pattern, name[re].MULTILINE]] variable[result] assign[=] call[name[re].sub, parameter[name[compiled_pattern], name[self].repl, name[string]]] return[name[result]]
keyword[def] identifier[apply_and_get_result] ( identifier[self] , identifier[string] ): literal[string] keyword[if] identifier[self] . identifier[is_multiline] : identifier[compiled_pattern] = identifier[re] . identifier[compile] ( identifier[self] . identifier[pattern] , identifier[re] . identifier[MULTILINE] ) keyword[else] : identifier[compiled_pattern] = identifier[re] . identifier[compile] ( identifier[self] . identifier[pattern] ) identifier[result] = identifier[re] . identifier[sub] ( identifier[compiled_pattern] , identifier[self] . identifier[repl] , identifier[string] ) keyword[return] identifier[result]
def apply_and_get_result(self, string): """ Perform the substitution represented by this object on string and return the result. """ if self.is_multiline: compiled_pattern = re.compile(self.pattern, re.MULTILINE) # depends on [control=['if'], data=[]] else: compiled_pattern = re.compile(self.pattern) result = re.sub(compiled_pattern, self.repl, string) return result
def _get_imagesave_wildcards(self): 'return the wildcard string for the filesave dialog' default_filetype = self.get_default_filetype() filetypes = self.get_supported_filetypes_grouped() sorted_filetypes = filetypes.items() sorted_filetypes.sort() wildcards = [] extensions = [] filter_index = 0 for i, (name, exts) in enumerate(sorted_filetypes): ext_list = ';'.join(['*.%s' % ext for ext in exts]) extensions.append(exts[0]) wildcard = '%s (%s)|%s' % (name, ext_list, ext_list) if default_filetype in exts: filter_index = i wildcards.append(wildcard) wildcards = '|'.join(wildcards) return wildcards, extensions, filter_index
def function[_get_imagesave_wildcards, parameter[self]]: constant[return the wildcard string for the filesave dialog] variable[default_filetype] assign[=] call[name[self].get_default_filetype, parameter[]] variable[filetypes] assign[=] call[name[self].get_supported_filetypes_grouped, parameter[]] variable[sorted_filetypes] assign[=] call[name[filetypes].items, parameter[]] call[name[sorted_filetypes].sort, parameter[]] variable[wildcards] assign[=] list[[]] variable[extensions] assign[=] list[[]] variable[filter_index] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b16d06d0>, <ast.Tuple object at 0x7da1b16d0520>]]] in starred[call[name[enumerate], parameter[name[sorted_filetypes]]]] begin[:] variable[ext_list] assign[=] call[constant[;].join, parameter[<ast.ListComp object at 0x7da1b16d1fc0>]] call[name[extensions].append, parameter[call[name[exts]][constant[0]]]] variable[wildcard] assign[=] binary_operation[constant[%s (%s)|%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b16d1390>, <ast.Name object at 0x7da1b16d02b0>, <ast.Name object at 0x7da1b16d1000>]]] if compare[name[default_filetype] in name[exts]] begin[:] variable[filter_index] assign[=] name[i] call[name[wildcards].append, parameter[name[wildcard]]] variable[wildcards] assign[=] call[constant[|].join, parameter[name[wildcards]]] return[tuple[[<ast.Name object at 0x7da1b16d1570>, <ast.Name object at 0x7da1b16d2da0>, <ast.Name object at 0x7da1b16d04f0>]]]
keyword[def] identifier[_get_imagesave_wildcards] ( identifier[self] ): literal[string] identifier[default_filetype] = identifier[self] . identifier[get_default_filetype] () identifier[filetypes] = identifier[self] . identifier[get_supported_filetypes_grouped] () identifier[sorted_filetypes] = identifier[filetypes] . identifier[items] () identifier[sorted_filetypes] . identifier[sort] () identifier[wildcards] =[] identifier[extensions] =[] identifier[filter_index] = literal[int] keyword[for] identifier[i] ,( identifier[name] , identifier[exts] ) keyword[in] identifier[enumerate] ( identifier[sorted_filetypes] ): identifier[ext_list] = literal[string] . identifier[join] ([ literal[string] % identifier[ext] keyword[for] identifier[ext] keyword[in] identifier[exts] ]) identifier[extensions] . identifier[append] ( identifier[exts] [ literal[int] ]) identifier[wildcard] = literal[string] %( identifier[name] , identifier[ext_list] , identifier[ext_list] ) keyword[if] identifier[default_filetype] keyword[in] identifier[exts] : identifier[filter_index] = identifier[i] identifier[wildcards] . identifier[append] ( identifier[wildcard] ) identifier[wildcards] = literal[string] . identifier[join] ( identifier[wildcards] ) keyword[return] identifier[wildcards] , identifier[extensions] , identifier[filter_index]
def _get_imagesave_wildcards(self): """return the wildcard string for the filesave dialog""" default_filetype = self.get_default_filetype() filetypes = self.get_supported_filetypes_grouped() sorted_filetypes = filetypes.items() sorted_filetypes.sort() wildcards = [] extensions = [] filter_index = 0 for (i, (name, exts)) in enumerate(sorted_filetypes): ext_list = ';'.join(['*.%s' % ext for ext in exts]) extensions.append(exts[0]) wildcard = '%s (%s)|%s' % (name, ext_list, ext_list) if default_filetype in exts: filter_index = i # depends on [control=['if'], data=[]] wildcards.append(wildcard) # depends on [control=['for'], data=[]] wildcards = '|'.join(wildcards) return (wildcards, extensions, filter_index)
def get_jid(jid): ''' Return the information returned when the specified job id was executed ''' serv = _get_serv(ret=None) ret = {} for minion, data in six.iteritems(serv.hgetall('ret:{0}'.format(jid))): if data: ret[minion] = salt.utils.json.loads(data) return ret
def function[get_jid, parameter[jid]]: constant[ Return the information returned when the specified job id was executed ] variable[serv] assign[=] call[name[_get_serv], parameter[]] variable[ret] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b21f3af0>, <ast.Name object at 0x7da1b21f1d20>]]] in starred[call[name[six].iteritems, parameter[call[name[serv].hgetall, parameter[call[constant[ret:{0}].format, parameter[name[jid]]]]]]]] begin[:] if name[data] begin[:] call[name[ret]][name[minion]] assign[=] call[name[salt].utils.json.loads, parameter[name[data]]] return[name[ret]]
keyword[def] identifier[get_jid] ( identifier[jid] ): literal[string] identifier[serv] = identifier[_get_serv] ( identifier[ret] = keyword[None] ) identifier[ret] ={} keyword[for] identifier[minion] , identifier[data] keyword[in] identifier[six] . identifier[iteritems] ( identifier[serv] . identifier[hgetall] ( literal[string] . identifier[format] ( identifier[jid] ))): keyword[if] identifier[data] : identifier[ret] [ identifier[minion] ]= identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[data] ) keyword[return] identifier[ret]
def get_jid(jid): """ Return the information returned when the specified job id was executed """ serv = _get_serv(ret=None) ret = {} for (minion, data) in six.iteritems(serv.hgetall('ret:{0}'.format(jid))): if data: ret[minion] = salt.utils.json.loads(data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return ret
def hash_data(self, hash_input, algorithm="sha2-256", output_format="hex", mount_point=DEFAULT_MOUNT_POINT): """Return the cryptographic hash of given data using the specified algorithm. Supported methods: POST: /{mount_point}/hash(/{algorithm}). Produces: 200 application/json :param hash_input: Specifies the base64 encoded input data. :type hash_input: str | unicode :param algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL. Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512 :type algorithm: str | unicode :param output_format: Specifies the output encoding. This can be either hex or base64. :type output_format: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: requests.Response """ if algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS: error_msg = 'invalid algorithm argument provided "{arg}", supported types: "{allowed_types}"' raise exceptions.ParamValidationError(error_msg.format( arg=algorithm, allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS), )) if output_format not in transit_constants.ALLOWED_HASH_DATA_FORMATS: error_msg = 'invalid output_format argument provided "{arg}", supported types: "{allowed_types}"' raise exceptions.ParamValidationError(error_msg.format( arg=output_format, allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_FORMATS), )) params = { 'input': hash_input, 'algorithm': algorithm, 'format': output_format, } api_path = '/v1/{mount_point}/hash'.format(mount_point=mount_point) response = self._adapter.post( url=api_path, json=params, ) return response.json()
def function[hash_data, parameter[self, hash_input, algorithm, output_format, mount_point]]: constant[Return the cryptographic hash of given data using the specified algorithm. Supported methods: POST: /{mount_point}/hash(/{algorithm}). Produces: 200 application/json :param hash_input: Specifies the base64 encoded input data. :type hash_input: str | unicode :param algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL. Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512 :type algorithm: str | unicode :param output_format: Specifies the output encoding. This can be either hex or base64. :type output_format: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: requests.Response ] if compare[name[algorithm] <ast.NotIn object at 0x7da2590d7190> name[transit_constants].ALLOWED_HASH_DATA_ALGORITHMS] begin[:] variable[error_msg] assign[=] constant[invalid algorithm argument provided "{arg}", supported types: "{allowed_types}"] <ast.Raise object at 0x7da18ede4e80> if compare[name[output_format] <ast.NotIn object at 0x7da2590d7190> name[transit_constants].ALLOWED_HASH_DATA_FORMATS] begin[:] variable[error_msg] assign[=] constant[invalid output_format argument provided "{arg}", supported types: "{allowed_types}"] <ast.Raise object at 0x7da18ede7e50> variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18ede5360>, <ast.Constant object at 0x7da18ede4880>, <ast.Constant object at 0x7da18ede69b0>], [<ast.Name object at 0x7da18ede6b00>, <ast.Name object at 0x7da18ede4b50>, <ast.Name object at 0x7da18ede41c0>]] variable[api_path] assign[=] call[constant[/v1/{mount_point}/hash].format, parameter[]] variable[response] assign[=] call[name[self]._adapter.post, parameter[]] return[call[name[response].json, parameter[]]]
keyword[def] identifier[hash_data] ( identifier[self] , identifier[hash_input] , identifier[algorithm] = literal[string] , identifier[output_format] = literal[string] , identifier[mount_point] = identifier[DEFAULT_MOUNT_POINT] ): literal[string] keyword[if] identifier[algorithm] keyword[not] keyword[in] identifier[transit_constants] . identifier[ALLOWED_HASH_DATA_ALGORITHMS] : identifier[error_msg] = literal[string] keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] ( identifier[arg] = identifier[algorithm] , identifier[allowed_types] = literal[string] . identifier[join] ( identifier[transit_constants] . identifier[ALLOWED_HASH_DATA_ALGORITHMS] ), )) keyword[if] identifier[output_format] keyword[not] keyword[in] identifier[transit_constants] . identifier[ALLOWED_HASH_DATA_FORMATS] : identifier[error_msg] = literal[string] keyword[raise] identifier[exceptions] . identifier[ParamValidationError] ( identifier[error_msg] . identifier[format] ( identifier[arg] = identifier[output_format] , identifier[allowed_types] = literal[string] . identifier[join] ( identifier[transit_constants] . identifier[ALLOWED_HASH_DATA_FORMATS] ), )) identifier[params] ={ literal[string] : identifier[hash_input] , literal[string] : identifier[algorithm] , literal[string] : identifier[output_format] , } identifier[api_path] = literal[string] . identifier[format] ( identifier[mount_point] = identifier[mount_point] ) identifier[response] = identifier[self] . identifier[_adapter] . identifier[post] ( identifier[url] = identifier[api_path] , identifier[json] = identifier[params] , ) keyword[return] identifier[response] . identifier[json] ()
def hash_data(self, hash_input, algorithm='sha2-256', output_format='hex', mount_point=DEFAULT_MOUNT_POINT): """Return the cryptographic hash of given data using the specified algorithm. Supported methods: POST: /{mount_point}/hash(/{algorithm}). Produces: 200 application/json :param hash_input: Specifies the base64 encoded input data. :type hash_input: str | unicode :param algorithm: Specifies the hash algorithm to use. This can also be specified as part of the URL. Currently-supported algorithms are: sha2-224, sha2-256, sha2-384, sha2-512 :type algorithm: str | unicode :param output_format: Specifies the output encoding. This can be either hex or base64. :type output_format: str | unicode :param mount_point: The "path" the method/backend was mounted on. :type mount_point: str | unicode :return: The JSON response of the request. :rtype: requests.Response """ if algorithm not in transit_constants.ALLOWED_HASH_DATA_ALGORITHMS: error_msg = 'invalid algorithm argument provided "{arg}", supported types: "{allowed_types}"' raise exceptions.ParamValidationError(error_msg.format(arg=algorithm, allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_ALGORITHMS))) # depends on [control=['if'], data=['algorithm']] if output_format not in transit_constants.ALLOWED_HASH_DATA_FORMATS: error_msg = 'invalid output_format argument provided "{arg}", supported types: "{allowed_types}"' raise exceptions.ParamValidationError(error_msg.format(arg=output_format, allowed_types=', '.join(transit_constants.ALLOWED_HASH_DATA_FORMATS))) # depends on [control=['if'], data=['output_format']] params = {'input': hash_input, 'algorithm': algorithm, 'format': output_format} api_path = '/v1/{mount_point}/hash'.format(mount_point=mount_point) response = self._adapter.post(url=api_path, json=params) return response.json()
def asDict(self): """Returns a serializable object""" return { 'isError': self.isError, 'message': self.message, 'values': self.values, 'value': self.value, }
def function[asDict, parameter[self]]: constant[Returns a serializable object] return[dictionary[[<ast.Constant object at 0x7da18dc058d0>, <ast.Constant object at 0x7da18dc066b0>, <ast.Constant object at 0x7da18dc07580>, <ast.Constant object at 0x7da18dc06020>], [<ast.Attribute object at 0x7da18dc05630>, <ast.Attribute object at 0x7da18dc05d50>, <ast.Attribute object at 0x7da18dc04700>, <ast.Attribute object at 0x7da18dc05150>]]]
keyword[def] identifier[asDict] ( identifier[self] ): literal[string] keyword[return] { literal[string] : identifier[self] . identifier[isError] , literal[string] : identifier[self] . identifier[message] , literal[string] : identifier[self] . identifier[values] , literal[string] : identifier[self] . identifier[value] , }
def asDict(self): """Returns a serializable object""" return {'isError': self.isError, 'message': self.message, 'values': self.values, 'value': self.value}
def is_unclaimed(work): """Returns True if work piece is unclaimed.""" if work['is_completed']: return False cutoff_time = time.time() - MAX_PROCESSING_TIME if (work['claimed_worker_id'] and work['claimed_worker_start_time'] is not None and work['claimed_worker_start_time'] >= cutoff_time): return False return True
def function[is_unclaimed, parameter[work]]: constant[Returns True if work piece is unclaimed.] if call[name[work]][constant[is_completed]] begin[:] return[constant[False]] variable[cutoff_time] assign[=] binary_operation[call[name[time].time, parameter[]] - name[MAX_PROCESSING_TIME]] if <ast.BoolOp object at 0x7da1b1fca530> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_unclaimed] ( identifier[work] ): literal[string] keyword[if] identifier[work] [ literal[string] ]: keyword[return] keyword[False] identifier[cutoff_time] = identifier[time] . identifier[time] ()- identifier[MAX_PROCESSING_TIME] keyword[if] ( identifier[work] [ literal[string] ] keyword[and] identifier[work] [ literal[string] ] keyword[is] keyword[not] keyword[None] keyword[and] identifier[work] [ literal[string] ]>= identifier[cutoff_time] ): keyword[return] keyword[False] keyword[return] keyword[True]
def is_unclaimed(work): """Returns True if work piece is unclaimed.""" if work['is_completed']: return False # depends on [control=['if'], data=[]] cutoff_time = time.time() - MAX_PROCESSING_TIME if work['claimed_worker_id'] and work['claimed_worker_start_time'] is not None and (work['claimed_worker_start_time'] >= cutoff_time): return False # depends on [control=['if'], data=[]] return True
def cleanup_properties(rdf): """Remove unnecessary property definitions. Reemoves SKOS and DC property definitions and definitions of unused properties.""" for t in (RDF.Property, OWL.DatatypeProperty, OWL.ObjectProperty, OWL.SymmetricProperty, OWL.TransitiveProperty, OWL.InverseFunctionalProperty, OWL.FunctionalProperty): for prop in rdf.subjects(RDF.type, t): if prop.startswith(SKOS): logging.debug( "removing SKOS property definition: %s", prop) replace_subject(rdf, prop, None) continue if prop.startswith(DC): logging.debug("removing DC property definition: %s", prop) replace_subject(rdf, prop, None) continue # if there are triples using the property, keep the property def if len(list(rdf.subject_objects(prop))) > 0: continue logging.debug("removing unused property definition: %s", prop) replace_subject(rdf, prop, None)
def function[cleanup_properties, parameter[rdf]]: constant[Remove unnecessary property definitions. Reemoves SKOS and DC property definitions and definitions of unused properties.] for taget[name[t]] in starred[tuple[[<ast.Attribute object at 0x7da1b0400700>, <ast.Attribute object at 0x7da1b0400070>, <ast.Attribute object at 0x7da1b0400550>, <ast.Attribute object at 0x7da1b0401780>, <ast.Attribute object at 0x7da1b0402aa0>, <ast.Attribute object at 0x7da1b0400c70>, <ast.Attribute object at 0x7da1b0402230>]]] begin[:] for taget[name[prop]] in starred[call[name[rdf].subjects, parameter[name[RDF].type, name[t]]]] begin[:] if call[name[prop].startswith, parameter[name[SKOS]]] begin[:] call[name[logging].debug, parameter[constant[removing SKOS property definition: %s], name[prop]]] call[name[replace_subject], parameter[name[rdf], name[prop], constant[None]]] continue if call[name[prop].startswith, parameter[name[DC]]] begin[:] call[name[logging].debug, parameter[constant[removing DC property definition: %s], name[prop]]] call[name[replace_subject], parameter[name[rdf], name[prop], constant[None]]] continue if compare[call[name[len], parameter[call[name[list], parameter[call[name[rdf].subject_objects, parameter[name[prop]]]]]]] greater[>] constant[0]] begin[:] continue call[name[logging].debug, parameter[constant[removing unused property definition: %s], name[prop]]] call[name[replace_subject], parameter[name[rdf], name[prop], constant[None]]]
keyword[def] identifier[cleanup_properties] ( identifier[rdf] ): literal[string] keyword[for] identifier[t] keyword[in] ( identifier[RDF] . identifier[Property] , identifier[OWL] . identifier[DatatypeProperty] , identifier[OWL] . identifier[ObjectProperty] , identifier[OWL] . identifier[SymmetricProperty] , identifier[OWL] . identifier[TransitiveProperty] , identifier[OWL] . identifier[InverseFunctionalProperty] , identifier[OWL] . identifier[FunctionalProperty] ): keyword[for] identifier[prop] keyword[in] identifier[rdf] . identifier[subjects] ( identifier[RDF] . identifier[type] , identifier[t] ): keyword[if] identifier[prop] . identifier[startswith] ( identifier[SKOS] ): identifier[logging] . identifier[debug] ( literal[string] , identifier[prop] ) identifier[replace_subject] ( identifier[rdf] , identifier[prop] , keyword[None] ) keyword[continue] keyword[if] identifier[prop] . identifier[startswith] ( identifier[DC] ): identifier[logging] . identifier[debug] ( literal[string] , identifier[prop] ) identifier[replace_subject] ( identifier[rdf] , identifier[prop] , keyword[None] ) keyword[continue] keyword[if] identifier[len] ( identifier[list] ( identifier[rdf] . identifier[subject_objects] ( identifier[prop] )))> literal[int] : keyword[continue] identifier[logging] . identifier[debug] ( literal[string] , identifier[prop] ) identifier[replace_subject] ( identifier[rdf] , identifier[prop] , keyword[None] )
def cleanup_properties(rdf): """Remove unnecessary property definitions. Reemoves SKOS and DC property definitions and definitions of unused properties.""" for t in (RDF.Property, OWL.DatatypeProperty, OWL.ObjectProperty, OWL.SymmetricProperty, OWL.TransitiveProperty, OWL.InverseFunctionalProperty, OWL.FunctionalProperty): for prop in rdf.subjects(RDF.type, t): if prop.startswith(SKOS): logging.debug('removing SKOS property definition: %s', prop) replace_subject(rdf, prop, None) continue # depends on [control=['if'], data=[]] if prop.startswith(DC): logging.debug('removing DC property definition: %s', prop) replace_subject(rdf, prop, None) continue # depends on [control=['if'], data=[]] # if there are triples using the property, keep the property def if len(list(rdf.subject_objects(prop))) > 0: continue # depends on [control=['if'], data=[]] logging.debug('removing unused property definition: %s', prop) replace_subject(rdf, prop, None) # depends on [control=['for'], data=['prop']] # depends on [control=['for'], data=['t']]
def get_edef_props( object_class, exported_cfgs, ep_namespace, ep_id, ecf_ep_id, ep_rsvc_id, ep_ts, remote_intents=None, fw_id=None, pkg_ver=None, service_intents=None, ): """ Prepares the EDEF properties of an endpoint, merge of RSA and ECF properties """ osgi_props = get_rsa_props( object_class, exported_cfgs, remote_intents, ep_rsvc_id, fw_id, pkg_ver, service_intents, ) ecf_props = get_ecf_props(ecf_ep_id, ep_namespace, ep_rsvc_id, ep_ts) return merge_dicts(osgi_props, ecf_props)
def function[get_edef_props, parameter[object_class, exported_cfgs, ep_namespace, ep_id, ecf_ep_id, ep_rsvc_id, ep_ts, remote_intents, fw_id, pkg_ver, service_intents]]: constant[ Prepares the EDEF properties of an endpoint, merge of RSA and ECF properties ] variable[osgi_props] assign[=] call[name[get_rsa_props], parameter[name[object_class], name[exported_cfgs], name[remote_intents], name[ep_rsvc_id], name[fw_id], name[pkg_ver], name[service_intents]]] variable[ecf_props] assign[=] call[name[get_ecf_props], parameter[name[ecf_ep_id], name[ep_namespace], name[ep_rsvc_id], name[ep_ts]]] return[call[name[merge_dicts], parameter[name[osgi_props], name[ecf_props]]]]
keyword[def] identifier[get_edef_props] ( identifier[object_class] , identifier[exported_cfgs] , identifier[ep_namespace] , identifier[ep_id] , identifier[ecf_ep_id] , identifier[ep_rsvc_id] , identifier[ep_ts] , identifier[remote_intents] = keyword[None] , identifier[fw_id] = keyword[None] , identifier[pkg_ver] = keyword[None] , identifier[service_intents] = keyword[None] , ): literal[string] identifier[osgi_props] = identifier[get_rsa_props] ( identifier[object_class] , identifier[exported_cfgs] , identifier[remote_intents] , identifier[ep_rsvc_id] , identifier[fw_id] , identifier[pkg_ver] , identifier[service_intents] , ) identifier[ecf_props] = identifier[get_ecf_props] ( identifier[ecf_ep_id] , identifier[ep_namespace] , identifier[ep_rsvc_id] , identifier[ep_ts] ) keyword[return] identifier[merge_dicts] ( identifier[osgi_props] , identifier[ecf_props] )
def get_edef_props(object_class, exported_cfgs, ep_namespace, ep_id, ecf_ep_id, ep_rsvc_id, ep_ts, remote_intents=None, fw_id=None, pkg_ver=None, service_intents=None): """ Prepares the EDEF properties of an endpoint, merge of RSA and ECF properties """ osgi_props = get_rsa_props(object_class, exported_cfgs, remote_intents, ep_rsvc_id, fw_id, pkg_ver, service_intents) ecf_props = get_ecf_props(ecf_ep_id, ep_namespace, ep_rsvc_id, ep_ts) return merge_dicts(osgi_props, ecf_props)
def install(ctx, integrations, delete_after_install=False): """Install a honeycomb integration from the online library, local path or zipfile.""" logger.debug("running command %s (%s)", ctx.command.name, ctx.params, extra={"command": ctx.command.name, "params": ctx.params}) home = ctx.obj["HOME"] integrations_path = os.path.join(home, INTEGRATIONS) installed_all_plugins = True for integration in integrations: try: plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration) except exceptions.PluginAlreadyInstalled as exc: click.echo(exc) installed_all_plugins = False if not installed_all_plugins: raise ctx.exit(errno.EEXIST)
def function[install, parameter[ctx, integrations, delete_after_install]]: constant[Install a honeycomb integration from the online library, local path or zipfile.] call[name[logger].debug, parameter[constant[running command %s (%s)], name[ctx].command.name, name[ctx].params]] variable[home] assign[=] call[name[ctx].obj][constant[HOME]] variable[integrations_path] assign[=] call[name[os].path.join, parameter[name[home], name[INTEGRATIONS]]] variable[installed_all_plugins] assign[=] constant[True] for taget[name[integration]] in starred[name[integrations]] begin[:] <ast.Try object at 0x7da1b1254fa0> if <ast.UnaryOp object at 0x7da1b1254640> begin[:] <ast.Raise object at 0x7da1b1254a90>
keyword[def] identifier[install] ( identifier[ctx] , identifier[integrations] , identifier[delete_after_install] = keyword[False] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] , identifier[ctx] . identifier[command] . identifier[name] , identifier[ctx] . identifier[params] , identifier[extra] ={ literal[string] : identifier[ctx] . identifier[command] . identifier[name] , literal[string] : identifier[ctx] . identifier[params] }) identifier[home] = identifier[ctx] . identifier[obj] [ literal[string] ] identifier[integrations_path] = identifier[os] . identifier[path] . identifier[join] ( identifier[home] , identifier[INTEGRATIONS] ) identifier[installed_all_plugins] = keyword[True] keyword[for] identifier[integration] keyword[in] identifier[integrations] : keyword[try] : identifier[plugin_utils] . identifier[install_plugin] ( identifier[integration] , identifier[INTEGRATION] , identifier[integrations_path] , identifier[register_integration] ) keyword[except] identifier[exceptions] . identifier[PluginAlreadyInstalled] keyword[as] identifier[exc] : identifier[click] . identifier[echo] ( identifier[exc] ) identifier[installed_all_plugins] = keyword[False] keyword[if] keyword[not] identifier[installed_all_plugins] : keyword[raise] identifier[ctx] . identifier[exit] ( identifier[errno] . identifier[EEXIST] )
def install(ctx, integrations, delete_after_install=False): """Install a honeycomb integration from the online library, local path or zipfile.""" logger.debug('running command %s (%s)', ctx.command.name, ctx.params, extra={'command': ctx.command.name, 'params': ctx.params}) home = ctx.obj['HOME'] integrations_path = os.path.join(home, INTEGRATIONS) installed_all_plugins = True for integration in integrations: try: plugin_utils.install_plugin(integration, INTEGRATION, integrations_path, register_integration) # depends on [control=['try'], data=[]] except exceptions.PluginAlreadyInstalled as exc: click.echo(exc) installed_all_plugins = False # depends on [control=['except'], data=['exc']] # depends on [control=['for'], data=['integration']] if not installed_all_plugins: raise ctx.exit(errno.EEXIST) # depends on [control=['if'], data=[]]
def normalize(self, bias_range=1, quadratic_range=None, ignored_variables=None, ignored_interactions=None, ignore_offset=False): """Normalizes the biases of the binary quadratic model such that they fall in the provided range(s), and adjusts the offset appropriately. If `quadratic_range` is provided, then `bias_range` will be treated as the range for the linear biases and `quadratic_range` will be used for the range of the quadratic biases. Args: bias_range (number/pair): Value/range by which to normalize the all the biases, or if `quadratic_range` is provided, just the linear biases. quadratic_range (number/pair): Value/range by which to normalize the quadratic biases. ignored_variables (iterable, optional): Biases associated with these variables are not scaled. ignored_interactions (iterable[tuple], optional): As an iterable of 2-tuples. Biases associated with these interactions are not scaled. ignore_offset (bool, default=False): If True, the offset is not scaled. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -2.0, 'b': 1.5}, ... {('a', 'b'): -1.0}, ... 
1.0, dimod.SPIN) >>> max(abs(bias) for bias in bqm.linear.values()) 2.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 1.0 >>> bqm.normalize([-1.0, 1.0]) >>> max(abs(bias) for bias in bqm.linear.values()) 1.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 0.5 """ def parse_range(r): if isinstance(r, Number): return -abs(r), abs(r) return r def min_and_max(iterable): if not iterable: return 0, 0 return min(iterable), max(iterable) if ignored_variables is None: ignored_variables = set() elif not isinstance(ignored_variables, abc.Container): ignored_variables = set(ignored_variables) if ignored_interactions is None: ignored_interactions = set() elif not isinstance(ignored_interactions, abc.Container): ignored_interactions = set(ignored_interactions) if quadratic_range is None: linear_range, quadratic_range = bias_range, bias_range else: linear_range = bias_range lin_range, quad_range = map(parse_range, (linear_range, quadratic_range)) lin_min, lin_max = min_and_max([v for k, v in self.linear.items() if k not in ignored_variables]) quad_min, quad_max = min_and_max([v for (a, b), v in self.quadratic.items() if ((a, b) not in ignored_interactions and (b, a) not in ignored_interactions)]) inv_scalar = max(lin_min / lin_range[0], lin_max / lin_range[1], quad_min / quad_range[0], quad_max / quad_range[1]) if inv_scalar != 0: self.scale(1 / inv_scalar, ignored_variables=ignored_variables, ignored_interactions=ignored_interactions, ignore_offset=ignore_offset)
def function[normalize, parameter[self, bias_range, quadratic_range, ignored_variables, ignored_interactions, ignore_offset]]: constant[Normalizes the biases of the binary quadratic model such that they fall in the provided range(s), and adjusts the offset appropriately. If `quadratic_range` is provided, then `bias_range` will be treated as the range for the linear biases and `quadratic_range` will be used for the range of the quadratic biases. Args: bias_range (number/pair): Value/range by which to normalize the all the biases, or if `quadratic_range` is provided, just the linear biases. quadratic_range (number/pair): Value/range by which to normalize the quadratic biases. ignored_variables (iterable, optional): Biases associated with these variables are not scaled. ignored_interactions (iterable[tuple], optional): As an iterable of 2-tuples. Biases associated with these interactions are not scaled. ignore_offset (bool, default=False): If True, the offset is not scaled. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -2.0, 'b': 1.5}, ... {('a', 'b'): -1.0}, ... 
1.0, dimod.SPIN) >>> max(abs(bias) for bias in bqm.linear.values()) 2.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 1.0 >>> bqm.normalize([-1.0, 1.0]) >>> max(abs(bias) for bias in bqm.linear.values()) 1.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 0.5 ] def function[parse_range, parameter[r]]: if call[name[isinstance], parameter[name[r], name[Number]]] begin[:] return[tuple[[<ast.UnaryOp object at 0x7da1b0786e00>, <ast.Call object at 0x7da1b0786a10>]]] return[name[r]] def function[min_and_max, parameter[iterable]]: if <ast.UnaryOp object at 0x7da1b0784700> begin[:] return[tuple[[<ast.Constant object at 0x7da1b07860b0>, <ast.Constant object at 0x7da1b0784a30>]]] return[tuple[[<ast.Call object at 0x7da1b0785030>, <ast.Call object at 0x7da1b0786860>]]] if compare[name[ignored_variables] is constant[None]] begin[:] variable[ignored_variables] assign[=] call[name[set], parameter[]] if compare[name[ignored_interactions] is constant[None]] begin[:] variable[ignored_interactions] assign[=] call[name[set], parameter[]] if compare[name[quadratic_range] is constant[None]] begin[:] <ast.Tuple object at 0x7da1b0786a40> assign[=] tuple[[<ast.Name object at 0x7da1b0784ac0>, <ast.Name object at 0x7da1b0786950>]] <ast.Tuple object at 0x7da1b0784c70> assign[=] call[name[map], parameter[name[parse_range], tuple[[<ast.Name object at 0x7da1b0785c30>, <ast.Name object at 0x7da1b0786f20>]]]] <ast.Tuple object at 0x7da1b07858d0> assign[=] call[name[min_and_max], parameter[<ast.ListComp object at 0x7da1b07849a0>]] <ast.Tuple object at 0x7da1b0785a20> assign[=] call[name[min_and_max], parameter[<ast.ListComp object at 0x7da1b0785960>]] variable[inv_scalar] assign[=] call[name[max], parameter[binary_operation[name[lin_min] / call[name[lin_range]][constant[0]]], binary_operation[name[lin_max] / call[name[lin_range]][constant[1]]], binary_operation[name[quad_min] / call[name[quad_range]][constant[0]]], binary_operation[name[quad_max] / 
call[name[quad_range]][constant[1]]]]] if compare[name[inv_scalar] not_equal[!=] constant[0]] begin[:] call[name[self].scale, parameter[binary_operation[constant[1] / name[inv_scalar]]]]
keyword[def] identifier[normalize] ( identifier[self] , identifier[bias_range] = literal[int] , identifier[quadratic_range] = keyword[None] , identifier[ignored_variables] = keyword[None] , identifier[ignored_interactions] = keyword[None] , identifier[ignore_offset] = keyword[False] ): literal[string] keyword[def] identifier[parse_range] ( identifier[r] ): keyword[if] identifier[isinstance] ( identifier[r] , identifier[Number] ): keyword[return] - identifier[abs] ( identifier[r] ), identifier[abs] ( identifier[r] ) keyword[return] identifier[r] keyword[def] identifier[min_and_max] ( identifier[iterable] ): keyword[if] keyword[not] identifier[iterable] : keyword[return] literal[int] , literal[int] keyword[return] identifier[min] ( identifier[iterable] ), identifier[max] ( identifier[iterable] ) keyword[if] identifier[ignored_variables] keyword[is] keyword[None] : identifier[ignored_variables] = identifier[set] () keyword[elif] keyword[not] identifier[isinstance] ( identifier[ignored_variables] , identifier[abc] . identifier[Container] ): identifier[ignored_variables] = identifier[set] ( identifier[ignored_variables] ) keyword[if] identifier[ignored_interactions] keyword[is] keyword[None] : identifier[ignored_interactions] = identifier[set] () keyword[elif] keyword[not] identifier[isinstance] ( identifier[ignored_interactions] , identifier[abc] . 
identifier[Container] ): identifier[ignored_interactions] = identifier[set] ( identifier[ignored_interactions] ) keyword[if] identifier[quadratic_range] keyword[is] keyword[None] : identifier[linear_range] , identifier[quadratic_range] = identifier[bias_range] , identifier[bias_range] keyword[else] : identifier[linear_range] = identifier[bias_range] identifier[lin_range] , identifier[quad_range] = identifier[map] ( identifier[parse_range] ,( identifier[linear_range] , identifier[quadratic_range] )) identifier[lin_min] , identifier[lin_max] = identifier[min_and_max] ([ identifier[v] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[linear] . identifier[items] () keyword[if] identifier[k] keyword[not] keyword[in] identifier[ignored_variables] ]) identifier[quad_min] , identifier[quad_max] = identifier[min_and_max] ([ identifier[v] keyword[for] ( identifier[a] , identifier[b] ), identifier[v] keyword[in] identifier[self] . identifier[quadratic] . identifier[items] () keyword[if] (( identifier[a] , identifier[b] ) keyword[not] keyword[in] identifier[ignored_interactions] keyword[and] ( identifier[b] , identifier[a] ) keyword[not] keyword[in] identifier[ignored_interactions] )]) identifier[inv_scalar] = identifier[max] ( identifier[lin_min] / identifier[lin_range] [ literal[int] ], identifier[lin_max] / identifier[lin_range] [ literal[int] ], identifier[quad_min] / identifier[quad_range] [ literal[int] ], identifier[quad_max] / identifier[quad_range] [ literal[int] ]) keyword[if] identifier[inv_scalar] != literal[int] : identifier[self] . identifier[scale] ( literal[int] / identifier[inv_scalar] , identifier[ignored_variables] = identifier[ignored_variables] , identifier[ignored_interactions] = identifier[ignored_interactions] , identifier[ignore_offset] = identifier[ignore_offset] )
def normalize(self, bias_range=1, quadratic_range=None, ignored_variables=None, ignored_interactions=None, ignore_offset=False): """Normalizes the biases of the binary quadratic model such that they fall in the provided range(s), and adjusts the offset appropriately. If `quadratic_range` is provided, then `bias_range` will be treated as the range for the linear biases and `quadratic_range` will be used for the range of the quadratic biases. Args: bias_range (number/pair): Value/range by which to normalize the all the biases, or if `quadratic_range` is provided, just the linear biases. quadratic_range (number/pair): Value/range by which to normalize the quadratic biases. ignored_variables (iterable, optional): Biases associated with these variables are not scaled. ignored_interactions (iterable[tuple], optional): As an iterable of 2-tuples. Biases associated with these interactions are not scaled. ignore_offset (bool, default=False): If True, the offset is not scaled. Examples: >>> bqm = dimod.BinaryQuadraticModel({'a': -2.0, 'b': 1.5}, ... {('a', 'b'): -1.0}, ... 
1.0, dimod.SPIN) >>> max(abs(bias) for bias in bqm.linear.values()) 2.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 1.0 >>> bqm.normalize([-1.0, 1.0]) >>> max(abs(bias) for bias in bqm.linear.values()) 1.0 >>> max(abs(bias) for bias in bqm.quadratic.values()) 0.5 """ def parse_range(r): if isinstance(r, Number): return (-abs(r), abs(r)) # depends on [control=['if'], data=[]] return r def min_and_max(iterable): if not iterable: return (0, 0) # depends on [control=['if'], data=[]] return (min(iterable), max(iterable)) if ignored_variables is None: ignored_variables = set() # depends on [control=['if'], data=['ignored_variables']] elif not isinstance(ignored_variables, abc.Container): ignored_variables = set(ignored_variables) # depends on [control=['if'], data=[]] if ignored_interactions is None: ignored_interactions = set() # depends on [control=['if'], data=['ignored_interactions']] elif not isinstance(ignored_interactions, abc.Container): ignored_interactions = set(ignored_interactions) # depends on [control=['if'], data=[]] if quadratic_range is None: (linear_range, quadratic_range) = (bias_range, bias_range) # depends on [control=['if'], data=['quadratic_range']] else: linear_range = bias_range (lin_range, quad_range) = map(parse_range, (linear_range, quadratic_range)) (lin_min, lin_max) = min_and_max([v for (k, v) in self.linear.items() if k not in ignored_variables]) (quad_min, quad_max) = min_and_max([v for ((a, b), v) in self.quadratic.items() if (a, b) not in ignored_interactions and (b, a) not in ignored_interactions]) inv_scalar = max(lin_min / lin_range[0], lin_max / lin_range[1], quad_min / quad_range[0], quad_max / quad_range[1]) if inv_scalar != 0: self.scale(1 / inv_scalar, ignored_variables=ignored_variables, ignored_interactions=ignored_interactions, ignore_offset=ignore_offset) # depends on [control=['if'], data=['inv_scalar']]
def collect_dashboard_js(collector): """Generate dashboard javascript for each dashboard""" dashmat = collector.configuration["dashmat"] modules = collector.configuration["__active_modules__"] compiled_static_prep = dashmat.compiled_static_prep compiled_static_folder = dashmat.compiled_static_folder npm_deps = list_npm_modules(collector, no_print=True) react_server = ReactServer() react_server.prepare(npm_deps, compiled_static_folder) for dashboard in collector.configuration["dashboards"].values(): log.info("Generating compiled javascript for dashboard:{0}".format(dashboard.path)) filename = dashboard.path.replace("_", "__").replace("/", "_") location = os.path.join(compiled_static_folder, "dashboards", "{0}.js".format(filename)) if os.path.exists(location): os.remove(location) generate_dashboard_js(dashboard, react_server, compiled_static_folder, compiled_static_prep, modules)
def function[collect_dashboard_js, parameter[collector]]: constant[Generate dashboard javascript for each dashboard] variable[dashmat] assign[=] call[name[collector].configuration][constant[dashmat]] variable[modules] assign[=] call[name[collector].configuration][constant[__active_modules__]] variable[compiled_static_prep] assign[=] name[dashmat].compiled_static_prep variable[compiled_static_folder] assign[=] name[dashmat].compiled_static_folder variable[npm_deps] assign[=] call[name[list_npm_modules], parameter[name[collector]]] variable[react_server] assign[=] call[name[ReactServer], parameter[]] call[name[react_server].prepare, parameter[name[npm_deps], name[compiled_static_folder]]] for taget[name[dashboard]] in starred[call[call[name[collector].configuration][constant[dashboards]].values, parameter[]]] begin[:] call[name[log].info, parameter[call[constant[Generating compiled javascript for dashboard:{0}].format, parameter[name[dashboard].path]]]] variable[filename] assign[=] call[call[name[dashboard].path.replace, parameter[constant[_], constant[__]]].replace, parameter[constant[/], constant[_]]] variable[location] assign[=] call[name[os].path.join, parameter[name[compiled_static_folder], constant[dashboards], call[constant[{0}.js].format, parameter[name[filename]]]]] if call[name[os].path.exists, parameter[name[location]]] begin[:] call[name[os].remove, parameter[name[location]]] call[name[generate_dashboard_js], parameter[name[dashboard], name[react_server], name[compiled_static_folder], name[compiled_static_prep], name[modules]]]
keyword[def] identifier[collect_dashboard_js] ( identifier[collector] ): literal[string] identifier[dashmat] = identifier[collector] . identifier[configuration] [ literal[string] ] identifier[modules] = identifier[collector] . identifier[configuration] [ literal[string] ] identifier[compiled_static_prep] = identifier[dashmat] . identifier[compiled_static_prep] identifier[compiled_static_folder] = identifier[dashmat] . identifier[compiled_static_folder] identifier[npm_deps] = identifier[list_npm_modules] ( identifier[collector] , identifier[no_print] = keyword[True] ) identifier[react_server] = identifier[ReactServer] () identifier[react_server] . identifier[prepare] ( identifier[npm_deps] , identifier[compiled_static_folder] ) keyword[for] identifier[dashboard] keyword[in] identifier[collector] . identifier[configuration] [ literal[string] ]. identifier[values] (): identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[dashboard] . identifier[path] )) identifier[filename] = identifier[dashboard] . identifier[path] . identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ) identifier[location] = identifier[os] . identifier[path] . identifier[join] ( identifier[compiled_static_folder] , literal[string] , literal[string] . identifier[format] ( identifier[filename] )) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[location] ): identifier[os] . identifier[remove] ( identifier[location] ) identifier[generate_dashboard_js] ( identifier[dashboard] , identifier[react_server] , identifier[compiled_static_folder] , identifier[compiled_static_prep] , identifier[modules] )
def collect_dashboard_js(collector): """Generate dashboard javascript for each dashboard""" dashmat = collector.configuration['dashmat'] modules = collector.configuration['__active_modules__'] compiled_static_prep = dashmat.compiled_static_prep compiled_static_folder = dashmat.compiled_static_folder npm_deps = list_npm_modules(collector, no_print=True) react_server = ReactServer() react_server.prepare(npm_deps, compiled_static_folder) for dashboard in collector.configuration['dashboards'].values(): log.info('Generating compiled javascript for dashboard:{0}'.format(dashboard.path)) filename = dashboard.path.replace('_', '__').replace('/', '_') location = os.path.join(compiled_static_folder, 'dashboards', '{0}.js'.format(filename)) if os.path.exists(location): os.remove(location) # depends on [control=['if'], data=[]] generate_dashboard_js(dashboard, react_server, compiled_static_folder, compiled_static_prep, modules) # depends on [control=['for'], data=['dashboard']]
def update(self, ttl=values.unset): """ Update the SyncStreamInstance :param unicode ttl: Stream TTL. :returns: Updated SyncStreamInstance :rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance """ return self._proxy.update(ttl=ttl, )
def function[update, parameter[self, ttl]]: constant[ Update the SyncStreamInstance :param unicode ttl: Stream TTL. :returns: Updated SyncStreamInstance :rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance ] return[call[name[self]._proxy.update, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[ttl] = identifier[values] . identifier[unset] ): literal[string] keyword[return] identifier[self] . identifier[_proxy] . identifier[update] ( identifier[ttl] = identifier[ttl] ,)
def update(self, ttl=values.unset): """ Update the SyncStreamInstance :param unicode ttl: Stream TTL. :returns: Updated SyncStreamInstance :rtype: twilio.rest.sync.v1.service.sync_stream.SyncStreamInstance """ return self._proxy.update(ttl=ttl)
def p_propertyDeclaration_8(p): # pylint: disable=line-too-long """propertyDeclaration_8 : qualifierList dataType propertyName array defaultValue ';'""" # noqa: E501 quals = OrderedDict([(x.name, x) for x in p[1]]) p[0] = CIMProperty(p[3], cimvalue(p[5], p[2]), type=p[2], qualifiers=quals, is_array=True, array_size=p[4])
def function[p_propertyDeclaration_8, parameter[p]]: constant[propertyDeclaration_8 : qualifierList dataType propertyName array defaultValue ';'] variable[quals] assign[=] call[name[OrderedDict], parameter[<ast.ListComp object at 0x7da18f00eb30>]] call[name[p]][constant[0]] assign[=] call[name[CIMProperty], parameter[call[name[p]][constant[3]], call[name[cimvalue], parameter[call[name[p]][constant[5]], call[name[p]][constant[2]]]]]]
keyword[def] identifier[p_propertyDeclaration_8] ( identifier[p] ): literal[string] identifier[quals] = identifier[OrderedDict] ([( identifier[x] . identifier[name] , identifier[x] ) keyword[for] identifier[x] keyword[in] identifier[p] [ literal[int] ]]) identifier[p] [ literal[int] ]= identifier[CIMProperty] ( identifier[p] [ literal[int] ], identifier[cimvalue] ( identifier[p] [ literal[int] ], identifier[p] [ literal[int] ]), identifier[type] = identifier[p] [ literal[int] ], identifier[qualifiers] = identifier[quals] , identifier[is_array] = keyword[True] , identifier[array_size] = identifier[p] [ literal[int] ])
def p_propertyDeclaration_8(p): # pylint: disable=line-too-long "propertyDeclaration_8 : qualifierList dataType propertyName array defaultValue ';'" # noqa: E501 quals = OrderedDict([(x.name, x) for x in p[1]]) p[0] = CIMProperty(p[3], cimvalue(p[5], p[2]), type=p[2], qualifiers=quals, is_array=True, array_size=p[4])
def compute_steadystate(self, nnc=2): """ Computes the non-stochastic steady-state of the economy. Parameters ---------- nnc : array_like(float) nnc is the location of the constant in the state vector x_t """ zx = np.eye(self.A0.shape[0])-self.A0 self.zz = nullspace(zx) self.zz /= self.zz[nnc] self.css = self.Sc.dot(self.zz) self.sss = self.Ss.dot(self.zz) self.iss = self.Si.dot(self.zz) self.dss = self.Sd.dot(self.zz) self.bss = self.Sb.dot(self.zz) self.kss = self.Sk.dot(self.zz) self.hss = self.Sh.dot(self.zz)
def function[compute_steadystate, parameter[self, nnc]]: constant[ Computes the non-stochastic steady-state of the economy. Parameters ---------- nnc : array_like(float) nnc is the location of the constant in the state vector x_t ] variable[zx] assign[=] binary_operation[call[name[np].eye, parameter[call[name[self].A0.shape][constant[0]]]] - name[self].A0] name[self].zz assign[=] call[name[nullspace], parameter[name[zx]]] <ast.AugAssign object at 0x7da2043458d0> name[self].css assign[=] call[name[self].Sc.dot, parameter[name[self].zz]] name[self].sss assign[=] call[name[self].Ss.dot, parameter[name[self].zz]] name[self].iss assign[=] call[name[self].Si.dot, parameter[name[self].zz]] name[self].dss assign[=] call[name[self].Sd.dot, parameter[name[self].zz]] name[self].bss assign[=] call[name[self].Sb.dot, parameter[name[self].zz]] name[self].kss assign[=] call[name[self].Sk.dot, parameter[name[self].zz]] name[self].hss assign[=] call[name[self].Sh.dot, parameter[name[self].zz]]
keyword[def] identifier[compute_steadystate] ( identifier[self] , identifier[nnc] = literal[int] ): literal[string] identifier[zx] = identifier[np] . identifier[eye] ( identifier[self] . identifier[A0] . identifier[shape] [ literal[int] ])- identifier[self] . identifier[A0] identifier[self] . identifier[zz] = identifier[nullspace] ( identifier[zx] ) identifier[self] . identifier[zz] /= identifier[self] . identifier[zz] [ identifier[nnc] ] identifier[self] . identifier[css] = identifier[self] . identifier[Sc] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[sss] = identifier[self] . identifier[Ss] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[iss] = identifier[self] . identifier[Si] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[dss] = identifier[self] . identifier[Sd] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[bss] = identifier[self] . identifier[Sb] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[kss] = identifier[self] . identifier[Sk] . identifier[dot] ( identifier[self] . identifier[zz] ) identifier[self] . identifier[hss] = identifier[self] . identifier[Sh] . identifier[dot] ( identifier[self] . identifier[zz] )
def compute_steadystate(self, nnc=2): """ Computes the non-stochastic steady-state of the economy. Parameters ---------- nnc : array_like(float) nnc is the location of the constant in the state vector x_t """ zx = np.eye(self.A0.shape[0]) - self.A0 self.zz = nullspace(zx) self.zz /= self.zz[nnc] self.css = self.Sc.dot(self.zz) self.sss = self.Ss.dot(self.zz) self.iss = self.Si.dot(self.zz) self.dss = self.Sd.dot(self.zz) self.bss = self.Sb.dot(self.zz) self.kss = self.Sk.dot(self.zz) self.hss = self.Sh.dot(self.zz)
def enter_plane(self, plane): """Enter the device plane. Enter the device plane a.k.a. mode, i.e. admin, qnx, calvados """ try: cmd = CONF['driver'][self.platform]['planes'][plane] self.plane = plane except KeyError: cmd = None if cmd: self.log("Entering the {} plane".format(plane)) self.device.send(cmd)
def function[enter_plane, parameter[self, plane]]: constant[Enter the device plane. Enter the device plane a.k.a. mode, i.e. admin, qnx, calvados ] <ast.Try object at 0x7da1b259fbe0> if name[cmd] begin[:] call[name[self].log, parameter[call[constant[Entering the {} plane].format, parameter[name[plane]]]]] call[name[self].device.send, parameter[name[cmd]]]
keyword[def] identifier[enter_plane] ( identifier[self] , identifier[plane] ): literal[string] keyword[try] : identifier[cmd] = identifier[CONF] [ literal[string] ][ identifier[self] . identifier[platform] ][ literal[string] ][ identifier[plane] ] identifier[self] . identifier[plane] = identifier[plane] keyword[except] identifier[KeyError] : identifier[cmd] = keyword[None] keyword[if] identifier[cmd] : identifier[self] . identifier[log] ( literal[string] . identifier[format] ( identifier[plane] )) identifier[self] . identifier[device] . identifier[send] ( identifier[cmd] )
def enter_plane(self, plane): """Enter the device plane. Enter the device plane a.k.a. mode, i.e. admin, qnx, calvados """ try: cmd = CONF['driver'][self.platform]['planes'][plane] self.plane = plane # depends on [control=['try'], data=[]] except KeyError: cmd = None # depends on [control=['except'], data=[]] if cmd: self.log('Entering the {} plane'.format(plane)) self.device.send(cmd) # depends on [control=['if'], data=[]]
def host(self, hostname=None): """Get or set host (IPv4/IPv6 or hostname like 'plc.domain.net') :param hostname: hostname or IPv4/IPv6 address or None for get value :type hostname: str or None :returns: hostname or None if set fail :rtype: str or None """ if (hostname is None) or (hostname == self.__hostname): return self.__hostname # when hostname change ensure old socket is close self.close() # IPv4 ? try: socket.inet_pton(socket.AF_INET, hostname) self.__hostname = hostname return self.__hostname except socket.error: pass # IPv6 ? try: socket.inet_pton(socket.AF_INET6, hostname) self.__hostname = hostname return self.__hostname except socket.error: pass # DNS name ? if re.match('^[a-z][a-z0-9\.\-]+$', hostname): self.__hostname = hostname return self.__hostname else: return None
def function[host, parameter[self, hostname]]: constant[Get or set host (IPv4/IPv6 or hostname like 'plc.domain.net') :param hostname: hostname or IPv4/IPv6 address or None for get value :type hostname: str or None :returns: hostname or None if set fail :rtype: str or None ] if <ast.BoolOp object at 0x7da1b16a7700> begin[:] return[name[self].__hostname] call[name[self].close, parameter[]] <ast.Try object at 0x7da1b16a6a70> <ast.Try object at 0x7da1b16a6710> if call[name[re].match, parameter[constant[^[a-z][a-z0-9\.\-]+$], name[hostname]]] begin[:] name[self].__hostname assign[=] name[hostname] return[name[self].__hostname]
keyword[def] identifier[host] ( identifier[self] , identifier[hostname] = keyword[None] ): literal[string] keyword[if] ( identifier[hostname] keyword[is] keyword[None] ) keyword[or] ( identifier[hostname] == identifier[self] . identifier[__hostname] ): keyword[return] identifier[self] . identifier[__hostname] identifier[self] . identifier[close] () keyword[try] : identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET] , identifier[hostname] ) identifier[self] . identifier[__hostname] = identifier[hostname] keyword[return] identifier[self] . identifier[__hostname] keyword[except] identifier[socket] . identifier[error] : keyword[pass] keyword[try] : identifier[socket] . identifier[inet_pton] ( identifier[socket] . identifier[AF_INET6] , identifier[hostname] ) identifier[self] . identifier[__hostname] = identifier[hostname] keyword[return] identifier[self] . identifier[__hostname] keyword[except] identifier[socket] . identifier[error] : keyword[pass] keyword[if] identifier[re] . identifier[match] ( literal[string] , identifier[hostname] ): identifier[self] . identifier[__hostname] = identifier[hostname] keyword[return] identifier[self] . identifier[__hostname] keyword[else] : keyword[return] keyword[None]
def host(self, hostname=None): """Get or set host (IPv4/IPv6 or hostname like 'plc.domain.net') :param hostname: hostname or IPv4/IPv6 address or None for get value :type hostname: str or None :returns: hostname or None if set fail :rtype: str or None """ if hostname is None or hostname == self.__hostname: return self.__hostname # depends on [control=['if'], data=[]] # when hostname change ensure old socket is close self.close() # IPv4 ? try: socket.inet_pton(socket.AF_INET, hostname) self.__hostname = hostname return self.__hostname # depends on [control=['try'], data=[]] except socket.error: pass # depends on [control=['except'], data=[]] # IPv6 ? try: socket.inet_pton(socket.AF_INET6, hostname) self.__hostname = hostname return self.__hostname # depends on [control=['try'], data=[]] except socket.error: pass # depends on [control=['except'], data=[]] # DNS name ? if re.match('^[a-z][a-z0-9\\.\\-]+$', hostname): self.__hostname = hostname return self.__hostname # depends on [control=['if'], data=[]] else: return None
def parse_error(self, response): "Parse an error response" error_code = response.split(' ')[0] if error_code in self.EXCEPTION_CLASSES: response = response[len(error_code) + 1:] exception_class = self.EXCEPTION_CLASSES[error_code] if isinstance(exception_class, dict): exception_class = exception_class.get(response, ResponseError) return exception_class(response) return ResponseError(response)
def function[parse_error, parameter[self, response]]: constant[Parse an error response] variable[error_code] assign[=] call[call[name[response].split, parameter[constant[ ]]]][constant[0]] if compare[name[error_code] in name[self].EXCEPTION_CLASSES] begin[:] variable[response] assign[=] call[name[response]][<ast.Slice object at 0x7da20e956fe0>] variable[exception_class] assign[=] call[name[self].EXCEPTION_CLASSES][name[error_code]] if call[name[isinstance], parameter[name[exception_class], name[dict]]] begin[:] variable[exception_class] assign[=] call[name[exception_class].get, parameter[name[response], name[ResponseError]]] return[call[name[exception_class], parameter[name[response]]]] return[call[name[ResponseError], parameter[name[response]]]]
keyword[def] identifier[parse_error] ( identifier[self] , identifier[response] ): literal[string] identifier[error_code] = identifier[response] . identifier[split] ( literal[string] )[ literal[int] ] keyword[if] identifier[error_code] keyword[in] identifier[self] . identifier[EXCEPTION_CLASSES] : identifier[response] = identifier[response] [ identifier[len] ( identifier[error_code] )+ literal[int] :] identifier[exception_class] = identifier[self] . identifier[EXCEPTION_CLASSES] [ identifier[error_code] ] keyword[if] identifier[isinstance] ( identifier[exception_class] , identifier[dict] ): identifier[exception_class] = identifier[exception_class] . identifier[get] ( identifier[response] , identifier[ResponseError] ) keyword[return] identifier[exception_class] ( identifier[response] ) keyword[return] identifier[ResponseError] ( identifier[response] )
def parse_error(self, response): """Parse an error response""" error_code = response.split(' ')[0] if error_code in self.EXCEPTION_CLASSES: response = response[len(error_code) + 1:] exception_class = self.EXCEPTION_CLASSES[error_code] if isinstance(exception_class, dict): exception_class = exception_class.get(response, ResponseError) # depends on [control=['if'], data=[]] return exception_class(response) # depends on [control=['if'], data=['error_code']] return ResponseError(response)
def provider_parser(subparser): """Specify arguments for AWS Route 53 Lexicon Provider.""" subparser.add_argument("--auth-access-key", help="specify ACCESS_KEY for authentication") subparser.add_argument("--auth-access-secret", help="specify ACCESS_SECRET for authentication") subparser.add_argument( "--private-zone", help=("indicates what kind of hosted zone to use. If true, use " "only private zones. If false, use only public zones")) # TODO: these are only required for testing, we should figure out # a way to remove them & update the integration tests # to dynamically populate the auth credentials that are required. subparser.add_argument( "--auth-username", help="alternative way to specify the ACCESS_KEY for authentication") subparser.add_argument( "--auth-token", help="alternative way to specify the ACCESS_SECRET for authentication")
def function[provider_parser, parameter[subparser]]: constant[Specify arguments for AWS Route 53 Lexicon Provider.] call[name[subparser].add_argument, parameter[constant[--auth-access-key]]] call[name[subparser].add_argument, parameter[constant[--auth-access-secret]]] call[name[subparser].add_argument, parameter[constant[--private-zone]]] call[name[subparser].add_argument, parameter[constant[--auth-username]]] call[name[subparser].add_argument, parameter[constant[--auth-token]]]
keyword[def] identifier[provider_parser] ( identifier[subparser] ): literal[string] identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] =( literal[string] literal[string] )) identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] ) identifier[subparser] . identifier[add_argument] ( literal[string] , identifier[help] = literal[string] )
def provider_parser(subparser): """Specify arguments for AWS Route 53 Lexicon Provider.""" subparser.add_argument('--auth-access-key', help='specify ACCESS_KEY for authentication') subparser.add_argument('--auth-access-secret', help='specify ACCESS_SECRET for authentication') subparser.add_argument('--private-zone', help='indicates what kind of hosted zone to use. If true, use only private zones. If false, use only public zones') # TODO: these are only required for testing, we should figure out # a way to remove them & update the integration tests # to dynamically populate the auth credentials that are required. subparser.add_argument('--auth-username', help='alternative way to specify the ACCESS_KEY for authentication') subparser.add_argument('--auth-token', help='alternative way to specify the ACCESS_SECRET for authentication')
def getmergerequests(self, project_id, page=1, per_page=20, state=None): """ Get all the merge requests for a project. :param project_id: ID of the project to retrieve merge requests for :param page: Page Number :param per_page: Records per page :param state: Passes merge request state to filter them by it :return: list with all the merge requests """ data = {'page': page, 'per_page': per_page, 'state': state} request = requests.get( '{0}/{1}/merge_requests'.format(self.projects_url, project_id), params=data, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout) if request.status_code == 200: return request.json() else: return False
def function[getmergerequests, parameter[self, project_id, page, per_page, state]]: constant[ Get all the merge requests for a project. :param project_id: ID of the project to retrieve merge requests for :param page: Page Number :param per_page: Records per page :param state: Passes merge request state to filter them by it :return: list with all the merge requests ] variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b28b7370>, <ast.Constant object at 0x7da1b28b5090>, <ast.Constant object at 0x7da1b28b7fd0>], [<ast.Name object at 0x7da1b28b7730>, <ast.Name object at 0x7da1b28b4ca0>, <ast.Name object at 0x7da1b28b4f70>]] variable[request] assign[=] call[name[requests].get, parameter[call[constant[{0}/{1}/merge_requests].format, parameter[name[self].projects_url, name[project_id]]]]] if compare[name[request].status_code equal[==] constant[200]] begin[:] return[call[name[request].json, parameter[]]]
keyword[def] identifier[getmergerequests] ( identifier[self] , identifier[project_id] , identifier[page] = literal[int] , identifier[per_page] = literal[int] , identifier[state] = keyword[None] ): literal[string] identifier[data] ={ literal[string] : identifier[page] , literal[string] : identifier[per_page] , literal[string] : identifier[state] } identifier[request] = identifier[requests] . identifier[get] ( literal[string] . identifier[format] ( identifier[self] . identifier[projects_url] , identifier[project_id] ), identifier[params] = identifier[data] , identifier[headers] = identifier[self] . identifier[headers] , identifier[verify] = identifier[self] . identifier[verify_ssl] , identifier[auth] = identifier[self] . identifier[auth] , identifier[timeout] = identifier[self] . identifier[timeout] ) keyword[if] identifier[request] . identifier[status_code] == literal[int] : keyword[return] identifier[request] . identifier[json] () keyword[else] : keyword[return] keyword[False]
def getmergerequests(self, project_id, page=1, per_page=20, state=None): """ Get all the merge requests for a project. :param project_id: ID of the project to retrieve merge requests for :param page: Page Number :param per_page: Records per page :param state: Passes merge request state to filter them by it :return: list with all the merge requests """ data = {'page': page, 'per_page': per_page, 'state': state} request = requests.get('{0}/{1}/merge_requests'.format(self.projects_url, project_id), params=data, headers=self.headers, verify=self.verify_ssl, auth=self.auth, timeout=self.timeout) if request.status_code == 200: return request.json() # depends on [control=['if'], data=[]] else: return False
def generate_module_content(self, module): """Generate module.rst text content. :: {{ module_name }} ================= .. automodule:: {{ module_fullname }} :members: """ if isinstance(module, Module): return module.render() else: # pragma: no cover raise Exception("%r is not a Module object" % module)
def function[generate_module_content, parameter[self, module]]: constant[Generate module.rst text content. :: {{ module_name }} ================= .. automodule:: {{ module_fullname }} :members: ] if call[name[isinstance], parameter[name[module], name[Module]]] begin[:] return[call[name[module].render, parameter[]]]
keyword[def] identifier[generate_module_content] ( identifier[self] , identifier[module] ): literal[string] keyword[if] identifier[isinstance] ( identifier[module] , identifier[Module] ): keyword[return] identifier[module] . identifier[render] () keyword[else] : keyword[raise] identifier[Exception] ( literal[string] % identifier[module] )
def generate_module_content(self, module): """Generate module.rst text content. :: {{ module_name }} ================= .. automodule:: {{ module_fullname }} :members: """ if isinstance(module, Module): return module.render() # depends on [control=['if'], data=[]] else: # pragma: no cover raise Exception('%r is not a Module object' % module)
def get_func(func_ea): """get_func(func_t or ea) -> func_t Take an IDA function (``idaapi.func_t``) or an address (EA) and return an IDA function object. Use this when APIs can take either a function or an address. Args: func_ea: ``idaapi.func_t`` or ea of the function. Returns: An ``idaapi.func_t`` object for the given address. If a ``func_t`` is provided, it is returned. """ if isinstance(func_ea, idaapi.func_t): return func_ea func = idaapi.get_func(func_ea) if func is None: raise exceptions.SarkNoFunction("No function at 0x{:08X}".format(func_ea)) return func
def function[get_func, parameter[func_ea]]: constant[get_func(func_t or ea) -> func_t Take an IDA function (``idaapi.func_t``) or an address (EA) and return an IDA function object. Use this when APIs can take either a function or an address. Args: func_ea: ``idaapi.func_t`` or ea of the function. Returns: An ``idaapi.func_t`` object for the given address. If a ``func_t`` is provided, it is returned. ] if call[name[isinstance], parameter[name[func_ea], name[idaapi].func_t]] begin[:] return[name[func_ea]] variable[func] assign[=] call[name[idaapi].get_func, parameter[name[func_ea]]] if compare[name[func] is constant[None]] begin[:] <ast.Raise object at 0x7da1b12f08b0> return[name[func]]
keyword[def] identifier[get_func] ( identifier[func_ea] ): literal[string] keyword[if] identifier[isinstance] ( identifier[func_ea] , identifier[idaapi] . identifier[func_t] ): keyword[return] identifier[func_ea] identifier[func] = identifier[idaapi] . identifier[get_func] ( identifier[func_ea] ) keyword[if] identifier[func] keyword[is] keyword[None] : keyword[raise] identifier[exceptions] . identifier[SarkNoFunction] ( literal[string] . identifier[format] ( identifier[func_ea] )) keyword[return] identifier[func]
def get_func(func_ea): """get_func(func_t or ea) -> func_t Take an IDA function (``idaapi.func_t``) or an address (EA) and return an IDA function object. Use this when APIs can take either a function or an address. Args: func_ea: ``idaapi.func_t`` or ea of the function. Returns: An ``idaapi.func_t`` object for the given address. If a ``func_t`` is provided, it is returned. """ if isinstance(func_ea, idaapi.func_t): return func_ea # depends on [control=['if'], data=[]] func = idaapi.get_func(func_ea) if func is None: raise exceptions.SarkNoFunction('No function at 0x{:08X}'.format(func_ea)) # depends on [control=['if'], data=[]] return func
def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True): """Calculate the time when the sun is at the specified elevation. Note: This method uses positive elevations for those above the horizon. Elevations greater than 90 degrees are converted to a setting sun i.e. an elevation of 110 will calculate a setting sun at 70 degrees. :param elevation: Elevation in degrees above the horizon to calculate for. :type elevation: float :param direction: Determines whether the time is for the sun rising or setting. Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. :type direction: int :param date: The date for which to calculate the elevation time. If no date is specified then the current date will be used. :type date: :class:`~datetime.date` :param local: True = Time to be returned in location's time zone; False = Time to be returned in UTC. If not specified then the time will be returned in local time :type local: bool :returns: The date and time at which dusk occurs. :rtype: :class:`~datetime.datetime` """ if local and self.timezone is None: raise ValueError("Local time requested but Location has no timezone set.") if self.astral is None: self.astral = Astral() if date is None: date = datetime.date.today() if elevation > 90.0: elevation = 180.0 - elevation direction = SUN_SETTING time_ = self.astral.time_at_elevation_utc( elevation, direction, date, self.latitude, self.longitude ) if local: return time_.astimezone(self.tz) else: return time_
def function[time_at_elevation, parameter[self, elevation, direction, date, local]]: constant[Calculate the time when the sun is at the specified elevation. Note: This method uses positive elevations for those above the horizon. Elevations greater than 90 degrees are converted to a setting sun i.e. an elevation of 110 will calculate a setting sun at 70 degrees. :param elevation: Elevation in degrees above the horizon to calculate for. :type elevation: float :param direction: Determines whether the time is for the sun rising or setting. Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. :type direction: int :param date: The date for which to calculate the elevation time. If no date is specified then the current date will be used. :type date: :class:`~datetime.date` :param local: True = Time to be returned in location's time zone; False = Time to be returned in UTC. If not specified then the time will be returned in local time :type local: bool :returns: The date and time at which dusk occurs. :rtype: :class:`~datetime.datetime` ] if <ast.BoolOp object at 0x7da1b2347160> begin[:] <ast.Raise object at 0x7da1b2344f40> if compare[name[self].astral is constant[None]] begin[:] name[self].astral assign[=] call[name[Astral], parameter[]] if compare[name[date] is constant[None]] begin[:] variable[date] assign[=] call[name[datetime].date.today, parameter[]] if compare[name[elevation] greater[>] constant[90.0]] begin[:] variable[elevation] assign[=] binary_operation[constant[180.0] - name[elevation]] variable[direction] assign[=] name[SUN_SETTING] variable[time_] assign[=] call[name[self].astral.time_at_elevation_utc, parameter[name[elevation], name[direction], name[date], name[self].latitude, name[self].longitude]] if name[local] begin[:] return[call[name[time_].astimezone, parameter[name[self].tz]]]
keyword[def] identifier[time_at_elevation] ( identifier[self] , identifier[elevation] , identifier[direction] = identifier[SUN_RISING] , identifier[date] = keyword[None] , identifier[local] = keyword[True] ): literal[string] keyword[if] identifier[local] keyword[and] identifier[self] . identifier[timezone] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[self] . identifier[astral] keyword[is] keyword[None] : identifier[self] . identifier[astral] = identifier[Astral] () keyword[if] identifier[date] keyword[is] keyword[None] : identifier[date] = identifier[datetime] . identifier[date] . identifier[today] () keyword[if] identifier[elevation] > literal[int] : identifier[elevation] = literal[int] - identifier[elevation] identifier[direction] = identifier[SUN_SETTING] identifier[time_] = identifier[self] . identifier[astral] . identifier[time_at_elevation_utc] ( identifier[elevation] , identifier[direction] , identifier[date] , identifier[self] . identifier[latitude] , identifier[self] . identifier[longitude] ) keyword[if] identifier[local] : keyword[return] identifier[time_] . identifier[astimezone] ( identifier[self] . identifier[tz] ) keyword[else] : keyword[return] identifier[time_]
def time_at_elevation(self, elevation, direction=SUN_RISING, date=None, local=True): """Calculate the time when the sun is at the specified elevation. Note: This method uses positive elevations for those above the horizon. Elevations greater than 90 degrees are converted to a setting sun i.e. an elevation of 110 will calculate a setting sun at 70 degrees. :param elevation: Elevation in degrees above the horizon to calculate for. :type elevation: float :param direction: Determines whether the time is for the sun rising or setting. Use ``astral.SUN_RISING`` or ``astral.SUN_SETTING``. Default is rising. :type direction: int :param date: The date for which to calculate the elevation time. If no date is specified then the current date will be used. :type date: :class:`~datetime.date` :param local: True = Time to be returned in location's time zone; False = Time to be returned in UTC. If not specified then the time will be returned in local time :type local: bool :returns: The date and time at which dusk occurs. :rtype: :class:`~datetime.datetime` """ if local and self.timezone is None: raise ValueError('Local time requested but Location has no timezone set.') # depends on [control=['if'], data=[]] if self.astral is None: self.astral = Astral() # depends on [control=['if'], data=[]] if date is None: date = datetime.date.today() # depends on [control=['if'], data=['date']] if elevation > 90.0: elevation = 180.0 - elevation direction = SUN_SETTING # depends on [control=['if'], data=['elevation']] time_ = self.astral.time_at_elevation_utc(elevation, direction, date, self.latitude, self.longitude) if local: return time_.astimezone(self.tz) # depends on [control=['if'], data=[]] else: return time_
def update(self): ''' Updates the non-primitive objects needed for solution, using primitive attributes. This includes defining a "utility from conformity" function conformUtilityFunc, a grid of population punk proportions, and an array of future punk proportions (for each value in the grid). Results are stored as attributes of self. Parameters ---------- none Returns ------- none ''' self.conformUtilityFunc = lambda x : stats.beta.pdf(x,self.uParamA,self.uParamB) self.pGrid = np.linspace(0.0001,0.9999,self.pCount) self.updateEvolution()
def function[update, parameter[self]]: constant[ Updates the non-primitive objects needed for solution, using primitive attributes. This includes defining a "utility from conformity" function conformUtilityFunc, a grid of population punk proportions, and an array of future punk proportions (for each value in the grid). Results are stored as attributes of self. Parameters ---------- none Returns ------- none ] name[self].conformUtilityFunc assign[=] <ast.Lambda object at 0x7da2041db070> name[self].pGrid assign[=] call[name[np].linspace, parameter[constant[0.0001], constant[0.9999], name[self].pCount]] call[name[self].updateEvolution, parameter[]]
keyword[def] identifier[update] ( identifier[self] ): literal[string] identifier[self] . identifier[conformUtilityFunc] = keyword[lambda] identifier[x] : identifier[stats] . identifier[beta] . identifier[pdf] ( identifier[x] , identifier[self] . identifier[uParamA] , identifier[self] . identifier[uParamB] ) identifier[self] . identifier[pGrid] = identifier[np] . identifier[linspace] ( literal[int] , literal[int] , identifier[self] . identifier[pCount] ) identifier[self] . identifier[updateEvolution] ()
def update(self): """ Updates the non-primitive objects needed for solution, using primitive attributes. This includes defining a "utility from conformity" function conformUtilityFunc, a grid of population punk proportions, and an array of future punk proportions (for each value in the grid). Results are stored as attributes of self. Parameters ---------- none Returns ------- none """ self.conformUtilityFunc = lambda x: stats.beta.pdf(x, self.uParamA, self.uParamB) self.pGrid = np.linspace(0.0001, 0.9999, self.pCount) self.updateEvolution()
def read(self): ''' Read tagged doc from mutliple files (sents, tokens, concepts, links, tags) ''' warnings.warn("Document.read() is deprecated and will be removed in near future.", DeprecationWarning) with TxtReader.from_doc(self) as reader: reader.read(self) return self
def function[read, parameter[self]]: constant[ Read tagged doc from mutliple files (sents, tokens, concepts, links, tags) ] call[name[warnings].warn, parameter[constant[Document.read() is deprecated and will be removed in near future.], name[DeprecationWarning]]] with call[name[TxtReader].from_doc, parameter[name[self]]] begin[:] call[name[reader].read, parameter[name[self]]] return[name[self]]
keyword[def] identifier[read] ( identifier[self] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[DeprecationWarning] ) keyword[with] identifier[TxtReader] . identifier[from_doc] ( identifier[self] ) keyword[as] identifier[reader] : identifier[reader] . identifier[read] ( identifier[self] ) keyword[return] identifier[self]
def read(self): """ Read tagged doc from mutliple files (sents, tokens, concepts, links, tags) """ warnings.warn('Document.read() is deprecated and will be removed in near future.', DeprecationWarning) with TxtReader.from_doc(self) as reader: reader.read(self) # depends on [control=['with'], data=['reader']] return self
def log_request_end_send(self, target_system, target_component, force_mavlink1=False): ''' Stop log transfer and resume normal logging target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ''' return self.send(self.log_request_end_encode(target_system, target_component), force_mavlink1=force_mavlink1)
def function[log_request_end_send, parameter[self, target_system, target_component, force_mavlink1]]: constant[ Stop log transfer and resume normal logging target_system : System ID (uint8_t) target_component : Component ID (uint8_t) ] return[call[name[self].send, parameter[call[name[self].log_request_end_encode, parameter[name[target_system], name[target_component]]]]]]
keyword[def] identifier[log_request_end_send] ( identifier[self] , identifier[target_system] , identifier[target_component] , identifier[force_mavlink1] = keyword[False] ): literal[string] keyword[return] identifier[self] . identifier[send] ( identifier[self] . identifier[log_request_end_encode] ( identifier[target_system] , identifier[target_component] ), identifier[force_mavlink1] = identifier[force_mavlink1] )
def log_request_end_send(self, target_system, target_component, force_mavlink1=False): """ Stop log transfer and resume normal logging target_system : System ID (uint8_t) target_component : Component ID (uint8_t) """ return self.send(self.log_request_end_encode(target_system, target_component), force_mavlink1=force_mavlink1)
def squash_children(self, options): """ reduces the memory footprint of this super-change by converting all child changes into squashed changes """ oldsubs = self.collect() self.changes = tuple(squash(c, options=options) for c in oldsubs) for change in oldsubs: change.clear()
def function[squash_children, parameter[self, options]]: constant[ reduces the memory footprint of this super-change by converting all child changes into squashed changes ] variable[oldsubs] assign[=] call[name[self].collect, parameter[]] name[self].changes assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b0ebdfc0>]] for taget[name[change]] in starred[name[oldsubs]] begin[:] call[name[change].clear, parameter[]]
keyword[def] identifier[squash_children] ( identifier[self] , identifier[options] ): literal[string] identifier[oldsubs] = identifier[self] . identifier[collect] () identifier[self] . identifier[changes] = identifier[tuple] ( identifier[squash] ( identifier[c] , identifier[options] = identifier[options] ) keyword[for] identifier[c] keyword[in] identifier[oldsubs] ) keyword[for] identifier[change] keyword[in] identifier[oldsubs] : identifier[change] . identifier[clear] ()
def squash_children(self, options): """ reduces the memory footprint of this super-change by converting all child changes into squashed changes """ oldsubs = self.collect() self.changes = tuple((squash(c, options=options) for c in oldsubs)) for change in oldsubs: change.clear() # depends on [control=['for'], data=['change']]
def certificate(args): """ %prog certificate tpffile certificatefile Generate certificate file for all overlaps in tpffile. tpffile can be generated by jcvi.formats.agp.tpf(). North chr1 2 0 AC229737.8 telomere 58443 South chr1 2 1 AC229737.8 AC202463.29 58443 37835 58443 + Non-terminal Each line describes a relationship between the current BAC and the north/south BAC. First, "North/South" tag, then the chromosome, phases of the two BACs, ids of the two BACs, the size and the overlap start-stop of the CURRENT BAC, and orientation. Each BAC will have two lines in the certificate file. """ p = OptionParser(certificate.__doc__) opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) tpffile, certificatefile = args fastadir = "fasta" tpf = TPF(tpffile) data = check_certificate(certificatefile) fw = must_open(certificatefile, "w") for i, a in enumerate(tpf): if a.is_gap: continue aid = a.component_id af = op.join(fastadir, aid + ".fasta") if not op.exists(af): # Check to avoid redownload entrez([aid, "--skipcheck", "--outdir=" + fastadir]) north, south = tpf.getNorthSouthClone(i) aphase, asize = phase(aid) for tag, p in (("North", north), ("South", south)): if not p: # end of the chromosome ov = "telomere\t{0}".format(asize) elif p.isCloneGap: bphase = "0" ov = "{0}\t{1}".format(p.gap_type, asize) else: bid = p.component_id bphase, bsize = phase(bid) key = (tag, aid, bid) if key in data: print(data[key], file=fw) continue ar = [aid, bid, "--dir=" + fastadir] o = overlap(ar) ov = o.certificateline if o \ else "{0}\t{1}\tNone".format(bid, asize) print("\t".join(str(x) for x in \ (tag, a.object, aphase, bphase, aid, ov)), file=fw) fw.flush()
def function[certificate, parameter[args]]: constant[ %prog certificate tpffile certificatefile Generate certificate file for all overlaps in tpffile. tpffile can be generated by jcvi.formats.agp.tpf(). North chr1 2 0 AC229737.8 telomere 58443 South chr1 2 1 AC229737.8 AC202463.29 58443 37835 58443 + Non-terminal Each line describes a relationship between the current BAC and the north/south BAC. First, "North/South" tag, then the chromosome, phases of the two BACs, ids of the two BACs, the size and the overlap start-stop of the CURRENT BAC, and orientation. Each BAC will have two lines in the certificate file. ] variable[p] assign[=] call[name[OptionParser], parameter[name[certificate].__doc__]] <ast.Tuple object at 0x7da18f00fa60> assign[=] call[name[p].parse_args, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da18f00fb50>]] <ast.Tuple object at 0x7da18f00f6a0> assign[=] name[args] variable[fastadir] assign[=] constant[fasta] variable[tpf] assign[=] call[name[TPF], parameter[name[tpffile]]] variable[data] assign[=] call[name[check_certificate], parameter[name[certificatefile]]] variable[fw] assign[=] call[name[must_open], parameter[name[certificatefile], constant[w]]] for taget[tuple[[<ast.Name object at 0x7da18f00c970>, <ast.Name object at 0x7da18f00e0b0>]]] in starred[call[name[enumerate], parameter[name[tpf]]]] begin[:] if name[a].is_gap begin[:] continue variable[aid] assign[=] name[a].component_id variable[af] assign[=] call[name[op].join, parameter[name[fastadir], binary_operation[name[aid] + constant[.fasta]]]] if <ast.UnaryOp object at 0x7da18f00c670> begin[:] call[name[entrez], parameter[list[[<ast.Name object at 0x7da18f00cd90>, <ast.Constant object at 0x7da18f00d960>, <ast.BinOp object at 0x7da18f00ef80>]]]] <ast.Tuple object at 0x7da18f00d750> assign[=] call[name[tpf].getNorthSouthClone, parameter[name[i]]] <ast.Tuple object at 
0x7da18f00e470> assign[=] call[name[phase], parameter[name[aid]]] for taget[tuple[[<ast.Name object at 0x7da18f00ef50>, <ast.Name object at 0x7da18f00d600>]]] in starred[tuple[[<ast.Tuple object at 0x7da18f00d0c0>, <ast.Tuple object at 0x7da18f00d240>]]] begin[:] if <ast.UnaryOp object at 0x7da18f00d8a0> begin[:] variable[ov] assign[=] call[constant[telomere {0}].format, parameter[name[asize]]] call[name[print], parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da20c76cc40>]]]] call[name[fw].flush, parameter[]]
keyword[def] identifier[certificate] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[certificate] . identifier[__doc__] ) identifier[opts] , identifier[args] = identifier[p] . identifier[parse_args] ( identifier[args] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[tpffile] , identifier[certificatefile] = identifier[args] identifier[fastadir] = literal[string] identifier[tpf] = identifier[TPF] ( identifier[tpffile] ) identifier[data] = identifier[check_certificate] ( identifier[certificatefile] ) identifier[fw] = identifier[must_open] ( identifier[certificatefile] , literal[string] ) keyword[for] identifier[i] , identifier[a] keyword[in] identifier[enumerate] ( identifier[tpf] ): keyword[if] identifier[a] . identifier[is_gap] : keyword[continue] identifier[aid] = identifier[a] . identifier[component_id] identifier[af] = identifier[op] . identifier[join] ( identifier[fastadir] , identifier[aid] + literal[string] ) keyword[if] keyword[not] identifier[op] . identifier[exists] ( identifier[af] ): identifier[entrez] ([ identifier[aid] , literal[string] , literal[string] + identifier[fastadir] ]) identifier[north] , identifier[south] = identifier[tpf] . identifier[getNorthSouthClone] ( identifier[i] ) identifier[aphase] , identifier[asize] = identifier[phase] ( identifier[aid] ) keyword[for] identifier[tag] , identifier[p] keyword[in] (( literal[string] , identifier[north] ),( literal[string] , identifier[south] )): keyword[if] keyword[not] identifier[p] : identifier[ov] = literal[string] . identifier[format] ( identifier[asize] ) keyword[elif] identifier[p] . identifier[isCloneGap] : identifier[bphase] = literal[string] identifier[ov] = literal[string] . identifier[format] ( identifier[p] . identifier[gap_type] , identifier[asize] ) keyword[else] : identifier[bid] = identifier[p] . 
identifier[component_id] identifier[bphase] , identifier[bsize] = identifier[phase] ( identifier[bid] ) identifier[key] =( identifier[tag] , identifier[aid] , identifier[bid] ) keyword[if] identifier[key] keyword[in] identifier[data] : identifier[print] ( identifier[data] [ identifier[key] ], identifier[file] = identifier[fw] ) keyword[continue] identifier[ar] =[ identifier[aid] , identifier[bid] , literal[string] + identifier[fastadir] ] identifier[o] = identifier[overlap] ( identifier[ar] ) identifier[ov] = identifier[o] . identifier[certificateline] keyword[if] identifier[o] keyword[else] literal[string] . identifier[format] ( identifier[bid] , identifier[asize] ) identifier[print] ( literal[string] . identifier[join] ( identifier[str] ( identifier[x] ) keyword[for] identifier[x] keyword[in] ( identifier[tag] , identifier[a] . identifier[object] , identifier[aphase] , identifier[bphase] , identifier[aid] , identifier[ov] )), identifier[file] = identifier[fw] ) identifier[fw] . identifier[flush] ()
def certificate(args): """ %prog certificate tpffile certificatefile Generate certificate file for all overlaps in tpffile. tpffile can be generated by jcvi.formats.agp.tpf(). North chr1 2 0 AC229737.8 telomere 58443 South chr1 2 1 AC229737.8 AC202463.29 58443 37835 58443 + Non-terminal Each line describes a relationship between the current BAC and the north/south BAC. First, "North/South" tag, then the chromosome, phases of the two BACs, ids of the two BACs, the size and the overlap start-stop of the CURRENT BAC, and orientation. Each BAC will have two lines in the certificate file. """ p = OptionParser(certificate.__doc__) (opts, args) = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (tpffile, certificatefile) = args fastadir = 'fasta' tpf = TPF(tpffile) data = check_certificate(certificatefile) fw = must_open(certificatefile, 'w') for (i, a) in enumerate(tpf): if a.is_gap: continue # depends on [control=['if'], data=[]] aid = a.component_id af = op.join(fastadir, aid + '.fasta') if not op.exists(af): # Check to avoid redownload entrez([aid, '--skipcheck', '--outdir=' + fastadir]) # depends on [control=['if'], data=[]] (north, south) = tpf.getNorthSouthClone(i) (aphase, asize) = phase(aid) for (tag, p) in (('North', north), ('South', south)): if not p: # end of the chromosome ov = 'telomere\t{0}'.format(asize) # depends on [control=['if'], data=[]] elif p.isCloneGap: bphase = '0' ov = '{0}\t{1}'.format(p.gap_type, asize) # depends on [control=['if'], data=[]] else: bid = p.component_id (bphase, bsize) = phase(bid) key = (tag, aid, bid) if key in data: print(data[key], file=fw) continue # depends on [control=['if'], data=['key', 'data']] ar = [aid, bid, '--dir=' + fastadir] o = overlap(ar) ov = o.certificateline if o else '{0}\t{1}\tNone'.format(bid, asize) print('\t'.join((str(x) for x in (tag, a.object, aphase, bphase, aid, ov))), file=fw) fw.flush() # depends on [control=['for'], data=[]] # depends on 
[control=['for'], data=[]]
def decrypt_verify(encrypted, session_keys=None): """Decrypts the given ciphertext string and returns both the signatures (if any) and the plaintext. :param bytes encrypted: the mail to decrypt :param list[str] session_keys: a list OpenPGP session keys :returns: the signatures and decrypted plaintext data :rtype: tuple[list[gpg.resuit.Signature], str] :raises alot.errors.GPGProblem: if the decryption fails """ if session_keys is not None: try: return _decrypt_verify_session_keys(encrypted, session_keys) except GPGProblem: pass ctx = gpg.core.Context() return _decrypt_verify_with_context(ctx, encrypted)
def function[decrypt_verify, parameter[encrypted, session_keys]]: constant[Decrypts the given ciphertext string and returns both the signatures (if any) and the plaintext. :param bytes encrypted: the mail to decrypt :param list[str] session_keys: a list OpenPGP session keys :returns: the signatures and decrypted plaintext data :rtype: tuple[list[gpg.resuit.Signature], str] :raises alot.errors.GPGProblem: if the decryption fails ] if compare[name[session_keys] is_not constant[None]] begin[:] <ast.Try object at 0x7da1b07aaa10> variable[ctx] assign[=] call[name[gpg].core.Context, parameter[]] return[call[name[_decrypt_verify_with_context], parameter[name[ctx], name[encrypted]]]]
keyword[def] identifier[decrypt_verify] ( identifier[encrypted] , identifier[session_keys] = keyword[None] ): literal[string] keyword[if] identifier[session_keys] keyword[is] keyword[not] keyword[None] : keyword[try] : keyword[return] identifier[_decrypt_verify_session_keys] ( identifier[encrypted] , identifier[session_keys] ) keyword[except] identifier[GPGProblem] : keyword[pass] identifier[ctx] = identifier[gpg] . identifier[core] . identifier[Context] () keyword[return] identifier[_decrypt_verify_with_context] ( identifier[ctx] , identifier[encrypted] )
def decrypt_verify(encrypted, session_keys=None): """Decrypts the given ciphertext string and returns both the signatures (if any) and the plaintext. :param bytes encrypted: the mail to decrypt :param list[str] session_keys: a list OpenPGP session keys :returns: the signatures and decrypted plaintext data :rtype: tuple[list[gpg.resuit.Signature], str] :raises alot.errors.GPGProblem: if the decryption fails """ if session_keys is not None: try: return _decrypt_verify_session_keys(encrypted, session_keys) # depends on [control=['try'], data=[]] except GPGProblem: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['session_keys']] ctx = gpg.core.Context() return _decrypt_verify_with_context(ctx, encrypted)
def getEntropies(m): """ Recursively get the current and max entropies from every child module :param m: any module :return: (currentEntropy, maxEntropy) """ entropy = 0.0 max_entropy = 0.0 for module in m.children(): e, m = getEntropies(module) entropy += e max_entropy += m e, m = getEntropy(m) entropy += e max_entropy += m return entropy, max_entropy
def function[getEntropies, parameter[m]]: constant[ Recursively get the current and max entropies from every child module :param m: any module :return: (currentEntropy, maxEntropy) ] variable[entropy] assign[=] constant[0.0] variable[max_entropy] assign[=] constant[0.0] for taget[name[module]] in starred[call[name[m].children, parameter[]]] begin[:] <ast.Tuple object at 0x7da1b08d7fd0> assign[=] call[name[getEntropies], parameter[name[module]]] <ast.AugAssign object at 0x7da1b08d4d00> <ast.AugAssign object at 0x7da1b08d69e0> <ast.Tuple object at 0x7da1b08d5bd0> assign[=] call[name[getEntropy], parameter[name[m]]] <ast.AugAssign object at 0x7da1b0860c70> <ast.AugAssign object at 0x7da1b0862470> return[tuple[[<ast.Name object at 0x7da1b0860520>, <ast.Name object at 0x7da1b08623b0>]]]
keyword[def] identifier[getEntropies] ( identifier[m] ): literal[string] identifier[entropy] = literal[int] identifier[max_entropy] = literal[int] keyword[for] identifier[module] keyword[in] identifier[m] . identifier[children] (): identifier[e] , identifier[m] = identifier[getEntropies] ( identifier[module] ) identifier[entropy] += identifier[e] identifier[max_entropy] += identifier[m] identifier[e] , identifier[m] = identifier[getEntropy] ( identifier[m] ) identifier[entropy] += identifier[e] identifier[max_entropy] += identifier[m] keyword[return] identifier[entropy] , identifier[max_entropy]
def getEntropies(m): """ Recursively get the current and max entropies from every child module :param m: any module :return: (currentEntropy, maxEntropy) """ entropy = 0.0 max_entropy = 0.0 for module in m.children(): (e, m) = getEntropies(module) entropy += e max_entropy += m # depends on [control=['for'], data=['module']] (e, m) = getEntropy(m) entropy += e max_entropy += m return (entropy, max_entropy)
def init_plugins(): """Return dictionary of available plugins Returns ------- plugins : dictionary key is plugin name, value Plugin object """ find_plugins() d = {} for c in Plugin.__subclasses__(): ins = c() if ins.name() in config.get("plugin", []): ins.activate() d[ins.name()] = ins return d
def function[init_plugins, parameter[]]: constant[Return dictionary of available plugins Returns ------- plugins : dictionary key is plugin name, value Plugin object ] call[name[find_plugins], parameter[]] variable[d] assign[=] dictionary[[], []] for taget[name[c]] in starred[call[name[Plugin].__subclasses__, parameter[]]] begin[:] variable[ins] assign[=] call[name[c], parameter[]] if compare[call[name[ins].name, parameter[]] in call[name[config].get, parameter[constant[plugin], list[[]]]]] begin[:] call[name[ins].activate, parameter[]] call[name[d]][call[name[ins].name, parameter[]]] assign[=] name[ins] return[name[d]]
keyword[def] identifier[init_plugins] (): literal[string] identifier[find_plugins] () identifier[d] ={} keyword[for] identifier[c] keyword[in] identifier[Plugin] . identifier[__subclasses__] (): identifier[ins] = identifier[c] () keyword[if] identifier[ins] . identifier[name] () keyword[in] identifier[config] . identifier[get] ( literal[string] ,[]): identifier[ins] . identifier[activate] () identifier[d] [ identifier[ins] . identifier[name] ()]= identifier[ins] keyword[return] identifier[d]
def init_plugins(): """Return dictionary of available plugins Returns ------- plugins : dictionary key is plugin name, value Plugin object """ find_plugins() d = {} for c in Plugin.__subclasses__(): ins = c() if ins.name() in config.get('plugin', []): ins.activate() # depends on [control=['if'], data=[]] d[ins.name()] = ins # depends on [control=['for'], data=['c']] return d
def save_sentences(twg, stmts, filename, agent_limit=300): """Write evidence sentences for stmts with ungrounded agents to csv file. Parameters ---------- twg: list of tuple list of tuples of ungrounded agent_texts with counts of the number of times they are mentioned in the list of statements. Should be sorted in descending order by the counts. This is of the form output by the function ungrounded texts. stmts: list of :py:class:`indra.statements.Statement` filename : str Path to output file agent_limit : Optional[int] Number of agents to include in output file. Takes the top agents by count. """ sentences = [] unmapped_texts = [t[0] for t in twg] counter = 0 logger.info('Getting sentences for top %d unmapped agent texts.' % agent_limit) for text in unmapped_texts: agent_sentences = get_sentences_for_agent(text, stmts) sentences += map(lambda tup: (text,) + tup, agent_sentences) counter += 1 if counter >= agent_limit: break # Write sentences to CSV file write_unicode_csv(filename, sentences, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL, lineterminator='\r\n')
def function[save_sentences, parameter[twg, stmts, filename, agent_limit]]: constant[Write evidence sentences for stmts with ungrounded agents to csv file. Parameters ---------- twg: list of tuple list of tuples of ungrounded agent_texts with counts of the number of times they are mentioned in the list of statements. Should be sorted in descending order by the counts. This is of the form output by the function ungrounded texts. stmts: list of :py:class:`indra.statements.Statement` filename : str Path to output file agent_limit : Optional[int] Number of agents to include in output file. Takes the top agents by count. ] variable[sentences] assign[=] list[[]] variable[unmapped_texts] assign[=] <ast.ListComp object at 0x7da2041d9db0> variable[counter] assign[=] constant[0] call[name[logger].info, parameter[binary_operation[constant[Getting sentences for top %d unmapped agent texts.] <ast.Mod object at 0x7da2590d6920> name[agent_limit]]]] for taget[name[text]] in starred[name[unmapped_texts]] begin[:] variable[agent_sentences] assign[=] call[name[get_sentences_for_agent], parameter[name[text], name[stmts]]] <ast.AugAssign object at 0x7da18ede4e80> <ast.AugAssign object at 0x7da18f00e890> if compare[name[counter] greater_or_equal[>=] name[agent_limit]] begin[:] break call[name[write_unicode_csv], parameter[name[filename], name[sentences]]]
keyword[def] identifier[save_sentences] ( identifier[twg] , identifier[stmts] , identifier[filename] , identifier[agent_limit] = literal[int] ): literal[string] identifier[sentences] =[] identifier[unmapped_texts] =[ identifier[t] [ literal[int] ] keyword[for] identifier[t] keyword[in] identifier[twg] ] identifier[counter] = literal[int] identifier[logger] . identifier[info] ( literal[string] % identifier[agent_limit] ) keyword[for] identifier[text] keyword[in] identifier[unmapped_texts] : identifier[agent_sentences] = identifier[get_sentences_for_agent] ( identifier[text] , identifier[stmts] ) identifier[sentences] += identifier[map] ( keyword[lambda] identifier[tup] :( identifier[text] ,)+ identifier[tup] , identifier[agent_sentences] ) identifier[counter] += literal[int] keyword[if] identifier[counter] >= identifier[agent_limit] : keyword[break] identifier[write_unicode_csv] ( identifier[filename] , identifier[sentences] , identifier[delimiter] = literal[string] , identifier[quotechar] = literal[string] , identifier[quoting] = identifier[csv] . identifier[QUOTE_MINIMAL] , identifier[lineterminator] = literal[string] )
def save_sentences(twg, stmts, filename, agent_limit=300): """Write evidence sentences for stmts with ungrounded agents to csv file. Parameters ---------- twg: list of tuple list of tuples of ungrounded agent_texts with counts of the number of times they are mentioned in the list of statements. Should be sorted in descending order by the counts. This is of the form output by the function ungrounded texts. stmts: list of :py:class:`indra.statements.Statement` filename : str Path to output file agent_limit : Optional[int] Number of agents to include in output file. Takes the top agents by count. """ sentences = [] unmapped_texts = [t[0] for t in twg] counter = 0 logger.info('Getting sentences for top %d unmapped agent texts.' % agent_limit) for text in unmapped_texts: agent_sentences = get_sentences_for_agent(text, stmts) sentences += map(lambda tup: (text,) + tup, agent_sentences) counter += 1 if counter >= agent_limit: break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['text']] # Write sentences to CSV file write_unicode_csv(filename, sentences, delimiter=',', quotechar='"', quoting=csv.QUOTE_MINIMAL, lineterminator='\r\n')
def combined_stop_to_stop_transit_network(gtfs, start_time_ut=None, end_time_ut=None): """ Compute stop-to-stop networks for all travel modes and combine them into a single network. The modes of transport are encoded to a single network. The network consists of multiple links corresponding to each travel mode. Walk mode is not included. Parameters ---------- gtfs: gtfspy.GTFS Returns ------- net: networkx.MultiDiGraph keys should be one of route_types.TRANSIT_ROUTE_TYPES (i.e. GTFS route_types) """ multi_di_graph = networkx.MultiDiGraph() for route_type in route_types.TRANSIT_ROUTE_TYPES: graph = stop_to_stop_network_for_route_type(gtfs, route_type, start_time_ut=start_time_ut, end_time_ut=end_time_ut) for from_node, to_node, data in graph.edges(data=True): data['route_type'] = route_type multi_di_graph.add_edges_from(graph.edges(data=True)) multi_di_graph.add_nodes_from(graph.nodes(data=True)) return multi_di_graph
def function[combined_stop_to_stop_transit_network, parameter[gtfs, start_time_ut, end_time_ut]]: constant[ Compute stop-to-stop networks for all travel modes and combine them into a single network. The modes of transport are encoded to a single network. The network consists of multiple links corresponding to each travel mode. Walk mode is not included. Parameters ---------- gtfs: gtfspy.GTFS Returns ------- net: networkx.MultiDiGraph keys should be one of route_types.TRANSIT_ROUTE_TYPES (i.e. GTFS route_types) ] variable[multi_di_graph] assign[=] call[name[networkx].MultiDiGraph, parameter[]] for taget[name[route_type]] in starred[name[route_types].TRANSIT_ROUTE_TYPES] begin[:] variable[graph] assign[=] call[name[stop_to_stop_network_for_route_type], parameter[name[gtfs], name[route_type]]] for taget[tuple[[<ast.Name object at 0x7da1b0089330>, <ast.Name object at 0x7da1b008b040>, <ast.Name object at 0x7da1b008a8f0>]]] in starred[call[name[graph].edges, parameter[]]] begin[:] call[name[data]][constant[route_type]] assign[=] name[route_type] call[name[multi_di_graph].add_edges_from, parameter[call[name[graph].edges, parameter[]]]] call[name[multi_di_graph].add_nodes_from, parameter[call[name[graph].nodes, parameter[]]]] return[name[multi_di_graph]]
keyword[def] identifier[combined_stop_to_stop_transit_network] ( identifier[gtfs] , identifier[start_time_ut] = keyword[None] , identifier[end_time_ut] = keyword[None] ): literal[string] identifier[multi_di_graph] = identifier[networkx] . identifier[MultiDiGraph] () keyword[for] identifier[route_type] keyword[in] identifier[route_types] . identifier[TRANSIT_ROUTE_TYPES] : identifier[graph] = identifier[stop_to_stop_network_for_route_type] ( identifier[gtfs] , identifier[route_type] , identifier[start_time_ut] = identifier[start_time_ut] , identifier[end_time_ut] = identifier[end_time_ut] ) keyword[for] identifier[from_node] , identifier[to_node] , identifier[data] keyword[in] identifier[graph] . identifier[edges] ( identifier[data] = keyword[True] ): identifier[data] [ literal[string] ]= identifier[route_type] identifier[multi_di_graph] . identifier[add_edges_from] ( identifier[graph] . identifier[edges] ( identifier[data] = keyword[True] )) identifier[multi_di_graph] . identifier[add_nodes_from] ( identifier[graph] . identifier[nodes] ( identifier[data] = keyword[True] )) keyword[return] identifier[multi_di_graph]
def combined_stop_to_stop_transit_network(gtfs, start_time_ut=None, end_time_ut=None): """ Compute stop-to-stop networks for all travel modes and combine them into a single network. The modes of transport are encoded to a single network. The network consists of multiple links corresponding to each travel mode. Walk mode is not included. Parameters ---------- gtfs: gtfspy.GTFS Returns ------- net: networkx.MultiDiGraph keys should be one of route_types.TRANSIT_ROUTE_TYPES (i.e. GTFS route_types) """ multi_di_graph = networkx.MultiDiGraph() for route_type in route_types.TRANSIT_ROUTE_TYPES: graph = stop_to_stop_network_for_route_type(gtfs, route_type, start_time_ut=start_time_ut, end_time_ut=end_time_ut) for (from_node, to_node, data) in graph.edges(data=True): data['route_type'] = route_type # depends on [control=['for'], data=[]] multi_di_graph.add_edges_from(graph.edges(data=True)) multi_di_graph.add_nodes_from(graph.nodes(data=True)) # depends on [control=['for'], data=['route_type']] return multi_di_graph
def scan_devices(self, subnet, timeout=None): """ Scan cameras in a range of ips Params: subnet - subnet, i.e: 192.168.1.0/24 if mask not used, assuming mask 24 timeout_sec - timeout in sec Returns: """ # Maximum range from mask # Format is mask: max_range max_range = { 16: 256, 24: 256, 25: 128, 27: 32, 28: 16, 29: 8, 30: 4, 31: 2 } # If user didn't provide mask, use /24 if "/" not in subnet: mask = int(24) network = subnet else: network, mask = subnet.split("/") mask = int(mask) if mask not in max_range: raise RuntimeError("Cannot determine the subnet mask!") # Default logic is remove everything from last "." to the end # This logic change in case mask is 16 network = network.rpartition(".")[0] if mask == 16: # For mask 16, we must cut the last two # entries with . # pylint: disable=unused-variable for i in range(0, 1): network = network.rpartition(".")[0] # Trigger the scan # For clear coding, let's keep the logic in if/else (mask16) # instead of only one if if mask == 16: for seq1 in range(0, max_range[mask]): for seq2 in range(0, max_range[mask]): ipaddr = "{0}.{1}.{2}".format(network, seq1, seq2) thd = threading.Thread( target=self.__raw_scan, args=(ipaddr, timeout) ) thd.start() else: for seq1 in range(0, max_range[mask]): ipaddr = "{0}.{1}".format(network, seq1) thd = threading.Thread( target=self.__raw_scan, args=(ipaddr, timeout) ) thd.start() return self.amcrest_ips
def function[scan_devices, parameter[self, subnet, timeout]]: constant[ Scan cameras in a range of ips Params: subnet - subnet, i.e: 192.168.1.0/24 if mask not used, assuming mask 24 timeout_sec - timeout in sec Returns: ] variable[max_range] assign[=] dictionary[[<ast.Constant object at 0x7da1b1115c30>, <ast.Constant object at 0x7da1b1117460>, <ast.Constant object at 0x7da1b11164a0>, <ast.Constant object at 0x7da1b1117b80>, <ast.Constant object at 0x7da1b1115b10>, <ast.Constant object at 0x7da1b1116da0>, <ast.Constant object at 0x7da1b1116410>, <ast.Constant object at 0x7da1b1115990>], [<ast.Constant object at 0x7da1b11152d0>, <ast.Constant object at 0x7da1b1117a30>, <ast.Constant object at 0x7da1b1115c00>, <ast.Constant object at 0x7da1b1115810>, <ast.Constant object at 0x7da1b1117c40>, <ast.Constant object at 0x7da1b1115a80>, <ast.Constant object at 0x7da1b11174f0>, <ast.Constant object at 0x7da1b1116ec0>]] if compare[constant[/] <ast.NotIn object at 0x7da2590d7190> name[subnet]] begin[:] variable[mask] assign[=] call[name[int], parameter[constant[24]]] variable[network] assign[=] name[subnet] if compare[name[mask] <ast.NotIn object at 0x7da2590d7190> name[max_range]] begin[:] <ast.Raise object at 0x7da1b11159c0> variable[network] assign[=] call[call[name[network].rpartition, parameter[constant[.]]]][constant[0]] if compare[name[mask] equal[==] constant[16]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[1]]]] begin[:] variable[network] assign[=] call[call[name[network].rpartition, parameter[constant[.]]]][constant[0]] if compare[name[mask] equal[==] constant[16]] begin[:] for taget[name[seq1]] in starred[call[name[range], parameter[constant[0], call[name[max_range]][name[mask]]]]] begin[:] for taget[name[seq2]] in starred[call[name[range], parameter[constant[0], call[name[max_range]][name[mask]]]]] begin[:] variable[ipaddr] assign[=] call[constant[{0}.{1}.{2}].format, parameter[name[network], name[seq1], name[seq2]]] 
variable[thd] assign[=] call[name[threading].Thread, parameter[]] call[name[thd].start, parameter[]] return[name[self].amcrest_ips]
keyword[def] identifier[scan_devices] ( identifier[self] , identifier[subnet] , identifier[timeout] = keyword[None] ): literal[string] identifier[max_range] ={ literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] , literal[int] : literal[int] } keyword[if] literal[string] keyword[not] keyword[in] identifier[subnet] : identifier[mask] = identifier[int] ( literal[int] ) identifier[network] = identifier[subnet] keyword[else] : identifier[network] , identifier[mask] = identifier[subnet] . identifier[split] ( literal[string] ) identifier[mask] = identifier[int] ( identifier[mask] ) keyword[if] identifier[mask] keyword[not] keyword[in] identifier[max_range] : keyword[raise] identifier[RuntimeError] ( literal[string] ) identifier[network] = identifier[network] . identifier[rpartition] ( literal[string] )[ literal[int] ] keyword[if] identifier[mask] == literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[network] = identifier[network] . identifier[rpartition] ( literal[string] )[ literal[int] ] keyword[if] identifier[mask] == literal[int] : keyword[for] identifier[seq1] keyword[in] identifier[range] ( literal[int] , identifier[max_range] [ identifier[mask] ]): keyword[for] identifier[seq2] keyword[in] identifier[range] ( literal[int] , identifier[max_range] [ identifier[mask] ]): identifier[ipaddr] = literal[string] . identifier[format] ( identifier[network] , identifier[seq1] , identifier[seq2] ) identifier[thd] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[__raw_scan] , identifier[args] =( identifier[ipaddr] , identifier[timeout] ) ) identifier[thd] . 
identifier[start] () keyword[else] : keyword[for] identifier[seq1] keyword[in] identifier[range] ( literal[int] , identifier[max_range] [ identifier[mask] ]): identifier[ipaddr] = literal[string] . identifier[format] ( identifier[network] , identifier[seq1] ) identifier[thd] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[__raw_scan] , identifier[args] =( identifier[ipaddr] , identifier[timeout] ) ) identifier[thd] . identifier[start] () keyword[return] identifier[self] . identifier[amcrest_ips]
def scan_devices(self, subnet, timeout=None): """ Scan cameras in a range of ips Params: subnet - subnet, i.e: 192.168.1.0/24 if mask not used, assuming mask 24 timeout_sec - timeout in sec Returns: """ # Maximum range from mask # Format is mask: max_range max_range = {16: 256, 24: 256, 25: 128, 27: 32, 28: 16, 29: 8, 30: 4, 31: 2} # If user didn't provide mask, use /24 if '/' not in subnet: mask = int(24) network = subnet # depends on [control=['if'], data=['subnet']] else: (network, mask) = subnet.split('/') mask = int(mask) if mask not in max_range: raise RuntimeError('Cannot determine the subnet mask!') # depends on [control=['if'], data=[]] # Default logic is remove everything from last "." to the end # This logic change in case mask is 16 network = network.rpartition('.')[0] if mask == 16: # For mask 16, we must cut the last two # entries with . # pylint: disable=unused-variable for i in range(0, 1): network = network.rpartition('.')[0] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # Trigger the scan # For clear coding, let's keep the logic in if/else (mask16) # instead of only one if if mask == 16: for seq1 in range(0, max_range[mask]): for seq2 in range(0, max_range[mask]): ipaddr = '{0}.{1}.{2}'.format(network, seq1, seq2) thd = threading.Thread(target=self.__raw_scan, args=(ipaddr, timeout)) thd.start() # depends on [control=['for'], data=['seq2']] # depends on [control=['for'], data=['seq1']] # depends on [control=['if'], data=['mask']] else: for seq1 in range(0, max_range[mask]): ipaddr = '{0}.{1}'.format(network, seq1) thd = threading.Thread(target=self.__raw_scan, args=(ipaddr, timeout)) thd.start() # depends on [control=['for'], data=['seq1']] return self.amcrest_ips
def reset_object(self, driver_wrapper=None):
    """Reset this object and recursively reset every page element it contains.

    :param driver_wrapper: driver wrapper instance
    """
    from toolium.pageelements.page_elements import PageElements

    if driver_wrapper:
        self.driver_wrapper = driver_wrapper
    # Drop the cached web element so it is looked up again on next access
    self._web_element = None
    # Propagate the reset to every child element
    for child in self._get_page_elements():
        child.reset_object(driver_wrapper)
        # Re-point the parent only for genuine page element objects
        if isinstance(child, (PageElement, PageElements)):
            child.parent = self
def function[reset_object, parameter[self, driver_wrapper]]: constant[Reset each page element object :param driver_wrapper: driver wrapper instance ] from relative_module[toolium.pageelements.page_elements] import module[PageElements] if name[driver_wrapper] begin[:] name[self].driver_wrapper assign[=] name[driver_wrapper] name[self]._web_element assign[=] constant[None] for taget[name[element]] in starred[call[name[self]._get_page_elements, parameter[]]] begin[:] call[name[element].reset_object, parameter[name[driver_wrapper]]] if call[name[isinstance], parameter[name[element], tuple[[<ast.Name object at 0x7da18dc9bd90>, <ast.Name object at 0x7da18dc99900>]]]] begin[:] name[element].parent assign[=] name[self]
keyword[def] identifier[reset_object] ( identifier[self] , identifier[driver_wrapper] = keyword[None] ): literal[string] keyword[from] identifier[toolium] . identifier[pageelements] . identifier[page_elements] keyword[import] identifier[PageElements] keyword[if] identifier[driver_wrapper] : identifier[self] . identifier[driver_wrapper] = identifier[driver_wrapper] identifier[self] . identifier[_web_element] = keyword[None] keyword[for] identifier[element] keyword[in] identifier[self] . identifier[_get_page_elements] (): identifier[element] . identifier[reset_object] ( identifier[driver_wrapper] ) keyword[if] identifier[isinstance] ( identifier[element] ,( identifier[PageElement] , identifier[PageElements] )): identifier[element] . identifier[parent] = identifier[self]
def reset_object(self, driver_wrapper=None): """Reset each page element object :param driver_wrapper: driver wrapper instance """ from toolium.pageelements.page_elements import PageElements if driver_wrapper: self.driver_wrapper = driver_wrapper # depends on [control=['if'], data=[]] self._web_element = None for element in self._get_page_elements(): element.reset_object(driver_wrapper) if isinstance(element, (PageElement, PageElements)): # If element is not a page object, update element parent element.parent = self # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['element']]
def copy_document(destination, identifier, pointer):
    """Copy file to a new destination."""
    # Resolve the record, copy the referenced document, then echo the result
    rec = Record.get_record(identifier)
    result = Document(rec, pointer).copy(destination)
    click.echo(json.dumps(result))
def function[copy_document, parameter[destination, identifier, pointer]]: constant[Copy file to a new destination.] variable[record] assign[=] call[name[Record].get_record, parameter[name[identifier]]] call[name[click].echo, parameter[call[name[json].dumps, parameter[call[call[name[Document], parameter[name[record], name[pointer]]].copy, parameter[name[destination]]]]]]]
keyword[def] identifier[copy_document] ( identifier[destination] , identifier[identifier] , identifier[pointer] ): literal[string] identifier[record] = identifier[Record] . identifier[get_record] ( identifier[identifier] ) identifier[click] . identifier[echo] ( identifier[json] . identifier[dumps] ( identifier[Document] ( identifier[record] , identifier[pointer] ). identifier[copy] ( identifier[destination] ) ))
def copy_document(destination, identifier, pointer): """Copy file to a new destination.""" record = Record.get_record(identifier) click.echo(json.dumps(Document(record, pointer).copy(destination)))
def rackconnectv3(vm_):
    '''
    Determine if server is using rackconnectv3 or not

    Return the rackconnect network name or False
    '''
    # Read the flag from the VM's cloud configuration only (no global search)
    value = config.get_cloud_config_value(
        'rackconnectv3',
        vm_,
        __opts__,
        default=False,
        search_global=False,
    )
    return value
def function[rackconnectv3, parameter[vm_]]: constant[ Determine if server is using rackconnectv3 or not Return the rackconnect network name or False ] return[call[name[config].get_cloud_config_value, parameter[constant[rackconnectv3], name[vm_], name[__opts__]]]]
keyword[def] identifier[rackconnectv3] ( identifier[vm_] ): literal[string] keyword[return] identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[vm_] , identifier[__opts__] , identifier[default] = keyword[False] , identifier[search_global] = keyword[False] )
def rackconnectv3(vm_): """ Determine if server is using rackconnectv3 or not Return the rackconnect network name or False """ return config.get_cloud_config_value('rackconnectv3', vm_, __opts__, default=False, search_global=False)
def get_property(self):
    """
    Returns the property of the variable

    Example
    -------------
    >>> from pgmpy.readwrite import BIFReader
    >>> reader = BIFReader("bif_test.bif")
    >>> reader.get_property()
    {'bowel-problem': ['position = (335, 99)'],
     'dog-out': ['position = (300, 195)'],
     'family-out': ['position = (257, 99)'],
     'hear-bark': ['position = (296, 268)'],
     'light-on': ['position = (218, 195)']}
    """
    # Map each variable name to the stripped property strings of its block
    properties_by_name = {}
    for block in self.variable_block():
        var_name = self.name_expr.searchString(block)[0][0]
        matches = self.property_expr.searchString(block)
        # Flatten the nested match results into one list per variable
        flattened = []
        for match in matches:
            flattened.extend(prop.strip() for prop in match)
        properties_by_name[var_name] = flattened
    return properties_by_name
def function[get_property, parameter[self]]: constant[ Returns the property of the variable Example ------------- >>> from pgmpy.readwrite import BIFReader >>> reader = BIFReader("bif_test.bif") >>> reader.get_property() {'bowel-problem': ['position = (335, 99)'], 'dog-out': ['position = (300, 195)'], 'family-out': ['position = (257, 99)'], 'hear-bark': ['position = (296, 268)'], 'light-on': ['position = (218, 195)']} ] variable[variable_properties] assign[=] dictionary[[], []] for taget[name[block]] in starred[call[name[self].variable_block, parameter[]]] begin[:] variable[name] assign[=] call[call[call[name[self].name_expr.searchString, parameter[name[block]]]][constant[0]]][constant[0]] variable[properties] assign[=] call[name[self].property_expr.searchString, parameter[name[block]]] call[name[variable_properties]][name[name]] assign[=] <ast.ListComp object at 0x7da20c990e80> return[name[variable_properties]]
keyword[def] identifier[get_property] ( identifier[self] ): literal[string] identifier[variable_properties] ={} keyword[for] identifier[block] keyword[in] identifier[self] . identifier[variable_block] (): identifier[name] = identifier[self] . identifier[name_expr] . identifier[searchString] ( identifier[block] )[ literal[int] ][ literal[int] ] identifier[properties] = identifier[self] . identifier[property_expr] . identifier[searchString] ( identifier[block] ) identifier[variable_properties] [ identifier[name] ]=[ identifier[y] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[properties] keyword[for] identifier[y] keyword[in] identifier[x] ] keyword[return] identifier[variable_properties]
def get_property(self): """ Returns the property of the variable Example ------------- >>> from pgmpy.readwrite import BIFReader >>> reader = BIFReader("bif_test.bif") >>> reader.get_property() {'bowel-problem': ['position = (335, 99)'], 'dog-out': ['position = (300, 195)'], 'family-out': ['position = (257, 99)'], 'hear-bark': ['position = (296, 268)'], 'light-on': ['position = (218, 195)']} """ variable_properties = {} for block in self.variable_block(): name = self.name_expr.searchString(block)[0][0] properties = self.property_expr.searchString(block) variable_properties[name] = [y.strip() for x in properties for y in x] # depends on [control=['for'], data=['block']] return variable_properties
def execute_sql(self, sql, commit=False):
    """Log and then execute a SQL query"""
    # Log the statement first so it is traceable even if execution fails
    logger.info('Running sqlite query: "%s"', sql)
    conn = self.connection
    conn.execute(sql)
    if commit:
        conn.commit()
def function[execute_sql, parameter[self, sql, commit]]: constant[Log and then execute a SQL query] call[name[logger].info, parameter[constant[Running sqlite query: "%s"], name[sql]]] call[name[self].connection.execute, parameter[name[sql]]] if name[commit] begin[:] call[name[self].connection.commit, parameter[]]
keyword[def] identifier[execute_sql] ( identifier[self] , identifier[sql] , identifier[commit] = keyword[False] ): literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[sql] ) identifier[self] . identifier[connection] . identifier[execute] ( identifier[sql] ) keyword[if] identifier[commit] : identifier[self] . identifier[connection] . identifier[commit] ()
def execute_sql(self, sql, commit=False): """Log and then execute a SQL query""" logger.info('Running sqlite query: "%s"', sql) self.connection.execute(sql) if commit: self.connection.commit() # depends on [control=['if'], data=[]]
def verify_pattern(pattern):
    """Verifies if pattern for matching and finding fulfill expected structure.

    :param pattern: string pattern to verify
    :return: True if pattern has proper syntax, False otherwise
    """
    # A plain token is optionally-negated letters, or one/two wildcards
    token_re = re.compile("^!?[a-zA-Z]+$|[*]{1,2}$")

    def _valid(segment):
        # Empty segments are never valid
        if not segment:
            return False
        # A leading "!" negates the remainder of the segment
        if segment[0] == "!":
            return _valid(segment[1:])
        # "[a,b,...]" is an alternation: every alternative must be valid
        if segment.startswith("[") and segment.endswith("]"):
            return all(_valid(alt) for alt in segment[1:-1].split(","))
        return token_re.match(segment)

    return all(_valid(segment) for segment in pattern.split("/"))
def function[verify_pattern, parameter[pattern]]: constant[Verifies if pattern for matching and finding fulfill expected structure. :param pattern: string pattern to verify :return: True if pattern has proper syntax, False otherwise ] variable[regex] assign[=] call[name[re].compile, parameter[constant[^!?[a-zA-Z]+$|[*]{1,2}$]]] def function[__verify_pattern__, parameter[__pattern__]]: if <ast.UnaryOp object at 0x7da1b106ee90> begin[:] return[constant[False]] return[call[name[all], parameter[<ast.GeneratorExp object at 0x7da1b10400d0>]]]
keyword[def] identifier[verify_pattern] ( identifier[pattern] ): literal[string] identifier[regex] = identifier[re] . identifier[compile] ( literal[string] ) keyword[def] identifier[__verify_pattern__] ( identifier[__pattern__] ): keyword[if] keyword[not] identifier[__pattern__] : keyword[return] keyword[False] keyword[elif] identifier[__pattern__] [ literal[int] ]== literal[string] : keyword[return] identifier[__verify_pattern__] ( identifier[__pattern__] [ literal[int] :]) keyword[elif] identifier[__pattern__] [ literal[int] ]== literal[string] keyword[and] identifier[__pattern__] [- literal[int] ]== literal[string] : keyword[return] identifier[all] ( identifier[__verify_pattern__] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[__pattern__] [ literal[int] :- literal[int] ]. identifier[split] ( literal[string] )) keyword[else] : keyword[return] identifier[regex] . identifier[match] ( identifier[__pattern__] ) keyword[return] identifier[all] ( identifier[__verify_pattern__] ( identifier[p] ) keyword[for] identifier[p] keyword[in] identifier[pattern] . identifier[split] ( literal[string] ))
def verify_pattern(pattern): """Verifies if pattern for matching and finding fulfill expected structure. :param pattern: string pattern to verify :return: True if pattern has proper syntax, False otherwise """ regex = re.compile('^!?[a-zA-Z]+$|[*]{1,2}$') def __verify_pattern__(__pattern__): if not __pattern__: return False # depends on [control=['if'], data=[]] elif __pattern__[0] == '!': return __verify_pattern__(__pattern__[1:]) # depends on [control=['if'], data=[]] elif __pattern__[0] == '[' and __pattern__[-1] == ']': return all((__verify_pattern__(p) for p in __pattern__[1:-1].split(','))) # depends on [control=['if'], data=[]] else: return regex.match(__pattern__) return all((__verify_pattern__(p) for p in pattern.split('/')))
def getParameterArrayCount(self, name, index):
    """
    Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameterArrayCount`.

    TODO: as a temporary hack, getParameterArrayCount checks to see if there's a
    variable, private or not, with that name. If so, it returns the value of the
    variable.
    """
    value = self.getParameter(name)
    # Only sized values can be treated as arrays
    if hasattr(value, '__len__'):
        return len(value)
    raise Exception("Attempt to access parameter '%s' as an array but it is not an array" % name)
def function[getParameterArrayCount, parameter[self, name, index]]: constant[ Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameterArrayCount`. TODO: as a temporary hack, getParameterArrayCount checks to see if there's a variable, private or not, with that name. If so, it returns the value of the variable. ] variable[p] assign[=] call[name[self].getParameter, parameter[name[name]]] if <ast.UnaryOp object at 0x7da20c7cb880> begin[:] <ast.Raise object at 0x7da20c7ca170> return[call[name[len], parameter[name[p]]]]
keyword[def] identifier[getParameterArrayCount] ( identifier[self] , identifier[name] , identifier[index] ): literal[string] identifier[p] = identifier[self] . identifier[getParameter] ( identifier[name] ) keyword[if] ( keyword[not] identifier[hasattr] ( identifier[p] , literal[string] )): keyword[raise] identifier[Exception] ( literal[string] % identifier[name] ) keyword[return] identifier[len] ( identifier[p] )
def getParameterArrayCount(self, name, index): """ Overrides :meth:`~nupic.bindings.regions.PyRegion.PyRegion.getParameterArrayCount`. TODO: as a temporary hack, getParameterArrayCount checks to see if there's a variable, private or not, with that name. If so, it returns the value of the variable. """ p = self.getParameter(name) if not hasattr(p, '__len__'): raise Exception("Attempt to access parameter '%s' as an array but it is not an array" % name) # depends on [control=['if'], data=[]] return len(p)
async def vcx_ledger_get_fees() -> str:
    """
    Get ledger fees from the sovrin network
    Example:
    fees = await vcx_ledger_get_fees()
    :return: JSON representing fees
    """
    logger = logging.getLogger(__name__)
    # Build the completion callback once and cache it on the function object,
    # so repeated calls reuse the same ctypes callback instance
    if not hasattr(vcx_ledger_get_fees, "cb"):
        logger.debug("vcx_ledger_get_fees: Creating callback")
        vcx_ledger_get_fees.cb = create_cb(CFUNCTYPE(None, c_uint32))
    # NOTE(review): do_call appears to bridge to the native vcx library and
    # resolve with its asynchronous response — confirm against the FFI layer
    result = await do_call('vcx_ledger_get_fees', vcx_ledger_get_fees.cb)
    logger.debug("vcx_ledger_get_fees completed")
    return result
<ast.AsyncFunctionDef object at 0x7da1b1c78af0>
keyword[async] keyword[def] identifier[vcx_ledger_get_fees] ()-> identifier[str] : literal[string] identifier[logger] = identifier[logging] . identifier[getLogger] ( identifier[__name__] ) keyword[if] keyword[not] identifier[hasattr] ( identifier[vcx_ledger_get_fees] , literal[string] ): identifier[logger] . identifier[debug] ( literal[string] ) identifier[vcx_ledger_get_fees] . identifier[cb] = identifier[create_cb] ( identifier[CFUNCTYPE] ( keyword[None] , identifier[c_uint32] )) identifier[result] = keyword[await] identifier[do_call] ( literal[string] , identifier[vcx_ledger_get_fees] . identifier[cb] ) identifier[logger] . identifier[debug] ( literal[string] ) keyword[return] identifier[result]
async def vcx_ledger_get_fees() -> str: """ Get ledger fees from the sovrin network Example: fees = await vcx_ledger_get_fees() :return: JSON representing fees """ logger = logging.getLogger(__name__) if not hasattr(vcx_ledger_get_fees, 'cb'): logger.debug('vcx_ledger_get_fees: Creating callback') vcx_ledger_get_fees.cb = create_cb(CFUNCTYPE(None, c_uint32)) # depends on [control=['if'], data=[]] result = await do_call('vcx_ledger_get_fees', vcx_ledger_get_fees.cb) logger.debug('vcx_ledger_get_fees completed') return result
def get_labels(cs):
    """Return list of every label."""
    # Flatten the 'labels' lists of every entry; missing keys contribute nothing
    return [label for c in cs for label in c.get('labels', [])]
def function[get_labels, parameter[cs]]: constant[Return list of every label.] variable[records] assign[=] list[[]] for taget[name[c]] in starred[name[cs]] begin[:] call[name[records].extend, parameter[call[name[c].get, parameter[constant[labels], list[[]]]]]] return[name[records]]
keyword[def] identifier[get_labels] ( identifier[cs] ): literal[string] identifier[records] =[] keyword[for] identifier[c] keyword[in] identifier[cs] : identifier[records] . identifier[extend] ( identifier[c] . identifier[get] ( literal[string] ,[])) keyword[return] identifier[records]
def get_labels(cs): """Return list of every label.""" records = [] for c in cs: records.extend(c.get('labels', [])) # depends on [control=['for'], data=['c']] return records
def create_response_adu(self, meta_data, response_pdu):
    """ Build response ADU from meta data and response PDU and return it.

    :param meta_data: A dict with meta data.
    :param request_pdu: A bytearray containing request PDU.
    :return: A bytearray containing request ADU.
    """
    # Unit id (one unsigned byte, big-endian) prefixes the PDU
    unit_id = struct.pack('>B', meta_data['unit_id'])
    adu_without_crc = unit_id + response_pdu
    # The CRC is computed over unit id + PDU and appended at the end
    return adu_without_crc + get_crc(adu_without_crc)
def function[create_response_adu, parameter[self, meta_data, response_pdu]]: constant[ Build response ADU from meta data and response PDU and return it. :param meta_data: A dict with meta data. :param request_pdu: A bytearray containing request PDU. :return: A bytearray containing request ADU. ] variable[first_part_adu] assign[=] binary_operation[call[name[struct].pack, parameter[constant[>B], call[name[meta_data]][constant[unit_id]]]] + name[response_pdu]] return[binary_operation[name[first_part_adu] + call[name[get_crc], parameter[name[first_part_adu]]]]]
keyword[def] identifier[create_response_adu] ( identifier[self] , identifier[meta_data] , identifier[response_pdu] ): literal[string] identifier[first_part_adu] = identifier[struct] . identifier[pack] ( literal[string] , identifier[meta_data] [ literal[string] ])+ identifier[response_pdu] keyword[return] identifier[first_part_adu] + identifier[get_crc] ( identifier[first_part_adu] )
def create_response_adu(self, meta_data, response_pdu): """ Build response ADU from meta data and response PDU and return it. :param meta_data: A dict with meta data. :param request_pdu: A bytearray containing request PDU. :return: A bytearray containing request ADU. """ first_part_adu = struct.pack('>B', meta_data['unit_id']) + response_pdu return first_part_adu + get_crc(first_part_adu)
def set_objective(self, measured_metabolites):
    '''
    Updates objective function for given measured metabolites.

    :param dict measured_metabolites: dict in which keys are metabolite names
        and values are float numbers represent fold changes in metabolites.
    '''
    # Remove any previously set objective so coefficients start from zero
    self.clean_objective()
    for k, v in measured_metabolites.items():
        # k: metabolite id, v: measured fold change for that metabolite
        m = self.model.metabolites.get_by_id(k)
        # Normalisation term shared by all reactions returned by m.producers()
        total_stoichiometry = m.total_stoichiometry(
            self.without_transports)

        for r in m.producers(self.without_transports):
            # Weight the fold change by this reaction's stoichiometric share
            update_rate = v * r.metabolites[m] / total_stoichiometry
            # += so a reaction tied to several measured metabolites accumulates
            r.objective_coefficient += update_rate
def function[set_objective, parameter[self, measured_metabolites]]: constant[ Updates objective function for given measured metabolites. :param dict measured_metabolites: dict in which keys are metabolite names and values are float numbers represent fold changes in metabolites. ] call[name[self].clean_objective, parameter[]] for taget[tuple[[<ast.Name object at 0x7da204961600>, <ast.Name object at 0x7da204962b90>]]] in starred[call[name[measured_metabolites].items, parameter[]]] begin[:] variable[m] assign[=] call[name[self].model.metabolites.get_by_id, parameter[name[k]]] variable[total_stoichiometry] assign[=] call[name[m].total_stoichiometry, parameter[name[self].without_transports]] for taget[name[r]] in starred[call[name[m].producers, parameter[name[self].without_transports]]] begin[:] variable[update_rate] assign[=] binary_operation[binary_operation[name[v] * call[name[r].metabolites][name[m]]] / name[total_stoichiometry]] <ast.AugAssign object at 0x7da204961450>
keyword[def] identifier[set_objective] ( identifier[self] , identifier[measured_metabolites] ): literal[string] identifier[self] . identifier[clean_objective] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[measured_metabolites] . identifier[items] (): identifier[m] = identifier[self] . identifier[model] . identifier[metabolites] . identifier[get_by_id] ( identifier[k] ) identifier[total_stoichiometry] = identifier[m] . identifier[total_stoichiometry] ( identifier[self] . identifier[without_transports] ) keyword[for] identifier[r] keyword[in] identifier[m] . identifier[producers] ( identifier[self] . identifier[without_transports] ): identifier[update_rate] = identifier[v] * identifier[r] . identifier[metabolites] [ identifier[m] ]/ identifier[total_stoichiometry] identifier[r] . identifier[objective_coefficient] += identifier[update_rate]
def set_objective(self, measured_metabolites): """ Updates objective function for given measured metabolites. :param dict measured_metabolites: dict in which keys are metabolite names and values are float numbers represent fold changes in metabolites. """ self.clean_objective() for (k, v) in measured_metabolites.items(): m = self.model.metabolites.get_by_id(k) total_stoichiometry = m.total_stoichiometry(self.without_transports) for r in m.producers(self.without_transports): update_rate = v * r.metabolites[m] / total_stoichiometry r.objective_coefficient += update_rate # depends on [control=['for'], data=['r']] # depends on [control=['for'], data=[]]
def nodes(self, type=None, failed=False, participant_id=None):
    """Get nodes in the network.

    type specifies the type of Node. Failed can be "all", False
    (default) or True. If a participant_id is passed only nodes with that
    participant_id will be returned.
    """
    if type is None:
        type = Node

    if not issubclass(type, Node):
        raise TypeError("{} is not a valid node type.".format(type))

    if failed not in ["all", False, True]:
        raise ValueError("{} is not a valid node failed".format(failed))

    # Build the filter incrementally; failed == "all" means no failed filter
    filters = {"network_id": self.id}
    if participant_id is not None:
        filters["participant_id"] = participant_id
    if failed != "all":
        filters["failed"] = failed
    return type.query.filter_by(**filters).all()
def function[nodes, parameter[self, type, failed, participant_id]]: constant[Get nodes in the network. type specifies the type of Node. Failed can be "all", False (default) or True. If a participant_id is passed only nodes with that participant_id will be returned. ] if compare[name[type] is constant[None]] begin[:] variable[type] assign[=] name[Node] if <ast.UnaryOp object at 0x7da1b040cf40> begin[:] <ast.Raise object at 0x7da1b040fc10> if compare[name[failed] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b040f0d0>, <ast.Constant object at 0x7da1b040f430>, <ast.Constant object at 0x7da1b040d300>]]] begin[:] <ast.Raise object at 0x7da1b040f130> if compare[name[participant_id] is_not constant[None]] begin[:] if compare[name[failed] equal[==] constant[all]] begin[:] return[call[call[name[type].query.filter_by, parameter[]].all, parameter[]]]
keyword[def] identifier[nodes] ( identifier[self] , identifier[type] = keyword[None] , identifier[failed] = keyword[False] , identifier[participant_id] = keyword[None] ): literal[string] keyword[if] identifier[type] keyword[is] keyword[None] : identifier[type] = identifier[Node] keyword[if] keyword[not] identifier[issubclass] ( identifier[type] , identifier[Node] ): keyword[raise] identifier[TypeError] ( literal[string] . identifier[format] ( identifier[type] )) keyword[if] identifier[failed] keyword[not] keyword[in] [ literal[string] , keyword[False] , keyword[True] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[failed] )) keyword[if] identifier[participant_id] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[failed] == literal[string] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[network_id] = identifier[self] . identifier[id] , identifier[participant_id] = identifier[participant_id] ). identifier[all] () keyword[else] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[network_id] = identifier[self] . identifier[id] , identifier[participant_id] = identifier[participant_id] , identifier[failed] = identifier[failed] ). identifier[all] () keyword[else] : keyword[if] identifier[failed] == literal[string] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[network_id] = identifier[self] . identifier[id] ). identifier[all] () keyword[else] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[failed] = identifier[failed] , identifier[network_id] = identifier[self] . identifier[id] ). identifier[all] ()
def nodes(self, type=None, failed=False, participant_id=None): """Get nodes in the network. type specifies the type of Node. Failed can be "all", False (default) or True. If a participant_id is passed only nodes with that participant_id will be returned. """ if type is None: type = Node # depends on [control=['if'], data=['type']] if not issubclass(type, Node): raise TypeError('{} is not a valid node type.'.format(type)) # depends on [control=['if'], data=[]] if failed not in ['all', False, True]: raise ValueError('{} is not a valid node failed'.format(failed)) # depends on [control=['if'], data=['failed']] if participant_id is not None: if failed == 'all': return type.query.filter_by(network_id=self.id, participant_id=participant_id).all() # depends on [control=['if'], data=[]] else: return type.query.filter_by(network_id=self.id, participant_id=participant_id, failed=failed).all() # depends on [control=['if'], data=['participant_id']] elif failed == 'all': return type.query.filter_by(network_id=self.id).all() # depends on [control=['if'], data=[]] else: return type.query.filter_by(failed=failed, network_id=self.id).all()
def tokenize(expr):
    """
    Parse a string expression into a list of tokens that can be used as a
    path into a Python datastructure.

    Keys are separated by '.', list indices are wrapped in '[' ']' and
    converted to int, and a backslash escapes the next character.

    :param expr: expression string such as ``a.b[0].c``
    :return: list of str keys and int indices
    """
    tokens = []
    escape = False
    cur_token = ''
    for ch in expr:
        if escape:
            # Previous char was a backslash: take this char literally
            cur_token += ch
            escape = False
        elif ch == '\\':
            escape = True
        elif ch == '[':
            # Start of an index token: flush any pending key
            if cur_token:
                tokens.append(cur_token)
            cur_token = ''
        elif ch == ']':
            # End of an index token: indices are ints
            if cur_token:
                tokens.append(int(cur_token))
            cur_token = ''
        elif ch == '.':
            # Key separator: flush any pending key
            if cur_token:
                tokens.append(cur_token)
            cur_token = ''
        else:
            cur_token += ch
    # Flush the trailing token, if any
    if cur_token:
        tokens.append(cur_token)
    return tokens
def function[tokenize, parameter[expr]]: constant[ Parse a string expression into a set of tokens that can be used as a path into a Python datastructure. ] variable[tokens] assign[=] list[[]] variable[escape] assign[=] constant[False] variable[cur_token] assign[=] constant[] for taget[name[c]] in starred[name[expr]] begin[:] if compare[name[escape] equal[==] constant[True]] begin[:] <ast.AugAssign object at 0x7da1b22a5f30> variable[escape] assign[=] constant[False] if compare[call[name[len], parameter[name[cur_token]]] greater[>] constant[0]] begin[:] call[name[tokens].append, parameter[name[cur_token]]] return[name[tokens]]
keyword[def] identifier[tokenize] ( identifier[expr] ): literal[string] identifier[tokens] =[] identifier[escape] = keyword[False] identifier[cur_token] = literal[string] keyword[for] identifier[c] keyword[in] identifier[expr] : keyword[if] identifier[escape] == keyword[True] : identifier[cur_token] += identifier[c] identifier[escape] = keyword[False] keyword[else] : keyword[if] identifier[c] == literal[string] : identifier[escape] = keyword[True] keyword[continue] keyword[elif] identifier[c] == literal[string] : keyword[if] identifier[len] ( identifier[cur_token] )> literal[int] : identifier[tokens] . identifier[append] ( identifier[cur_token] ) identifier[cur_token] = literal[string] keyword[elif] identifier[c] == literal[string] : keyword[if] identifier[len] ( identifier[cur_token] )> literal[int] : identifier[tokens] . identifier[append] ( identifier[int] ( identifier[cur_token] )) identifier[cur_token] = literal[string] keyword[elif] identifier[c] == literal[string] : keyword[if] identifier[len] ( identifier[cur_token] )> literal[int] : identifier[tokens] . identifier[append] ( identifier[cur_token] ) identifier[cur_token] = literal[string] keyword[else] : identifier[cur_token] += identifier[c] keyword[if] identifier[len] ( identifier[cur_token] )> literal[int] : identifier[tokens] . identifier[append] ( identifier[cur_token] ) keyword[return] identifier[tokens]
def tokenize(expr): """ Parse a string expression into a set of tokens that can be used as a path into a Python datastructure. """ tokens = [] escape = False cur_token = '' for c in expr: if escape == True: cur_token += c escape = False # depends on [control=['if'], data=['escape']] elif c == '\\': # Next char will be escaped escape = True continue # depends on [control=['if'], data=[]] elif c == '[': # Next token is of type index (list) if len(cur_token) > 0: tokens.append(cur_token) cur_token = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif c == ']': # End of index token. Next token defaults to a key (dict) if len(cur_token) > 0: tokens.append(int(cur_token)) cur_token = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif c == '.': # End of key token. Next token defaults to a key (dict) if len(cur_token) > 0: tokens.append(cur_token) cur_token = '' # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # Append char to token name cur_token += c # depends on [control=['for'], data=['c']] if len(cur_token) > 0: tokens.append(cur_token) # depends on [control=['if'], data=[]] return tokens
def cmd(send, msg, args):
    """Reposts a url.

    Syntax: {command}

    """
    # Pick one random stored url row and send its url back
    random_row = args['db'].query(Urls).order_by(func.random()).first()
    send("%s" % random_row.url)
def function[cmd, parameter[send, msg, args]]: constant[Reposts a url. Syntax: {command} ] variable[result] assign[=] call[call[call[call[name[args]][constant[db]].query, parameter[name[Urls]]].order_by, parameter[call[name[func].random, parameter[]]]].first, parameter[]] call[name[send], parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[result].url]]]
keyword[def] identifier[cmd] ( identifier[send] , identifier[msg] , identifier[args] ): literal[string] identifier[result] = identifier[args] [ literal[string] ]. identifier[query] ( identifier[Urls] ). identifier[order_by] ( identifier[func] . identifier[random] ()). identifier[first] () identifier[send] ( literal[string] % identifier[result] . identifier[url] )
def cmd(send, msg, args): """Reposts a url. Syntax: {command} """ result = args['db'].query(Urls).order_by(func.random()).first() send('%s' % result.url)
def commented(self, user): """ True if comment was added in given time frame """ for comment in self.comments: # Description (comment #0) is not considered as a comment if comment["count"] == 0: continue if (comment.get('author', comment.get('creator')) == user.email and comment["creation_time"] >= self.options.since.date and comment["creation_time"] < self.options.until.date): return True return False
def function[commented, parameter[self, user]]: constant[ True if comment was added in given time frame ] for taget[name[comment]] in starred[name[self].comments] begin[:] if compare[call[name[comment]][constant[count]] equal[==] constant[0]] begin[:] continue if <ast.BoolOp object at 0x7da1b1d48280> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[commented] ( identifier[self] , identifier[user] ): literal[string] keyword[for] identifier[comment] keyword[in] identifier[self] . identifier[comments] : keyword[if] identifier[comment] [ literal[string] ]== literal[int] : keyword[continue] keyword[if] ( identifier[comment] . identifier[get] ( literal[string] , identifier[comment] . identifier[get] ( literal[string] ))== identifier[user] . identifier[email] keyword[and] identifier[comment] [ literal[string] ]>= identifier[self] . identifier[options] . identifier[since] . identifier[date] keyword[and] identifier[comment] [ literal[string] ]< identifier[self] . identifier[options] . identifier[until] . identifier[date] ): keyword[return] keyword[True] keyword[return] keyword[False]
def commented(self, user): """ True if comment was added in given time frame """ for comment in self.comments: # Description (comment #0) is not considered as a comment if comment['count'] == 0: continue # depends on [control=['if'], data=[]] if comment.get('author', comment.get('creator')) == user.email and comment['creation_time'] >= self.options.since.date and (comment['creation_time'] < self.options.until.date): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['comment']] return False
def home_page(self, tld_type: Optional[TLDType] = None) -> str: """Generate a random home page. :param tld_type: TLD type. :return: Random home page. :Example: http://www.fontir.info """ resource = self.random.choice(USERNAMES) domain = self.top_level_domain( tld_type=tld_type, ) return 'http://www.{}{}'.format( resource, domain)
def function[home_page, parameter[self, tld_type]]: constant[Generate a random home page. :param tld_type: TLD type. :return: Random home page. :Example: http://www.fontir.info ] variable[resource] assign[=] call[name[self].random.choice, parameter[name[USERNAMES]]] variable[domain] assign[=] call[name[self].top_level_domain, parameter[]] return[call[constant[http://www.{}{}].format, parameter[name[resource], name[domain]]]]
keyword[def] identifier[home_page] ( identifier[self] , identifier[tld_type] : identifier[Optional] [ identifier[TLDType] ]= keyword[None] )-> identifier[str] : literal[string] identifier[resource] = identifier[self] . identifier[random] . identifier[choice] ( identifier[USERNAMES] ) identifier[domain] = identifier[self] . identifier[top_level_domain] ( identifier[tld_type] = identifier[tld_type] , ) keyword[return] literal[string] . identifier[format] ( identifier[resource] , identifier[domain] )
def home_page(self, tld_type: Optional[TLDType]=None) -> str: """Generate a random home page. :param tld_type: TLD type. :return: Random home page. :Example: http://www.fontir.info """ resource = self.random.choice(USERNAMES) domain = self.top_level_domain(tld_type=tld_type) return 'http://www.{}{}'.format(resource, domain)
def configuration(self, plugin): """ Get plugin configuration. Return a tuple of (on|off|default, args) """ conf = self.config.get(plugin, "default;").split(';') if len(conf) == 1: conf.append('') return tuple(conf)
def function[configuration, parameter[self, plugin]]: constant[ Get plugin configuration. Return a tuple of (on|off|default, args) ] variable[conf] assign[=] call[call[name[self].config.get, parameter[name[plugin], constant[default;]]].split, parameter[constant[;]]] if compare[call[name[len], parameter[name[conf]]] equal[==] constant[1]] begin[:] call[name[conf].append, parameter[constant[]]] return[call[name[tuple], parameter[name[conf]]]]
keyword[def] identifier[configuration] ( identifier[self] , identifier[plugin] ): literal[string] identifier[conf] = identifier[self] . identifier[config] . identifier[get] ( identifier[plugin] , literal[string] ). identifier[split] ( literal[string] ) keyword[if] identifier[len] ( identifier[conf] )== literal[int] : identifier[conf] . identifier[append] ( literal[string] ) keyword[return] identifier[tuple] ( identifier[conf] )
def configuration(self, plugin): """ Get plugin configuration. Return a tuple of (on|off|default, args) """ conf = self.config.get(plugin, 'default;').split(';') if len(conf) == 1: conf.append('') # depends on [control=['if'], data=[]] return tuple(conf)
def naturaldate(date, include_seconds=False): """Convert datetime into a human natural date string.""" if not date: return '' right_now = now() today = datetime(right_now.year, right_now.month, right_now.day, tzinfo=right_now.tzinfo) delta = right_now - date delta_midnight = today - date days = delta.days hours = delta.seconds // 3600 minutes = delta.seconds // 60 seconds = delta.seconds if days < 0: return _('just now') if days == 0: if hours == 0: if minutes > 0: return ungettext( _('{minutes} minute ago'), _('{minutes} minutes ago'), minutes ).format(minutes=minutes) else: if include_seconds and seconds: return ungettext( _('{seconds} second ago'), _('{seconds} seconds ago'), seconds ).format(seconds=seconds) return _('just now') else: return ungettext( _('{hours} hour ago'), _('{hours} hours ago'), hours ).format(hours=hours) if delta_midnight.days == 0: return _('yesterday at {time}').format(time=date.strftime('%H:%M')) count = 0 for chunk, pluralizefun in OLDER_CHUNKS: if days >= chunk: count = int(round((delta_midnight.days + 1) / chunk, 0)) fmt = pluralizefun(count) return fmt.format(num=count)
def function[naturaldate, parameter[date, include_seconds]]: constant[Convert datetime into a human natural date string.] if <ast.UnaryOp object at 0x7da1b2345c30> begin[:] return[constant[]] variable[right_now] assign[=] call[name[now], parameter[]] variable[today] assign[=] call[name[datetime], parameter[name[right_now].year, name[right_now].month, name[right_now].day]] variable[delta] assign[=] binary_operation[name[right_now] - name[date]] variable[delta_midnight] assign[=] binary_operation[name[today] - name[date]] variable[days] assign[=] name[delta].days variable[hours] assign[=] binary_operation[name[delta].seconds <ast.FloorDiv object at 0x7da2590d6bc0> constant[3600]] variable[minutes] assign[=] binary_operation[name[delta].seconds <ast.FloorDiv object at 0x7da2590d6bc0> constant[60]] variable[seconds] assign[=] name[delta].seconds if compare[name[days] less[<] constant[0]] begin[:] return[call[name[_], parameter[constant[just now]]]] if compare[name[days] equal[==] constant[0]] begin[:] if compare[name[hours] equal[==] constant[0]] begin[:] if compare[name[minutes] greater[>] constant[0]] begin[:] return[call[call[name[ungettext], parameter[call[name[_], parameter[constant[{minutes} minute ago]]], call[name[_], parameter[constant[{minutes} minutes ago]]], name[minutes]]].format, parameter[]]] if compare[name[delta_midnight].days equal[==] constant[0]] begin[:] return[call[call[name[_], parameter[constant[yesterday at {time}]]].format, parameter[]]] variable[count] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b23452d0>, <ast.Name object at 0x7da1b2344340>]]] in starred[name[OLDER_CHUNKS]] begin[:] if compare[name[days] greater_or_equal[>=] name[chunk]] begin[:] variable[count] assign[=] call[name[int], parameter[call[name[round], parameter[binary_operation[binary_operation[name[delta_midnight].days + constant[1]] / name[chunk]], constant[0]]]]] variable[fmt] assign[=] call[name[pluralizefun], parameter[name[count]]] 
return[call[name[fmt].format, parameter[]]]
keyword[def] identifier[naturaldate] ( identifier[date] , identifier[include_seconds] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[date] : keyword[return] literal[string] identifier[right_now] = identifier[now] () identifier[today] = identifier[datetime] ( identifier[right_now] . identifier[year] , identifier[right_now] . identifier[month] , identifier[right_now] . identifier[day] , identifier[tzinfo] = identifier[right_now] . identifier[tzinfo] ) identifier[delta] = identifier[right_now] - identifier[date] identifier[delta_midnight] = identifier[today] - identifier[date] identifier[days] = identifier[delta] . identifier[days] identifier[hours] = identifier[delta] . identifier[seconds] // literal[int] identifier[minutes] = identifier[delta] . identifier[seconds] // literal[int] identifier[seconds] = identifier[delta] . identifier[seconds] keyword[if] identifier[days] < literal[int] : keyword[return] identifier[_] ( literal[string] ) keyword[if] identifier[days] == literal[int] : keyword[if] identifier[hours] == literal[int] : keyword[if] identifier[minutes] > literal[int] : keyword[return] identifier[ungettext] ( identifier[_] ( literal[string] ), identifier[_] ( literal[string] ), identifier[minutes] ). identifier[format] ( identifier[minutes] = identifier[minutes] ) keyword[else] : keyword[if] identifier[include_seconds] keyword[and] identifier[seconds] : keyword[return] identifier[ungettext] ( identifier[_] ( literal[string] ), identifier[_] ( literal[string] ), identifier[seconds] ). identifier[format] ( identifier[seconds] = identifier[seconds] ) keyword[return] identifier[_] ( literal[string] ) keyword[else] : keyword[return] identifier[ungettext] ( identifier[_] ( literal[string] ), identifier[_] ( literal[string] ), identifier[hours] ). identifier[format] ( identifier[hours] = identifier[hours] ) keyword[if] identifier[delta_midnight] . identifier[days] == literal[int] : keyword[return] identifier[_] ( literal[string] ). 
identifier[format] ( identifier[time] = identifier[date] . identifier[strftime] ( literal[string] )) identifier[count] = literal[int] keyword[for] identifier[chunk] , identifier[pluralizefun] keyword[in] identifier[OLDER_CHUNKS] : keyword[if] identifier[days] >= identifier[chunk] : identifier[count] = identifier[int] ( identifier[round] (( identifier[delta_midnight] . identifier[days] + literal[int] )/ identifier[chunk] , literal[int] )) identifier[fmt] = identifier[pluralizefun] ( identifier[count] ) keyword[return] identifier[fmt] . identifier[format] ( identifier[num] = identifier[count] )
def naturaldate(date, include_seconds=False): """Convert datetime into a human natural date string.""" if not date: return '' # depends on [control=['if'], data=[]] right_now = now() today = datetime(right_now.year, right_now.month, right_now.day, tzinfo=right_now.tzinfo) delta = right_now - date delta_midnight = today - date days = delta.days hours = delta.seconds // 3600 minutes = delta.seconds // 60 seconds = delta.seconds if days < 0: return _('just now') # depends on [control=['if'], data=[]] if days == 0: if hours == 0: if minutes > 0: return ungettext(_('{minutes} minute ago'), _('{minutes} minutes ago'), minutes).format(minutes=minutes) # depends on [control=['if'], data=['minutes']] else: if include_seconds and seconds: return ungettext(_('{seconds} second ago'), _('{seconds} seconds ago'), seconds).format(seconds=seconds) # depends on [control=['if'], data=[]] return _('just now') # depends on [control=['if'], data=[]] else: return ungettext(_('{hours} hour ago'), _('{hours} hours ago'), hours).format(hours=hours) # depends on [control=['if'], data=[]] if delta_midnight.days == 0: return _('yesterday at {time}').format(time=date.strftime('%H:%M')) # depends on [control=['if'], data=[]] count = 0 for (chunk, pluralizefun) in OLDER_CHUNKS: if days >= chunk: count = int(round((delta_midnight.days + 1) / chunk, 0)) fmt = pluralizefun(count) return fmt.format(num=count) # depends on [control=['if'], data=['chunk']] # depends on [control=['for'], data=[]]
def moveToPoint(self, xxx_todo_changeme1): """ Moves the circle to the point x,y """ (x,y) = xxx_todo_changeme1 self.set_cx(float(self.get_cx()) + float(x)) self.set_cy(float(self.get_cy()) + float(y))
def function[moveToPoint, parameter[self, xxx_todo_changeme1]]: constant[ Moves the circle to the point x,y ] <ast.Tuple object at 0x7da18c4cd420> assign[=] name[xxx_todo_changeme1] call[name[self].set_cx, parameter[binary_operation[call[name[float], parameter[call[name[self].get_cx, parameter[]]]] + call[name[float], parameter[name[x]]]]]] call[name[self].set_cy, parameter[binary_operation[call[name[float], parameter[call[name[self].get_cy, parameter[]]]] + call[name[float], parameter[name[y]]]]]]
keyword[def] identifier[moveToPoint] ( identifier[self] , identifier[xxx_todo_changeme1] ): literal[string] ( identifier[x] , identifier[y] )= identifier[xxx_todo_changeme1] identifier[self] . identifier[set_cx] ( identifier[float] ( identifier[self] . identifier[get_cx] ())+ identifier[float] ( identifier[x] )) identifier[self] . identifier[set_cy] ( identifier[float] ( identifier[self] . identifier[get_cy] ())+ identifier[float] ( identifier[y] ))
def moveToPoint(self, xxx_todo_changeme1): """ Moves the circle to the point x,y """ (x, y) = xxx_todo_changeme1 self.set_cx(float(self.get_cx()) + float(x)) self.set_cy(float(self.get_cy()) + float(y))
def force_ascii(value): """ Return enforced ascii string éko=>ko """ if not isinstance(value, (str, unicode)): return unicode(value) if isinstance(value, unicode): return unidecode.unidecode(value) else: return unidecode.unidecode(force_unicode(value))
def function[force_ascii, parameter[value]]: constant[ Return enforced ascii string éko=>ko ] if <ast.UnaryOp object at 0x7da1b1454400> begin[:] return[call[name[unicode], parameter[name[value]]]] if call[name[isinstance], parameter[name[value], name[unicode]]] begin[:] return[call[name[unidecode].unidecode, parameter[name[value]]]]
keyword[def] identifier[force_ascii] ( identifier[value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[value] ,( identifier[str] , identifier[unicode] )): keyword[return] identifier[unicode] ( identifier[value] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[unicode] ): keyword[return] identifier[unidecode] . identifier[unidecode] ( identifier[value] ) keyword[else] : keyword[return] identifier[unidecode] . identifier[unidecode] ( identifier[force_unicode] ( identifier[value] ))
def force_ascii(value): """ Return enforced ascii string éko=>ko """ if not isinstance(value, (str, unicode)): return unicode(value) # depends on [control=['if'], data=[]] if isinstance(value, unicode): return unidecode.unidecode(value) # depends on [control=['if'], data=[]] else: return unidecode.unidecode(force_unicode(value))
def marketsDF(token='', version=''): '''https://iextrading.com/developer/docs/#intraday''' df = pd.DataFrame(markets(token, version)) _toDatetime(df) return df
def function[marketsDF, parameter[token, version]]: constant[https://iextrading.com/developer/docs/#intraday] variable[df] assign[=] call[name[pd].DataFrame, parameter[call[name[markets], parameter[name[token], name[version]]]]] call[name[_toDatetime], parameter[name[df]]] return[name[df]]
keyword[def] identifier[marketsDF] ( identifier[token] = literal[string] , identifier[version] = literal[string] ): literal[string] identifier[df] = identifier[pd] . identifier[DataFrame] ( identifier[markets] ( identifier[token] , identifier[version] )) identifier[_toDatetime] ( identifier[df] ) keyword[return] identifier[df]
def marketsDF(token='', version=''): """https://iextrading.com/developer/docs/#intraday""" df = pd.DataFrame(markets(token, version)) _toDatetime(df) return df
def compute_region_border(start, end): """ given the buffer start and end indices of a range, compute the border edges that should be drawn to enclose the range. this function currently assumes 0x10 length rows. the result is a dictionary from buffer index to Cell instance. the Cell instance has boolean properties "top", "bottom", "left", and "right" that describe if a border should be drawn on that side of the cell view. :rtype: Mapping[int, CellT] """ cells = defaultdict(Cell) start_row = row_number(start) end_row = row_number(end) if end % 0x10 == 0: end_row -= 1 ## topmost cells if start_row == end_row: for i in range(start, end): cells[i].top = True else: for i in range(start, row_end_index(start) + 1): cells[i].top = True # cells on second row, top left if start_row != end_row: next_row_start = row_start_index(start) + 0x10 for i in range(next_row_start, next_row_start + column_number(start)): cells[i].top = True ## bottommost cells if start_row == end_row: for i in range(start, end): cells[i].bottom = True else: for i in range(row_start_index(end), end): cells[i].bottom = True # cells on second-to-last row, bottom right if start_row != end_row: prev_row_end = row_end_index(end) - 0x10 for i in range(prev_row_end - (0x10 - column_number(end) - 1), prev_row_end + 1): cells[i].bottom = True ## leftmost cells if start_row == end_row: cells[start].left = True else: second_row_start = row_start_index(start) + 0x10 for i in range(second_row_start, row_start_index(end) + 0x10, 0x10): cells[i].left = True # cells in first row, top left if start_row != end_row: cells[start].left = True ## rightmost cells if start_row == end_row: cells[end - 1].right = True else: penultimate_row_end = row_end_index(end) - 0x10 for i in range(row_end_index(start), penultimate_row_end + 0x10, 0x10): cells[i].right = True # cells in last row, bottom right if start_row != end_row: cells[end - 1].right = True # convert back to standard dict # trick from: 
http://stackoverflow.com/a/20428703/87207 cells.default_factory = None return cells
def function[compute_region_border, parameter[start, end]]: constant[ given the buffer start and end indices of a range, compute the border edges that should be drawn to enclose the range. this function currently assumes 0x10 length rows. the result is a dictionary from buffer index to Cell instance. the Cell instance has boolean properties "top", "bottom", "left", and "right" that describe if a border should be drawn on that side of the cell view. :rtype: Mapping[int, CellT] ] variable[cells] assign[=] call[name[defaultdict], parameter[name[Cell]]] variable[start_row] assign[=] call[name[row_number], parameter[name[start]]] variable[end_row] assign[=] call[name[row_number], parameter[name[end]]] if compare[binary_operation[name[end] <ast.Mod object at 0x7da2590d6920> constant[16]] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da1b26ae980> if compare[name[start_row] equal[==] name[end_row]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[start], name[end]]]] begin[:] call[name[cells]][name[i]].top assign[=] constant[True] if compare[name[start_row] not_equal[!=] name[end_row]] begin[:] variable[next_row_start] assign[=] binary_operation[call[name[row_start_index], parameter[name[start]]] + constant[16]] for taget[name[i]] in starred[call[name[range], parameter[name[next_row_start], binary_operation[name[next_row_start] + call[name[column_number], parameter[name[start]]]]]]] begin[:] call[name[cells]][name[i]].top assign[=] constant[True] if compare[name[start_row] equal[==] name[end_row]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[start], name[end]]]] begin[:] call[name[cells]][name[i]].bottom assign[=] constant[True] if compare[name[start_row] not_equal[!=] name[end_row]] begin[:] variable[prev_row_end] assign[=] binary_operation[call[name[row_end_index], parameter[name[end]]] - constant[16]] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[prev_row_end] - 
binary_operation[binary_operation[constant[16] - call[name[column_number], parameter[name[end]]]] - constant[1]]], binary_operation[name[prev_row_end] + constant[1]]]]] begin[:] call[name[cells]][name[i]].bottom assign[=] constant[True] if compare[name[start_row] equal[==] name[end_row]] begin[:] call[name[cells]][name[start]].left assign[=] constant[True] if compare[name[start_row] not_equal[!=] name[end_row]] begin[:] call[name[cells]][name[start]].left assign[=] constant[True] if compare[name[start_row] equal[==] name[end_row]] begin[:] call[name[cells]][binary_operation[name[end] - constant[1]]].right assign[=] constant[True] if compare[name[start_row] not_equal[!=] name[end_row]] begin[:] call[name[cells]][binary_operation[name[end] - constant[1]]].right assign[=] constant[True] name[cells].default_factory assign[=] constant[None] return[name[cells]]
keyword[def] identifier[compute_region_border] ( identifier[start] , identifier[end] ): literal[string] identifier[cells] = identifier[defaultdict] ( identifier[Cell] ) identifier[start_row] = identifier[row_number] ( identifier[start] ) identifier[end_row] = identifier[row_number] ( identifier[end] ) keyword[if] identifier[end] % literal[int] == literal[int] : identifier[end_row] -= literal[int] keyword[if] identifier[start_row] == identifier[end_row] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[end] ): identifier[cells] [ identifier[i] ]. identifier[top] = keyword[True] keyword[else] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[row_end_index] ( identifier[start] )+ literal[int] ): identifier[cells] [ identifier[i] ]. identifier[top] = keyword[True] keyword[if] identifier[start_row] != identifier[end_row] : identifier[next_row_start] = identifier[row_start_index] ( identifier[start] )+ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[next_row_start] , identifier[next_row_start] + identifier[column_number] ( identifier[start] )): identifier[cells] [ identifier[i] ]. identifier[top] = keyword[True] keyword[if] identifier[start_row] == identifier[end_row] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[start] , identifier[end] ): identifier[cells] [ identifier[i] ]. identifier[bottom] = keyword[True] keyword[else] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[row_start_index] ( identifier[end] ), identifier[end] ): identifier[cells] [ identifier[i] ]. 
identifier[bottom] = keyword[True] keyword[if] identifier[start_row] != identifier[end_row] : identifier[prev_row_end] = identifier[row_end_index] ( identifier[end] )- literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[prev_row_end] -( literal[int] - identifier[column_number] ( identifier[end] )- literal[int] ), identifier[prev_row_end] + literal[int] ): identifier[cells] [ identifier[i] ]. identifier[bottom] = keyword[True] keyword[if] identifier[start_row] == identifier[end_row] : identifier[cells] [ identifier[start] ]. identifier[left] = keyword[True] keyword[else] : identifier[second_row_start] = identifier[row_start_index] ( identifier[start] )+ literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[second_row_start] , identifier[row_start_index] ( identifier[end] )+ literal[int] , literal[int] ): identifier[cells] [ identifier[i] ]. identifier[left] = keyword[True] keyword[if] identifier[start_row] != identifier[end_row] : identifier[cells] [ identifier[start] ]. identifier[left] = keyword[True] keyword[if] identifier[start_row] == identifier[end_row] : identifier[cells] [ identifier[end] - literal[int] ]. identifier[right] = keyword[True] keyword[else] : identifier[penultimate_row_end] = identifier[row_end_index] ( identifier[end] )- literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[row_end_index] ( identifier[start] ), identifier[penultimate_row_end] + literal[int] , literal[int] ): identifier[cells] [ identifier[i] ]. identifier[right] = keyword[True] keyword[if] identifier[start_row] != identifier[end_row] : identifier[cells] [ identifier[end] - literal[int] ]. identifier[right] = keyword[True] identifier[cells] . identifier[default_factory] = keyword[None] keyword[return] identifier[cells]
def compute_region_border(start, end): """ given the buffer start and end indices of a range, compute the border edges that should be drawn to enclose the range. this function currently assumes 0x10 length rows. the result is a dictionary from buffer index to Cell instance. the Cell instance has boolean properties "top", "bottom", "left", and "right" that describe if a border should be drawn on that side of the cell view. :rtype: Mapping[int, CellT] """ cells = defaultdict(Cell) start_row = row_number(start) end_row = row_number(end) if end % 16 == 0: end_row -= 1 # depends on [control=['if'], data=[]] ## topmost cells if start_row == end_row: for i in range(start, end): cells[i].top = True # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: for i in range(start, row_end_index(start) + 1): cells[i].top = True # depends on [control=['for'], data=['i']] # cells on second row, top left if start_row != end_row: next_row_start = row_start_index(start) + 16 for i in range(next_row_start, next_row_start + column_number(start)): cells[i].top = True # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] ## bottommost cells if start_row == end_row: for i in range(start, end): cells[i].bottom = True # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: for i in range(row_start_index(end), end): cells[i].bottom = True # depends on [control=['for'], data=['i']] # cells on second-to-last row, bottom right if start_row != end_row: prev_row_end = row_end_index(end) - 16 for i in range(prev_row_end - (16 - column_number(end) - 1), prev_row_end + 1): cells[i].bottom = True # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] ## leftmost cells if start_row == end_row: cells[start].left = True # depends on [control=['if'], data=[]] else: second_row_start = row_start_index(start) + 16 for i in range(second_row_start, row_start_index(end) + 16, 16): cells[i].left 
= True # depends on [control=['for'], data=['i']] # cells in first row, top left if start_row != end_row: cells[start].left = True # depends on [control=['if'], data=[]] ## rightmost cells if start_row == end_row: cells[end - 1].right = True # depends on [control=['if'], data=[]] else: penultimate_row_end = row_end_index(end) - 16 for i in range(row_end_index(start), penultimate_row_end + 16, 16): cells[i].right = True # depends on [control=['for'], data=['i']] # cells in last row, bottom right if start_row != end_row: cells[end - 1].right = True # depends on [control=['if'], data=[]] # convert back to standard dict # trick from: http://stackoverflow.com/a/20428703/87207 cells.default_factory = None return cells
def inject(self, span_context, format, carrier): """Injects `span_context` into `carrier`. The type of `carrier` is determined by `format`. See the :class:`Format` class/namespace for the built-in OpenTracing formats. Implementations *must* raise :exc:`UnsupportedFormatException` if `format` is unknown or disallowed. :param span_context: the :class:`SpanContext` instance to inject :type span_context: SpanContext :param format: a python object instance that represents a given carrier format. `format` may be of any type, and `format` equality is defined by python ``==`` equality. :type format: Format :param carrier: the format-specific carrier object to inject into """ if format in Tracer._supported_formats: return raise UnsupportedFormatException(format)
def function[inject, parameter[self, span_context, format, carrier]]: constant[Injects `span_context` into `carrier`. The type of `carrier` is determined by `format`. See the :class:`Format` class/namespace for the built-in OpenTracing formats. Implementations *must* raise :exc:`UnsupportedFormatException` if `format` is unknown or disallowed. :param span_context: the :class:`SpanContext` instance to inject :type span_context: SpanContext :param format: a python object instance that represents a given carrier format. `format` may be of any type, and `format` equality is defined by python ``==`` equality. :type format: Format :param carrier: the format-specific carrier object to inject into ] if compare[name[format] in name[Tracer]._supported_formats] begin[:] return[None] <ast.Raise object at 0x7da204962bf0>
keyword[def] identifier[inject] ( identifier[self] , identifier[span_context] , identifier[format] , identifier[carrier] ): literal[string] keyword[if] identifier[format] keyword[in] identifier[Tracer] . identifier[_supported_formats] : keyword[return] keyword[raise] identifier[UnsupportedFormatException] ( identifier[format] )
def inject(self, span_context, format, carrier): """Injects `span_context` into `carrier`. The type of `carrier` is determined by `format`. See the :class:`Format` class/namespace for the built-in OpenTracing formats. Implementations *must* raise :exc:`UnsupportedFormatException` if `format` is unknown or disallowed. :param span_context: the :class:`SpanContext` instance to inject :type span_context: SpanContext :param format: a python object instance that represents a given carrier format. `format` may be of any type, and `format` equality is defined by python ``==`` equality. :type format: Format :param carrier: the format-specific carrier object to inject into """ if format in Tracer._supported_formats: return # depends on [control=['if'], data=[]] raise UnsupportedFormatException(format)
def profileit(func): """ Decorator straight up stolen from stackoverflow """ def wrapper(*args, **kwargs): datafn = func.__name__ + ".profile" # Name the data file sensibly prof = cProfile.Profile() prof.enable() retval = prof.runcall(func, *args, **kwargs) prof.disable() stats = pstats.Stats(prof) try: stats.sort_stats('cumtime').print_stats() except KeyError: pass # breaks in python 2.6 return retval return wrapper
def function[profileit, parameter[func]]: constant[ Decorator straight up stolen from stackoverflow ] def function[wrapper, parameter[]]: variable[datafn] assign[=] binary_operation[name[func].__name__ + constant[.profile]] variable[prof] assign[=] call[name[cProfile].Profile, parameter[]] call[name[prof].enable, parameter[]] variable[retval] assign[=] call[name[prof].runcall, parameter[name[func], <ast.Starred object at 0x7da1b10a5120>]] call[name[prof].disable, parameter[]] variable[stats] assign[=] call[name[pstats].Stats, parameter[name[prof]]] <ast.Try object at 0x7da1b10a7df0> return[name[retval]] return[name[wrapper]]
keyword[def] identifier[profileit] ( identifier[func] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[datafn] = identifier[func] . identifier[__name__] + literal[string] identifier[prof] = identifier[cProfile] . identifier[Profile] () identifier[prof] . identifier[enable] () identifier[retval] = identifier[prof] . identifier[runcall] ( identifier[func] ,* identifier[args] ,** identifier[kwargs] ) identifier[prof] . identifier[disable] () identifier[stats] = identifier[pstats] . identifier[Stats] ( identifier[prof] ) keyword[try] : identifier[stats] . identifier[sort_stats] ( literal[string] ). identifier[print_stats] () keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[retval] keyword[return] identifier[wrapper]
def profileit(func): """ Decorator straight up stolen from stackoverflow """ def wrapper(*args, **kwargs): datafn = func.__name__ + '.profile' # Name the data file sensibly prof = cProfile.Profile() prof.enable() retval = prof.runcall(func, *args, **kwargs) prof.disable() stats = pstats.Stats(prof) try: stats.sort_stats('cumtime').print_stats() # depends on [control=['try'], data=[]] except KeyError: pass # breaks in python 2.6 # depends on [control=['except'], data=[]] return retval return wrapper
def contains(self, column, value):
    """
    Restrict the main dataframe instance to the rows whose ``column``
    value contains the substring ``value``.

    :param column: name of the column to search in
    :param value: substring to look for
    """
    # ``na=False`` treats NaN cells as non-matching -- the same effect the
    # original ``== True`` comparison had, but without the anti-idiom.
    df = self.df[self.df[column].str.contains(value, na=False)]
    if df is None:
        # Defensive guard kept from the original implementation; boolean
        # indexing never actually yields None, so this branch is dead code.
        self.err("Can not select contained data")
        return
    self.df = df
def function[contains, parameter[self, column, value]]: constant[ Set the main dataframe instance to rows that contains a string value in a column ] variable[df] assign[=] call[name[self].df][compare[call[call[name[self].df][name[column]].str.contains, parameter[name[value]]] equal[==] constant[True]]] if compare[name[df] is constant[None]] begin[:] call[name[self].err, parameter[constant[Can not select contained data]]] return[None] name[self].df assign[=] name[df]
keyword[def] identifier[contains] ( identifier[self] , identifier[column] , identifier[value] ): literal[string] identifier[df] = identifier[self] . identifier[df] [ identifier[self] . identifier[df] [ identifier[column] ]. identifier[str] . identifier[contains] ( identifier[value] )== keyword[True] ] keyword[if] identifier[df] keyword[is] keyword[None] : identifier[self] . identifier[err] ( literal[string] ) keyword[return] identifier[self] . identifier[df] = identifier[df]
def contains(self, column, value): """ Set the main dataframe instance to rows that contains a string value in a column """ df = self.df[self.df[column].str.contains(value) == True] if df is None: self.err('Can not select contained data') return # depends on [control=['if'], data=[]] self.df = df
def variable_specifier(self) -> dict:
    """Return the variable specifier for this variable.

    The specifier can be used to lookup the value of this variable in a
    computation context.
    """
    # Without a value type this object is a plain reference: delegate.
    if self.value_type is None:
        return self.specifier
    specifier = {
        "type": "variable",
        "version": 1,
        "uuid": str(self.uuid),
        "x-name": self.name,
        "x-value": self.value,
    }
    return specifier
def function[variable_specifier, parameter[self]]: constant[Return the variable specifier for this variable. The specifier can be used to lookup the value of this variable in a computation context. ] if compare[name[self].value_type is_not constant[None]] begin[:] return[dictionary[[<ast.Constant object at 0x7da204345450>, <ast.Constant object at 0x7da204344610>, <ast.Constant object at 0x7da2043448b0>, <ast.Constant object at 0x7da204347d30>, <ast.Constant object at 0x7da2043467a0>], [<ast.Constant object at 0x7da2043463e0>, <ast.Constant object at 0x7da204347a90>, <ast.Call object at 0x7da2043442b0>, <ast.Attribute object at 0x7da204346a40>, <ast.Attribute object at 0x7da204346e00>]]]
keyword[def] identifier[variable_specifier] ( identifier[self] )-> identifier[dict] : literal[string] keyword[if] identifier[self] . identifier[value_type] keyword[is] keyword[not] keyword[None] : keyword[return] { literal[string] : literal[string] , literal[string] : literal[int] , literal[string] : identifier[str] ( identifier[self] . identifier[uuid] ), literal[string] : identifier[self] . identifier[name] , literal[string] : identifier[self] . identifier[value] } keyword[else] : keyword[return] identifier[self] . identifier[specifier]
def variable_specifier(self) -> dict: """Return the variable specifier for this variable. The specifier can be used to lookup the value of this variable in a computation context. """ if self.value_type is not None: return {'type': 'variable', 'version': 1, 'uuid': str(self.uuid), 'x-name': self.name, 'x-value': self.value} # depends on [control=['if'], data=[]] else: return self.specifier
def sendSMS(self, CorpNum, ItemCode, MgtKey, Sender, Receiver, Contents, UserID=None):
    """ Send a notification SMS for a statement.
        args
            CorpNum : Popbill member business registration number
            ItemCode : statement type code
                [121 - transaction statement], [122 - bill], [123 - estimate],
                [124 - purchase order], [125 - deposit slip], [126 - receipt]
            MgtKey : partner document management key
            Sender : sender phone number
            Receiver : receiver phone number
            Contents : SMS body (max 90 bytes); longer text is truncated on send
            UserID : Popbill member user id
        return
            processing result. consist of code and message
        raise
            PopbillException
    """
    # NOTE: error message strings below are user-facing and intentionally
    # kept exactly as the Popbill API emits them (Korean).
    if MgtKey is None or MgtKey == "":
        raise PopbillException(-99999999, "관리번호가 입력되지 않았습니다.")
    if ItemCode is None or ItemCode == "":
        raise PopbillException(-99999999, "명세서 종류 코드가 입력되지 않았습니다.")

    postData = self._stringtify({
        "sender": Sender,
        "receiver": Receiver,
        "contents": Contents
    })

    return self._httppost('/Statement/' + str(ItemCode) + '/' + MgtKey,
                          postData, CorpNum, UserID, "SMS")
def function[sendSMS, parameter[self, CorpNum, ItemCode, MgtKey, Sender, Receiver, Contents, UserID]]: constant[ 알림문자 전송 args CorpNum : 팝빌회원 사업자번호 ItemCode : 명세서 종류 코드 [121 - 거래명세서], [122 - 청구서], [123 - 견적서], [124 - 발주서], [125 - 입금표], [126 - 영수증] MgtKey : 파트너 문서관리번호 Sender : 발신번호 Receiver : 수신번호 Contents : 문자메시지 내용(최대 90Byte), 최대길이를 초과한경우 길이가 조정되어 전송됨 UserID : 팝빌 회원아이디 return 처리결과. consist of code and message raise PopbillException ] if <ast.BoolOp object at 0x7da18bc72ce0> begin[:] <ast.Raise object at 0x7da18bc725c0> if <ast.BoolOp object at 0x7da18bc72ef0> begin[:] <ast.Raise object at 0x7da18bc730a0> variable[postData] assign[=] call[name[self]._stringtify, parameter[dictionary[[<ast.Constant object at 0x7da18bc71000>, <ast.Constant object at 0x7da18bc71c30>, <ast.Constant object at 0x7da18bc70d60>], [<ast.Name object at 0x7da18bc711b0>, <ast.Name object at 0x7da18bc73e80>, <ast.Name object at 0x7da18bc72800>]]]] return[call[name[self]._httppost, parameter[binary_operation[binary_operation[binary_operation[constant[/Statement/] + call[name[str], parameter[name[ItemCode]]]] + constant[/]] + name[MgtKey]], name[postData], name[CorpNum], name[UserID], constant[SMS]]]]
keyword[def] identifier[sendSMS] ( identifier[self] , identifier[CorpNum] , identifier[ItemCode] , identifier[MgtKey] , identifier[Sender] , identifier[Receiver] , identifier[Contents] , identifier[UserID] = keyword[None] ): literal[string] keyword[if] identifier[MgtKey] == keyword[None] keyword[or] identifier[MgtKey] == literal[string] : keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] ) keyword[if] identifier[ItemCode] == keyword[None] keyword[or] identifier[ItemCode] == literal[string] : keyword[raise] identifier[PopbillException] (- literal[int] , literal[string] ) identifier[postData] = identifier[self] . identifier[_stringtify] ({ literal[string] : identifier[Sender] , literal[string] : identifier[Receiver] , literal[string] : identifier[Contents] }) keyword[return] identifier[self] . identifier[_httppost] ( literal[string] + identifier[str] ( identifier[ItemCode] )+ literal[string] + identifier[MgtKey] , identifier[postData] , identifier[CorpNum] , identifier[UserID] , literal[string] )
def sendSMS(self, CorpNum, ItemCode, MgtKey, Sender, Receiver, Contents, UserID=None): """ 알림문자 전송 args CorpNum : 팝빌회원 사업자번호 ItemCode : 명세서 종류 코드 [121 - 거래명세서], [122 - 청구서], [123 - 견적서], [124 - 발주서], [125 - 입금표], [126 - 영수증] MgtKey : 파트너 문서관리번호 Sender : 발신번호 Receiver : 수신번호 Contents : 문자메시지 내용(최대 90Byte), 최대길이를 초과한경우 길이가 조정되어 전송됨 UserID : 팝빌 회원아이디 return 처리결과. consist of code and message raise PopbillException """ if MgtKey == None or MgtKey == '': raise PopbillException(-99999999, '관리번호가 입력되지 않았습니다.') # depends on [control=['if'], data=[]] if ItemCode == None or ItemCode == '': raise PopbillException(-99999999, '명세서 종류 코드가 입력되지 않았습니다.') # depends on [control=['if'], data=[]] postData = self._stringtify({'sender': Sender, 'receiver': Receiver, 'contents': Contents}) return self._httppost('/Statement/' + str(ItemCode) + '/' + MgtKey, postData, CorpNum, UserID, 'SMS')
def get_elements(self, tag_name, attribute):
    """
    Return elements in xml files which match with the tag name and the
    specific attribute.

    :param tag_name: a string which specify the tag name
    :param attribute: a string which specify the attribute
        (looked up in the Android XML namespace)
    :return: list of matching attribute values, formatted and stringified
    """
    # ``results`` instead of the original ambiguous name ``l`` (PEP 8 E741).
    results = []
    for xml_name in self.xml:
        for item in self.xml[xml_name].getElementsByTagName(tag_name):
            value = item.getAttributeNS(NS_ANDROID_URI, attribute)
            value = self.format_value(value)
            results.append(str(value))
    return results
def function[get_elements, parameter[self, tag_name, attribute]]: constant[ Return elements in xml files which match with the tag name and the specific attribute :param tag_name: a string which specify the tag name :param attribute: a string which specify the attribute ] variable[l] assign[=] list[[]] for taget[name[i]] in starred[name[self].xml] begin[:] for taget[name[item]] in starred[call[call[name[self].xml][name[i]].getElementsByTagName, parameter[name[tag_name]]]] begin[:] variable[value] assign[=] call[name[item].getAttributeNS, parameter[name[NS_ANDROID_URI], name[attribute]]] variable[value] assign[=] call[name[self].format_value, parameter[name[value]]] call[name[l].append, parameter[call[name[str], parameter[name[value]]]]] return[name[l]]
keyword[def] identifier[get_elements] ( identifier[self] , identifier[tag_name] , identifier[attribute] ): literal[string] identifier[l] =[] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[xml] : keyword[for] identifier[item] keyword[in] identifier[self] . identifier[xml] [ identifier[i] ]. identifier[getElementsByTagName] ( identifier[tag_name] ): identifier[value] = identifier[item] . identifier[getAttributeNS] ( identifier[NS_ANDROID_URI] , identifier[attribute] ) identifier[value] = identifier[self] . identifier[format_value] ( identifier[value] ) identifier[l] . identifier[append] ( identifier[str] ( identifier[value] )) keyword[return] identifier[l]
def get_elements(self, tag_name, attribute): """ Return elements in xml files which match with the tag name and the specific attribute :param tag_name: a string which specify the tag name :param attribute: a string which specify the attribute """ l = [] for i in self.xml: for item in self.xml[i].getElementsByTagName(tag_name): value = item.getAttributeNS(NS_ANDROID_URI, attribute) value = self.format_value(value) l.append(str(value)) # depends on [control=['for'], data=['item']] # depends on [control=['for'], data=['i']] return l
def check_configuration_string(
        self, config_string, is_job=True, external_name=False
):
    """
    Check whether the given job or task configuration string is
    well-formed (when given as a byte string) and contains all the
    required parameters.

    :param string config_string: the byte string or Unicode string to be checked
    :param bool is_job: if ``True``, ``config_string`` is a job config string
    :param bool external_name: if ``True``, the task name is provided
                               externally, and it is not required to appear
                               in the config string
    :rtype: :class:`~aeneas.validator.ValidatorResult`
    """
    if is_job:
        self.log(u"Checking job configuration string")
    else:
        self.log(u"Checking task configuration string")
    self.result = ValidatorResult()
    if self._are_safety_checks_disabled(u"check_configuration_string"):
        return self.result
    # Pick the parameter set this string must provide.
    if not is_job:
        required_parameters = (
            self.TASK_REQUIRED_PARAMETERS_EXTERNAL_NAME
            if external_name
            else self.TASK_REQUIRED_PARAMETERS
        )
    else:
        required_parameters = self.JOB_REQUIRED_PARAMETERS
    # Byte strings must first pass the raw well-formedness check and be
    # decoded before parameter parsing.
    if gf.is_bytes(config_string):
        self.log(u"Checking that config_string is well formed")
        self.check_raw_string(config_string, is_bstring=True)
        if not self.result.passed:
            return self.result
        config_string = gf.safe_unicode(config_string)
    self.log(u"Checking required parameters")
    parameters = gf.config_string_to_dict(config_string, self.result)
    self._check_required_parameters(required_parameters, parameters)
    self.log([u"Checking config_string: returning %s", self.result.passed])
    return self.result
def function[check_configuration_string, parameter[self, config_string, is_job, external_name]]: constant[ Check whether the given job or task configuration string is well-formed (if ``is_bstring`` is ``True``) and it has all the required parameters. :param string config_string: the byte string or Unicode string to be checked :param bool is_job: if ``True``, ``config_string`` is a job config string :param bool external_name: if ``True``, the task name is provided externally, and it is not required to appear in the config string :rtype: :class:`~aeneas.validator.ValidatorResult` ] if name[is_job] begin[:] call[name[self].log, parameter[constant[Checking job configuration string]]] name[self].result assign[=] call[name[ValidatorResult], parameter[]] if call[name[self]._are_safety_checks_disabled, parameter[constant[check_configuration_string]]] begin[:] return[name[self].result] if name[is_job] begin[:] variable[required_parameters] assign[=] name[self].JOB_REQUIRED_PARAMETERS variable[is_bstring] assign[=] call[name[gf].is_bytes, parameter[name[config_string]]] if name[is_bstring] begin[:] call[name[self].log, parameter[constant[Checking that config_string is well formed]]] call[name[self].check_raw_string, parameter[name[config_string]]] if <ast.UnaryOp object at 0x7da2054a4070> begin[:] return[name[self].result] variable[config_string] assign[=] call[name[gf].safe_unicode, parameter[name[config_string]]] call[name[self].log, parameter[constant[Checking required parameters]]] variable[parameters] assign[=] call[name[gf].config_string_to_dict, parameter[name[config_string], name[self].result]] call[name[self]._check_required_parameters, parameter[name[required_parameters], name[parameters]]] call[name[self].log, parameter[list[[<ast.Constant object at 0x7da2054a6890>, <ast.Attribute object at 0x7da2054a6a40>]]]] return[name[self].result]
keyword[def] identifier[check_configuration_string] ( identifier[self] , identifier[config_string] , identifier[is_job] = keyword[True] , identifier[external_name] = keyword[False] ): literal[string] keyword[if] identifier[is_job] : identifier[self] . identifier[log] ( literal[string] ) keyword[else] : identifier[self] . identifier[log] ( literal[string] ) identifier[self] . identifier[result] = identifier[ValidatorResult] () keyword[if] identifier[self] . identifier[_are_safety_checks_disabled] ( literal[string] ): keyword[return] identifier[self] . identifier[result] keyword[if] identifier[is_job] : identifier[required_parameters] = identifier[self] . identifier[JOB_REQUIRED_PARAMETERS] keyword[elif] identifier[external_name] : identifier[required_parameters] = identifier[self] . identifier[TASK_REQUIRED_PARAMETERS_EXTERNAL_NAME] keyword[else] : identifier[required_parameters] = identifier[self] . identifier[TASK_REQUIRED_PARAMETERS] identifier[is_bstring] = identifier[gf] . identifier[is_bytes] ( identifier[config_string] ) keyword[if] identifier[is_bstring] : identifier[self] . identifier[log] ( literal[string] ) identifier[self] . identifier[check_raw_string] ( identifier[config_string] , identifier[is_bstring] = keyword[True] ) keyword[if] keyword[not] identifier[self] . identifier[result] . identifier[passed] : keyword[return] identifier[self] . identifier[result] identifier[config_string] = identifier[gf] . identifier[safe_unicode] ( identifier[config_string] ) identifier[self] . identifier[log] ( literal[string] ) identifier[parameters] = identifier[gf] . identifier[config_string_to_dict] ( identifier[config_string] , identifier[self] . identifier[result] ) identifier[self] . identifier[_check_required_parameters] ( identifier[required_parameters] , identifier[parameters] ) identifier[self] . identifier[log] ([ literal[string] , identifier[self] . identifier[result] . identifier[passed] ]) keyword[return] identifier[self] . identifier[result]
def check_configuration_string(self, config_string, is_job=True, external_name=False): """ Check whether the given job or task configuration string is well-formed (if ``is_bstring`` is ``True``) and it has all the required parameters. :param string config_string: the byte string or Unicode string to be checked :param bool is_job: if ``True``, ``config_string`` is a job config string :param bool external_name: if ``True``, the task name is provided externally, and it is not required to appear in the config string :rtype: :class:`~aeneas.validator.ValidatorResult` """ if is_job: self.log(u'Checking job configuration string') # depends on [control=['if'], data=[]] else: self.log(u'Checking task configuration string') self.result = ValidatorResult() if self._are_safety_checks_disabled(u'check_configuration_string'): return self.result # depends on [control=['if'], data=[]] if is_job: required_parameters = self.JOB_REQUIRED_PARAMETERS # depends on [control=['if'], data=[]] elif external_name: required_parameters = self.TASK_REQUIRED_PARAMETERS_EXTERNAL_NAME # depends on [control=['if'], data=[]] else: required_parameters = self.TASK_REQUIRED_PARAMETERS is_bstring = gf.is_bytes(config_string) if is_bstring: self.log(u'Checking that config_string is well formed') self.check_raw_string(config_string, is_bstring=True) if not self.result.passed: return self.result # depends on [control=['if'], data=[]] config_string = gf.safe_unicode(config_string) # depends on [control=['if'], data=[]] self.log(u'Checking required parameters') parameters = gf.config_string_to_dict(config_string, self.result) self._check_required_parameters(required_parameters, parameters) self.log([u'Checking config_string: returning %s', self.result.passed]) return self.result
def handleMethodCallMessage(self, msg):
    """
    Handles DBus MethodCall messages on behalf of the DBus Connection
    and dispatches them to the appropriate exported object.

    Dispatch order:
      1. ``org.freedesktop.DBus.Peer.Ping`` is answered directly.
      2. ``org.freedesktop.DBus.Introspectable.Introspect`` is answered
         from generated introspection XML.
      3. The target object is looked up by its object path.
      4. ``org.freedesktop.DBus.ObjectManager.GetManagedObjects`` is
         answered from the exported-object registry.
      5. Otherwise the method is resolved on the object's interfaces and
         executed; a reply or error is sent back when the caller expects
         one.
    """
    # Standard Peer.Ping: reply immediately with an empty method return.
    if (
        msg.interface == 'org.freedesktop.DBus.Peer'
        and msg.member == 'Ping'
    ):
        r = message.MethodReturnMessage(
            msg.serial,
            destination=msg.sender,
        )
        self.conn.sendMessage(r)
        return

    # Introspection: serve the generated XML for this object path.
    # NOTE(review): if XML generation returns None we fall through to the
    # normal dispatch below rather than erroring here.
    if (
        msg.interface == 'org.freedesktop.DBus.Introspectable'
        and msg.member == 'Introspect'
    ):
        xml = introspection.generateIntrospectionXML(
            msg.path,
            self.exports,
        )

        if xml is not None:
            r = message.MethodReturnMessage(
                msg.serial,
                body=[xml],
                destination=msg.sender,
                signature='s',
            )
            self.conn.sendMessage(r)
            return

    # Try to get object from complete object path
    o = self.exports.get(msg.path, None)

    if o is None:
        self._send_err(
            msg,
            'org.freedesktop.DBus.Error.UnknownObject',
            '%s is not an object provided by this process.' % (msg.path),
        )
        return

    # ObjectManager support: report interfaces/properties of the managed
    # objects rooted at this object's path.
    if (
        msg.interface == 'org.freedesktop.DBus.ObjectManager'
        and msg.member == 'GetManagedObjects'
    ):
        i_and_p = self.getManagedObjects(o.getObjectPath())
        r = message.MethodReturnMessage(
            msg.serial,
            body=[i_and_p],
            destination=msg.sender,
            signature='a{oa{sa{sv}}}',
        )
        self.conn.sendMessage(r)
        return

    # Resolve the interface: an explicit interface in the message must
    # match by name; otherwise the first interface defining the member
    # wins.
    i = None

    for x in o.getInterfaces():
        if msg.interface:
            if x.name == msg.interface:
                i = x
                break
        else:
            if msg.member in x.methods:
                i = x
                break

    m = None

    if i:
        m = i.methods.get(msg.member, None)

    if m is None:
        self._send_err(
            msg,
            'org.freedesktop.DBus.Error.UnknownMethod',
            (
                'Method "%s" with signature "%s" on interface "%s" '
                'doesn\'t exist'
            ) % (
                msg.member,
                msg.signature or '',
                msg.interface or '(null)',
            ),
        )
        return

    # The caller's signature must exactly match the method's declared
    # input signature (missing signatures are treated as empty strings).
    msig = msg.signature if msg.signature is not None else ''
    esig = m.sigIn if m.sigIn is not None else ''

    if esig != msig:
        self._send_err(
            msg,
            'org.freedesktop.DBus.Error.InvalidArgs',
            'Call to %s has wrong args (%s, expected %s)'
            % (msg.member, msg.signature or '', m.sigIn or '')
        )
        return

    # maybeDeferred normalizes synchronous results, Deferreds and raised
    # exceptions into a single Deferred.
    d = defer.maybeDeferred(
        o.executeMethod,
        i,
        msg.member,
        msg.body,
        msg.sender,
    )

    if msg.expectReply:

        def send_reply(return_values):
            # Normalize to a list matching the declared return count: a
            # single list/tuple result must not be mistaken for multiple
            # return values.
            if isinstance(return_values, (list, tuple)):
                if m.nret == 1:
                    return_values = [return_values]
            else:
                return_values = [return_values]

            r = message.MethodReturnMessage(
                msg.serial,
                body=return_values,
                destination=msg.sender,
                signature=m.sigOut,
            )
            self.conn.sendMessage(r)

        def send_error(err):
            e = err.value
            errMsg = err.getErrorMessage()
            name = None

            if hasattr(e, 'dbusErrorName'):
                name = e.dbusErrorName

            if name is None:
                name = 'org.txdbus.PythonException.' + e.__class__.__name__

            try:
                marshal.validateErrorName(name)
            except error.MarshallingError:
                # Fall back to a valid error name, keeping the invalid
                # one visible inside the error text for debugging.
                errMsg = ('!!(Invalid error name "%s")!! ' % name) + errMsg
                name = 'org.txdbus.InvalidErrorName'

            r = message.ErrorMessage(name, msg.serial,
                                     body=[errMsg],
                                     signature='s',
                                     destination=msg.sender)
            self.conn.sendMessage(r)

        d.addCallback(send_reply)
        d.addErrback(send_error)
def function[handleMethodCallMessage, parameter[self, msg]]: constant[ Handles DBus MethodCall messages on behalf of the DBus Connection and dispatches them to the appropriate exported object ] if <ast.BoolOp object at 0x7da1b06dd6c0> begin[:] variable[r] assign[=] call[name[message].MethodReturnMessage, parameter[name[msg].serial]] call[name[self].conn.sendMessage, parameter[name[r]]] return[None] if <ast.BoolOp object at 0x7da1b06df790> begin[:] variable[xml] assign[=] call[name[introspection].generateIntrospectionXML, parameter[name[msg].path, name[self].exports]] if compare[name[xml] is_not constant[None]] begin[:] variable[r] assign[=] call[name[message].MethodReturnMessage, parameter[name[msg].serial]] call[name[self].conn.sendMessage, parameter[name[r]]] return[None] variable[o] assign[=] call[name[self].exports.get, parameter[name[msg].path, constant[None]]] if compare[name[o] is constant[None]] begin[:] call[name[self]._send_err, parameter[name[msg], constant[org.freedesktop.DBus.Error.UnknownObject], binary_operation[constant[%s is not an object provided by this process.] 
<ast.Mod object at 0x7da2590d6920> name[msg].path]]] return[None] if <ast.BoolOp object at 0x7da1b06dc760> begin[:] variable[i_and_p] assign[=] call[name[self].getManagedObjects, parameter[call[name[o].getObjectPath, parameter[]]]] variable[r] assign[=] call[name[message].MethodReturnMessage, parameter[name[msg].serial]] call[name[self].conn.sendMessage, parameter[name[r]]] return[None] variable[i] assign[=] constant[None] for taget[name[x]] in starred[call[name[o].getInterfaces, parameter[]]] begin[:] if name[msg].interface begin[:] if compare[name[x].name equal[==] name[msg].interface] begin[:] variable[i] assign[=] name[x] break variable[m] assign[=] constant[None] if name[i] begin[:] variable[m] assign[=] call[name[i].methods.get, parameter[name[msg].member, constant[None]]] if compare[name[m] is constant[None]] begin[:] call[name[self]._send_err, parameter[name[msg], constant[org.freedesktop.DBus.Error.UnknownMethod], binary_operation[constant[Method "%s" with signature "%s" on interface "%s" doesn't exist] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f01330>, <ast.BoolOp object at 0x7da207f03730>, <ast.BoolOp object at 0x7da207f00490>]]]]] return[None] variable[msig] assign[=] <ast.IfExp object at 0x7da207f02c50> variable[esig] assign[=] <ast.IfExp object at 0x7da207f02230> if compare[name[esig] not_equal[!=] name[msig]] begin[:] call[name[self]._send_err, parameter[name[msg], constant[org.freedesktop.DBus.Error.InvalidArgs], binary_operation[constant[Call to %s has wrong args (%s, expected %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da207f02740>, <ast.BoolOp object at 0x7da207f00790>, <ast.BoolOp object at 0x7da207f02ef0>]]]]] return[None] variable[d] assign[=] call[name[defer].maybeDeferred, parameter[name[o].executeMethod, name[i], name[msg].member, name[msg].body, name[msg].sender]] if name[msg].expectReply begin[:] def function[send_reply, parameter[return_values]]: if call[name[isinstance], 
parameter[name[return_values], tuple[[<ast.Name object at 0x7da207f01120>, <ast.Name object at 0x7da207f039a0>]]]] begin[:] if compare[name[m].nret equal[==] constant[1]] begin[:] variable[return_values] assign[=] list[[<ast.Name object at 0x7da207f00070>]] variable[r] assign[=] call[name[message].MethodReturnMessage, parameter[name[msg].serial]] call[name[self].conn.sendMessage, parameter[name[r]]] def function[send_error, parameter[err]]: variable[e] assign[=] name[err].value variable[errMsg] assign[=] call[name[err].getErrorMessage, parameter[]] variable[name] assign[=] constant[None] if call[name[hasattr], parameter[name[e], constant[dbusErrorName]]] begin[:] variable[name] assign[=] name[e].dbusErrorName if compare[name[name] is constant[None]] begin[:] variable[name] assign[=] binary_operation[constant[org.txdbus.PythonException.] + name[e].__class__.__name__] <ast.Try object at 0x7da207f03b20> variable[r] assign[=] call[name[message].ErrorMessage, parameter[name[name], name[msg].serial]] call[name[self].conn.sendMessage, parameter[name[r]]] call[name[d].addCallback, parameter[name[send_reply]]] call[name[d].addErrback, parameter[name[send_error]]]
keyword[def] identifier[handleMethodCallMessage] ( identifier[self] , identifier[msg] ): literal[string] keyword[if] ( identifier[msg] . identifier[interface] == literal[string] keyword[and] identifier[msg] . identifier[member] == literal[string] ): identifier[r] = identifier[message] . identifier[MethodReturnMessage] ( identifier[msg] . identifier[serial] , identifier[destination] = identifier[msg] . identifier[sender] , ) identifier[self] . identifier[conn] . identifier[sendMessage] ( identifier[r] ) keyword[return] keyword[if] ( identifier[msg] . identifier[interface] == literal[string] keyword[and] identifier[msg] . identifier[member] == literal[string] ): identifier[xml] = identifier[introspection] . identifier[generateIntrospectionXML] ( identifier[msg] . identifier[path] , identifier[self] . identifier[exports] , ) keyword[if] identifier[xml] keyword[is] keyword[not] keyword[None] : identifier[r] = identifier[message] . identifier[MethodReturnMessage] ( identifier[msg] . identifier[serial] , identifier[body] =[ identifier[xml] ], identifier[destination] = identifier[msg] . identifier[sender] , identifier[signature] = literal[string] , ) identifier[self] . identifier[conn] . identifier[sendMessage] ( identifier[r] ) keyword[return] identifier[o] = identifier[self] . identifier[exports] . identifier[get] ( identifier[msg] . identifier[path] , keyword[None] ) keyword[if] identifier[o] keyword[is] keyword[None] : identifier[self] . identifier[_send_err] ( identifier[msg] , literal[string] , literal[string] %( identifier[msg] . identifier[path] ), ) keyword[return] keyword[if] ( identifier[msg] . identifier[interface] == literal[string] keyword[and] identifier[msg] . identifier[member] == literal[string] ): identifier[i_and_p] = identifier[self] . identifier[getManagedObjects] ( identifier[o] . identifier[getObjectPath] ()) identifier[r] = identifier[message] . identifier[MethodReturnMessage] ( identifier[msg] . 
identifier[serial] , identifier[body] =[ identifier[i_and_p] ], identifier[destination] = identifier[msg] . identifier[sender] , identifier[signature] = literal[string] , ) identifier[self] . identifier[conn] . identifier[sendMessage] ( identifier[r] ) keyword[return] identifier[i] = keyword[None] keyword[for] identifier[x] keyword[in] identifier[o] . identifier[getInterfaces] (): keyword[if] identifier[msg] . identifier[interface] : keyword[if] identifier[x] . identifier[name] == identifier[msg] . identifier[interface] : identifier[i] = identifier[x] keyword[break] keyword[else] : keyword[if] identifier[msg] . identifier[member] keyword[in] identifier[x] . identifier[methods] : identifier[i] = identifier[x] keyword[break] identifier[m] = keyword[None] keyword[if] identifier[i] : identifier[m] = identifier[i] . identifier[methods] . identifier[get] ( identifier[msg] . identifier[member] , keyword[None] ) keyword[if] identifier[m] keyword[is] keyword[None] : identifier[self] . identifier[_send_err] ( identifier[msg] , literal[string] , ( literal[string] literal[string] )%( identifier[msg] . identifier[member] , identifier[msg] . identifier[signature] keyword[or] literal[string] , identifier[msg] . identifier[interface] keyword[or] literal[string] , ), ) keyword[return] identifier[msig] = identifier[msg] . identifier[signature] keyword[if] identifier[msg] . identifier[signature] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] identifier[esig] = identifier[m] . identifier[sigIn] keyword[if] identifier[m] . identifier[sigIn] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] keyword[if] identifier[esig] != identifier[msig] : identifier[self] . identifier[_send_err] ( identifier[msg] , literal[string] , literal[string] % ( identifier[msg] . identifier[member] , identifier[msg] . identifier[signature] keyword[or] literal[string] , identifier[m] . 
identifier[sigIn] keyword[or] literal[string] ) ) keyword[return] identifier[d] = identifier[defer] . identifier[maybeDeferred] ( identifier[o] . identifier[executeMethod] , identifier[i] , identifier[msg] . identifier[member] , identifier[msg] . identifier[body] , identifier[msg] . identifier[sender] , ) keyword[if] identifier[msg] . identifier[expectReply] : keyword[def] identifier[send_reply] ( identifier[return_values] ): keyword[if] identifier[isinstance] ( identifier[return_values] ,( identifier[list] , identifier[tuple] )): keyword[if] identifier[m] . identifier[nret] == literal[int] : identifier[return_values] =[ identifier[return_values] ] keyword[else] : identifier[return_values] =[ identifier[return_values] ] identifier[r] = identifier[message] . identifier[MethodReturnMessage] ( identifier[msg] . identifier[serial] , identifier[body] = identifier[return_values] , identifier[destination] = identifier[msg] . identifier[sender] , identifier[signature] = identifier[m] . identifier[sigOut] , ) identifier[self] . identifier[conn] . identifier[sendMessage] ( identifier[r] ) keyword[def] identifier[send_error] ( identifier[err] ): identifier[e] = identifier[err] . identifier[value] identifier[errMsg] = identifier[err] . identifier[getErrorMessage] () identifier[name] = keyword[None] keyword[if] identifier[hasattr] ( identifier[e] , literal[string] ): identifier[name] = identifier[e] . identifier[dbusErrorName] keyword[if] identifier[name] keyword[is] keyword[None] : identifier[name] = literal[string] + identifier[e] . identifier[__class__] . identifier[__name__] keyword[try] : identifier[marshal] . identifier[validateErrorName] ( identifier[name] ) keyword[except] identifier[error] . identifier[MarshallingError] : identifier[errMsg] =( literal[string] % identifier[name] )+ identifier[errMsg] identifier[name] = literal[string] identifier[r] = identifier[message] . identifier[ErrorMessage] ( identifier[name] , identifier[msg] . 
identifier[serial] , identifier[body] =[ identifier[errMsg] ], identifier[signature] = literal[string] , identifier[destination] = identifier[msg] . identifier[sender] ) identifier[self] . identifier[conn] . identifier[sendMessage] ( identifier[r] ) identifier[d] . identifier[addCallback] ( identifier[send_reply] ) identifier[d] . identifier[addErrback] ( identifier[send_error] )
def handleMethodCallMessage(self, msg): """ Handles DBus MethodCall messages on behalf of the DBus Connection and dispatches them to the appropriate exported object """ if msg.interface == 'org.freedesktop.DBus.Peer' and msg.member == 'Ping': r = message.MethodReturnMessage(msg.serial, destination=msg.sender) self.conn.sendMessage(r) return # depends on [control=['if'], data=[]] if msg.interface == 'org.freedesktop.DBus.Introspectable' and msg.member == 'Introspect': xml = introspection.generateIntrospectionXML(msg.path, self.exports) if xml is not None: r = message.MethodReturnMessage(msg.serial, body=[xml], destination=msg.sender, signature='s') self.conn.sendMessage(r) return # depends on [control=['if'], data=['xml']] # depends on [control=['if'], data=[]] # Try to get object from complete object path o = self.exports.get(msg.path, None) if o is None: self._send_err(msg, 'org.freedesktop.DBus.Error.UnknownObject', '%s is not an object provided by this process.' % msg.path) return # depends on [control=['if'], data=[]] if msg.interface == 'org.freedesktop.DBus.ObjectManager' and msg.member == 'GetManagedObjects': i_and_p = self.getManagedObjects(o.getObjectPath()) r = message.MethodReturnMessage(msg.serial, body=[i_and_p], destination=msg.sender, signature='a{oa{sa{sv}}}') self.conn.sendMessage(r) return # depends on [control=['if'], data=[]] i = None for x in o.getInterfaces(): if msg.interface: if x.name == msg.interface: i = x break # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif msg.member in x.methods: i = x break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] m = None if i: m = i.methods.get(msg.member, None) # depends on [control=['if'], data=[]] if m is None: self._send_err(msg, 'org.freedesktop.DBus.Error.UnknownMethod', 'Method "%s" with signature "%s" on interface "%s" doesn\'t exist' % (msg.member, msg.signature or '', msg.interface or '(null)')) return # depends on 
[control=['if'], data=[]] msig = msg.signature if msg.signature is not None else '' esig = m.sigIn if m.sigIn is not None else '' if esig != msig: self._send_err(msg, 'org.freedesktop.DBus.Error.InvalidArgs', 'Call to %s has wrong args (%s, expected %s)' % (msg.member, msg.signature or '', m.sigIn or '')) return # depends on [control=['if'], data=[]] d = defer.maybeDeferred(o.executeMethod, i, msg.member, msg.body, msg.sender) if msg.expectReply: def send_reply(return_values): if isinstance(return_values, (list, tuple)): if m.nret == 1: return_values = [return_values] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: return_values = [return_values] r = message.MethodReturnMessage(msg.serial, body=return_values, destination=msg.sender, signature=m.sigOut) self.conn.sendMessage(r) def send_error(err): e = err.value errMsg = err.getErrorMessage() name = None if hasattr(e, 'dbusErrorName'): name = e.dbusErrorName # depends on [control=['if'], data=[]] if name is None: name = 'org.txdbus.PythonException.' + e.__class__.__name__ # depends on [control=['if'], data=['name']] try: marshal.validateErrorName(name) # depends on [control=['try'], data=[]] except error.MarshallingError: errMsg = '!!(Invalid error name "%s")!! ' % name + errMsg name = 'org.txdbus.InvalidErrorName' # depends on [control=['except'], data=[]] r = message.ErrorMessage(name, msg.serial, body=[errMsg], signature='s', destination=msg.sender) self.conn.sendMessage(r) d.addCallback(send_reply) d.addErrback(send_error) # depends on [control=['if'], data=[]]
def get_power_factor(self, output='eigs', doping_levels=True, relaxation_time=1e-14): """ Gives the power factor (Seebeck^2 * conductivity) in units microW/(m*K^2) in either a full 3x3 tensor form, as 3 eigenvalues, or as the average value (trace/3.0) If doping_levels=True, the results are given at different p and n doping levels (given by self.doping), otherwise it is given as a series of electron chemical potential values Args: output (string): the type of output. 'tensor' give the full 3x3 tensor, 'eigs' its 3 eigenvalues and 'average' the average of the three eigenvalues doping_levels (boolean): True for the results to be given at different doping levels, False for results at different electron chemical potentials relaxation_time (float): constant relaxation time in secs Returns: If doping_levels=True, a dictionnary {temp:{'p':[],'n':[]}}. The 'p' links to power factor at p-type doping and 'n' to the conductivity at n-type doping. Otherwise, returns a {temp:[]} dictionary. The result contains either the sorted three eigenvalues of the symmetric power factor tensor (format='eigs') or a full tensor (3x3 array) ( output='tensor') or as an average (output='average'). 
The result includes a given constant relaxation time units are microW/(m K^2) """ result = None result_doping = None if doping_levels: result_doping = {doping: {t: [] for t in self._seebeck_doping[doping]} for doping in self._seebeck_doping} for doping in result_doping: for t in result_doping[doping]: for i in range(len(self.doping[doping])): full_tensor = np.dot(self._cond_doping[doping][t][i], np.dot( self._seebeck_doping[doping][ t][i], self._seebeck_doping[doping][ t][i])) result_doping[doping][t].append(full_tensor) else: result = {t: [] for t in self._seebeck} for t in result: for i in range(len(self.mu_steps)): full_tensor = np.dot(self._cond[t][i], np.dot(self._seebeck[t][i], self._seebeck[t][i])) result[t].append(full_tensor) return BoltztrapAnalyzer._format_to_output(result, result_doping, output, doping_levels, multi=1e6 * relaxation_time)
def function[get_power_factor, parameter[self, output, doping_levels, relaxation_time]]: constant[ Gives the power factor (Seebeck^2 * conductivity) in units microW/(m*K^2) in either a full 3x3 tensor form, as 3 eigenvalues, or as the average value (trace/3.0) If doping_levels=True, the results are given at different p and n doping levels (given by self.doping), otherwise it is given as a series of electron chemical potential values Args: output (string): the type of output. 'tensor' give the full 3x3 tensor, 'eigs' its 3 eigenvalues and 'average' the average of the three eigenvalues doping_levels (boolean): True for the results to be given at different doping levels, False for results at different electron chemical potentials relaxation_time (float): constant relaxation time in secs Returns: If doping_levels=True, a dictionnary {temp:{'p':[],'n':[]}}. The 'p' links to power factor at p-type doping and 'n' to the conductivity at n-type doping. Otherwise, returns a {temp:[]} dictionary. The result contains either the sorted three eigenvalues of the symmetric power factor tensor (format='eigs') or a full tensor (3x3 array) ( output='tensor') or as an average (output='average'). 
The result includes a given constant relaxation time units are microW/(m K^2) ] variable[result] assign[=] constant[None] variable[result_doping] assign[=] constant[None] if name[doping_levels] begin[:] variable[result_doping] assign[=] <ast.DictComp object at 0x7da1b1c10610> for taget[name[doping]] in starred[name[result_doping]] begin[:] for taget[name[t]] in starred[call[name[result_doping]][name[doping]]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[call[name[self].doping][name[doping]]]]]]] begin[:] variable[full_tensor] assign[=] call[name[np].dot, parameter[call[call[call[name[self]._cond_doping][name[doping]]][name[t]]][name[i]], call[name[np].dot, parameter[call[call[call[name[self]._seebeck_doping][name[doping]]][name[t]]][name[i]], call[call[call[name[self]._seebeck_doping][name[doping]]][name[t]]][name[i]]]]]] call[call[call[name[result_doping]][name[doping]]][name[t]].append, parameter[name[full_tensor]]] return[call[name[BoltztrapAnalyzer]._format_to_output, parameter[name[result], name[result_doping], name[output], name[doping_levels]]]]
keyword[def] identifier[get_power_factor] ( identifier[self] , identifier[output] = literal[string] , identifier[doping_levels] = keyword[True] , identifier[relaxation_time] = literal[int] ): literal[string] identifier[result] = keyword[None] identifier[result_doping] = keyword[None] keyword[if] identifier[doping_levels] : identifier[result_doping] ={ identifier[doping] :{ identifier[t] :[] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[_seebeck_doping] [ identifier[doping] ]} keyword[for] identifier[doping] keyword[in] identifier[self] . identifier[_seebeck_doping] } keyword[for] identifier[doping] keyword[in] identifier[result_doping] : keyword[for] identifier[t] keyword[in] identifier[result_doping] [ identifier[doping] ]: keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[doping] [ identifier[doping] ])): identifier[full_tensor] = identifier[np] . identifier[dot] ( identifier[self] . identifier[_cond_doping] [ identifier[doping] ][ identifier[t] ][ identifier[i] ], identifier[np] . identifier[dot] ( identifier[self] . identifier[_seebeck_doping] [ identifier[doping] ][ identifier[t] ][ identifier[i] ], identifier[self] . identifier[_seebeck_doping] [ identifier[doping] ][ identifier[t] ][ identifier[i] ])) identifier[result_doping] [ identifier[doping] ][ identifier[t] ]. identifier[append] ( identifier[full_tensor] ) keyword[else] : identifier[result] ={ identifier[t] :[] keyword[for] identifier[t] keyword[in] identifier[self] . identifier[_seebeck] } keyword[for] identifier[t] keyword[in] identifier[result] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[self] . identifier[mu_steps] )): identifier[full_tensor] = identifier[np] . identifier[dot] ( identifier[self] . identifier[_cond] [ identifier[t] ][ identifier[i] ], identifier[np] . identifier[dot] ( identifier[self] . 
identifier[_seebeck] [ identifier[t] ][ identifier[i] ], identifier[self] . identifier[_seebeck] [ identifier[t] ][ identifier[i] ])) identifier[result] [ identifier[t] ]. identifier[append] ( identifier[full_tensor] ) keyword[return] identifier[BoltztrapAnalyzer] . identifier[_format_to_output] ( identifier[result] , identifier[result_doping] , identifier[output] , identifier[doping_levels] , identifier[multi] = literal[int] * identifier[relaxation_time] )
def get_power_factor(self, output='eigs', doping_levels=True, relaxation_time=1e-14): """ Gives the power factor (Seebeck^2 * conductivity) in units microW/(m*K^2) in either a full 3x3 tensor form, as 3 eigenvalues, or as the average value (trace/3.0) If doping_levels=True, the results are given at different p and n doping levels (given by self.doping), otherwise it is given as a series of electron chemical potential values Args: output (string): the type of output. 'tensor' give the full 3x3 tensor, 'eigs' its 3 eigenvalues and 'average' the average of the three eigenvalues doping_levels (boolean): True for the results to be given at different doping levels, False for results at different electron chemical potentials relaxation_time (float): constant relaxation time in secs Returns: If doping_levels=True, a dictionnary {temp:{'p':[],'n':[]}}. The 'p' links to power factor at p-type doping and 'n' to the conductivity at n-type doping. Otherwise, returns a {temp:[]} dictionary. The result contains either the sorted three eigenvalues of the symmetric power factor tensor (format='eigs') or a full tensor (3x3 array) ( output='tensor') or as an average (output='average'). 
The result includes a given constant relaxation time units are microW/(m K^2) """ result = None result_doping = None if doping_levels: result_doping = {doping: {t: [] for t in self._seebeck_doping[doping]} for doping in self._seebeck_doping} for doping in result_doping: for t in result_doping[doping]: for i in range(len(self.doping[doping])): full_tensor = np.dot(self._cond_doping[doping][t][i], np.dot(self._seebeck_doping[doping][t][i], self._seebeck_doping[doping][t][i])) result_doping[doping][t].append(full_tensor) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['t']] # depends on [control=['for'], data=['doping']] # depends on [control=['if'], data=[]] else: result = {t: [] for t in self._seebeck} for t in result: for i in range(len(self.mu_steps)): full_tensor = np.dot(self._cond[t][i], np.dot(self._seebeck[t][i], self._seebeck[t][i])) result[t].append(full_tensor) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['t']] return BoltztrapAnalyzer._format_to_output(result, result_doping, output, doping_levels, multi=1000000.0 * relaxation_time)
def delete(self, shape=None, part=None, point=None): """Deletes the specified part of any shape by specifying a shape number, part number, or point number.""" # shape, part, point if shape and part and point: del self._shapes[shape][part][point] # shape, part elif shape and part and not point: del self._shapes[shape][part] # shape elif shape and not part and not point: del self._shapes[shape] # point elif not shape and not part and point: for s in self._shapes: if s.shapeType == 1: del self._shapes[point] else: for part in s.parts: del s[part][point] # part, point elif not shape and part and point: for s in self._shapes: del s[part][point] # part elif not shape and part and not point: for s in self._shapes: del s[part]
def function[delete, parameter[self, shape, part, point]]: constant[Deletes the specified part of any shape by specifying a shape number, part number, or point number.] if <ast.BoolOp object at 0x7da1b07a6290> begin[:] <ast.Delete object at 0x7da1b07a52d0>
keyword[def] identifier[delete] ( identifier[self] , identifier[shape] = keyword[None] , identifier[part] = keyword[None] , identifier[point] = keyword[None] ): literal[string] keyword[if] identifier[shape] keyword[and] identifier[part] keyword[and] identifier[point] : keyword[del] identifier[self] . identifier[_shapes] [ identifier[shape] ][ identifier[part] ][ identifier[point] ] keyword[elif] identifier[shape] keyword[and] identifier[part] keyword[and] keyword[not] identifier[point] : keyword[del] identifier[self] . identifier[_shapes] [ identifier[shape] ][ identifier[part] ] keyword[elif] identifier[shape] keyword[and] keyword[not] identifier[part] keyword[and] keyword[not] identifier[point] : keyword[del] identifier[self] . identifier[_shapes] [ identifier[shape] ] keyword[elif] keyword[not] identifier[shape] keyword[and] keyword[not] identifier[part] keyword[and] identifier[point] : keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_shapes] : keyword[if] identifier[s] . identifier[shapeType] == literal[int] : keyword[del] identifier[self] . identifier[_shapes] [ identifier[point] ] keyword[else] : keyword[for] identifier[part] keyword[in] identifier[s] . identifier[parts] : keyword[del] identifier[s] [ identifier[part] ][ identifier[point] ] keyword[elif] keyword[not] identifier[shape] keyword[and] identifier[part] keyword[and] identifier[point] : keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_shapes] : keyword[del] identifier[s] [ identifier[part] ][ identifier[point] ] keyword[elif] keyword[not] identifier[shape] keyword[and] identifier[part] keyword[and] keyword[not] identifier[point] : keyword[for] identifier[s] keyword[in] identifier[self] . identifier[_shapes] : keyword[del] identifier[s] [ identifier[part] ]
def delete(self, shape=None, part=None, point=None): """Deletes the specified part of any shape by specifying a shape number, part number, or point number.""" # shape, part, point if shape and part and point: del self._shapes[shape][part][point] # depends on [control=['if'], data=[]] # shape, part elif shape and part and (not point): del self._shapes[shape][part] # depends on [control=['if'], data=[]] # shape elif shape and (not part) and (not point): del self._shapes[shape] # depends on [control=['if'], data=[]] # point elif not shape and (not part) and point: for s in self._shapes: if s.shapeType == 1: del self._shapes[point] # depends on [control=['if'], data=[]] else: for part in s.parts: del s[part][point] # depends on [control=['for'], data=['part']] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]] # part, point elif not shape and part and point: for s in self._shapes: del s[part][point] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]] # part elif not shape and part and (not point): for s in self._shapes: del s[part] # depends on [control=['for'], data=['s']] # depends on [control=['if'], data=[]]
def get_egg_info(cfg, verbose=False): """Call 'setup egg_info' and return the parsed meta-data.""" result = Bunch() setup_py = cfg.rootjoin('setup.py') if not os.path.exists(setup_py): return result egg_info = shell.capture("python {} egg_info".format(setup_py), echo=True if verbose else None) for info_line in egg_info.splitlines(): if info_line.endswith('PKG-INFO'): pkg_info_file = info_line.split(None, 1)[1] result['__file__'] = pkg_info_file with io.open(pkg_info_file, encoding='utf-8') as handle: lastkey = None for line in handle: if line.lstrip() != line: assert lastkey, "Bad continuation in PKG-INFO file '{}': {}".format(pkg_info_file, line) result[lastkey] += '\n' + line else: lastkey, value = line.split(':', 1) lastkey = lastkey.strip().lower().replace('-', '_') value = value.strip() if lastkey in result: try: result[lastkey].append(value) except AttributeError: result[lastkey] = [result[lastkey], value] else: result[lastkey] = value for multikey in PKG_INFO_MULTIKEYS: if not isinstance(result.get(multikey, []), list): result[multikey] = [result[multikey]] return result
def function[get_egg_info, parameter[cfg, verbose]]: constant[Call 'setup egg_info' and return the parsed meta-data.] variable[result] assign[=] call[name[Bunch], parameter[]] variable[setup_py] assign[=] call[name[cfg].rootjoin, parameter[constant[setup.py]]] if <ast.UnaryOp object at 0x7da18dc05ea0> begin[:] return[name[result]] variable[egg_info] assign[=] call[name[shell].capture, parameter[call[constant[python {} egg_info].format, parameter[name[setup_py]]]]] for taget[name[info_line]] in starred[call[name[egg_info].splitlines, parameter[]]] begin[:] if call[name[info_line].endswith, parameter[constant[PKG-INFO]]] begin[:] variable[pkg_info_file] assign[=] call[call[name[info_line].split, parameter[constant[None], constant[1]]]][constant[1]] call[name[result]][constant[__file__]] assign[=] name[pkg_info_file] with call[name[io].open, parameter[name[pkg_info_file]]] begin[:] variable[lastkey] assign[=] constant[None] for taget[name[line]] in starred[name[handle]] begin[:] if compare[call[name[line].lstrip, parameter[]] not_equal[!=] name[line]] begin[:] assert[name[lastkey]] <ast.AugAssign object at 0x7da1b003c250> for taget[name[multikey]] in starred[name[PKG_INFO_MULTIKEYS]] begin[:] if <ast.UnaryOp object at 0x7da1b003f7c0> begin[:] call[name[result]][name[multikey]] assign[=] list[[<ast.Subscript object at 0x7da1b003f190>]] return[name[result]]
keyword[def] identifier[get_egg_info] ( identifier[cfg] , identifier[verbose] = keyword[False] ): literal[string] identifier[result] = identifier[Bunch] () identifier[setup_py] = identifier[cfg] . identifier[rootjoin] ( literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[setup_py] ): keyword[return] identifier[result] identifier[egg_info] = identifier[shell] . identifier[capture] ( literal[string] . identifier[format] ( identifier[setup_py] ), identifier[echo] = keyword[True] keyword[if] identifier[verbose] keyword[else] keyword[None] ) keyword[for] identifier[info_line] keyword[in] identifier[egg_info] . identifier[splitlines] (): keyword[if] identifier[info_line] . identifier[endswith] ( literal[string] ): identifier[pkg_info_file] = identifier[info_line] . identifier[split] ( keyword[None] , literal[int] )[ literal[int] ] identifier[result] [ literal[string] ]= identifier[pkg_info_file] keyword[with] identifier[io] . identifier[open] ( identifier[pkg_info_file] , identifier[encoding] = literal[string] ) keyword[as] identifier[handle] : identifier[lastkey] = keyword[None] keyword[for] identifier[line] keyword[in] identifier[handle] : keyword[if] identifier[line] . identifier[lstrip] ()!= identifier[line] : keyword[assert] identifier[lastkey] , literal[string] . identifier[format] ( identifier[pkg_info_file] , identifier[line] ) identifier[result] [ identifier[lastkey] ]+= literal[string] + identifier[line] keyword[else] : identifier[lastkey] , identifier[value] = identifier[line] . identifier[split] ( literal[string] , literal[int] ) identifier[lastkey] = identifier[lastkey] . identifier[strip] (). identifier[lower] (). identifier[replace] ( literal[string] , literal[string] ) identifier[value] = identifier[value] . identifier[strip] () keyword[if] identifier[lastkey] keyword[in] identifier[result] : keyword[try] : identifier[result] [ identifier[lastkey] ]. 
identifier[append] ( identifier[value] ) keyword[except] identifier[AttributeError] : identifier[result] [ identifier[lastkey] ]=[ identifier[result] [ identifier[lastkey] ], identifier[value] ] keyword[else] : identifier[result] [ identifier[lastkey] ]= identifier[value] keyword[for] identifier[multikey] keyword[in] identifier[PKG_INFO_MULTIKEYS] : keyword[if] keyword[not] identifier[isinstance] ( identifier[result] . identifier[get] ( identifier[multikey] ,[]), identifier[list] ): identifier[result] [ identifier[multikey] ]=[ identifier[result] [ identifier[multikey] ]] keyword[return] identifier[result]
def get_egg_info(cfg, verbose=False): """Call 'setup egg_info' and return the parsed meta-data.""" result = Bunch() setup_py = cfg.rootjoin('setup.py') if not os.path.exists(setup_py): return result # depends on [control=['if'], data=[]] egg_info = shell.capture('python {} egg_info'.format(setup_py), echo=True if verbose else None) for info_line in egg_info.splitlines(): if info_line.endswith('PKG-INFO'): pkg_info_file = info_line.split(None, 1)[1] result['__file__'] = pkg_info_file with io.open(pkg_info_file, encoding='utf-8') as handle: lastkey = None for line in handle: if line.lstrip() != line: assert lastkey, "Bad continuation in PKG-INFO file '{}': {}".format(pkg_info_file, line) result[lastkey] += '\n' + line # depends on [control=['if'], data=['line']] else: (lastkey, value) = line.split(':', 1) lastkey = lastkey.strip().lower().replace('-', '_') value = value.strip() if lastkey in result: try: result[lastkey].append(value) # depends on [control=['try'], data=[]] except AttributeError: result[lastkey] = [result[lastkey], value] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['lastkey', 'result']] else: result[lastkey] = value # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['handle']] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['info_line']] for multikey in PKG_INFO_MULTIKEYS: if not isinstance(result.get(multikey, []), list): result[multikey] = [result[multikey]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['multikey']] return result
def prune(matrix, threshold): """ Prune the matrix so that very small edges are removed. The maximum value in each column is never pruned. :param matrix: The matrix to be pruned :param threshold: The value below which edges will be removed :returns: The pruned matrix """ if isspmatrix(matrix): pruned = dok_matrix(matrix.shape) pruned[matrix >= threshold] = matrix[matrix >= threshold] pruned = pruned.tocsc() else: pruned = matrix.copy() pruned[pruned < threshold] = 0 # keep max value in each column. same behaviour for dense/sparse num_cols = matrix.shape[1] row_indices = matrix.argmax(axis=0).reshape((num_cols,)) col_indices = np.arange(num_cols) pruned[row_indices, col_indices] = matrix[row_indices, col_indices] return pruned
def function[prune, parameter[matrix, threshold]]: constant[ Prune the matrix so that very small edges are removed. The maximum value in each column is never pruned. :param matrix: The matrix to be pruned :param threshold: The value below which edges will be removed :returns: The pruned matrix ] if call[name[isspmatrix], parameter[name[matrix]]] begin[:] variable[pruned] assign[=] call[name[dok_matrix], parameter[name[matrix].shape]] call[name[pruned]][compare[name[matrix] greater_or_equal[>=] name[threshold]]] assign[=] call[name[matrix]][compare[name[matrix] greater_or_equal[>=] name[threshold]]] variable[pruned] assign[=] call[name[pruned].tocsc, parameter[]] variable[num_cols] assign[=] call[name[matrix].shape][constant[1]] variable[row_indices] assign[=] call[call[name[matrix].argmax, parameter[]].reshape, parameter[tuple[[<ast.Name object at 0x7da1b0714670>]]]] variable[col_indices] assign[=] call[name[np].arange, parameter[name[num_cols]]] call[name[pruned]][tuple[[<ast.Name object at 0x7da1b0716bc0>, <ast.Name object at 0x7da1b0715630>]]] assign[=] call[name[matrix]][tuple[[<ast.Name object at 0x7da1b0715ab0>, <ast.Name object at 0x7da1b0717fd0>]]] return[name[pruned]]
keyword[def] identifier[prune] ( identifier[matrix] , identifier[threshold] ): literal[string] keyword[if] identifier[isspmatrix] ( identifier[matrix] ): identifier[pruned] = identifier[dok_matrix] ( identifier[matrix] . identifier[shape] ) identifier[pruned] [ identifier[matrix] >= identifier[threshold] ]= identifier[matrix] [ identifier[matrix] >= identifier[threshold] ] identifier[pruned] = identifier[pruned] . identifier[tocsc] () keyword[else] : identifier[pruned] = identifier[matrix] . identifier[copy] () identifier[pruned] [ identifier[pruned] < identifier[threshold] ]= literal[int] identifier[num_cols] = identifier[matrix] . identifier[shape] [ literal[int] ] identifier[row_indices] = identifier[matrix] . identifier[argmax] ( identifier[axis] = literal[int] ). identifier[reshape] (( identifier[num_cols] ,)) identifier[col_indices] = identifier[np] . identifier[arange] ( identifier[num_cols] ) identifier[pruned] [ identifier[row_indices] , identifier[col_indices] ]= identifier[matrix] [ identifier[row_indices] , identifier[col_indices] ] keyword[return] identifier[pruned]
def prune(matrix, threshold): """ Prune the matrix so that very small edges are removed. The maximum value in each column is never pruned. :param matrix: The matrix to be pruned :param threshold: The value below which edges will be removed :returns: The pruned matrix """ if isspmatrix(matrix): pruned = dok_matrix(matrix.shape) pruned[matrix >= threshold] = matrix[matrix >= threshold] pruned = pruned.tocsc() # depends on [control=['if'], data=[]] else: pruned = matrix.copy() pruned[pruned < threshold] = 0 # keep max value in each column. same behaviour for dense/sparse num_cols = matrix.shape[1] row_indices = matrix.argmax(axis=0).reshape((num_cols,)) col_indices = np.arange(num_cols) pruned[row_indices, col_indices] = matrix[row_indices, col_indices] return pruned
def handle_publishable_m2m_changed( sender, instance, action, reverse, model, pk_set, **kwargs): """ Cache related published objects in `pre_clear` so they can be restored in `post_clear`. """ # Do nothing if the target model is not publishable. if not issubclass(model, PublishingModel): return # Get the right `ManyRelatedManager`. Iterate M2Ms and compare `sender` # (the through model), in case there are multiple M2Ms to the same model. if reverse: for rel_obj in instance._meta.get_all_related_many_to_many_objects(): if rel_obj.field.rel.through == sender: m2m = getattr(instance, rel_obj.get_accessor_name()) break else: for field in instance._meta.many_to_many: if field.rel.through == sender: m2m = getattr(instance, field.attname) break # Cache published PKs on the instance. if action == 'pre_clear': instance._published_m2m_cache = set( m2m.filter(publishing_is_draft=False).values_list('pk', flat=True)) # Add published PKs from the cache. if action == 'post_clear': m2m.add(*instance._published_m2m_cache) del instance._published_m2m_cache
def function[handle_publishable_m2m_changed, parameter[sender, instance, action, reverse, model, pk_set]]: constant[ Cache related published objects in `pre_clear` so they can be restored in `post_clear`. ] if <ast.UnaryOp object at 0x7da18dc06230> begin[:] return[None] if name[reverse] begin[:] for taget[name[rel_obj]] in starred[call[name[instance]._meta.get_all_related_many_to_many_objects, parameter[]]] begin[:] if compare[name[rel_obj].field.rel.through equal[==] name[sender]] begin[:] variable[m2m] assign[=] call[name[getattr], parameter[name[instance], call[name[rel_obj].get_accessor_name, parameter[]]]] break if compare[name[action] equal[==] constant[pre_clear]] begin[:] name[instance]._published_m2m_cache assign[=] call[name[set], parameter[call[call[name[m2m].filter, parameter[]].values_list, parameter[constant[pk]]]]] if compare[name[action] equal[==] constant[post_clear]] begin[:] call[name[m2m].add, parameter[<ast.Starred object at 0x7da1b0ebc280>]] <ast.Delete object at 0x7da1b0ebe2c0>
keyword[def] identifier[handle_publishable_m2m_changed] ( identifier[sender] , identifier[instance] , identifier[action] , identifier[reverse] , identifier[model] , identifier[pk_set] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[issubclass] ( identifier[model] , identifier[PublishingModel] ): keyword[return] keyword[if] identifier[reverse] : keyword[for] identifier[rel_obj] keyword[in] identifier[instance] . identifier[_meta] . identifier[get_all_related_many_to_many_objects] (): keyword[if] identifier[rel_obj] . identifier[field] . identifier[rel] . identifier[through] == identifier[sender] : identifier[m2m] = identifier[getattr] ( identifier[instance] , identifier[rel_obj] . identifier[get_accessor_name] ()) keyword[break] keyword[else] : keyword[for] identifier[field] keyword[in] identifier[instance] . identifier[_meta] . identifier[many_to_many] : keyword[if] identifier[field] . identifier[rel] . identifier[through] == identifier[sender] : identifier[m2m] = identifier[getattr] ( identifier[instance] , identifier[field] . identifier[attname] ) keyword[break] keyword[if] identifier[action] == literal[string] : identifier[instance] . identifier[_published_m2m_cache] = identifier[set] ( identifier[m2m] . identifier[filter] ( identifier[publishing_is_draft] = keyword[False] ). identifier[values_list] ( literal[string] , identifier[flat] = keyword[True] )) keyword[if] identifier[action] == literal[string] : identifier[m2m] . identifier[add] (* identifier[instance] . identifier[_published_m2m_cache] ) keyword[del] identifier[instance] . identifier[_published_m2m_cache]
def handle_publishable_m2m_changed(sender, instance, action, reverse, model, pk_set, **kwargs): """ Cache related published objects in `pre_clear` so they can be restored in `post_clear`. """ # Do nothing if the target model is not publishable. if not issubclass(model, PublishingModel): return # depends on [control=['if'], data=[]] # Get the right `ManyRelatedManager`. Iterate M2Ms and compare `sender` # (the through model), in case there are multiple M2Ms to the same model. if reverse: for rel_obj in instance._meta.get_all_related_many_to_many_objects(): if rel_obj.field.rel.through == sender: m2m = getattr(instance, rel_obj.get_accessor_name()) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['rel_obj']] # depends on [control=['if'], data=[]] else: for field in instance._meta.many_to_many: if field.rel.through == sender: m2m = getattr(instance, field.attname) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['field']] # Cache published PKs on the instance. if action == 'pre_clear': instance._published_m2m_cache = set(m2m.filter(publishing_is_draft=False).values_list('pk', flat=True)) # depends on [control=['if'], data=[]] # Add published PKs from the cache. if action == 'post_clear': m2m.add(*instance._published_m2m_cache) del instance._published_m2m_cache # depends on [control=['if'], data=[]]
def get_stacks_payment(state_engine, nameop, state_op_type): """ Find out how many tokens were paid for this nameop, if any. You need to have called state_create_put_preorder() *before* calling this on a NAME_REGISTRATION. Return {'status': True, 'token_units': ..., 'tokens_paid': ...} on success Return {'status': False} on error (i.e. this is not the right kind of namespace, there is not enough balance, there were no tokens paid, etc.) Throw exception on fatal error """ token_units = None tokens_paid = None name = nameop['name'] token_fee = nameop.get('token_fee', None) tokens_paid = None assert token_fee is None or isinstance(token_fee, (int,long)), 'Invalid token fee {} ({})'.format(token_fee, type(token_fee)) namespace_id = get_namespace_from_name(name) name_without_namespace = get_name_from_fq_name(name) namespace = state_engine.get_namespace(namespace_id) if state_op_type == 'NAME_REGISTRATION': # STACKs would already have been paid by a preorder. # find out how much, if any preorder = state_create_get_preorder(nameop) assert preorder, 'BUG: no preorder set' assert 'token_fee' in preorder, 'BUG: no token_fee set in preorder' assert 'token_units' in preorder, 'BUG: no token_units set in preorder' token_units = preorder['token_units'] tokens_paid = preorder['token_fee'] # must have paid STACKs if token_units != TOKEN_TYPE_STACKS: return {'status': False, 'error': 'Name {} paid for in {}, not {}'.format(name, token_units, TOKEN_TYPE_STACKS)} elif state_op_type == 'NAME_RENEWAL': # will have paid in Stacks in the nameop (but not yet debited the account, so we'll need to # check the account balance later on in check_renewal()) if token_fee is None or token_fee == 0: return {'status': False, 'error': 'No token fee given for {}'.format(name)} token_units = TOKEN_TYPE_STACKS tokens_paid = token_fee else: raise Exception("Unknown state operation type {}".format(state_op_type)) return {'status': True, 'tokens_paid': tokens_paid, 'token_units': token_units}
def function[get_stacks_payment, parameter[state_engine, nameop, state_op_type]]: constant[ Find out how many tokens were paid for this nameop, if any. You need to have called state_create_put_preorder() *before* calling this on a NAME_REGISTRATION. Return {'status': True, 'token_units': ..., 'tokens_paid': ...} on success Return {'status': False} on error (i.e. this is not the right kind of namespace, there is not enough balance, there were no tokens paid, etc.) Throw exception on fatal error ] variable[token_units] assign[=] constant[None] variable[tokens_paid] assign[=] constant[None] variable[name] assign[=] call[name[nameop]][constant[name]] variable[token_fee] assign[=] call[name[nameop].get, parameter[constant[token_fee], constant[None]]] variable[tokens_paid] assign[=] constant[None] assert[<ast.BoolOp object at 0x7da18bcc88e0>] variable[namespace_id] assign[=] call[name[get_namespace_from_name], parameter[name[name]]] variable[name_without_namespace] assign[=] call[name[get_name_from_fq_name], parameter[name[name]]] variable[namespace] assign[=] call[name[state_engine].get_namespace, parameter[name[namespace_id]]] if compare[name[state_op_type] equal[==] constant[NAME_REGISTRATION]] begin[:] variable[preorder] assign[=] call[name[state_create_get_preorder], parameter[name[nameop]]] assert[name[preorder]] assert[compare[constant[token_fee] in name[preorder]]] assert[compare[constant[token_units] in name[preorder]]] variable[token_units] assign[=] call[name[preorder]][constant[token_units]] variable[tokens_paid] assign[=] call[name[preorder]][constant[token_fee]] if compare[name[token_units] not_equal[!=] name[TOKEN_TYPE_STACKS]] begin[:] return[dictionary[[<ast.Constant object at 0x7da204344130>, <ast.Constant object at 0x7da204345960>], [<ast.Constant object at 0x7da204346c50>, <ast.Call object at 0x7da2043464a0>]]] return[dictionary[[<ast.Constant object at 0x7da204344fa0>, <ast.Constant object at 0x7da2043474c0>, <ast.Constant object at 0x7da2043459f0>], 
[<ast.Constant object at 0x7da204346530>, <ast.Name object at 0x7da2043457e0>, <ast.Name object at 0x7da204345b10>]]]
keyword[def] identifier[get_stacks_payment] ( identifier[state_engine] , identifier[nameop] , identifier[state_op_type] ): literal[string] identifier[token_units] = keyword[None] identifier[tokens_paid] = keyword[None] identifier[name] = identifier[nameop] [ literal[string] ] identifier[token_fee] = identifier[nameop] . identifier[get] ( literal[string] , keyword[None] ) identifier[tokens_paid] = keyword[None] keyword[assert] identifier[token_fee] keyword[is] keyword[None] keyword[or] identifier[isinstance] ( identifier[token_fee] ,( identifier[int] , identifier[long] )), literal[string] . identifier[format] ( identifier[token_fee] , identifier[type] ( identifier[token_fee] )) identifier[namespace_id] = identifier[get_namespace_from_name] ( identifier[name] ) identifier[name_without_namespace] = identifier[get_name_from_fq_name] ( identifier[name] ) identifier[namespace] = identifier[state_engine] . identifier[get_namespace] ( identifier[namespace_id] ) keyword[if] identifier[state_op_type] == literal[string] : identifier[preorder] = identifier[state_create_get_preorder] ( identifier[nameop] ) keyword[assert] identifier[preorder] , literal[string] keyword[assert] literal[string] keyword[in] identifier[preorder] , literal[string] keyword[assert] literal[string] keyword[in] identifier[preorder] , literal[string] identifier[token_units] = identifier[preorder] [ literal[string] ] identifier[tokens_paid] = identifier[preorder] [ literal[string] ] keyword[if] identifier[token_units] != identifier[TOKEN_TYPE_STACKS] : keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] . identifier[format] ( identifier[name] , identifier[token_units] , identifier[TOKEN_TYPE_STACKS] )} keyword[elif] identifier[state_op_type] == literal[string] : keyword[if] identifier[token_fee] keyword[is] keyword[None] keyword[or] identifier[token_fee] == literal[int] : keyword[return] { literal[string] : keyword[False] , literal[string] : literal[string] . 
identifier[format] ( identifier[name] )} identifier[token_units] = identifier[TOKEN_TYPE_STACKS] identifier[tokens_paid] = identifier[token_fee] keyword[else] : keyword[raise] identifier[Exception] ( literal[string] . identifier[format] ( identifier[state_op_type] )) keyword[return] { literal[string] : keyword[True] , literal[string] : identifier[tokens_paid] , literal[string] : identifier[token_units] }
def get_stacks_payment(state_engine, nameop, state_op_type): """ Find out how many tokens were paid for this nameop, if any. You need to have called state_create_put_preorder() *before* calling this on a NAME_REGISTRATION. Return {'status': True, 'token_units': ..., 'tokens_paid': ...} on success Return {'status': False} on error (i.e. this is not the right kind of namespace, there is not enough balance, there were no tokens paid, etc.) Throw exception on fatal error """ token_units = None tokens_paid = None name = nameop['name'] token_fee = nameop.get('token_fee', None) tokens_paid = None assert token_fee is None or isinstance(token_fee, (int, long)), 'Invalid token fee {} ({})'.format(token_fee, type(token_fee)) namespace_id = get_namespace_from_name(name) name_without_namespace = get_name_from_fq_name(name) namespace = state_engine.get_namespace(namespace_id) if state_op_type == 'NAME_REGISTRATION': # STACKs would already have been paid by a preorder. # find out how much, if any preorder = state_create_get_preorder(nameop) assert preorder, 'BUG: no preorder set' assert 'token_fee' in preorder, 'BUG: no token_fee set in preorder' assert 'token_units' in preorder, 'BUG: no token_units set in preorder' token_units = preorder['token_units'] tokens_paid = preorder['token_fee'] # must have paid STACKs if token_units != TOKEN_TYPE_STACKS: return {'status': False, 'error': 'Name {} paid for in {}, not {}'.format(name, token_units, TOKEN_TYPE_STACKS)} # depends on [control=['if'], data=['token_units', 'TOKEN_TYPE_STACKS']] # depends on [control=['if'], data=[]] elif state_op_type == 'NAME_RENEWAL': # will have paid in Stacks in the nameop (but not yet debited the account, so we'll need to # check the account balance later on in check_renewal()) if token_fee is None or token_fee == 0: return {'status': False, 'error': 'No token fee given for {}'.format(name)} # depends on [control=['if'], data=[]] token_units = TOKEN_TYPE_STACKS tokens_paid = token_fee # depends on 
[control=['if'], data=[]] else: raise Exception('Unknown state operation type {}'.format(state_op_type)) return {'status': True, 'tokens_paid': tokens_paid, 'token_units': token_units}
def task(name=None, t=INFO, *args, **kwargs): """ This decorator modifies current function such that its start, end, and duration is logged in console. If the task name is not given, it will attempt to infer it from the function name. Optionally, the decorator can log information into files. """ def c_run(name, f, t, args, kwargs): def run(*largs, **lkwargs): thread = __get_current_thread() old_name = __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = name r = log(name, f, t, largs, lkwargs, *args, **kwargs) __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = old_name return r return run if callable(name): f = name name = f.__name__ return c_run(name, f, t, args, kwargs) if name == None: def wrapped(f): name = f.__name__ return c_run(name, f, t, args, kwargs) return wrapped else: return lambda f: c_run(name, f, t, args, kwargs)
def function[task, parameter[name, t]]: constant[ This decorator modifies current function such that its start, end, and duration is logged in console. If the task name is not given, it will attempt to infer it from the function name. Optionally, the decorator can log information into files. ] def function[c_run, parameter[name, f, t, args, kwargs]]: def function[run, parameter[]]: variable[thread] assign[=] call[name[__get_current_thread], parameter[]] variable[old_name] assign[=] call[call[name[__THREAD_PARAMS]][name[thread]]][name[__THREAD_PARAMS_FNAME_KEY]] call[call[name[__THREAD_PARAMS]][name[thread]]][name[__THREAD_PARAMS_FNAME_KEY]] assign[=] name[name] variable[r] assign[=] call[name[log], parameter[name[name], name[f], name[t], name[largs], name[lkwargs], <ast.Starred object at 0x7da2044c26e0>]] call[call[name[__THREAD_PARAMS]][name[thread]]][name[__THREAD_PARAMS_FNAME_KEY]] assign[=] name[old_name] return[name[r]] return[name[run]] if call[name[callable], parameter[name[name]]] begin[:] variable[f] assign[=] name[name] variable[name] assign[=] name[f].__name__ return[call[name[c_run], parameter[name[name], name[f], name[t], name[args], name[kwargs]]]] if compare[name[name] equal[==] constant[None]] begin[:] def function[wrapped, parameter[f]]: variable[name] assign[=] name[f].__name__ return[call[name[c_run], parameter[name[name], name[f], name[t], name[args], name[kwargs]]]] return[name[wrapped]]
keyword[def] identifier[task] ( identifier[name] = keyword[None] , identifier[t] = identifier[INFO] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[def] identifier[c_run] ( identifier[name] , identifier[f] , identifier[t] , identifier[args] , identifier[kwargs] ): keyword[def] identifier[run] (* identifier[largs] ,** identifier[lkwargs] ): identifier[thread] = identifier[__get_current_thread] () identifier[old_name] = identifier[__THREAD_PARAMS] [ identifier[thread] ][ identifier[__THREAD_PARAMS_FNAME_KEY] ] identifier[__THREAD_PARAMS] [ identifier[thread] ][ identifier[__THREAD_PARAMS_FNAME_KEY] ]= identifier[name] identifier[r] = identifier[log] ( identifier[name] , identifier[f] , identifier[t] , identifier[largs] , identifier[lkwargs] ,* identifier[args] ,** identifier[kwargs] ) identifier[__THREAD_PARAMS] [ identifier[thread] ][ identifier[__THREAD_PARAMS_FNAME_KEY] ]= identifier[old_name] keyword[return] identifier[r] keyword[return] identifier[run] keyword[if] identifier[callable] ( identifier[name] ): identifier[f] = identifier[name] identifier[name] = identifier[f] . identifier[__name__] keyword[return] identifier[c_run] ( identifier[name] , identifier[f] , identifier[t] , identifier[args] , identifier[kwargs] ) keyword[if] identifier[name] == keyword[None] : keyword[def] identifier[wrapped] ( identifier[f] ): identifier[name] = identifier[f] . identifier[__name__] keyword[return] identifier[c_run] ( identifier[name] , identifier[f] , identifier[t] , identifier[args] , identifier[kwargs] ) keyword[return] identifier[wrapped] keyword[else] : keyword[return] keyword[lambda] identifier[f] : identifier[c_run] ( identifier[name] , identifier[f] , identifier[t] , identifier[args] , identifier[kwargs] )
def task(name=None, t=INFO, *args, **kwargs): """ This decorator modifies current function such that its start, end, and duration is logged in console. If the task name is not given, it will attempt to infer it from the function name. Optionally, the decorator can log information into files. """ def c_run(name, f, t, args, kwargs): def run(*largs, **lkwargs): thread = __get_current_thread() old_name = __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = name r = log(name, f, t, largs, lkwargs, *args, **kwargs) __THREAD_PARAMS[thread][__THREAD_PARAMS_FNAME_KEY] = old_name return r return run if callable(name): f = name name = f.__name__ return c_run(name, f, t, args, kwargs) # depends on [control=['if'], data=[]] if name == None: def wrapped(f): name = f.__name__ return c_run(name, f, t, args, kwargs) return wrapped # depends on [control=['if'], data=['name']] else: return lambda f: c_run(name, f, t, args, kwargs)
def __calculate_cluster_difference(self, index_cluster, difference): """! @brief Calculates distance from each object in specified cluster to specified object. @param[in] index_point (uint): Index point for which difference is calculated. @return (list) Distance from specified object to each object from input data in specified cluster. """ cluster_difference = 0.0 for index_point in self.__clusters[index_cluster]: cluster_difference += difference[index_point] return cluster_difference
def function[__calculate_cluster_difference, parameter[self, index_cluster, difference]]: constant[! @brief Calculates distance from each object in specified cluster to specified object. @param[in] index_point (uint): Index point for which difference is calculated. @return (list) Distance from specified object to each object from input data in specified cluster. ] variable[cluster_difference] assign[=] constant[0.0] for taget[name[index_point]] in starred[call[name[self].__clusters][name[index_cluster]]] begin[:] <ast.AugAssign object at 0x7da1b01b0340> return[name[cluster_difference]]
keyword[def] identifier[__calculate_cluster_difference] ( identifier[self] , identifier[index_cluster] , identifier[difference] ): literal[string] identifier[cluster_difference] = literal[int] keyword[for] identifier[index_point] keyword[in] identifier[self] . identifier[__clusters] [ identifier[index_cluster] ]: identifier[cluster_difference] += identifier[difference] [ identifier[index_point] ] keyword[return] identifier[cluster_difference]
def __calculate_cluster_difference(self, index_cluster, difference): """! @brief Calculates distance from each object in specified cluster to specified object. @param[in] index_point (uint): Index point for which difference is calculated. @return (list) Distance from specified object to each object from input data in specified cluster. """ cluster_difference = 0.0 for index_point in self.__clusters[index_cluster]: cluster_difference += difference[index_point] # depends on [control=['for'], data=['index_point']] return cluster_difference
def dump(self, indent='', depth=0, full=True): """ Diagnostic method for listing out the contents of a :class:`ParseResults`. Accepts an optional ``indent`` argument so that this string can be embedded in a nested display of other data. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(result.dump()) prints:: ['12', '/', '31', '/', '1999'] - day: 1999 - month: 31 - year: 12 """ out = [] NL = '\n' out.append( indent+_ustr(self.asList()) ) if full: if self.haskeys(): items = sorted((str(k), v) for k,v in self.items()) for k,v in items: if out: out.append(NL) out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) if isinstance(v,ParseResults): if v: out.append( v.dump(indent,depth+1) ) else: out.append(_ustr(v)) else: out.append(repr(v)) elif any(isinstance(vv,ParseResults) for vv in self): v = self for i,vv in enumerate(v): if isinstance(vv,ParseResults): out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) else: out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) return "".join(out)
def function[dump, parameter[self, indent, depth, full]]: constant[ Diagnostic method for listing out the contents of a :class:`ParseResults`. Accepts an optional ``indent`` argument so that this string can be embedded in a nested display of other data. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(result.dump()) prints:: ['12', '/', '31', '/', '1999'] - day: 1999 - month: 31 - year: 12 ] variable[out] assign[=] list[[]] variable[NL] assign[=] constant[ ] call[name[out].append, parameter[binary_operation[name[indent] + call[name[_ustr], parameter[call[name[self].asList, parameter[]]]]]]] if name[full] begin[:] if call[name[self].haskeys, parameter[]] begin[:] variable[items] assign[=] call[name[sorted], parameter[<ast.GeneratorExp object at 0x7da18dc07ac0>]] for taget[tuple[[<ast.Name object at 0x7da18dc05360>, <ast.Name object at 0x7da18dc04700>]]] in starred[name[items]] begin[:] if name[out] begin[:] call[name[out].append, parameter[name[NL]]] call[name[out].append, parameter[binary_operation[constant[%s%s- %s: ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18dc07ca0>, <ast.BinOp object at 0x7da18dc06e90>, <ast.Name object at 0x7da18dc07c70>]]]]] if call[name[isinstance], parameter[name[v], name[ParseResults]]] begin[:] if name[v] begin[:] call[name[out].append, parameter[call[name[v].dump, parameter[name[indent], binary_operation[name[depth] + constant[1]]]]]] return[call[constant[].join, parameter[name[out]]]]
keyword[def] identifier[dump] ( identifier[self] , identifier[indent] = literal[string] , identifier[depth] = literal[int] , identifier[full] = keyword[True] ): literal[string] identifier[out] =[] identifier[NL] = literal[string] identifier[out] . identifier[append] ( identifier[indent] + identifier[_ustr] ( identifier[self] . identifier[asList] ())) keyword[if] identifier[full] : keyword[if] identifier[self] . identifier[haskeys] (): identifier[items] = identifier[sorted] (( identifier[str] ( identifier[k] ), identifier[v] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[self] . identifier[items] ()) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[items] : keyword[if] identifier[out] : identifier[out] . identifier[append] ( identifier[NL] ) identifier[out] . identifier[append] ( literal[string] %( identifier[indent] ,( literal[string] * identifier[depth] ), identifier[k] )) keyword[if] identifier[isinstance] ( identifier[v] , identifier[ParseResults] ): keyword[if] identifier[v] : identifier[out] . identifier[append] ( identifier[v] . identifier[dump] ( identifier[indent] , identifier[depth] + literal[int] )) keyword[else] : identifier[out] . identifier[append] ( identifier[_ustr] ( identifier[v] )) keyword[else] : identifier[out] . identifier[append] ( identifier[repr] ( identifier[v] )) keyword[elif] identifier[any] ( identifier[isinstance] ( identifier[vv] , identifier[ParseResults] ) keyword[for] identifier[vv] keyword[in] identifier[self] ): identifier[v] = identifier[self] keyword[for] identifier[i] , identifier[vv] keyword[in] identifier[enumerate] ( identifier[v] ): keyword[if] identifier[isinstance] ( identifier[vv] , identifier[ParseResults] ): identifier[out] . identifier[append] ( literal[string] %( identifier[indent] ,( literal[string] *( identifier[depth] )), identifier[i] , identifier[indent] ,( literal[string] *( identifier[depth] + literal[int] )), identifier[vv] . 
identifier[dump] ( identifier[indent] , identifier[depth] + literal[int] ))) keyword[else] : identifier[out] . identifier[append] ( literal[string] %( identifier[indent] ,( literal[string] *( identifier[depth] )), identifier[i] , identifier[indent] ,( literal[string] *( identifier[depth] + literal[int] )), identifier[_ustr] ( identifier[vv] ))) keyword[return] literal[string] . identifier[join] ( identifier[out] )
def dump(self, indent='', depth=0, full=True): """ Diagnostic method for listing out the contents of a :class:`ParseResults`. Accepts an optional ``indent`` argument so that this string can be embedded in a nested display of other data. Example:: integer = Word(nums) date_str = integer("year") + '/' + integer("month") + '/' + integer("day") result = date_str.parseString('12/31/1999') print(result.dump()) prints:: ['12', '/', '31', '/', '1999'] - day: 1999 - month: 31 - year: 12 """ out = [] NL = '\n' out.append(indent + _ustr(self.asList())) if full: if self.haskeys(): items = sorted(((str(k), v) for (k, v) in self.items())) for (k, v) in items: if out: out.append(NL) # depends on [control=['if'], data=[]] out.append('%s%s- %s: ' % (indent, ' ' * depth, k)) if isinstance(v, ParseResults): if v: out.append(v.dump(indent, depth + 1)) # depends on [control=['if'], data=[]] else: out.append(_ustr(v)) # depends on [control=['if'], data=[]] else: out.append(repr(v)) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] elif any((isinstance(vv, ParseResults) for vv in self)): v = self for (i, vv) in enumerate(v): if isinstance(vv, ParseResults): out.append('\n%s%s[%d]:\n%s%s%s' % (indent, ' ' * depth, i, indent, ' ' * (depth + 1), vv.dump(indent, depth + 1))) # depends on [control=['if'], data=[]] else: out.append('\n%s%s[%d]:\n%s%s%s' % (indent, ' ' * depth, i, indent, ' ' * (depth + 1), _ustr(vv))) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return ''.join(out)
def serializer_by_type_id(self, type_id): """ Find and return the serializer for the type-id :param type_id: type-id the serializer :return: the serializer """ if type_id <= 0: indx = index_for_default_type(type_id) serializer = self._constant_type_ids.get(indx, None) if serializer is not None: return serializer return self._id_dic.get(type_id, None)
def function[serializer_by_type_id, parameter[self, type_id]]: constant[ Find and return the serializer for the type-id :param type_id: type-id the serializer :return: the serializer ] if compare[name[type_id] less_or_equal[<=] constant[0]] begin[:] variable[indx] assign[=] call[name[index_for_default_type], parameter[name[type_id]]] variable[serializer] assign[=] call[name[self]._constant_type_ids.get, parameter[name[indx], constant[None]]] if compare[name[serializer] is_not constant[None]] begin[:] return[name[serializer]] return[call[name[self]._id_dic.get, parameter[name[type_id], constant[None]]]]
keyword[def] identifier[serializer_by_type_id] ( identifier[self] , identifier[type_id] ): literal[string] keyword[if] identifier[type_id] <= literal[int] : identifier[indx] = identifier[index_for_default_type] ( identifier[type_id] ) identifier[serializer] = identifier[self] . identifier[_constant_type_ids] . identifier[get] ( identifier[indx] , keyword[None] ) keyword[if] identifier[serializer] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[serializer] keyword[return] identifier[self] . identifier[_id_dic] . identifier[get] ( identifier[type_id] , keyword[None] )
def serializer_by_type_id(self, type_id): """ Find and return the serializer for the type-id :param type_id: type-id the serializer :return: the serializer """ if type_id <= 0: indx = index_for_default_type(type_id) serializer = self._constant_type_ids.get(indx, None) if serializer is not None: return serializer # depends on [control=['if'], data=['serializer']] # depends on [control=['if'], data=['type_id']] return self._id_dic.get(type_id, None)
def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): """Create a new :class:`~ipfsapi.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ ~ipfsapi.exceptions.VersionMismatch ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfsapi.Client` class. Returns ------- ~ipfsapi.Client """ # Create client instance client = Client(host, port, base, chunk_size, **defaults) # Query version number from daemon and validate it assert_version(client.version()['Version']) return client
def function[connect, parameter[host, port, base, chunk_size]]: constant[Create a new :class:`~ipfsapi.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ ~ipfsapi.exceptions.VersionMismatch ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfsapi.Client` class. Returns ------- ~ipfsapi.Client ] variable[client] assign[=] call[name[Client], parameter[name[host], name[port], name[base], name[chunk_size]]] call[name[assert_version], parameter[call[call[name[client].version, parameter[]]][constant[Version]]]] return[name[client]]
keyword[def] identifier[connect] ( identifier[host] = identifier[DEFAULT_HOST] , identifier[port] = identifier[DEFAULT_PORT] , identifier[base] = identifier[DEFAULT_BASE] , identifier[chunk_size] = identifier[multipart] . identifier[default_chunk_size] ,** identifier[defaults] ): literal[string] identifier[client] = identifier[Client] ( identifier[host] , identifier[port] , identifier[base] , identifier[chunk_size] ,** identifier[defaults] ) identifier[assert_version] ( identifier[client] . identifier[version] ()[ literal[string] ]) keyword[return] identifier[client]
def connect(host=DEFAULT_HOST, port=DEFAULT_PORT, base=DEFAULT_BASE, chunk_size=multipart.default_chunk_size, **defaults): """Create a new :class:`~ipfsapi.Client` instance and connect to the daemon to validate that its version is supported. Raises ------ ~ipfsapi.exceptions.VersionMismatch ~ipfsapi.exceptions.ErrorResponse ~ipfsapi.exceptions.ConnectionError ~ipfsapi.exceptions.ProtocolError ~ipfsapi.exceptions.StatusError ~ipfsapi.exceptions.TimeoutError All parameters are identical to those passed to the constructor of the :class:`~ipfsapi.Client` class. Returns ------- ~ipfsapi.Client """ # Create client instance client = Client(host, port, base, chunk_size, **defaults) # Query version number from daemon and validate it assert_version(client.version()['Version']) return client
def show_result(resource, verbose=False): """ TODO """ if resource.uri == surf.ns.EFRBROO['F10_Person']: print("\n{} ({})\n".format(unicode(resource), resource.get_urn())) works = resource.get_works() print("Works by {} ({}):\n".format(resource, len(works))) [show_result(work) for work in works] print("\n") elif resource.uri == surf.ns.EFRBROO['F1_Work']: if verbose: print("\n{} ({})".format(unicode(resource), resource.get_urn())) print("\nTitles:") print("\n".join(["{:20} ({})".format(title, lang) for lang, title in resource.get_titles()])) if len(resource.get_abbreviations()) > 0: print("\nAbbreviations: {}\n".format(", ".join(["{}".format(abbr) for abbr in resource.get_abbreviations()]))) else: print("{:50} {:40}".format(unicode(resource), resource.get_urn()))
def function[show_result, parameter[resource, verbose]]: constant[ TODO ] if compare[name[resource].uri equal[==] call[name[surf].ns.EFRBROO][constant[F10_Person]]] begin[:] call[name[print], parameter[call[constant[ {} ({}) ].format, parameter[call[name[unicode], parameter[name[resource]]], call[name[resource].get_urn, parameter[]]]]]] variable[works] assign[=] call[name[resource].get_works, parameter[]] call[name[print], parameter[call[constant[Works by {} ({}): ].format, parameter[name[resource], call[name[len], parameter[name[works]]]]]]] <ast.ListComp object at 0x7da20e9b1930> call[name[print], parameter[constant[ ]]]
keyword[def] identifier[show_result] ( identifier[resource] , identifier[verbose] = keyword[False] ): literal[string] keyword[if] identifier[resource] . identifier[uri] == identifier[surf] . identifier[ns] . identifier[EFRBROO] [ literal[string] ]: identifier[print] ( literal[string] . identifier[format] ( identifier[unicode] ( identifier[resource] ), identifier[resource] . identifier[get_urn] ())) identifier[works] = identifier[resource] . identifier[get_works] () identifier[print] ( literal[string] . identifier[format] ( identifier[resource] , identifier[len] ( identifier[works] ))) [ identifier[show_result] ( identifier[work] ) keyword[for] identifier[work] keyword[in] identifier[works] ] identifier[print] ( literal[string] ) keyword[elif] identifier[resource] . identifier[uri] == identifier[surf] . identifier[ns] . identifier[EFRBROO] [ literal[string] ]: keyword[if] identifier[verbose] : identifier[print] ( literal[string] . identifier[format] ( identifier[unicode] ( identifier[resource] ), identifier[resource] . identifier[get_urn] ())) identifier[print] ( literal[string] ) identifier[print] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[title] , identifier[lang] ) keyword[for] identifier[lang] , identifier[title] keyword[in] identifier[resource] . identifier[get_titles] ()])) keyword[if] identifier[len] ( identifier[resource] . identifier[get_abbreviations] ())> literal[int] : identifier[print] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ([ literal[string] . identifier[format] ( identifier[abbr] ) keyword[for] identifier[abbr] keyword[in] identifier[resource] . identifier[get_abbreviations] ()]))) keyword[else] : identifier[print] ( literal[string] . identifier[format] ( identifier[unicode] ( identifier[resource] ), identifier[resource] . identifier[get_urn] ()))
def show_result(resource, verbose=False): """ TODO """ if resource.uri == surf.ns.EFRBROO['F10_Person']: print('\n{} ({})\n'.format(unicode(resource), resource.get_urn())) works = resource.get_works() print('Works by {} ({}):\n'.format(resource, len(works))) [show_result(work) for work in works] print('\n') # depends on [control=['if'], data=[]] elif resource.uri == surf.ns.EFRBROO['F1_Work']: if verbose: print('\n{} ({})'.format(unicode(resource), resource.get_urn())) print('\nTitles:') print('\n'.join(['{:20} ({})'.format(title, lang) for (lang, title) in resource.get_titles()])) if len(resource.get_abbreviations()) > 0: print('\nAbbreviations: {}\n'.format(', '.join(['{}'.format(abbr) for abbr in resource.get_abbreviations()]))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: print('{:50} {:40}'.format(unicode(resource), resource.get_urn())) # depends on [control=['if'], data=[]]
def section(rows, columns, items, label=None): """A section consisting of rows and columns""" # TODO: Integrate label sections = [] column_class = "section-column col-sm-%i" % (12 / columns) for vertical in range(columns): column_items = [] for horizontal in range(rows): try: item = items[horizontal][vertical] column_items.append(item) except IndexError: hfoslog('Field in', label, 'omitted, due to missing row/column:', vertical, horizontal, lvl=warn, emitter='FORMS', tb=True, frame=2) column = { 'type': 'section', 'htmlClass': column_class, 'items': column_items } sections.append(column) result = { 'type': 'section', 'htmlClass': 'row', 'items': sections } return result
def function[section, parameter[rows, columns, items, label]]: constant[A section consisting of rows and columns] variable[sections] assign[=] list[[]] variable[column_class] assign[=] binary_operation[constant[section-column col-sm-%i] <ast.Mod object at 0x7da2590d6920> binary_operation[constant[12] / name[columns]]] for taget[name[vertical]] in starred[call[name[range], parameter[name[columns]]]] begin[:] variable[column_items] assign[=] list[[]] for taget[name[horizontal]] in starred[call[name[range], parameter[name[rows]]]] begin[:] <ast.Try object at 0x7da1b0facbe0> variable[column] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fad9f0>, <ast.Constant object at 0x7da1b0fad150>, <ast.Constant object at 0x7da1b0fac9a0>], [<ast.Constant object at 0x7da1b0fada80>, <ast.Name object at 0x7da1b0fadc60>, <ast.Name object at 0x7da1b0fae1d0>]] call[name[sections].append, parameter[name[column]]] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da1b0fac6d0>, <ast.Constant object at 0x7da1b0faf550>, <ast.Constant object at 0x7da1b0facb50>], [<ast.Constant object at 0x7da1b0fac310>, <ast.Constant object at 0x7da1b0fae560>, <ast.Name object at 0x7da1b0fae260>]] return[name[result]]
keyword[def] identifier[section] ( identifier[rows] , identifier[columns] , identifier[items] , identifier[label] = keyword[None] ): literal[string] identifier[sections] =[] identifier[column_class] = literal[string] %( literal[int] / identifier[columns] ) keyword[for] identifier[vertical] keyword[in] identifier[range] ( identifier[columns] ): identifier[column_items] =[] keyword[for] identifier[horizontal] keyword[in] identifier[range] ( identifier[rows] ): keyword[try] : identifier[item] = identifier[items] [ identifier[horizontal] ][ identifier[vertical] ] identifier[column_items] . identifier[append] ( identifier[item] ) keyword[except] identifier[IndexError] : identifier[hfoslog] ( literal[string] , identifier[label] , literal[string] , identifier[vertical] , identifier[horizontal] , identifier[lvl] = identifier[warn] , identifier[emitter] = literal[string] , identifier[tb] = keyword[True] , identifier[frame] = literal[int] ) identifier[column] ={ literal[string] : literal[string] , literal[string] : identifier[column_class] , literal[string] : identifier[column_items] } identifier[sections] . identifier[append] ( identifier[column] ) identifier[result] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : identifier[sections] } keyword[return] identifier[result]
def section(rows, columns, items, label=None): """A section consisting of rows and columns""" # TODO: Integrate label sections = [] column_class = 'section-column col-sm-%i' % (12 / columns) for vertical in range(columns): column_items = [] for horizontal in range(rows): try: item = items[horizontal][vertical] column_items.append(item) # depends on [control=['try'], data=[]] except IndexError: hfoslog('Field in', label, 'omitted, due to missing row/column:', vertical, horizontal, lvl=warn, emitter='FORMS', tb=True, frame=2) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['horizontal']] column = {'type': 'section', 'htmlClass': column_class, 'items': column_items} sections.append(column) # depends on [control=['for'], data=['vertical']] result = {'type': 'section', 'htmlClass': 'row', 'items': sections} return result
def solve(self): """Perform the solve. """ with log_duration(self._print, "memcache get (resolve) took %s"): solver_dict = self._get_cached_solve() if solver_dict: self.from_cache = True self._set_result(solver_dict) else: self.from_cache = False solver = self._solve() solver_dict = self._solver_to_dict(solver) self._set_result(solver_dict) with log_duration(self._print, "memcache set (resolve) took %s"): self._set_cached_solve(solver_dict)
def function[solve, parameter[self]]: constant[Perform the solve. ] with call[name[log_duration], parameter[name[self]._print, constant[memcache get (resolve) took %s]]] begin[:] variable[solver_dict] assign[=] call[name[self]._get_cached_solve, parameter[]] if name[solver_dict] begin[:] name[self].from_cache assign[=] constant[True] call[name[self]._set_result, parameter[name[solver_dict]]]
keyword[def] identifier[solve] ( identifier[self] ): literal[string] keyword[with] identifier[log_duration] ( identifier[self] . identifier[_print] , literal[string] ): identifier[solver_dict] = identifier[self] . identifier[_get_cached_solve] () keyword[if] identifier[solver_dict] : identifier[self] . identifier[from_cache] = keyword[True] identifier[self] . identifier[_set_result] ( identifier[solver_dict] ) keyword[else] : identifier[self] . identifier[from_cache] = keyword[False] identifier[solver] = identifier[self] . identifier[_solve] () identifier[solver_dict] = identifier[self] . identifier[_solver_to_dict] ( identifier[solver] ) identifier[self] . identifier[_set_result] ( identifier[solver_dict] ) keyword[with] identifier[log_duration] ( identifier[self] . identifier[_print] , literal[string] ): identifier[self] . identifier[_set_cached_solve] ( identifier[solver_dict] )
def solve(self): """Perform the solve. """ with log_duration(self._print, 'memcache get (resolve) took %s'): solver_dict = self._get_cached_solve() # depends on [control=['with'], data=[]] if solver_dict: self.from_cache = True self._set_result(solver_dict) # depends on [control=['if'], data=[]] else: self.from_cache = False solver = self._solve() solver_dict = self._solver_to_dict(solver) self._set_result(solver_dict) with log_duration(self._print, 'memcache set (resolve) took %s'): self._set_cached_solve(solver_dict) # depends on [control=['with'], data=[]]
def to_array(self): """ Serializes this InlineQueryResultCachedSticker to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultCachedSticker, self).to_array() array['type'] = u(self.type) # py2: type unicode, py3: type str array['id'] = u(self.id) # py2: type unicode, py3: type str array['sticker_file_id'] = u(self.sticker_file_id) # py2: type unicode, py3: type str if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent return array
def function[to_array, parameter[self]]: constant[ Serializes this InlineQueryResultCachedSticker to a dictionary. :return: dictionary representation of this object. :rtype: dict ] variable[array] assign[=] call[call[name[super], parameter[name[InlineQueryResultCachedSticker], name[self]]].to_array, parameter[]] call[name[array]][constant[type]] assign[=] call[name[u], parameter[name[self].type]] call[name[array]][constant[id]] assign[=] call[name[u], parameter[name[self].id]] call[name[array]][constant[sticker_file_id]] assign[=] call[name[u], parameter[name[self].sticker_file_id]] if compare[name[self].reply_markup is_not constant[None]] begin[:] call[name[array]][constant[reply_markup]] assign[=] call[name[self].reply_markup.to_array, parameter[]] if compare[name[self].input_message_content is_not constant[None]] begin[:] call[name[array]][constant[input_message_content]] assign[=] call[name[self].input_message_content.to_array, parameter[]] return[name[array]]
keyword[def] identifier[to_array] ( identifier[self] ): literal[string] identifier[array] = identifier[super] ( identifier[InlineQueryResultCachedSticker] , identifier[self] ). identifier[to_array] () identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[type] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[id] ) identifier[array] [ literal[string] ]= identifier[u] ( identifier[self] . identifier[sticker_file_id] ) keyword[if] identifier[self] . identifier[reply_markup] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[reply_markup] . identifier[to_array] () keyword[if] identifier[self] . identifier[input_message_content] keyword[is] keyword[not] keyword[None] : identifier[array] [ literal[string] ]= identifier[self] . identifier[input_message_content] . identifier[to_array] () keyword[return] identifier[array]
def to_array(self): """ Serializes this InlineQueryResultCachedSticker to a dictionary. :return: dictionary representation of this object. :rtype: dict """ array = super(InlineQueryResultCachedSticker, self).to_array() array['type'] = u(self.type) # py2: type unicode, py3: type str array['id'] = u(self.id) # py2: type unicode, py3: type str array['sticker_file_id'] = u(self.sticker_file_id) # py2: type unicode, py3: type str if self.reply_markup is not None: array['reply_markup'] = self.reply_markup.to_array() # type InlineKeyboardMarkup # depends on [control=['if'], data=[]] if self.input_message_content is not None: array['input_message_content'] = self.input_message_content.to_array() # type InputMessageContent # depends on [control=['if'], data=[]] return array
def create(self, list_id, data): """ adds a new segment to the list. """ return self._mc_client._post(url=self._build_path(list_id, 'segments'), data=data)
def function[create, parameter[self, list_id, data]]: constant[ adds a new segment to the list. ] return[call[name[self]._mc_client._post, parameter[]]]
keyword[def] identifier[create] ( identifier[self] , identifier[list_id] , identifier[data] ): literal[string] keyword[return] identifier[self] . identifier[_mc_client] . identifier[_post] ( identifier[url] = identifier[self] . identifier[_build_path] ( identifier[list_id] , literal[string] ), identifier[data] = identifier[data] )
def create(self, list_id, data): """ adds a new segment to the list. """ return self._mc_client._post(url=self._build_path(list_id, 'segments'), data=data)
def sort_common_members(): """Sorts the keys and members""" filename = PREFIX + '/common_members.json' sorted_json_data = {} json_data = read_json(filename) all_keys = [] for key, value in json_data.items(): all_keys.append(key) sorted_keys = sorted(all_keys) for key in sorted_keys: if len(json_data[key]) > 0: # Only add modules which have common members sorted_json_data[key] = sorted(json_data[key]) print('--> Sorted/cleaned ' + os.path.basename(filename)) write_json(sorted_json_data, filename)
def function[sort_common_members, parameter[]]: constant[Sorts the keys and members] variable[filename] assign[=] binary_operation[name[PREFIX] + constant[/common_members.json]] variable[sorted_json_data] assign[=] dictionary[[], []] variable[json_data] assign[=] call[name[read_json], parameter[name[filename]]] variable[all_keys] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b17f7c10>, <ast.Name object at 0x7da1b17f6b30>]]] in starred[call[name[json_data].items, parameter[]]] begin[:] call[name[all_keys].append, parameter[name[key]]] variable[sorted_keys] assign[=] call[name[sorted], parameter[name[all_keys]]] for taget[name[key]] in starred[name[sorted_keys]] begin[:] if compare[call[name[len], parameter[call[name[json_data]][name[key]]]] greater[>] constant[0]] begin[:] call[name[sorted_json_data]][name[key]] assign[=] call[name[sorted], parameter[call[name[json_data]][name[key]]]] call[name[print], parameter[binary_operation[constant[--> Sorted/cleaned ] + call[name[os].path.basename, parameter[name[filename]]]]]] call[name[write_json], parameter[name[sorted_json_data], name[filename]]]
keyword[def] identifier[sort_common_members] (): literal[string] identifier[filename] = identifier[PREFIX] + literal[string] identifier[sorted_json_data] ={} identifier[json_data] = identifier[read_json] ( identifier[filename] ) identifier[all_keys] =[] keyword[for] identifier[key] , identifier[value] keyword[in] identifier[json_data] . identifier[items] (): identifier[all_keys] . identifier[append] ( identifier[key] ) identifier[sorted_keys] = identifier[sorted] ( identifier[all_keys] ) keyword[for] identifier[key] keyword[in] identifier[sorted_keys] : keyword[if] identifier[len] ( identifier[json_data] [ identifier[key] ])> literal[int] : identifier[sorted_json_data] [ identifier[key] ]= identifier[sorted] ( identifier[json_data] [ identifier[key] ]) identifier[print] ( literal[string] + identifier[os] . identifier[path] . identifier[basename] ( identifier[filename] )) identifier[write_json] ( identifier[sorted_json_data] , identifier[filename] )
def sort_common_members(): """Sorts the keys and members""" filename = PREFIX + '/common_members.json' sorted_json_data = {} json_data = read_json(filename) all_keys = [] for (key, value) in json_data.items(): all_keys.append(key) # depends on [control=['for'], data=[]] sorted_keys = sorted(all_keys) for key in sorted_keys: if len(json_data[key]) > 0: # Only add modules which have common members sorted_json_data[key] = sorted(json_data[key]) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] print('--> Sorted/cleaned ' + os.path.basename(filename)) write_json(sorted_json_data, filename)
def writeOffsetTable(self, output): """Writes all of the object reference offsets.""" all_positions = [] writtenReferences = list(self.writtenReferences.items()) writtenReferences.sort(key=lambda x: x[1]) for obj,order in writtenReferences: # Porting note: Elsewhere we deliberately replace empty unicdoe strings # with empty binary strings, but the empty unicode string # goes into writtenReferences. This isn't an issue in Py2 # because u'' and b'' have the same hash; but it is in # Py3, where they don't. if bytes != str and obj == unicodeEmpty: obj = b'' position = self.referencePositions.get(obj) if position is None: raise InvalidPlistException("Error while writing offsets table. Object not found. %s" % obj) output += self.binaryInt(position, self.trailer.offsetSize) all_positions.append(position) return output
def function[writeOffsetTable, parameter[self, output]]: constant[Writes all of the object reference offsets.] variable[all_positions] assign[=] list[[]] variable[writtenReferences] assign[=] call[name[list], parameter[call[name[self].writtenReferences.items, parameter[]]]] call[name[writtenReferences].sort, parameter[]] for taget[tuple[[<ast.Name object at 0x7da1b1aa6b00>, <ast.Name object at 0x7da1b1aa60b0>]]] in starred[name[writtenReferences]] begin[:] if <ast.BoolOp object at 0x7da1b1aa5bd0> begin[:] variable[obj] assign[=] constant[b''] variable[position] assign[=] call[name[self].referencePositions.get, parameter[name[obj]]] if compare[name[position] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1b03ca0> <ast.AugAssign object at 0x7da1b1b02290> call[name[all_positions].append, parameter[name[position]]] return[name[output]]
keyword[def] identifier[writeOffsetTable] ( identifier[self] , identifier[output] ): literal[string] identifier[all_positions] =[] identifier[writtenReferences] = identifier[list] ( identifier[self] . identifier[writtenReferences] . identifier[items] ()) identifier[writtenReferences] . identifier[sort] ( identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]) keyword[for] identifier[obj] , identifier[order] keyword[in] identifier[writtenReferences] : keyword[if] identifier[bytes] != identifier[str] keyword[and] identifier[obj] == identifier[unicodeEmpty] : identifier[obj] = literal[string] identifier[position] = identifier[self] . identifier[referencePositions] . identifier[get] ( identifier[obj] ) keyword[if] identifier[position] keyword[is] keyword[None] : keyword[raise] identifier[InvalidPlistException] ( literal[string] % identifier[obj] ) identifier[output] += identifier[self] . identifier[binaryInt] ( identifier[position] , identifier[self] . identifier[trailer] . identifier[offsetSize] ) identifier[all_positions] . identifier[append] ( identifier[position] ) keyword[return] identifier[output]
def writeOffsetTable(self, output): """Writes all of the object reference offsets.""" all_positions = [] writtenReferences = list(self.writtenReferences.items()) writtenReferences.sort(key=lambda x: x[1]) for (obj, order) in writtenReferences: # Porting note: Elsewhere we deliberately replace empty unicdoe strings # with empty binary strings, but the empty unicode string # goes into writtenReferences. This isn't an issue in Py2 # because u'' and b'' have the same hash; but it is in # Py3, where they don't. if bytes != str and obj == unicodeEmpty: obj = b'' # depends on [control=['if'], data=[]] position = self.referencePositions.get(obj) if position is None: raise InvalidPlistException('Error while writing offsets table. Object not found. %s' % obj) # depends on [control=['if'], data=[]] output += self.binaryInt(position, self.trailer.offsetSize) all_positions.append(position) # depends on [control=['for'], data=[]] return output