code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
    """Fetch a block by number (or tag) via the JSON-RPC endpoint.

    https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber

    TESTED
    """
    # Normalize the numeric / tag argument before handing it to the RPC layer.
    validated = validate_block(block)
    return self._call("eth_getBlockByNumber", [validated, tx_objects])
def function[eth_getBlockByNumber, parameter[self, block, tx_objects]]: constant[TODO: documentation https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber TESTED ] variable[block] assign[=] call[name[validate_block], parameter[name[block]]] return[call[name[self]._call, parameter[constant[eth_getBlockByNumber], list[[<ast.Name object at 0x7da1b1d6d8a0>, <ast.Name object at 0x7da1b1d6e710>]]]]]
keyword[def] identifier[eth_getBlockByNumber] ( identifier[self] , identifier[block] = identifier[BLOCK_TAG_LATEST] , identifier[tx_objects] = keyword[True] ): literal[string] identifier[block] = identifier[validate_block] ( identifier[block] ) keyword[return] identifier[self] . identifier[_call] ( literal[string] ,[ identifier[block] , identifier[tx_objects] ])
def eth_getBlockByNumber(self, block=BLOCK_TAG_LATEST, tx_objects=True):
    """Return the block with the given number or tag.

    JSON-RPC reference:
    https://github.com/ethereum/wiki/wiki/JSON-RPC#eth_getblockbynumber

    TESTED
    """
    block = validate_block(block)  # accept ints and tag strings alike
    params = [block, tx_objects]
    return self._call('eth_getBlockByNumber', params)
def _parse_header(line: str) -> Tuple[str, Dict[str, str]]:
    r"""Split a Content-Type-style header into its main value and options.

    Returns the primary token (e.g. the content-type) plus a dict of the
    ``key=value`` parameters that follow it; RFC 2231 encodings are
    collapsed and surrounding double quotes are stripped.

    >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st"
    >>> ct, d = _parse_header(d)
    >>> ct
    'form-data'
    >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape')
    True
    >>> d['foo']
    'b\\a"r'
    """
    segments = _parseparam(";" + line)
    key = next(segments)  # the first segment is the bare main value
    # email.utils.decode_params treats its first entry specially, but we
    # already consumed the real first token above — feed it a dummy pair.
    raw_params = [("Dummy", "value")]
    for segment in segments:
        eq = segment.find("=")
        if eq < 0:
            continue  # a segment without '=' carries no option
        opt_name = segment[:eq].strip().lower()
        opt_value = segment[eq + 1 :].strip()
        raw_params.append((opt_name, native_str(opt_value)))
    decoded = email.utils.decode_params(raw_params)
    decoded.pop(0)  # drop the dummy entry again
    options = {}
    for opt_name, encoded_value in decoded:
        text = email.utils.collapse_rfc2231_value(encoded_value)
        if len(text) >= 2 and text.startswith('"') and text.endswith('"'):
            text = text[1:-1]
        options[opt_name] = text
    return key, options
def function[_parse_header, parameter[line]]: constant[Parse a Content-type like header. Return the main content-type and a dictionary of options. >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st" >>> ct, d = _parse_header(d) >>> ct 'form-data' >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape') True >>> d['foo'] 'b\\a"r' ] variable[parts] assign[=] call[name[_parseparam], parameter[binary_operation[constant[;] + name[line]]]] variable[key] assign[=] call[name[next], parameter[name[parts]]] variable[params] assign[=] list[[<ast.Tuple object at 0x7da1b1f2fd30>]] for taget[name[p]] in starred[name[parts]] begin[:] variable[i] assign[=] call[name[p].find, parameter[constant[=]]] if compare[name[i] greater_or_equal[>=] constant[0]] begin[:] variable[name] assign[=] call[call[call[name[p]][<ast.Slice object at 0x7da1b1f2f040>].strip, parameter[]].lower, parameter[]] variable[value] assign[=] call[call[name[p]][<ast.Slice object at 0x7da1b1f2e860>].strip, parameter[]] call[name[params].append, parameter[tuple[[<ast.Name object at 0x7da1b1f2dcf0>, <ast.Call object at 0x7da1b1f2f760>]]]] variable[decoded_params] assign[=] call[name[email].utils.decode_params, parameter[name[params]]] call[name[decoded_params].pop, parameter[constant[0]]] variable[pdict] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b1f2dcc0>, <ast.Name object at 0x7da1b1f2f4c0>]]] in starred[name[decoded_params]] begin[:] variable[value] assign[=] call[name[email].utils.collapse_rfc2231_value, parameter[name[decoded_value]]] if <ast.BoolOp object at 0x7da1b1f2dc00> begin[:] variable[value] assign[=] call[name[value]][<ast.Slice object at 0x7da1b1f2dfc0>] call[name[pdict]][name[name]] assign[=] name[value] return[tuple[[<ast.Name object at 0x7da1b1f2df90>, <ast.Name object at 0x7da1b1f2ee90>]]]
keyword[def] identifier[_parse_header] ( identifier[line] : identifier[str] )-> identifier[Tuple] [ identifier[str] , identifier[Dict] [ identifier[str] , identifier[str] ]]: literal[string] identifier[parts] = identifier[_parseparam] ( literal[string] + identifier[line] ) identifier[key] = identifier[next] ( identifier[parts] ) identifier[params] =[( literal[string] , literal[string] )] keyword[for] identifier[p] keyword[in] identifier[parts] : identifier[i] = identifier[p] . identifier[find] ( literal[string] ) keyword[if] identifier[i] >= literal[int] : identifier[name] = identifier[p] [: identifier[i] ]. identifier[strip] (). identifier[lower] () identifier[value] = identifier[p] [ identifier[i] + literal[int] :]. identifier[strip] () identifier[params] . identifier[append] (( identifier[name] , identifier[native_str] ( identifier[value] ))) identifier[decoded_params] = identifier[email] . identifier[utils] . identifier[decode_params] ( identifier[params] ) identifier[decoded_params] . identifier[pop] ( literal[int] ) identifier[pdict] ={} keyword[for] identifier[name] , identifier[decoded_value] keyword[in] identifier[decoded_params] : identifier[value] = identifier[email] . identifier[utils] . identifier[collapse_rfc2231_value] ( identifier[decoded_value] ) keyword[if] identifier[len] ( identifier[value] )>= literal[int] keyword[and] identifier[value] [ literal[int] ]== literal[string] keyword[and] identifier[value] [- literal[int] ]== literal[string] : identifier[value] = identifier[value] [ literal[int] :- literal[int] ] identifier[pdict] [ identifier[name] ]= identifier[value] keyword[return] identifier[key] , identifier[pdict]
def _parse_header(line: str) -> Tuple[str, Dict[str, str]]:
    r"""Parse a Content-type like header.

    Return the main content-type and a dictionary of options.

    >>> d = "form-data; foo=\"b\\\\a\\\"r\"; file*=utf-8''T%C3%A4st"
    >>> ct, d = _parse_header(d)
    >>> ct
    'form-data'
    >>> d['file'] == r'T\u00e4st'.encode('ascii').decode('unicode_escape')
    True
    >>> d['foo']
    'b\\a"r'
    """
    parts = _parseparam(';' + line)
    key = next(parts)  # first segment is the bare content-type
    # decode_params treats first argument special, but we already stripped
    # the real key above, so insert a throwaway pair in its place.
    params = [('Dummy', 'value')]
    for p in parts:
        i = p.find('=')
        if i >= 0:
            name = p[:i].strip().lower()
            value = p[i + 1:].strip()
            params.append((name, native_str(value)))
    decoded_params = email.utils.decode_params(params)
    decoded_params.pop(0)  # get rid of the dummy again
    pdict = {}
    for (name, decoded_value) in decoded_params:
        # Collapse RFC 2231 continuations / charset-encoded values.
        value = email.utils.collapse_rfc2231_value(decoded_value)
        # Strip one level of surrounding double quotes, if present.
        if len(value) >= 2 and value[0] == '"' and (value[-1] == '"'):
            value = value[1:-1]
        pdict[name] = value
    return (key, pdict)
def create_app(self):
    """Send a POST to spinnaker to create a new application with class variables.

    Raises:
        AssertionError: Application creation failed.
    """
    # Resolve which accounts the new application should be attached to.
    accounts = self.get_accounts()
    self.appinfo['accounts'] = accounts
    self.log.debug('Pipeline Config\n%s', pformat(self.pipeline_config))
    self.log.debug('App info:\n%s', pformat(self.appinfo))
    # Render the creation payload and block until Spinnaker finishes the task.
    task_json = self.retrieve_template()
    wait_for_task(task_json)
    self.log.info("Successfully created %s application", self.appname)
    return task_json
def function[create_app, parameter[self]]: constant[Send a POST to spinnaker to create a new application with class variables. Raises: AssertionError: Application creation failed. ] call[name[self].appinfo][constant[accounts]] assign[=] call[name[self].get_accounts, parameter[]] call[name[self].log.debug, parameter[constant[Pipeline Config %s], call[name[pformat], parameter[name[self].pipeline_config]]]] call[name[self].log.debug, parameter[constant[App info: %s], call[name[pformat], parameter[name[self].appinfo]]]] variable[jsondata] assign[=] call[name[self].retrieve_template, parameter[]] call[name[wait_for_task], parameter[name[jsondata]]] call[name[self].log.info, parameter[constant[Successfully created %s application], name[self].appname]] return[name[jsondata]]
keyword[def] identifier[create_app] ( identifier[self] ): literal[string] identifier[self] . identifier[appinfo] [ literal[string] ]= identifier[self] . identifier[get_accounts] () identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[pformat] ( identifier[self] . identifier[pipeline_config] )) identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[pformat] ( identifier[self] . identifier[appinfo] )) identifier[jsondata] = identifier[self] . identifier[retrieve_template] () identifier[wait_for_task] ( identifier[jsondata] ) identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[self] . identifier[appname] ) keyword[return] identifier[jsondata]
def create_app(self):
    """Send a POST to spinnaker to create a new application with class variables.

    Raises:
        AssertionError: Application creation failed.
    """
    # Attach the deploy accounts to the app metadata before rendering.
    self.appinfo['accounts'] = self.get_accounts()
    self.log.debug('Pipeline Config\n%s', pformat(self.pipeline_config))
    self.log.debug('App info:\n%s', pformat(self.appinfo))
    # Render the creation payload and wait for Spinnaker to finish the task.
    jsondata = self.retrieve_template()
    wait_for_task(jsondata)
    self.log.info('Successfully created %s application', self.appname)
    return jsondata
def beacons_refresh(self):
    '''
    Refresh the functions and returners.
    '''
    log.debug('Refreshing beacons.')
    # Rebuild the beacon manager so the current opts/functions take effect.
    self.beacons = salt.beacons.Beacon(self.opts, self.functions)
def function[beacons_refresh, parameter[self]]: constant[ Refresh the functions and returners. ] call[name[log].debug, parameter[constant[Refreshing beacons.]]] name[self].beacons assign[=] call[name[salt].beacons.Beacon, parameter[name[self].opts, name[self].functions]]
keyword[def] identifier[beacons_refresh] ( identifier[self] ): literal[string] identifier[log] . identifier[debug] ( literal[string] ) identifier[self] . identifier[beacons] = identifier[salt] . identifier[beacons] . identifier[Beacon] ( identifier[self] . identifier[opts] , identifier[self] . identifier[functions] )
def beacons_refresh(self):
    """
    Refresh the functions and returners.
    """
    log.debug('Refreshing beacons.')
    # Recreate the beacon manager from the current opts and functions.
    refreshed = salt.beacons.Beacon(self.opts, self.functions)
    self.beacons = refreshed
def serialize(self):
    """Turn this report into a dictionary that encodes all information including received timestamp"""
    encoded = bytes(self.encode())
    # The report format is the first byte of the encoded report.  Indexing
    # bytes yields an int on Python 3 but a 1-char str on Python 2, so
    # coerce with ord() when needed.
    first = encoded[0]
    fmt = first if isinstance(first, int) else ord(first)
    return {
        'received_time': self.received_time,
        'encoded_report': encoded,
        'report_format': fmt,
        'origin': self.origin,
    }
def function[serialize, parameter[self]]: constant[Turn this report into a dictionary that encodes all information including received timestamp] variable[info] assign[=] dictionary[[], []] call[name[info]][constant[received_time]] assign[=] name[self].received_time call[name[info]][constant[encoded_report]] assign[=] call[name[bytes], parameter[call[name[self].encode, parameter[]]]] variable[report_format] assign[=] call[call[name[info]][constant[encoded_report]]][constant[0]] if <ast.UnaryOp object at 0x7da204346320> begin[:] variable[report_format] assign[=] call[name[ord], parameter[name[report_format]]] call[name[info]][constant[report_format]] assign[=] name[report_format] call[name[info]][constant[origin]] assign[=] name[self].origin return[name[info]]
keyword[def] identifier[serialize] ( identifier[self] ): literal[string] identifier[info] ={} identifier[info] [ literal[string] ]= identifier[self] . identifier[received_time] identifier[info] [ literal[string] ]= identifier[bytes] ( identifier[self] . identifier[encode] ()) identifier[report_format] = identifier[info] [ literal[string] ][ literal[int] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[report_format] , identifier[int] ): identifier[report_format] = identifier[ord] ( identifier[report_format] ) identifier[info] [ literal[string] ]= identifier[report_format] identifier[info] [ literal[string] ]= identifier[self] . identifier[origin] keyword[return] identifier[info]
def serialize(self):
    """Turn this report into a dictionary that encodes all information including received timestamp"""
    info = {}
    info['received_time'] = self.received_time
    info['encoded_report'] = bytes(self.encode())
    # Handle python 2 / python 3 differences: indexing bytes yields an int
    # on py3 but a 1-char str on py2, so coerce with ord() when needed.
    report_format = info['encoded_report'][0]
    if not isinstance(report_format, int):
        report_format = ord(report_format)
    # Report format is the first byte of the encoded report
    info['report_format'] = report_format
    info['origin'] = self.origin
    return info
def channel_shift(x, intensity, is_random=False, channel_index=2):
    """Shift the channels of an image, randomly or non-randomly, see
    `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__.

    Parameters
    -----------
    x : numpy.array
        An image with dimension of [row, col, channel] (default).
    intensity : float
        Intensity of shifting.
    is_random : boolean
        If True, randomly shift. Default is False.
    channel_index : int
        Index of channel. Default is 2.

    Returns
    -------
    numpy.array
        A processed image.
    """
    # Pick a random offset in [-intensity, intensity), or the fixed value.
    factor = np.random.uniform(-intensity, intensity) if is_random else intensity
    # Bring the channel axis to the front so we can iterate per-channel.
    rolled = np.rollaxis(x, channel_index, 0)
    lo, hi = np.min(rolled), np.max(rolled)
    shifted = []
    for channel in rolled:
        # Clip to the original global value range so the shift cannot
        # push pixels outside the image's existing dynamic range.
        shifted.append(np.clip(channel + factor, lo, hi))
    stacked = np.stack(shifted, axis=0)
    # Move the channel axis back to its original position.
    return np.rollaxis(stacked, 0, channel_index + 1)
def function[channel_shift, parameter[x, intensity, is_random, channel_index]]: constant[Shift the channels of an image, randomly or non-randomly, see `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__. Parameters ----------- x : numpy.array An image with dimension of [row, col, channel] (default). intensity : float Intensity of shifting. is_random : boolean If True, randomly shift. Default is False. channel_index : int Index of channel. Default is 2. Returns ------- numpy.array A processed image. ] if name[is_random] begin[:] variable[factor] assign[=] call[name[np].random.uniform, parameter[<ast.UnaryOp object at 0x7da2045660b0>, name[intensity]]] variable[x] assign[=] call[name[np].rollaxis, parameter[name[x], name[channel_index], constant[0]]] <ast.Tuple object at 0x7da2045671f0> assign[=] tuple[[<ast.Call object at 0x7da204566770>, <ast.Call object at 0x7da204566200>]] variable[channel_images] assign[=] <ast.ListComp object at 0x7da2045650f0> variable[x] assign[=] call[name[np].stack, parameter[name[channel_images]]] variable[x] assign[=] call[name[np].rollaxis, parameter[name[x], constant[0], binary_operation[name[channel_index] + constant[1]]]] return[name[x]]
keyword[def] identifier[channel_shift] ( identifier[x] , identifier[intensity] , identifier[is_random] = keyword[False] , identifier[channel_index] = literal[int] ): literal[string] keyword[if] identifier[is_random] : identifier[factor] = identifier[np] . identifier[random] . identifier[uniform] (- identifier[intensity] , identifier[intensity] ) keyword[else] : identifier[factor] = identifier[intensity] identifier[x] = identifier[np] . identifier[rollaxis] ( identifier[x] , identifier[channel_index] , literal[int] ) identifier[min_x] , identifier[max_x] = identifier[np] . identifier[min] ( identifier[x] ), identifier[np] . identifier[max] ( identifier[x] ) identifier[channel_images] =[ identifier[np] . identifier[clip] ( identifier[x_channel] + identifier[factor] , identifier[min_x] , identifier[max_x] ) keyword[for] identifier[x_channel] keyword[in] identifier[x] ] identifier[x] = identifier[np] . identifier[stack] ( identifier[channel_images] , identifier[axis] = literal[int] ) identifier[x] = identifier[np] . identifier[rollaxis] ( identifier[x] , literal[int] , identifier[channel_index] + literal[int] ) keyword[return] identifier[x]
def channel_shift(x, intensity, is_random=False, channel_index=2):
    """Shift the channels of an image, randomly or non-randomly, see
    `numpy.rollaxis <https://docs.scipy.org/doc/numpy/reference/generated/numpy.rollaxis.html>`__.

    Parameters
    -----------
    x : numpy.array
        An image with dimension of [row, col, channel] (default).
    intensity : float
        Intensity of shifting.
    is_random : boolean
        If True, randomly shift. Default is False.
    channel_index : int
        Index of channel. Default is 2.

    Returns
    -------
    numpy.array
        A processed image.
    """
    if is_random:
        # Random offset drawn uniformly from [-intensity, intensity).
        factor = np.random.uniform(-intensity, intensity)
    else:
        factor = intensity
    # Bring the channel axis to the front so we can iterate per-channel.
    x = np.rollaxis(x, channel_index, 0)
    # Global min/max over the whole image — clipping keeps shifted values
    # inside the image's original dynamic range.
    (min_x, max_x) = (np.min(x), np.max(x))
    channel_images = [np.clip(x_channel + factor, min_x, max_x) for x_channel in x]
    x = np.stack(channel_images, axis=0)
    # Restore the channel axis to its original position.
    x = np.rollaxis(x, 0, channel_index + 1)
    return x
def p_error(self, t):
    """
    Internal error handler

    args:
        t (Lex token): Error token
    """
    if t:
        details = (self.target, t.lineno, t.type, t.value)
        self.register.register(
            "E: %s line: %d, Syntax Error, token: `%s`, `%s`" % details)
    # Skip ahead until the current block closes (or input ends), popping
    # one nesting level if we were inside a nested scope.
    while True:
        t = self.lex.token()
        if t and t.value != '}':
            continue
        if len(self.scope) > 1:
            self.scope.pop()
        break
    self.parser.restart()
    return t
def function[p_error, parameter[self, t]]: constant[ Internal error handler args: t (Lex token): Error token ] if name[t] begin[:] variable[error_msg] assign[=] binary_operation[constant[E: %s line: %d, Syntax Error, token: `%s`, `%s`] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da1aff8c370>, <ast.Attribute object at 0x7da1aff8fa90>, <ast.Attribute object at 0x7da1aff8fd30>, <ast.Attribute object at 0x7da1aff8f310>]]] call[name[self].register.register, parameter[name[error_msg]]] while constant[True] begin[:] variable[t] assign[=] call[name[self].lex.token, parameter[]] if <ast.BoolOp object at 0x7da1aff8e650> begin[:] if compare[call[name[len], parameter[name[self].scope]] greater[>] constant[1]] begin[:] call[name[self].scope.pop, parameter[]] break call[name[self].parser.restart, parameter[]] return[name[t]]
keyword[def] identifier[p_error] ( identifier[self] , identifier[t] ): literal[string] keyword[if] identifier[t] : identifier[error_msg] = literal[string] %( identifier[self] . identifier[target] , identifier[t] . identifier[lineno] , identifier[t] . identifier[type] , identifier[t] . identifier[value] ) identifier[self] . identifier[register] . identifier[register] ( identifier[error_msg] ) keyword[while] keyword[True] : identifier[t] = identifier[self] . identifier[lex] . identifier[token] () keyword[if] keyword[not] identifier[t] keyword[or] identifier[t] . identifier[value] == literal[string] : keyword[if] identifier[len] ( identifier[self] . identifier[scope] )> literal[int] : identifier[self] . identifier[scope] . identifier[pop] () keyword[break] identifier[self] . identifier[parser] . identifier[restart] () keyword[return] identifier[t]
def p_error(self, t):
    """
    Internal error handler

    args:
        t (Lex token): Error token
    """
    if t:
        error_msg = 'E: %s line: %d, Syntax Error, token: `%s`, `%s`' % (self.target, t.lineno, t.type, t.value)
        self.register.register(error_msg)
    # Error recovery: discard tokens until the current block closes (or
    # the token stream is exhausted), popping one nesting level if we
    # were inside a nested scope, then restart the parser.
    while True:
        t = self.lex.token()
        if not t or t.value == '}':
            if len(self.scope) > 1:
                self.scope.pop()
            break
    self.parser.restart()
    return t
def dannotsagg2dannots2dalignbedannot(cfg):
    """
    Map aggregated annotations to queries

    step#9

    :param cfg: configuration dict (paths under 'dannotsaggp',
        'dalignbedstatsp', 'dalignbedannotp'; 'force' re-runs the step)
    :returns: the configuration dict, unchanged
    """
    # Load the aggregated annotations and the alignment stats tables.
    dannotsagg = del_Unnamed(pd.read_csv(cfg['dannotsaggp'], sep='\t'))
    dalignbedstats = del_Unnamed(pd.read_csv(cfg['dalignbedstatsp'], sep='\t'))
    dalignbedannotp = cfg['dalignbedannotp']
    logging.info(basename(dalignbedannotp))
    # Only (re)compute when the output is missing or a rerun is forced.
    if not exists(dalignbedannotp) or cfg['force']:
        dalignbedannot = dalignbedstats.set_index('id').join(
            set_index(dannotsagg, 'id'), rsuffix=' annotation')
        # NM may arrive as float/str after the join; normalize to int.
        dalignbedannot['NM'] = dalignbedannot['NM'].apply(int)
        dalignbedannot.to_csv(dalignbedannotp, sep='\t')
    return cfg
def function[dannotsagg2dannots2dalignbedannot, parameter[cfg]]: constant[ Map aggregated annotations to queries step#9 :param cfg: configuration dict ] variable[datatmpd] assign[=] call[name[cfg]][constant[datatmpd]] variable[dannotsagg] assign[=] call[name[del_Unnamed], parameter[call[name[pd].read_csv, parameter[call[name[cfg]][constant[dannotsaggp]]]]]] variable[dalignbedstats] assign[=] call[name[del_Unnamed], parameter[call[name[pd].read_csv, parameter[call[name[cfg]][constant[dalignbedstatsp]]]]]] variable[dalignbedannotp] assign[=] call[name[cfg]][constant[dalignbedannotp]] call[name[logging].info, parameter[call[name[basename], parameter[name[dalignbedannotp]]]]] if <ast.BoolOp object at 0x7da1b209e230> begin[:] variable[dalignbedannot] assign[=] call[call[name[dalignbedstats].set_index, parameter[constant[id]]].join, parameter[call[name[set_index], parameter[name[dannotsagg], constant[id]]]]] call[name[dalignbedannot]][constant[NM]] assign[=] call[call[name[dalignbedannot]][constant[NM]].apply, parameter[name[int]]] call[name[dalignbedannot].to_csv, parameter[name[dalignbedannotp]]] return[name[cfg]]
keyword[def] identifier[dannotsagg2dannots2dalignbedannot] ( identifier[cfg] ): literal[string] identifier[datatmpd] = identifier[cfg] [ literal[string] ] identifier[dannotsagg] = identifier[del_Unnamed] ( identifier[pd] . identifier[read_csv] ( identifier[cfg] [ literal[string] ], identifier[sep] = literal[string] )) identifier[dalignbedstats] = identifier[del_Unnamed] ( identifier[pd] . identifier[read_csv] ( identifier[cfg] [ literal[string] ], identifier[sep] = literal[string] )) identifier[dalignbedannotp] = identifier[cfg] [ literal[string] ] identifier[logging] . identifier[info] ( identifier[basename] ( identifier[dalignbedannotp] )) keyword[if] keyword[not] identifier[exists] ( identifier[dalignbedannotp] ) keyword[or] identifier[cfg] [ literal[string] ]: identifier[dalignbedannot] = identifier[dalignbedstats] . identifier[set_index] ( literal[string] ). identifier[join] ( identifier[set_index] ( identifier[dannotsagg] , literal[string] ), identifier[rsuffix] = literal[string] ) identifier[dalignbedannot] [ literal[string] ]= identifier[dalignbedannot] [ literal[string] ]. identifier[apply] ( identifier[int] ) identifier[dalignbedannot] . identifier[to_csv] ( identifier[dalignbedannotp] , identifier[sep] = literal[string] ) keyword[return] identifier[cfg]
def dannotsagg2dannots2dalignbedannot(cfg):
    """
    Map aggregated annotations to queries

    step#9

    :param cfg: configuration dict
    :returns: the configuration dict, unchanged
    """
    # NOTE(review): datatmpd is read here but never used below — confirm
    # whether it can be dropped or is kept for the KeyError side effect.
    datatmpd = cfg['datatmpd']
    # Load the aggregated annotations and the alignment stats tables.
    dannotsagg = del_Unnamed(pd.read_csv(cfg['dannotsaggp'], sep='\t'))
    dalignbedstats = del_Unnamed(pd.read_csv(cfg['dalignbedstatsp'], sep='\t'))
    dalignbedannotp = cfg['dalignbedannotp']
    logging.info(basename(dalignbedannotp))
    # Only (re)compute when the output is missing or a rerun is forced.
    if not exists(dalignbedannotp) or cfg['force']:
        dalignbedannot = dalignbedstats.set_index('id').join(set_index(dannotsagg, 'id'), rsuffix=' annotation')
        # NM may arrive as float/str after the join; normalize to int.
        dalignbedannot['NM'] = dalignbedannot['NM'].apply(int)
        dalignbedannot.to_csv(dalignbedannotp, sep='\t')
    return cfg
def _findSwiplFromExec():
    """
    This function tries to use an executable on the path to find SWI-Prolog
    SO/DLL and the resource file.

    :returns: A tuple of (path to the swipl DLL, path to the resource file)
    :returns type: ({str, None}, {str, None})
    """
    platform = sys.platform[:3]
    fullName = None
    swiHome = None
    try:
        # try to get library path from swipl executable.
        # We may have pl or swipl as the executable
        try:
            cmd = Popen(['swipl', '--dump-runtime-variables'], stdout=PIPE)
        except OSError:
            cmd = Popen(['pl', '--dump-runtime-variables'], stdout=PIPE)
        ret = cmd.communicate()

        # Parse the output into a dictionary
        ret = ret[0].decode().replace(';', '').splitlines()
        ret = [line.split('=', 1) for line in ret]
        # [1:-1] gets rid of the quotes around each value
        rtvars = dict((name, value[1:-1]) for name, value in ret)

        if rtvars['PLSHARED'] == 'no':
            raise ImportError('SWI-Prolog is not installed as a shared '
                              'library.')
        else:  # PLSHARED == 'yes'
            swiHome = rtvars['PLBASE']  # The environment is in PLBASE
            if not os.path.exists(swiHome):
                swiHome = None

            # determine platform specific path
            if platform == "win":
                dllName = rtvars['PLLIB'][:-4] + '.' + rtvars['PLSOEXT']
                path = os.path.join(rtvars['PLBASE'], 'bin')
                fullName = os.path.join(path, dllName)
                if not os.path.exists(fullName):
                    fullName = None
            elif platform == "cyg":
                # e.g. /usr/lib/pl-5.6.36/bin/i686-cygwin/cygpl.dll
                dllName = 'cygpl.dll'
                path = os.path.join(rtvars['PLBASE'], 'bin', rtvars['PLARCH'])
                fullName = os.path.join(path, dllName)
                if not os.path.exists(fullName):
                    fullName = None
            elif platform == "dar":
                # FIX: this expression was split by a broken line
                # continuation in the original source; rejoined here.
                dllName = 'lib' + rtvars['PLLIB'][2:] + '.' + rtvars['PLSOEXT']
                path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH'])
                baseName = os.path.join(path, dllName)
                if os.path.exists(baseName):
                    fullName = baseName
                else:
                    # We will search for versions
                    fullName = None
            else:  # assume UNIX-like
                # The SO name in some linuxes is of the form libswipl.so.5.10.2,
                # so we have to use glob to find the correct one
                dllName = 'lib' + rtvars['PLLIB'][2:] + '.' + rtvars['PLSOEXT']
                path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH'])
                baseName = os.path.join(path, dllName)
                if os.path.exists(baseName):
                    fullName = baseName
                else:
                    # We will search for versions
                    pattern = baseName + '.*'
                    files = glob.glob(pattern)
                    if len(files) == 0:
                        fullName = None
                    elif len(files) == 1:
                        fullName = files[0]
                    else:
                        # Will this ever happen?
                        fullName = None
    except (OSError, KeyError):  # KeyError from accessing rtvars
        pass

    return (fullName, swiHome)
def function[_findSwiplFromExec, parameter[]]: constant[ This function tries to use an executable on the path to find SWI-Prolog SO/DLL and the resource file. :returns: A tuple of (path to the swipl DLL, path to the resource file) :returns type: ({str, None}, {str, None}) ] variable[platform] assign[=] call[name[sys].platform][<ast.Slice object at 0x7da2046239a0>] variable[fullName] assign[=] constant[None] variable[swiHome] assign[=] constant[None] <ast.Try object at 0x7da204620460> return[tuple[[<ast.Name object at 0x7da1b16a71f0>, <ast.Name object at 0x7da1b16a4be0>]]]
keyword[def] identifier[_findSwiplFromExec] (): literal[string] identifier[platform] = identifier[sys] . identifier[platform] [: literal[int] ] identifier[fullName] = keyword[None] identifier[swiHome] = keyword[None] keyword[try] : keyword[try] : identifier[cmd] = identifier[Popen] ([ literal[string] , literal[string] ], identifier[stdout] = identifier[PIPE] ) keyword[except] identifier[OSError] : identifier[cmd] = identifier[Popen] ([ literal[string] , literal[string] ], identifier[stdout] = identifier[PIPE] ) identifier[ret] = identifier[cmd] . identifier[communicate] () identifier[ret] = identifier[ret] [ literal[int] ]. identifier[decode] (). identifier[replace] ( literal[string] , literal[string] ). identifier[splitlines] () identifier[ret] =[ identifier[line] . identifier[split] ( literal[string] , literal[int] ) keyword[for] identifier[line] keyword[in] identifier[ret] ] identifier[rtvars] = identifier[dict] (( identifier[name] , identifier[value] [ literal[int] :- literal[int] ]) keyword[for] identifier[name] , identifier[value] keyword[in] identifier[ret] ) keyword[if] identifier[rtvars] [ literal[string] ]== literal[string] : keyword[raise] identifier[ImportError] ( literal[string] literal[string] ) keyword[else] : identifier[swiHome] = identifier[rtvars] [ literal[string] ] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[swiHome] ): identifier[swiHome] = keyword[None] keyword[if] identifier[platform] == literal[string] : identifier[dllName] = identifier[rtvars] [ literal[string] ][:- literal[int] ]+ literal[string] + identifier[rtvars] [ literal[string] ] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[rtvars] [ literal[string] ], literal[string] ) identifier[fullName] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[dllName] ) keyword[if] keyword[not] identifier[os] . identifier[path] . 
identifier[exists] ( identifier[fullName] ): identifier[fullName] = keyword[None] keyword[elif] identifier[platform] == literal[string] : identifier[dllName] = literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[rtvars] [ literal[string] ], literal[string] , identifier[rtvars] [ literal[string] ]) identifier[fullName] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[dllName] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[fullName] ): identifier[fullName] = keyword[None] keyword[elif] identifier[platform] == literal[string] : identifier[dllName] = literal[string] + identifier[rtvars] [ literal[string] ][ literal[int] :]+ literal[string] + identifier[rtvars] [ literal[string] ] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[rtvars] [ literal[string] ], literal[string] , identifier[rtvars] [ literal[string] ]) identifier[baseName] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[dllName] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[baseName] ): identifier[fullName] = identifier[baseName] keyword[else] : identifier[fullName] = keyword[None] keyword[else] : identifier[dllName] = literal[string] + identifier[rtvars] [ literal[string] ][ literal[int] :]+ literal[string] + identifier[rtvars] [ literal[string] ] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[rtvars] [ literal[string] ], literal[string] , identifier[rtvars] [ literal[string] ]) identifier[baseName] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , identifier[dllName] ) keyword[if] identifier[os] . identifier[path] . 
identifier[exists] ( identifier[baseName] ): identifier[fullName] = identifier[baseName] keyword[else] : identifier[pattern] = identifier[baseName] + literal[string] identifier[files] = identifier[glob] . identifier[glob] ( identifier[pattern] ) keyword[if] identifier[len] ( identifier[files] )== literal[int] : identifier[fullName] = keyword[None] keyword[elif] identifier[len] ( identifier[files] )== literal[int] : identifier[fullName] = identifier[files] [ literal[int] ] keyword[else] : identifier[fullName] = keyword[None] keyword[except] ( identifier[OSError] , identifier[KeyError] ): keyword[pass] keyword[return] ( identifier[fullName] , identifier[swiHome] )
def _findSwiplFromExec(): """ This function tries to use an executable on the path to find SWI-Prolog SO/DLL and the resource file. :returns: A tuple of (path to the swipl DLL, path to the resource file) :returns type: ({str, None}, {str, None}) """ platform = sys.platform[:3] fullName = None swiHome = None try: # try to get library path from swipl executable. # We may have pl or swipl as the executable try: cmd = Popen(['swipl', '--dump-runtime-variables'], stdout=PIPE) # depends on [control=['try'], data=[]] except OSError: cmd = Popen(['pl', '--dump-runtime-variables'], stdout=PIPE) # depends on [control=['except'], data=[]] ret = cmd.communicate() # Parse the output into a dictionary ret = ret[0].decode().replace(';', '').splitlines() ret = [line.split('=', 1) for line in ret] rtvars = dict(((name, value[1:-1]) for (name, value) in ret)) # [1:-1] gets # rid of the # quotes if rtvars['PLSHARED'] == 'no': raise ImportError('SWI-Prolog is not installed as a shared library.') # depends on [control=['if'], data=[]] else: # PLSHARED == 'yes' swiHome = rtvars['PLBASE'] # The environment is in PLBASE if not os.path.exists(swiHome): swiHome = None # depends on [control=['if'], data=[]] # determine platform specific path if platform == 'win': dllName = rtvars['PLLIB'][:-4] + '.' + rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'bin') fullName = os.path.join(path, dllName) if not os.path.exists(fullName): fullName = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif platform == 'cyg': # e.g. /usr/lib/pl-5.6.36/bin/i686-cygwin/cygpl.dll dllName = 'cygpl.dll' path = os.path.join(rtvars['PLBASE'], 'bin', rtvars['PLARCH']) fullName = os.path.join(path, dllName) if not os.path.exists(fullName): fullName = None # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif platform == 'dar': dllName = 'lib' + rtvars['PLLIB'][2:] + '.' 
+ rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH']) baseName = os.path.join(path, dllName) if os.path.exists(baseName): fullName = baseName # depends on [control=['if'], data=[]] else: # We will search for versions fullName = None # depends on [control=['if'], data=[]] else: # assume UNIX-like # The SO name in some linuxes is of the form libswipl.so.5.10.2, # so we have to use glob to find the correct one dllName = 'lib' + rtvars['PLLIB'][2:] + '.' + rtvars['PLSOEXT'] path = os.path.join(rtvars['PLBASE'], 'lib', rtvars['PLARCH']) baseName = os.path.join(path, dllName) if os.path.exists(baseName): fullName = baseName # depends on [control=['if'], data=[]] else: # We will search for versions pattern = baseName + '.*' files = glob.glob(pattern) if len(files) == 0: fullName = None # depends on [control=['if'], data=[]] elif len(files) == 1: fullName = files[0] # depends on [control=['if'], data=[]] else: # Will this ever happen? fullName = None # depends on [control=['try'], data=[]] except (OSError, KeyError): # KeyError from accessing rtvars pass # depends on [control=['except'], data=[]] return (fullName, swiHome)
def delete_report(report): """Delete report(s), supports globbing. """ for path in glob.glob(os.path.join(_get_reports_path(), report)): shutil.rmtree(path)
def function[delete_report, parameter[report]]: constant[Delete report(s), supports globbing. ] for taget[name[path]] in starred[call[name[glob].glob, parameter[call[name[os].path.join, parameter[call[name[_get_reports_path], parameter[]], name[report]]]]]] begin[:] call[name[shutil].rmtree, parameter[name[path]]]
keyword[def] identifier[delete_report] ( identifier[report] ): literal[string] keyword[for] identifier[path] keyword[in] identifier[glob] . identifier[glob] ( identifier[os] . identifier[path] . identifier[join] ( identifier[_get_reports_path] (), identifier[report] )): identifier[shutil] . identifier[rmtree] ( identifier[path] )
def delete_report(report): """Delete report(s), supports globbing. """ for path in glob.glob(os.path.join(_get_reports_path(), report)): shutil.rmtree(path) # depends on [control=['for'], data=['path']]
def consume(self, routingKey, msg): """ Consumer for CaptureBuildStartTime. Gets the build start time. """ builder_info = yield self.master.data.get(("builders", msg['builderid'])) if self._builder_name_matches(builder_info): try: ret_val = self._callback(*self._retValParams(msg)) except Exception as e: # catching generic exceptions is okay here since we propagate # it raise CaptureCallbackError("%s Exception raised: %s with message: %s" % (self._err_msg(msg, builder_info['name']), type(e).__name__, str(e))) context = self._defaultContext(msg, builder_info['name']) post_data = { self._time_type: ret_val } series_name = "%s-build-times" % builder_info['name'] yield self._store(post_data, series_name, context) else: yield defer.succeed(None)
def function[consume, parameter[self, routingKey, msg]]: constant[ Consumer for CaptureBuildStartTime. Gets the build start time. ] variable[builder_info] assign[=] <ast.Yield object at 0x7da18f58d9c0> if call[name[self]._builder_name_matches, parameter[name[builder_info]]] begin[:] <ast.Try object at 0x7da1b1c3fbb0> variable[context] assign[=] call[name[self]._defaultContext, parameter[name[msg], call[name[builder_info]][constant[name]]]] variable[post_data] assign[=] dictionary[[<ast.Attribute object at 0x7da1b1c3d090>], [<ast.Name object at 0x7da1b1c3d120>]] variable[series_name] assign[=] binary_operation[constant[%s-build-times] <ast.Mod object at 0x7da2590d6920> call[name[builder_info]][constant[name]]] <ast.Yield object at 0x7da1b1c3cfd0>
keyword[def] identifier[consume] ( identifier[self] , identifier[routingKey] , identifier[msg] ): literal[string] identifier[builder_info] = keyword[yield] identifier[self] . identifier[master] . identifier[data] . identifier[get] (( literal[string] , identifier[msg] [ literal[string] ])) keyword[if] identifier[self] . identifier[_builder_name_matches] ( identifier[builder_info] ): keyword[try] : identifier[ret_val] = identifier[self] . identifier[_callback] (* identifier[self] . identifier[_retValParams] ( identifier[msg] )) keyword[except] identifier[Exception] keyword[as] identifier[e] : keyword[raise] identifier[CaptureCallbackError] ( literal[string] % ( identifier[self] . identifier[_err_msg] ( identifier[msg] , identifier[builder_info] [ literal[string] ]), identifier[type] ( identifier[e] ). identifier[__name__] , identifier[str] ( identifier[e] ))) identifier[context] = identifier[self] . identifier[_defaultContext] ( identifier[msg] , identifier[builder_info] [ literal[string] ]) identifier[post_data] ={ identifier[self] . identifier[_time_type] : identifier[ret_val] } identifier[series_name] = literal[string] % identifier[builder_info] [ literal[string] ] keyword[yield] identifier[self] . identifier[_store] ( identifier[post_data] , identifier[series_name] , identifier[context] ) keyword[else] : keyword[yield] identifier[defer] . identifier[succeed] ( keyword[None] )
def consume(self, routingKey, msg): """ Consumer for CaptureBuildStartTime. Gets the build start time. """ builder_info = (yield self.master.data.get(('builders', msg['builderid']))) if self._builder_name_matches(builder_info): try: ret_val = self._callback(*self._retValParams(msg)) # depends on [control=['try'], data=[]] except Exception as e: # catching generic exceptions is okay here since we propagate # it raise CaptureCallbackError('%s Exception raised: %s with message: %s' % (self._err_msg(msg, builder_info['name']), type(e).__name__, str(e))) # depends on [control=['except'], data=['e']] context = self._defaultContext(msg, builder_info['name']) post_data = {self._time_type: ret_val} series_name = '%s-build-times' % builder_info['name'] yield self._store(post_data, series_name, context) # depends on [control=['if'], data=[]] else: yield defer.succeed(None)
def _interpolate_p(p, r, v): """ interpolates p based on the values in the A table for the scalar value of r and the scalar value of v """ # interpolate p (v should be in table) # if .5 < p < .75 use linear interpolation in q # if p > .75 use quadratic interpolation in log(y + r/v) # by -1. / (1. + 1.5 * _phi((1. + p)/2.)) # find the 3 closest v values p0, p1, p2 = _select_ps(p) try: y0 = _func(A[(p0, v)], p0, r, v) + 1. except: print(p,r,v) y1 = _func(A[(p1, v)], p1, r, v) + 1. y2 = _func(A[(p2, v)], p2, r, v) + 1. y_log0 = math.log(y0 + float(r)/float(v)) y_log1 = math.log(y1 + float(r)/float(v)) y_log2 = math.log(y2 + float(r)/float(v)) # If p < .85 apply only the ordinate transformation # if p > .85 apply the ordinate and the abcissa transformation # In both cases apply quadratic interpolation if p > .85: p_t = _ptransform(p) p0_t = _ptransform(p0) p1_t = _ptransform(p1) p2_t = _ptransform(p2) # calculate derivatives for quadratic interpolation d2 = 2*((y_log2-y_log1)/(p2_t-p1_t) - \ (y_log1-y_log0)/(p1_t-p0_t))/(p2_t-p0_t) if (p2+p0)>=(p1+p1): d1 = (y_log2-y_log1)/(p2_t-p1_t) - 0.5*d2*(p2_t-p1_t) else: d1 = (y_log1-y_log0)/(p1_t-p0_t) + 0.5*d2*(p1_t-p0_t) d0 = y_log1 # interpolate value y_log = (d2/2.) * (p_t-p1_t)**2. + d1 * (p_t-p1_t) + d0 # transform back to y y = math.exp(y_log) - float(r)/float(v) elif p > .5: # calculate derivatives for quadratic interpolation d2 = 2*((y_log2-y_log1)/(p2-p1) - \ (y_log1-y_log0)/(p1-p0))/(p2-p0) if (p2+p0)>=(p1+p1): d1 = (y_log2-y_log1)/(p2-p1) - 0.5*d2*(p2-p1) else: d1 = (y_log1-y_log0)/(p1-p0) + 0.5*d2*(p1-p0) d0 = y_log1 # interpolate values y_log = (d2/2.) * (p-p1)**2. 
+ d1 * (p-p1) + d0 # transform back to y y = math.exp(y_log) - float(r)/float(v) else: # linear interpolation in q and p q0 = math.sqrt(2) * -y0 * \ scipy.stats.t.isf((1.+p0)/2., max(v, 1e38)) q1 = math.sqrt(2) * -y1 * \ scipy.stats.t.isf((1.+p1)/2., max(v, 1e38)) d1 = (q1-q0)/(p1-p0) d0 = q0 # interpolate values q = d1 * (p-p0) + d0 # transform back to y y = -q / (math.sqrt(2) * \ scipy.stats.t.isf((1.+p)/2., max(v, 1e38))) return y
def function[_interpolate_p, parameter[p, r, v]]: constant[ interpolates p based on the values in the A table for the scalar value of r and the scalar value of v ] <ast.Tuple object at 0x7da20e955960> assign[=] call[name[_select_ps], parameter[name[p]]] <ast.Try object at 0x7da20e957e50> variable[y1] assign[=] binary_operation[call[name[_func], parameter[call[name[A]][tuple[[<ast.Name object at 0x7da20e9564d0>, <ast.Name object at 0x7da20e9543d0>]]], name[p1], name[r], name[v]]] + constant[1.0]] variable[y2] assign[=] binary_operation[call[name[_func], parameter[call[name[A]][tuple[[<ast.Name object at 0x7da20e955db0>, <ast.Name object at 0x7da20e955510>]]], name[p2], name[r], name[v]]] + constant[1.0]] variable[y_log0] assign[=] call[name[math].log, parameter[binary_operation[name[y0] + binary_operation[call[name[float], parameter[name[r]]] / call[name[float], parameter[name[v]]]]]]] variable[y_log1] assign[=] call[name[math].log, parameter[binary_operation[name[y1] + binary_operation[call[name[float], parameter[name[r]]] / call[name[float], parameter[name[v]]]]]]] variable[y_log2] assign[=] call[name[math].log, parameter[binary_operation[name[y2] + binary_operation[call[name[float], parameter[name[r]]] / call[name[float], parameter[name[v]]]]]]] if compare[name[p] greater[>] constant[0.85]] begin[:] variable[p_t] assign[=] call[name[_ptransform], parameter[name[p]]] variable[p0_t] assign[=] call[name[_ptransform], parameter[name[p0]]] variable[p1_t] assign[=] call[name[_ptransform], parameter[name[p1]]] variable[p2_t] assign[=] call[name[_ptransform], parameter[name[p2]]] variable[d2] assign[=] binary_operation[binary_operation[constant[2] * binary_operation[binary_operation[binary_operation[name[y_log2] - name[y_log1]] / binary_operation[name[p2_t] - name[p1_t]]] - binary_operation[binary_operation[name[y_log1] - name[y_log0]] / binary_operation[name[p1_t] - name[p0_t]]]]] / binary_operation[name[p2_t] - name[p0_t]]] if compare[binary_operation[name[p2] + 
name[p0]] greater_or_equal[>=] binary_operation[name[p1] + name[p1]]] begin[:] variable[d1] assign[=] binary_operation[binary_operation[binary_operation[name[y_log2] - name[y_log1]] / binary_operation[name[p2_t] - name[p1_t]]] - binary_operation[binary_operation[constant[0.5] * name[d2]] * binary_operation[name[p2_t] - name[p1_t]]]] variable[d0] assign[=] name[y_log1] variable[y_log] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[d2] / constant[2.0]] * binary_operation[binary_operation[name[p_t] - name[p1_t]] ** constant[2.0]]] + binary_operation[name[d1] * binary_operation[name[p_t] - name[p1_t]]]] + name[d0]] variable[y] assign[=] binary_operation[call[name[math].exp, parameter[name[y_log]]] - binary_operation[call[name[float], parameter[name[r]]] / call[name[float], parameter[name[v]]]]] return[name[y]]
keyword[def] identifier[_interpolate_p] ( identifier[p] , identifier[r] , identifier[v] ): literal[string] identifier[p0] , identifier[p1] , identifier[p2] = identifier[_select_ps] ( identifier[p] ) keyword[try] : identifier[y0] = identifier[_func] ( identifier[A] [( identifier[p0] , identifier[v] )], identifier[p0] , identifier[r] , identifier[v] )+ literal[int] keyword[except] : identifier[print] ( identifier[p] , identifier[r] , identifier[v] ) identifier[y1] = identifier[_func] ( identifier[A] [( identifier[p1] , identifier[v] )], identifier[p1] , identifier[r] , identifier[v] )+ literal[int] identifier[y2] = identifier[_func] ( identifier[A] [( identifier[p2] , identifier[v] )], identifier[p2] , identifier[r] , identifier[v] )+ literal[int] identifier[y_log0] = identifier[math] . identifier[log] ( identifier[y0] + identifier[float] ( identifier[r] )/ identifier[float] ( identifier[v] )) identifier[y_log1] = identifier[math] . identifier[log] ( identifier[y1] + identifier[float] ( identifier[r] )/ identifier[float] ( identifier[v] )) identifier[y_log2] = identifier[math] . 
identifier[log] ( identifier[y2] + identifier[float] ( identifier[r] )/ identifier[float] ( identifier[v] )) keyword[if] identifier[p] > literal[int] : identifier[p_t] = identifier[_ptransform] ( identifier[p] ) identifier[p0_t] = identifier[_ptransform] ( identifier[p0] ) identifier[p1_t] = identifier[_ptransform] ( identifier[p1] ) identifier[p2_t] = identifier[_ptransform] ( identifier[p2] ) identifier[d2] = literal[int] *(( identifier[y_log2] - identifier[y_log1] )/( identifier[p2_t] - identifier[p1_t] )-( identifier[y_log1] - identifier[y_log0] )/( identifier[p1_t] - identifier[p0_t] ))/( identifier[p2_t] - identifier[p0_t] ) keyword[if] ( identifier[p2] + identifier[p0] )>=( identifier[p1] + identifier[p1] ): identifier[d1] =( identifier[y_log2] - identifier[y_log1] )/( identifier[p2_t] - identifier[p1_t] )- literal[int] * identifier[d2] *( identifier[p2_t] - identifier[p1_t] ) keyword[else] : identifier[d1] =( identifier[y_log1] - identifier[y_log0] )/( identifier[p1_t] - identifier[p0_t] )+ literal[int] * identifier[d2] *( identifier[p1_t] - identifier[p0_t] ) identifier[d0] = identifier[y_log1] identifier[y_log] =( identifier[d2] / literal[int] )*( identifier[p_t] - identifier[p1_t] )** literal[int] + identifier[d1] *( identifier[p_t] - identifier[p1_t] )+ identifier[d0] identifier[y] = identifier[math] . 
identifier[exp] ( identifier[y_log] )- identifier[float] ( identifier[r] )/ identifier[float] ( identifier[v] ) keyword[elif] identifier[p] > literal[int] : identifier[d2] = literal[int] *(( identifier[y_log2] - identifier[y_log1] )/( identifier[p2] - identifier[p1] )-( identifier[y_log1] - identifier[y_log0] )/( identifier[p1] - identifier[p0] ))/( identifier[p2] - identifier[p0] ) keyword[if] ( identifier[p2] + identifier[p0] )>=( identifier[p1] + identifier[p1] ): identifier[d1] =( identifier[y_log2] - identifier[y_log1] )/( identifier[p2] - identifier[p1] )- literal[int] * identifier[d2] *( identifier[p2] - identifier[p1] ) keyword[else] : identifier[d1] =( identifier[y_log1] - identifier[y_log0] )/( identifier[p1] - identifier[p0] )+ literal[int] * identifier[d2] *( identifier[p1] - identifier[p0] ) identifier[d0] = identifier[y_log1] identifier[y_log] =( identifier[d2] / literal[int] )*( identifier[p] - identifier[p1] )** literal[int] + identifier[d1] *( identifier[p] - identifier[p1] )+ identifier[d0] identifier[y] = identifier[math] . identifier[exp] ( identifier[y_log] )- identifier[float] ( identifier[r] )/ identifier[float] ( identifier[v] ) keyword[else] : identifier[q0] = identifier[math] . identifier[sqrt] ( literal[int] )*- identifier[y0] * identifier[scipy] . identifier[stats] . identifier[t] . identifier[isf] (( literal[int] + identifier[p0] )/ literal[int] , identifier[max] ( identifier[v] , literal[int] )) identifier[q1] = identifier[math] . identifier[sqrt] ( literal[int] )*- identifier[y1] * identifier[scipy] . identifier[stats] . identifier[t] . identifier[isf] (( literal[int] + identifier[p1] )/ literal[int] , identifier[max] ( identifier[v] , literal[int] )) identifier[d1] =( identifier[q1] - identifier[q0] )/( identifier[p1] - identifier[p0] ) identifier[d0] = identifier[q0] identifier[q] = identifier[d1] *( identifier[p] - identifier[p0] )+ identifier[d0] identifier[y] =- identifier[q] /( identifier[math] . 
identifier[sqrt] ( literal[int] )* identifier[scipy] . identifier[stats] . identifier[t] . identifier[isf] (( literal[int] + identifier[p] )/ literal[int] , identifier[max] ( identifier[v] , literal[int] ))) keyword[return] identifier[y]
def _interpolate_p(p, r, v): """ interpolates p based on the values in the A table for the scalar value of r and the scalar value of v """ # interpolate p (v should be in table) # if .5 < p < .75 use linear interpolation in q # if p > .75 use quadratic interpolation in log(y + r/v) # by -1. / (1. + 1.5 * _phi((1. + p)/2.)) # find the 3 closest v values (p0, p1, p2) = _select_ps(p) try: y0 = _func(A[p0, v], p0, r, v) + 1.0 # depends on [control=['try'], data=[]] except: print(p, r, v) # depends on [control=['except'], data=[]] y1 = _func(A[p1, v], p1, r, v) + 1.0 y2 = _func(A[p2, v], p2, r, v) + 1.0 y_log0 = math.log(y0 + float(r) / float(v)) y_log1 = math.log(y1 + float(r) / float(v)) y_log2 = math.log(y2 + float(r) / float(v)) # If p < .85 apply only the ordinate transformation # if p > .85 apply the ordinate and the abcissa transformation # In both cases apply quadratic interpolation if p > 0.85: p_t = _ptransform(p) p0_t = _ptransform(p0) p1_t = _ptransform(p1) p2_t = _ptransform(p2) # calculate derivatives for quadratic interpolation d2 = 2 * ((y_log2 - y_log1) / (p2_t - p1_t) - (y_log1 - y_log0) / (p1_t - p0_t)) / (p2_t - p0_t) if p2 + p0 >= p1 + p1: d1 = (y_log2 - y_log1) / (p2_t - p1_t) - 0.5 * d2 * (p2_t - p1_t) # depends on [control=['if'], data=[]] else: d1 = (y_log1 - y_log0) / (p1_t - p0_t) + 0.5 * d2 * (p1_t - p0_t) d0 = y_log1 # interpolate value y_log = d2 / 2.0 * (p_t - p1_t) ** 2.0 + d1 * (p_t - p1_t) + d0 # transform back to y y = math.exp(y_log) - float(r) / float(v) # depends on [control=['if'], data=['p']] elif p > 0.5: # calculate derivatives for quadratic interpolation d2 = 2 * ((y_log2 - y_log1) / (p2 - p1) - (y_log1 - y_log0) / (p1 - p0)) / (p2 - p0) if p2 + p0 >= p1 + p1: d1 = (y_log2 - y_log1) / (p2 - p1) - 0.5 * d2 * (p2 - p1) # depends on [control=['if'], data=[]] else: d1 = (y_log1 - y_log0) / (p1 - p0) + 0.5 * d2 * (p1 - p0) d0 = y_log1 # interpolate values y_log = d2 / 2.0 * (p - p1) ** 2.0 + d1 * (p - p1) + d0 # transform back to y 
y = math.exp(y_log) - float(r) / float(v) # depends on [control=['if'], data=['p']] else: # linear interpolation in q and p q0 = math.sqrt(2) * -y0 * scipy.stats.t.isf((1.0 + p0) / 2.0, max(v, 1e+38)) q1 = math.sqrt(2) * -y1 * scipy.stats.t.isf((1.0 + p1) / 2.0, max(v, 1e+38)) d1 = (q1 - q0) / (p1 - p0) d0 = q0 # interpolate values q = d1 * (p - p0) + d0 # transform back to y y = -q / (math.sqrt(2) * scipy.stats.t.isf((1.0 + p) / 2.0, max(v, 1e+38))) return y
def delete_running(self, timeout_last_refresh=0, dry_run=False): """Delete jobs stalled in the running state for too long timeout_last_refresh, int: number of seconds """ running_all = self.handle.jobs_running() running_timeout = [job for job in running_all if coarse_utcnow() > job["refresh_time"] + timedelta(seconds=timeout_last_refresh)] if len(running_timeout) == 0: # Nothing to stop self.refresh_tids(None) return None if dry_run: logger.warning("Dry run. Not removing anything.") logger.info("Removing {0}/{1} running jobs. # all jobs: {2} ". format(len(running_timeout), len(running_all), len(self))) now = coarse_utcnow() logger.info("Current utc time: {0}".format(now)) logger.info("Time horizont: {0}".format(now - timedelta(seconds=timeout_last_refresh))) for job in running_timeout: logger.info("Removing job: ") pjob = job.to_dict() del pjob["misc"] # ignore misc when printing logger.info(pprint.pformat(pjob)) if not dry_run: self.handle.delete(job) logger.info("Job deleted") self.refresh_tids(None)
def function[delete_running, parameter[self, timeout_last_refresh, dry_run]]: constant[Delete jobs stalled in the running state for too long timeout_last_refresh, int: number of seconds ] variable[running_all] assign[=] call[name[self].handle.jobs_running, parameter[]] variable[running_timeout] assign[=] <ast.ListComp object at 0x7da1b0549fc0> if compare[call[name[len], parameter[name[running_timeout]]] equal[==] constant[0]] begin[:] call[name[self].refresh_tids, parameter[constant[None]]] return[constant[None]] if name[dry_run] begin[:] call[name[logger].warning, parameter[constant[Dry run. Not removing anything.]]] call[name[logger].info, parameter[call[constant[Removing {0}/{1} running jobs. # all jobs: {2} ].format, parameter[call[name[len], parameter[name[running_timeout]]], call[name[len], parameter[name[running_all]]], call[name[len], parameter[name[self]]]]]]] variable[now] assign[=] call[name[coarse_utcnow], parameter[]] call[name[logger].info, parameter[call[constant[Current utc time: {0}].format, parameter[name[now]]]]] call[name[logger].info, parameter[call[constant[Time horizont: {0}].format, parameter[binary_operation[name[now] - call[name[timedelta], parameter[]]]]]]] for taget[name[job]] in starred[name[running_timeout]] begin[:] call[name[logger].info, parameter[constant[Removing job: ]]] variable[pjob] assign[=] call[name[job].to_dict, parameter[]] <ast.Delete object at 0x7da2047ea4d0> call[name[logger].info, parameter[call[name[pprint].pformat, parameter[name[pjob]]]]] if <ast.UnaryOp object at 0x7da2047e8be0> begin[:] call[name[self].handle.delete, parameter[name[job]]] call[name[logger].info, parameter[constant[Job deleted]]] call[name[self].refresh_tids, parameter[constant[None]]]
keyword[def] identifier[delete_running] ( identifier[self] , identifier[timeout_last_refresh] = literal[int] , identifier[dry_run] = keyword[False] ): literal[string] identifier[running_all] = identifier[self] . identifier[handle] . identifier[jobs_running] () identifier[running_timeout] =[ identifier[job] keyword[for] identifier[job] keyword[in] identifier[running_all] keyword[if] identifier[coarse_utcnow] ()> identifier[job] [ literal[string] ]+ identifier[timedelta] ( identifier[seconds] = identifier[timeout_last_refresh] )] keyword[if] identifier[len] ( identifier[running_timeout] )== literal[int] : identifier[self] . identifier[refresh_tids] ( keyword[None] ) keyword[return] keyword[None] keyword[if] identifier[dry_run] : identifier[logger] . identifier[warning] ( literal[string] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[len] ( identifier[running_timeout] ), identifier[len] ( identifier[running_all] ), identifier[len] ( identifier[self] ))) identifier[now] = identifier[coarse_utcnow] () identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[now] )) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[now] - identifier[timedelta] ( identifier[seconds] = identifier[timeout_last_refresh] ))) keyword[for] identifier[job] keyword[in] identifier[running_timeout] : identifier[logger] . identifier[info] ( literal[string] ) identifier[pjob] = identifier[job] . identifier[to_dict] () keyword[del] identifier[pjob] [ literal[string] ] identifier[logger] . identifier[info] ( identifier[pprint] . identifier[pformat] ( identifier[pjob] )) keyword[if] keyword[not] identifier[dry_run] : identifier[self] . identifier[handle] . identifier[delete] ( identifier[job] ) identifier[logger] . identifier[info] ( literal[string] ) identifier[self] . identifier[refresh_tids] ( keyword[None] )
def delete_running(self, timeout_last_refresh=0, dry_run=False): """Delete jobs stalled in the running state for too long timeout_last_refresh, int: number of seconds """ running_all = self.handle.jobs_running() running_timeout = [job for job in running_all if coarse_utcnow() > job['refresh_time'] + timedelta(seconds=timeout_last_refresh)] if len(running_timeout) == 0: # Nothing to stop self.refresh_tids(None) return None # depends on [control=['if'], data=[]] if dry_run: logger.warning('Dry run. Not removing anything.') # depends on [control=['if'], data=[]] logger.info('Removing {0}/{1} running jobs. # all jobs: {2} '.format(len(running_timeout), len(running_all), len(self))) now = coarse_utcnow() logger.info('Current utc time: {0}'.format(now)) logger.info('Time horizont: {0}'.format(now - timedelta(seconds=timeout_last_refresh))) for job in running_timeout: logger.info('Removing job: ') pjob = job.to_dict() del pjob['misc'] # ignore misc when printing logger.info(pprint.pformat(pjob)) if not dry_run: self.handle.delete(job) logger.info('Job deleted') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['job']] self.refresh_tids(None)
def debugfile(filename, args=None, wdir=None, post_mortem=False): """ Debug filename args: command line arguments (string) wdir: working directory post_mortem: boolean, included for compatiblity with runfile """ debugger = pdb.Pdb() filename = debugger.canonic(filename) debugger._wait_for_mainpyfile = 1 debugger.mainpyfile = filename debugger._user_requested_quit = 0 if os.name == 'nt': filename = filename.replace('\\', '/') debugger.run("runfile(%r, args=%r, wdir=%r)" % (filename, args, wdir))
def function[debugfile, parameter[filename, args, wdir, post_mortem]]: constant[ Debug filename args: command line arguments (string) wdir: working directory post_mortem: boolean, included for compatiblity with runfile ] variable[debugger] assign[=] call[name[pdb].Pdb, parameter[]] variable[filename] assign[=] call[name[debugger].canonic, parameter[name[filename]]] name[debugger]._wait_for_mainpyfile assign[=] constant[1] name[debugger].mainpyfile assign[=] name[filename] name[debugger]._user_requested_quit assign[=] constant[0] if compare[name[os].name equal[==] constant[nt]] begin[:] variable[filename] assign[=] call[name[filename].replace, parameter[constant[\], constant[/]]] call[name[debugger].run, parameter[binary_operation[constant[runfile(%r, args=%r, wdir=%r)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0593d60>, <ast.Name object at 0x7da1b0590250>, <ast.Name object at 0x7da1b0593370>]]]]]
keyword[def] identifier[debugfile] ( identifier[filename] , identifier[args] = keyword[None] , identifier[wdir] = keyword[None] , identifier[post_mortem] = keyword[False] ): literal[string] identifier[debugger] = identifier[pdb] . identifier[Pdb] () identifier[filename] = identifier[debugger] . identifier[canonic] ( identifier[filename] ) identifier[debugger] . identifier[_wait_for_mainpyfile] = literal[int] identifier[debugger] . identifier[mainpyfile] = identifier[filename] identifier[debugger] . identifier[_user_requested_quit] = literal[int] keyword[if] identifier[os] . identifier[name] == literal[string] : identifier[filename] = identifier[filename] . identifier[replace] ( literal[string] , literal[string] ) identifier[debugger] . identifier[run] ( literal[string] %( identifier[filename] , identifier[args] , identifier[wdir] ))
def debugfile(filename, args=None, wdir=None, post_mortem=False): """ Debug filename args: command line arguments (string) wdir: working directory post_mortem: boolean, included for compatiblity with runfile """ debugger = pdb.Pdb() filename = debugger.canonic(filename) debugger._wait_for_mainpyfile = 1 debugger.mainpyfile = filename debugger._user_requested_quit = 0 if os.name == 'nt': filename = filename.replace('\\', '/') # depends on [control=['if'], data=[]] debugger.run('runfile(%r, args=%r, wdir=%r)' % (filename, args, wdir))
def homology_report(seq1, seq2, strand1, strand2, cutoff=0, min_tm=63.0, top_two=False, max_size=500): '''Given two sequences (seq1 and seq2), report the size of all perfect matches between the 3' end of the top strand of seq1 and the 3' end of either strand of seq2. In short, in a Gibson reaction, what would bind the desired part of seq1, given a seq2? :param seq1: Sequence for which to test 3\' binding of a single strand to seq2. :type seq1: coral.DNA :param seq2: Sequence for which to test 3\' binding of each strand to seq1. :type seq1: coral.DNA :param strand1: w (watson) or c (crick) - which strand of seq1 is being tested. :type strand1ed: str :param strand2: w (watson) or c (crick) - which strand of seq2 is being tested. :type strand2ed: str :param cutoff: size cutoff for the report - if a match is lower, it's ignored :type cutoff: int :param min_tm: Minimum tm value cutoff - matches below are ignored. :type min_tm: float :param top_two: Return the best two matches :type top_two: bool :param max_size: Maximum overlap size (increases speed) :type max_size: int :returns: List of left and right identities. :rtype: list of ints ''' # Ensure that strand 1 is Watson and strand 2 is Crick if strand1 == 'c': seq1 = seq1.reverse_complement() if strand2 == 'w': seq2 = seq2.reverse_complement() # Generate all same-length 5' ends of seq1 and 3' ends of seq2 within # maximum homology length # TODO: If strings aren't used here, gen_chunks takes forever. Suggests a # need to optimize coral.DNA subsetting seq1_str = str(seq1) seq2_str = str(seq2) def gen_chunks(s1, s2): chunks1 = [seq1_str[-(i + 1):] for i in range(min(len(seq1_str), max_size))] chunks2 = [seq2_str[:(i + 1)] for i in range(min(len(seq2_str), max_size))] return chunks1, chunks2 seq1_chunks, seq2_chunks = gen_chunks(seq1_str, seq2_str) # Check for exact matches from terminal end to terminal end target_matches = [] for i, (s1, s2) in enumerate(zip(seq1_chunks, seq2_chunks)): s1len = len(s1) # Inefficient! 
(reverse complementing a bunch of times) # Don't calculate tm once base tm has been reached. # TODO: Go through logic here again and make sure the order of checking # makes sense if s1 == s2: logger.debug('Found Match: {}'.format(str(s1))) if s1len >= cutoff: tm = coral.analysis.tm(seq1[-(i + 1):]) logger.debug('Match tm: {} C'.format(tm)) if tm >= min_tm: target_matches.append(s1len) elif tm >= min_tm - 4: msg = 'One overlap had a Tm of {} C.'.format(tm) warnings.warn(msg) target_matches.append(s1len) target_matches.sort() if not top_two: return 0 if not target_matches else target_matches[0] else: return 0 if not target_matches else target_matches[0:2]
def function[homology_report, parameter[seq1, seq2, strand1, strand2, cutoff, min_tm, top_two, max_size]]: constant[Given two sequences (seq1 and seq2), report the size of all perfect matches between the 3' end of the top strand of seq1 and the 3' end of either strand of seq2. In short, in a Gibson reaction, what would bind the desired part of seq1, given a seq2? :param seq1: Sequence for which to test 3' binding of a single strand to seq2. :type seq1: coral.DNA :param seq2: Sequence for which to test 3' binding of each strand to seq1. :type seq1: coral.DNA :param strand1: w (watson) or c (crick) - which strand of seq1 is being tested. :type strand1ed: str :param strand2: w (watson) or c (crick) - which strand of seq2 is being tested. :type strand2ed: str :param cutoff: size cutoff for the report - if a match is lower, it's ignored :type cutoff: int :param min_tm: Minimum tm value cutoff - matches below are ignored. :type min_tm: float :param top_two: Return the best two matches :type top_two: bool :param max_size: Maximum overlap size (increases speed) :type max_size: int :returns: List of left and right identities. 
:rtype: list of ints ] if compare[name[strand1] equal[==] constant[c]] begin[:] variable[seq1] assign[=] call[name[seq1].reverse_complement, parameter[]] if compare[name[strand2] equal[==] constant[w]] begin[:] variable[seq2] assign[=] call[name[seq2].reverse_complement, parameter[]] variable[seq1_str] assign[=] call[name[str], parameter[name[seq1]]] variable[seq2_str] assign[=] call[name[str], parameter[name[seq2]]] def function[gen_chunks, parameter[s1, s2]]: variable[chunks1] assign[=] <ast.ListComp object at 0x7da1b057a830> variable[chunks2] assign[=] <ast.ListComp object at 0x7da1b057a680> return[tuple[[<ast.Name object at 0x7da1b0578280>, <ast.Name object at 0x7da1b05782e0>]]] <ast.Tuple object at 0x7da1b05781f0> assign[=] call[name[gen_chunks], parameter[name[seq1_str], name[seq2_str]]] variable[target_matches] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b057be80>, <ast.Tuple object at 0x7da1b057beb0>]]] in starred[call[name[enumerate], parameter[call[name[zip], parameter[name[seq1_chunks], name[seq2_chunks]]]]]] begin[:] variable[s1len] assign[=] call[name[len], parameter[name[s1]]] if compare[name[s1] equal[==] name[s2]] begin[:] call[name[logger].debug, parameter[call[constant[Found Match: {}].format, parameter[call[name[str], parameter[name[s1]]]]]]] if compare[name[s1len] greater_or_equal[>=] name[cutoff]] begin[:] variable[tm] assign[=] call[name[coral].analysis.tm, parameter[call[name[seq1]][<ast.Slice object at 0x7da1b057a950>]]] call[name[logger].debug, parameter[call[constant[Match tm: {} C].format, parameter[name[tm]]]]] if compare[name[tm] greater_or_equal[>=] name[min_tm]] begin[:] call[name[target_matches].append, parameter[name[s1len]]] call[name[target_matches].sort, parameter[]] if <ast.UnaryOp object at 0x7da1b0547850> begin[:] return[<ast.IfExp object at 0x7da1b0524040>]
keyword[def] identifier[homology_report] ( identifier[seq1] , identifier[seq2] , identifier[strand1] , identifier[strand2] , identifier[cutoff] = literal[int] , identifier[min_tm] = literal[int] , identifier[top_two] = keyword[False] , identifier[max_size] = literal[int] ): literal[string] keyword[if] identifier[strand1] == literal[string] : identifier[seq1] = identifier[seq1] . identifier[reverse_complement] () keyword[if] identifier[strand2] == literal[string] : identifier[seq2] = identifier[seq2] . identifier[reverse_complement] () identifier[seq1_str] = identifier[str] ( identifier[seq1] ) identifier[seq2_str] = identifier[str] ( identifier[seq2] ) keyword[def] identifier[gen_chunks] ( identifier[s1] , identifier[s2] ): identifier[chunks1] =[ identifier[seq1_str] [-( identifier[i] + literal[int] ):] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[min] ( identifier[len] ( identifier[seq1_str] ), identifier[max_size] ))] identifier[chunks2] =[ identifier[seq2_str] [:( identifier[i] + literal[int] )] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[min] ( identifier[len] ( identifier[seq2_str] ), identifier[max_size] ))] keyword[return] identifier[chunks1] , identifier[chunks2] identifier[seq1_chunks] , identifier[seq2_chunks] = identifier[gen_chunks] ( identifier[seq1_str] , identifier[seq2_str] ) identifier[target_matches] =[] keyword[for] identifier[i] ,( identifier[s1] , identifier[s2] ) keyword[in] identifier[enumerate] ( identifier[zip] ( identifier[seq1_chunks] , identifier[seq2_chunks] )): identifier[s1len] = identifier[len] ( identifier[s1] ) keyword[if] identifier[s1] == identifier[s2] : identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[str] ( identifier[s1] ))) keyword[if] identifier[s1len] >= identifier[cutoff] : identifier[tm] = identifier[coral] . identifier[analysis] . identifier[tm] ( identifier[seq1] [-( identifier[i] + literal[int] ):]) identifier[logger] . 
identifier[debug] ( literal[string] . identifier[format] ( identifier[tm] )) keyword[if] identifier[tm] >= identifier[min_tm] : identifier[target_matches] . identifier[append] ( identifier[s1len] ) keyword[elif] identifier[tm] >= identifier[min_tm] - literal[int] : identifier[msg] = literal[string] . identifier[format] ( identifier[tm] ) identifier[warnings] . identifier[warn] ( identifier[msg] ) identifier[target_matches] . identifier[append] ( identifier[s1len] ) identifier[target_matches] . identifier[sort] () keyword[if] keyword[not] identifier[top_two] : keyword[return] literal[int] keyword[if] keyword[not] identifier[target_matches] keyword[else] identifier[target_matches] [ literal[int] ] keyword[else] : keyword[return] literal[int] keyword[if] keyword[not] identifier[target_matches] keyword[else] identifier[target_matches] [ literal[int] : literal[int] ]
def homology_report(seq1, seq2, strand1, strand2, cutoff=0, min_tm=63.0, top_two=False, max_size=500): """Given two sequences (seq1 and seq2), report the size of all perfect matches between the 3' end of the top strand of seq1 and the 3' end of either strand of seq2. In short, in a Gibson reaction, what would bind the desired part of seq1, given a seq2? :param seq1: Sequence for which to test 3' binding of a single strand to seq2. :type seq1: coral.DNA :param seq2: Sequence for which to test 3' binding of each strand to seq1. :type seq1: coral.DNA :param strand1: w (watson) or c (crick) - which strand of seq1 is being tested. :type strand1ed: str :param strand2: w (watson) or c (crick) - which strand of seq2 is being tested. :type strand2ed: str :param cutoff: size cutoff for the report - if a match is lower, it's ignored :type cutoff: int :param min_tm: Minimum tm value cutoff - matches below are ignored. :type min_tm: float :param top_two: Return the best two matches :type top_two: bool :param max_size: Maximum overlap size (increases speed) :type max_size: int :returns: List of left and right identities. :rtype: list of ints """ # Ensure that strand 1 is Watson and strand 2 is Crick if strand1 == 'c': seq1 = seq1.reverse_complement() # depends on [control=['if'], data=[]] if strand2 == 'w': seq2 = seq2.reverse_complement() # depends on [control=['if'], data=[]] # Generate all same-length 5' ends of seq1 and 3' ends of seq2 within # maximum homology length # TODO: If strings aren't used here, gen_chunks takes forever. 
Suggests a # need to optimize coral.DNA subsetting seq1_str = str(seq1) seq2_str = str(seq2) def gen_chunks(s1, s2): chunks1 = [seq1_str[-(i + 1):] for i in range(min(len(seq1_str), max_size))] chunks2 = [seq2_str[:i + 1] for i in range(min(len(seq2_str), max_size))] return (chunks1, chunks2) (seq1_chunks, seq2_chunks) = gen_chunks(seq1_str, seq2_str) # Check for exact matches from terminal end to terminal end target_matches = [] for (i, (s1, s2)) in enumerate(zip(seq1_chunks, seq2_chunks)): s1len = len(s1) # Inefficient! (reverse complementing a bunch of times) # Don't calculate tm once base tm has been reached. # TODO: Go through logic here again and make sure the order of checking # makes sense if s1 == s2: logger.debug('Found Match: {}'.format(str(s1))) if s1len >= cutoff: tm = coral.analysis.tm(seq1[-(i + 1):]) logger.debug('Match tm: {} C'.format(tm)) if tm >= min_tm: target_matches.append(s1len) # depends on [control=['if'], data=[]] elif tm >= min_tm - 4: msg = 'One overlap had a Tm of {} C.'.format(tm) warnings.warn(msg) target_matches.append(s1len) # depends on [control=['if'], data=['tm']] # depends on [control=['if'], data=['s1len']] # depends on [control=['if'], data=['s1']] # depends on [control=['for'], data=[]] target_matches.sort() if not top_two: return 0 if not target_matches else target_matches[0] # depends on [control=['if'], data=[]] else: return 0 if not target_matches else target_matches[0:2]
def update_payload(self, fields=None): """Wrap payload in ``os_default_template`` relates to `Redmine #21169`_. .. _Redmine #21169: http://projects.theforeman.org/issues/21169 """ payload = super(OSDefaultTemplate, self).update_payload(fields) return {'os_default_template': payload}
def function[update_payload, parameter[self, fields]]: constant[Wrap payload in ``os_default_template`` relates to `Redmine #21169`_. .. _Redmine #21169: http://projects.theforeman.org/issues/21169 ] variable[payload] assign[=] call[call[name[super], parameter[name[OSDefaultTemplate], name[self]]].update_payload, parameter[name[fields]]] return[dictionary[[<ast.Constant object at 0x7da18bcca7d0>], [<ast.Name object at 0x7da18bcc8610>]]]
keyword[def] identifier[update_payload] ( identifier[self] , identifier[fields] = keyword[None] ): literal[string] identifier[payload] = identifier[super] ( identifier[OSDefaultTemplate] , identifier[self] ). identifier[update_payload] ( identifier[fields] ) keyword[return] { literal[string] : identifier[payload] }
def update_payload(self, fields=None): """Wrap payload in ``os_default_template`` relates to `Redmine #21169`_. .. _Redmine #21169: http://projects.theforeman.org/issues/21169 """ payload = super(OSDefaultTemplate, self).update_payload(fields) return {'os_default_template': payload}
def __init_xml(self, rootElementTag): """Init a etree element and pop a key in there""" xml_root = etree.Element(rootElementTag) key = etree.SubElement(xml_root, "Key") key.text = self.apikey return xml_root
def function[__init_xml, parameter[self, rootElementTag]]: constant[Init a etree element and pop a key in there] variable[xml_root] assign[=] call[name[etree].Element, parameter[name[rootElementTag]]] variable[key] assign[=] call[name[etree].SubElement, parameter[name[xml_root], constant[Key]]] name[key].text assign[=] name[self].apikey return[name[xml_root]]
keyword[def] identifier[__init_xml] ( identifier[self] , identifier[rootElementTag] ): literal[string] identifier[xml_root] = identifier[etree] . identifier[Element] ( identifier[rootElementTag] ) identifier[key] = identifier[etree] . identifier[SubElement] ( identifier[xml_root] , literal[string] ) identifier[key] . identifier[text] = identifier[self] . identifier[apikey] keyword[return] identifier[xml_root]
def __init_xml(self, rootElementTag): """Init a etree element and pop a key in there""" xml_root = etree.Element(rootElementTag) key = etree.SubElement(xml_root, 'Key') key.text = self.apikey return xml_root
def create_spectrum_from_dict(spectrum_type, spectral_pars, fn=None): """Create a Function object from a parameter dictionary. Parameters ---------- spectrum_type : str String identifying the spectrum type (e.g. PowerLaw). spectral_pars : dict Dictionary of spectral parameters. """ if fn is None: fn = pyLike.SourceFactory_funcFactory().create(str(spectrum_type)) if spectrum_type == 'PiecewisePowerLaw': build_piecewise_powerlaw(fn, spectral_pars) for k, v in spectral_pars.items(): v.setdefault('scale', 1.0) v.setdefault('min', v['value'] * 1E-3) v.setdefault('max', v['value'] * 1E3) par = fn.getParam(str(k)) vmin = min(float(v['value']), float(v['min'])) vmax = max(float(v['value']), float(v['max'])) par.setValue(float(v['value'])) par.setBounds(vmin, vmax) par.setScale(float(v['scale'])) if 'free' in v and int(v['free']) != 0: par.setFree(True) else: par.setFree(False) fn.setParam(par) return fn
def function[create_spectrum_from_dict, parameter[spectrum_type, spectral_pars, fn]]: constant[Create a Function object from a parameter dictionary. Parameters ---------- spectrum_type : str String identifying the spectrum type (e.g. PowerLaw). spectral_pars : dict Dictionary of spectral parameters. ] if compare[name[fn] is constant[None]] begin[:] variable[fn] assign[=] call[call[name[pyLike].SourceFactory_funcFactory, parameter[]].create, parameter[call[name[str], parameter[name[spectrum_type]]]]] if compare[name[spectrum_type] equal[==] constant[PiecewisePowerLaw]] begin[:] call[name[build_piecewise_powerlaw], parameter[name[fn], name[spectral_pars]]] for taget[tuple[[<ast.Name object at 0x7da20c7cac20>, <ast.Name object at 0x7da20c7c8940>]]] in starred[call[name[spectral_pars].items, parameter[]]] begin[:] call[name[v].setdefault, parameter[constant[scale], constant[1.0]]] call[name[v].setdefault, parameter[constant[min], binary_operation[call[name[v]][constant[value]] * constant[0.001]]]] call[name[v].setdefault, parameter[constant[max], binary_operation[call[name[v]][constant[value]] * constant[1000.0]]]] variable[par] assign[=] call[name[fn].getParam, parameter[call[name[str], parameter[name[k]]]]] variable[vmin] assign[=] call[name[min], parameter[call[name[float], parameter[call[name[v]][constant[value]]]], call[name[float], parameter[call[name[v]][constant[min]]]]]] variable[vmax] assign[=] call[name[max], parameter[call[name[float], parameter[call[name[v]][constant[value]]]], call[name[float], parameter[call[name[v]][constant[max]]]]]] call[name[par].setValue, parameter[call[name[float], parameter[call[name[v]][constant[value]]]]]] call[name[par].setBounds, parameter[name[vmin], name[vmax]]] call[name[par].setScale, parameter[call[name[float], parameter[call[name[v]][constant[scale]]]]]] if <ast.BoolOp object at 0x7da20c7ca7a0> begin[:] call[name[par].setFree, parameter[constant[True]]] call[name[fn].setParam, parameter[name[par]]] return[name[fn]]
keyword[def] identifier[create_spectrum_from_dict] ( identifier[spectrum_type] , identifier[spectral_pars] , identifier[fn] = keyword[None] ): literal[string] keyword[if] identifier[fn] keyword[is] keyword[None] : identifier[fn] = identifier[pyLike] . identifier[SourceFactory_funcFactory] (). identifier[create] ( identifier[str] ( identifier[spectrum_type] )) keyword[if] identifier[spectrum_type] == literal[string] : identifier[build_piecewise_powerlaw] ( identifier[fn] , identifier[spectral_pars] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[spectral_pars] . identifier[items] (): identifier[v] . identifier[setdefault] ( literal[string] , literal[int] ) identifier[v] . identifier[setdefault] ( literal[string] , identifier[v] [ literal[string] ]* literal[int] ) identifier[v] . identifier[setdefault] ( literal[string] , identifier[v] [ literal[string] ]* literal[int] ) identifier[par] = identifier[fn] . identifier[getParam] ( identifier[str] ( identifier[k] )) identifier[vmin] = identifier[min] ( identifier[float] ( identifier[v] [ literal[string] ]), identifier[float] ( identifier[v] [ literal[string] ])) identifier[vmax] = identifier[max] ( identifier[float] ( identifier[v] [ literal[string] ]), identifier[float] ( identifier[v] [ literal[string] ])) identifier[par] . identifier[setValue] ( identifier[float] ( identifier[v] [ literal[string] ])) identifier[par] . identifier[setBounds] ( identifier[vmin] , identifier[vmax] ) identifier[par] . identifier[setScale] ( identifier[float] ( identifier[v] [ literal[string] ])) keyword[if] literal[string] keyword[in] identifier[v] keyword[and] identifier[int] ( identifier[v] [ literal[string] ])!= literal[int] : identifier[par] . identifier[setFree] ( keyword[True] ) keyword[else] : identifier[par] . identifier[setFree] ( keyword[False] ) identifier[fn] . identifier[setParam] ( identifier[par] ) keyword[return] identifier[fn]
def create_spectrum_from_dict(spectrum_type, spectral_pars, fn=None): """Create a Function object from a parameter dictionary. Parameters ---------- spectrum_type : str String identifying the spectrum type (e.g. PowerLaw). spectral_pars : dict Dictionary of spectral parameters. """ if fn is None: fn = pyLike.SourceFactory_funcFactory().create(str(spectrum_type)) # depends on [control=['if'], data=['fn']] if spectrum_type == 'PiecewisePowerLaw': build_piecewise_powerlaw(fn, spectral_pars) # depends on [control=['if'], data=[]] for (k, v) in spectral_pars.items(): v.setdefault('scale', 1.0) v.setdefault('min', v['value'] * 0.001) v.setdefault('max', v['value'] * 1000.0) par = fn.getParam(str(k)) vmin = min(float(v['value']), float(v['min'])) vmax = max(float(v['value']), float(v['max'])) par.setValue(float(v['value'])) par.setBounds(vmin, vmax) par.setScale(float(v['scale'])) if 'free' in v and int(v['free']) != 0: par.setFree(True) # depends on [control=['if'], data=[]] else: par.setFree(False) fn.setParam(par) # depends on [control=['for'], data=[]] return fn
def resolve_requirements(self, interpreter, req_libs): """Requirements resolution for PEX files. :param interpreter: Resolve against this :class:`PythonInterpreter`. :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve. :returns: a PEX containing target requirements and any specified python dist targets. """ with self.invalidated(req_libs) as invalidation_check: # If there are no relevant targets, we still go through the motions of resolving # an empty set of requirements, to prevent downstream tasks from having to check # for this special case. if invalidation_check.all_vts: target_set_id = VersionedTargetSet.from_versioned_targets( invalidation_check.all_vts).cache_key.hash else: target_set_id = 'no_targets' # We need to ensure that we are resolving for only the current platform if we are # including local python dist targets that have native extensions. targets_by_platform = pex_build_util.targets_by_platform(self.context.targets(), self._python_setup) if self._python_native_code_settings.check_build_for_current_platform_only(targets_by_platform): platforms = ['current'] else: platforms = list(sorted(targets_by_platform.keys())) path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id)) # Note that we check for the existence of the directory, instead of for invalid_vts, # to cover the empty case. if not os.path.isdir(path): with safe_concurrent_creation(path) as safe_path: pex_builder = PexBuilderWrapper.Factory.create( builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True), log=self.context.log) pex_builder.add_requirement_libs_from(req_libs, platforms=platforms) pex_builder.freeze() return PEX(path, interpreter=interpreter)
def function[resolve_requirements, parameter[self, interpreter, req_libs]]: constant[Requirements resolution for PEX files. :param interpreter: Resolve against this :class:`PythonInterpreter`. :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve. :returns: a PEX containing target requirements and any specified python dist targets. ] with call[name[self].invalidated, parameter[name[req_libs]]] begin[:] if name[invalidation_check].all_vts begin[:] variable[target_set_id] assign[=] call[name[VersionedTargetSet].from_versioned_targets, parameter[name[invalidation_check].all_vts]].cache_key.hash variable[targets_by_platform] assign[=] call[name[pex_build_util].targets_by_platform, parameter[call[name[self].context.targets, parameter[]], name[self]._python_setup]] if call[name[self]._python_native_code_settings.check_build_for_current_platform_only, parameter[name[targets_by_platform]]] begin[:] variable[platforms] assign[=] list[[<ast.Constant object at 0x7da1b2249b40>]] variable[path] assign[=] call[name[os].path.realpath, parameter[call[name[os].path.join, parameter[name[self].workdir, call[name[str], parameter[name[interpreter].identity]], name[target_set_id]]]]] if <ast.UnaryOp object at 0x7da1b224a4a0> begin[:] with call[name[safe_concurrent_creation], parameter[name[path]]] begin[:] variable[pex_builder] assign[=] call[name[PexBuilderWrapper].Factory.create, parameter[]] call[name[pex_builder].add_requirement_libs_from, parameter[name[req_libs]]] call[name[pex_builder].freeze, parameter[]] return[call[name[PEX], parameter[name[path]]]]
keyword[def] identifier[resolve_requirements] ( identifier[self] , identifier[interpreter] , identifier[req_libs] ): literal[string] keyword[with] identifier[self] . identifier[invalidated] ( identifier[req_libs] ) keyword[as] identifier[invalidation_check] : keyword[if] identifier[invalidation_check] . identifier[all_vts] : identifier[target_set_id] = identifier[VersionedTargetSet] . identifier[from_versioned_targets] ( identifier[invalidation_check] . identifier[all_vts] ). identifier[cache_key] . identifier[hash] keyword[else] : identifier[target_set_id] = literal[string] identifier[targets_by_platform] = identifier[pex_build_util] . identifier[targets_by_platform] ( identifier[self] . identifier[context] . identifier[targets] (), identifier[self] . identifier[_python_setup] ) keyword[if] identifier[self] . identifier[_python_native_code_settings] . identifier[check_build_for_current_platform_only] ( identifier[targets_by_platform] ): identifier[platforms] =[ literal[string] ] keyword[else] : identifier[platforms] = identifier[list] ( identifier[sorted] ( identifier[targets_by_platform] . identifier[keys] ())) identifier[path] = identifier[os] . identifier[path] . identifier[realpath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[workdir] , identifier[str] ( identifier[interpreter] . identifier[identity] ), identifier[target_set_id] )) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[path] ): keyword[with] identifier[safe_concurrent_creation] ( identifier[path] ) keyword[as] identifier[safe_path] : identifier[pex_builder] = identifier[PexBuilderWrapper] . identifier[Factory] . identifier[create] ( identifier[builder] = identifier[PEXBuilder] ( identifier[path] = identifier[safe_path] , identifier[interpreter] = identifier[interpreter] , identifier[copy] = keyword[True] ), identifier[log] = identifier[self] . identifier[context] . identifier[log] ) identifier[pex_builder] . 
identifier[add_requirement_libs_from] ( identifier[req_libs] , identifier[platforms] = identifier[platforms] ) identifier[pex_builder] . identifier[freeze] () keyword[return] identifier[PEX] ( identifier[path] , identifier[interpreter] = identifier[interpreter] )
def resolve_requirements(self, interpreter, req_libs): """Requirements resolution for PEX files. :param interpreter: Resolve against this :class:`PythonInterpreter`. :param req_libs: A list of :class:`PythonRequirementLibrary` targets to resolve. :returns: a PEX containing target requirements and any specified python dist targets. """ with self.invalidated(req_libs) as invalidation_check: # If there are no relevant targets, we still go through the motions of resolving # an empty set of requirements, to prevent downstream tasks from having to check # for this special case. if invalidation_check.all_vts: target_set_id = VersionedTargetSet.from_versioned_targets(invalidation_check.all_vts).cache_key.hash # depends on [control=['if'], data=[]] else: target_set_id = 'no_targets' # We need to ensure that we are resolving for only the current platform if we are # including local python dist targets that have native extensions. targets_by_platform = pex_build_util.targets_by_platform(self.context.targets(), self._python_setup) if self._python_native_code_settings.check_build_for_current_platform_only(targets_by_platform): platforms = ['current'] # depends on [control=['if'], data=[]] else: platforms = list(sorted(targets_by_platform.keys())) path = os.path.realpath(os.path.join(self.workdir, str(interpreter.identity), target_set_id)) # Note that we check for the existence of the directory, instead of for invalid_vts, # to cover the empty case. if not os.path.isdir(path): with safe_concurrent_creation(path) as safe_path: pex_builder = PexBuilderWrapper.Factory.create(builder=PEXBuilder(path=safe_path, interpreter=interpreter, copy=True), log=self.context.log) pex_builder.add_requirement_libs_from(req_libs, platforms=platforms) pex_builder.freeze() # depends on [control=['with'], data=['safe_path']] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['invalidation_check']] return PEX(path, interpreter=interpreter)
def get_country_long(self, ip): ''' Get country_long ''' rec = self.get_all(ip) return rec and rec.country_long
def function[get_country_long, parameter[self, ip]]: constant[ Get country_long ] variable[rec] assign[=] call[name[self].get_all, parameter[name[ip]]] return[<ast.BoolOp object at 0x7da1b0e25a80>]
keyword[def] identifier[get_country_long] ( identifier[self] , identifier[ip] ): literal[string] identifier[rec] = identifier[self] . identifier[get_all] ( identifier[ip] ) keyword[return] identifier[rec] keyword[and] identifier[rec] . identifier[country_long]
def get_country_long(self, ip): """ Get country_long """ rec = self.get_all(ip) return rec and rec.country_long
def patch_discriminator(x, filters=64, filter_size=5, n=4, name="patch_discrim"): """Patch descriminator.""" with tf.variable_scope(name): x_shape = shape_list(x) spatial_dims = [x_shape[1] // 4, x_shape[2] // 4] x = tf.random_crop(x, [x_shape[0]] + spatial_dims + [x_shape[3]]) for i in range(n): x = general_conv( x=x, num_filters=filters * 2**i, filter_size=filter_size, stride=2 if i != n - 1 else 1, stddev=0.02, padding="SAME", name="c%d" % i, do_norm="instance" if i != 0 else False, do_relu=i != n - 1, relufactor=0.2) x = tf.reduce_mean(x, [1, 2]) return x
def function[patch_discriminator, parameter[x, filters, filter_size, n, name]]: constant[Patch descriminator.] with call[name[tf].variable_scope, parameter[name[name]]] begin[:] variable[x_shape] assign[=] call[name[shape_list], parameter[name[x]]] variable[spatial_dims] assign[=] list[[<ast.BinOp object at 0x7da1b2059a50>, <ast.BinOp object at 0x7da1b2059150>]] variable[x] assign[=] call[name[tf].random_crop, parameter[name[x], binary_operation[binary_operation[list[[<ast.Subscript object at 0x7da20cabcbe0>]] + name[spatial_dims]] + list[[<ast.Subscript object at 0x7da20cabce20>]]]]] for taget[name[i]] in starred[call[name[range], parameter[name[n]]]] begin[:] variable[x] assign[=] call[name[general_conv], parameter[]] variable[x] assign[=] call[name[tf].reduce_mean, parameter[name[x], list[[<ast.Constant object at 0x7da1b201e470>, <ast.Constant object at 0x7da1b201c370>]]]] return[name[x]]
keyword[def] identifier[patch_discriminator] ( identifier[x] , identifier[filters] = literal[int] , identifier[filter_size] = literal[int] , identifier[n] = literal[int] , identifier[name] = literal[string] ): literal[string] keyword[with] identifier[tf] . identifier[variable_scope] ( identifier[name] ): identifier[x_shape] = identifier[shape_list] ( identifier[x] ) identifier[spatial_dims] =[ identifier[x_shape] [ literal[int] ]// literal[int] , identifier[x_shape] [ literal[int] ]// literal[int] ] identifier[x] = identifier[tf] . identifier[random_crop] ( identifier[x] ,[ identifier[x_shape] [ literal[int] ]]+ identifier[spatial_dims] +[ identifier[x_shape] [ literal[int] ]]) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n] ): identifier[x] = identifier[general_conv] ( identifier[x] = identifier[x] , identifier[num_filters] = identifier[filters] * literal[int] ** identifier[i] , identifier[filter_size] = identifier[filter_size] , identifier[stride] = literal[int] keyword[if] identifier[i] != identifier[n] - literal[int] keyword[else] literal[int] , identifier[stddev] = literal[int] , identifier[padding] = literal[string] , identifier[name] = literal[string] % identifier[i] , identifier[do_norm] = literal[string] keyword[if] identifier[i] != literal[int] keyword[else] keyword[False] , identifier[do_relu] = identifier[i] != identifier[n] - literal[int] , identifier[relufactor] = literal[int] ) identifier[x] = identifier[tf] . identifier[reduce_mean] ( identifier[x] ,[ literal[int] , literal[int] ]) keyword[return] identifier[x]
def patch_discriminator(x, filters=64, filter_size=5, n=4, name='patch_discrim'): """Patch descriminator.""" with tf.variable_scope(name): x_shape = shape_list(x) spatial_dims = [x_shape[1] // 4, x_shape[2] // 4] x = tf.random_crop(x, [x_shape[0]] + spatial_dims + [x_shape[3]]) for i in range(n): x = general_conv(x=x, num_filters=filters * 2 ** i, filter_size=filter_size, stride=2 if i != n - 1 else 1, stddev=0.02, padding='SAME', name='c%d' % i, do_norm='instance' if i != 0 else False, do_relu=i != n - 1, relufactor=0.2) # depends on [control=['for'], data=['i']] x = tf.reduce_mean(x, [1, 2]) return x # depends on [control=['with'], data=[]]
def _repr_html_(self): """Give a nice representation of columns in notebooks.""" out="<table class='taqltable'>\n" # Print column name (not if it is auto-generated) if not(self.name()[:4]=="Col_"): out+="<tr>" out+="<th><b>"+self.name()+"</b></th>" out+="</tr>" cropped=False rowcount=0 colkeywords=self.getkeywords() for row in self: out +="\n<tr>" out += "<td>" + _format_cell(row, colkeywords) + "</td>\n" out += "</tr>\n" rowcount+=1 out+="\n" if rowcount>=20: cropped=True break if out[-2:]=="\n\n": out=out[:-1] out+="</table>" if cropped: out+="<p style='text-align:center'>("+str(self.nrows()-20)+" more rows)</p>\n" return out
def function[_repr_html_, parameter[self]]: constant[Give a nice representation of columns in notebooks.] variable[out] assign[=] constant[<table class='taqltable'> ] if <ast.UnaryOp object at 0x7da1b26ad8d0> begin[:] <ast.AugAssign object at 0x7da1b26ad870> <ast.AugAssign object at 0x7da1b26ae860> <ast.AugAssign object at 0x7da1b26ae740> variable[cropped] assign[=] constant[False] variable[rowcount] assign[=] constant[0] variable[colkeywords] assign[=] call[name[self].getkeywords, parameter[]] for taget[name[row]] in starred[name[self]] begin[:] <ast.AugAssign object at 0x7da1b26afdc0> <ast.AugAssign object at 0x7da1b26ade70> <ast.AugAssign object at 0x7da1b26aef20> <ast.AugAssign object at 0x7da1b26add50> <ast.AugAssign object at 0x7da1b26ad180> if compare[name[rowcount] greater_or_equal[>=] constant[20]] begin[:] variable[cropped] assign[=] constant[True] break if compare[call[name[out]][<ast.Slice object at 0x7da1b26ae1a0>] equal[==] constant[ ]] begin[:] variable[out] assign[=] call[name[out]][<ast.Slice object at 0x7da1b26af520>] <ast.AugAssign object at 0x7da1b26ac940> if name[cropped] begin[:] <ast.AugAssign object at 0x7da1b26acd00> return[name[out]]
keyword[def] identifier[_repr_html_] ( identifier[self] ): literal[string] identifier[out] = literal[string] keyword[if] keyword[not] ( identifier[self] . identifier[name] ()[: literal[int] ]== literal[string] ): identifier[out] += literal[string] identifier[out] += literal[string] + identifier[self] . identifier[name] ()+ literal[string] identifier[out] += literal[string] identifier[cropped] = keyword[False] identifier[rowcount] = literal[int] identifier[colkeywords] = identifier[self] . identifier[getkeywords] () keyword[for] identifier[row] keyword[in] identifier[self] : identifier[out] += literal[string] identifier[out] += literal[string] + identifier[_format_cell] ( identifier[row] , identifier[colkeywords] )+ literal[string] identifier[out] += literal[string] identifier[rowcount] += literal[int] identifier[out] += literal[string] keyword[if] identifier[rowcount] >= literal[int] : identifier[cropped] = keyword[True] keyword[break] keyword[if] identifier[out] [- literal[int] :]== literal[string] : identifier[out] = identifier[out] [:- literal[int] ] identifier[out] += literal[string] keyword[if] identifier[cropped] : identifier[out] += literal[string] + identifier[str] ( identifier[self] . identifier[nrows] ()- literal[int] )+ literal[string] keyword[return] identifier[out]
def _repr_html_(self): """Give a nice representation of columns in notebooks.""" out = "<table class='taqltable'>\n" # Print column name (not if it is auto-generated) if not self.name()[:4] == 'Col_': out += '<tr>' out += '<th><b>' + self.name() + '</b></th>' out += '</tr>' # depends on [control=['if'], data=[]] cropped = False rowcount = 0 colkeywords = self.getkeywords() for row in self: out += '\n<tr>' out += '<td>' + _format_cell(row, colkeywords) + '</td>\n' out += '</tr>\n' rowcount += 1 out += '\n' if rowcount >= 20: cropped = True break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] if out[-2:] == '\n\n': out = out[:-1] # depends on [control=['if'], data=[]] out += '</table>' if cropped: out += "<p style='text-align:center'>(" + str(self.nrows() - 20) + ' more rows)</p>\n' # depends on [control=['if'], data=[]] return out
def get_datacenter_id(): ''' Return datacenter ID from provider configuration ''' datacenter_id = config.get_cloud_config_value( 'datacenter_id', get_configured_provider(), __opts__, search_global=False ) conn = get_conn() try: conn.get_datacenter(datacenter_id=datacenter_id) except PBNotFoundError: log.error('Failed to get datacenter: %s', datacenter_id) raise return datacenter_id
def function[get_datacenter_id, parameter[]]: constant[ Return datacenter ID from provider configuration ] variable[datacenter_id] assign[=] call[name[config].get_cloud_config_value, parameter[constant[datacenter_id], call[name[get_configured_provider], parameter[]], name[__opts__]]] variable[conn] assign[=] call[name[get_conn], parameter[]] <ast.Try object at 0x7da1b2194f40> return[name[datacenter_id]]
keyword[def] identifier[get_datacenter_id] (): literal[string] identifier[datacenter_id] = identifier[config] . identifier[get_cloud_config_value] ( literal[string] , identifier[get_configured_provider] (), identifier[__opts__] , identifier[search_global] = keyword[False] ) identifier[conn] = identifier[get_conn] () keyword[try] : identifier[conn] . identifier[get_datacenter] ( identifier[datacenter_id] = identifier[datacenter_id] ) keyword[except] identifier[PBNotFoundError] : identifier[log] . identifier[error] ( literal[string] , identifier[datacenter_id] ) keyword[raise] keyword[return] identifier[datacenter_id]
def get_datacenter_id(): """ Return datacenter ID from provider configuration """ datacenter_id = config.get_cloud_config_value('datacenter_id', get_configured_provider(), __opts__, search_global=False) conn = get_conn() try: conn.get_datacenter(datacenter_id=datacenter_id) # depends on [control=['try'], data=[]] except PBNotFoundError: log.error('Failed to get datacenter: %s', datacenter_id) raise # depends on [control=['except'], data=[]] return datacenter_id
def on_resize(self, *args): """Signal handler callback for SIGWINCH.""" # pylint: disable=W0613 # Unused argument 'args' self.screen.style.name_len = min(self.screen.style.name_len, self.term.width - 15) assert self.term.width >= self.screen.hint_width, ( 'Screen to small {}, must be at least {}'.format( self.term.width, self.screen.hint_width)) self._set_lastpage() self.dirty = self.STATE_REFRESH
def function[on_resize, parameter[self]]: constant[Signal handler callback for SIGWINCH.] name[self].screen.style.name_len assign[=] call[name[min], parameter[name[self].screen.style.name_len, binary_operation[name[self].term.width - constant[15]]]] assert[compare[name[self].term.width greater_or_equal[>=] name[self].screen.hint_width]] call[name[self]._set_lastpage, parameter[]] name[self].dirty assign[=] name[self].STATE_REFRESH
keyword[def] identifier[on_resize] ( identifier[self] ,* identifier[args] ): literal[string] identifier[self] . identifier[screen] . identifier[style] . identifier[name_len] = identifier[min] ( identifier[self] . identifier[screen] . identifier[style] . identifier[name_len] , identifier[self] . identifier[term] . identifier[width] - literal[int] ) keyword[assert] identifier[self] . identifier[term] . identifier[width] >= identifier[self] . identifier[screen] . identifier[hint_width] ,( literal[string] . identifier[format] ( identifier[self] . identifier[term] . identifier[width] , identifier[self] . identifier[screen] . identifier[hint_width] )) identifier[self] . identifier[_set_lastpage] () identifier[self] . identifier[dirty] = identifier[self] . identifier[STATE_REFRESH]
def on_resize(self, *args): """Signal handler callback for SIGWINCH.""" # pylint: disable=W0613 # Unused argument 'args' self.screen.style.name_len = min(self.screen.style.name_len, self.term.width - 15) assert self.term.width >= self.screen.hint_width, 'Screen to small {}, must be at least {}'.format(self.term.width, self.screen.hint_width) self._set_lastpage() self.dirty = self.STATE_REFRESH
def apply(self, func, window=None, bycolumn=True, align=None, **kwargs): '''Apply function ``func`` to the timeseries. :keyword func: string indicating function to apply :keyword window: Rolling window, If not defined ``func`` is applied on the whole dataset. Default ``None``. :keyword bycolumn: If ``True``, function ``func`` is applied on each column separately. Default ``True``. :keyword align: string specifying whether the index of the result should be ``left`` or ``right`` (default) or ``centered`` aligned compared to the rolling window of observations. :keyword kwargs: dictionary of auxiliary parameters used by function ``func``. ''' N = len(self) window = window or N self.precondition(window <= N and window > 0, OutOfBound) return self._rollapply(func, window=window, align=align or self.default_align, bycolumn=bycolumn, **kwargs)
def function[apply, parameter[self, func, window, bycolumn, align]]: constant[Apply function ``func`` to the timeseries. :keyword func: string indicating function to apply :keyword window: Rolling window, If not defined ``func`` is applied on the whole dataset. Default ``None``. :keyword bycolumn: If ``True``, function ``func`` is applied on each column separately. Default ``True``. :keyword align: string specifying whether the index of the result should be ``left`` or ``right`` (default) or ``centered`` aligned compared to the rolling window of observations. :keyword kwargs: dictionary of auxiliary parameters used by function ``func``. ] variable[N] assign[=] call[name[len], parameter[name[self]]] variable[window] assign[=] <ast.BoolOp object at 0x7da1b2346470> call[name[self].precondition, parameter[<ast.BoolOp object at 0x7da1b2346c20>, name[OutOfBound]]] return[call[name[self]._rollapply, parameter[name[func]]]]
keyword[def] identifier[apply] ( identifier[self] , identifier[func] , identifier[window] = keyword[None] , identifier[bycolumn] = keyword[True] , identifier[align] = keyword[None] ,** identifier[kwargs] ): literal[string] identifier[N] = identifier[len] ( identifier[self] ) identifier[window] = identifier[window] keyword[or] identifier[N] identifier[self] . identifier[precondition] ( identifier[window] <= identifier[N] keyword[and] identifier[window] > literal[int] , identifier[OutOfBound] ) keyword[return] identifier[self] . identifier[_rollapply] ( identifier[func] , identifier[window] = identifier[window] , identifier[align] = identifier[align] keyword[or] identifier[self] . identifier[default_align] , identifier[bycolumn] = identifier[bycolumn] , ** identifier[kwargs] )
def apply(self, func, window=None, bycolumn=True, align=None, **kwargs): """Apply function ``func`` to the timeseries. :keyword func: string indicating function to apply :keyword window: Rolling window, If not defined ``func`` is applied on the whole dataset. Default ``None``. :keyword bycolumn: If ``True``, function ``func`` is applied on each column separately. Default ``True``. :keyword align: string specifying whether the index of the result should be ``left`` or ``right`` (default) or ``centered`` aligned compared to the rolling window of observations. :keyword kwargs: dictionary of auxiliary parameters used by function ``func``. """ N = len(self) window = window or N self.precondition(window <= N and window > 0, OutOfBound) return self._rollapply(func, window=window, align=align or self.default_align, bycolumn=bycolumn, **kwargs)
def binary_vectors(n, n_match, m=[0.9] * 8, u=[0.1] * 8, random_state=None, return_links=False, dtype=np.int8): """Generate random binary comparison vectors. This function is used to generate random comparison vectors. The result of each comparison is a binary value (0 or 1). Parameters ---------- n : int The total number of comparison vectors. n_match : int The number of matching record pairs. m : list, default [0.9] * 8, optional A list of m probabilities of each partially identifying variable. The m probability is the probability that an identifier in matching record pairs agrees. u : list, default [0.9] * 8, optional A list of u probabilities of each partially identifying variable. The u probability is the probability that an identifier in non-matching record pairs agrees. random_state : int or numpy.random.RandomState, optional Seed for the random number generator with an integer or numpy RandomState object. return_links: bool When True, the function returns also the true links. dtype: numpy.dtype The dtype of each column in the returned DataFrame. Returns ------- pandas.DataFrame A dataframe with comparison vectors. 
""" if len(m) != len(u): raise ValueError("the length of 'm' is not equal the length of 'u'") if n_match >= n or n_match < 0: raise ValueError("the number of matches is bounded by [0, n]") # set the random seed np.random.seed(random_state) matches = [] nonmatches = [] sample_set = np.array([0, 1], dtype=dtype) for i, _ in enumerate(m): p_mi = [1 - m[i], m[i]] p_ui = [1 - u[i], u[i]] comp_mi = np.random.choice(sample_set, (n_match, 1), p=p_mi) comp_ui = np.random.choice(sample_set, (n - n_match, 1), p=p_ui) nonmatches.append(comp_ui) matches.append(comp_mi) match_block = np.concatenate(matches, axis=1) nonmatch_block = np.concatenate(nonmatches, axis=1) data_np = np.concatenate((match_block, nonmatch_block), axis=0) index_np = np.random.randint(1001, 1001 + n * 2, (n, 2)) data_col_names = ['c_%s' % (i + 1) for i in range(len(m))] data_mi = pd.MultiIndex.from_arrays([index_np[:, 0], index_np[:, 1]]) data_df = pd.DataFrame(data_np, index=data_mi, columns=data_col_names) features = data_df.sample(frac=1, random_state=random_state) if return_links: links = data_mi[:n_match] return features, links else: return features
def function[binary_vectors, parameter[n, n_match, m, u, random_state, return_links, dtype]]: constant[Generate random binary comparison vectors. This function is used to generate random comparison vectors. The result of each comparison is a binary value (0 or 1). Parameters ---------- n : int The total number of comparison vectors. n_match : int The number of matching record pairs. m : list, default [0.9] * 8, optional A list of m probabilities of each partially identifying variable. The m probability is the probability that an identifier in matching record pairs agrees. u : list, default [0.9] * 8, optional A list of u probabilities of each partially identifying variable. The u probability is the probability that an identifier in non-matching record pairs agrees. random_state : int or numpy.random.RandomState, optional Seed for the random number generator with an integer or numpy RandomState object. return_links: bool When True, the function returns also the true links. dtype: numpy.dtype The dtype of each column in the returned DataFrame. Returns ------- pandas.DataFrame A dataframe with comparison vectors. 
] if compare[call[name[len], parameter[name[m]]] not_equal[!=] call[name[len], parameter[name[u]]]] begin[:] <ast.Raise object at 0x7da18f58e9b0> if <ast.BoolOp object at 0x7da18f58fd60> begin[:] <ast.Raise object at 0x7da18f58fbb0> call[name[np].random.seed, parameter[name[random_state]]] variable[matches] assign[=] list[[]] variable[nonmatches] assign[=] list[[]] variable[sample_set] assign[=] call[name[np].array, parameter[list[[<ast.Constant object at 0x7da18f58fe20>, <ast.Constant object at 0x7da18f58ec50>]]]] for taget[tuple[[<ast.Name object at 0x7da18f58c520>, <ast.Name object at 0x7da18f58fac0>]]] in starred[call[name[enumerate], parameter[name[m]]]] begin[:] variable[p_mi] assign[=] list[[<ast.BinOp object at 0x7da18f58f670>, <ast.Subscript object at 0x7da18f58f6d0>]] variable[p_ui] assign[=] list[[<ast.BinOp object at 0x7da18f58ddb0>, <ast.Subscript object at 0x7da18f58f190>]] variable[comp_mi] assign[=] call[name[np].random.choice, parameter[name[sample_set], tuple[[<ast.Name object at 0x7da18f58d840>, <ast.Constant object at 0x7da18f58ed10>]]]] variable[comp_ui] assign[=] call[name[np].random.choice, parameter[name[sample_set], tuple[[<ast.BinOp object at 0x7da18f58d1e0>, <ast.Constant object at 0x7da18f58f8e0>]]]] call[name[nonmatches].append, parameter[name[comp_ui]]] call[name[matches].append, parameter[name[comp_mi]]] variable[match_block] assign[=] call[name[np].concatenate, parameter[name[matches]]] variable[nonmatch_block] assign[=] call[name[np].concatenate, parameter[name[nonmatches]]] variable[data_np] assign[=] call[name[np].concatenate, parameter[tuple[[<ast.Name object at 0x7da18f58c550>, <ast.Name object at 0x7da18f58c820>]]]] variable[index_np] assign[=] call[name[np].random.randint, parameter[constant[1001], binary_operation[constant[1001] + binary_operation[name[n] * constant[2]]], tuple[[<ast.Name object at 0x7da18f58dea0>, <ast.Constant object at 0x7da18f58ee00>]]]] variable[data_col_names] assign[=] <ast.ListComp object at 
0x7da18f58d030> variable[data_mi] assign[=] call[name[pd].MultiIndex.from_arrays, parameter[list[[<ast.Subscript object at 0x7da20c6e7940>, <ast.Subscript object at 0x7da20c6e5cc0>]]]] variable[data_df] assign[=] call[name[pd].DataFrame, parameter[name[data_np]]] variable[features] assign[=] call[name[data_df].sample, parameter[]] if name[return_links] begin[:] variable[links] assign[=] call[name[data_mi]][<ast.Slice object at 0x7da20c6e4760>] return[tuple[[<ast.Name object at 0x7da20c6e7a30>, <ast.Name object at 0x7da20c6e78e0>]]]
keyword[def] identifier[binary_vectors] ( identifier[n] , identifier[n_match] , identifier[m] =[ literal[int] ]* literal[int] , identifier[u] =[ literal[int] ]* literal[int] , identifier[random_state] = keyword[None] , identifier[return_links] = keyword[False] , identifier[dtype] = identifier[np] . identifier[int8] ): literal[string] keyword[if] identifier[len] ( identifier[m] )!= identifier[len] ( identifier[u] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[n_match] >= identifier[n] keyword[or] identifier[n_match] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[np] . identifier[random] . identifier[seed] ( identifier[random_state] ) identifier[matches] =[] identifier[nonmatches] =[] identifier[sample_set] = identifier[np] . identifier[array] ([ literal[int] , literal[int] ], identifier[dtype] = identifier[dtype] ) keyword[for] identifier[i] , identifier[_] keyword[in] identifier[enumerate] ( identifier[m] ): identifier[p_mi] =[ literal[int] - identifier[m] [ identifier[i] ], identifier[m] [ identifier[i] ]] identifier[p_ui] =[ literal[int] - identifier[u] [ identifier[i] ], identifier[u] [ identifier[i] ]] identifier[comp_mi] = identifier[np] . identifier[random] . identifier[choice] ( identifier[sample_set] ,( identifier[n_match] , literal[int] ), identifier[p] = identifier[p_mi] ) identifier[comp_ui] = identifier[np] . identifier[random] . identifier[choice] ( identifier[sample_set] ,( identifier[n] - identifier[n_match] , literal[int] ), identifier[p] = identifier[p_ui] ) identifier[nonmatches] . identifier[append] ( identifier[comp_ui] ) identifier[matches] . identifier[append] ( identifier[comp_mi] ) identifier[match_block] = identifier[np] . identifier[concatenate] ( identifier[matches] , identifier[axis] = literal[int] ) identifier[nonmatch_block] = identifier[np] . 
identifier[concatenate] ( identifier[nonmatches] , identifier[axis] = literal[int] ) identifier[data_np] = identifier[np] . identifier[concatenate] (( identifier[match_block] , identifier[nonmatch_block] ), identifier[axis] = literal[int] ) identifier[index_np] = identifier[np] . identifier[random] . identifier[randint] ( literal[int] , literal[int] + identifier[n] * literal[int] ,( identifier[n] , literal[int] )) identifier[data_col_names] =[ literal[string] %( identifier[i] + literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[m] ))] identifier[data_mi] = identifier[pd] . identifier[MultiIndex] . identifier[from_arrays] ([ identifier[index_np] [:, literal[int] ], identifier[index_np] [:, literal[int] ]]) identifier[data_df] = identifier[pd] . identifier[DataFrame] ( identifier[data_np] , identifier[index] = identifier[data_mi] , identifier[columns] = identifier[data_col_names] ) identifier[features] = identifier[data_df] . identifier[sample] ( identifier[frac] = literal[int] , identifier[random_state] = identifier[random_state] ) keyword[if] identifier[return_links] : identifier[links] = identifier[data_mi] [: identifier[n_match] ] keyword[return] identifier[features] , identifier[links] keyword[else] : keyword[return] identifier[features]
def binary_vectors(n, n_match, m=[0.9] * 8, u=[0.1] * 8, random_state=None, return_links=False, dtype=np.int8): """Generate random binary comparison vectors. This function is used to generate random comparison vectors. The result of each comparison is a binary value (0 or 1). Parameters ---------- n : int The total number of comparison vectors. n_match : int The number of matching record pairs. m : list, default [0.9] * 8, optional A list of m probabilities of each partially identifying variable. The m probability is the probability that an identifier in matching record pairs agrees. u : list, default [0.9] * 8, optional A list of u probabilities of each partially identifying variable. The u probability is the probability that an identifier in non-matching record pairs agrees. random_state : int or numpy.random.RandomState, optional Seed for the random number generator with an integer or numpy RandomState object. return_links: bool When True, the function returns also the true links. dtype: numpy.dtype The dtype of each column in the returned DataFrame. Returns ------- pandas.DataFrame A dataframe with comparison vectors. 
""" if len(m) != len(u): raise ValueError("the length of 'm' is not equal the length of 'u'") # depends on [control=['if'], data=[]] if n_match >= n or n_match < 0: raise ValueError('the number of matches is bounded by [0, n]') # depends on [control=['if'], data=[]] # set the random seed np.random.seed(random_state) matches = [] nonmatches = [] sample_set = np.array([0, 1], dtype=dtype) for (i, _) in enumerate(m): p_mi = [1 - m[i], m[i]] p_ui = [1 - u[i], u[i]] comp_mi = np.random.choice(sample_set, (n_match, 1), p=p_mi) comp_ui = np.random.choice(sample_set, (n - n_match, 1), p=p_ui) nonmatches.append(comp_ui) matches.append(comp_mi) # depends on [control=['for'], data=[]] match_block = np.concatenate(matches, axis=1) nonmatch_block = np.concatenate(nonmatches, axis=1) data_np = np.concatenate((match_block, nonmatch_block), axis=0) index_np = np.random.randint(1001, 1001 + n * 2, (n, 2)) data_col_names = ['c_%s' % (i + 1) for i in range(len(m))] data_mi = pd.MultiIndex.from_arrays([index_np[:, 0], index_np[:, 1]]) data_df = pd.DataFrame(data_np, index=data_mi, columns=data_col_names) features = data_df.sample(frac=1, random_state=random_state) if return_links: links = data_mi[:n_match] return (features, links) # depends on [control=['if'], data=[]] else: return features
def set_windows_env_var(key, value): """Set an env var. Raises: WindowsError """ if not isinstance(key, text_type): raise TypeError("%r not of type %r" % (key, text_type)) if not isinstance(value, text_type): raise TypeError("%r not of type %r" % (value, text_type)) status = winapi.SetEnvironmentVariableW(key, value) if status == 0: raise ctypes.WinError()
def function[set_windows_env_var, parameter[key, value]]: constant[Set an env var. Raises: WindowsError ] if <ast.UnaryOp object at 0x7da1b2042320> begin[:] <ast.Raise object at 0x7da1b2043e80> if <ast.UnaryOp object at 0x7da1b2040d60> begin[:] <ast.Raise object at 0x7da1b2041030> variable[status] assign[=] call[name[winapi].SetEnvironmentVariableW, parameter[name[key], name[value]]] if compare[name[status] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da1b2041600>
keyword[def] identifier[set_windows_env_var] ( identifier[key] , identifier[value] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[key] , identifier[text_type] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[key] , identifier[text_type] )) keyword[if] keyword[not] identifier[isinstance] ( identifier[value] , identifier[text_type] ): keyword[raise] identifier[TypeError] ( literal[string] %( identifier[value] , identifier[text_type] )) identifier[status] = identifier[winapi] . identifier[SetEnvironmentVariableW] ( identifier[key] , identifier[value] ) keyword[if] identifier[status] == literal[int] : keyword[raise] identifier[ctypes] . identifier[WinError] ()
def set_windows_env_var(key, value): """Set an env var. Raises: WindowsError """ if not isinstance(key, text_type): raise TypeError('%r not of type %r' % (key, text_type)) # depends on [control=['if'], data=[]] if not isinstance(value, text_type): raise TypeError('%r not of type %r' % (value, text_type)) # depends on [control=['if'], data=[]] status = winapi.SetEnvironmentVariableW(key, value) if status == 0: raise ctypes.WinError() # depends on [control=['if'], data=[]]
def RFC3156_micalg_from_algo(hash_algo): """ Converts a GPGME hash algorithm name to one conforming to RFC3156. GPGME returns hash algorithm names such as "SHA256", but RFC3156 says that programs need to use names such as "pgp-sha256" instead. :param str hash_algo: GPGME hash_algo :returns: the lowercase name of of the algorithm with "pgp-" prepended :rtype: str """ # hash_algo will be something like SHA256, but we need pgp-sha256. algo = gpg.core.hash_algo_name(hash_algo) if algo is None: raise GPGProblem('Unknown hash algorithm {}'.format(algo), code=GPGCode.INVALID_HASH_ALGORITHM) return 'pgp-' + algo.lower()
def function[RFC3156_micalg_from_algo, parameter[hash_algo]]: constant[ Converts a GPGME hash algorithm name to one conforming to RFC3156. GPGME returns hash algorithm names such as "SHA256", but RFC3156 says that programs need to use names such as "pgp-sha256" instead. :param str hash_algo: GPGME hash_algo :returns: the lowercase name of of the algorithm with "pgp-" prepended :rtype: str ] variable[algo] assign[=] call[name[gpg].core.hash_algo_name, parameter[name[hash_algo]]] if compare[name[algo] is constant[None]] begin[:] <ast.Raise object at 0x7da1b08479d0> return[binary_operation[constant[pgp-] + call[name[algo].lower, parameter[]]]]
keyword[def] identifier[RFC3156_micalg_from_algo] ( identifier[hash_algo] ): literal[string] identifier[algo] = identifier[gpg] . identifier[core] . identifier[hash_algo_name] ( identifier[hash_algo] ) keyword[if] identifier[algo] keyword[is] keyword[None] : keyword[raise] identifier[GPGProblem] ( literal[string] . identifier[format] ( identifier[algo] ), identifier[code] = identifier[GPGCode] . identifier[INVALID_HASH_ALGORITHM] ) keyword[return] literal[string] + identifier[algo] . identifier[lower] ()
def RFC3156_micalg_from_algo(hash_algo): """ Converts a GPGME hash algorithm name to one conforming to RFC3156. GPGME returns hash algorithm names such as "SHA256", but RFC3156 says that programs need to use names such as "pgp-sha256" instead. :param str hash_algo: GPGME hash_algo :returns: the lowercase name of of the algorithm with "pgp-" prepended :rtype: str """ # hash_algo will be something like SHA256, but we need pgp-sha256. algo = gpg.core.hash_algo_name(hash_algo) if algo is None: raise GPGProblem('Unknown hash algorithm {}'.format(algo), code=GPGCode.INVALID_HASH_ALGORITHM) # depends on [control=['if'], data=['algo']] return 'pgp-' + algo.lower()
def _request_process_json_bulk(self, response_data): """Handle bulk JSON response Return: (string): The response data (string): The response status """ status = 'Failure' data = response_data.get(self.request_entity, []) if data: status = 'Success' return data, status
def function[_request_process_json_bulk, parameter[self, response_data]]: constant[Handle bulk JSON response Return: (string): The response data (string): The response status ] variable[status] assign[=] constant[Failure] variable[data] assign[=] call[name[response_data].get, parameter[name[self].request_entity, list[[]]]] if name[data] begin[:] variable[status] assign[=] constant[Success] return[tuple[[<ast.Name object at 0x7da1b0cedff0>, <ast.Name object at 0x7da1b0ceecb0>]]]
keyword[def] identifier[_request_process_json_bulk] ( identifier[self] , identifier[response_data] ): literal[string] identifier[status] = literal[string] identifier[data] = identifier[response_data] . identifier[get] ( identifier[self] . identifier[request_entity] ,[]) keyword[if] identifier[data] : identifier[status] = literal[string] keyword[return] identifier[data] , identifier[status]
def _request_process_json_bulk(self, response_data): """Handle bulk JSON response Return: (string): The response data (string): The response status """ status = 'Failure' data = response_data.get(self.request_entity, []) if data: status = 'Success' # depends on [control=['if'], data=[]] return (data, status)
def is_site_available(self): """ Returns true if we can access LendingClub.com This is also a simple test to see if there's a network connection Returns ------- boolean True or False """ try: response = requests.head(self.base_url) status = response.status_code return 200 <= status < 400 # Returns true if the status code is greater than 200 and less than 400 except Exception: return False
def function[is_site_available, parameter[self]]: constant[ Returns true if we can access LendingClub.com This is also a simple test to see if there's a network connection Returns ------- boolean True or False ] <ast.Try object at 0x7da1b11e4130>
keyword[def] identifier[is_site_available] ( identifier[self] ): literal[string] keyword[try] : identifier[response] = identifier[requests] . identifier[head] ( identifier[self] . identifier[base_url] ) identifier[status] = identifier[response] . identifier[status_code] keyword[return] literal[int] <= identifier[status] < literal[int] keyword[except] identifier[Exception] : keyword[return] keyword[False]
def is_site_available(self): """ Returns true if we can access LendingClub.com This is also a simple test to see if there's a network connection Returns ------- boolean True or False """ try: response = requests.head(self.base_url) status = response.status_code return 200 <= status < 400 # Returns true if the status code is greater than 200 and less than 400 # depends on [control=['try'], data=[]] except Exception: return False # depends on [control=['except'], data=[]]
def from_celery(cls, worker_name, job_dict, celery_app): """ Create a JobStats object from the dictionary returned by celery. Args: worker_name (str): The name of the worker this jobs runs on. job_dict (dict): The dictionary as returned by celery. celery_app: Reference to a celery application object. Returns: JobStats: A fully initialized JobStats object. """ if not isinstance(job_dict, dict) or 'id' not in job_dict: raise JobStatInvalid('The job description is missing important fields.') async_result = AsyncResult(id=job_dict['id'], app=celery_app) a_info = async_result.info if isinstance(async_result.info, dict) else None return JobStats( name=a_info.get('name', '') if a_info is not None else '', job_id=job_dict['id'], job_type=a_info.get('type', '') if a_info is not None else '', workflow_id=a_info.get('workflow_id', '') if a_info is not None else '', queue=a_info.get('queue', '') if a_info is not None else '', start_time=a_info.get('start_time', None) if a_info is not None else None, arguments=a_info.get('arguments', {}) if a_info is not None else {}, acknowledged=job_dict['acknowledged'], func_name=job_dict['type'], hostname=job_dict['hostname'], worker_name=worker_name, worker_pid=job_dict['worker_pid'], routing_key=job_dict['delivery_info']['routing_key'] )
def function[from_celery, parameter[cls, worker_name, job_dict, celery_app]]: constant[ Create a JobStats object from the dictionary returned by celery. Args: worker_name (str): The name of the worker this jobs runs on. job_dict (dict): The dictionary as returned by celery. celery_app: Reference to a celery application object. Returns: JobStats: A fully initialized JobStats object. ] if <ast.BoolOp object at 0x7da1b11efa30> begin[:] <ast.Raise object at 0x7da1b11ee1a0> variable[async_result] assign[=] call[name[AsyncResult], parameter[]] variable[a_info] assign[=] <ast.IfExp object at 0x7da1b11ec670> return[call[name[JobStats], parameter[]]]
keyword[def] identifier[from_celery] ( identifier[cls] , identifier[worker_name] , identifier[job_dict] , identifier[celery_app] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[job_dict] , identifier[dict] ) keyword[or] literal[string] keyword[not] keyword[in] identifier[job_dict] : keyword[raise] identifier[JobStatInvalid] ( literal[string] ) identifier[async_result] = identifier[AsyncResult] ( identifier[id] = identifier[job_dict] [ literal[string] ], identifier[app] = identifier[celery_app] ) identifier[a_info] = identifier[async_result] . identifier[info] keyword[if] identifier[isinstance] ( identifier[async_result] . identifier[info] , identifier[dict] ) keyword[else] keyword[None] keyword[return] identifier[JobStats] ( identifier[name] = identifier[a_info] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] , identifier[job_id] = identifier[job_dict] [ literal[string] ], identifier[job_type] = identifier[a_info] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] , identifier[workflow_id] = identifier[a_info] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] , identifier[queue] = identifier[a_info] . identifier[get] ( literal[string] , literal[string] ) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] literal[string] , identifier[start_time] = identifier[a_info] . identifier[get] ( literal[string] , keyword[None] ) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] keyword[None] , identifier[arguments] = identifier[a_info] . 
identifier[get] ( literal[string] ,{}) keyword[if] identifier[a_info] keyword[is] keyword[not] keyword[None] keyword[else] {}, identifier[acknowledged] = identifier[job_dict] [ literal[string] ], identifier[func_name] = identifier[job_dict] [ literal[string] ], identifier[hostname] = identifier[job_dict] [ literal[string] ], identifier[worker_name] = identifier[worker_name] , identifier[worker_pid] = identifier[job_dict] [ literal[string] ], identifier[routing_key] = identifier[job_dict] [ literal[string] ][ literal[string] ] )
def from_celery(cls, worker_name, job_dict, celery_app): """ Create a JobStats object from the dictionary returned by celery. Args: worker_name (str): The name of the worker this jobs runs on. job_dict (dict): The dictionary as returned by celery. celery_app: Reference to a celery application object. Returns: JobStats: A fully initialized JobStats object. """ if not isinstance(job_dict, dict) or 'id' not in job_dict: raise JobStatInvalid('The job description is missing important fields.') # depends on [control=['if'], data=[]] async_result = AsyncResult(id=job_dict['id'], app=celery_app) a_info = async_result.info if isinstance(async_result.info, dict) else None return JobStats(name=a_info.get('name', '') if a_info is not None else '', job_id=job_dict['id'], job_type=a_info.get('type', '') if a_info is not None else '', workflow_id=a_info.get('workflow_id', '') if a_info is not None else '', queue=a_info.get('queue', '') if a_info is not None else '', start_time=a_info.get('start_time', None) if a_info is not None else None, arguments=a_info.get('arguments', {}) if a_info is not None else {}, acknowledged=job_dict['acknowledged'], func_name=job_dict['type'], hostname=job_dict['hostname'], worker_name=worker_name, worker_pid=job_dict['worker_pid'], routing_key=job_dict['delivery_info']['routing_key'])
def ReplyPacket(self): """Create a ready-to-transmit authentication reply packet. Returns a RADIUS packet which can be directly transmitted to a RADIUS server. This differs with Packet() in how the authenticator is calculated. :return: raw packet :rtype: string """ assert(self.authenticator) assert(self.secret is not None) attr = self._PktEncodeAttributes() header = struct.pack('!BBH', self.code, self.id, (20 + len(attr))) authenticator = md5_constructor(header[0:4] + self.authenticator + attr + self.secret).digest() return header + authenticator + attr
def function[ReplyPacket, parameter[self]]: constant[Create a ready-to-transmit authentication reply packet. Returns a RADIUS packet which can be directly transmitted to a RADIUS server. This differs with Packet() in how the authenticator is calculated. :return: raw packet :rtype: string ] assert[name[self].authenticator] assert[compare[name[self].secret is_not constant[None]]] variable[attr] assign[=] call[name[self]._PktEncodeAttributes, parameter[]] variable[header] assign[=] call[name[struct].pack, parameter[constant[!BBH], name[self].code, name[self].id, binary_operation[constant[20] + call[name[len], parameter[name[attr]]]]]] variable[authenticator] assign[=] call[call[name[md5_constructor], parameter[binary_operation[binary_operation[binary_operation[call[name[header]][<ast.Slice object at 0x7da18bccae90>] + name[self].authenticator] + name[attr]] + name[self].secret]]].digest, parameter[]] return[binary_operation[binary_operation[name[header] + name[authenticator]] + name[attr]]]
keyword[def] identifier[ReplyPacket] ( identifier[self] ): literal[string] keyword[assert] ( identifier[self] . identifier[authenticator] ) keyword[assert] ( identifier[self] . identifier[secret] keyword[is] keyword[not] keyword[None] ) identifier[attr] = identifier[self] . identifier[_PktEncodeAttributes] () identifier[header] = identifier[struct] . identifier[pack] ( literal[string] , identifier[self] . identifier[code] , identifier[self] . identifier[id] ,( literal[int] + identifier[len] ( identifier[attr] ))) identifier[authenticator] = identifier[md5_constructor] ( identifier[header] [ literal[int] : literal[int] ]+ identifier[self] . identifier[authenticator] + identifier[attr] + identifier[self] . identifier[secret] ). identifier[digest] () keyword[return] identifier[header] + identifier[authenticator] + identifier[attr]
def ReplyPacket(self): """Create a ready-to-transmit authentication reply packet. Returns a RADIUS packet which can be directly transmitted to a RADIUS server. This differs with Packet() in how the authenticator is calculated. :return: raw packet :rtype: string """ assert self.authenticator assert self.secret is not None attr = self._PktEncodeAttributes() header = struct.pack('!BBH', self.code, self.id, 20 + len(attr)) authenticator = md5_constructor(header[0:4] + self.authenticator + attr + self.secret).digest() return header + authenticator + attr
def peaks(samples): """ Find the minimum and maximum peak of the samples. Returns that pair in the order they were found. So if min was found first, it returns (min, max) else the other way around. """ max_index = numpy.argmax(samples) max_value = samples[max_index] min_index = numpy.argmin(samples) min_value = samples[min_index] if min_index < max_index: return (min_value, max_value) else: return (max_value, min_value)
def function[peaks, parameter[samples]]: constant[ Find the minimum and maximum peak of the samples. Returns that pair in the order they were found. So if min was found first, it returns (min, max) else the other way around. ] variable[max_index] assign[=] call[name[numpy].argmax, parameter[name[samples]]] variable[max_value] assign[=] call[name[samples]][name[max_index]] variable[min_index] assign[=] call[name[numpy].argmin, parameter[name[samples]]] variable[min_value] assign[=] call[name[samples]][name[min_index]] if compare[name[min_index] less[<] name[max_index]] begin[:] return[tuple[[<ast.Name object at 0x7da18f00e6e0>, <ast.Name object at 0x7da18f00e290>]]]
keyword[def] identifier[peaks] ( identifier[samples] ): literal[string] identifier[max_index] = identifier[numpy] . identifier[argmax] ( identifier[samples] ) identifier[max_value] = identifier[samples] [ identifier[max_index] ] identifier[min_index] = identifier[numpy] . identifier[argmin] ( identifier[samples] ) identifier[min_value] = identifier[samples] [ identifier[min_index] ] keyword[if] identifier[min_index] < identifier[max_index] : keyword[return] ( identifier[min_value] , identifier[max_value] ) keyword[else] : keyword[return] ( identifier[max_value] , identifier[min_value] )
def peaks(samples): """ Find the minimum and maximum peak of the samples. Returns that pair in the order they were found. So if min was found first, it returns (min, max) else the other way around. """ max_index = numpy.argmax(samples) max_value = samples[max_index] min_index = numpy.argmin(samples) min_value = samples[min_index] if min_index < max_index: return (min_value, max_value) # depends on [control=['if'], data=[]] else: return (max_value, min_value)
def preview(image, **kwargs): ''' Show a slippy map preview of the image. Requires iPython. Args: image (image): image object to display zoom (int): zoom level to intialize the map, default is 16 center (list): center coordinates to initialize the map, defaults to center of image bands (list): bands of image to display, defaults to the image's default RGB bands ''' try: from IPython.display import Javascript, HTML, display from gbdxtools.rda.interface import RDA from gbdxtools import Interface gbdx = Interface() except: print("IPython is required to produce maps.") return zoom = kwargs.get("zoom", 16) bands = kwargs.get("bands") if bands is None: bands = image._rgb_bands wgs84_bounds = kwargs.get("bounds", list(loads(image.metadata["image"]["imageBoundsWGS84"]).bounds)) center = kwargs.get("center", list(shape(image).centroid.bounds[0:2])) if image.proj != 'EPSG:4326': code = image.proj.split(':')[1] conn = gbdx.gbdx_connection proj_info = conn.get('https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'.format(code)).json() tfm = partial(pyproj.transform, pyproj.Proj(init='EPSG:4326'), pyproj.Proj(init=image.proj)) bounds = list(ops.transform(tfm, box(*wgs84_bounds)).bounds) else: proj_info = {} bounds = wgs84_bounds # Applying DRA to a DRA'ed image looks bad, skip if already in graph if not image.options.get('dra'): rda = RDA() # Need some simple DRA to get the image in range for display. 
dra = rda.HistogramDRA(image) image = dra.aoi(bbox=image.bounds) graph_id = image.rda_id node_id = image.rda.graph()['nodes'][0]['id'] map_id = "map_{}".format(str(int(time.time()))) scales = ','.join(['1'] * len(bands)) offsets = ','.join(['0'] * len(bands)) display(HTML(Template(''' <div id="$map_id"/> <link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' /> <script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script> <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style> <style></style> ''').substitute({"map_id": map_id}))) js = Template(""" require.config({ paths: { oljs: 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol', proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4' } }); require(['oljs', 'proj4'], function(oljs, proj4) { oljs.proj.setProj4(proj4) var md = $md; var georef = $georef; var graphId = '$graphId'; var nodeId = '$nodeId'; var extents = $bounds; var x1 = md.minTileX * md.tileXSize; var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize); var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize); var y2 = md.minTileY * md.tileYSize; var tileLayerResolutions = [georef.scaleX]; var url = '$url' + '/tile/'; url += graphId + '/' + nodeId; url += "/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets"; var proj = '$proj'; var projInfo = $projInfo; if ( proj !== 'EPSG:4326' ) { var proj4def = projInfo["proj4"]; proj4.defs(proj, proj4def); var area = projInfo["area_of_use"]; var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"], area["area_east_bound_lon"], area["area_north_bound_lat"]] var projection = oljs.proj.get(proj); var fromLonLat = oljs.proj.getTransform('EPSG:4326', projection); var extent = oljs.extent.applyTransform( [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat); 
projection.setExtent(extent); } else { var projection = oljs.proj.get(proj); } var rda = new oljs.layer.Tile({ title: 'RDA', opacity: 1, extent: extents, source: new oljs.source.TileImage({ crossOrigin: null, projection: projection, extent: extents, tileGrid: new oljs.tilegrid.TileGrid({ extent: extents, origin: [extents[0], extents[3]], resolutions: tileLayerResolutions, tileSize: [md.tileXSize, md.tileYSize], }), tileUrlFunction: function (coordinate) { if (coordinate === null) return undefined; const x = coordinate[1] + md.minTileX; const y = -(coordinate[2] + 1 - md.minTileY); if (x < md.minTileX || x > md.maxTileX) return undefined; if (y < md.minTileY || y > md.maxTileY) return undefined; return url.replace('{x}', x).replace('{y}', y); } }) }); var map = new oljs.Map({ layers: [ rda ], target: '$map_id', view: new oljs.View({ projection: projection, center: $center, zoom: $zoom }) }); }); """).substitute({ "map_id": map_id, "proj": image.proj, "projInfo": json.dumps(proj_info), "graphId": graph_id, "bounds": bounds, "bands": ",".join(map(str, bands)), "nodeId": node_id, "md": json.dumps(image.metadata["image"]), "georef": json.dumps(image.metadata["georef"]), "center": center, "zoom": zoom, "token": gbdx.gbdx_connection.access_token, "scales": scales, "offsets": offsets, "url": VIRTUAL_RDA_URL }) display(Javascript(js))
def function[preview, parameter[image]]: constant[ Show a slippy map preview of the image. Requires iPython. Args: image (image): image object to display zoom (int): zoom level to intialize the map, default is 16 center (list): center coordinates to initialize the map, defaults to center of image bands (list): bands of image to display, defaults to the image's default RGB bands ] <ast.Try object at 0x7da20ec062c0> variable[zoom] assign[=] call[name[kwargs].get, parameter[constant[zoom], constant[16]]] variable[bands] assign[=] call[name[kwargs].get, parameter[constant[bands]]] if compare[name[bands] is constant[None]] begin[:] variable[bands] assign[=] name[image]._rgb_bands variable[wgs84_bounds] assign[=] call[name[kwargs].get, parameter[constant[bounds], call[name[list], parameter[call[name[loads], parameter[call[call[name[image].metadata][constant[image]]][constant[imageBoundsWGS84]]]].bounds]]]] variable[center] assign[=] call[name[kwargs].get, parameter[constant[center], call[name[list], parameter[call[call[name[shape], parameter[name[image]]].centroid.bounds][<ast.Slice object at 0x7da1b01c5870>]]]]] if compare[name[image].proj not_equal[!=] constant[EPSG:4326]] begin[:] variable[code] assign[=] call[call[name[image].proj.split, parameter[constant[:]]]][constant[1]] variable[conn] assign[=] name[gbdx].gbdx_connection variable[proj_info] assign[=] call[call[name[conn].get, parameter[call[constant[https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}].format, parameter[name[code]]]]].json, parameter[]] variable[tfm] assign[=] call[name[partial], parameter[name[pyproj].transform, call[name[pyproj].Proj, parameter[]], call[name[pyproj].Proj, parameter[]]]] variable[bounds] assign[=] call[name[list], parameter[call[name[ops].transform, parameter[name[tfm], call[name[box], parameter[<ast.Starred object at 0x7da1b01c5ab0>]]]].bounds]] if <ast.UnaryOp object at 0x7da1b000c460> begin[:] variable[rda] assign[=] call[name[RDA], parameter[]] variable[dra] 
assign[=] call[name[rda].HistogramDRA, parameter[name[image]]] variable[image] assign[=] call[name[dra].aoi, parameter[]] variable[graph_id] assign[=] name[image].rda_id variable[node_id] assign[=] call[call[call[call[name[image].rda.graph, parameter[]]][constant[nodes]]][constant[0]]][constant[id]] variable[map_id] assign[=] call[constant[map_{}].format, parameter[call[name[str], parameter[call[name[int], parameter[call[name[time].time, parameter[]]]]]]]] variable[scales] assign[=] call[constant[,].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b012d690>]] * call[name[len], parameter[name[bands]]]]]] variable[offsets] assign[=] call[constant[,].join, parameter[binary_operation[list[[<ast.Constant object at 0x7da1b012c1f0>]] * call[name[len], parameter[name[bands]]]]]] call[name[display], parameter[call[name[HTML], parameter[call[call[name[Template], parameter[constant[ <div id="$map_id"/> <link href='https://openlayers.org/en/v4.6.4/css/ol.css' rel='stylesheet' /> <script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script> <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style> <style></style> ]]].substitute, parameter[dictionary[[<ast.Constant object at 0x7da1b012fa00>], [<ast.Name object at 0x7da1b012ff40>]]]]]]]] variable[js] assign[=] call[call[name[Template], parameter[constant[ require.config({ paths: { oljs: 'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol', proj4: 'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4' } }); require(['oljs', 'proj4'], function(oljs, proj4) { oljs.proj.setProj4(proj4) var md = $md; var georef = $georef; var graphId = '$graphId'; var nodeId = '$nodeId'; var extents = $bounds; var x1 = md.minTileX * md.tileXSize; var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize); var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize); var y2 = 
md.minTileY * md.tileYSize; var tileLayerResolutions = [georef.scaleX]; var url = '$url' + '/tile/'; url += graphId + '/' + nodeId; url += "/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets"; var proj = '$proj'; var projInfo = $projInfo; if ( proj !== 'EPSG:4326' ) { var proj4def = projInfo["proj4"]; proj4.defs(proj, proj4def); var area = projInfo["area_of_use"]; var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"], area["area_east_bound_lon"], area["area_north_bound_lat"]] var projection = oljs.proj.get(proj); var fromLonLat = oljs.proj.getTransform('EPSG:4326', projection); var extent = oljs.extent.applyTransform( [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat); projection.setExtent(extent); } else { var projection = oljs.proj.get(proj); } var rda = new oljs.layer.Tile({ title: 'RDA', opacity: 1, extent: extents, source: new oljs.source.TileImage({ crossOrigin: null, projection: projection, extent: extents, tileGrid: new oljs.tilegrid.TileGrid({ extent: extents, origin: [extents[0], extents[3]], resolutions: tileLayerResolutions, tileSize: [md.tileXSize, md.tileYSize], }), tileUrlFunction: function (coordinate) { if (coordinate === null) return undefined; const x = coordinate[1] + md.minTileX; const y = -(coordinate[2] + 1 - md.minTileY); if (x < md.minTileX || x > md.maxTileX) return undefined; if (y < md.minTileY || y > md.maxTileY) return undefined; return url.replace('{x}', x).replace('{y}', y); } }) }); var map = new oljs.Map({ layers: [ rda ], target: '$map_id', view: new oljs.View({ projection: projection, center: $center, zoom: $zoom }) }); }); ]]].substitute, parameter[dictionary[[<ast.Constant object at 0x7da1b012fb50>, <ast.Constant object at 0x7da1b012fc40>, <ast.Constant object at 0x7da1b012f940>, <ast.Constant object at 0x7da1b012fa60>, <ast.Constant object at 0x7da1b012fa90>, <ast.Constant object at 0x7da1b012fc10>, <ast.Constant object at 0x7da1b012ee30>, <ast.Constant object at 
0x7da1b012f880>, <ast.Constant object at 0x7da1b012c220>, <ast.Constant object at 0x7da1b012f820>, <ast.Constant object at 0x7da1b012d6c0>, <ast.Constant object at 0x7da1b012c3d0>, <ast.Constant object at 0x7da1b012edd0>, <ast.Constant object at 0x7da1b012de70>, <ast.Constant object at 0x7da1b012c8e0>], [<ast.Name object at 0x7da1b012e200>, <ast.Attribute object at 0x7da1b012c430>, <ast.Call object at 0x7da1b012ca00>, <ast.Name object at 0x7da1b012ca90>, <ast.Name object at 0x7da1b012f4c0>, <ast.Call object at 0x7da1b012dff0>, <ast.Name object at 0x7da1b012f490>, <ast.Call object at 0x7da1b012de40>, <ast.Call object at 0x7da1b012cd60>, <ast.Name object at 0x7da1b012ca60>, <ast.Name object at 0x7da1b012f760>, <ast.Attribute object at 0x7da1b012c880>, <ast.Name object at 0x7da1b012c7c0>, <ast.Name object at 0x7da1b012f6d0>, <ast.Name object at 0x7da1b012f790>]]]] call[name[display], parameter[call[name[Javascript], parameter[name[js]]]]]
keyword[def] identifier[preview] ( identifier[image] ,** identifier[kwargs] ): literal[string] keyword[try] : keyword[from] identifier[IPython] . identifier[display] keyword[import] identifier[Javascript] , identifier[HTML] , identifier[display] keyword[from] identifier[gbdxtools] . identifier[rda] . identifier[interface] keyword[import] identifier[RDA] keyword[from] identifier[gbdxtools] keyword[import] identifier[Interface] identifier[gbdx] = identifier[Interface] () keyword[except] : identifier[print] ( literal[string] ) keyword[return] identifier[zoom] = identifier[kwargs] . identifier[get] ( literal[string] , literal[int] ) identifier[bands] = identifier[kwargs] . identifier[get] ( literal[string] ) keyword[if] identifier[bands] keyword[is] keyword[None] : identifier[bands] = identifier[image] . identifier[_rgb_bands] identifier[wgs84_bounds] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[list] ( identifier[loads] ( identifier[image] . identifier[metadata] [ literal[string] ][ literal[string] ]). identifier[bounds] )) identifier[center] = identifier[kwargs] . identifier[get] ( literal[string] , identifier[list] ( identifier[shape] ( identifier[image] ). identifier[centroid] . identifier[bounds] [ literal[int] : literal[int] ])) keyword[if] identifier[image] . identifier[proj] != literal[string] : identifier[code] = identifier[image] . identifier[proj] . identifier[split] ( literal[string] )[ literal[int] ] identifier[conn] = identifier[gbdx] . identifier[gbdx_connection] identifier[proj_info] = identifier[conn] . identifier[get] ( literal[string] . identifier[format] ( identifier[code] )). identifier[json] () identifier[tfm] = identifier[partial] ( identifier[pyproj] . identifier[transform] , identifier[pyproj] . identifier[Proj] ( identifier[init] = literal[string] ), identifier[pyproj] . identifier[Proj] ( identifier[init] = identifier[image] . identifier[proj] )) identifier[bounds] = identifier[list] ( identifier[ops] . 
identifier[transform] ( identifier[tfm] , identifier[box] (* identifier[wgs84_bounds] )). identifier[bounds] ) keyword[else] : identifier[proj_info] ={} identifier[bounds] = identifier[wgs84_bounds] keyword[if] keyword[not] identifier[image] . identifier[options] . identifier[get] ( literal[string] ): identifier[rda] = identifier[RDA] () identifier[dra] = identifier[rda] . identifier[HistogramDRA] ( identifier[image] ) identifier[image] = identifier[dra] . identifier[aoi] ( identifier[bbox] = identifier[image] . identifier[bounds] ) identifier[graph_id] = identifier[image] . identifier[rda_id] identifier[node_id] = identifier[image] . identifier[rda] . identifier[graph] ()[ literal[string] ][ literal[int] ][ literal[string] ] identifier[map_id] = literal[string] . identifier[format] ( identifier[str] ( identifier[int] ( identifier[time] . identifier[time] ()))) identifier[scales] = literal[string] . identifier[join] ([ literal[string] ]* identifier[len] ( identifier[bands] )) identifier[offsets] = literal[string] . identifier[join] ([ literal[string] ]* identifier[len] ( identifier[bands] )) identifier[display] ( identifier[HTML] ( identifier[Template] ( literal[string] ). identifier[substitute] ({ literal[string] : identifier[map_id] }))) identifier[js] = identifier[Template] ( literal[string] ). identifier[substitute] ({ literal[string] : identifier[map_id] , literal[string] : identifier[image] . identifier[proj] , literal[string] : identifier[json] . identifier[dumps] ( identifier[proj_info] ), literal[string] : identifier[graph_id] , literal[string] : identifier[bounds] , literal[string] : literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[bands] )), literal[string] : identifier[node_id] , literal[string] : identifier[json] . identifier[dumps] ( identifier[image] . identifier[metadata] [ literal[string] ]), literal[string] : identifier[json] . identifier[dumps] ( identifier[image] . 
identifier[metadata] [ literal[string] ]), literal[string] : identifier[center] , literal[string] : identifier[zoom] , literal[string] : identifier[gbdx] . identifier[gbdx_connection] . identifier[access_token] , literal[string] : identifier[scales] , literal[string] : identifier[offsets] , literal[string] : identifier[VIRTUAL_RDA_URL] }) identifier[display] ( identifier[Javascript] ( identifier[js] ))
def preview(image, **kwargs): """ Show a slippy map preview of the image. Requires iPython. Args: image (image): image object to display zoom (int): zoom level to intialize the map, default is 16 center (list): center coordinates to initialize the map, defaults to center of image bands (list): bands of image to display, defaults to the image's default RGB bands """ try: from IPython.display import Javascript, HTML, display from gbdxtools.rda.interface import RDA from gbdxtools import Interface gbdx = Interface() # depends on [control=['try'], data=[]] except: print('IPython is required to produce maps.') return # depends on [control=['except'], data=[]] zoom = kwargs.get('zoom', 16) bands = kwargs.get('bands') if bands is None: bands = image._rgb_bands # depends on [control=['if'], data=['bands']] wgs84_bounds = kwargs.get('bounds', list(loads(image.metadata['image']['imageBoundsWGS84']).bounds)) center = kwargs.get('center', list(shape(image).centroid.bounds[0:2])) if image.proj != 'EPSG:4326': code = image.proj.split(':')[1] conn = gbdx.gbdx_connection proj_info = conn.get('https://ughlicoordinates.geobigdata.io/ughli/v1/projinfo/{}'.format(code)).json() tfm = partial(pyproj.transform, pyproj.Proj(init='EPSG:4326'), pyproj.Proj(init=image.proj)) bounds = list(ops.transform(tfm, box(*wgs84_bounds)).bounds) # depends on [control=['if'], data=[]] else: proj_info = {} bounds = wgs84_bounds # Applying DRA to a DRA'ed image looks bad, skip if already in graph if not image.options.get('dra'): rda = RDA() # Need some simple DRA to get the image in range for display. 
dra = rda.HistogramDRA(image) image = dra.aoi(bbox=image.bounds) # depends on [control=['if'], data=[]] graph_id = image.rda_id node_id = image.rda.graph()['nodes'][0]['id'] map_id = 'map_{}'.format(str(int(time.time()))) scales = ','.join(['1'] * len(bands)) offsets = ','.join(['0'] * len(bands)) display(HTML(Template('\n <div id="$map_id"/>\n <link href=\'https://openlayers.org/en/v4.6.4/css/ol.css\' rel=\'stylesheet\' />\n <script src="https://cdn.polyfill.io/v2/polyfill.min.js?features=requestAnimationFrame,Element.prototype.classList,URL"></script>\n <style>body{margin:0;padding:0;}#$map_id{position:relative;top:0;bottom:0;width:100%;height:400px;}</style>\n <style></style>\n ').substitute({'map_id': map_id}))) js = Template('\n require.config({\n paths: {\n oljs: \'https://cdnjs.cloudflare.com/ajax/libs/openlayers/4.6.4/ol\',\n proj4: \'https://cdnjs.cloudflare.com/ajax/libs/proj4js/2.4.4/proj4\'\n }\n });\n\n require([\'oljs\', \'proj4\'], function(oljs, proj4) {\n oljs.proj.setProj4(proj4)\n var md = $md;\n var georef = $georef;\n var graphId = \'$graphId\';\n var nodeId = \'$nodeId\';\n var extents = $bounds;\n\n var x1 = md.minTileX * md.tileXSize;\n var y1 = ((md.minTileY + md.numYTiles) * md.tileYSize + md.tileYSize);\n var x2 = ((md.minTileX + md.numXTiles) * md.tileXSize + md.tileXSize);\n var y2 = md.minTileY * md.tileYSize;\n var tileLayerResolutions = [georef.scaleX];\n\n var url = \'$url\' + \'/tile/\';\n url += graphId + \'/\' + nodeId;\n url += "/{x}/{y}.png?token=$token&display_bands=$bands&display_scales=$scales&display_offsets=$offsets";\n\n var proj = \'$proj\';\n var projInfo = $projInfo;\n\n if ( proj !== \'EPSG:4326\' ) {\n var proj4def = projInfo["proj4"];\n proj4.defs(proj, proj4def);\n var area = projInfo["area_of_use"];\n var bbox = [area["area_west_bound_lon"], area["area_south_bound_lat"],\n area["area_east_bound_lon"], area["area_north_bound_lat"]]\n var projection = oljs.proj.get(proj);\n var fromLonLat = 
oljs.proj.getTransform(\'EPSG:4326\', projection);\n var extent = oljs.extent.applyTransform(\n [bbox[0], bbox[1], bbox[2], bbox[3]], fromLonLat);\n projection.setExtent(extent);\n } else {\n var projection = oljs.proj.get(proj);\n }\n\n var rda = new oljs.layer.Tile({\n title: \'RDA\',\n opacity: 1,\n extent: extents,\n source: new oljs.source.TileImage({\n crossOrigin: null,\n projection: projection,\n extent: extents,\n\n tileGrid: new oljs.tilegrid.TileGrid({\n extent: extents,\n origin: [extents[0], extents[3]],\n resolutions: tileLayerResolutions,\n tileSize: [md.tileXSize, md.tileYSize],\n }),\n tileUrlFunction: function (coordinate) {\n if (coordinate === null) return undefined;\n const x = coordinate[1] + md.minTileX;\n const y = -(coordinate[2] + 1 - md.minTileY);\n if (x < md.minTileX || x > md.maxTileX) return undefined;\n if (y < md.minTileY || y > md.maxTileY) return undefined;\n return url.replace(\'{x}\', x).replace(\'{y}\', y);\n }\n })\n });\n\n var map = new oljs.Map({\n layers: [ rda ],\n target: \'$map_id\',\n view: new oljs.View({\n projection: projection,\n center: $center,\n zoom: $zoom\n })\n });\n });\n ').substitute({'map_id': map_id, 'proj': image.proj, 'projInfo': json.dumps(proj_info), 'graphId': graph_id, 'bounds': bounds, 'bands': ','.join(map(str, bands)), 'nodeId': node_id, 'md': json.dumps(image.metadata['image']), 'georef': json.dumps(image.metadata['georef']), 'center': center, 'zoom': zoom, 'token': gbdx.gbdx_connection.access_token, 'scales': scales, 'offsets': offsets, 'url': VIRTUAL_RDA_URL}) display(Javascript(js))
def tmp_context(fn, mode="r"): """ Return content fo the `fn` from the temporary directory. """ with open(tmp_context_name(fn), mode) as f: return f.read()
def function[tmp_context, parameter[fn, mode]]: constant[ Return content fo the `fn` from the temporary directory. ] with call[name[open], parameter[call[name[tmp_context_name], parameter[name[fn]]], name[mode]]] begin[:] return[call[name[f].read, parameter[]]]
keyword[def] identifier[tmp_context] ( identifier[fn] , identifier[mode] = literal[string] ): literal[string] keyword[with] identifier[open] ( identifier[tmp_context_name] ( identifier[fn] ), identifier[mode] ) keyword[as] identifier[f] : keyword[return] identifier[f] . identifier[read] ()
def tmp_context(fn, mode='r'): """ Return content fo the `fn` from the temporary directory. """ with open(tmp_context_name(fn), mode) as f: return f.read() # depends on [control=['with'], data=['f']]
def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs): """ Return whether to refresh an item synchronously when it is found in the cache but stale """ if self.fetch_on_stale_threshold is None: return False return delta > (self.fetch_on_stale_threshold - self.lifetime)
def function[should_stale_item_be_fetched_synchronously, parameter[self, delta]]: constant[ Return whether to refresh an item synchronously when it is found in the cache but stale ] if compare[name[self].fetch_on_stale_threshold is constant[None]] begin[:] return[constant[False]] return[compare[name[delta] greater[>] binary_operation[name[self].fetch_on_stale_threshold - name[self].lifetime]]]
keyword[def] identifier[should_stale_item_be_fetched_synchronously] ( identifier[self] , identifier[delta] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[self] . identifier[fetch_on_stale_threshold] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[return] identifier[delta] >( identifier[self] . identifier[fetch_on_stale_threshold] - identifier[self] . identifier[lifetime] )
def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs): """ Return whether to refresh an item synchronously when it is found in the cache but stale """ if self.fetch_on_stale_threshold is None: return False # depends on [control=['if'], data=[]] return delta > self.fetch_on_stale_threshold - self.lifetime
def _convert_to_style(cls, style_dict): """ converts a style_dict to an openpyxl style object Parameters ---------- style_dict : style dictionary to convert """ from openpyxl.style import Style xls_style = Style() for key, value in style_dict.items(): for nk, nv in value.items(): if key == "borders": (xls_style.borders.__getattribute__(nk) .__setattr__('border_style', nv)) else: xls_style.__getattribute__(key).__setattr__(nk, nv) return xls_style
def function[_convert_to_style, parameter[cls, style_dict]]: constant[ converts a style_dict to an openpyxl style object Parameters ---------- style_dict : style dictionary to convert ] from relative_module[openpyxl.style] import module[Style] variable[xls_style] assign[=] call[name[Style], parameter[]] for taget[tuple[[<ast.Name object at 0x7da20e9b3fa0>, <ast.Name object at 0x7da20e9b2b60>]]] in starred[call[name[style_dict].items, parameter[]]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20e9b0520>, <ast.Name object at 0x7da20e9b20e0>]]] in starred[call[name[value].items, parameter[]]] begin[:] if compare[name[key] equal[==] constant[borders]] begin[:] call[call[name[xls_style].borders.__getattribute__, parameter[name[nk]]].__setattr__, parameter[constant[border_style], name[nv]]] return[name[xls_style]]
keyword[def] identifier[_convert_to_style] ( identifier[cls] , identifier[style_dict] ): literal[string] keyword[from] identifier[openpyxl] . identifier[style] keyword[import] identifier[Style] identifier[xls_style] = identifier[Style] () keyword[for] identifier[key] , identifier[value] keyword[in] identifier[style_dict] . identifier[items] (): keyword[for] identifier[nk] , identifier[nv] keyword[in] identifier[value] . identifier[items] (): keyword[if] identifier[key] == literal[string] : ( identifier[xls_style] . identifier[borders] . identifier[__getattribute__] ( identifier[nk] ) . identifier[__setattr__] ( literal[string] , identifier[nv] )) keyword[else] : identifier[xls_style] . identifier[__getattribute__] ( identifier[key] ). identifier[__setattr__] ( identifier[nk] , identifier[nv] ) keyword[return] identifier[xls_style]
def _convert_to_style(cls, style_dict): """ converts a style_dict to an openpyxl style object Parameters ---------- style_dict : style dictionary to convert """ from openpyxl.style import Style xls_style = Style() for (key, value) in style_dict.items(): for (nk, nv) in value.items(): if key == 'borders': xls_style.borders.__getattribute__(nk).__setattr__('border_style', nv) # depends on [control=['if'], data=[]] else: xls_style.__getattribute__(key).__setattr__(nk, nv) # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] return xls_style
def write(self): """write the current settings to the config file""" with open(storage.config_file, 'w') as cfg: yaml.dump(self.as_dict(), cfg, default_flow_style=False) storage.refresh()
def function[write, parameter[self]]: constant[write the current settings to the config file] with call[name[open], parameter[name[storage].config_file, constant[w]]] begin[:] call[name[yaml].dump, parameter[call[name[self].as_dict, parameter[]], name[cfg]]] call[name[storage].refresh, parameter[]]
keyword[def] identifier[write] ( identifier[self] ): literal[string] keyword[with] identifier[open] ( identifier[storage] . identifier[config_file] , literal[string] ) keyword[as] identifier[cfg] : identifier[yaml] . identifier[dump] ( identifier[self] . identifier[as_dict] (), identifier[cfg] , identifier[default_flow_style] = keyword[False] ) identifier[storage] . identifier[refresh] ()
def write(self): """write the current settings to the config file""" with open(storage.config_file, 'w') as cfg: yaml.dump(self.as_dict(), cfg, default_flow_style=False) # depends on [control=['with'], data=['cfg']] storage.refresh()
def build_query(self, product=None, component=None, version=None, long_desc=None, bug_id=None, short_desc=None, cc=None, assigned_to=None, reporter=None, qa_contact=None, status=None, blocked=None, dependson=None, keywords=None, keywords_type=None, url=None, url_type=None, status_whiteboard=None, status_whiteboard_type=None, fixed_in=None, fixed_in_type=None, flag=None, alias=None, qa_whiteboard=None, devel_whiteboard=None, boolean_query=None, bug_severity=None, priority=None, target_release=None, target_milestone=None, emailtype=None, booleantype=None, include_fields=None, quicksearch=None, savedsearch=None, savedsearch_sharer_id=None, sub_component=None, tags=None, exclude_fields=None, extra_fields=None): """ Build a query string from passed arguments. Will handle query parameter differences between various bugzilla versions. Most of the parameters should be self-explanatory. However, if you want to perform a complex query, and easy way is to create it with the bugzilla web UI, copy the entire URL it generates, and pass it to the static method Bugzilla.url_to_query Then pass the output to Bugzilla.query() For details about the specific argument formats, see the bugzilla docs: https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#search-bugs """ if boolean_query or booleantype: raise RuntimeError("boolean_query format is no longer supported. " "If you need complicated URL queries, look into " "query --from-url/url_to_query().") query = { "alias": alias, "product": self._listify(product), "component": self._listify(component), "version": version, "id": bug_id, "short_desc": short_desc, "bug_status": status, "bug_severity": bug_severity, "priority": priority, "target_release": target_release, "target_milestone": target_milestone, "tag": self._listify(tags), "quicksearch": quicksearch, "savedsearch": savedsearch, "sharer_id": savedsearch_sharer_id, # RH extensions... don't add any more. 
See comment below "sub_components": self._listify(sub_component), } def add_bool(bzkey, value, bool_id, booltype=None): value = self._listify(value) if value is None: return bool_id query["query_format"] = "advanced" for boolval in value: def make_bool_str(prefix): # pylint: disable=cell-var-from-loop return "%s%i-0-0" % (prefix, bool_id) query[make_bool_str("field")] = bzkey query[make_bool_str("value")] = boolval query[make_bool_str("type")] = booltype or "substring" bool_id += 1 return bool_id # RH extensions that we have to maintain here for back compat, # but all future custom fields should be specified via # cli --field option, or via extending the query dict() manually. # No more supporting custom fields in this API bool_id = 0 bool_id = add_bool("keywords", keywords, bool_id, keywords_type) bool_id = add_bool("blocked", blocked, bool_id) bool_id = add_bool("dependson", dependson, bool_id) bool_id = add_bool("bug_file_loc", url, bool_id, url_type) bool_id = add_bool("cf_fixed_in", fixed_in, bool_id, fixed_in_type) bool_id = add_bool("flagtypes.name", flag, bool_id) bool_id = add_bool("status_whiteboard", status_whiteboard, bool_id, status_whiteboard_type) bool_id = add_bool("cf_qa_whiteboard", qa_whiteboard, bool_id) bool_id = add_bool("cf_devel_whiteboard", devel_whiteboard, bool_id) def add_email(key, value, count): if value is None: return count if not emailtype: query[key] = value return count query["query_format"] = "advanced" query['email%i' % count] = value query['email%s%i' % (key, count)] = True query['emailtype%i' % count] = emailtype return count + 1 email_count = 1 email_count = add_email("cc", cc, email_count) email_count = add_email("assigned_to", assigned_to, email_count) email_count = add_email("reporter", reporter, email_count) email_count = add_email("qa_contact", qa_contact, email_count) if long_desc is not None: query["query_format"] = "advanced" query["longdesc"] = long_desc query["longdesc_type"] = "allwordssubstr" # 'include_fields' 
only available for Bugzilla4+ # 'extra_fields' is an RHBZ extension query.update(self._process_include_fields( include_fields, exclude_fields, extra_fields)) # Strip out None elements in the dict for k, v in query.copy().items(): if v is None: del(query[k]) self.pre_translation(query) return query
def function[build_query, parameter[self, product, component, version, long_desc, bug_id, short_desc, cc, assigned_to, reporter, qa_contact, status, blocked, dependson, keywords, keywords_type, url, url_type, status_whiteboard, status_whiteboard_type, fixed_in, fixed_in_type, flag, alias, qa_whiteboard, devel_whiteboard, boolean_query, bug_severity, priority, target_release, target_milestone, emailtype, booleantype, include_fields, quicksearch, savedsearch, savedsearch_sharer_id, sub_component, tags, exclude_fields, extra_fields]]: constant[ Build a query string from passed arguments. Will handle query parameter differences between various bugzilla versions. Most of the parameters should be self-explanatory. However, if you want to perform a complex query, and easy way is to create it with the bugzilla web UI, copy the entire URL it generates, and pass it to the static method Bugzilla.url_to_query Then pass the output to Bugzilla.query() For details about the specific argument formats, see the bugzilla docs: https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#search-bugs ] if <ast.BoolOp object at 0x7da1b0d19a20> begin[:] <ast.Raise object at 0x7da1b0d1a9e0> variable[query] assign[=] dictionary[[<ast.Constant object at 0x7da1b0d195d0>, <ast.Constant object at 0x7da1b0d18c70>, <ast.Constant object at 0x7da1b0d18910>, <ast.Constant object at 0x7da1b0d1b010>, <ast.Constant object at 0x7da1b0d1a440>, <ast.Constant object at 0x7da1b0d19f60>, <ast.Constant object at 0x7da1b0d1b910>, <ast.Constant object at 0x7da1b0d18700>, <ast.Constant object at 0x7da1b0d186a0>, <ast.Constant object at 0x7da1b0d184c0>, <ast.Constant object at 0x7da1b0d1bca0>, <ast.Constant object at 0x7da1b0d18190>, <ast.Constant object at 0x7da1b0d1bc10>, <ast.Constant object at 0x7da1b0d18520>, <ast.Constant object at 0x7da1b0d1b340>, <ast.Constant object at 0x7da1b0d19b10>], [<ast.Name object at 0x7da1b0d19bd0>, <ast.Call object at 0x7da1b0d18430>, <ast.Call object at 0x7da1b0d1b9d0>, 
<ast.Name object at 0x7da1b0d1b730>, <ast.Name object at 0x7da1b0d1afe0>, <ast.Name object at 0x7da1b0d1a5c0>, <ast.Name object at 0x7da1b0d1a3b0>, <ast.Name object at 0x7da1b0d182b0>, <ast.Name object at 0x7da1b0d1bbb0>, <ast.Name object at 0x7da1b0d1ad70>, <ast.Name object at 0x7da1b0d193f0>, <ast.Call object at 0x7da1b0d1bfd0>, <ast.Name object at 0x7da1b0d1bb50>, <ast.Name object at 0x7da1b0d1b6a0>, <ast.Name object at 0x7da1b0d19ed0>, <ast.Call object at 0x7da1b0d1bf10>]] def function[add_bool, parameter[bzkey, value, bool_id, booltype]]: variable[value] assign[=] call[name[self]._listify, parameter[name[value]]] if compare[name[value] is constant[None]] begin[:] return[name[bool_id]] call[name[query]][constant[query_format]] assign[=] constant[advanced] for taget[name[boolval]] in starred[name[value]] begin[:] def function[make_bool_str, parameter[prefix]]: return[binary_operation[constant[%s%i-0-0] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0d18490>, <ast.Name object at 0x7da1b0d1be20>]]]] call[name[query]][call[name[make_bool_str], parameter[constant[field]]]] assign[=] name[bzkey] call[name[query]][call[name[make_bool_str], parameter[constant[value]]]] assign[=] name[boolval] call[name[query]][call[name[make_bool_str], parameter[constant[type]]]] assign[=] <ast.BoolOp object at 0x7da1b0c24220> <ast.AugAssign object at 0x7da1b0c25150> return[name[bool_id]] variable[bool_id] assign[=] constant[0] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[keywords], name[keywords], name[bool_id], name[keywords_type]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[blocked], name[blocked], name[bool_id]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[dependson], name[dependson], name[bool_id]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[bug_file_loc], name[url], name[bool_id], name[url_type]]] variable[bool_id] assign[=] call[name[add_bool], 
parameter[constant[cf_fixed_in], name[fixed_in], name[bool_id], name[fixed_in_type]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[flagtypes.name], name[flag], name[bool_id]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[status_whiteboard], name[status_whiteboard], name[bool_id], name[status_whiteboard_type]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[cf_qa_whiteboard], name[qa_whiteboard], name[bool_id]]] variable[bool_id] assign[=] call[name[add_bool], parameter[constant[cf_devel_whiteboard], name[devel_whiteboard], name[bool_id]]] def function[add_email, parameter[key, value, count]]: if compare[name[value] is constant[None]] begin[:] return[name[count]] if <ast.UnaryOp object at 0x7da1b0c272b0> begin[:] call[name[query]][name[key]] assign[=] name[value] return[name[count]] call[name[query]][constant[query_format]] assign[=] constant[advanced] call[name[query]][binary_operation[constant[email%i] <ast.Mod object at 0x7da2590d6920> name[count]]] assign[=] name[value] call[name[query]][binary_operation[constant[email%s%i] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0efbe80>, <ast.Name object at 0x7da1b0efbd90>]]]] assign[=] constant[True] call[name[query]][binary_operation[constant[emailtype%i] <ast.Mod object at 0x7da2590d6920> name[count]]] assign[=] name[emailtype] return[binary_operation[name[count] + constant[1]]] variable[email_count] assign[=] constant[1] variable[email_count] assign[=] call[name[add_email], parameter[constant[cc], name[cc], name[email_count]]] variable[email_count] assign[=] call[name[add_email], parameter[constant[assigned_to], name[assigned_to], name[email_count]]] variable[email_count] assign[=] call[name[add_email], parameter[constant[reporter], name[reporter], name[email_count]]] variable[email_count] assign[=] call[name[add_email], parameter[constant[qa_contact], name[qa_contact], name[email_count]]] if compare[name[long_desc] is_not 
constant[None]] begin[:] call[name[query]][constant[query_format]] assign[=] constant[advanced] call[name[query]][constant[longdesc]] assign[=] name[long_desc] call[name[query]][constant[longdesc_type]] assign[=] constant[allwordssubstr] call[name[query].update, parameter[call[name[self]._process_include_fields, parameter[name[include_fields], name[exclude_fields], name[extra_fields]]]]] for taget[tuple[[<ast.Name object at 0x7da1b0ef8700>, <ast.Name object at 0x7da1b0ef8640>]]] in starred[call[call[name[query].copy, parameter[]].items, parameter[]]] begin[:] if compare[name[v] is constant[None]] begin[:] <ast.Delete object at 0x7da1b0ef9780> call[name[self].pre_translation, parameter[name[query]]] return[name[query]]
keyword[def] identifier[build_query] ( identifier[self] , identifier[product] = keyword[None] , identifier[component] = keyword[None] , identifier[version] = keyword[None] , identifier[long_desc] = keyword[None] , identifier[bug_id] = keyword[None] , identifier[short_desc] = keyword[None] , identifier[cc] = keyword[None] , identifier[assigned_to] = keyword[None] , identifier[reporter] = keyword[None] , identifier[qa_contact] = keyword[None] , identifier[status] = keyword[None] , identifier[blocked] = keyword[None] , identifier[dependson] = keyword[None] , identifier[keywords] = keyword[None] , identifier[keywords_type] = keyword[None] , identifier[url] = keyword[None] , identifier[url_type] = keyword[None] , identifier[status_whiteboard] = keyword[None] , identifier[status_whiteboard_type] = keyword[None] , identifier[fixed_in] = keyword[None] , identifier[fixed_in_type] = keyword[None] , identifier[flag] = keyword[None] , identifier[alias] = keyword[None] , identifier[qa_whiteboard] = keyword[None] , identifier[devel_whiteboard] = keyword[None] , identifier[boolean_query] = keyword[None] , identifier[bug_severity] = keyword[None] , identifier[priority] = keyword[None] , identifier[target_release] = keyword[None] , identifier[target_milestone] = keyword[None] , identifier[emailtype] = keyword[None] , identifier[booleantype] = keyword[None] , identifier[include_fields] = keyword[None] , identifier[quicksearch] = keyword[None] , identifier[savedsearch] = keyword[None] , identifier[savedsearch_sharer_id] = keyword[None] , identifier[sub_component] = keyword[None] , identifier[tags] = keyword[None] , identifier[exclude_fields] = keyword[None] , identifier[extra_fields] = keyword[None] ): literal[string] keyword[if] identifier[boolean_query] keyword[or] identifier[booleantype] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] literal[string] ) identifier[query] ={ literal[string] : identifier[alias] , literal[string] : identifier[self] . 
identifier[_listify] ( identifier[product] ), literal[string] : identifier[self] . identifier[_listify] ( identifier[component] ), literal[string] : identifier[version] , literal[string] : identifier[bug_id] , literal[string] : identifier[short_desc] , literal[string] : identifier[status] , literal[string] : identifier[bug_severity] , literal[string] : identifier[priority] , literal[string] : identifier[target_release] , literal[string] : identifier[target_milestone] , literal[string] : identifier[self] . identifier[_listify] ( identifier[tags] ), literal[string] : identifier[quicksearch] , literal[string] : identifier[savedsearch] , literal[string] : identifier[savedsearch_sharer_id] , literal[string] : identifier[self] . identifier[_listify] ( identifier[sub_component] ), } keyword[def] identifier[add_bool] ( identifier[bzkey] , identifier[value] , identifier[bool_id] , identifier[booltype] = keyword[None] ): identifier[value] = identifier[self] . identifier[_listify] ( identifier[value] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] identifier[bool_id] identifier[query] [ literal[string] ]= literal[string] keyword[for] identifier[boolval] keyword[in] identifier[value] : keyword[def] identifier[make_bool_str] ( identifier[prefix] ): keyword[return] literal[string] %( identifier[prefix] , identifier[bool_id] ) identifier[query] [ identifier[make_bool_str] ( literal[string] )]= identifier[bzkey] identifier[query] [ identifier[make_bool_str] ( literal[string] )]= identifier[boolval] identifier[query] [ identifier[make_bool_str] ( literal[string] )]= identifier[booltype] keyword[or] literal[string] identifier[bool_id] += literal[int] keyword[return] identifier[bool_id] identifier[bool_id] = literal[int] identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[keywords] , identifier[bool_id] , identifier[keywords_type] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[blocked] , 
identifier[bool_id] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[dependson] , identifier[bool_id] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[url] , identifier[bool_id] , identifier[url_type] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[fixed_in] , identifier[bool_id] , identifier[fixed_in_type] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[flag] , identifier[bool_id] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[status_whiteboard] , identifier[bool_id] , identifier[status_whiteboard_type] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[qa_whiteboard] , identifier[bool_id] ) identifier[bool_id] = identifier[add_bool] ( literal[string] , identifier[devel_whiteboard] , identifier[bool_id] ) keyword[def] identifier[add_email] ( identifier[key] , identifier[value] , identifier[count] ): keyword[if] identifier[value] keyword[is] keyword[None] : keyword[return] identifier[count] keyword[if] keyword[not] identifier[emailtype] : identifier[query] [ identifier[key] ]= identifier[value] keyword[return] identifier[count] identifier[query] [ literal[string] ]= literal[string] identifier[query] [ literal[string] % identifier[count] ]= identifier[value] identifier[query] [ literal[string] %( identifier[key] , identifier[count] )]= keyword[True] identifier[query] [ literal[string] % identifier[count] ]= identifier[emailtype] keyword[return] identifier[count] + literal[int] identifier[email_count] = literal[int] identifier[email_count] = identifier[add_email] ( literal[string] , identifier[cc] , identifier[email_count] ) identifier[email_count] = identifier[add_email] ( literal[string] , identifier[assigned_to] , identifier[email_count] ) identifier[email_count] = identifier[add_email] ( literal[string] , identifier[reporter] , identifier[email_count] ) identifier[email_count] = identifier[add_email] ( 
literal[string] , identifier[qa_contact] , identifier[email_count] ) keyword[if] identifier[long_desc] keyword[is] keyword[not] keyword[None] : identifier[query] [ literal[string] ]= literal[string] identifier[query] [ literal[string] ]= identifier[long_desc] identifier[query] [ literal[string] ]= literal[string] identifier[query] . identifier[update] ( identifier[self] . identifier[_process_include_fields] ( identifier[include_fields] , identifier[exclude_fields] , identifier[extra_fields] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[query] . identifier[copy] (). identifier[items] (): keyword[if] identifier[v] keyword[is] keyword[None] : keyword[del] ( identifier[query] [ identifier[k] ]) identifier[self] . identifier[pre_translation] ( identifier[query] ) keyword[return] identifier[query]
def build_query(self, product=None, component=None, version=None, long_desc=None, bug_id=None, short_desc=None, cc=None, assigned_to=None, reporter=None, qa_contact=None, status=None, blocked=None, dependson=None, keywords=None, keywords_type=None, url=None, url_type=None, status_whiteboard=None, status_whiteboard_type=None, fixed_in=None, fixed_in_type=None, flag=None, alias=None, qa_whiteboard=None, devel_whiteboard=None, boolean_query=None, bug_severity=None, priority=None, target_release=None, target_milestone=None, emailtype=None, booleantype=None, include_fields=None, quicksearch=None, savedsearch=None, savedsearch_sharer_id=None, sub_component=None, tags=None, exclude_fields=None, extra_fields=None): """ Build a query string from passed arguments. Will handle query parameter differences between various bugzilla versions. Most of the parameters should be self-explanatory. However, if you want to perform a complex query, and easy way is to create it with the bugzilla web UI, copy the entire URL it generates, and pass it to the static method Bugzilla.url_to_query Then pass the output to Bugzilla.query() For details about the specific argument formats, see the bugzilla docs: https://bugzilla.readthedocs.io/en/latest/api/core/v1/bug.html#search-bugs """ if boolean_query or booleantype: raise RuntimeError('boolean_query format is no longer supported. If you need complicated URL queries, look into query --from-url/url_to_query().') # depends on [control=['if'], data=[]] # RH extensions... don't add any more. 
See comment below query = {'alias': alias, 'product': self._listify(product), 'component': self._listify(component), 'version': version, 'id': bug_id, 'short_desc': short_desc, 'bug_status': status, 'bug_severity': bug_severity, 'priority': priority, 'target_release': target_release, 'target_milestone': target_milestone, 'tag': self._listify(tags), 'quicksearch': quicksearch, 'savedsearch': savedsearch, 'sharer_id': savedsearch_sharer_id, 'sub_components': self._listify(sub_component)} def add_bool(bzkey, value, bool_id, booltype=None): value = self._listify(value) if value is None: return bool_id # depends on [control=['if'], data=[]] query['query_format'] = 'advanced' for boolval in value: def make_bool_str(prefix): # pylint: disable=cell-var-from-loop return '%s%i-0-0' % (prefix, bool_id) query[make_bool_str('field')] = bzkey query[make_bool_str('value')] = boolval query[make_bool_str('type')] = booltype or 'substring' bool_id += 1 # depends on [control=['for'], data=['boolval']] return bool_id # RH extensions that we have to maintain here for back compat, # but all future custom fields should be specified via # cli --field option, or via extending the query dict() manually. 
# No more supporting custom fields in this API bool_id = 0 bool_id = add_bool('keywords', keywords, bool_id, keywords_type) bool_id = add_bool('blocked', blocked, bool_id) bool_id = add_bool('dependson', dependson, bool_id) bool_id = add_bool('bug_file_loc', url, bool_id, url_type) bool_id = add_bool('cf_fixed_in', fixed_in, bool_id, fixed_in_type) bool_id = add_bool('flagtypes.name', flag, bool_id) bool_id = add_bool('status_whiteboard', status_whiteboard, bool_id, status_whiteboard_type) bool_id = add_bool('cf_qa_whiteboard', qa_whiteboard, bool_id) bool_id = add_bool('cf_devel_whiteboard', devel_whiteboard, bool_id) def add_email(key, value, count): if value is None: return count # depends on [control=['if'], data=[]] if not emailtype: query[key] = value return count # depends on [control=['if'], data=[]] query['query_format'] = 'advanced' query['email%i' % count] = value query['email%s%i' % (key, count)] = True query['emailtype%i' % count] = emailtype return count + 1 email_count = 1 email_count = add_email('cc', cc, email_count) email_count = add_email('assigned_to', assigned_to, email_count) email_count = add_email('reporter', reporter, email_count) email_count = add_email('qa_contact', qa_contact, email_count) if long_desc is not None: query['query_format'] = 'advanced' query['longdesc'] = long_desc query['longdesc_type'] = 'allwordssubstr' # depends on [control=['if'], data=['long_desc']] # 'include_fields' only available for Bugzilla4+ # 'extra_fields' is an RHBZ extension query.update(self._process_include_fields(include_fields, exclude_fields, extra_fields)) # Strip out None elements in the dict for (k, v) in query.copy().items(): if v is None: del query[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] self.pre_translation(query) return query
def update( # noqa: C901 self, alert_condition_nrql_id, policy_id, name=None, threshold_type=None, query=None, since_value=None, terms=None, expected_groups=None, value_function=None, runbook_url=None, ignore_overlap=None, enabled=True): """ Updates any of the optional parameters of the alert condition nrql :type alert_condition_nrql_id: int :param alert_condition_nrql_id: Alerts condition NRQL id to update :type policy_id: int :param policy_id: Alert policy id where target alert condition belongs to :type condition_scope: str :param condition_scope: The scope of the condition, can be instance or application :type name: str :param name: The name of the alert :type threshold_type: str :param threshold_type: The tthreshold_typeype of the condition, can be static or outlier :type query: str :param query: nrql query for the alerts :type since_value: str :param since_value: since value for the alert :type terms: list[hash] :param terms: list of hashes containing threshold config for the alert :type expected_groups: int :param expected_groups: expected groups setting for outlier alerts :type value_function: str :param type: value function for static alerts :type runbook_url: str :param runbook_url: The url of the runbook :type ignore_overlap: bool :param ignore_overlap: Whether to ignore overlaps for outlier alerts :type enabled: bool :param enabled: Whether to enable that alert condition :rtype: dict :return: The JSON response of the API :raises: This will raise a :class:`NewRelicAPIServerException<newrelic_api.exceptions.NoEntityException>` if target alert condition is not included in target policy :raises: This will raise a :class:`ConfigurationException<newrelic_api.exceptions.ConfigurationException>` if metric is set as user_defined but user_defined config is not passed :: { "nrql_condition": { "name": "string", "runbook_url": "string", "enabled": "boolean", "expected_groups": "integer", "ignore_overlap": "boolean", "value_function": "string", "terms": [ { 
"duration": "string", "operator": "string", "priority": "string", "threshold": "string", "time_function": "string" } ], "nrql": { "query": "string", "since_value": "string" } } } """ conditions_nrql_dict = self.list(policy_id) target_condition_nrql = None for condition in conditions_nrql_dict['nrql_conditions']: if int(condition['id']) == alert_condition_nrql_id: target_condition_nrql = condition break if target_condition_nrql is None: raise NoEntityException( 'Target alert condition nrql is not included in that policy.' 'policy_id: {}, alert_condition_nrql_id {}'.format( policy_id, alert_condition_nrql_id ) ) data = { 'nrql_condition': { 'type': threshold_type or target_condition_nrql['type'], 'enabled': target_condition_nrql['enabled'], 'name': name or target_condition_nrql['name'], 'terms': terms or target_condition_nrql['terms'], 'nrql': { 'query': query or target_condition_nrql['nrql']['query'], 'since_value': since_value or target_condition_nrql['nrql']['since_value'], } } } if enabled is not None: data['nrql_condition']['enabled'] = str(enabled).lower() if runbook_url is not None: data['nrql_condition']['runbook_url'] = runbook_url elif 'runbook_url' in target_condition_nrql: data['nrql_condition']['runbook_url'] = target_condition_nrql['runbook_url'] if expected_groups is not None: data['nrql_condition']['expected_groups'] = expected_groups elif 'expected_groups' in target_condition_nrql: data['nrql_condition']['expected_groups'] = target_condition_nrql['expected_groups'] if ignore_overlap is not None: data['nrql_condition']['ignore_overlap'] = ignore_overlap elif 'ignore_overlap' in target_condition_nrql: data['nrql_condition']['ignore_overlap'] = target_condition_nrql['ignore_overlap'] if value_function is not None: data['nrql_condition']['value_function'] = value_function elif 'value_function' in target_condition_nrql: data['nrql_condition']['value_function'] = target_condition_nrql['value_function'] if data['nrql_condition']['type'] == 'static': if 
'value_function' not in data['nrql_condition']: raise ConfigurationException( 'Alert is set as static but no value_function config specified' ) data['nrql_condition'].pop('expected_groups', None) data['nrql_condition'].pop('ignore_overlap', None) elif data['nrql_condition']['type'] == 'outlier': if 'expected_groups' not in data['nrql_condition']: raise ConfigurationException( 'Alert is set as outlier but expected_groups config is not specified' ) if 'ignore_overlap' not in data['nrql_condition']: raise ConfigurationException( 'Alert is set as outlier but ignore_overlap config is not specified' ) data['nrql_condition'].pop('value_function', None) return self._put( url='{0}alerts_nrql_conditions/{1}.json'.format(self.URL, alert_condition_nrql_id), headers=self.headers, data=data )
def function[update, parameter[self, alert_condition_nrql_id, policy_id, name, threshold_type, query, since_value, terms, expected_groups, value_function, runbook_url, ignore_overlap, enabled]]: constant[ Updates any of the optional parameters of the alert condition nrql :type alert_condition_nrql_id: int :param alert_condition_nrql_id: Alerts condition NRQL id to update :type policy_id: int :param policy_id: Alert policy id where target alert condition belongs to :type condition_scope: str :param condition_scope: The scope of the condition, can be instance or application :type name: str :param name: The name of the alert :type threshold_type: str :param threshold_type: The tthreshold_typeype of the condition, can be static or outlier :type query: str :param query: nrql query for the alerts :type since_value: str :param since_value: since value for the alert :type terms: list[hash] :param terms: list of hashes containing threshold config for the alert :type expected_groups: int :param expected_groups: expected groups setting for outlier alerts :type value_function: str :param type: value function for static alerts :type runbook_url: str :param runbook_url: The url of the runbook :type ignore_overlap: bool :param ignore_overlap: Whether to ignore overlaps for outlier alerts :type enabled: bool :param enabled: Whether to enable that alert condition :rtype: dict :return: The JSON response of the API :raises: This will raise a :class:`NewRelicAPIServerException<newrelic_api.exceptions.NoEntityException>` if target alert condition is not included in target policy :raises: This will raise a :class:`ConfigurationException<newrelic_api.exceptions.ConfigurationException>` if metric is set as user_defined but user_defined config is not passed :: { "nrql_condition": { "name": "string", "runbook_url": "string", "enabled": "boolean", "expected_groups": "integer", "ignore_overlap": "boolean", "value_function": "string", "terms": [ { "duration": "string", "operator": "string", 
"priority": "string", "threshold": "string", "time_function": "string" } ], "nrql": { "query": "string", "since_value": "string" } } } ] variable[conditions_nrql_dict] assign[=] call[name[self].list, parameter[name[policy_id]]] variable[target_condition_nrql] assign[=] constant[None] for taget[name[condition]] in starred[call[name[conditions_nrql_dict]][constant[nrql_conditions]]] begin[:] if compare[call[name[int], parameter[call[name[condition]][constant[id]]]] equal[==] name[alert_condition_nrql_id]] begin[:] variable[target_condition_nrql] assign[=] name[condition] break if compare[name[target_condition_nrql] is constant[None]] begin[:] <ast.Raise object at 0x7da1b0f3ac20> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f3bc70>], [<ast.Dict object at 0x7da1b0f39f90>]] if compare[name[enabled] is_not constant[None]] begin[:] call[call[name[data]][constant[nrql_condition]]][constant[enabled]] assign[=] call[call[name[str], parameter[name[enabled]]].lower, parameter[]] if compare[name[runbook_url] is_not constant[None]] begin[:] call[call[name[data]][constant[nrql_condition]]][constant[runbook_url]] assign[=] name[runbook_url] if compare[name[expected_groups] is_not constant[None]] begin[:] call[call[name[data]][constant[nrql_condition]]][constant[expected_groups]] assign[=] name[expected_groups] if compare[name[ignore_overlap] is_not constant[None]] begin[:] call[call[name[data]][constant[nrql_condition]]][constant[ignore_overlap]] assign[=] name[ignore_overlap] if compare[name[value_function] is_not constant[None]] begin[:] call[call[name[data]][constant[nrql_condition]]][constant[value_function]] assign[=] name[value_function] if compare[call[call[name[data]][constant[nrql_condition]]][constant[type]] equal[==] constant[static]] begin[:] if compare[constant[value_function] <ast.NotIn object at 0x7da2590d7190> call[name[data]][constant[nrql_condition]]] begin[:] <ast.Raise object at 0x7da1b0f2bf70> 
call[call[name[data]][constant[nrql_condition]].pop, parameter[constant[expected_groups], constant[None]]] call[call[name[data]][constant[nrql_condition]].pop, parameter[constant[ignore_overlap], constant[None]]] return[call[name[self]._put, parameter[]]]
keyword[def] identifier[update] ( identifier[self] , identifier[alert_condition_nrql_id] , identifier[policy_id] , identifier[name] = keyword[None] , identifier[threshold_type] = keyword[None] , identifier[query] = keyword[None] , identifier[since_value] = keyword[None] , identifier[terms] = keyword[None] , identifier[expected_groups] = keyword[None] , identifier[value_function] = keyword[None] , identifier[runbook_url] = keyword[None] , identifier[ignore_overlap] = keyword[None] , identifier[enabled] = keyword[True] ): literal[string] identifier[conditions_nrql_dict] = identifier[self] . identifier[list] ( identifier[policy_id] ) identifier[target_condition_nrql] = keyword[None] keyword[for] identifier[condition] keyword[in] identifier[conditions_nrql_dict] [ literal[string] ]: keyword[if] identifier[int] ( identifier[condition] [ literal[string] ])== identifier[alert_condition_nrql_id] : identifier[target_condition_nrql] = identifier[condition] keyword[break] keyword[if] identifier[target_condition_nrql] keyword[is] keyword[None] : keyword[raise] identifier[NoEntityException] ( literal[string] literal[string] . 
identifier[format] ( identifier[policy_id] , identifier[alert_condition_nrql_id] ) ) identifier[data] ={ literal[string] :{ literal[string] : identifier[threshold_type] keyword[or] identifier[target_condition_nrql] [ literal[string] ], literal[string] : identifier[target_condition_nrql] [ literal[string] ], literal[string] : identifier[name] keyword[or] identifier[target_condition_nrql] [ literal[string] ], literal[string] : identifier[terms] keyword[or] identifier[target_condition_nrql] [ literal[string] ], literal[string] :{ literal[string] : identifier[query] keyword[or] identifier[target_condition_nrql] [ literal[string] ][ literal[string] ], literal[string] : identifier[since_value] keyword[or] identifier[target_condition_nrql] [ literal[string] ][ literal[string] ], } } } keyword[if] identifier[enabled] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[str] ( identifier[enabled] ). identifier[lower] () keyword[if] identifier[runbook_url] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[runbook_url] keyword[elif] literal[string] keyword[in] identifier[target_condition_nrql] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[target_condition_nrql] [ literal[string] ] keyword[if] identifier[expected_groups] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[expected_groups] keyword[elif] literal[string] keyword[in] identifier[target_condition_nrql] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[target_condition_nrql] [ literal[string] ] keyword[if] identifier[ignore_overlap] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[ignore_overlap] keyword[elif] literal[string] keyword[in] identifier[target_condition_nrql] : identifier[data] [ literal[string] ][ literal[string] ]= 
identifier[target_condition_nrql] [ literal[string] ] keyword[if] identifier[value_function] keyword[is] keyword[not] keyword[None] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[value_function] keyword[elif] literal[string] keyword[in] identifier[target_condition_nrql] : identifier[data] [ literal[string] ][ literal[string] ]= identifier[target_condition_nrql] [ literal[string] ] keyword[if] identifier[data] [ literal[string] ][ literal[string] ]== literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] [ literal[string] ]: keyword[raise] identifier[ConfigurationException] ( literal[string] ) identifier[data] [ literal[string] ]. identifier[pop] ( literal[string] , keyword[None] ) identifier[data] [ literal[string] ]. identifier[pop] ( literal[string] , keyword[None] ) keyword[elif] identifier[data] [ literal[string] ][ literal[string] ]== literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[data] [ literal[string] ]: keyword[raise] identifier[ConfigurationException] ( literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[data] [ literal[string] ]: keyword[raise] identifier[ConfigurationException] ( literal[string] ) identifier[data] [ literal[string] ]. identifier[pop] ( literal[string] , keyword[None] ) keyword[return] identifier[self] . identifier[_put] ( identifier[url] = literal[string] . identifier[format] ( identifier[self] . identifier[URL] , identifier[alert_condition_nrql_id] ), identifier[headers] = identifier[self] . identifier[headers] , identifier[data] = identifier[data] )
def update(self, alert_condition_nrql_id, policy_id, name=None, threshold_type=None, query=None, since_value=None, terms=None, expected_groups=None, value_function=None, runbook_url=None, ignore_overlap=None, enabled=True): # noqa: C901 '\n Updates any of the optional parameters of the alert condition nrql\n\n :type alert_condition_nrql_id: int\n :param alert_condition_nrql_id: Alerts condition NRQL id to update\n\n :type policy_id: int\n :param policy_id: Alert policy id where target alert condition belongs to\n\n :type condition_scope: str\n :param condition_scope: The scope of the condition, can be instance or application\n\n :type name: str\n :param name: The name of the alert\n\n :type threshold_type: str\n :param threshold_type: The tthreshold_typeype of the condition, can be static or outlier\n\n :type query: str\n :param query: nrql query for the alerts\n\n :type since_value: str\n :param since_value: since value for the alert\n\n :type terms: list[hash]\n :param terms: list of hashes containing threshold config for the alert\n\n :type expected_groups: int\n :param expected_groups: expected groups setting for outlier alerts\n\n :type value_function: str\n :param type: value function for static alerts\n\n :type runbook_url: str\n :param runbook_url: The url of the runbook\n\n :type ignore_overlap: bool\n :param ignore_overlap: Whether to ignore overlaps for outlier alerts\n\n :type enabled: bool\n :param enabled: Whether to enable that alert condition\n\n :rtype: dict\n :return: The JSON response of the API\n\n :raises: This will raise a\n :class:`NewRelicAPIServerException<newrelic_api.exceptions.NoEntityException>`\n if target alert condition is not included in target policy\n\n :raises: This will raise a\n :class:`ConfigurationException<newrelic_api.exceptions.ConfigurationException>`\n if metric is set as user_defined but user_defined config is not passed\n ::\n {\n "nrql_condition": {\n "name": "string",\n "runbook_url": "string",\n "enabled": 
"boolean",\n "expected_groups": "integer",\n "ignore_overlap": "boolean",\n "value_function": "string",\n "terms": [\n {\n "duration": "string",\n "operator": "string",\n "priority": "string",\n "threshold": "string",\n "time_function": "string"\n }\n ],\n "nrql": {\n "query": "string",\n "since_value": "string"\n }\n }\n }\n ' conditions_nrql_dict = self.list(policy_id) target_condition_nrql = None for condition in conditions_nrql_dict['nrql_conditions']: if int(condition['id']) == alert_condition_nrql_id: target_condition_nrql = condition break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['condition']] if target_condition_nrql is None: raise NoEntityException('Target alert condition nrql is not included in that policy.policy_id: {}, alert_condition_nrql_id {}'.format(policy_id, alert_condition_nrql_id)) # depends on [control=['if'], data=[]] data = {'nrql_condition': {'type': threshold_type or target_condition_nrql['type'], 'enabled': target_condition_nrql['enabled'], 'name': name or target_condition_nrql['name'], 'terms': terms or target_condition_nrql['terms'], 'nrql': {'query': query or target_condition_nrql['nrql']['query'], 'since_value': since_value or target_condition_nrql['nrql']['since_value']}}} if enabled is not None: data['nrql_condition']['enabled'] = str(enabled).lower() # depends on [control=['if'], data=['enabled']] if runbook_url is not None: data['nrql_condition']['runbook_url'] = runbook_url # depends on [control=['if'], data=['runbook_url']] elif 'runbook_url' in target_condition_nrql: data['nrql_condition']['runbook_url'] = target_condition_nrql['runbook_url'] # depends on [control=['if'], data=['target_condition_nrql']] if expected_groups is not None: data['nrql_condition']['expected_groups'] = expected_groups # depends on [control=['if'], data=['expected_groups']] elif 'expected_groups' in target_condition_nrql: data['nrql_condition']['expected_groups'] = target_condition_nrql['expected_groups'] # depends on 
[control=['if'], data=['target_condition_nrql']] if ignore_overlap is not None: data['nrql_condition']['ignore_overlap'] = ignore_overlap # depends on [control=['if'], data=['ignore_overlap']] elif 'ignore_overlap' in target_condition_nrql: data['nrql_condition']['ignore_overlap'] = target_condition_nrql['ignore_overlap'] # depends on [control=['if'], data=['target_condition_nrql']] if value_function is not None: data['nrql_condition']['value_function'] = value_function # depends on [control=['if'], data=['value_function']] elif 'value_function' in target_condition_nrql: data['nrql_condition']['value_function'] = target_condition_nrql['value_function'] # depends on [control=['if'], data=['target_condition_nrql']] if data['nrql_condition']['type'] == 'static': if 'value_function' not in data['nrql_condition']: raise ConfigurationException('Alert is set as static but no value_function config specified') # depends on [control=['if'], data=[]] data['nrql_condition'].pop('expected_groups', None) data['nrql_condition'].pop('ignore_overlap', None) # depends on [control=['if'], data=[]] elif data['nrql_condition']['type'] == 'outlier': if 'expected_groups' not in data['nrql_condition']: raise ConfigurationException('Alert is set as outlier but expected_groups config is not specified') # depends on [control=['if'], data=[]] if 'ignore_overlap' not in data['nrql_condition']: raise ConfigurationException('Alert is set as outlier but ignore_overlap config is not specified') # depends on [control=['if'], data=[]] data['nrql_condition'].pop('value_function', None) # depends on [control=['if'], data=[]] return self._put(url='{0}alerts_nrql_conditions/{1}.json'.format(self.URL, alert_condition_nrql_id), headers=self.headers, data=data)
def get(self, index1, index2=None):
    """Return the text from INDEX1 to INDEX2 (not included)."""
    # Delegate to the underlying Tk widget command, then trim
    # surrounding whitespace from whatever Tk hands back.
    return self.tk.call(self._w, 'get', index1, index2).strip()
def function[get, parameter[self, index1, index2]]: constant[Return the text from INDEX1 to INDEX2 (not included).] variable[returnvar] assign[=] call[name[self].tk.call, parameter[name[self]._w, constant[get], name[index1], name[index2]]] return[call[name[returnvar].strip, parameter[]]]
keyword[def] identifier[get] ( identifier[self] , identifier[index1] , identifier[index2] = keyword[None] ): literal[string] identifier[returnvar] = identifier[self] . identifier[tk] . identifier[call] ( identifier[self] . identifier[_w] , literal[string] , identifier[index1] , identifier[index2] ) keyword[return] identifier[returnvar] . identifier[strip] ()
def get(self, index1, index2=None): """Return the text from INDEX1 to INDEX2 (not included).""" returnvar = self.tk.call(self._w, 'get', index1, index2) return returnvar.strip()
def create(self, force=False, exists_ok=False):
    """
    Creates a db file with the core schema.

    :param force: If `True` an existing db file will be overwritten.
    :param exists_ok: If `True`, silently return when the db file already exists.
    :raises ValueError: if the db file exists and neither flag is set.
    """
    if self.fname and self.fname.exists():
        if force:
            self.drop()
        elif exists_ok:
            return
        else:
            raise ValueError('db file already exists, use force=True to overwrite')

    # One "`field` TEXT," column definition per BibTeX field.
    bibtex_columns = '\n    '.join(
        '`{0}` TEXT,'.format(field) for field in BIBTEX_FIELDS)

    with self.connection() as db:
        db.execute("""\
CREATE TABLE dataset (
    ID TEXT PRIMARY KEY NOT NULL,
    name TEXT,
    version TEXT,
    metadata_json TEXT
)""")
        db.execute("""\
CREATE TABLE datasetmeta (
    dataset_ID TEXT ,
    key TEXT,
    value TEXT,
    PRIMARY KEY (dataset_ID, key),
    FOREIGN KEY(dataset_ID) REFERENCES dataset(ID)
)""")
        db.execute("""\
CREATE TABLE SourceTable (
    dataset_ID TEXT ,
    ID TEXT ,
    bibtex_type TEXT,
    {0}
    extra TEXT,
    PRIMARY KEY (dataset_ID, ID),
    FOREIGN KEY(dataset_ID) REFERENCES dataset(ID)
)""".format(bibtex_columns))
def function[create, parameter[self, force, exists_ok]]: constant[ Creates a db file with the core schema. :param force: If `True` an existing db file will be overwritten. ] if <ast.BoolOp object at 0x7da2041d8f40> begin[:] if name[force] begin[:] call[name[self].drop, parameter[]] with call[name[self].connection, parameter[]] begin[:] call[name[db].execute, parameter[constant[CREATE TABLE dataset ( ID TEXT PRIMARY KEY NOT NULL, name TEXT, version TEXT, metadata_json TEXT )]]] call[name[db].execute, parameter[constant[CREATE TABLE datasetmeta ( dataset_ID TEXT , key TEXT, value TEXT, PRIMARY KEY (dataset_ID, key), FOREIGN KEY(dataset_ID) REFERENCES dataset(ID) )]]] call[name[db].execute, parameter[call[constant[CREATE TABLE SourceTable ( dataset_ID TEXT , ID TEXT , bibtex_type TEXT, {0} extra TEXT, PRIMARY KEY (dataset_ID, ID), FOREIGN KEY(dataset_ID) REFERENCES dataset(ID) )].format, parameter[call[constant[ ].join, parameter[<ast.GeneratorExp object at 0x7da2041d9030>]]]]]]
keyword[def] identifier[create] ( identifier[self] , identifier[force] = keyword[False] , identifier[exists_ok] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[fname] keyword[and] identifier[self] . identifier[fname] . identifier[exists] (): keyword[if] identifier[force] : identifier[self] . identifier[drop] () keyword[elif] identifier[exists_ok] : keyword[return] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[with] identifier[self] . identifier[connection] () keyword[as] identifier[db] : identifier[db] . identifier[execute] ( literal[string] ) identifier[db] . identifier[execute] ( literal[string] ) identifier[db] . identifier[execute] ( literal[string] . identifier[format] ( literal[string] . identifier[join] ( literal[string] . identifier[format] ( identifier[f] ) keyword[for] identifier[f] keyword[in] identifier[BIBTEX_FIELDS] )))
def create(self, force=False, exists_ok=False): """ Creates a db file with the core schema. :param force: If `True` an existing db file will be overwritten. """ if self.fname and self.fname.exists(): if force: self.drop() # depends on [control=['if'], data=[]] elif exists_ok: return # depends on [control=['if'], data=[]] else: raise ValueError('db file already exists, use force=True to overwrite') # depends on [control=['if'], data=[]] with self.connection() as db: db.execute('CREATE TABLE dataset (\n ID TEXT PRIMARY KEY NOT NULL,\n name TEXT,\n version TEXT,\n metadata_json TEXT\n)') db.execute('CREATE TABLE datasetmeta (\n dataset_ID TEXT ,\n key TEXT,\n value TEXT,\n PRIMARY KEY (dataset_ID, key),\n FOREIGN KEY(dataset_ID) REFERENCES dataset(ID)\n)') db.execute('CREATE TABLE SourceTable (\n dataset_ID TEXT ,\n ID TEXT ,\n bibtex_type TEXT,\n {0}\n extra TEXT,\n PRIMARY KEY (dataset_ID, ID),\n FOREIGN KEY(dataset_ID) REFERENCES dataset(ID)\n)'.format('\n '.join(('`{0}` TEXT,'.format(f) for f in BIBTEX_FIELDS)))) # depends on [control=['with'], data=['db']]
def paginate_data(searched_data, request_data):
    """
    Paginates the searched_data as per the request_data

    Source: Himanshu Shankar (https://github.com/iamhssingh)

    Parameters
    ----------
    searched_data: Serializer.data
        It is the data received from queryset. It uses show_serializer
    request_data: Serializer.data
        It is the request data. It uses serializer_class.

    Returns
    -------
    data: dict
    """
    from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger

    # A non-positive 'paginator' value means "no pagination": return
    # everything as a single page.
    if int(request_data.data['paginator']) <= 0:
        return {'objects': searched_data.data, 'previous': -1, 'next': -1,
                'total_pages': 1, 'current': 1,
                'total_objects': len(searched_data.data)}

    paginator = Paginator(searched_data.data, request_data.data['paginator'])
    try:
        page = paginator.page(request_data.data['page'])
    except PageNotAnInteger:
        # Non-integer page requests fall back to the first page.
        page = paginator.page(1)
    except EmptyPage:
        # Out-of-range page requests fall back to the last page.
        page = paginator.page(paginator.num_pages)

    return {
        'total_pages': paginator.num_pages,
        'current': page.number,
        'total_objects': len(searched_data.data),
        # -1 signals "no such page" to the caller.
        'next': page.next_page_number() if page.has_next() else -1,
        'previous': page.previous_page_number() if page.number > 1 else -1,
        'objects': page.object_list,
    }
def function[paginate_data, parameter[searched_data, request_data]]: constant[ Paginates the searched_data as per the request_data Source: Himanshu Shankar (https://github.com/iamhssingh) Parameters ---------- searched_data: Serializer.data It is the data received from queryset. It uses show_serializer request_data: Serializer.data It is the request data. It uses serializer_class. Returns ------- data: dict ] from relative_module[django.core.paginator] import module[Paginator], module[EmptyPage], module[PageNotAnInteger] if compare[call[name[int], parameter[call[name[request_data].data][constant[paginator]]]] greater[>] constant[0]] begin[:] variable[paginator] assign[=] call[name[Paginator], parameter[name[searched_data].data, call[name[request_data].data][constant[paginator]]]] <ast.Try object at 0x7da204961d50> variable[data] assign[=] dictionary[[<ast.Constant object at 0x7da204963d00>, <ast.Constant object at 0x7da2049632e0>, <ast.Constant object at 0x7da204963fd0>], [<ast.Attribute object at 0x7da2049625c0>, <ast.Attribute object at 0x7da204960040>, <ast.Call object at 0x7da204960df0>]] if call[name[curr].has_next, parameter[]] begin[:] call[name[data]][constant[next]] assign[=] call[name[curr].next_page_number, parameter[]] if compare[name[curr].number greater[>] constant[1]] begin[:] call[name[data]][constant[previous]] assign[=] call[name[curr].previous_page_number, parameter[]] call[name[data]][constant[objects]] assign[=] name[curr].object_list return[name[data]]
keyword[def] identifier[paginate_data] ( identifier[searched_data] , identifier[request_data] ): literal[string] keyword[from] identifier[django] . identifier[core] . identifier[paginator] keyword[import] identifier[Paginator] , identifier[EmptyPage] , identifier[PageNotAnInteger] keyword[if] identifier[int] ( identifier[request_data] . identifier[data] [ literal[string] ])> literal[int] : identifier[paginator] = identifier[Paginator] ( identifier[searched_data] . identifier[data] , identifier[request_data] . identifier[data] [ literal[string] ]) keyword[try] : identifier[curr] = identifier[paginator] . identifier[page] ( identifier[request_data] . identifier[data] [ literal[string] ]) keyword[except] identifier[PageNotAnInteger] : identifier[curr] = identifier[paginator] . identifier[page] ( literal[int] ) keyword[except] identifier[EmptyPage] : identifier[curr] = identifier[paginator] . identifier[page] ( identifier[paginator] . identifier[num_pages] ) identifier[data] ={ literal[string] : identifier[paginator] . identifier[num_pages] , literal[string] : identifier[curr] . identifier[number] , literal[string] : identifier[len] ( identifier[searched_data] . identifier[data] )} keyword[if] identifier[curr] . identifier[has_next] (): identifier[data] [ literal[string] ]= identifier[curr] . identifier[next_page_number] () keyword[else] : identifier[data] [ literal[string] ]=- literal[int] keyword[if] identifier[curr] . identifier[number] > literal[int] : identifier[data] [ literal[string] ]= identifier[curr] . identifier[previous_page_number] () keyword[else] : identifier[data] [ literal[string] ]=- literal[int] identifier[data] [ literal[string] ]= identifier[curr] . identifier[object_list] keyword[else] : identifier[data] ={ literal[string] : identifier[searched_data] . 
identifier[data] , literal[string] :- literal[int] , literal[string] :- literal[int] , literal[string] : literal[int] , literal[string] : literal[int] , literal[string] : identifier[len] ( identifier[searched_data] . identifier[data] )} keyword[return] identifier[data]
def paginate_data(searched_data, request_data): """ Paginates the searched_data as per the request_data Source: Himanshu Shankar (https://github.com/iamhssingh) Parameters ---------- searched_data: Serializer.data It is the data received from queryset. It uses show_serializer request_data: Serializer.data It is the request data. It uses serializer_class. Returns ------- data: dict """ from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger if int(request_data.data['paginator']) > 0: paginator = Paginator(searched_data.data, request_data.data['paginator']) try: curr = paginator.page(request_data.data['page']) # depends on [control=['try'], data=[]] except PageNotAnInteger: curr = paginator.page(1) # depends on [control=['except'], data=[]] except EmptyPage: curr = paginator.page(paginator.num_pages) # depends on [control=['except'], data=[]] data = {'total_pages': paginator.num_pages, 'current': curr.number, 'total_objects': len(searched_data.data)} if curr.has_next(): data['next'] = curr.next_page_number() # depends on [control=['if'], data=[]] else: data['next'] = -1 if curr.number > 1: data['previous'] = curr.previous_page_number() # depends on [control=['if'], data=[]] else: data['previous'] = -1 data['objects'] = curr.object_list # depends on [control=['if'], data=[]] else: data = {'objects': searched_data.data, 'previous': -1, 'next': -1, 'total_pages': 1, 'current': 1, 'total_objects': len(searched_data.data)} return data
def cmd_async(self, low):
    '''
    Execute a function asynchronously; eauth is respected

    This function requires that :conf_master:`external_auth` is configured
    and the user is authorized

    .. code-block:: python

        >>> wheel.cmd_async({
            'fun': 'key.finger',
            'match': 'jerry',
            'eauth': 'auto',
            'username': 'saltdev',
            'password': 'saltdev',
        })
        {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'}
    '''
    # Pull the function name out of the low-state dict; the remainder of
    # `low` (match, eauth, credentials, ...) is forwarded as-is.
    return self.asynchronous(low.pop('fun'), low)
def function[cmd_async, parameter[self, low]]: constant[ Execute a function asynchronously; eauth is respected This function requires that :conf_master:`external_auth` is configured and the user is authorized .. code-block:: python >>> wheel.cmd_async({ 'fun': 'key.finger', 'match': 'jerry', 'eauth': 'auto', 'username': 'saltdev', 'password': 'saltdev', }) {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'} ] variable[fun] assign[=] call[name[low].pop, parameter[constant[fun]]] return[call[name[self].asynchronous, parameter[name[fun], name[low]]]]
keyword[def] identifier[cmd_async] ( identifier[self] , identifier[low] ): literal[string] identifier[fun] = identifier[low] . identifier[pop] ( literal[string] ) keyword[return] identifier[self] . identifier[asynchronous] ( identifier[fun] , identifier[low] )
def cmd_async(self, low): """ Execute a function asynchronously; eauth is respected This function requires that :conf_master:`external_auth` is configured and the user is authorized .. code-block:: python >>> wheel.cmd_async({ 'fun': 'key.finger', 'match': 'jerry', 'eauth': 'auto', 'username': 'saltdev', 'password': 'saltdev', }) {'jid': '20131219224744416681', 'tag': 'salt/wheel/20131219224744416681'} """ fun = low.pop('fun') return self.asynchronous(fun, low)
def short_help(i):
    """
    Input:  { }

    Output: {
              return       - return code =  0, if successful
                                         >  0, if error
              (error)      - error text if return > 0

              help         - help text
            }
    """
    o = i.get('out', '')

    r = version({})
    if r['return'] > 0:
        return r
    parts = ['CK version: ' + r['version_str'] + '\n']

    r = python_version({})
    if r['return'] > 0:
        return r
    parts.append('\nPython version used by CK: ' + r['version'] + '\n')
    parts.append('\nAll internal CK commands: ck help\n')
    # Collapse the configured web-help URL to a single unbroken token.
    parts.append('\n' + cfg['help_web'].replace('\n', '').strip().replace(' ', ''))

    h = ''.join(parts)
    if o == 'con':
        out(h)

    return {'return': 0, 'help': h}
def function[short_help, parameter[i]]: constant[ Input: { } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 help - help text } ] variable[o] assign[=] call[name[i].get, parameter[constant[out], constant[]]] variable[r] assign[=] call[name[version], parameter[dictionary[[], []]]] if compare[call[name[r]][constant[return]] greater[>] constant[0]] begin[:] return[name[r]] variable[h] assign[=] binary_operation[binary_operation[constant[CK version: ] + call[name[r]][constant[version_str]]] + constant[ ]] variable[r] assign[=] call[name[python_version], parameter[dictionary[[], []]]] if compare[call[name[r]][constant[return]] greater[>] constant[0]] begin[:] return[name[r]] <ast.AugAssign object at 0x7da1b22f4e80> <ast.AugAssign object at 0x7da1b22f4700> <ast.AugAssign object at 0x7da1b22f6c80> if compare[name[o] equal[==] constant[con]] begin[:] call[name[out], parameter[name[h]]] return[dictionary[[<ast.Constant object at 0x7da1b22f4eb0>, <ast.Constant object at 0x7da1b22f64a0>], [<ast.Constant object at 0x7da1b22f4df0>, <ast.Name object at 0x7da1b22f6410>]]]
keyword[def] identifier[short_help] ( identifier[i] ): literal[string] identifier[o] = identifier[i] . identifier[get] ( literal[string] , literal[string] ) identifier[r] = identifier[version] ({}) keyword[if] identifier[r] [ literal[string] ]> literal[int] : keyword[return] identifier[r] identifier[h] = literal[string] + identifier[r] [ literal[string] ]+ literal[string] identifier[r] = identifier[python_version] ({}) keyword[if] identifier[r] [ literal[string] ]> literal[int] : keyword[return] identifier[r] identifier[h] += literal[string] + identifier[r] [ literal[string] ]+ literal[string] identifier[h] += literal[string] identifier[h] += literal[string] + identifier[cfg] [ literal[string] ]. identifier[replace] ( literal[string] , literal[string] ). identifier[strip] (). identifier[replace] ( literal[string] , literal[string] ) keyword[if] identifier[o] == literal[string] : identifier[out] ( identifier[h] ) keyword[return] { literal[string] : literal[int] , literal[string] : identifier[h] }
def short_help(i): """ Input: { } Output: { return - return code = 0, if successful > 0, if error (error) - error text if return > 0 help - help text } """ o = i.get('out', '') r = version({}) if r['return'] > 0: return r # depends on [control=['if'], data=[]] h = 'CK version: ' + r['version_str'] + '\n' r = python_version({}) if r['return'] > 0: return r # depends on [control=['if'], data=[]] h += '\nPython version used by CK: ' + r['version'] + '\n' h += '\nAll internal CK commands: ck help\n' h += '\n' + cfg['help_web'].replace('\n', '').strip().replace(' ', '') if o == 'con': out(h) # depends on [control=['if'], data=[]] return {'return': 0, 'help': h}
def show(self, wait=False):
    """Show the window; with wait=True also make it modal (grab input)."""
    self.tk.deiconify()
    self._visible = True
    self._modal = wait
    if self._modal:
        # A modal window captures all input events for this application.
        self.tk.grab_set()
def function[show, parameter[self, wait]]: constant[Show the window.] call[name[self].tk.deiconify, parameter[]] name[self]._visible assign[=] constant[True] name[self]._modal assign[=] name[wait] if name[self]._modal begin[:] call[name[self].tk.grab_set, parameter[]]
keyword[def] identifier[show] ( identifier[self] , identifier[wait] = keyword[False] ): literal[string] identifier[self] . identifier[tk] . identifier[deiconify] () identifier[self] . identifier[_visible] = keyword[True] identifier[self] . identifier[_modal] = identifier[wait] keyword[if] identifier[self] . identifier[_modal] : identifier[self] . identifier[tk] . identifier[grab_set] ()
def show(self, wait=False): """Show the window.""" self.tk.deiconify() self._visible = True self._modal = wait if self._modal: self.tk.grab_set() # depends on [control=['if'], data=[]]
def get_epoch_names(self):
    '''This function returns a list of all the epoch names in your recording,
    ordered by their start frame.

    Returns
    ----------
    epoch_names: list
        List of epoch names in the recording extractor, sorted by start
        frame (ties broken alphabetically by epoch name).
    '''
    epoch_names = list(self._epochs.keys())
    # Sort by (start_frame, name); the name tie-breaker reproduces the
    # behavior of sorting zipped (start_frame, name) pairs. Sorting an
    # empty list is a no-op, so no emptiness guard is needed.
    epoch_names.sort(
        key=lambda name: (self.get_epoch_info(name)['start_frame'], name))
    return epoch_names
def function[get_epoch_names, parameter[self]]: constant[This function returns a list of all the epoch names in your recording Returns ---------- epoch_names: list List of epoch names in the recording extractor ] variable[epoch_names] assign[=] call[name[list], parameter[call[name[self]._epochs.keys, parameter[]]]] if <ast.UnaryOp object at 0x7da20e9b31f0> begin[:] pass return[name[epoch_names]]
keyword[def] identifier[get_epoch_names] ( identifier[self] ): literal[string] identifier[epoch_names] = identifier[list] ( identifier[self] . identifier[_epochs] . identifier[keys] ()) keyword[if] keyword[not] identifier[epoch_names] : keyword[pass] keyword[else] : identifier[epoch_start_frames] =[] keyword[for] identifier[epoch_name] keyword[in] identifier[epoch_names] : identifier[epoch_info] = identifier[self] . identifier[get_epoch_info] ( identifier[epoch_name] ) identifier[start_frame] = identifier[epoch_info] [ literal[string] ] identifier[epoch_start_frames] . identifier[append] ( identifier[start_frame] ) identifier[epoch_names] =[ identifier[epoch_name] keyword[for] identifier[_] , identifier[epoch_name] keyword[in] identifier[sorted] ( identifier[zip] ( identifier[epoch_start_frames] , identifier[epoch_names] ))] keyword[return] identifier[epoch_names]
def get_epoch_names(self): """This function returns a list of all the epoch names in your recording Returns ---------- epoch_names: list List of epoch names in the recording extractor """ epoch_names = list(self._epochs.keys()) if not epoch_names: pass # depends on [control=['if'], data=[]] else: epoch_start_frames = [] for epoch_name in epoch_names: epoch_info = self.get_epoch_info(epoch_name) start_frame = epoch_info['start_frame'] epoch_start_frames.append(start_frame) # depends on [control=['for'], data=['epoch_name']] epoch_names = [epoch_name for (_, epoch_name) in sorted(zip(epoch_start_frames, epoch_names))] return epoch_names
def get_cls_doc(elt, full_name: str) -> tuple:
    "Class definition; returns the `(name, args)` pair describing `elt`."
    # NOTE: the original annotation said `-> str`, but the function has
    # always returned a 2-tuple; the annotation is corrected here.
    # Direct base class of `elt`: last entry of the inspect class tree.
    parent_class = inspect.getclasstree([elt])[-1][0][1][0]
    name, args = format_ft_def(elt, full_name)
    if parent_class != object:
        # Append a reference to the parent class unless it is plain `object`.
        args += f' :: {link_type(parent_class, include_bt=True)}'
    return name, args
def function[get_cls_doc, parameter[elt, full_name]]: constant[Class definition.] variable[parent_class] assign[=] call[call[call[call[call[name[inspect].getclasstree, parameter[list[[<ast.Name object at 0x7da1b1ec52a0>]]]]][<ast.UnaryOp object at 0x7da1b1ec4c10>]][constant[0]]][constant[1]]][constant[0]] <ast.Tuple object at 0x7da1b1ec5390> assign[=] call[name[format_ft_def], parameter[name[elt], name[full_name]]] if compare[name[parent_class] not_equal[!=] name[object]] begin[:] <ast.AugAssign object at 0x7da1b1ec54b0> return[tuple[[<ast.Name object at 0x7da1b1ec6800>, <ast.Name object at 0x7da1b1ec7010>]]]
keyword[def] identifier[get_cls_doc] ( identifier[elt] , identifier[full_name] : identifier[str] )-> identifier[str] : literal[string] identifier[parent_class] = identifier[inspect] . identifier[getclasstree] ([ identifier[elt] ])[- literal[int] ][ literal[int] ][ literal[int] ][ literal[int] ] identifier[name] , identifier[args] = identifier[format_ft_def] ( identifier[elt] , identifier[full_name] ) keyword[if] identifier[parent_class] != identifier[object] : identifier[args] += literal[string] keyword[return] identifier[name] , identifier[args]
def get_cls_doc(elt, full_name: str) -> str: """Class definition.""" parent_class = inspect.getclasstree([elt])[-1][0][1][0] (name, args) = format_ft_def(elt, full_name) if parent_class != object: args += f' :: {link_type(parent_class, include_bt=True)}' # depends on [control=['if'], data=['parent_class']] return (name, args)
def normalize(self, form):
    """
    Return the Unicode normal form for the strings in the Series/Index.

    For more information on the forms, see the
    :func:`unicodedata.normalize`.

    Parameters
    ----------
    form : {'NFC', 'NFKC', 'NFD', 'NFKD'}
        Unicode form

    Returns
    -------
    normalized : Series/Index of objects
    """
    import unicodedata

    def _normalizer(value):
        return unicodedata.normalize(form, value)

    # _na_map skips missing values while applying the normalizer.
    return self._wrap_result(_na_map(_normalizer, self._parent))
def function[normalize, parameter[self, form]]: constant[ Return the Unicode normal form for the strings in the Series/Index. For more information on the forms, see the :func:`unicodedata.normalize`. Parameters ---------- form : {'NFC', 'NFKC', 'NFD', 'NFKD'} Unicode form Returns ------- normalized : Series/Index of objects ] import module[unicodedata] variable[f] assign[=] <ast.Lambda object at 0x7da20cabdb70> variable[result] assign[=] call[name[_na_map], parameter[name[f], name[self]._parent]] return[call[name[self]._wrap_result, parameter[name[result]]]]
keyword[def] identifier[normalize] ( identifier[self] , identifier[form] ): literal[string] keyword[import] identifier[unicodedata] identifier[f] = keyword[lambda] identifier[x] : identifier[unicodedata] . identifier[normalize] ( identifier[form] , identifier[x] ) identifier[result] = identifier[_na_map] ( identifier[f] , identifier[self] . identifier[_parent] ) keyword[return] identifier[self] . identifier[_wrap_result] ( identifier[result] )
def normalize(self, form): """ Return the Unicode normal form for the strings in the Series/Index. For more information on the forms, see the :func:`unicodedata.normalize`. Parameters ---------- form : {'NFC', 'NFKC', 'NFD', 'NFKD'} Unicode form Returns ------- normalized : Series/Index of objects """ import unicodedata f = lambda x: unicodedata.normalize(form, x) result = _na_map(f, self._parent) return self._wrap_result(result)
def stop(self):
    """
    Force the next() method to return while in another thread.

    The return value of next() will be None.
    """
    cond = self.condition
    cond.acquire()
    try:
        # Flip the running flag under the lock, then wake every waiter
        # so blocked next() calls can observe the change and return.
        self.running = False
        cond.notify_all()
    finally:
        cond.release()
def function[stop, parameter[self]]: constant[ Force the next() method to return while in another thread. The return value of next() will be None. ] with name[self].condition begin[:] name[self].running assign[=] constant[False] call[name[self].condition.notify_all, parameter[]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] keyword[with] identifier[self] . identifier[condition] : identifier[self] . identifier[running] = keyword[False] identifier[self] . identifier[condition] . identifier[notify_all] ()
def stop(self): """ Force the next() method to return while in another thread. The return value of next() will be None. """ with self.condition: self.running = False self.condition.notify_all() # depends on [control=['with'], data=[]]
def _alignmentToStr(self, result): """ Make a textual representation of an alignment result. @param result: A C{dict}, as returned by C{self.createAlignment}. @return: A C{str} desription of a result. For every three lines the first and third contain the input sequences, possibly padded with '-'. The second contains '|' where the two sequences match, and ' ' where not. Format of the output is as follows: Cigar: (Cigar string) Evalue: Bitscore: Id1 Match start: (int) Match end: (int) Id2 Match start: (int) Match end: (int) Id1: 1 (seq) 50 [lines to show matches] Id2: 1 (seq) 50 """ if result is None: return ('\nNo alignment between %s and %s\n' % ( self.seq1ID, self.seq2ID)) else: header = ( '\nCigar string of aligned region: %s\n' '%s Match start: %d Match end: %d\n' '%s Match start: %d Match end: %d\n' % (result['cigar'], self.seq1ID, result['sequence1Start'], result['sequence1End'], self.seq2ID, result['sequence2Start'], result['sequence2End']) ) text = '\n'.join(result['text']) return header + text
def function[_alignmentToStr, parameter[self, result]]: constant[ Make a textual representation of an alignment result. @param result: A C{dict}, as returned by C{self.createAlignment}. @return: A C{str} desription of a result. For every three lines the first and third contain the input sequences, possibly padded with '-'. The second contains '|' where the two sequences match, and ' ' where not. Format of the output is as follows: Cigar: (Cigar string) Evalue: Bitscore: Id1 Match start: (int) Match end: (int) Id2 Match start: (int) Match end: (int) Id1: 1 (seq) 50 [lines to show matches] Id2: 1 (seq) 50 ] if compare[name[result] is constant[None]] begin[:] return[binary_operation[constant[ No alignment between %s and %s ] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f721e70>, <ast.Attribute object at 0x7da18f722770>]]]]
keyword[def] identifier[_alignmentToStr] ( identifier[self] , identifier[result] ): literal[string] keyword[if] identifier[result] keyword[is] keyword[None] : keyword[return] ( literal[string] %( identifier[self] . identifier[seq1ID] , identifier[self] . identifier[seq2ID] )) keyword[else] : identifier[header] =( literal[string] literal[string] literal[string] % ( identifier[result] [ literal[string] ], identifier[self] . identifier[seq1ID] , identifier[result] [ literal[string] ], identifier[result] [ literal[string] ], identifier[self] . identifier[seq2ID] , identifier[result] [ literal[string] ], identifier[result] [ literal[string] ]) ) identifier[text] = literal[string] . identifier[join] ( identifier[result] [ literal[string] ]) keyword[return] identifier[header] + identifier[text]
def _alignmentToStr(self, result): """ Make a textual representation of an alignment result. @param result: A C{dict}, as returned by C{self.createAlignment}. @return: A C{str} desription of a result. For every three lines the first and third contain the input sequences, possibly padded with '-'. The second contains '|' where the two sequences match, and ' ' where not. Format of the output is as follows: Cigar: (Cigar string) Evalue: Bitscore: Id1 Match start: (int) Match end: (int) Id2 Match start: (int) Match end: (int) Id1: 1 (seq) 50 [lines to show matches] Id2: 1 (seq) 50 """ if result is None: return '\nNo alignment between %s and %s\n' % (self.seq1ID, self.seq2ID) # depends on [control=['if'], data=[]] else: header = '\nCigar string of aligned region: %s\n%s Match start: %d Match end: %d\n%s Match start: %d Match end: %d\n' % (result['cigar'], self.seq1ID, result['sequence1Start'], result['sequence1End'], self.seq2ID, result['sequence2Start'], result['sequence2End']) text = '\n'.join(result['text']) return header + text
def send(self, data):
    """
    Try to send data to the client.

    :param data: Text to be sent (encoded as UTF-8), or None to send nothing
    :return: True if the data was sent, False on error or when data is None
    """
    if data is None:
        # Nothing to send. Previously the function fell through and
        # returned None here; return False explicitly to honour the
        # documented True/False contract (None is still falsy, so
        # truthiness-based callers are unaffected).
        return False

    payload = data.encode("UTF-8")
    try:
        self.wfile.write(payload)
        self.wfile.flush()
        return True
    except IOError:
        # An error occurred, mask it
        # -> This allows to handle the command even if the client has been
        # disconnect (i.e. "echo stop 0 | nc localhost 9000")
        return False
def function[send, parameter[self, data]]: constant[ Tries to send data to the client. :param data: Data to be sent :return: True if the data was sent, False on error ] if compare[name[data] is_not constant[None]] begin[:] variable[data] assign[=] call[name[data].encode, parameter[constant[UTF-8]]] <ast.Try object at 0x7da18f723c70>
keyword[def] identifier[send] ( identifier[self] , identifier[data] ): literal[string] keyword[if] identifier[data] keyword[is] keyword[not] keyword[None] : identifier[data] = identifier[data] . identifier[encode] ( literal[string] ) keyword[try] : identifier[self] . identifier[wfile] . identifier[write] ( identifier[data] ) identifier[self] . identifier[wfile] . identifier[flush] () keyword[return] keyword[True] keyword[except] identifier[IOError] : keyword[return] keyword[False]
def send(self, data): """ Tries to send data to the client. :param data: Data to be sent :return: True if the data was sent, False on error """ if data is not None: data = data.encode('UTF-8') # depends on [control=['if'], data=['data']] try: self.wfile.write(data) self.wfile.flush() return True # depends on [control=['try'], data=[]] except IOError: # An error occurred, mask it # -> This allows to handle the command even if the client has been # disconnect (i.e. "echo stop 0 | nc localhost 9000") return False # depends on [control=['except'], data=[]]
def get_permission_request(parser, token):
    """
    Performs a permission request check with the given signature, user and
    objects and assigns the result to a context variable.

    Syntax::

        {% get_permission_request PERMISSION_LABEL.CHECK_NAME for USER and *OBJS [as VARNAME] %}

        {% get_permission_request "poll_permission.change_poll" for request.user and poll as "asked_for_permissio" %}

        {% get_permission_request "poll_permission.change_poll" for request.user and poll,second_poll as "asked_for_permissio" %}

        {% if asked_for_permissio %}
            Dude, you already asked for permission!
        {% else %}
            Oh, please fill out this 20 page form and sign here.
        {% endif %}

    """
    # approved=False presumably restricts the lookup to permission requests
    # that have not yet been granted -- TODO confirm against
    # PermissionForObjectNode.handle_token.
    return PermissionForObjectNode.handle_token(
        parser, token, approved=False, name='"permission_request"')
def function[get_permission_request, parameter[parser, token]]: constant[ Performs a permission request check with the given signature, user and objects and assigns the result to a context variable. Syntax:: {% get_permission_request PERMISSION_LABEL.CHECK_NAME for USER and *OBJS [as VARNAME] %} {% get_permission_request "poll_permission.change_poll" for request.user and poll as "asked_for_permissio" %} {% get_permission_request "poll_permission.change_poll" for request.user and poll,second_poll as "asked_for_permissio" %} {% if asked_for_permissio %} Dude, you already asked for permission! {% else %} Oh, please fill out this 20 page form and sign here. {% endif %} ] return[call[name[PermissionForObjectNode].handle_token, parameter[name[parser], name[token]]]]
keyword[def] identifier[get_permission_request] ( identifier[parser] , identifier[token] ): literal[string] keyword[return] identifier[PermissionForObjectNode] . identifier[handle_token] ( identifier[parser] , identifier[token] , identifier[approved] = keyword[False] , identifier[name] = literal[string] )
def get_permission_request(parser, token): """ Performs a permission request check with the given signature, user and objects and assigns the result to a context variable. Syntax:: {% get_permission_request PERMISSION_LABEL.CHECK_NAME for USER and *OBJS [as VARNAME] %} {% get_permission_request "poll_permission.change_poll" for request.user and poll as "asked_for_permissio" %} {% get_permission_request "poll_permission.change_poll" for request.user and poll,second_poll as "asked_for_permissio" %} {% if asked_for_permissio %} Dude, you already asked for permission! {% else %} Oh, please fill out this 20 page form and sign here. {% endif %} """ return PermissionForObjectNode.handle_token(parser, token, approved=False, name='"permission_request"')
def update(self, dist):
    """
    Add the given distribution's per-key counts and total into this
    distribution, in place.
    """
    assert isinstance(dist, DDist)
    for key, count in iteritems(dist.counts):
        self.counts[key] += count
    self.total += dist.total
def function[update, parameter[self, dist]]: constant[ Adds the given distribution's counts to the current distribution. ] assert[call[name[isinstance], parameter[name[dist], name[DDist]]]] for taget[tuple[[<ast.Name object at 0x7da1b10c6ce0>, <ast.Name object at 0x7da1b10c71f0>]]] in starred[call[name[iteritems], parameter[name[dist].counts]]] begin[:] <ast.AugAssign object at 0x7da1b0fe9cf0> <ast.AugAssign object at 0x7da1b0fe8e20>
keyword[def] identifier[update] ( identifier[self] , identifier[dist] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[dist] , identifier[DDist] ) keyword[for] identifier[k] , identifier[c] keyword[in] identifier[iteritems] ( identifier[dist] . identifier[counts] ): identifier[self] . identifier[counts] [ identifier[k] ]+= identifier[c] identifier[self] . identifier[total] += identifier[dist] . identifier[total]
def update(self, dist): """ Adds the given distribution's counts to the current distribution. """ assert isinstance(dist, DDist) for (k, c) in iteritems(dist.counts): self.counts[k] += c # depends on [control=['for'], data=[]] self.total += dist.total
def synchronized(func):
    """
    Decorator for synchronizing method access.

    The first call on a given instance lazily creates an RLock and stores it
    in the instance's ``__dict__`` under ``_sync_lock``; every call then runs
    *func* while holding that lock.
    """
    _missing = object()

    @wraps(func)
    def inner(self, *args, **kwargs):
        # getattr with a sentinel default is equivalent to catching
        # AttributeError: it finds an existing lock on the instance or class.
        lock = getattr(self, '_sync_lock', _missing)
        if lock is _missing:
            from multiprocessing import RLock
            # setdefault guards against two threads racing to create the lock.
            lock = self.__dict__.setdefault('_sync_lock', RLock())
        with lock:
            return func(self, *args, **kwargs)

    return inner
def function[synchronized, parameter[func]]: constant[ Decorator for synchronizing method access. ] def function[wrapped, parameter[self]]: <ast.Try object at 0x7da1b06517b0> with name[rlock] begin[:] return[call[name[func], parameter[name[self], <ast.Starred object at 0x7da1b0652140>]]] return[name[wrapped]]
keyword[def] identifier[synchronized] ( identifier[func] ): literal[string] @ identifier[wraps] ( identifier[func] ) keyword[def] identifier[wrapped] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): keyword[try] : identifier[rlock] = identifier[self] . identifier[_sync_lock] keyword[except] identifier[AttributeError] : keyword[from] identifier[multiprocessing] keyword[import] identifier[RLock] identifier[rlock] = identifier[self] . identifier[__dict__] . identifier[setdefault] ( literal[string] , identifier[RLock] ()) keyword[with] identifier[rlock] : keyword[return] identifier[func] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[wrapped]
def synchronized(func): """ Decorator for synchronizing method access. """ @wraps(func) def wrapped(self, *args, **kwargs): try: rlock = self._sync_lock # depends on [control=['try'], data=[]] except AttributeError: from multiprocessing import RLock rlock = self.__dict__.setdefault('_sync_lock', RLock()) # depends on [control=['except'], data=[]] with rlock: return func(self, *args, **kwargs) # depends on [control=['with'], data=[]] return wrapped
def read_chunks(stream, block_size=2**10):
    """
    Given a byte stream with reader, yield chunks of block_size until the
    stream is consumed.
    """
    while True:
        piece = stream.read(block_size)
        # An empty (falsy) read means end-of-stream.
        if piece:
            yield piece
        else:
            return
def function[read_chunks, parameter[stream, block_size]]: constant[ Given a byte stream with reader, yield chunks of block_size until the stream is consusmed. ] while constant[True] begin[:] variable[chunk] assign[=] call[name[stream].read, parameter[name[block_size]]] if <ast.UnaryOp object at 0x7da18f812920> begin[:] break <ast.Yield object at 0x7da18f813be0>
keyword[def] identifier[read_chunks] ( identifier[stream] , identifier[block_size] = literal[int] ** literal[int] ): literal[string] keyword[while] keyword[True] : identifier[chunk] = identifier[stream] . identifier[read] ( identifier[block_size] ) keyword[if] keyword[not] identifier[chunk] : keyword[break] keyword[yield] identifier[chunk]
def read_chunks(stream, block_size=2 ** 10): """ Given a byte stream with reader, yield chunks of block_size until the stream is consusmed. """ while True: chunk = stream.read(block_size) if not chunk: break # depends on [control=['if'], data=[]] yield chunk # depends on [control=['while'], data=[]]
def read_stream(cls, stream, validate=True):
    """
    Read torrent metainfo from file-like object

    :param stream: Readable file-like object (e.g. :class:`io.BytesIO`)
    :param bool validate: Whether to run :meth:`validate` on the new Torrent
        object

    :raises ReadError: if reading from `stream` fails
    :raises ParseError: if `stream` does not produce a valid bencoded byte
        string
    :raises MetainfoError: if `validate` is `True` and the read metainfo is
        invalid

    :return: New Torrent object
    """
    try:
        # Bounded read: never pull more than MAX_TORRENT_FILE_SIZE bytes.
        content = stream.read(cls.MAX_TORRENT_FILE_SIZE)
    except OSError as e:
        raise error.ReadError(e.errno)
    else:
        try:
            # bdecode returns a dict with bytes keys/values.
            metainfo_enc = bdecode(content)
        except BTFailure as e:
            raise error.ParseError()

        if validate:
            # Minimal structural checks before touching the payload.
            if b'info' not in metainfo_enc:
                raise error.MetainfoError("Missing 'info'")
            elif not isinstance(metainfo_enc[b'info'], abc.Mapping):
                raise error.MetainfoError("'info' is not a dictionary")
            elif b'pieces' not in metainfo_enc[b'info']:
                raise error.MetainfoError("Missing 'pieces' in ['info']")

        # Extract 'pieces' from metainfo because it's the only byte string
        # that isn't supposed to be decoded to unicode.
        if b'info' in metainfo_enc and b'pieces' in metainfo_enc[b'info']:
            pieces = metainfo_enc[b'info'].pop(b'pieces')
            metainfo = utils.decode_dict(metainfo_enc)
            metainfo['info']['pieces'] = pieces
        else:
            metainfo = utils.decode_dict(metainfo_enc)

        torrent = cls()
        torrent._metainfo = metainfo

        # Convert some values from official types to something nicer
        # (e.g. int -> datetime): re-assigning through the property
        # setters normalizes the raw decoded values.
        for attr in ('creation_date', 'private'):
            setattr(torrent, attr, getattr(torrent, attr))

        # Auto-set 'include_md5': True only if every file entry (single- or
        # multi-file mode) carries an md5sum.
        info = torrent.metainfo['info']
        torrent.include_md5 = ('length' in info and 'md5sum' in info) or \
                              ('files' in info and
                               all('md5sum' in fileinfo
                                   for fileinfo in info['files']))

        if validate:
            torrent.validate()

        return torrent
def function[read_stream, parameter[cls, stream, validate]]: constant[ Read torrent metainfo from file-like object :param stream: Readable file-like object (e.g. :class:`io.BytesIO`) :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `stream` fails :raises ParseError: if `stream` does not produce a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object ] <ast.Try object at 0x7da1b0df5630>
keyword[def] identifier[read_stream] ( identifier[cls] , identifier[stream] , identifier[validate] = keyword[True] ): literal[string] keyword[try] : identifier[content] = identifier[stream] . identifier[read] ( identifier[cls] . identifier[MAX_TORRENT_FILE_SIZE] ) keyword[except] identifier[OSError] keyword[as] identifier[e] : keyword[raise] identifier[error] . identifier[ReadError] ( identifier[e] . identifier[errno] ) keyword[else] : keyword[try] : identifier[metainfo_enc] = identifier[bdecode] ( identifier[content] ) keyword[except] identifier[BTFailure] keyword[as] identifier[e] : keyword[raise] identifier[error] . identifier[ParseError] () keyword[if] identifier[validate] : keyword[if] literal[string] keyword[not] keyword[in] identifier[metainfo_enc] : keyword[raise] identifier[error] . identifier[MetainfoError] ( literal[string] ) keyword[elif] keyword[not] identifier[isinstance] ( identifier[metainfo_enc] [ literal[string] ], identifier[abc] . identifier[Mapping] ): keyword[raise] identifier[error] . identifier[MetainfoError] ( literal[string] ) keyword[elif] literal[string] keyword[not] keyword[in] identifier[metainfo_enc] [ literal[string] ]: keyword[raise] identifier[error] . identifier[MetainfoError] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[metainfo_enc] keyword[and] literal[string] keyword[in] identifier[metainfo_enc] [ literal[string] ]: identifier[pieces] = identifier[metainfo_enc] [ literal[string] ]. identifier[pop] ( literal[string] ) identifier[metainfo] = identifier[utils] . identifier[decode_dict] ( identifier[metainfo_enc] ) identifier[metainfo] [ literal[string] ][ literal[string] ]= identifier[pieces] keyword[else] : identifier[metainfo] = identifier[utils] . identifier[decode_dict] ( identifier[metainfo_enc] ) identifier[torrent] = identifier[cls] () identifier[torrent] . 
identifier[_metainfo] = identifier[metainfo] keyword[for] identifier[attr] keyword[in] ( literal[string] , literal[string] ): identifier[setattr] ( identifier[torrent] , identifier[attr] , identifier[getattr] ( identifier[torrent] , identifier[attr] )) identifier[info] = identifier[torrent] . identifier[metainfo] [ literal[string] ] identifier[torrent] . identifier[include_md5] =( literal[string] keyword[in] identifier[info] keyword[and] literal[string] keyword[in] identifier[info] ) keyword[or] ( literal[string] keyword[in] identifier[info] keyword[and] identifier[all] ( literal[string] keyword[in] identifier[fileinfo] keyword[for] identifier[fileinfo] keyword[in] identifier[info] [ literal[string] ])) keyword[if] identifier[validate] : identifier[torrent] . identifier[validate] () keyword[return] identifier[torrent]
def read_stream(cls, stream, validate=True): """ Read torrent metainfo from file-like object :param stream: Readable file-like object (e.g. :class:`io.BytesIO`) :param bool validate: Whether to run :meth:`validate` on the new Torrent object :raises ReadError: if reading from `stream` fails :raises ParseError: if `stream` does not produce a valid bencoded byte string :raises MetainfoError: if `validate` is `True` and the read metainfo is invalid :return: New Torrent object """ try: content = stream.read(cls.MAX_TORRENT_FILE_SIZE) # depends on [control=['try'], data=[]] except OSError as e: raise error.ReadError(e.errno) # depends on [control=['except'], data=['e']] else: try: metainfo_enc = bdecode(content) # depends on [control=['try'], data=[]] except BTFailure as e: raise error.ParseError() # depends on [control=['except'], data=[]] if validate: if b'info' not in metainfo_enc: raise error.MetainfoError("Missing 'info'") # depends on [control=['if'], data=[]] elif not isinstance(metainfo_enc[b'info'], abc.Mapping): raise error.MetainfoError("'info' is not a dictionary") # depends on [control=['if'], data=[]] elif b'pieces' not in metainfo_enc[b'info']: raise error.MetainfoError("Missing 'pieces' in ['info']") # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Extract 'pieces' from metainfo because it's the only byte string # that isn't supposed to be decoded to unicode. if b'info' in metainfo_enc and b'pieces' in metainfo_enc[b'info']: pieces = metainfo_enc[b'info'].pop(b'pieces') metainfo = utils.decode_dict(metainfo_enc) metainfo['info']['pieces'] = pieces # depends on [control=['if'], data=[]] else: metainfo = utils.decode_dict(metainfo_enc) torrent = cls() torrent._metainfo = metainfo # Convert some values from official types to something nicer # (e.g. 
int -> datetime) for attr in ('creation_date', 'private'): setattr(torrent, attr, getattr(torrent, attr)) # depends on [control=['for'], data=['attr']] # Auto-set 'include_md5' info = torrent.metainfo['info'] torrent.include_md5 = 'length' in info and 'md5sum' in info or ('files' in info and all(('md5sum' in fileinfo for fileinfo in info['files']))) if validate: torrent.validate() # depends on [control=['if'], data=[]] return torrent
def from_where(cls, where):
    """Factory method for creating the top-level expression."""
    # A conjunction ("AND"/"OR" of clauses) is handled wholesale;
    # otherwise build the expression from the single first clause.
    if where.conjunction:
        return Conjunction.from_clause(where)
    return cls.from_clause(where[0])
def function[from_where, parameter[cls, where]]: constant[ Factory method for creating the top-level expression ] if name[where].conjunction begin[:] return[call[name[Conjunction].from_clause, parameter[name[where]]]]
keyword[def] identifier[from_where] ( identifier[cls] , identifier[where] ): literal[string] keyword[if] identifier[where] . identifier[conjunction] : keyword[return] identifier[Conjunction] . identifier[from_clause] ( identifier[where] ) keyword[else] : keyword[return] identifier[cls] . identifier[from_clause] ( identifier[where] [ literal[int] ])
def from_where(cls, where): """ Factory method for creating the top-level expression """ if where.conjunction: return Conjunction.from_clause(where) # depends on [control=['if'], data=[]] else: return cls.from_clause(where[0])
def _deep_string_coerce(content, json_path='json'):
    """
    Coerces content or all values of content if it is a dict to a string.

    The function will throw if content contains non-string or non-numeric types.

    The reason why we have this function is because the ``self.json`` field must be a
    dict with only string values. This is because ``render_template`` will fail
    for numerical values.
    """
    _coerce = _deep_string_coerce
    if isinstance(content, six.string_types):
        return content
    if isinstance(content, six.integer_types + (float,)):
        # Databricks can tolerate either numeric or string types in the API backend.
        return str(content)
    if isinstance(content, (list, tuple)):
        # Recurse element-wise, extending the JSON path with the index.
        return [_coerce(item, '{0}[{1}]'.format(json_path, idx))
                for idx, item in enumerate(content)]
    if isinstance(content, dict):
        # Recurse value-wise, extending the JSON path with the key.
        return {key: _coerce(value, '{0}[{1}]'.format(json_path, key))
                for key, value in list(content.items())}
    param_type = type(content)
    raise AirflowException(
        'Type {0} used for parameter {1} is not a number or a string'.format(
            param_type, json_path))
def function[_deep_string_coerce, parameter[content, json_path]]: constant[ Coerces content or all values of content if it is a dict to a string. The function will throw if content contains non-string or non-numeric types. The reason why we have this function is because the ``self.json`` field must be a dict with only string values. This is because ``render_template`` will fail for numerical values. ] variable[c] assign[=] name[_deep_string_coerce] if call[name[isinstance], parameter[name[content], name[six].string_types]] begin[:] return[name[content]]
keyword[def] identifier[_deep_string_coerce] ( identifier[content] , identifier[json_path] = literal[string] ): literal[string] identifier[c] = identifier[_deep_string_coerce] keyword[if] identifier[isinstance] ( identifier[content] , identifier[six] . identifier[string_types] ): keyword[return] identifier[content] keyword[elif] identifier[isinstance] ( identifier[content] , identifier[six] . identifier[integer_types] +( identifier[float] ,)): keyword[return] identifier[str] ( identifier[content] ) keyword[elif] identifier[isinstance] ( identifier[content] ,( identifier[list] , identifier[tuple] )): keyword[return] [ identifier[c] ( identifier[e] , literal[string] . identifier[format] ( identifier[json_path] , identifier[i] )) keyword[for] identifier[i] , identifier[e] keyword[in] identifier[enumerate] ( identifier[content] )] keyword[elif] identifier[isinstance] ( identifier[content] , identifier[dict] ): keyword[return] { identifier[k] : identifier[c] ( identifier[v] , literal[string] . identifier[format] ( identifier[json_path] , identifier[k] )) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[list] ( identifier[content] . identifier[items] ())} keyword[else] : identifier[param_type] = identifier[type] ( identifier[content] ) identifier[msg] = literal[string] . identifier[format] ( identifier[param_type] , identifier[json_path] ) keyword[raise] identifier[AirflowException] ( identifier[msg] )
def _deep_string_coerce(content, json_path='json'): """ Coerces content or all values of content if it is a dict to a string. The function will throw if content contains non-string or non-numeric types. The reason why we have this function is because the ``self.json`` field must be a dict with only string values. This is because ``render_template`` will fail for numerical values. """ c = _deep_string_coerce if isinstance(content, six.string_types): return content # depends on [control=['if'], data=[]] elif isinstance(content, six.integer_types + (float,)): # Databricks can tolerate either numeric or string types in the API backend. return str(content) # depends on [control=['if'], data=[]] elif isinstance(content, (list, tuple)): return [c(e, '{0}[{1}]'.format(json_path, i)) for (i, e) in enumerate(content)] # depends on [control=['if'], data=[]] elif isinstance(content, dict): return {k: c(v, '{0}[{1}]'.format(json_path, k)) for (k, v) in list(content.items())} # depends on [control=['if'], data=[]] else: param_type = type(content) msg = 'Type {0} used for parameter {1} is not a number or a string'.format(param_type, json_path) raise AirflowException(msg)
def subtasks(self, task, params=None, **options):
    """Returns a compact representation of all of the subtasks of a task.

    Parameters
    ----------
    task : {Id} The task to get the subtasks of.
    [params] : {Object} Parameters for the request
    """
    # A literal {} default is a shared mutable object across all calls;
    # use a None sentinel and build a fresh dict per call instead.
    if params is None:
        params = {}
    path = "/tasks/%s/subtasks" % (task)
    return self.client.get_collection(path, params, **options)
def function[subtasks, parameter[self, task, params]]: constant[Returns a compact representation of all of the subtasks of a task. Parameters ---------- task : {Id} The task to get the subtasks of. [params] : {Object} Parameters for the request ] variable[path] assign[=] binary_operation[constant[/tasks/%s/subtasks] <ast.Mod object at 0x7da2590d6920> name[task]] return[call[name[self].client.get_collection, parameter[name[path], name[params]]]]
keyword[def] identifier[subtasks] ( identifier[self] , identifier[task] , identifier[params] ={},** identifier[options] ): literal[string] identifier[path] = literal[string] %( identifier[task] ) keyword[return] identifier[self] . identifier[client] . identifier[get_collection] ( identifier[path] , identifier[params] ,** identifier[options] )
def subtasks(self, task, params={}, **options): """Returns a compact representation of all of the subtasks of a task. Parameters ---------- task : {Id} The task to get the subtasks of. [params] : {Object} Parameters for the request """ path = '/tasks/%s/subtasks' % task return self.client.get_collection(path, params, **options)
def update_gateway_device(self, gateway_device_id, body=None):
    """Updates a new gateway device."""
    # Interpolate the device id into the resource path, then issue the PUT.
    path = self.gateway_device_path % gateway_device_id
    return self.put(path, body=body)
def function[update_gateway_device, parameter[self, gateway_device_id, body]]: constant[Updates a new gateway device.] return[call[name[self].put, parameter[binary_operation[name[self].gateway_device_path <ast.Mod object at 0x7da2590d6920> name[gateway_device_id]]]]]
keyword[def] identifier[update_gateway_device] ( identifier[self] , identifier[gateway_device_id] , identifier[body] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[put] ( identifier[self] . identifier[gateway_device_path] % identifier[gateway_device_id] , identifier[body] = identifier[body] )
def update_gateway_device(self, gateway_device_id, body=None): """Updates a new gateway device.""" return self.put(self.gateway_device_path % gateway_device_id, body=body)
def add_filtered_folder(self, path, regex, depth=None, source_type=DefaultSourceType):
    """
    Add a folder source to scan recursively, with a regex filter on directories.

    :param regex: regex string to filter folders by.
    :param depth: if provided will be depth limit. 0 = first level only.
    :param source_type: what to return; files only, folders only, or both.
    """
    # source_type is expanded as keyword arguments into the source --
    # presumably a mapping of files/folders flags; confirm against
    # FilteredFolderSource.
    source = FilteredFolderSource(path, regex, depth, **source_type)
    self.add_source(source)
    # Return self so calls can be chained fluently.
    return self
def function[add_filtered_folder, parameter[self, path, regex, depth, source_type]]: constant[ Add a folder source to scan recursively, with a regex filter on directories. :param regex: regex string to filter folders by. :param depth: if provided will be depth limit. 0 = first level only. :param source_type: what to return; files only, folders only, or both. ] call[name[self].add_source, parameter[call[name[FilteredFolderSource], parameter[name[path], name[regex], name[depth]]]]] return[name[self]]
keyword[def] identifier[add_filtered_folder] ( identifier[self] , identifier[path] , identifier[regex] , identifier[depth] = keyword[None] , identifier[source_type] = identifier[DefaultSourceType] ): literal[string] identifier[self] . identifier[add_source] ( identifier[FilteredFolderSource] ( identifier[path] , identifier[regex] , identifier[depth] ,** identifier[source_type] )) keyword[return] identifier[self]
def add_filtered_folder(self, path, regex, depth=None, source_type=DefaultSourceType): """ Add a folder source to scan recursively, with a regex filter on directories. :param regex: regex string to filter folders by. :param depth: if provided will be depth limit. 0 = first level only. :param source_type: what to return; files only, folders only, or both. """ self.add_source(FilteredFolderSource(path, regex, depth, **source_type)) return self
def to_xdr_object(self):
    """Creates an XDR Operation object that represents this
    :class:`CreatePassiveOffer`.

    """
    # Serialize the two assets of the offer into their XDR forms.
    selling = self.selling.to_xdr_object()
    buying = self.buying.to_xdr_object()
    # Convert the price into a rational n/d pair, then wrap it in the
    # XDR Price type.
    price = Operation.to_xdr_price(self.price)
    price = Xdr.types.Price(price['n'], price['d'])
    amount = Operation.to_xdr_amount(self.amount)
    create_passive_offer_op = Xdr.types.CreatePassiveOfferOp(
        selling, buying, amount, price)
    # Tag the operation body and attach the op before delegating the
    # final assembly to the base Operation class.
    self.body.type = Xdr.const.CREATE_PASSIVE_OFFER
    self.body.createPassiveOfferOp = create_passive_offer_op
    return super(CreatePassiveOffer, self).to_xdr_object()
def function[to_xdr_object, parameter[self]]: constant[Creates an XDR Operation object that represents this :class:`CreatePassiveOffer`. ] variable[selling] assign[=] call[name[self].selling.to_xdr_object, parameter[]] variable[buying] assign[=] call[name[self].buying.to_xdr_object, parameter[]] variable[price] assign[=] call[name[Operation].to_xdr_price, parameter[name[self].price]] variable[price] assign[=] call[name[Xdr].types.Price, parameter[call[name[price]][constant[n]], call[name[price]][constant[d]]]] variable[amount] assign[=] call[name[Operation].to_xdr_amount, parameter[name[self].amount]] variable[create_passive_offer_op] assign[=] call[name[Xdr].types.CreatePassiveOfferOp, parameter[name[selling], name[buying], name[amount], name[price]]] name[self].body.type assign[=] name[Xdr].const.CREATE_PASSIVE_OFFER name[self].body.createPassiveOfferOp assign[=] name[create_passive_offer_op] return[call[call[name[super], parameter[name[CreatePassiveOffer], name[self]]].to_xdr_object, parameter[]]]
keyword[def] identifier[to_xdr_object] ( identifier[self] ): literal[string] identifier[selling] = identifier[self] . identifier[selling] . identifier[to_xdr_object] () identifier[buying] = identifier[self] . identifier[buying] . identifier[to_xdr_object] () identifier[price] = identifier[Operation] . identifier[to_xdr_price] ( identifier[self] . identifier[price] ) identifier[price] = identifier[Xdr] . identifier[types] . identifier[Price] ( identifier[price] [ literal[string] ], identifier[price] [ literal[string] ]) identifier[amount] = identifier[Operation] . identifier[to_xdr_amount] ( identifier[self] . identifier[amount] ) identifier[create_passive_offer_op] = identifier[Xdr] . identifier[types] . identifier[CreatePassiveOfferOp] ( identifier[selling] , identifier[buying] , identifier[amount] , identifier[price] ) identifier[self] . identifier[body] . identifier[type] = identifier[Xdr] . identifier[const] . identifier[CREATE_PASSIVE_OFFER] identifier[self] . identifier[body] . identifier[createPassiveOfferOp] = identifier[create_passive_offer_op] keyword[return] identifier[super] ( identifier[CreatePassiveOffer] , identifier[self] ). identifier[to_xdr_object] ()
def to_xdr_object(self): """Creates an XDR Operation object that represents this :class:`CreatePassiveOffer`. """ selling = self.selling.to_xdr_object() buying = self.buying.to_xdr_object() price = Operation.to_xdr_price(self.price) price = Xdr.types.Price(price['n'], price['d']) amount = Operation.to_xdr_amount(self.amount) create_passive_offer_op = Xdr.types.CreatePassiveOfferOp(selling, buying, amount, price) self.body.type = Xdr.const.CREATE_PASSIVE_OFFER self.body.createPassiveOfferOp = create_passive_offer_op return super(CreatePassiveOffer, self).to_xdr_object()
def watch_run_status(server, project, run, apikey, timeout=None, update_period=1):
    """Poll a linkage run and yield its status as it changes.

    The current status is yielded immediately; afterwards a status object
    is yielded only when it differs from the previously seen one.  When
    the run reaches a terminal state ('error' or 'completed') that final
    status is yielded and the generator stops.

    :param server: Base url of the upstream server.
    :param project:
    :param run:
    :param apikey:
    :param timeout: Stop waiting after this many seconds; ``None`` (the
        default) waits forever.
    :param update_period: Seconds to sleep between status queries.
    :raises TimeoutError: if the run has not terminated before ``timeout``.
    """
    deadline = None if timeout is None else time.time() + timeout
    status = previous = run_get_status(server, project, run, apikey)
    yield status

    while deadline is None or time.time() < deadline:
        if status['state'] in {'error', 'completed'}:
            # Terminal state: emit it and stop polling.
            yield status
            return
        if status != previous:
            yield status
        time.sleep(update_period)
        previous = status
        try:
            status = run_get_status(server, project, run, apikey)
        except RateLimitedClient:
            # Back off briefly when the server rate-limits us; the stale
            # status is reused until the next successful fetch.
            time.sleep(1)

    raise TimeoutError("Timeout exceeded before run {} terminated".format(run))
def function[watch_run_status, parameter[server, project, run, apikey, timeout, update_period]]: constant[ Monitor a linkage run and yield status updates. Will immediately yield an update and then only yield further updates when the status object changes. If a timeout is provided and the run hasn't entered a terminal state (error or completed) when the timeout is reached, updates will cease and a TimeoutError will be raised. :param server: Base url of the upstream server. :param project: :param run: :param apikey: :param timeout: Stop waiting after this many seconds. The default (None) is to never give you up. :param update_period: Time in seconds between queries to the run's status. :raises TimeoutError ] variable[start_time] assign[=] call[name[time].time, parameter[]] variable[status] assign[=] call[name[run_get_status], parameter[name[server], name[project], name[run], name[apikey]]] <ast.Yield object at 0x7da18ede6aa0> def function[time_not_up, parameter[]]: return[<ast.BoolOp object at 0x7da18ede6140>] while call[name[time_not_up], parameter[]] begin[:] if compare[call[name[status]][constant[state]] in <ast.Set object at 0x7da18ede7e50>] begin[:] <ast.Yield object at 0x7da18ede47f0> return[None] if compare[name[old_status] not_equal[!=] name[status]] begin[:] <ast.Yield object at 0x7da18ede78b0> call[name[time].sleep, parameter[name[update_period]]] variable[old_status] assign[=] name[status] <ast.Try object at 0x7da18ede78e0> <ast.Raise object at 0x7da207f03a30>
keyword[def] identifier[watch_run_status] ( identifier[server] , identifier[project] , identifier[run] , identifier[apikey] , identifier[timeout] = keyword[None] , identifier[update_period] = literal[int] ): literal[string] identifier[start_time] = identifier[time] . identifier[time] () identifier[status] = identifier[old_status] = identifier[run_get_status] ( identifier[server] , identifier[project] , identifier[run] , identifier[apikey] ) keyword[yield] identifier[status] keyword[def] identifier[time_not_up] (): keyword[return] ( ( identifier[timeout] keyword[is] keyword[None] ) keyword[or] ( identifier[time] . identifier[time] ()- identifier[start_time] < identifier[timeout] ) ) keyword[while] identifier[time_not_up] (): keyword[if] identifier[status] [ literal[string] ] keyword[in] { literal[string] , literal[string] }: keyword[yield] identifier[status] keyword[return] keyword[if] identifier[old_status] != identifier[status] : keyword[yield] identifier[status] identifier[time] . identifier[sleep] ( identifier[update_period] ) identifier[old_status] = identifier[status] keyword[try] : identifier[status] = identifier[run_get_status] ( identifier[server] , identifier[project] , identifier[run] , identifier[apikey] ) keyword[except] identifier[RateLimitedClient] : identifier[time] . identifier[sleep] ( literal[int] ) keyword[raise] identifier[TimeoutError] ( literal[string] . identifier[format] ( identifier[run] ))
def watch_run_status(server, project, run, apikey, timeout=None, update_period=1): """ Monitor a linkage run and yield status updates. Will immediately yield an update and then only yield further updates when the status object changes. If a timeout is provided and the run hasn't entered a terminal state (error or completed) when the timeout is reached, updates will cease and a TimeoutError will be raised. :param server: Base url of the upstream server. :param project: :param run: :param apikey: :param timeout: Stop waiting after this many seconds. The default (None) is to never give you up. :param update_period: Time in seconds between queries to the run's status. :raises TimeoutError """ start_time = time.time() status = old_status = run_get_status(server, project, run, apikey) yield status def time_not_up(): return timeout is None or time.time() - start_time < timeout while time_not_up(): if status['state'] in {'error', 'completed'}: # No point continuing as run has entered a terminal state yield status return # depends on [control=['if'], data=[]] if old_status != status: yield status # depends on [control=['if'], data=['status']] time.sleep(update_period) old_status = status try: status = run_get_status(server, project, run, apikey) # depends on [control=['try'], data=[]] except RateLimitedClient: time.sleep(1) # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] raise TimeoutError('Timeout exceeded before run {} terminated'.format(run))
def generate_sl_transformation_sets(self, film_area, substrate_area):
    """
    Generates transformation sets for a film/substrate pair given the
    unit cell areas of the film and the substrate.  The transformation
    sets map the film and substrate unit cells to super lattices with a
    maximum area.

    Args:
        film_area(int): the unit cell area for the film
        substrate_area(int): the unit cell area for the substrate

    Returns:
        transformation_sets: a set of transformation_sets defined as:
            1.) the transformation matrices for the film to create a
                super lattice of area i*film_area
            2.) the transformation matrices for the substrate to create
                a super lattice of area j*substrate_area
    """
    max_film_multiple = int(self.max_area / film_area)
    max_substrate_multiple = int(self.max_area / substrate_area)
    target_ratio = film_area / substrate_area

    # Collect every (i, j) multiple pair whose resulting super-lattice
    # areas match to within the configured tolerance.
    candidate_pairs = []
    for i in range(1, max_film_multiple):
        for j in range(1, max_substrate_multiple):
            if np.absolute(target_ratio - float(j) / i) < self.max_area_ratio_tol:
                candidate_pairs.append((i, j))

    # Yield in order of increasing matched super-lattice area (i * j),
    # smallest first.
    for i, j in sorted(candidate_pairs, key=lambda pair: pair[0] * pair[1]):
        yield (gen_sl_transform_matricies(i),
               gen_sl_transform_matricies(j))
def function[generate_sl_transformation_sets, parameter[self, film_area, substrate_area]]: constant[ Generates transformation sets for film/substrate pair given the area of the unit cell area for the film and substrate. The transformation sets map the film and substrate unit cells to super lattices with a maximum area Args: film_area(int): the unit cell area for the film substrate_area(int): the unit cell area for the substrate Returns: transformation_sets: a set of transformation_sets defined as: 1.) the transformation matricies for the film to create a super lattice of area i*film area 2.) the tranformation matricies for the substrate to create a super lattice of area j*film area ] variable[transformation_indicies] assign[=] <ast.ListComp object at 0x7da1b1cd6f80> for taget[name[x]] in starred[call[name[sorted], parameter[name[transformation_indicies]]]] begin[:] <ast.Yield object at 0x7da1b1c59c60>
keyword[def] identifier[generate_sl_transformation_sets] ( identifier[self] , identifier[film_area] , identifier[substrate_area] ): literal[string] identifier[transformation_indicies] =[( identifier[i] , identifier[j] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[self] . identifier[max_area] / identifier[film_area] )) keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] , identifier[int] ( identifier[self] . identifier[max_area] / identifier[substrate_area] )) keyword[if] identifier[np] . identifier[absolute] ( identifier[film_area] / identifier[substrate_area] - identifier[float] ( identifier[j] )/ identifier[i] )< identifier[self] . identifier[max_area_ratio_tol] ] keyword[for] identifier[x] keyword[in] identifier[sorted] ( identifier[transformation_indicies] , identifier[key] = keyword[lambda] identifier[x] : identifier[x] [ literal[int] ]* identifier[x] [ literal[int] ]): keyword[yield] ( identifier[gen_sl_transform_matricies] ( identifier[x] [ literal[int] ]), identifier[gen_sl_transform_matricies] ( identifier[x] [ literal[int] ]))
def generate_sl_transformation_sets(self, film_area, substrate_area): """ Generates transformation sets for film/substrate pair given the area of the unit cell area for the film and substrate. The transformation sets map the film and substrate unit cells to super lattices with a maximum area Args: film_area(int): the unit cell area for the film substrate_area(int): the unit cell area for the substrate Returns: transformation_sets: a set of transformation_sets defined as: 1.) the transformation matricies for the film to create a super lattice of area i*film area 2.) the tranformation matricies for the substrate to create a super lattice of area j*film area """ transformation_indicies = [(i, j) for i in range(1, int(self.max_area / film_area)) for j in range(1, int(self.max_area / substrate_area)) if np.absolute(film_area / substrate_area - float(j) / i) < self.max_area_ratio_tol] # Sort sets by the square of the matching area and yield in order # from smallest to largest for x in sorted(transformation_indicies, key=lambda x: x[0] * x[1]): yield (gen_sl_transform_matricies(x[0]), gen_sl_transform_matricies(x[1])) # depends on [control=['for'], data=['x']]
def delete(self, deviceId, measurementId):
    """
    Deletes a stored measurement.

    :param deviceId: the device to measure.
    :param measurementId: the name of the measurement.
    :return: a (value, status) tuple; status is 200 with the removed value
             if it was deleted, 400 with None if no such measurement (or
             device).
    """
    record = self.measurements.get(deviceId)
    if record is None:
        return None, 400
    # Use a sentinel to detect a missing key: testing the popped value for
    # truthiness would wrongly report 400 for a stored-but-falsy
    # measurement (e.g. an empty dict).
    missing = object()
    popped = record.pop(measurementId, missing)
    if popped is missing:
        return None, 400
    return popped, 200
def function[delete, parameter[self, deviceId, measurementId]]: constant[ Deletes a stored measurement. :param deviceId: the device to measure. :param measurementId: the name of the measurement. :return: 200 if it was deleted, 400 if no such measurement (or device). ] variable[record] assign[=] call[name[self].measurements.get, parameter[name[deviceId]]] if compare[name[record] is_not constant[None]] begin[:] variable[popped] assign[=] call[name[record].pop, parameter[name[measurementId], constant[None]]] return[tuple[[<ast.Name object at 0x7da1b0f0c340>, <ast.IfExp object at 0x7da1b0f0c2e0>]]] return[tuple[[<ast.Constant object at 0x7da1b0ef7430>, <ast.Constant object at 0x7da1b0ef4c40>]]]
keyword[def] identifier[delete] ( identifier[self] , identifier[deviceId] , identifier[measurementId] ): literal[string] identifier[record] = identifier[self] . identifier[measurements] . identifier[get] ( identifier[deviceId] ) keyword[if] identifier[record] keyword[is] keyword[not] keyword[None] : identifier[popped] = identifier[record] . identifier[pop] ( identifier[measurementId] , keyword[None] ) keyword[return] identifier[popped] , literal[int] keyword[if] identifier[popped] keyword[else] literal[int] keyword[return] keyword[None] , literal[int]
def delete(self, deviceId, measurementId): """ Deletes a stored measurement. :param deviceId: the device to measure. :param measurementId: the name of the measurement. :return: 200 if it was deleted, 400 if no such measurement (or device). """ record = self.measurements.get(deviceId) if record is not None: popped = record.pop(measurementId, None) return (popped, 200 if popped else 400) # depends on [control=['if'], data=['record']] return (None, 400)
def _get_key_by_keyid(keyid):
    """Fetch a GPG key from the Ubuntu keyserver over HTTPS.

    8-, 16- and 40-hex-digit key IDs are all accepted by SKS keyservers
    (the longer forms are more secure; see the "dead beef attack" and
    https://evil32.com/).  Because HTTPS is used, SSLBump-like proxies
    will impersonate keyserver.ubuntu.com; if such proxy behaviour is
    expected, the proxy CA chain must be installed on every machine this
    runs on (e.g. via a ca-certs cloud-init directive through the
    cloudinit-userdata model-config, or a custom charm option).  DNS
    resolution for the URL's hostname happens at the proxy, not locally.

    Examples:
        https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6
        https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6
        https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6

    :param keyid: An 8, 16 or 40 hex digit keyid to find a key for
    :type keyid: (bytes, str)
    :returns: A key material for the specified GPG key id
    :rtype: (str, bytes)
    :raises: subprocess.CalledProcessError
    """
    # options=mr asks for machine-readable output (no html wrappers).
    lookup_url = ('https://keyserver.ubuntu.com'
                  '/pks/lookup?op=get&options=mr&exact=on&search=0x{}').format(keyid)
    # Honour the configured HTTPS proxy settings when contacting the
    # keyserver.
    return subprocess.check_output(['curl', lookup_url],
                                   env=env_proxy_settings(['https']))
def function[_get_key_by_keyid, parameter[keyid]]: constant[Get a key via HTTPS from the Ubuntu keyserver. Different key ID formats are supported by SKS keyservers (the longer ones are more secure, see "dead beef attack" and https://evil32.com/). Since HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will impersonate keyserver.ubuntu.com and generate a certificate with keyserver.ubuntu.com in the CN field or in SubjAltName fields of a certificate. If such proxy behavior is expected it is necessary to add the CA certificate chain containing the intermediate CA of the SSLBump proxy to every machine that this code runs on via ca-certs cloud-init directive (via cloudinit-userdata model-config) or via other means (such as through a custom charm option). Also note that DNS resolution for the hostname in a URL is done at a proxy server - not at the client side. 8-digit (32 bit) key ID https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 16-digit (64 bit) key ID https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 40-digit key ID: https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 :param keyid: An 8, 16 or 40 hex digit keyid to find a key for :type keyid: (bytes, str) :returns: A key material for the specified GPG key id :rtype: (str, bytes) :raises: subprocess.CalledProcessError ] variable[keyserver_url] assign[=] constant[https://keyserver.ubuntu.com/pks/lookup?op=get&options=mr&exact=on&search=0x{}] variable[curl_cmd] assign[=] list[[<ast.Constant object at 0x7da18f09d990>, <ast.Call object at 0x7da18f09d630>]] return[call[name[subprocess].check_output, parameter[name[curl_cmd]]]]
keyword[def] identifier[_get_key_by_keyid] ( identifier[keyid] ): literal[string] identifier[keyserver_url] =( literal[string] literal[string] ) identifier[curl_cmd] =[ literal[string] , identifier[keyserver_url] . identifier[format] ( identifier[keyid] )] keyword[return] identifier[subprocess] . identifier[check_output] ( identifier[curl_cmd] , identifier[env] = identifier[env_proxy_settings] ([ literal[string] ]))
def _get_key_by_keyid(keyid): """Get a key via HTTPS from the Ubuntu keyserver. Different key ID formats are supported by SKS keyservers (the longer ones are more secure, see "dead beef attack" and https://evil32.com/). Since HTTPS is used, if SSLBump-like HTTPS proxies are in place, they will impersonate keyserver.ubuntu.com and generate a certificate with keyserver.ubuntu.com in the CN field or in SubjAltName fields of a certificate. If such proxy behavior is expected it is necessary to add the CA certificate chain containing the intermediate CA of the SSLBump proxy to every machine that this code runs on via ca-certs cloud-init directive (via cloudinit-userdata model-config) or via other means (such as through a custom charm option). Also note that DNS resolution for the hostname in a URL is done at a proxy server - not at the client side. 8-digit (32 bit) key ID https://keyserver.ubuntu.com/pks/lookup?search=0x4652B4E6 16-digit (64 bit) key ID https://keyserver.ubuntu.com/pks/lookup?search=0x6E85A86E4652B4E6 40-digit key ID: https://keyserver.ubuntu.com/pks/lookup?search=0x35F77D63B5CEC106C577ED856E85A86E4652B4E6 :param keyid: An 8, 16 or 40 hex digit keyid to find a key for :type keyid: (bytes, str) :returns: A key material for the specified GPG key id :rtype: (str, bytes) :raises: subprocess.CalledProcessError """ # options=mr - machine-readable output (disables html wrappers) keyserver_url = 'https://keyserver.ubuntu.com/pks/lookup?op=get&options=mr&exact=on&search=0x{}' curl_cmd = ['curl', keyserver_url.format(keyid)] # use proxy server settings in order to retrieve the key return subprocess.check_output(curl_cmd, env=env_proxy_settings(['https']))
def cancel(self):
    """Cancel a connector from completing."""
    # Nothing to cancel unless a connect is in flight: it must have been
    # started and neither connected nor timed out yet.
    if not self.started or self.connected or self.timedout:
        return
    self.connect_watcher.stop()
    self.timeout_watcher.stop()
def function[cancel, parameter[self]]: constant[Cancel a connector from completing.] if <ast.BoolOp object at 0x7da1b09bd1e0> begin[:] call[name[self].connect_watcher.stop, parameter[]] call[name[self].timeout_watcher.stop, parameter[]]
keyword[def] identifier[cancel] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[started] keyword[and] keyword[not] identifier[self] . identifier[connected] keyword[and] keyword[not] identifier[self] . identifier[timedout] : identifier[self] . identifier[connect_watcher] . identifier[stop] () identifier[self] . identifier[timeout_watcher] . identifier[stop] ()
def cancel(self): """Cancel a connector from completing.""" if self.started and (not self.connected) and (not self.timedout): self.connect_watcher.stop() self.timeout_watcher.stop() # depends on [control=['if'], data=[]]
def consult(string_in):
    """
    provide file:consult/1 functionality with python types

    Translates a textual Erlang term into Python literal syntax and
    evaluates it: tuples ``{..}`` become Python tuples, lists stay
    lists, binaries ``<<..>>`` become tuples of integers, and quoted
    strings and (quoted or bare) atoms become Python strings.

    NOTE(review): the translated text is run through ``eval``, so this
    must never be fed untrusted input.  Escape sequences inside quoted
    strings are not handled specially.
    """
    # pylint: disable=eval-used
    # pylint: disable=too-many-branches
    # pylint: disable=too-many-statements
    # manually parse textual erlang data to avoid external dependencies
    list_out = []
    tuple_binary = False   # inside <<..>>: binaries become tuples of integers
    quoted_string = False  # inside "..": strings become python strings
    atom_string = False    # inside '..' or a bare atom: atoms become python strings
    number = False         # currently consuming a numeric literal
    whitespace = frozenset(('\n', '\t', ' '))
    length = len(string_in)
    i = 0
    while i < length:
        character = string_in[i]
        # Bounded one-character lookahead: yields '' past the end of the
        # input instead of raising IndexError (e.g. on trailing
        # whitespace or a trailing separator).
        lookahead = string_in[i + 1] if i + 1 < length else ''
        if character == ',':
            if atom_string:
                list_out.append('"')
                atom_string = False
            list_out.append(',')
            number = lookahead.isdigit()
        elif character == '{':
            list_out.append('(')
            number = lookahead.isdigit()
        elif character == '}':
            if atom_string:
                list_out.append('"')
                atom_string = False
            list_out.append(')')
            number = False
        elif character == '[':
            list_out.append('[')
            number = lookahead.isdigit()
        elif character == ']':
            if atom_string:
                list_out.append('"')
                atom_string = False
            list_out.append(']')
            number = False
        elif character == '<' and lookahead == '<':
            # '<<' opens a binary; emit '(' and skip the second '<'.
            list_out.append('(')
            tuple_binary = True
            i += 1
        elif character == '>' and lookahead == '>':
            # '>>' closes a binary; emit ')' and skip the second '>'.
            list_out.append(')')
            tuple_binary = False
            i += 1
        elif not quoted_string and not atom_string and character in whitespace:
            # Whitespace outside strings/atoms is dropped; peek ahead to
            # see whether a numeric literal starts next.
            number = lookahead.isdigit()
        elif tuple_binary or number:
            list_out.append(character)
        elif character == '"':
            if quoted_string:
                quoted_string = False
            else:
                quoted_string = True
            list_out.append('"')
        elif character == "'":
            # Quoted atoms are translated to python double-quoted strings.
            if atom_string:
                atom_string = False
            else:
                atom_string = True
            list_out.append('"')
        elif not quoted_string and not atom_string:
            # Start of a bare atom: open a python string for it.
            atom_string = True
            list_out.append('"')
            list_out.append(character)
        else:
            list_out.append(character)
        i += 1
    return eval(''.join(list_out))
def function[consult, parameter[string_in]]: constant[ provide file:consult/1 functionality with python types ] variable[list_out] assign[=] list[[]] variable[tuple_binary] assign[=] constant[False] variable[quoted_string] assign[=] constant[False] variable[atom_string] assign[=] constant[False] variable[number] assign[=] constant[False] variable[whitespace] assign[=] call[name[frozenset], parameter[tuple[[<ast.Constant object at 0x7da18c4cece0>, <ast.Constant object at 0x7da18c4cd570>, <ast.Constant object at 0x7da18c4cf3a0>]]]] variable[i] assign[=] constant[0] while compare[name[i] less[<] call[name[len], parameter[name[string_in]]]] begin[:] variable[character] assign[=] call[name[string_in]][name[i]] if compare[name[character] equal[==] constant[,]] begin[:] if name[atom_string] begin[:] call[name[list_out].append, parameter[constant["]]] variable[atom_string] assign[=] constant[False] call[name[list_out].append, parameter[constant[,]]] variable[number] assign[=] call[call[name[string_in]][binary_operation[name[i] + constant[1]]].isdigit, parameter[]] <ast.AugAssign object at 0x7da20c990940> return[call[name[eval], parameter[call[constant[].join, parameter[name[list_out]]]]]]
keyword[def] identifier[consult] ( identifier[string_in] ): literal[string] identifier[list_out] =[] identifier[tuple_binary] = keyword[False] identifier[quoted_string] = keyword[False] identifier[atom_string] = keyword[False] identifier[number] = keyword[False] identifier[whitespace] = identifier[frozenset] (( literal[string] , literal[string] , literal[string] )) identifier[i] = literal[int] keyword[while] identifier[i] < identifier[len] ( identifier[string_in] ): identifier[character] = identifier[string_in] [ identifier[i] ] keyword[if] identifier[character] == literal[string] : keyword[if] identifier[atom_string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[atom_string] = keyword[False] identifier[list_out] . identifier[append] ( literal[string] ) identifier[number] = identifier[string_in] [ identifier[i] + literal[int] ]. identifier[isdigit] () keyword[elif] identifier[character] == literal[string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[number] = identifier[string_in] [ identifier[i] + literal[int] ]. identifier[isdigit] () keyword[elif] identifier[character] == literal[string] : keyword[if] identifier[atom_string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[atom_string] = keyword[False] identifier[list_out] . identifier[append] ( literal[string] ) identifier[number] = keyword[False] keyword[elif] identifier[character] == literal[string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[number] = identifier[string_in] [ identifier[i] + literal[int] ]. identifier[isdigit] () keyword[elif] identifier[character] == literal[string] : keyword[if] identifier[atom_string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[atom_string] = keyword[False] identifier[list_out] . 
identifier[append] ( literal[string] ) identifier[number] = keyword[False] keyword[elif] identifier[character] == literal[string] keyword[and] identifier[string_in] [ identifier[i] + literal[int] ]== literal[string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[tuple_binary] = keyword[True] identifier[i] += literal[int] keyword[elif] identifier[character] == literal[string] keyword[and] identifier[string_in] [ identifier[i] + literal[int] ]== literal[string] : identifier[list_out] . identifier[append] ( literal[string] ) identifier[tuple_binary] = keyword[False] identifier[i] += literal[int] keyword[elif] keyword[not] identifier[quoted_string] keyword[and] keyword[not] identifier[atom_string] keyword[and] identifier[character] keyword[in] identifier[whitespace] : identifier[number] = identifier[string_in] [ identifier[i] + literal[int] ]. identifier[isdigit] () keyword[elif] identifier[tuple_binary] keyword[or] identifier[number] : identifier[list_out] . identifier[append] ( identifier[character] ) keyword[elif] identifier[character] == literal[string] : keyword[if] identifier[quoted_string] : identifier[quoted_string] = keyword[False] keyword[else] : identifier[quoted_string] = keyword[True] identifier[list_out] . identifier[append] ( literal[string] ) keyword[elif] identifier[character] == literal[string] : keyword[if] identifier[atom_string] : identifier[atom_string] = keyword[False] keyword[else] : identifier[atom_string] = keyword[True] identifier[list_out] . identifier[append] ( literal[string] ) keyword[elif] keyword[not] identifier[quoted_string] keyword[and] keyword[not] identifier[atom_string] : identifier[atom_string] = keyword[True] identifier[list_out] . identifier[append] ( literal[string] ) identifier[list_out] . identifier[append] ( identifier[character] ) keyword[else] : identifier[list_out] . identifier[append] ( identifier[character] ) identifier[i] += literal[int] keyword[return] identifier[eval] ( literal[string] . 
identifier[join] ( identifier[list_out] ))
def consult(string_in): """ provide file:consult/1 functionality with python types """ # pylint: disable=eval-used # pylint: disable=too-many-branches # pylint: disable=too-many-statements # manually parse textual erlang data to avoid external dependencies list_out = [] tuple_binary = False # binaries become tuples of integers quoted_string = False # strings become python string atom_string = False # atoms become python string number = False whitespace = frozenset(('\n', '\t', ' ')) i = 0 while i < len(string_in): character = string_in[i] if character == ',': if atom_string: list_out.append('"') atom_string = False # depends on [control=['if'], data=[]] list_out.append(',') number = string_in[i + 1].isdigit() # depends on [control=['if'], data=[]] elif character == '{': list_out.append('(') number = string_in[i + 1].isdigit() # depends on [control=['if'], data=[]] elif character == '}': if atom_string: list_out.append('"') atom_string = False # depends on [control=['if'], data=[]] list_out.append(')') number = False # depends on [control=['if'], data=[]] elif character == '[': list_out.append('[') number = string_in[i + 1].isdigit() # depends on [control=['if'], data=[]] elif character == ']': if atom_string: list_out.append('"') atom_string = False # depends on [control=['if'], data=[]] list_out.append(']') number = False # depends on [control=['if'], data=[]] elif character == '<' and string_in[i + 1] == '<': list_out.append('(') tuple_binary = True i += 1 # depends on [control=['if'], data=[]] elif character == '>' and string_in[i + 1] == '>': list_out.append(')') tuple_binary = False i += 1 # depends on [control=['if'], data=[]] elif not quoted_string and (not atom_string) and (character in whitespace): number = string_in[i + 1].isdigit() # depends on [control=['if'], data=[]] elif tuple_binary or number: list_out.append(character) # depends on [control=['if'], data=[]] elif character == '"': if quoted_string: quoted_string = False # depends on [control=['if'], 
data=[]] else: quoted_string = True list_out.append('"') # depends on [control=['if'], data=[]] elif character == "'": if atom_string: atom_string = False # depends on [control=['if'], data=[]] else: atom_string = True list_out.append('"') # depends on [control=['if'], data=[]] elif not quoted_string and (not atom_string): atom_string = True list_out.append('"') list_out.append(character) # depends on [control=['if'], data=[]] else: list_out.append(character) i += 1 # depends on [control=['while'], data=['i']] return eval(''.join(list_out))
def is_mag_data(mdat):
    '''
    is_mag_data(dat) yields True if the given data is a valid set of magnification data and False
    otherwise.

    Note that this does not return True for all valid return values of the mag_data() function:
    specifically, if the mag_data() function yields a list of mag-data maps or a lazy-map of the
    mag-data maps split out by visual area, then this will return False. This function only returns
    True for a map of mag data itself.
    '''
    if not pimms.is_map(mdat):
        return False
    # A valid mag-data map must carry every one of these keys.
    required_keys = ('surface_coordinates', 'visual_coordinates', 'mesh', 'submesh', 'mask',
                     'retinotopy_data', 'masked_data', 'surface_areas', 'visual_areas')
    return all(key in mdat for key in required_keys)
def function[is_mag_data, parameter[mdat]]: constant[ is_mag_data(dat) yields True if the given data is a valid set of magnification data and False otherwise. Note that this does not return True for all valid return values of the mag_data() function: specifically, if the mag_data() function yields a list of mag-data maps or a lazy-map of the mag-data maps split out by visual area, then this will return False. This function only returns True for a map of mag data itself. ] if <ast.UnaryOp object at 0x7da18bc718d0> begin[:] return[constant[False]] for taget[name[k]] in starred[list[[<ast.Constant object at 0x7da18bc70910>, <ast.Constant object at 0x7da18bc70e80>, <ast.Constant object at 0x7da18bc704f0>, <ast.Constant object at 0x7da18bc728c0>, <ast.Constant object at 0x7da18bc71990>, <ast.Constant object at 0x7da18bc716f0>, <ast.Constant object at 0x7da18bc73fd0>, <ast.Constant object at 0x7da18bc71fc0>, <ast.Constant object at 0x7da18bc71c00>]]] begin[:] if compare[name[k] <ast.NotIn object at 0x7da2590d7190> name[mdat]] begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[is_mag_data] ( identifier[mdat] ): literal[string] keyword[if] keyword[not] identifier[pimms] . identifier[is_map] ( identifier[mdat] ): keyword[return] keyword[False] keyword[for] identifier[k] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: keyword[if] identifier[k] keyword[not] keyword[in] identifier[mdat] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_mag_data(mdat): """ is_mag_data(dat) yields True if the given data is a valid set of magnification data and False otherwise. Note that this does not return True for all valid return values of the mag_data() function: specifically, if the mag_data() function yields a list of mag-data maps or a lazy-map of the mag-data maps split out by visual area, then this will return False. This function only returns True for a map of mag data itself. """ if not pimms.is_map(mdat): return False # depends on [control=['if'], data=[]] for k in ['surface_coordinates', 'visual_coordinates', 'mesh', 'submesh', 'mask', 'retinotopy_data', 'masked_data', 'surface_areas', 'visual_areas']: if k not in mdat: return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] return True
def SHA1_file(filepath, extra=b''):
    """
    Compute the SHA1 hex digest of a file's contents plus optional extra bytes.

    :param str filepath: File to hash
    :param bytes extra: Extra content added to raw read of file before taking hash
    :return: hex digest of hash
    :rtype: str
    """
    digest = hashlib.sha1()
    with io.open(filepath, 'rb') as stream:
        # Read in block-sized chunks so arbitrarily large files never load whole.
        chunk = stream.read(digest.block_size)
        while chunk:
            digest.update(chunk)
            chunk = stream.read(digest.block_size)
    digest.update(extra)
    return digest.hexdigest()
def function[SHA1_file, parameter[filepath, extra]]: constant[ Returns hex digest of SHA1 hash of file at filepath :param str filepath: File to hash :param bytes extra: Extra content added to raw read of file before taking hash :return: hex digest of hash :rtype: str ] variable[h] assign[=] call[name[hashlib].sha1, parameter[]] with call[name[io].open, parameter[name[filepath], constant[rb]]] begin[:] for taget[name[chunk]] in starred[call[name[iter], parameter[<ast.Lambda object at 0x7da1b07d2bf0>, constant[b'']]]] begin[:] call[name[h].update, parameter[name[chunk]]] call[name[h].update, parameter[name[extra]]] return[call[name[h].hexdigest, parameter[]]]
keyword[def] identifier[SHA1_file] ( identifier[filepath] , identifier[extra] = literal[string] ): literal[string] identifier[h] = identifier[hashlib] . identifier[sha1] () keyword[with] identifier[io] . identifier[open] ( identifier[filepath] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[chunk] keyword[in] identifier[iter] ( keyword[lambda] : identifier[f] . identifier[read] ( identifier[h] . identifier[block_size] ), literal[string] ): identifier[h] . identifier[update] ( identifier[chunk] ) identifier[h] . identifier[update] ( identifier[extra] ) keyword[return] identifier[h] . identifier[hexdigest] ()
def SHA1_file(filepath, extra=b''): """ Returns hex digest of SHA1 hash of file at filepath :param str filepath: File to hash :param bytes extra: Extra content added to raw read of file before taking hash :return: hex digest of hash :rtype: str """ h = hashlib.sha1() with io.open(filepath, 'rb') as f: for chunk in iter(lambda : f.read(h.block_size), b''): h.update(chunk) # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['f']] h.update(extra) return h.hexdigest()
def load_user_envs(self):
    """
    Register user-downloaded environments from the filesystem cache on `import gym`.
    """
    packages = self._list_packages()

    # Tag every core spec with its origin and the installed gym version (if known).
    if 'gym' in packages:
        core_package = 'gym ({})'.format(packages['gym'])
    else:
        core_package = 'gym'
    for spec in registry.all():
        spec.source = 'OpenAI Gym Core Package'
        spec.package = core_package

    # No cache file means no user envs have been downloaded yet.
    if not os.path.isfile(self.cache_path):
        return

    with open(self.cache_path) as cache_file:
        for entry in cache_file:
            pkg, envs = self._load_package(entry.rstrip('\n'), packages)
            if logger.level <= logging.DEBUG:
                logger.debug('Installed %d user environments from package "%s"',
                             len(envs), pkg['name'])

    if self.cache_needs_update:
        self._update_cache()

    if len(self.env_ids) > 0:
        logger.info('Found and registered %d user environments.', len(self.env_ids))
def function[load_user_envs, parameter[self]]: constant[ Loads downloaded user envs from filesystem cache on `import gym` ] variable[installed_packages] assign[=] call[name[self]._list_packages, parameter[]] variable[gym_package] assign[=] <ast.IfExp object at 0x7da2047e8070> variable[core_specs] assign[=] call[name[registry].all, parameter[]] for taget[name[spec]] in starred[name[core_specs]] begin[:] name[spec].source assign[=] constant[OpenAI Gym Core Package] name[spec].package assign[=] name[gym_package] if <ast.UnaryOp object at 0x7da2047e98d0> begin[:] return[None] with call[name[open], parameter[name[self].cache_path]] begin[:] for taget[name[line]] in starred[name[cache]] begin[:] <ast.Tuple object at 0x7da2047e89d0> assign[=] call[name[self]._load_package, parameter[call[name[line].rstrip, parameter[constant[ ]]], name[installed_packages]]] if compare[name[logger].level less_or_equal[<=] name[logging].DEBUG] begin[:] call[name[logger].debug, parameter[constant[Installed %d user environments from package "%s"], call[name[len], parameter[name[registered_envs]]], call[name[user_package]][constant[name]]]] if name[self].cache_needs_update begin[:] call[name[self]._update_cache, parameter[]] if compare[call[name[len], parameter[name[self].env_ids]] greater[>] constant[0]] begin[:] call[name[logger].info, parameter[constant[Found and registered %d user environments.], call[name[len], parameter[name[self].env_ids]]]]
keyword[def] identifier[load_user_envs] ( identifier[self] ): literal[string] identifier[installed_packages] = identifier[self] . identifier[_list_packages] () identifier[gym_package] = literal[string] . identifier[format] ( identifier[installed_packages] [ literal[string] ]) keyword[if] literal[string] keyword[in] identifier[installed_packages] keyword[else] literal[string] identifier[core_specs] = identifier[registry] . identifier[all] () keyword[for] identifier[spec] keyword[in] identifier[core_specs] : identifier[spec] . identifier[source] = literal[string] identifier[spec] . identifier[package] = identifier[gym_package] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[self] . identifier[cache_path] ): keyword[return] keyword[with] identifier[open] ( identifier[self] . identifier[cache_path] ) keyword[as] identifier[cache] : keyword[for] identifier[line] keyword[in] identifier[cache] : identifier[user_package] , identifier[registered_envs] = identifier[self] . identifier[_load_package] ( identifier[line] . identifier[rstrip] ( literal[string] ), identifier[installed_packages] ) keyword[if] identifier[logger] . identifier[level] <= identifier[logging] . identifier[DEBUG] : identifier[logger] . identifier[debug] ( literal[string] , identifier[len] ( identifier[registered_envs] ), identifier[user_package] [ literal[string] ]) keyword[if] identifier[self] . identifier[cache_needs_update] : identifier[self] . identifier[_update_cache] () keyword[if] identifier[len] ( identifier[self] . identifier[env_ids] )> literal[int] : identifier[logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[self] . identifier[env_ids] ))
def load_user_envs(self): """ Loads downloaded user envs from filesystem cache on `import gym` """ installed_packages = self._list_packages() # Tagging core envs gym_package = 'gym ({})'.format(installed_packages['gym']) if 'gym' in installed_packages else 'gym' core_specs = registry.all() for spec in core_specs: spec.source = 'OpenAI Gym Core Package' spec.package = gym_package # depends on [control=['for'], data=['spec']] # Loading user envs if not os.path.isfile(self.cache_path): return # depends on [control=['if'], data=[]] with open(self.cache_path) as cache: for line in cache: (user_package, registered_envs) = self._load_package(line.rstrip('\n'), installed_packages) if logger.level <= logging.DEBUG: logger.debug('Installed %d user environments from package "%s"', len(registered_envs), user_package['name']) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['cache']] if self.cache_needs_update: self._update_cache() # depends on [control=['if'], data=[]] if len(self.env_ids) > 0: logger.info('Found and registered %d user environments.', len(self.env_ids)) # depends on [control=['if'], data=[]]
def dir_on_bezier_curve(P=None, t=0.5):
    '''Return direction at t on bezier curve defined by control points P.

    List of vectors per pair of dimensions are returned in radians.
    E.g. Where X is "right", Y is "up", Z is "in" on a computer screen, and
    returned value is [pi/4, -pi/4], then the vector will be coming out the
    screen over the viewer's right shoulder.

    :param P: list of control-point tuples (defaults to a single origin point,
        which has no gradient, so None is returned)
    :param t: curve parameter in [0, 1]
    :return: direction vector from dir_between_pts, or None for a single point
    '''
    # Fix: the old default `P=[(0.0, 0.0)]` was a shared mutable default
    # argument; use None as the sentinel instead.
    if P is None:
        P = [(0.0, 0.0)]
    # NOTE(review): asserts are stripped under `python -O`; kept here to
    # preserve the existing AssertionError contract for invalid input.
    assert isinstance(P, list)
    assert len(P) > 0
    if not len(P) > 1:
        return None  # A single point has no gradient.
    for p in P:
        assert isinstance(p, tuple)
        # Fix: this length check used to sit inside the coordinate loop, so an
        # empty tuple was never validated; check each point exactly once.
        assert len(p) > 1
        for coord in p:
            assert isinstance(coord, float)
    assert isinstance(t, float)
    assert 0 <= t <= 1

    order = len(P) - 1  # Order of curve

    # De Casteljau reduction: interpolate adjacent control points at t until
    # only two remain; those two points lie on the tangent we want.
    Q = P
    while order > 1:
        Q = [pt_between_pts(Q[l], Q[l + 1], t) for l in range(order)]
        order -= 1
    assert len(Q) == 2

    # Reduce the two N-dimensional points to gradients on N-1 planes.
    return dir_between_pts(Q[0], Q[1])
def function[dir_on_bezier_curve, parameter[P, t]]: constant[Return direction at t on bezier curve defined by control points P. List of vectors per pair of dimensions are returned in radians. E.g. Where X is "right", Y is "up", Z is "in" on a computer screen, and returned value is [pi/4, -pi/4], then the vector will be coming out the screen over the viewer's right shoulder. ] assert[call[name[isinstance], parameter[name[P], name[list]]]] assert[compare[call[name[len], parameter[name[P]]] greater[>] constant[0]]] if <ast.UnaryOp object at 0x7da18ede5d50> begin[:] return[constant[None]] for taget[name[p]] in starred[name[P]] begin[:] assert[call[name[isinstance], parameter[name[p], name[tuple]]]] for taget[name[i]] in starred[name[p]] begin[:] assert[compare[call[name[len], parameter[name[p]]] greater[>] constant[1]]] assert[call[name[isinstance], parameter[name[i], name[float]]]] assert[call[name[isinstance], parameter[name[t], name[float]]]] assert[compare[constant[0] less_or_equal[<=] name[t]]] variable[O] assign[=] binary_operation[call[name[len], parameter[name[P]]] - constant[1]] variable[Q] assign[=] name[P] while compare[name[O] greater[>] constant[1]] begin[:] variable[Q] assign[=] <ast.ListComp object at 0x7da18ede46a0> <ast.AugAssign object at 0x7da18ede7dc0> assert[compare[call[name[len], parameter[name[Q]]] equal[==] constant[2]]] variable[q0] assign[=] call[name[Q]][constant[0]] variable[q1] assign[=] call[name[Q]][constant[1]] return[call[name[dir_between_pts], parameter[name[q0], name[q1]]]]
keyword[def] identifier[dir_on_bezier_curve] ( identifier[P] =[( literal[int] , literal[int] )], identifier[t] = literal[int] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[P] , identifier[list] ) keyword[assert] identifier[len] ( identifier[P] )> literal[int] keyword[if] keyword[not] identifier[len] ( identifier[P] )> literal[int] : keyword[return] keyword[None] keyword[for] identifier[p] keyword[in] identifier[P] : keyword[assert] identifier[isinstance] ( identifier[p] , identifier[tuple] ) keyword[for] identifier[i] keyword[in] identifier[p] : keyword[assert] identifier[len] ( identifier[p] )> literal[int] keyword[assert] identifier[isinstance] ( identifier[i] , identifier[float] ) keyword[assert] identifier[isinstance] ( identifier[t] , identifier[float] ) keyword[assert] literal[int] <= identifier[t] <= literal[int] identifier[O] = identifier[len] ( identifier[P] )- literal[int] identifier[Q] = identifier[P] keyword[while] identifier[O] > literal[int] : identifier[Q] =[ identifier[pt_between_pts] ( identifier[Q] [ identifier[l] ], identifier[Q] [ identifier[l] + literal[int] ], identifier[t] ) keyword[for] identifier[l] keyword[in] identifier[range] ( identifier[O] )] identifier[O] -= literal[int] keyword[assert] identifier[len] ( identifier[Q] )== literal[int] identifier[q0] = identifier[Q] [ literal[int] ] identifier[q1] = identifier[Q] [ literal[int] ] keyword[return] identifier[dir_between_pts] ( identifier[q0] , identifier[q1] )
def dir_on_bezier_curve(P=[(0.0, 0.0)], t=0.5): """Return direction at t on bezier curve defined by control points P. List of vectors per pair of dimensions are returned in radians. E.g. Where X is "right", Y is "up", Z is "in" on a computer screen, and returned value is [pi/4, -pi/4], then the vector will be coming out the screen over the viewer's right shoulder. """ assert isinstance(P, list) assert len(P) > 0 if not len(P) > 1: return None # Points have no gradient. # depends on [control=['if'], data=[]] for p in P: assert isinstance(p, tuple) for i in p: assert len(p) > 1 assert isinstance(i, float) # depends on [control=['for'], data=['i']] # depends on [control=['for'], data=['p']] assert isinstance(t, float) assert 0 <= t <= 1 O = len(P) - 1 # Order of curve # Recurse down the orders calculating the next set of control points until # there are only two left, which is the points on the gradient we want. Q = P while O > 1: Q = [pt_between_pts(Q[l], Q[l + 1], t) for l in range(O)] O -= 1 # depends on [control=['while'], data=['O']] assert len(Q) == 2 # Now that we have the two points in N dimensions, we can reduce to the # gradients on N-1 planes. q0 = Q[0] q1 = Q[1] return dir_between_pts(q0, q1)
def make_lines(data, precision=None):
    """Serialize the points in *data* to InfluxDB 0.9.0 line protocol.

    Returns a Unicode string with one protocol line per point, terminated by
    a trailing newline.
    """
    shared_tags = data.get('tags')
    output = []
    for point in data['points']:
        parts = []

        # Measurement name: the point's own value wins over the payload default.
        name = _escape_tag(_get_unicode(
            point.get('measurement', data.get('measurement'))))
        measurement_and_tags = [name]

        # Merge static payload tags with per-point tags (per-point wins).
        if shared_tags:
            combined = dict(shared_tags)  # copy: don't mutate the shared dict
            combined.update(point.get('tags') or {})
        else:
            combined = point.get('tags') or {}

        # tags should be sorted client-side to take load off server
        for raw_key, raw_value in sorted(iteritems(combined)):
            escaped_key = _escape_tag(raw_key)
            escaped_value = _escape_tag_value(raw_value)
            if escaped_key != '' and escaped_value != '':
                measurement_and_tags.append(escaped_key + "=" + escaped_value)
        parts.append(','.join(measurement_and_tags))

        # Fields, sorted for deterministic output.
        field_pairs = []
        for raw_key, raw_value in sorted(iteritems(point['fields'])):
            escaped_key = _escape_tag(raw_key)
            escaped_value = _escape_value(raw_value)
            if escaped_key != '' and escaped_value != '':
                field_pairs.append(escaped_key + "=" + escaped_value)
        parts.append(','.join(field_pairs))

        # Optional timestamp, converted to the requested precision.
        if 'time' in point:
            parts.append(_get_unicode(str(int(
                _convert_timestamp(point['time'], precision)))))

        output.append(' '.join(parts))
    return '\n'.join(output) + '\n'
def function[make_lines, parameter[data, precision]]: constant[Extract points from given dict. Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. ] variable[lines] assign[=] list[[]] variable[static_tags] assign[=] call[name[data].get, parameter[constant[tags]]] for taget[name[point]] in starred[call[name[data]][constant[points]]] begin[:] variable[elements] assign[=] list[[]] variable[measurement] assign[=] call[name[_escape_tag], parameter[call[name[_get_unicode], parameter[call[name[point].get, parameter[constant[measurement], call[name[data].get, parameter[constant[measurement]]]]]]]]] variable[key_values] assign[=] list[[<ast.Name object at 0x7da204567f40>]] if name[static_tags] begin[:] variable[tags] assign[=] call[name[dict], parameter[name[static_tags]]] call[name[tags].update, parameter[<ast.BoolOp object at 0x7da204566500>]] for taget[tuple[[<ast.Name object at 0x7da204566290>, <ast.Name object at 0x7da204565570>]]] in starred[call[name[sorted], parameter[call[name[iteritems], parameter[name[tags]]]]]] begin[:] variable[key] assign[=] call[name[_escape_tag], parameter[name[tag_key]]] variable[value] assign[=] call[name[_escape_tag_value], parameter[name[tag_value]]] if <ast.BoolOp object at 0x7da2045645b0> begin[:] call[name[key_values].append, parameter[binary_operation[binary_operation[name[key] + constant[=]] + name[value]]]] call[name[elements].append, parameter[call[constant[,].join, parameter[name[key_values]]]]] variable[field_values] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da204564490>, <ast.Name object at 0x7da204564cd0>]]] in starred[call[name[sorted], parameter[call[name[iteritems], parameter[call[name[point]][constant[fields]]]]]]] begin[:] variable[key] assign[=] call[name[_escape_tag], parameter[name[field_key]]] variable[value] assign[=] call[name[_escape_value], parameter[name[field_value]]] if <ast.BoolOp object at 0x7da1b18a37c0> begin[:] 
call[name[field_values].append, parameter[binary_operation[binary_operation[name[key] + constant[=]] + name[value]]]] call[name[elements].append, parameter[call[constant[,].join, parameter[name[field_values]]]]] if compare[constant[time] in name[point]] begin[:] variable[timestamp] assign[=] call[name[_get_unicode], parameter[call[name[str], parameter[call[name[int], parameter[call[name[_convert_timestamp], parameter[call[name[point]][constant[time]], name[precision]]]]]]]]] call[name[elements].append, parameter[name[timestamp]]] variable[line] assign[=] call[constant[ ].join, parameter[name[elements]]] call[name[lines].append, parameter[name[line]]] return[binary_operation[call[constant[ ].join, parameter[name[lines]]] + constant[ ]]]
keyword[def] identifier[make_lines] ( identifier[data] , identifier[precision] = keyword[None] ): literal[string] identifier[lines] =[] identifier[static_tags] = identifier[data] . identifier[get] ( literal[string] ) keyword[for] identifier[point] keyword[in] identifier[data] [ literal[string] ]: identifier[elements] =[] identifier[measurement] = identifier[_escape_tag] ( identifier[_get_unicode] ( identifier[point] . identifier[get] ( literal[string] , identifier[data] . identifier[get] ( literal[string] )))) identifier[key_values] =[ identifier[measurement] ] keyword[if] identifier[static_tags] : identifier[tags] = identifier[dict] ( identifier[static_tags] ) identifier[tags] . identifier[update] ( identifier[point] . identifier[get] ( literal[string] ) keyword[or] {}) keyword[else] : identifier[tags] = identifier[point] . identifier[get] ( literal[string] ) keyword[or] {} keyword[for] identifier[tag_key] , identifier[tag_value] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[tags] )): identifier[key] = identifier[_escape_tag] ( identifier[tag_key] ) identifier[value] = identifier[_escape_tag_value] ( identifier[tag_value] ) keyword[if] identifier[key] != literal[string] keyword[and] identifier[value] != literal[string] : identifier[key_values] . identifier[append] ( identifier[key] + literal[string] + identifier[value] ) identifier[elements] . identifier[append] ( literal[string] . identifier[join] ( identifier[key_values] )) identifier[field_values] =[] keyword[for] identifier[field_key] , identifier[field_value] keyword[in] identifier[sorted] ( identifier[iteritems] ( identifier[point] [ literal[string] ])): identifier[key] = identifier[_escape_tag] ( identifier[field_key] ) identifier[value] = identifier[_escape_value] ( identifier[field_value] ) keyword[if] identifier[key] != literal[string] keyword[and] identifier[value] != literal[string] : identifier[field_values] . 
identifier[append] ( identifier[key] + literal[string] + identifier[value] ) identifier[elements] . identifier[append] ( literal[string] . identifier[join] ( identifier[field_values] )) keyword[if] literal[string] keyword[in] identifier[point] : identifier[timestamp] = identifier[_get_unicode] ( identifier[str] ( identifier[int] ( identifier[_convert_timestamp] ( identifier[point] [ literal[string] ], identifier[precision] )))) identifier[elements] . identifier[append] ( identifier[timestamp] ) identifier[line] = literal[string] . identifier[join] ( identifier[elements] ) identifier[lines] . identifier[append] ( identifier[line] ) keyword[return] literal[string] . identifier[join] ( identifier[lines] )+ literal[string]
def make_lines(data, precision=None): """Extract points from given dict. Extracts the points from the given dict and returns a Unicode string matching the line protocol introduced in InfluxDB 0.9.0. """ lines = [] static_tags = data.get('tags') for point in data['points']: elements = [] # add measurement name measurement = _escape_tag(_get_unicode(point.get('measurement', data.get('measurement')))) key_values = [measurement] # add tags if static_tags: tags = dict(static_tags) # make a copy, since we'll modify tags.update(point.get('tags') or {}) # depends on [control=['if'], data=[]] else: tags = point.get('tags') or {} # tags should be sorted client-side to take load off server for (tag_key, tag_value) in sorted(iteritems(tags)): key = _escape_tag(tag_key) value = _escape_tag_value(tag_value) if key != '' and value != '': key_values.append(key + '=' + value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] elements.append(','.join(key_values)) # add fields field_values = [] for (field_key, field_value) in sorted(iteritems(point['fields'])): key = _escape_tag(field_key) value = _escape_value(field_value) if key != '' and value != '': field_values.append(key + '=' + value) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] elements.append(','.join(field_values)) # add timestamp if 'time' in point: timestamp = _get_unicode(str(int(_convert_timestamp(point['time'], precision)))) elements.append(timestamp) # depends on [control=['if'], data=['point']] line = ' '.join(elements) lines.append(line) # depends on [control=['for'], data=['point']] return '\n'.join(lines) + '\n'
def _init_middlewares(self):
    """Build the middleware pipeline: deserialize, hook wrappers, serialize.

    If you have another Middleware, like BrokeMiddleware for e.x
    You can append this to middleware:
    self.middleware.append(BrokeMiddleware())
    """
    pipeline = [DeserializeMiddleware()]
    for hook in self.before_hooks():
        pipeline.append(FuncMiddleware(hook))
    pipeline.append(SerializeMiddleware())
    self.middleware = pipeline
def function[_init_middlewares, parameter[self]]: constant[Initialize hooks and middlewares If you have another Middleware, like BrokeMiddleware for e.x You can append this to middleware: self.middleware.append(BrokeMiddleware()) ] name[self].middleware assign[=] list[[<ast.Call object at 0x7da18c4cd990>]] <ast.AugAssign object at 0x7da18c4cc460> call[name[self].middleware.append, parameter[call[name[SerializeMiddleware], parameter[]]]]
keyword[def] identifier[_init_middlewares] ( identifier[self] ): literal[string] identifier[self] . identifier[middleware] =[ identifier[DeserializeMiddleware] ()] identifier[self] . identifier[middleware] +=[ identifier[FuncMiddleware] ( identifier[hook] ) keyword[for] identifier[hook] keyword[in] identifier[self] . identifier[before_hooks] ()] identifier[self] . identifier[middleware] . identifier[append] ( identifier[SerializeMiddleware] ())
def _init_middlewares(self): """Initialize hooks and middlewares If you have another Middleware, like BrokeMiddleware for e.x You can append this to middleware: self.middleware.append(BrokeMiddleware()) """ self.middleware = [DeserializeMiddleware()] self.middleware += [FuncMiddleware(hook) for hook in self.before_hooks()] self.middleware.append(SerializeMiddleware())
def history(zpool=None, internal=False, verbose=False):
    '''
    .. versionadded:: 2016.3.0

    Displays the command history of the specified pools, or all pools if no
    pool is specified

    zpool : string
        Optional storage pool

    internal : boolean
        Toggle display of internally logged ZFS events

    verbose : boolean
        Toggle display of the user name, the hostname, and the zone in which
        the operation was performed

    CLI Example:

    .. code-block:: bash

        salt '*' zpool.history myzpool
    '''
    # Fix: the docstring CLI example previously showed `zpool.upgrade`.
    ret = OrderedDict()

    ## Configure pool
    # NOTE: initialize the defaults
    flags = []

    # NOTE: set extra config
    if verbose:
        flags.append('-l')
    if internal:
        flags.append('-i')

    ## Lookup history
    res = __salt__['cmd.run_all'](
        __utils__['zfs.zpool_command'](
            command='history',
            flags=flags,
            target=zpool,
        ),
        python_shell=False,
    )

    if res['retcode'] != 0:
        return __utils__['zfs.parse_command_result'](res)

    # Parse textual output: a "History for '<pool>':" header starts each pool
    # section; each following non-empty line is "<timestamp> <command>".
    pool = 'unknown'
    for line in res['stdout'].splitlines():
        if line.startswith('History for'):
            pool = line[13:-2]  # strip "History for '" prefix and "':" suffix
            ret[pool] = OrderedDict()
        elif line != '':
            log_timestamp = line[0:19]  # fixed-width timestamp field
            log_command = line[20:]
            ret[pool][log_timestamp] = log_command

    return ret
def function[history, parameter[zpool, internal, verbose]]: constant[ .. versionadded:: 2016.3.0 Displays the command history of the specified pools, or all pools if no pool is specified zpool : string Optional storage pool internal : boolean Toggle display of internally logged ZFS events verbose : boolean Toggle display of the user name, the hostname, and the zone in which the operation was performed CLI Example: .. code-block:: bash salt '*' zpool.upgrade myzpool ] variable[ret] assign[=] call[name[OrderedDict], parameter[]] variable[flags] assign[=] list[[]] if name[verbose] begin[:] call[name[flags].append, parameter[constant[-l]]] if name[internal] begin[:] call[name[flags].append, parameter[constant[-i]]] variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[call[call[name[__utils__]][constant[zfs.zpool_command]], parameter[]]]] if compare[call[name[res]][constant[retcode]] not_equal[!=] constant[0]] begin[:] return[call[call[name[__utils__]][constant[zfs.parse_command_result]], parameter[name[res]]]] return[name[ret]]
keyword[def] identifier[history] ( identifier[zpool] = keyword[None] , identifier[internal] = keyword[False] , identifier[verbose] = keyword[False] ): literal[string] identifier[ret] = identifier[OrderedDict] () identifier[flags] =[] keyword[if] identifier[verbose] : identifier[flags] . identifier[append] ( literal[string] ) keyword[if] identifier[internal] : identifier[flags] . identifier[append] ( literal[string] ) identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[__utils__] [ literal[string] ]( identifier[command] = literal[string] , identifier[flags] = identifier[flags] , identifier[target] = identifier[zpool] , ), identifier[python_shell] = keyword[False] , ) keyword[if] identifier[res] [ literal[string] ]!= literal[int] : keyword[return] identifier[__utils__] [ literal[string] ]( identifier[res] ) keyword[else] : identifier[pool] = literal[string] keyword[for] identifier[line] keyword[in] identifier[res] [ literal[string] ]. identifier[splitlines] (): keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[pool] = identifier[line] [ literal[int] :- literal[int] ] identifier[ret] [ identifier[pool] ]= identifier[OrderedDict] () keyword[else] : keyword[if] identifier[line] == literal[string] : keyword[continue] identifier[log_timestamp] = identifier[line] [ literal[int] : literal[int] ] identifier[log_command] = identifier[line] [ literal[int] :] identifier[ret] [ identifier[pool] ][ identifier[log_timestamp] ]= identifier[log_command] keyword[return] identifier[ret]
def history(zpool=None, internal=False, verbose=False): """ .. versionadded:: 2016.3.0 Displays the command history of the specified pools, or all pools if no pool is specified zpool : string Optional storage pool internal : boolean Toggle display of internally logged ZFS events verbose : boolean Toggle display of the user name, the hostname, and the zone in which the operation was performed CLI Example: .. code-block:: bash salt '*' zpool.upgrade myzpool """ ret = OrderedDict() ## Configure pool # NOTE: initialize the defaults flags = [] # NOTE: set extra config if verbose: flags.append('-l') # depends on [control=['if'], data=[]] if internal: flags.append('-i') # depends on [control=['if'], data=[]] ## Lookup history res = __salt__['cmd.run_all'](__utils__['zfs.zpool_command'](command='history', flags=flags, target=zpool), python_shell=False) if res['retcode'] != 0: return __utils__['zfs.parse_command_result'](res) # depends on [control=['if'], data=[]] else: pool = 'unknown' for line in res['stdout'].splitlines(): if line.startswith('History for'): pool = line[13:-2] ret[pool] = OrderedDict() # depends on [control=['if'], data=[]] else: if line == '': continue # depends on [control=['if'], data=[]] log_timestamp = line[0:19] log_command = line[20:] ret[pool][log_timestamp] = log_command # depends on [control=['for'], data=['line']] return ret
def extract_worker_exc(*arg, **kw):
    """Collect exceptions queued by workers into RESULT, keyed by task name."""
    strategy = arg[0]
    # Only StrategyBase instances carry workers; ignore anything else.
    if not isinstance(strategy, StrategyBase):
        return
    # Walk every worker to reach its task and result queue.
    for worker, _main_q, result_q in strategy._workers:
        task = worker._task
        if task.action == 'setup':
            # Setup tasks are not interesting here.
            continue
        # Drain this worker's result queue until it is empty.
        while True:
            try:
                exc = result_q.get(block=False, interceptor=True)
                RESULT[task.name].add(exc)
            except Empty:
                break
def function[extract_worker_exc, parameter[]]: constant[Get exception added by worker] variable[_self] assign[=] call[name[arg]][constant[0]] if <ast.UnaryOp object at 0x7da20e957520> begin[:] return[None] for taget[tuple[[<ast.Name object at 0x7da20e957df0>, <ast.Name object at 0x7da20e957100>, <ast.Name object at 0x7da20e954f70>]]] in starred[name[_self]._workers] begin[:] variable[_task] assign[=] name[_worker_prc]._task if compare[name[_task].action equal[==] constant[setup]] begin[:] continue while constant[True] begin[:] <ast.Try object at 0x7da20e955a50>
keyword[def] identifier[extract_worker_exc] (* identifier[arg] ,** identifier[kw] ): literal[string] identifier[_self] = identifier[arg] [ literal[int] ] keyword[if] keyword[not] identifier[isinstance] ( identifier[_self] , identifier[StrategyBase] ): keyword[return] keyword[for] identifier[_worker_prc] , identifier[_main_q] , identifier[_rslt_q] keyword[in] identifier[_self] . identifier[_workers] : identifier[_task] = identifier[_worker_prc] . identifier[_task] keyword[if] identifier[_task] . identifier[action] == literal[string] : keyword[continue] keyword[while] keyword[True] : keyword[try] : identifier[_exc] = identifier[_rslt_q] . identifier[get] ( identifier[block] = keyword[False] , identifier[interceptor] = keyword[True] ) identifier[RESULT] [ identifier[_task] . identifier[name] ]. identifier[add] ( identifier[_exc] ) keyword[except] identifier[Empty] : keyword[break]
def extract_worker_exc(*arg, **kw): """Get exception added by worker""" _self = arg[0] if not isinstance(_self, StrategyBase): # Run for StrategyBase instance only return # depends on [control=['if'], data=[]] # Iterate over workers to get their task and queue for (_worker_prc, _main_q, _rslt_q) in _self._workers: _task = _worker_prc._task if _task.action == 'setup': # Ignore setup continue # depends on [control=['if'], data=[]] # Do till queue is empty for the worker while True: try: _exc = _rslt_q.get(block=False, interceptor=True) RESULT[_task.name].add(_exc) # depends on [control=['try'], data=[]] except Empty: break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['for'], data=[]]
def is_archive(self): ''' Determines if the attachment is an archive. ''' try: if zipfile.is_zipfile(self.attachment.path) or tarfile.is_tarfile(self.attachment.path): return True except Exception: pass return False
def function[is_archive, parameter[self]]: constant[ Determines if the attachment is an archive. ] <ast.Try object at 0x7da1b27bb580> return[constant[False]]
keyword[def] identifier[is_archive] ( identifier[self] ): literal[string] keyword[try] : keyword[if] identifier[zipfile] . identifier[is_zipfile] ( identifier[self] . identifier[attachment] . identifier[path] ) keyword[or] identifier[tarfile] . identifier[is_tarfile] ( identifier[self] . identifier[attachment] . identifier[path] ): keyword[return] keyword[True] keyword[except] identifier[Exception] : keyword[pass] keyword[return] keyword[False]
def is_archive(self): """ Determines if the attachment is an archive. """ try: if zipfile.is_zipfile(self.attachment.path) or tarfile.is_tarfile(self.attachment.path): return True # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] return False
def clean_whitespace(string, compact=False): """Return string with compressed whitespace.""" for a, b in (('\r\n', '\n'), ('\r', '\n'), ('\n\n', '\n'), ('\t', ' '), (' ', ' ')): string = string.replace(a, b) if compact: for a, b in (('\n', ' '), ('[ ', '['), (' ', ' '), (' ', ' '), (' ', ' ')): string = string.replace(a, b) return string.strip()
def function[clean_whitespace, parameter[string, compact]]: constant[Return string with compressed whitespace.] for taget[tuple[[<ast.Name object at 0x7da1b1970970>, <ast.Name object at 0x7da1b19727a0>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b1972170>, <ast.Tuple object at 0x7da1b1973b20>, <ast.Tuple object at 0x7da1b19712d0>, <ast.Tuple object at 0x7da1b1970850>, <ast.Tuple object at 0x7da1b1972440>]]] begin[:] variable[string] assign[=] call[name[string].replace, parameter[name[a], name[b]]] if name[compact] begin[:] for taget[tuple[[<ast.Name object at 0x7da1b1970ca0>, <ast.Name object at 0x7da1b1973250>]]] in starred[tuple[[<ast.Tuple object at 0x7da1b1970280>, <ast.Tuple object at 0x7da1b1972dd0>, <ast.Tuple object at 0x7da1b19720b0>, <ast.Tuple object at 0x7da1b1970af0>, <ast.Tuple object at 0x7da1b1973bb0>]]] begin[:] variable[string] assign[=] call[name[string].replace, parameter[name[a], name[b]]] return[call[name[string].strip, parameter[]]]
keyword[def] identifier[clean_whitespace] ( identifier[string] , identifier[compact] = keyword[False] ): literal[string] keyword[for] identifier[a] , identifier[b] keyword[in] (( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] )): identifier[string] = identifier[string] . identifier[replace] ( identifier[a] , identifier[b] ) keyword[if] identifier[compact] : keyword[for] identifier[a] , identifier[b] keyword[in] (( literal[string] , literal[string] ),( literal[string] , literal[string] ), ( literal[string] , literal[string] ),( literal[string] , literal[string] ),( literal[string] , literal[string] )): identifier[string] = identifier[string] . identifier[replace] ( identifier[a] , identifier[b] ) keyword[return] identifier[string] . identifier[strip] ()
def clean_whitespace(string, compact=False): """Return string with compressed whitespace.""" for (a, b) in (('\r\n', '\n'), ('\r', '\n'), ('\n\n', '\n'), ('\t', ' '), (' ', ' ')): string = string.replace(a, b) # depends on [control=['for'], data=[]] if compact: for (a, b) in (('\n', ' '), ('[ ', '['), (' ', ' '), (' ', ' '), (' ', ' ')): string = string.replace(a, b) # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] return string.strip()
def is_time(value, minimum = None, maximum = None, coerce_value = False, **kwargs): """Indicate whether ``value`` is a :class:`time <python:datetime.time>`. :param value: The value to evaluate. :param minimum: If supplied, will make sure that ``value`` is on or after this value. :type minimum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param maximum: If supplied, will make sure that ``value`` is on or before this value. :type maximum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param coerce_value: If ``True``, will return ``True`` if ``value`` can be coerced to a :class:`time <python:datetime.time>`. If ``False``, will only return ``True`` if ``value`` is a valid time. Defaults to ``False``. :type coerce_value: :class:`bool <python:bool>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator """ try: value = validators.time(value, minimum = minimum, maximum = maximum, coerce_value = coerce_value, **kwargs) except SyntaxError as error: raise error except Exception: return False return True
def function[is_time, parameter[value, minimum, maximum, coerce_value]]: constant[Indicate whether ``value`` is a :class:`time <python:datetime.time>`. :param value: The value to evaluate. :param minimum: If supplied, will make sure that ``value`` is on or after this value. :type minimum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param maximum: If supplied, will make sure that ``value`` is on or before this value. :type maximum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param coerce_value: If ``True``, will return ``True`` if ``value`` can be coerced to a :class:`time <python:datetime.time>`. If ``False``, will only return ``True`` if ``value`` is a valid time. Defaults to ``False``. :type coerce_value: :class:`bool <python:bool>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator ] <ast.Try object at 0x7da1b0659fc0> return[constant[True]]
keyword[def] identifier[is_time] ( identifier[value] , identifier[minimum] = keyword[None] , identifier[maximum] = keyword[None] , identifier[coerce_value] = keyword[False] , ** identifier[kwargs] ): literal[string] keyword[try] : identifier[value] = identifier[validators] . identifier[time] ( identifier[value] , identifier[minimum] = identifier[minimum] , identifier[maximum] = identifier[maximum] , identifier[coerce_value] = identifier[coerce_value] , ** identifier[kwargs] ) keyword[except] identifier[SyntaxError] keyword[as] identifier[error] : keyword[raise] identifier[error] keyword[except] identifier[Exception] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_time(value, minimum=None, maximum=None, coerce_value=False, **kwargs): """Indicate whether ``value`` is a :class:`time <python:datetime.time>`. :param value: The value to evaluate. :param minimum: If supplied, will make sure that ``value`` is on or after this value. :type minimum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param maximum: If supplied, will make sure that ``value`` is on or before this value. :type maximum: :func:`datetime <validator_collection.validators.datetime>` or :func:`time <validator_collection.validators.time>`-compliant :class:`str <python:str>` / :class:`datetime <python:datetime.datetime>` / :class:`time <python:datetime.time> / numeric / :obj:`None <python:None>` :param coerce_value: If ``True``, will return ``True`` if ``value`` can be coerced to a :class:`time <python:datetime.time>`. If ``False``, will only return ``True`` if ``value`` is a valid time. Defaults to ``False``. :type coerce_value: :class:`bool <python:bool>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator """ try: value = validators.time(value, minimum=minimum, maximum=maximum, coerce_value=coerce_value, **kwargs) # depends on [control=['try'], data=[]] except SyntaxError as error: raise error # depends on [control=['except'], data=['error']] except Exception: return False # depends on [control=['except'], data=[]] return True
def _location(self, obj): """ Get location of the `obj` Arguments: :obj: self.Model instance. """ field_name = self.clean_id_name return self.request.route_url( self._resource.uid, **{self._resource.id_name: getattr(obj, field_name)})
def function[_location, parameter[self, obj]]: constant[ Get location of the `obj` Arguments: :obj: self.Model instance. ] variable[field_name] assign[=] name[self].clean_id_name return[call[name[self].request.route_url, parameter[name[self]._resource.uid]]]
keyword[def] identifier[_location] ( identifier[self] , identifier[obj] ): literal[string] identifier[field_name] = identifier[self] . identifier[clean_id_name] keyword[return] identifier[self] . identifier[request] . identifier[route_url] ( identifier[self] . identifier[_resource] . identifier[uid] , **{ identifier[self] . identifier[_resource] . identifier[id_name] : identifier[getattr] ( identifier[obj] , identifier[field_name] )})
def _location(self, obj): """ Get location of the `obj` Arguments: :obj: self.Model instance. """ field_name = self.clean_id_name return self.request.route_url(self._resource.uid, **{self._resource.id_name: getattr(obj, field_name)})
def get_jid(jid): ''' Return the information returned when the specified job id was executed ''' log.debug('sdstack_etcd returner <get_jid> called jid: %s', jid) ret = {} client, path = _get_conn(__opts__) items = client.get('/'.join((path, 'jobs', jid))) for item in items.children: if str(item.key).endswith('.load.p'): continue comps = str(item.key).split('/') data = client.get('/'.join((path, 'jobs', jid, comps[-1], 'return'))).value ret[comps[-1]] = {'return': salt.utils.json.loads(data)} return ret
def function[get_jid, parameter[jid]]: constant[ Return the information returned when the specified job id was executed ] call[name[log].debug, parameter[constant[sdstack_etcd returner <get_jid> called jid: %s], name[jid]]] variable[ret] assign[=] dictionary[[], []] <ast.Tuple object at 0x7da1b1c22e90> assign[=] call[name[_get_conn], parameter[name[__opts__]]] variable[items] assign[=] call[name[client].get, parameter[call[constant[/].join, parameter[tuple[[<ast.Name object at 0x7da1b1c233d0>, <ast.Constant object at 0x7da1b1c207f0>, <ast.Name object at 0x7da1b1c22320>]]]]]] for taget[name[item]] in starred[name[items].children] begin[:] if call[call[name[str], parameter[name[item].key]].endswith, parameter[constant[.load.p]]] begin[:] continue variable[comps] assign[=] call[call[name[str], parameter[name[item].key]].split, parameter[constant[/]]] variable[data] assign[=] call[name[client].get, parameter[call[constant[/].join, parameter[tuple[[<ast.Name object at 0x7da1b1c21960>, <ast.Constant object at 0x7da1b1c23df0>, <ast.Name object at 0x7da1b1c23760>, <ast.Subscript object at 0x7da1b1c220e0>, <ast.Constant object at 0x7da1b1c23970>]]]]]].value call[name[ret]][call[name[comps]][<ast.UnaryOp object at 0x7da1b1c234f0>]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c23b80>], [<ast.Call object at 0x7da1b1c22770>]] return[name[ret]]
keyword[def] identifier[get_jid] ( identifier[jid] ): literal[string] identifier[log] . identifier[debug] ( literal[string] , identifier[jid] ) identifier[ret] ={} identifier[client] , identifier[path] = identifier[_get_conn] ( identifier[__opts__] ) identifier[items] = identifier[client] . identifier[get] ( literal[string] . identifier[join] (( identifier[path] , literal[string] , identifier[jid] ))) keyword[for] identifier[item] keyword[in] identifier[items] . identifier[children] : keyword[if] identifier[str] ( identifier[item] . identifier[key] ). identifier[endswith] ( literal[string] ): keyword[continue] identifier[comps] = identifier[str] ( identifier[item] . identifier[key] ). identifier[split] ( literal[string] ) identifier[data] = identifier[client] . identifier[get] ( literal[string] . identifier[join] (( identifier[path] , literal[string] , identifier[jid] , identifier[comps] [- literal[int] ], literal[string] ))). identifier[value] identifier[ret] [ identifier[comps] [- literal[int] ]]={ literal[string] : identifier[salt] . identifier[utils] . identifier[json] . identifier[loads] ( identifier[data] )} keyword[return] identifier[ret]
def get_jid(jid): """ Return the information returned when the specified job id was executed """ log.debug('sdstack_etcd returner <get_jid> called jid: %s', jid) ret = {} (client, path) = _get_conn(__opts__) items = client.get('/'.join((path, 'jobs', jid))) for item in items.children: if str(item.key).endswith('.load.p'): continue # depends on [control=['if'], data=[]] comps = str(item.key).split('/') data = client.get('/'.join((path, 'jobs', jid, comps[-1], 'return'))).value ret[comps[-1]] = {'return': salt.utils.json.loads(data)} # depends on [control=['for'], data=['item']] return ret
def get(self, client_id, client_secret, code, redirect_uri): """Exchange an Authorization Code for an Access Token. Exchange an Authorization Code for an Access Token that can be used to invoke the APIs. Args: client_id(basestring): Provided when you created your integration. client_secret(basestring): Provided when you created your integration. code(basestring): The Authorization Code provided by the user OAuth process. redirect_uri(basestring): The redirect URI used in the user OAuth process. Returns: AccessToken: An AccessToken object with the access token provided by the Webex Teams cloud. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error. """ check_type(client_id, basestring, may_be_none=False) check_type(client_secret, basestring, may_be_none=False) check_type(code, basestring, may_be_none=False) check_type(redirect_uri, basestring, may_be_none=False) post_data = dict_from_items_with_values( grant_type="authorization_code", client_id=client_id, client_secret=client_secret, code=code, redirect_uri=redirect_uri, ) # API request response = requests.post(self._endpoint_url, data=post_data, **self._request_kwargs) check_response_code(response, EXPECTED_RESPONSE_CODE['POST']) json_data = extract_and_parse_json(response) # Return a access_token object created from the response JSON data return self._object_factory(OBJECT_TYPE, json_data)
def function[get, parameter[self, client_id, client_secret, code, redirect_uri]]: constant[Exchange an Authorization Code for an Access Token. Exchange an Authorization Code for an Access Token that can be used to invoke the APIs. Args: client_id(basestring): Provided when you created your integration. client_secret(basestring): Provided when you created your integration. code(basestring): The Authorization Code provided by the user OAuth process. redirect_uri(basestring): The redirect URI used in the user OAuth process. Returns: AccessToken: An AccessToken object with the access token provided by the Webex Teams cloud. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error. ] call[name[check_type], parameter[name[client_id], name[basestring]]] call[name[check_type], parameter[name[client_secret], name[basestring]]] call[name[check_type], parameter[name[code], name[basestring]]] call[name[check_type], parameter[name[redirect_uri], name[basestring]]] variable[post_data] assign[=] call[name[dict_from_items_with_values], parameter[]] variable[response] assign[=] call[name[requests].post, parameter[name[self]._endpoint_url]] call[name[check_response_code], parameter[name[response], call[name[EXPECTED_RESPONSE_CODE]][constant[POST]]]] variable[json_data] assign[=] call[name[extract_and_parse_json], parameter[name[response]]] return[call[name[self]._object_factory, parameter[name[OBJECT_TYPE], name[json_data]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[client_id] , identifier[client_secret] , identifier[code] , identifier[redirect_uri] ): literal[string] identifier[check_type] ( identifier[client_id] , identifier[basestring] , identifier[may_be_none] = keyword[False] ) identifier[check_type] ( identifier[client_secret] , identifier[basestring] , identifier[may_be_none] = keyword[False] ) identifier[check_type] ( identifier[code] , identifier[basestring] , identifier[may_be_none] = keyword[False] ) identifier[check_type] ( identifier[redirect_uri] , identifier[basestring] , identifier[may_be_none] = keyword[False] ) identifier[post_data] = identifier[dict_from_items_with_values] ( identifier[grant_type] = literal[string] , identifier[client_id] = identifier[client_id] , identifier[client_secret] = identifier[client_secret] , identifier[code] = identifier[code] , identifier[redirect_uri] = identifier[redirect_uri] , ) identifier[response] = identifier[requests] . identifier[post] ( identifier[self] . identifier[_endpoint_url] , identifier[data] = identifier[post_data] , ** identifier[self] . identifier[_request_kwargs] ) identifier[check_response_code] ( identifier[response] , identifier[EXPECTED_RESPONSE_CODE] [ literal[string] ]) identifier[json_data] = identifier[extract_and_parse_json] ( identifier[response] ) keyword[return] identifier[self] . identifier[_object_factory] ( identifier[OBJECT_TYPE] , identifier[json_data] )
def get(self, client_id, client_secret, code, redirect_uri): """Exchange an Authorization Code for an Access Token. Exchange an Authorization Code for an Access Token that can be used to invoke the APIs. Args: client_id(basestring): Provided when you created your integration. client_secret(basestring): Provided when you created your integration. code(basestring): The Authorization Code provided by the user OAuth process. redirect_uri(basestring): The redirect URI used in the user OAuth process. Returns: AccessToken: An AccessToken object with the access token provided by the Webex Teams cloud. Raises: TypeError: If the parameter types are incorrect. ApiError: If the Webex Teams cloud returns an error. """ check_type(client_id, basestring, may_be_none=False) check_type(client_secret, basestring, may_be_none=False) check_type(code, basestring, may_be_none=False) check_type(redirect_uri, basestring, may_be_none=False) post_data = dict_from_items_with_values(grant_type='authorization_code', client_id=client_id, client_secret=client_secret, code=code, redirect_uri=redirect_uri) # API request response = requests.post(self._endpoint_url, data=post_data, **self._request_kwargs) check_response_code(response, EXPECTED_RESPONSE_CODE['POST']) json_data = extract_and_parse_json(response) # Return a access_token object created from the response JSON data return self._object_factory(OBJECT_TYPE, json_data)
def __bind(self): ''' Binds the reply server ''' if self.log_queue is not None: salt.log.setup.set_multiprocessing_logging_queue(self.log_queue) if self.log_queue_level is not None: salt.log.setup.set_multiprocessing_logging_level(self.log_queue_level) salt.log.setup.setup_multiprocessing_logging(self.log_queue) if self.secrets is not None: SMaster.secrets = self.secrets dfn = os.path.join(self.opts['cachedir'], '.dfn') if os.path.isfile(dfn): try: if salt.utils.platform.is_windows() and not os.access(dfn, os.W_OK): # Cannot delete read-only files on Windows. os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR) os.remove(dfn) except os.error: pass # Wait for kill should be less then parent's ProcessManager. self.process_manager = salt.utils.process.ProcessManager(name='ReqServer_ProcessManager', wait_for_kill=1) req_channels = [] tcp_only = True for transport, opts in iter_transport_opts(self.opts): chan = salt.transport.server.ReqServerChannel.factory(opts) chan.pre_fork(self.process_manager) req_channels.append(chan) if transport != 'tcp': tcp_only = False kwargs = {} if salt.utils.platform.is_windows(): kwargs['log_queue'] = self.log_queue kwargs['log_queue_level'] = self.log_queue_level # Use one worker thread if only the TCP transport is set up on # Windows and we are using Python 2. There is load balancer # support on Windows for the TCP transport when using Python 3. if tcp_only and six.PY2 and int(self.opts['worker_threads']) != 1: log.warning('TCP transport supports only 1 worker on Windows ' 'when using Python 2.') self.opts['worker_threads'] = 1 # Reset signals to default ones before adding processes to the process # manager. 
We don't want the processes being started to inherit those # signal handlers with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): for ind in range(int(self.opts['worker_threads'])): name = 'MWorker-{0}'.format(ind) self.process_manager.add_process(MWorker, args=(self.opts, self.master_key, self.key, req_channels, name), kwargs=kwargs, name=name) self.process_manager.run()
def function[__bind, parameter[self]]: constant[ Binds the reply server ] if compare[name[self].log_queue is_not constant[None]] begin[:] call[name[salt].log.setup.set_multiprocessing_logging_queue, parameter[name[self].log_queue]] if compare[name[self].log_queue_level is_not constant[None]] begin[:] call[name[salt].log.setup.set_multiprocessing_logging_level, parameter[name[self].log_queue_level]] call[name[salt].log.setup.setup_multiprocessing_logging, parameter[name[self].log_queue]] if compare[name[self].secrets is_not constant[None]] begin[:] name[SMaster].secrets assign[=] name[self].secrets variable[dfn] assign[=] call[name[os].path.join, parameter[call[name[self].opts][constant[cachedir]], constant[.dfn]]] if call[name[os].path.isfile, parameter[name[dfn]]] begin[:] <ast.Try object at 0x7da18fe93790> name[self].process_manager assign[=] call[name[salt].utils.process.ProcessManager, parameter[]] variable[req_channels] assign[=] list[[]] variable[tcp_only] assign[=] constant[True] for taget[tuple[[<ast.Name object at 0x7da18fe90760>, <ast.Name object at 0x7da18fe93f40>]]] in starred[call[name[iter_transport_opts], parameter[name[self].opts]]] begin[:] variable[chan] assign[=] call[name[salt].transport.server.ReqServerChannel.factory, parameter[name[opts]]] call[name[chan].pre_fork, parameter[name[self].process_manager]] call[name[req_channels].append, parameter[name[chan]]] if compare[name[transport] not_equal[!=] constant[tcp]] begin[:] variable[tcp_only] assign[=] constant[False] variable[kwargs] assign[=] dictionary[[], []] if call[name[salt].utils.platform.is_windows, parameter[]] begin[:] call[name[kwargs]][constant[log_queue]] assign[=] name[self].log_queue call[name[kwargs]][constant[log_queue_level]] assign[=] name[self].log_queue_level if <ast.BoolOp object at 0x7da18fe92650> begin[:] call[name[log].warning, parameter[constant[TCP transport supports only 1 worker on Windows when using Python 2.]]] call[name[self].opts][constant[worker_threads]] 
assign[=] constant[1] with call[name[salt].utils.process.default_signals, parameter[name[signal].SIGINT, name[signal].SIGTERM]] begin[:] for taget[name[ind]] in starred[call[name[range], parameter[call[name[int], parameter[call[name[self].opts][constant[worker_threads]]]]]]] begin[:] variable[name] assign[=] call[constant[MWorker-{0}].format, parameter[name[ind]]] call[name[self].process_manager.add_process, parameter[name[MWorker]]] call[name[self].process_manager.run, parameter[]]
keyword[def] identifier[__bind] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[log_queue] keyword[is] keyword[not] keyword[None] : identifier[salt] . identifier[log] . identifier[setup] . identifier[set_multiprocessing_logging_queue] ( identifier[self] . identifier[log_queue] ) keyword[if] identifier[self] . identifier[log_queue_level] keyword[is] keyword[not] keyword[None] : identifier[salt] . identifier[log] . identifier[setup] . identifier[set_multiprocessing_logging_level] ( identifier[self] . identifier[log_queue_level] ) identifier[salt] . identifier[log] . identifier[setup] . identifier[setup_multiprocessing_logging] ( identifier[self] . identifier[log_queue] ) keyword[if] identifier[self] . identifier[secrets] keyword[is] keyword[not] keyword[None] : identifier[SMaster] . identifier[secrets] = identifier[self] . identifier[secrets] identifier[dfn] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[opts] [ literal[string] ], literal[string] ) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[dfn] ): keyword[try] : keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] () keyword[and] keyword[not] identifier[os] . identifier[access] ( identifier[dfn] , identifier[os] . identifier[W_OK] ): identifier[os] . identifier[chmod] ( identifier[dfn] , identifier[stat] . identifier[S_IRUSR] | identifier[stat] . identifier[S_IWUSR] ) identifier[os] . identifier[remove] ( identifier[dfn] ) keyword[except] identifier[os] . identifier[error] : keyword[pass] identifier[self] . identifier[process_manager] = identifier[salt] . identifier[utils] . identifier[process] . 
identifier[ProcessManager] ( identifier[name] = literal[string] , identifier[wait_for_kill] = literal[int] ) identifier[req_channels] =[] identifier[tcp_only] = keyword[True] keyword[for] identifier[transport] , identifier[opts] keyword[in] identifier[iter_transport_opts] ( identifier[self] . identifier[opts] ): identifier[chan] = identifier[salt] . identifier[transport] . identifier[server] . identifier[ReqServerChannel] . identifier[factory] ( identifier[opts] ) identifier[chan] . identifier[pre_fork] ( identifier[self] . identifier[process_manager] ) identifier[req_channels] . identifier[append] ( identifier[chan] ) keyword[if] identifier[transport] != literal[string] : identifier[tcp_only] = keyword[False] identifier[kwargs] ={} keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] (): identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[log_queue] identifier[kwargs] [ literal[string] ]= identifier[self] . identifier[log_queue_level] keyword[if] identifier[tcp_only] keyword[and] identifier[six] . identifier[PY2] keyword[and] identifier[int] ( identifier[self] . identifier[opts] [ literal[string] ])!= literal[int] : identifier[log] . identifier[warning] ( literal[string] literal[string] ) identifier[self] . identifier[opts] [ literal[string] ]= literal[int] keyword[with] identifier[salt] . identifier[utils] . identifier[process] . identifier[default_signals] ( identifier[signal] . identifier[SIGINT] , identifier[signal] . identifier[SIGTERM] ): keyword[for] identifier[ind] keyword[in] identifier[range] ( identifier[int] ( identifier[self] . identifier[opts] [ literal[string] ])): identifier[name] = literal[string] . identifier[format] ( identifier[ind] ) identifier[self] . identifier[process_manager] . identifier[add_process] ( identifier[MWorker] , identifier[args] =( identifier[self] . identifier[opts] , identifier[self] . identifier[master_key] , identifier[self] . 
identifier[key] , identifier[req_channels] , identifier[name] ), identifier[kwargs] = identifier[kwargs] , identifier[name] = identifier[name] ) identifier[self] . identifier[process_manager] . identifier[run] ()
def __bind(self): """ Binds the reply server """ if self.log_queue is not None: salt.log.setup.set_multiprocessing_logging_queue(self.log_queue) # depends on [control=['if'], data=[]] if self.log_queue_level is not None: salt.log.setup.set_multiprocessing_logging_level(self.log_queue_level) # depends on [control=['if'], data=[]] salt.log.setup.setup_multiprocessing_logging(self.log_queue) if self.secrets is not None: SMaster.secrets = self.secrets # depends on [control=['if'], data=[]] dfn = os.path.join(self.opts['cachedir'], '.dfn') if os.path.isfile(dfn): try: if salt.utils.platform.is_windows() and (not os.access(dfn, os.W_OK)): # Cannot delete read-only files on Windows. os.chmod(dfn, stat.S_IRUSR | stat.S_IWUSR) # depends on [control=['if'], data=[]] os.remove(dfn) # depends on [control=['try'], data=[]] except os.error: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Wait for kill should be less then parent's ProcessManager. self.process_manager = salt.utils.process.ProcessManager(name='ReqServer_ProcessManager', wait_for_kill=1) req_channels = [] tcp_only = True for (transport, opts) in iter_transport_opts(self.opts): chan = salt.transport.server.ReqServerChannel.factory(opts) chan.pre_fork(self.process_manager) req_channels.append(chan) if transport != 'tcp': tcp_only = False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] kwargs = {} if salt.utils.platform.is_windows(): kwargs['log_queue'] = self.log_queue kwargs['log_queue_level'] = self.log_queue_level # Use one worker thread if only the TCP transport is set up on # Windows and we are using Python 2. There is load balancer # support on Windows for the TCP transport when using Python 3. 
if tcp_only and six.PY2 and (int(self.opts['worker_threads']) != 1): log.warning('TCP transport supports only 1 worker on Windows when using Python 2.') self.opts['worker_threads'] = 1 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Reset signals to default ones before adding processes to the process # manager. We don't want the processes being started to inherit those # signal handlers with salt.utils.process.default_signals(signal.SIGINT, signal.SIGTERM): for ind in range(int(self.opts['worker_threads'])): name = 'MWorker-{0}'.format(ind) self.process_manager.add_process(MWorker, args=(self.opts, self.master_key, self.key, req_channels, name), kwargs=kwargs, name=name) # depends on [control=['for'], data=['ind']] # depends on [control=['with'], data=[]] self.process_manager.run()
def cli(env, zone_id, by_record, by_id, data, ttl): """Update DNS record.""" manager = SoftLayer.DNSManager(env.client) zone_id = helpers.resolve_id(manager.resolve_ids, zone_id, name='zone') results = manager.get_records(zone_id, host=by_record) for result in results: if by_id and str(result['id']) != by_id: continue result['data'] = data or result['data'] result['ttl'] = ttl or result['ttl'] manager.edit_record(result)
def function[cli, parameter[env, zone_id, by_record, by_id, data, ttl]]: constant[Update DNS record.] variable[manager] assign[=] call[name[SoftLayer].DNSManager, parameter[name[env].client]] variable[zone_id] assign[=] call[name[helpers].resolve_id, parameter[name[manager].resolve_ids, name[zone_id]]] variable[results] assign[=] call[name[manager].get_records, parameter[name[zone_id]]] for taget[name[result]] in starred[name[results]] begin[:] if <ast.BoolOp object at 0x7da20e9b1180> begin[:] continue call[name[result]][constant[data]] assign[=] <ast.BoolOp object at 0x7da20e9b2440> call[name[result]][constant[ttl]] assign[=] <ast.BoolOp object at 0x7da20e9b2c50> call[name[manager].edit_record, parameter[name[result]]]
keyword[def] identifier[cli] ( identifier[env] , identifier[zone_id] , identifier[by_record] , identifier[by_id] , identifier[data] , identifier[ttl] ): literal[string] identifier[manager] = identifier[SoftLayer] . identifier[DNSManager] ( identifier[env] . identifier[client] ) identifier[zone_id] = identifier[helpers] . identifier[resolve_id] ( identifier[manager] . identifier[resolve_ids] , identifier[zone_id] , identifier[name] = literal[string] ) identifier[results] = identifier[manager] . identifier[get_records] ( identifier[zone_id] , identifier[host] = identifier[by_record] ) keyword[for] identifier[result] keyword[in] identifier[results] : keyword[if] identifier[by_id] keyword[and] identifier[str] ( identifier[result] [ literal[string] ])!= identifier[by_id] : keyword[continue] identifier[result] [ literal[string] ]= identifier[data] keyword[or] identifier[result] [ literal[string] ] identifier[result] [ literal[string] ]= identifier[ttl] keyword[or] identifier[result] [ literal[string] ] identifier[manager] . identifier[edit_record] ( identifier[result] )
def cli(env, zone_id, by_record, by_id, data, ttl): """Update DNS record.""" manager = SoftLayer.DNSManager(env.client) zone_id = helpers.resolve_id(manager.resolve_ids, zone_id, name='zone') results = manager.get_records(zone_id, host=by_record) for result in results: if by_id and str(result['id']) != by_id: continue # depends on [control=['if'], data=[]] result['data'] = data or result['data'] result['ttl'] = ttl or result['ttl'] manager.edit_record(result) # depends on [control=['for'], data=['result']]
def _add_remover(self): """ Add a ``_remove_x()`` method to the element class for this child element. """ def _remove_child(obj): obj.remove_all(self._nsptagname) _remove_child.__doc__ = ( 'Remove all ``<%s>`` child elements.' ) % self._nsptagname self._add_to_class(self._remove_method_name, _remove_child)
def function[_add_remover, parameter[self]]: constant[ Add a ``_remove_x()`` method to the element class for this child element. ] def function[_remove_child, parameter[obj]]: call[name[obj].remove_all, parameter[name[self]._nsptagname]] name[_remove_child].__doc__ assign[=] binary_operation[constant[Remove all ``<%s>`` child elements.] <ast.Mod object at 0x7da2590d6920> name[self]._nsptagname] call[name[self]._add_to_class, parameter[name[self]._remove_method_name, name[_remove_child]]]
keyword[def] identifier[_add_remover] ( identifier[self] ): literal[string] keyword[def] identifier[_remove_child] ( identifier[obj] ): identifier[obj] . identifier[remove_all] ( identifier[self] . identifier[_nsptagname] ) identifier[_remove_child] . identifier[__doc__] =( literal[string] )% identifier[self] . identifier[_nsptagname] identifier[self] . identifier[_add_to_class] ( identifier[self] . identifier[_remove_method_name] , identifier[_remove_child] )
def _add_remover(self): """ Add a ``_remove_x()`` method to the element class for this child element. """ def _remove_child(obj): obj.remove_all(self._nsptagname) _remove_child.__doc__ = 'Remove all ``<%s>`` child elements.' % self._nsptagname self._add_to_class(self._remove_method_name, _remove_child)
def flatten(nested, containers=(list, tuple)): """ Flatten a nested list by yielding its scalar items. """ for item in nested: if hasattr(item, "next") or isinstance(item, containers): for subitem in flatten(item): yield subitem else: yield item
def function[flatten, parameter[nested, containers]]: constant[ Flatten a nested list by yielding its scalar items. ] for taget[name[item]] in starred[name[nested]] begin[:] if <ast.BoolOp object at 0x7da20c6c48b0> begin[:] for taget[name[subitem]] in starred[call[name[flatten], parameter[name[item]]]] begin[:] <ast.Yield object at 0x7da20c6c6290>
keyword[def] identifier[flatten] ( identifier[nested] , identifier[containers] =( identifier[list] , identifier[tuple] )): literal[string] keyword[for] identifier[item] keyword[in] identifier[nested] : keyword[if] identifier[hasattr] ( identifier[item] , literal[string] ) keyword[or] identifier[isinstance] ( identifier[item] , identifier[containers] ): keyword[for] identifier[subitem] keyword[in] identifier[flatten] ( identifier[item] ): keyword[yield] identifier[subitem] keyword[else] : keyword[yield] identifier[item]
def flatten(nested, containers=(list, tuple)): """ Flatten a nested list by yielding its scalar items. """ for item in nested: if hasattr(item, 'next') or isinstance(item, containers): for subitem in flatten(item): yield subitem # depends on [control=['for'], data=['subitem']] # depends on [control=['if'], data=[]] else: yield item # depends on [control=['for'], data=['item']]
def database(self): """ Returns the database associated with this tree widget. :return <orb.Database> || None """ if self._database: return self._database if self._recordSet is not None: return self._recordSet.database() else: return Orb.instance().database()
def function[database, parameter[self]]: constant[ Returns the database associated with this tree widget. :return <orb.Database> || None ] if name[self]._database begin[:] return[name[self]._database] if compare[name[self]._recordSet is_not constant[None]] begin[:] return[call[name[self]._recordSet.database, parameter[]]]
keyword[def] identifier[database] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_database] : keyword[return] identifier[self] . identifier[_database] keyword[if] identifier[self] . identifier[_recordSet] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_recordSet] . identifier[database] () keyword[else] : keyword[return] identifier[Orb] . identifier[instance] (). identifier[database] ()
def database(self): """ Returns the database associated with this tree widget. :return <orb.Database> || None """ if self._database: return self._database # depends on [control=['if'], data=[]] if self._recordSet is not None: return self._recordSet.database() # depends on [control=['if'], data=[]] else: return Orb.instance().database()
def destination(self, point, bearing, distance=None): """ TODO docs. """ point = Point(point) lat1 = units.radians(degrees=point.latitude) lng1 = units.radians(degrees=point.longitude) bearing = units.radians(degrees=bearing) if distance is None: distance = self if isinstance(distance, Distance): distance = distance.kilometers ellipsoid = self.ELLIPSOID if isinstance(ellipsoid, string_compare): ellipsoid = ELLIPSOIDS[ellipsoid] major, minor, f = ellipsoid tan_reduced1 = (1 - f) * tan(lat1) cos_reduced1 = 1 / sqrt(1 + tan_reduced1 ** 2) sin_reduced1 = tan_reduced1 * cos_reduced1 sin_bearing, cos_bearing = sin(bearing), cos(bearing) sigma1 = atan2(tan_reduced1, cos_bearing) sin_alpha = cos_reduced1 * sin_bearing cos_sq_alpha = 1 - sin_alpha ** 2 u_sq = cos_sq_alpha * (major ** 2 - minor ** 2) / minor ** 2 A = 1 + u_sq / 16384. * ( 4096 + u_sq * (-768 + u_sq * (320 - 175 * u_sq)) ) B = u_sq / 1024. * (256 + u_sq * (-128 + u_sq * (74 - 47 * u_sq))) sigma = distance / (minor * A) sigma_prime = 2 * pi while abs(sigma - sigma_prime) > 10e-12: cos2_sigma_m = cos(2 * sigma1 + sigma) sin_sigma, cos_sigma = sin(sigma), cos(sigma) delta_sigma = B * sin_sigma * ( cos2_sigma_m + B / 4. * ( cos_sigma * ( -1 + 2 * cos2_sigma_m ** 2 ) - B / 6. * cos2_sigma_m * ( -3 + 4 * sin_sigma ** 2 ) * ( -3 + 4 * cos2_sigma_m ** 2 ) ) ) sigma_prime = sigma sigma = distance / (minor * A) + delta_sigma sin_sigma, cos_sigma = sin(sigma), cos(sigma) lat2 = atan2( sin_reduced1 * cos_sigma + cos_reduced1 * sin_sigma * cos_bearing, (1 - f) * sqrt( sin_alpha ** 2 + ( sin_reduced1 * sin_sigma - cos_reduced1 * cos_sigma * cos_bearing ) ** 2 ) ) lambda_lng = atan2( sin_sigma * sin_bearing, cos_reduced1 * cos_sigma - sin_reduced1 * sin_sigma * cos_bearing ) C = f / 16. 
* cos_sq_alpha * (4 + f * (4 - 3 * cos_sq_alpha)) delta_lng = ( lambda_lng - (1 - C) * f * sin_alpha * ( sigma + C * sin_sigma * ( cos2_sigma_m + C * cos_sigma * ( -1 + 2 * cos2_sigma_m ** 2 ) ) ) ) lng2 = lng1 + delta_lng return Point(units.degrees(radians=lat2), units.degrees(radians=lng2))
def function[destination, parameter[self, point, bearing, distance]]: constant[ TODO docs. ] variable[point] assign[=] call[name[Point], parameter[name[point]]] variable[lat1] assign[=] call[name[units].radians, parameter[]] variable[lng1] assign[=] call[name[units].radians, parameter[]] variable[bearing] assign[=] call[name[units].radians, parameter[]] if compare[name[distance] is constant[None]] begin[:] variable[distance] assign[=] name[self] if call[name[isinstance], parameter[name[distance], name[Distance]]] begin[:] variable[distance] assign[=] name[distance].kilometers variable[ellipsoid] assign[=] name[self].ELLIPSOID if call[name[isinstance], parameter[name[ellipsoid], name[string_compare]]] begin[:] variable[ellipsoid] assign[=] call[name[ELLIPSOIDS]][name[ellipsoid]] <ast.Tuple object at 0x7da20e9b1d50> assign[=] name[ellipsoid] variable[tan_reduced1] assign[=] binary_operation[binary_operation[constant[1] - name[f]] * call[name[tan], parameter[name[lat1]]]] variable[cos_reduced1] assign[=] binary_operation[constant[1] / call[name[sqrt], parameter[binary_operation[constant[1] + binary_operation[name[tan_reduced1] ** constant[2]]]]]] variable[sin_reduced1] assign[=] binary_operation[name[tan_reduced1] * name[cos_reduced1]] <ast.Tuple object at 0x7da20c794700> assign[=] tuple[[<ast.Call object at 0x7da20c795ea0>, <ast.Call object at 0x7da20c795480>]] variable[sigma1] assign[=] call[name[atan2], parameter[name[tan_reduced1], name[cos_bearing]]] variable[sin_alpha] assign[=] binary_operation[name[cos_reduced1] * name[sin_bearing]] variable[cos_sq_alpha] assign[=] binary_operation[constant[1] - binary_operation[name[sin_alpha] ** constant[2]]] variable[u_sq] assign[=] binary_operation[binary_operation[name[cos_sq_alpha] * binary_operation[binary_operation[name[major] ** constant[2]] - binary_operation[name[minor] ** constant[2]]]] / binary_operation[name[minor] ** constant[2]]] variable[A] assign[=] binary_operation[constant[1] + 
binary_operation[binary_operation[name[u_sq] / constant[16384.0]] * binary_operation[constant[4096] + binary_operation[name[u_sq] * binary_operation[<ast.UnaryOp object at 0x7da20c7942e0> + binary_operation[name[u_sq] * binary_operation[constant[320] - binary_operation[constant[175] * name[u_sq]]]]]]]]] variable[B] assign[=] binary_operation[binary_operation[name[u_sq] / constant[1024.0]] * binary_operation[constant[256] + binary_operation[name[u_sq] * binary_operation[<ast.UnaryOp object at 0x7da20c794610> + binary_operation[name[u_sq] * binary_operation[constant[74] - binary_operation[constant[47] * name[u_sq]]]]]]]] variable[sigma] assign[=] binary_operation[name[distance] / binary_operation[name[minor] * name[A]]] variable[sigma_prime] assign[=] binary_operation[constant[2] * name[pi]] while compare[call[name[abs], parameter[binary_operation[name[sigma] - name[sigma_prime]]]] greater[>] constant[1e-11]] begin[:] variable[cos2_sigma_m] assign[=] call[name[cos], parameter[binary_operation[binary_operation[constant[2] * name[sigma1]] + name[sigma]]]] <ast.Tuple object at 0x7da20c796aa0> assign[=] tuple[[<ast.Call object at 0x7da20c794a90>, <ast.Call object at 0x7da20c7950c0>]] variable[delta_sigma] assign[=] binary_operation[binary_operation[name[B] * name[sin_sigma]] * binary_operation[name[cos2_sigma_m] + binary_operation[binary_operation[name[B] / constant[4.0]] * binary_operation[binary_operation[name[cos_sigma] * binary_operation[<ast.UnaryOp object at 0x7da20c795f90> + binary_operation[constant[2] * binary_operation[name[cos2_sigma_m] ** constant[2]]]]] - binary_operation[binary_operation[binary_operation[binary_operation[name[B] / constant[6.0]] * name[cos2_sigma_m]] * binary_operation[<ast.UnaryOp object at 0x7da20c794f10> + binary_operation[constant[4] * binary_operation[name[sin_sigma] ** constant[2]]]]] * binary_operation[<ast.UnaryOp object at 0x7da20c795c60> + binary_operation[constant[4] * binary_operation[name[cos2_sigma_m] ** constant[2]]]]]]]]] 
variable[sigma_prime] assign[=] name[sigma] variable[sigma] assign[=] binary_operation[binary_operation[name[distance] / binary_operation[name[minor] * name[A]]] + name[delta_sigma]] <ast.Tuple object at 0x7da20e9571f0> assign[=] tuple[[<ast.Call object at 0x7da20e957ca0>, <ast.Call object at 0x7da20e954df0>]] variable[lat2] assign[=] call[name[atan2], parameter[binary_operation[binary_operation[name[sin_reduced1] * name[cos_sigma]] + binary_operation[binary_operation[name[cos_reduced1] * name[sin_sigma]] * name[cos_bearing]]], binary_operation[binary_operation[constant[1] - name[f]] * call[name[sqrt], parameter[binary_operation[binary_operation[name[sin_alpha] ** constant[2]] + binary_operation[binary_operation[binary_operation[name[sin_reduced1] * name[sin_sigma]] - binary_operation[binary_operation[name[cos_reduced1] * name[cos_sigma]] * name[cos_bearing]]] ** constant[2]]]]]]]] variable[lambda_lng] assign[=] call[name[atan2], parameter[binary_operation[name[sin_sigma] * name[sin_bearing]], binary_operation[binary_operation[name[cos_reduced1] * name[cos_sigma]] - binary_operation[binary_operation[name[sin_reduced1] * name[sin_sigma]] * name[cos_bearing]]]]] variable[C] assign[=] binary_operation[binary_operation[binary_operation[name[f] / constant[16.0]] * name[cos_sq_alpha]] * binary_operation[constant[4] + binary_operation[name[f] * binary_operation[constant[4] - binary_operation[constant[3] * name[cos_sq_alpha]]]]]] variable[delta_lng] assign[=] binary_operation[name[lambda_lng] - binary_operation[binary_operation[binary_operation[binary_operation[constant[1] - name[C]] * name[f]] * name[sin_alpha]] * binary_operation[name[sigma] + binary_operation[binary_operation[name[C] * name[sin_sigma]] * binary_operation[name[cos2_sigma_m] + binary_operation[binary_operation[name[C] * name[cos_sigma]] * binary_operation[<ast.UnaryOp object at 0x7da20cabecb0> + binary_operation[constant[2] * binary_operation[name[cos2_sigma_m] ** constant[2]]]]]]]]]] variable[lng2] 
assign[=] binary_operation[name[lng1] + name[delta_lng]] return[call[name[Point], parameter[call[name[units].degrees, parameter[]], call[name[units].degrees, parameter[]]]]]
keyword[def] identifier[destination] ( identifier[self] , identifier[point] , identifier[bearing] , identifier[distance] = keyword[None] ): literal[string] identifier[point] = identifier[Point] ( identifier[point] ) identifier[lat1] = identifier[units] . identifier[radians] ( identifier[degrees] = identifier[point] . identifier[latitude] ) identifier[lng1] = identifier[units] . identifier[radians] ( identifier[degrees] = identifier[point] . identifier[longitude] ) identifier[bearing] = identifier[units] . identifier[radians] ( identifier[degrees] = identifier[bearing] ) keyword[if] identifier[distance] keyword[is] keyword[None] : identifier[distance] = identifier[self] keyword[if] identifier[isinstance] ( identifier[distance] , identifier[Distance] ): identifier[distance] = identifier[distance] . identifier[kilometers] identifier[ellipsoid] = identifier[self] . identifier[ELLIPSOID] keyword[if] identifier[isinstance] ( identifier[ellipsoid] , identifier[string_compare] ): identifier[ellipsoid] = identifier[ELLIPSOIDS] [ identifier[ellipsoid] ] identifier[major] , identifier[minor] , identifier[f] = identifier[ellipsoid] identifier[tan_reduced1] =( literal[int] - identifier[f] )* identifier[tan] ( identifier[lat1] ) identifier[cos_reduced1] = literal[int] / identifier[sqrt] ( literal[int] + identifier[tan_reduced1] ** literal[int] ) identifier[sin_reduced1] = identifier[tan_reduced1] * identifier[cos_reduced1] identifier[sin_bearing] , identifier[cos_bearing] = identifier[sin] ( identifier[bearing] ), identifier[cos] ( identifier[bearing] ) identifier[sigma1] = identifier[atan2] ( identifier[tan_reduced1] , identifier[cos_bearing] ) identifier[sin_alpha] = identifier[cos_reduced1] * identifier[sin_bearing] identifier[cos_sq_alpha] = literal[int] - identifier[sin_alpha] ** literal[int] identifier[u_sq] = identifier[cos_sq_alpha] *( identifier[major] ** literal[int] - identifier[minor] ** literal[int] )/ identifier[minor] ** literal[int] identifier[A] = literal[int] + 
identifier[u_sq] / literal[int] *( literal[int] + identifier[u_sq] *(- literal[int] + identifier[u_sq] *( literal[int] - literal[int] * identifier[u_sq] )) ) identifier[B] = identifier[u_sq] / literal[int] *( literal[int] + identifier[u_sq] *(- literal[int] + identifier[u_sq] *( literal[int] - literal[int] * identifier[u_sq] ))) identifier[sigma] = identifier[distance] /( identifier[minor] * identifier[A] ) identifier[sigma_prime] = literal[int] * identifier[pi] keyword[while] identifier[abs] ( identifier[sigma] - identifier[sigma_prime] )> literal[int] : identifier[cos2_sigma_m] = identifier[cos] ( literal[int] * identifier[sigma1] + identifier[sigma] ) identifier[sin_sigma] , identifier[cos_sigma] = identifier[sin] ( identifier[sigma] ), identifier[cos] ( identifier[sigma] ) identifier[delta_sigma] = identifier[B] * identifier[sin_sigma] *( identifier[cos2_sigma_m] + identifier[B] / literal[int] *( identifier[cos_sigma] *( - literal[int] + literal[int] * identifier[cos2_sigma_m] ** literal[int] )- identifier[B] / literal[int] * identifier[cos2_sigma_m] *( - literal[int] + literal[int] * identifier[sin_sigma] ** literal[int] )*( - literal[int] + literal[int] * identifier[cos2_sigma_m] ** literal[int] ) ) ) identifier[sigma_prime] = identifier[sigma] identifier[sigma] = identifier[distance] /( identifier[minor] * identifier[A] )+ identifier[delta_sigma] identifier[sin_sigma] , identifier[cos_sigma] = identifier[sin] ( identifier[sigma] ), identifier[cos] ( identifier[sigma] ) identifier[lat2] = identifier[atan2] ( identifier[sin_reduced1] * identifier[cos_sigma] + identifier[cos_reduced1] * identifier[sin_sigma] * identifier[cos_bearing] , ( literal[int] - identifier[f] )* identifier[sqrt] ( identifier[sin_alpha] ** literal[int] +( identifier[sin_reduced1] * identifier[sin_sigma] - identifier[cos_reduced1] * identifier[cos_sigma] * identifier[cos_bearing] )** literal[int] ) ) identifier[lambda_lng] = identifier[atan2] ( identifier[sin_sigma] * 
identifier[sin_bearing] , identifier[cos_reduced1] * identifier[cos_sigma] - identifier[sin_reduced1] * identifier[sin_sigma] * identifier[cos_bearing] ) identifier[C] = identifier[f] / literal[int] * identifier[cos_sq_alpha] *( literal[int] + identifier[f] *( literal[int] - literal[int] * identifier[cos_sq_alpha] )) identifier[delta_lng] =( identifier[lambda_lng] -( literal[int] - identifier[C] )* identifier[f] * identifier[sin_alpha] *( identifier[sigma] + identifier[C] * identifier[sin_sigma] *( identifier[cos2_sigma_m] + identifier[C] * identifier[cos_sigma] *( - literal[int] + literal[int] * identifier[cos2_sigma_m] ** literal[int] ) ) ) ) identifier[lng2] = identifier[lng1] + identifier[delta_lng] keyword[return] identifier[Point] ( identifier[units] . identifier[degrees] ( identifier[radians] = identifier[lat2] ), identifier[units] . identifier[degrees] ( identifier[radians] = identifier[lng2] ))
def destination(self, point, bearing, distance=None): """ TODO docs. """ point = Point(point) lat1 = units.radians(degrees=point.latitude) lng1 = units.radians(degrees=point.longitude) bearing = units.radians(degrees=bearing) if distance is None: distance = self # depends on [control=['if'], data=['distance']] if isinstance(distance, Distance): distance = distance.kilometers # depends on [control=['if'], data=[]] ellipsoid = self.ELLIPSOID if isinstance(ellipsoid, string_compare): ellipsoid = ELLIPSOIDS[ellipsoid] # depends on [control=['if'], data=[]] (major, minor, f) = ellipsoid tan_reduced1 = (1 - f) * tan(lat1) cos_reduced1 = 1 / sqrt(1 + tan_reduced1 ** 2) sin_reduced1 = tan_reduced1 * cos_reduced1 (sin_bearing, cos_bearing) = (sin(bearing), cos(bearing)) sigma1 = atan2(tan_reduced1, cos_bearing) sin_alpha = cos_reduced1 * sin_bearing cos_sq_alpha = 1 - sin_alpha ** 2 u_sq = cos_sq_alpha * (major ** 2 - minor ** 2) / minor ** 2 A = 1 + u_sq / 16384.0 * (4096 + u_sq * (-768 + u_sq * (320 - 175 * u_sq))) B = u_sq / 1024.0 * (256 + u_sq * (-128 + u_sq * (74 - 47 * u_sq))) sigma = distance / (minor * A) sigma_prime = 2 * pi while abs(sigma - sigma_prime) > 1e-11: cos2_sigma_m = cos(2 * sigma1 + sigma) (sin_sigma, cos_sigma) = (sin(sigma), cos(sigma)) delta_sigma = B * sin_sigma * (cos2_sigma_m + B / 4.0 * (cos_sigma * (-1 + 2 * cos2_sigma_m ** 2) - B / 6.0 * cos2_sigma_m * (-3 + 4 * sin_sigma ** 2) * (-3 + 4 * cos2_sigma_m ** 2))) sigma_prime = sigma sigma = distance / (minor * A) + delta_sigma # depends on [control=['while'], data=[]] (sin_sigma, cos_sigma) = (sin(sigma), cos(sigma)) lat2 = atan2(sin_reduced1 * cos_sigma + cos_reduced1 * sin_sigma * cos_bearing, (1 - f) * sqrt(sin_alpha ** 2 + (sin_reduced1 * sin_sigma - cos_reduced1 * cos_sigma * cos_bearing) ** 2)) lambda_lng = atan2(sin_sigma * sin_bearing, cos_reduced1 * cos_sigma - sin_reduced1 * sin_sigma * cos_bearing) C = f / 16.0 * cos_sq_alpha * (4 + f * (4 - 3 * cos_sq_alpha)) delta_lng = lambda_lng - 
(1 - C) * f * sin_alpha * (sigma + C * sin_sigma * (cos2_sigma_m + C * cos_sigma * (-1 + 2 * cos2_sigma_m ** 2))) lng2 = lng1 + delta_lng return Point(units.degrees(radians=lat2), units.degrees(radians=lng2))
def transform_polygon(polygon, matrix): """ Transform a polygon by a a 2D homogenous transform. Parameters ------------- polygon : shapely.geometry.Polygon 2D polygon to be transformed. matrix : (3, 3) float 2D homogenous transformation. Returns -------------- result : shapely.geometry.Polygon Polygon transformed by matrix. """ matrix = np.asanyarray(matrix, dtype=np.float64) if util.is_sequence(polygon): result = [transform_polygon(p, t) for p, t in zip(polygon, matrix)] return result # transform the outer shell shell = transform_points(np.array(polygon.exterior.coords), matrix)[:, :2] # transform the interiors holes = [transform_points(np.array(i.coords), matrix)[:, :2] for i in polygon.interiors] # create a new polygon with the result result = Polygon(shell=shell, holes=holes) return result
def function[transform_polygon, parameter[polygon, matrix]]: constant[ Transform a polygon by a a 2D homogenous transform. Parameters ------------- polygon : shapely.geometry.Polygon 2D polygon to be transformed. matrix : (3, 3) float 2D homogenous transformation. Returns -------------- result : shapely.geometry.Polygon Polygon transformed by matrix. ] variable[matrix] assign[=] call[name[np].asanyarray, parameter[name[matrix]]] if call[name[util].is_sequence, parameter[name[polygon]]] begin[:] variable[result] assign[=] <ast.ListComp object at 0x7da18bc71f90> return[name[result]] variable[shell] assign[=] call[call[name[transform_points], parameter[call[name[np].array, parameter[name[polygon].exterior.coords]], name[matrix]]]][tuple[[<ast.Slice object at 0x7da18bc72410>, <ast.Slice object at 0x7da18bc72cb0>]]] variable[holes] assign[=] <ast.ListComp object at 0x7da18bc71450> variable[result] assign[=] call[name[Polygon], parameter[]] return[name[result]]
keyword[def] identifier[transform_polygon] ( identifier[polygon] , identifier[matrix] ): literal[string] identifier[matrix] = identifier[np] . identifier[asanyarray] ( identifier[matrix] , identifier[dtype] = identifier[np] . identifier[float64] ) keyword[if] identifier[util] . identifier[is_sequence] ( identifier[polygon] ): identifier[result] =[ identifier[transform_polygon] ( identifier[p] , identifier[t] ) keyword[for] identifier[p] , identifier[t] keyword[in] identifier[zip] ( identifier[polygon] , identifier[matrix] )] keyword[return] identifier[result] identifier[shell] = identifier[transform_points] ( identifier[np] . identifier[array] ( identifier[polygon] . identifier[exterior] . identifier[coords] ), identifier[matrix] )[:,: literal[int] ] identifier[holes] =[ identifier[transform_points] ( identifier[np] . identifier[array] ( identifier[i] . identifier[coords] ), identifier[matrix] )[:,: literal[int] ] keyword[for] identifier[i] keyword[in] identifier[polygon] . identifier[interiors] ] identifier[result] = identifier[Polygon] ( identifier[shell] = identifier[shell] , identifier[holes] = identifier[holes] ) keyword[return] identifier[result]
def transform_polygon(polygon, matrix): """ Transform a polygon by a a 2D homogenous transform. Parameters ------------- polygon : shapely.geometry.Polygon 2D polygon to be transformed. matrix : (3, 3) float 2D homogenous transformation. Returns -------------- result : shapely.geometry.Polygon Polygon transformed by matrix. """ matrix = np.asanyarray(matrix, dtype=np.float64) if util.is_sequence(polygon): result = [transform_polygon(p, t) for (p, t) in zip(polygon, matrix)] return result # depends on [control=['if'], data=[]] # transform the outer shell shell = transform_points(np.array(polygon.exterior.coords), matrix)[:, :2] # transform the interiors holes = [transform_points(np.array(i.coords), matrix)[:, :2] for i in polygon.interiors] # create a new polygon with the result result = Polygon(shell=shell, holes=holes) return result
def _get_meta_options(self) -> List[MetaOption]: """ Returns a list of :class:`MetaOption` instances that this factory supports. """ return [option if isinstance(option, MetaOption) else option() for option in self._options]
def function[_get_meta_options, parameter[self]]: constant[ Returns a list of :class:`MetaOption` instances that this factory supports. ] return[<ast.ListComp object at 0x7da20c6abd90>]
keyword[def] identifier[_get_meta_options] ( identifier[self] )-> identifier[List] [ identifier[MetaOption] ]: literal[string] keyword[return] [ identifier[option] keyword[if] identifier[isinstance] ( identifier[option] , identifier[MetaOption] ) keyword[else] identifier[option] () keyword[for] identifier[option] keyword[in] identifier[self] . identifier[_options] ]
def _get_meta_options(self) -> List[MetaOption]: """ Returns a list of :class:`MetaOption` instances that this factory supports. """ return [option if isinstance(option, MetaOption) else option() for option in self._options]
def inject(self, *args): """ Decorator to mark a class, method, or function as needing dependencies injected. Example usage:: from flask_unchained import unchained, injectable # automatically figure out which params to inject @unchained.inject() def my_function(not_injected, some_service: SomeService = injectable): # do stuff # or declare injectables explicitly @unchained.inject('some_service') def my_function(not_injected, some_service: SomeService): # do stuff # use it on a class to set up injection on everything @unchained.inject() class MyClass: some_service: SomeService = injectable def __init__(self, another_service: AnotherService = injectable): self.another_service = another_service def a_method(self, yet_another_service = injectable): yet_another_service.do_stuff() """ used_without_parenthesis = len(args) and callable(args[0]) has_explicit_args = len(args) and all(isinstance(x, str) for x in args) def wrapper(fn): cls = None if isinstance(fn, type): cls = fn fn = cls.__init__ # check if the fn has already been wrapped with inject if hasattr(fn, '__signature__'): if cls and not hasattr(cls, '__signature__'): # this happens when both the class and its __init__ method # where decorated with @inject. 
which would be silly, but, # it should still work regardless cls.__signature__ = fn.__signature__ if not cls: return fn sig = inspect.signature(fn) # create a new function wrapping the original to inject params @functools.wraps(fn) def new_fn(*fn_args, **fn_kwargs): # figure out which params we need to inject (we don't want to # interfere with any params the user has passed manually) bound_args = sig.bind_partial(*fn_args, **fn_kwargs) required = set(sig.parameters.keys()) have = set(bound_args.arguments.keys()) need = required.difference(have) to_inject = (args if has_explicit_args else set([k for k, v in sig.parameters.items() if v.default == injectable])) # try to inject needed params from extensions or services for param_name in to_inject: if param_name not in need: continue if param_name in self.extensions: fn_kwargs[param_name] = self.extensions[param_name] elif param_name in self.services: fn_kwargs[param_name] = self.services[param_name] # check to make sure we we're not missing anything required bound_args = sig.bind_partial(*fn_args, **fn_kwargs) bound_args.apply_defaults() for k, v in bound_args.arguments.items(): if isinstance(v, str) and v == injectable: di_name = new_fn.__di_name__ is_constructor = ('.' not in di_name and di_name != di_name.lower()) action = 'initialized' if is_constructor else 'called' msg = f'{di_name} was {action} without the ' \ f'{k} parameter. Please supply it manually, or '\ 'make sure it gets injected.' 
raise ServiceUsageError(msg) if cls and not getattr(cls, _DI_AUTOMATICALLY_HANDLED, False): cls_attrs_to_inject = getattr(cls, _INJECT_CLS_ATTRS, []) for attr, value in vars(cls).items(): if value == injectable: cls_attrs_to_inject.append(attr) if cls_attrs_to_inject: setattr(cls, _INJECT_CLS_ATTRS, cls_attrs_to_inject) _inject_cls_attrs()(cls) return fn(*bound_args.args, **bound_args.kwargs) new_fn.__signature__ = sig new_fn.__di_name__ = getattr(fn, '__di_name__', fn.__name__) if cls: cls.__init__ = new_fn cls.__signature__ = sig for attr, meth in vars(cls).items(): if (attr.startswith('__') or not callable(meth) or hasattr(meth, '__signature__')): continue setattr(cls, attr, self.inject()(meth)) return cls return new_fn if used_without_parenthesis: return wrapper(args[0]) return wrapper
def function[inject, parameter[self]]: constant[ Decorator to mark a class, method, or function as needing dependencies injected. Example usage:: from flask_unchained import unchained, injectable # automatically figure out which params to inject @unchained.inject() def my_function(not_injected, some_service: SomeService = injectable): # do stuff # or declare injectables explicitly @unchained.inject('some_service') def my_function(not_injected, some_service: SomeService): # do stuff # use it on a class to set up injection on everything @unchained.inject() class MyClass: some_service: SomeService = injectable def __init__(self, another_service: AnotherService = injectable): self.another_service = another_service def a_method(self, yet_another_service = injectable): yet_another_service.do_stuff() ] variable[used_without_parenthesis] assign[=] <ast.BoolOp object at 0x7da20c6c5f00> variable[has_explicit_args] assign[=] <ast.BoolOp object at 0x7da20c6c61d0> def function[wrapper, parameter[fn]]: variable[cls] assign[=] constant[None] if call[name[isinstance], parameter[name[fn], name[type]]] begin[:] variable[cls] assign[=] name[fn] variable[fn] assign[=] name[cls].__init__ if call[name[hasattr], parameter[name[fn], constant[__signature__]]] begin[:] if <ast.BoolOp object at 0x7da20c6c49a0> begin[:] name[cls].__signature__ assign[=] name[fn].__signature__ if <ast.UnaryOp object at 0x7da20c6c7370> begin[:] return[name[fn]] variable[sig] assign[=] call[name[inspect].signature, parameter[name[fn]]] def function[new_fn, parameter[]]: variable[bound_args] assign[=] call[name[sig].bind_partial, parameter[<ast.Starred object at 0x7da20c6c52d0>]] variable[required] assign[=] call[name[set], parameter[call[name[sig].parameters.keys, parameter[]]]] variable[have] assign[=] call[name[set], parameter[call[name[bound_args].arguments.keys, parameter[]]]] variable[need] assign[=] call[name[required].difference, parameter[name[have]]] variable[to_inject] assign[=] <ast.IfExp object at 
0x7da20c9905b0> for taget[name[param_name]] in starred[name[to_inject]] begin[:] if compare[name[param_name] <ast.NotIn object at 0x7da2590d7190> name[need]] begin[:] continue if compare[name[param_name] in name[self].extensions] begin[:] call[name[fn_kwargs]][name[param_name]] assign[=] call[name[self].extensions][name[param_name]] variable[bound_args] assign[=] call[name[sig].bind_partial, parameter[<ast.Starred object at 0x7da20e9b3760>]] call[name[bound_args].apply_defaults, parameter[]] for taget[tuple[[<ast.Name object at 0x7da20c6c7ee0>, <ast.Name object at 0x7da20c6c5600>]]] in starred[call[name[bound_args].arguments.items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20c6c6110> begin[:] variable[di_name] assign[=] name[new_fn].__di_name__ variable[is_constructor] assign[=] <ast.BoolOp object at 0x7da20c6c7b20> variable[action] assign[=] <ast.IfExp object at 0x7da20c6c6050> variable[msg] assign[=] <ast.JoinedStr object at 0x7da20c6c6620> <ast.Raise object at 0x7da20c6c57b0> if <ast.BoolOp object at 0x7da20c6c6b00> begin[:] variable[cls_attrs_to_inject] assign[=] call[name[getattr], parameter[name[cls], name[_INJECT_CLS_ATTRS], list[[]]]] for taget[tuple[[<ast.Name object at 0x7da20c6c7310>, <ast.Name object at 0x7da20c6c4190>]]] in starred[call[call[name[vars], parameter[name[cls]]].items, parameter[]]] begin[:] if compare[name[value] equal[==] name[injectable]] begin[:] call[name[cls_attrs_to_inject].append, parameter[name[attr]]] if name[cls_attrs_to_inject] begin[:] call[name[setattr], parameter[name[cls], name[_INJECT_CLS_ATTRS], name[cls_attrs_to_inject]]] call[call[name[_inject_cls_attrs], parameter[]], parameter[name[cls]]] return[call[name[fn], parameter[<ast.Starred object at 0x7da1b0e05ba0>]]] name[new_fn].__signature__ assign[=] name[sig] name[new_fn].__di_name__ assign[=] call[name[getattr], parameter[name[fn], constant[__di_name__], name[fn].__name__]] if name[cls] begin[:] name[cls].__init__ assign[=] name[new_fn] 
name[cls].__signature__ assign[=] name[sig] for taget[tuple[[<ast.Name object at 0x7da1b0e05a80>, <ast.Name object at 0x7da1b0e05a50>]]] in starred[call[call[name[vars], parameter[name[cls]]].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da1b0e05c30> begin[:] continue call[name[setattr], parameter[name[cls], name[attr], call[call[name[self].inject, parameter[]], parameter[name[meth]]]]] return[name[cls]] return[name[new_fn]] if name[used_without_parenthesis] begin[:] return[call[name[wrapper], parameter[call[name[args]][constant[0]]]]] return[name[wrapper]]
keyword[def] identifier[inject] ( identifier[self] ,* identifier[args] ): literal[string] identifier[used_without_parenthesis] = identifier[len] ( identifier[args] ) keyword[and] identifier[callable] ( identifier[args] [ literal[int] ]) identifier[has_explicit_args] = identifier[len] ( identifier[args] ) keyword[and] identifier[all] ( identifier[isinstance] ( identifier[x] , identifier[str] ) keyword[for] identifier[x] keyword[in] identifier[args] ) keyword[def] identifier[wrapper] ( identifier[fn] ): identifier[cls] = keyword[None] keyword[if] identifier[isinstance] ( identifier[fn] , identifier[type] ): identifier[cls] = identifier[fn] identifier[fn] = identifier[cls] . identifier[__init__] keyword[if] identifier[hasattr] ( identifier[fn] , literal[string] ): keyword[if] identifier[cls] keyword[and] keyword[not] identifier[hasattr] ( identifier[cls] , literal[string] ): identifier[cls] . identifier[__signature__] = identifier[fn] . identifier[__signature__] keyword[if] keyword[not] identifier[cls] : keyword[return] identifier[fn] identifier[sig] = identifier[inspect] . identifier[signature] ( identifier[fn] ) @ identifier[functools] . identifier[wraps] ( identifier[fn] ) keyword[def] identifier[new_fn] (* identifier[fn_args] ,** identifier[fn_kwargs] ): identifier[bound_args] = identifier[sig] . identifier[bind_partial] (* identifier[fn_args] ,** identifier[fn_kwargs] ) identifier[required] = identifier[set] ( identifier[sig] . identifier[parameters] . identifier[keys] ()) identifier[have] = identifier[set] ( identifier[bound_args] . identifier[arguments] . identifier[keys] ()) identifier[need] = identifier[required] . identifier[difference] ( identifier[have] ) identifier[to_inject] =( identifier[args] keyword[if] identifier[has_explicit_args] keyword[else] identifier[set] ([ identifier[k] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[sig] . identifier[parameters] . identifier[items] () keyword[if] identifier[v] . 
identifier[default] == identifier[injectable] ])) keyword[for] identifier[param_name] keyword[in] identifier[to_inject] : keyword[if] identifier[param_name] keyword[not] keyword[in] identifier[need] : keyword[continue] keyword[if] identifier[param_name] keyword[in] identifier[self] . identifier[extensions] : identifier[fn_kwargs] [ identifier[param_name] ]= identifier[self] . identifier[extensions] [ identifier[param_name] ] keyword[elif] identifier[param_name] keyword[in] identifier[self] . identifier[services] : identifier[fn_kwargs] [ identifier[param_name] ]= identifier[self] . identifier[services] [ identifier[param_name] ] identifier[bound_args] = identifier[sig] . identifier[bind_partial] (* identifier[fn_args] ,** identifier[fn_kwargs] ) identifier[bound_args] . identifier[apply_defaults] () keyword[for] identifier[k] , identifier[v] keyword[in] identifier[bound_args] . identifier[arguments] . identifier[items] (): keyword[if] identifier[isinstance] ( identifier[v] , identifier[str] ) keyword[and] identifier[v] == identifier[injectable] : identifier[di_name] = identifier[new_fn] . identifier[__di_name__] identifier[is_constructor] =( literal[string] keyword[not] keyword[in] identifier[di_name] keyword[and] identifier[di_name] != identifier[di_name] . identifier[lower] ()) identifier[action] = literal[string] keyword[if] identifier[is_constructor] keyword[else] literal[string] identifier[msg] = literal[string] literal[string] literal[string] keyword[raise] identifier[ServiceUsageError] ( identifier[msg] ) keyword[if] identifier[cls] keyword[and] keyword[not] identifier[getattr] ( identifier[cls] , identifier[_DI_AUTOMATICALLY_HANDLED] , keyword[False] ): identifier[cls_attrs_to_inject] = identifier[getattr] ( identifier[cls] , identifier[_INJECT_CLS_ATTRS] ,[]) keyword[for] identifier[attr] , identifier[value] keyword[in] identifier[vars] ( identifier[cls] ). 
identifier[items] (): keyword[if] identifier[value] == identifier[injectable] : identifier[cls_attrs_to_inject] . identifier[append] ( identifier[attr] ) keyword[if] identifier[cls_attrs_to_inject] : identifier[setattr] ( identifier[cls] , identifier[_INJECT_CLS_ATTRS] , identifier[cls_attrs_to_inject] ) identifier[_inject_cls_attrs] ()( identifier[cls] ) keyword[return] identifier[fn] (* identifier[bound_args] . identifier[args] ,** identifier[bound_args] . identifier[kwargs] ) identifier[new_fn] . identifier[__signature__] = identifier[sig] identifier[new_fn] . identifier[__di_name__] = identifier[getattr] ( identifier[fn] , literal[string] , identifier[fn] . identifier[__name__] ) keyword[if] identifier[cls] : identifier[cls] . identifier[__init__] = identifier[new_fn] identifier[cls] . identifier[__signature__] = identifier[sig] keyword[for] identifier[attr] , identifier[meth] keyword[in] identifier[vars] ( identifier[cls] ). identifier[items] (): keyword[if] ( identifier[attr] . identifier[startswith] ( literal[string] ) keyword[or] keyword[not] identifier[callable] ( identifier[meth] ) keyword[or] identifier[hasattr] ( identifier[meth] , literal[string] )): keyword[continue] identifier[setattr] ( identifier[cls] , identifier[attr] , identifier[self] . identifier[inject] ()( identifier[meth] )) keyword[return] identifier[cls] keyword[return] identifier[new_fn] keyword[if] identifier[used_without_parenthesis] : keyword[return] identifier[wrapper] ( identifier[args] [ literal[int] ]) keyword[return] identifier[wrapper]
def inject(self, *args): """ Decorator to mark a class, method, or function as needing dependencies injected. Example usage:: from flask_unchained import unchained, injectable # automatically figure out which params to inject @unchained.inject() def my_function(not_injected, some_service: SomeService = injectable): # do stuff # or declare injectables explicitly @unchained.inject('some_service') def my_function(not_injected, some_service: SomeService): # do stuff # use it on a class to set up injection on everything @unchained.inject() class MyClass: some_service: SomeService = injectable def __init__(self, another_service: AnotherService = injectable): self.another_service = another_service def a_method(self, yet_another_service = injectable): yet_another_service.do_stuff() """ used_without_parenthesis = len(args) and callable(args[0]) has_explicit_args = len(args) and all((isinstance(x, str) for x in args)) def wrapper(fn): cls = None if isinstance(fn, type): cls = fn fn = cls.__init__ # depends on [control=['if'], data=[]] # check if the fn has already been wrapped with inject if hasattr(fn, '__signature__'): if cls and (not hasattr(cls, '__signature__')): # this happens when both the class and its __init__ method # where decorated with @inject. 
which would be silly, but, # it should still work regardless cls.__signature__ = fn.__signature__ # depends on [control=['if'], data=[]] if not cls: return fn # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] sig = inspect.signature(fn) # create a new function wrapping the original to inject params @functools.wraps(fn) def new_fn(*fn_args, **fn_kwargs): # figure out which params we need to inject (we don't want to # interfere with any params the user has passed manually) bound_args = sig.bind_partial(*fn_args, **fn_kwargs) required = set(sig.parameters.keys()) have = set(bound_args.arguments.keys()) need = required.difference(have) to_inject = args if has_explicit_args else set([k for (k, v) in sig.parameters.items() if v.default == injectable]) # try to inject needed params from extensions or services for param_name in to_inject: if param_name not in need: continue # depends on [control=['if'], data=[]] if param_name in self.extensions: fn_kwargs[param_name] = self.extensions[param_name] # depends on [control=['if'], data=['param_name']] elif param_name in self.services: fn_kwargs[param_name] = self.services[param_name] # depends on [control=['if'], data=['param_name']] # depends on [control=['for'], data=['param_name']] # check to make sure we we're not missing anything required bound_args = sig.bind_partial(*fn_args, **fn_kwargs) bound_args.apply_defaults() for (k, v) in bound_args.arguments.items(): if isinstance(v, str) and v == injectable: di_name = new_fn.__di_name__ is_constructor = '.' not in di_name and di_name != di_name.lower() action = 'initialized' if is_constructor else 'called' msg = f'{di_name} was {action} without the {k} parameter. Please supply it manually, or make sure it gets injected.' 
raise ServiceUsageError(msg) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if cls and (not getattr(cls, _DI_AUTOMATICALLY_HANDLED, False)): cls_attrs_to_inject = getattr(cls, _INJECT_CLS_ATTRS, []) for (attr, value) in vars(cls).items(): if value == injectable: cls_attrs_to_inject.append(attr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if cls_attrs_to_inject: setattr(cls, _INJECT_CLS_ATTRS, cls_attrs_to_inject) _inject_cls_attrs()(cls) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return fn(*bound_args.args, **bound_args.kwargs) new_fn.__signature__ = sig new_fn.__di_name__ = getattr(fn, '__di_name__', fn.__name__) if cls: cls.__init__ = new_fn cls.__signature__ = sig for (attr, meth) in vars(cls).items(): if attr.startswith('__') or not callable(meth) or hasattr(meth, '__signature__'): continue # depends on [control=['if'], data=[]] setattr(cls, attr, self.inject()(meth)) # depends on [control=['for'], data=[]] return cls # depends on [control=['if'], data=[]] return new_fn if used_without_parenthesis: return wrapper(args[0]) # depends on [control=['if'], data=[]] return wrapper
def endpoint_check( first, node_first, s, second, node_second, t, intersections ): r"""Check if curve endpoints are identical. .. note:: This is a helper for :func:`tangent_bbox_intersection`. These functions are used (directly or indirectly) by :func:`_all_intersections` exclusively, and that function has a Fortran equivalent. Args: first (SubdividedCurve): First curve being intersected (assumed in :math:\mathbf{R}^2`). node_first (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``first``. s (float): The parameter corresponding to ``node_first``, so expected to be one of ``0.0`` or ``1.0``. second (SubdividedCurve): Second curve being intersected (assumed in :math:\mathbf{R}^2`). node_second (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``second``. t (float): The parameter corresponding to ``node_second``, so expected to be one of ``0.0`` or ``1.0``. intersections (list): A list of already encountered intersections. If these curves intersect at their tangency, then those intersections will be added to this list. """ if _helpers.vector_close(node_first, node_second): orig_s = (1 - s) * first.start + s * first.end orig_t = (1 - t) * second.start + t * second.end add_intersection(orig_s, orig_t, intersections)
def function[endpoint_check, parameter[first, node_first, s, second, node_second, t, intersections]]: constant[Check if curve endpoints are identical. .. note:: This is a helper for :func:`tangent_bbox_intersection`. These functions are used (directly or indirectly) by :func:`_all_intersections` exclusively, and that function has a Fortran equivalent. Args: first (SubdividedCurve): First curve being intersected (assumed in :math:\mathbf{R}^2`). node_first (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``first``. s (float): The parameter corresponding to ``node_first``, so expected to be one of ``0.0`` or ``1.0``. second (SubdividedCurve): Second curve being intersected (assumed in :math:\mathbf{R}^2`). node_second (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``second``. t (float): The parameter corresponding to ``node_second``, so expected to be one of ``0.0`` or ``1.0``. intersections (list): A list of already encountered intersections. If these curves intersect at their tangency, then those intersections will be added to this list. ] if call[name[_helpers].vector_close, parameter[name[node_first], name[node_second]]] begin[:] variable[orig_s] assign[=] binary_operation[binary_operation[binary_operation[constant[1] - name[s]] * name[first].start] + binary_operation[name[s] * name[first].end]] variable[orig_t] assign[=] binary_operation[binary_operation[binary_operation[constant[1] - name[t]] * name[second].start] + binary_operation[name[t] * name[second].end]] call[name[add_intersection], parameter[name[orig_s], name[orig_t], name[intersections]]]
keyword[def] identifier[endpoint_check] ( identifier[first] , identifier[node_first] , identifier[s] , identifier[second] , identifier[node_second] , identifier[t] , identifier[intersections] ): literal[string] keyword[if] identifier[_helpers] . identifier[vector_close] ( identifier[node_first] , identifier[node_second] ): identifier[orig_s] =( literal[int] - identifier[s] )* identifier[first] . identifier[start] + identifier[s] * identifier[first] . identifier[end] identifier[orig_t] =( literal[int] - identifier[t] )* identifier[second] . identifier[start] + identifier[t] * identifier[second] . identifier[end] identifier[add_intersection] ( identifier[orig_s] , identifier[orig_t] , identifier[intersections] )
def endpoint_check(first, node_first, s, second, node_second, t, intersections): """Check if curve endpoints are identical. .. note:: This is a helper for :func:`tangent_bbox_intersection`. These functions are used (directly or indirectly) by :func:`_all_intersections` exclusively, and that function has a Fortran equivalent. Args: first (SubdividedCurve): First curve being intersected (assumed in :math:\\mathbf{R}^2`). node_first (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``first``. s (float): The parameter corresponding to ``node_first``, so expected to be one of ``0.0`` or ``1.0``. second (SubdividedCurve): Second curve being intersected (assumed in :math:\\mathbf{R}^2`). node_second (numpy.ndarray): 1D ``2``-array, one of the endpoints of ``second``. t (float): The parameter corresponding to ``node_second``, so expected to be one of ``0.0`` or ``1.0``. intersections (list): A list of already encountered intersections. If these curves intersect at their tangency, then those intersections will be added to this list. """ if _helpers.vector_close(node_first, node_second): orig_s = (1 - s) * first.start + s * first.end orig_t = (1 - t) * second.start + t * second.end add_intersection(orig_s, orig_t, intersections) # depends on [control=['if'], data=[]]
def main(): """ Main entry point for gunicorn_console. """ # Set up curses. stdscr = curses.initscr() curses.start_color() curses.init_pair(1, foreground_colour, background_colour) curses.noecho() stdscr.keypad(True) stdscr.nodelay(True) try: curses.curs_set(False) except: pass try: # Run main event loop until quit. while True: try: update_gunicorns() handle_keypress(stdscr) display_output(stdscr) curses.napms(int(screen_delay * 1000)) except KeyboardInterrupt: break finally: # Tear down curses. curses.nocbreak() stdscr.keypad(False) curses.echo() curses.endwin()
def function[main, parameter[]]: constant[ Main entry point for gunicorn_console. ] variable[stdscr] assign[=] call[name[curses].initscr, parameter[]] call[name[curses].start_color, parameter[]] call[name[curses].init_pair, parameter[constant[1], name[foreground_colour], name[background_colour]]] call[name[curses].noecho, parameter[]] call[name[stdscr].keypad, parameter[constant[True]]] call[name[stdscr].nodelay, parameter[constant[True]]] <ast.Try object at 0x7da1b2360e50> <ast.Try object at 0x7da1b2361240>
keyword[def] identifier[main] (): literal[string] identifier[stdscr] = identifier[curses] . identifier[initscr] () identifier[curses] . identifier[start_color] () identifier[curses] . identifier[init_pair] ( literal[int] , identifier[foreground_colour] , identifier[background_colour] ) identifier[curses] . identifier[noecho] () identifier[stdscr] . identifier[keypad] ( keyword[True] ) identifier[stdscr] . identifier[nodelay] ( keyword[True] ) keyword[try] : identifier[curses] . identifier[curs_set] ( keyword[False] ) keyword[except] : keyword[pass] keyword[try] : keyword[while] keyword[True] : keyword[try] : identifier[update_gunicorns] () identifier[handle_keypress] ( identifier[stdscr] ) identifier[display_output] ( identifier[stdscr] ) identifier[curses] . identifier[napms] ( identifier[int] ( identifier[screen_delay] * literal[int] )) keyword[except] identifier[KeyboardInterrupt] : keyword[break] keyword[finally] : identifier[curses] . identifier[nocbreak] () identifier[stdscr] . identifier[keypad] ( keyword[False] ) identifier[curses] . identifier[echo] () identifier[curses] . identifier[endwin] ()
def main(): """ Main entry point for gunicorn_console. """ # Set up curses. stdscr = curses.initscr() curses.start_color() curses.init_pair(1, foreground_colour, background_colour) curses.noecho() stdscr.keypad(True) stdscr.nodelay(True) try: curses.curs_set(False) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] try: # Run main event loop until quit. while True: try: update_gunicorns() handle_keypress(stdscr) display_output(stdscr) curses.napms(int(screen_delay * 1000)) # depends on [control=['try'], data=[]] except KeyboardInterrupt: break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] finally: # Tear down curses. curses.nocbreak() stdscr.keypad(False) curses.echo() curses.endwin()
def write_surf_params_to_file(self): """Write the params to file that surftool_Free needs to generate the surface facets""" inp_file = self.water_surface_file + '_params.txt' lg.info('Writing Inputs to file : ' + inp_file) if self.surf_state == 'flat': # this is the only one that currently works. lg.info('Surface Type is :: flat') f = open(inp_file, 'w') f.write('verbose= ' + str(self.verbose) + '\n') f.write('band_count= ' + str(self.num_bands) + '\n') f.write('band_centres_data= ') f.write(",".join([str(wave) for wave in self.wavelengths]) + '\n') f.write('partition= ' + self.partition + '\n') f.write('vn= ' + str(self.vn) + '\n') f.write('hn= ' + str(self.hn) + '\n') f.write('theta_points= ') f.write(",".join([str(theta) for theta in self.theta_points]) + '\n') f.write('type= ' + self.iface_type + '\n') f.write('refrac_index_0= ' + str(self.iface_0_ri) + '\n') f.write('refrac_index_1= ' + str(self.iface_1_ri) + '\n') f.write('wind_speed= ' + str(self.wind_speed) + '\n') f.write('wind_direc= ' + str(self.wind_direc) + '\n') f.write('crosswind_vertices= ' + str(self.crosswind_vertices) + '\n') f.write('upwind_vertices= ' + str(self.upwind_vertices) + '\n') f.write('surface_size= ' + str(self.surface_size) + '\n') f.write('surface_radius=' + str(self.surface_radius) + '\n') f.write('target_size= ' + str(self.target_size) + '\n') f.write('rays_per_quad= ' + str(self.rays_per_quad) + '\n') f.write('surface_count= ' + str(self.surface_count) + '\n') f.write('azimuthally_average= ' + str(self.azimuthally_average) + '\n') f.write('surface_save_fp= ' + inp_file.strip('_params.txt') + '\n') f.flush() f.close()
def function[write_surf_params_to_file, parameter[self]]: constant[Write the params to file that surftool_Free needs to generate the surface facets] variable[inp_file] assign[=] binary_operation[name[self].water_surface_file + constant[_params.txt]] call[name[lg].info, parameter[binary_operation[constant[Writing Inputs to file : ] + name[inp_file]]]] if compare[name[self].surf_state equal[==] constant[flat]] begin[:] call[name[lg].info, parameter[constant[Surface Type is :: flat]]] variable[f] assign[=] call[name[open], parameter[name[inp_file], constant[w]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[verbose= ] + call[name[str], parameter[name[self].verbose]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[band_count= ] + call[name[str], parameter[name[self].num_bands]]] + constant[ ]]]] call[name[f].write, parameter[constant[band_centres_data= ]]] call[name[f].write, parameter[binary_operation[call[constant[,].join, parameter[<ast.ListComp object at 0x7da18fe903d0>]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[partition= ] + name[self].partition] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[vn= ] + call[name[str], parameter[name[self].vn]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[hn= ] + call[name[str], parameter[name[self].hn]]] + constant[ ]]]] call[name[f].write, parameter[constant[theta_points= ]]] call[name[f].write, parameter[binary_operation[call[constant[,].join, parameter[<ast.ListComp object at 0x7da18fe908e0>]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[type= ] + name[self].iface_type] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[refrac_index_0= ] + call[name[str], parameter[name[self].iface_0_ri]]] + constant[ ]]]] call[name[f].write, 
parameter[binary_operation[binary_operation[constant[refrac_index_1= ] + call[name[str], parameter[name[self].iface_1_ri]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[wind_speed= ] + call[name[str], parameter[name[self].wind_speed]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[wind_direc= ] + call[name[str], parameter[name[self].wind_direc]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[crosswind_vertices= ] + call[name[str], parameter[name[self].crosswind_vertices]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[upwind_vertices= ] + call[name[str], parameter[name[self].upwind_vertices]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[surface_size= ] + call[name[str], parameter[name[self].surface_size]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[surface_radius=] + call[name[str], parameter[name[self].surface_radius]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[target_size= ] + call[name[str], parameter[name[self].target_size]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[rays_per_quad= ] + call[name[str], parameter[name[self].rays_per_quad]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[surface_count= ] + call[name[str], parameter[name[self].surface_count]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[azimuthally_average= ] + call[name[str], parameter[name[self].azimuthally_average]]] + constant[ ]]]] call[name[f].write, parameter[binary_operation[binary_operation[constant[surface_save_fp= ] + call[name[inp_file].strip, parameter[constant[_params.txt]]]] + constant[ ]]]] call[name[f].flush, parameter[]] 
call[name[f].close, parameter[]]
keyword[def] identifier[write_surf_params_to_file] ( identifier[self] ): literal[string] identifier[inp_file] = identifier[self] . identifier[water_surface_file] + literal[string] identifier[lg] . identifier[info] ( literal[string] + identifier[inp_file] ) keyword[if] identifier[self] . identifier[surf_state] == literal[string] : identifier[lg] . identifier[info] ( literal[string] ) identifier[f] = identifier[open] ( identifier[inp_file] , literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[verbose] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[num_bands] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] . identifier[join] ([ identifier[str] ( identifier[wave] ) keyword[for] identifier[wave] keyword[in] identifier[self] . identifier[wavelengths] ])+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[self] . identifier[partition] + literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[vn] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[hn] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] . identifier[join] ([ identifier[str] ( identifier[theta] ) keyword[for] identifier[theta] keyword[in] identifier[self] . identifier[theta_points] ])+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[self] . identifier[iface_type] + literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[iface_0_ri] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . 
identifier[iface_1_ri] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[wind_speed] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[wind_direc] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[crosswind_vertices] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[upwind_vertices] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[surface_size] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[surface_radius] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[target_size] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[rays_per_quad] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[surface_count] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[str] ( identifier[self] . identifier[azimuthally_average] )+ literal[string] ) identifier[f] . identifier[write] ( literal[string] + identifier[inp_file] . identifier[strip] ( literal[string] )+ literal[string] ) identifier[f] . identifier[flush] () identifier[f] . identifier[close] ()
def write_surf_params_to_file(self): """Write the params to file that surftool_Free needs to generate the surface facets""" inp_file = self.water_surface_file + '_params.txt' lg.info('Writing Inputs to file : ' + inp_file) if self.surf_state == 'flat': # this is the only one that currently works. lg.info('Surface Type is :: flat') f = open(inp_file, 'w') f.write('verbose= ' + str(self.verbose) + '\n') f.write('band_count= ' + str(self.num_bands) + '\n') f.write('band_centres_data= ') f.write(','.join([str(wave) for wave in self.wavelengths]) + '\n') f.write('partition= ' + self.partition + '\n') f.write('vn= ' + str(self.vn) + '\n') f.write('hn= ' + str(self.hn) + '\n') f.write('theta_points= ') f.write(','.join([str(theta) for theta in self.theta_points]) + '\n') f.write('type= ' + self.iface_type + '\n') f.write('refrac_index_0= ' + str(self.iface_0_ri) + '\n') f.write('refrac_index_1= ' + str(self.iface_1_ri) + '\n') f.write('wind_speed= ' + str(self.wind_speed) + '\n') f.write('wind_direc= ' + str(self.wind_direc) + '\n') f.write('crosswind_vertices= ' + str(self.crosswind_vertices) + '\n') f.write('upwind_vertices= ' + str(self.upwind_vertices) + '\n') f.write('surface_size= ' + str(self.surface_size) + '\n') f.write('surface_radius=' + str(self.surface_radius) + '\n') f.write('target_size= ' + str(self.target_size) + '\n') f.write('rays_per_quad= ' + str(self.rays_per_quad) + '\n') f.write('surface_count= ' + str(self.surface_count) + '\n') f.write('azimuthally_average= ' + str(self.azimuthally_average) + '\n') f.write('surface_save_fp= ' + inp_file.strip('_params.txt') + '\n') f.flush() f.close() # depends on [control=['if'], data=[]]
def wait_for_state_machine_finished(state_machine): """ wait for a state machine to finish its execution :param state_machine: the statemachine to synchronize with :return: """ global _user_abort from rafcon.core.states.execution_state import ExecutionState if not isinstance(state_machine.root_state, ExecutionState): while len(state_machine.execution_histories[0]) < 1: time.sleep(0.1) else: time.sleep(0.5) while state_machine.root_state.state_execution_status is not StateExecutionStatus.INACTIVE: try: state_machine.root_state.concurrency_queue.get(timeout=1) # this check triggers if the state machine could not be stopped in the signal handler if _user_abort: return except Empty: pass # no logger output here to make it easier for the parser logger.verbose("RAFCON live signal")
def function[wait_for_state_machine_finished, parameter[state_machine]]: constant[ wait for a state machine to finish its execution :param state_machine: the statemachine to synchronize with :return: ] <ast.Global object at 0x7da18eb57e80> from relative_module[rafcon.core.states.execution_state] import module[ExecutionState] if <ast.UnaryOp object at 0x7da18eb54370> begin[:] while compare[call[name[len], parameter[call[name[state_machine].execution_histories][constant[0]]]] less[<] constant[1]] begin[:] call[name[time].sleep, parameter[constant[0.1]]] while compare[name[state_machine].root_state.state_execution_status is_not name[StateExecutionStatus].INACTIVE] begin[:] <ast.Try object at 0x7da20c76fbe0> call[name[logger].verbose, parameter[constant[RAFCON live signal]]]
keyword[def] identifier[wait_for_state_machine_finished] ( identifier[state_machine] ): literal[string] keyword[global] identifier[_user_abort] keyword[from] identifier[rafcon] . identifier[core] . identifier[states] . identifier[execution_state] keyword[import] identifier[ExecutionState] keyword[if] keyword[not] identifier[isinstance] ( identifier[state_machine] . identifier[root_state] , identifier[ExecutionState] ): keyword[while] identifier[len] ( identifier[state_machine] . identifier[execution_histories] [ literal[int] ])< literal[int] : identifier[time] . identifier[sleep] ( literal[int] ) keyword[else] : identifier[time] . identifier[sleep] ( literal[int] ) keyword[while] identifier[state_machine] . identifier[root_state] . identifier[state_execution_status] keyword[is] keyword[not] identifier[StateExecutionStatus] . identifier[INACTIVE] : keyword[try] : identifier[state_machine] . identifier[root_state] . identifier[concurrency_queue] . identifier[get] ( identifier[timeout] = literal[int] ) keyword[if] identifier[_user_abort] : keyword[return] keyword[except] identifier[Empty] : keyword[pass] identifier[logger] . identifier[verbose] ( literal[string] )
def wait_for_state_machine_finished(state_machine): """ wait for a state machine to finish its execution :param state_machine: the statemachine to synchronize with :return: """ global _user_abort from rafcon.core.states.execution_state import ExecutionState if not isinstance(state_machine.root_state, ExecutionState): while len(state_machine.execution_histories[0]) < 1: time.sleep(0.1) # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] else: time.sleep(0.5) while state_machine.root_state.state_execution_status is not StateExecutionStatus.INACTIVE: try: state_machine.root_state.concurrency_queue.get(timeout=1) # this check triggers if the state machine could not be stopped in the signal handler if _user_abort: return # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Empty: pass # depends on [control=['except'], data=[]] # no logger output here to make it easier for the parser logger.verbose('RAFCON live signal') # depends on [control=['while'], data=[]]
def format(x, format): """Uses http://www.cplusplus.com/reference/string/to_string/ for formatting""" # don't change the dtype, otherwise for each block the dtype may be different (string length) sl = vaex.strings.format(x, format) return column.ColumnStringArrow(sl.bytes, sl.indices, sl.length, sl.offset, string_sequence=sl)
def function[format, parameter[x, format]]: constant[Uses http://www.cplusplus.com/reference/string/to_string/ for formatting] variable[sl] assign[=] call[name[vaex].strings.format, parameter[name[x], name[format]]] return[call[name[column].ColumnStringArrow, parameter[name[sl].bytes, name[sl].indices, name[sl].length, name[sl].offset]]]
keyword[def] identifier[format] ( identifier[x] , identifier[format] ): literal[string] identifier[sl] = identifier[vaex] . identifier[strings] . identifier[format] ( identifier[x] , identifier[format] ) keyword[return] identifier[column] . identifier[ColumnStringArrow] ( identifier[sl] . identifier[bytes] , identifier[sl] . identifier[indices] , identifier[sl] . identifier[length] , identifier[sl] . identifier[offset] , identifier[string_sequence] = identifier[sl] )
def format(x, format): """Uses http://www.cplusplus.com/reference/string/to_string/ for formatting""" # don't change the dtype, otherwise for each block the dtype may be different (string length) sl = vaex.strings.format(x, format) return column.ColumnStringArrow(sl.bytes, sl.indices, sl.length, sl.offset, string_sequence=sl)
def SetPackageView(self, directoryView): """Set whether to use directory/package based view""" self.directoryView = not self.directoryView self.packageMenuItem.Check(self.directoryView) self.packageViewTool.SetValue(self.directoryView) if self.loader: self.SetModel(self.loader) self.RecordHistory()
def function[SetPackageView, parameter[self, directoryView]]: constant[Set whether to use directory/package based view] name[self].directoryView assign[=] <ast.UnaryOp object at 0x7da20c6e77f0> call[name[self].packageMenuItem.Check, parameter[name[self].directoryView]] call[name[self].packageViewTool.SetValue, parameter[name[self].directoryView]] if name[self].loader begin[:] call[name[self].SetModel, parameter[name[self].loader]] call[name[self].RecordHistory, parameter[]]
keyword[def] identifier[SetPackageView] ( identifier[self] , identifier[directoryView] ): literal[string] identifier[self] . identifier[directoryView] = keyword[not] identifier[self] . identifier[directoryView] identifier[self] . identifier[packageMenuItem] . identifier[Check] ( identifier[self] . identifier[directoryView] ) identifier[self] . identifier[packageViewTool] . identifier[SetValue] ( identifier[self] . identifier[directoryView] ) keyword[if] identifier[self] . identifier[loader] : identifier[self] . identifier[SetModel] ( identifier[self] . identifier[loader] ) identifier[self] . identifier[RecordHistory] ()
def SetPackageView(self, directoryView): """Set whether to use directory/package based view""" self.directoryView = not self.directoryView self.packageMenuItem.Check(self.directoryView) self.packageViewTool.SetValue(self.directoryView) if self.loader: self.SetModel(self.loader) # depends on [control=['if'], data=[]] self.RecordHistory()
def buffer_read(self, frames=-1, dtype=None): """Read from the file and return data as buffer object. Reads the given number of `frames` in the given data format starting at the current read/write position. This advances the read/write position by the same number of frames. By default, all frames from the current read/write position to the end of the file are returned. Use :meth:`.seek` to move the current read/write position. Parameters ---------- frames : int, optional The number of frames to read. If `frames < 0`, the whole rest of the file is read. dtype : {'float64', 'float32', 'int32', 'int16'} Audio data will be converted to the given data type. Returns ------- buffer A buffer containing the read data. See Also -------- buffer_read_into, .read, buffer_write """ frames = self._check_frames(frames, fill_value=None) ctype = self._check_dtype(dtype) cdata = _ffi.new(ctype + '[]', frames * self.channels) read_frames = self._cdata_io('read', cdata, ctype, frames) assert read_frames == frames return _ffi.buffer(cdata)
def function[buffer_read, parameter[self, frames, dtype]]: constant[Read from the file and return data as buffer object. Reads the given number of `frames` in the given data format starting at the current read/write position. This advances the read/write position by the same number of frames. By default, all frames from the current read/write position to the end of the file are returned. Use :meth:`.seek` to move the current read/write position. Parameters ---------- frames : int, optional The number of frames to read. If `frames < 0`, the whole rest of the file is read. dtype : {'float64', 'float32', 'int32', 'int16'} Audio data will be converted to the given data type. Returns ------- buffer A buffer containing the read data. See Also -------- buffer_read_into, .read, buffer_write ] variable[frames] assign[=] call[name[self]._check_frames, parameter[name[frames]]] variable[ctype] assign[=] call[name[self]._check_dtype, parameter[name[dtype]]] variable[cdata] assign[=] call[name[_ffi].new, parameter[binary_operation[name[ctype] + constant[[]]], binary_operation[name[frames] * name[self].channels]]] variable[read_frames] assign[=] call[name[self]._cdata_io, parameter[constant[read], name[cdata], name[ctype], name[frames]]] assert[compare[name[read_frames] equal[==] name[frames]]] return[call[name[_ffi].buffer, parameter[name[cdata]]]]
keyword[def] identifier[buffer_read] ( identifier[self] , identifier[frames] =- literal[int] , identifier[dtype] = keyword[None] ): literal[string] identifier[frames] = identifier[self] . identifier[_check_frames] ( identifier[frames] , identifier[fill_value] = keyword[None] ) identifier[ctype] = identifier[self] . identifier[_check_dtype] ( identifier[dtype] ) identifier[cdata] = identifier[_ffi] . identifier[new] ( identifier[ctype] + literal[string] , identifier[frames] * identifier[self] . identifier[channels] ) identifier[read_frames] = identifier[self] . identifier[_cdata_io] ( literal[string] , identifier[cdata] , identifier[ctype] , identifier[frames] ) keyword[assert] identifier[read_frames] == identifier[frames] keyword[return] identifier[_ffi] . identifier[buffer] ( identifier[cdata] )
def buffer_read(self, frames=-1, dtype=None): """Read from the file and return data as buffer object. Reads the given number of `frames` in the given data format starting at the current read/write position. This advances the read/write position by the same number of frames. By default, all frames from the current read/write position to the end of the file are returned. Use :meth:`.seek` to move the current read/write position. Parameters ---------- frames : int, optional The number of frames to read. If `frames < 0`, the whole rest of the file is read. dtype : {'float64', 'float32', 'int32', 'int16'} Audio data will be converted to the given data type. Returns ------- buffer A buffer containing the read data. See Also -------- buffer_read_into, .read, buffer_write """ frames = self._check_frames(frames, fill_value=None) ctype = self._check_dtype(dtype) cdata = _ffi.new(ctype + '[]', frames * self.channels) read_frames = self._cdata_io('read', cdata, ctype, frames) assert read_frames == frames return _ffi.buffer(cdata)
def delete_job(job_id, deployment_name, token_manager=None, app_url=defaults.APP_URL): """ delete a job with a specific job id """ headers = token_manager.get_access_token_headers() data_url = get_data_url_for_job(job_id, deployment_name, token_manager=token_manager, app_url=app_url) url = '%s/api/v1/jobs/%s' % (data_url, job_id) response = requests.delete(url, headers=headers) if response.status_code != 200: raise JutException('Error %s: %s' % (response.status_code, response.text))
def function[delete_job, parameter[job_id, deployment_name, token_manager, app_url]]: constant[ delete a job with a specific job id ] variable[headers] assign[=] call[name[token_manager].get_access_token_headers, parameter[]] variable[data_url] assign[=] call[name[get_data_url_for_job], parameter[name[job_id], name[deployment_name]]] variable[url] assign[=] binary_operation[constant[%s/api/v1/jobs/%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b13cf430>, <ast.Name object at 0x7da1b13cfe20>]]] variable[response] assign[=] call[name[requests].delete, parameter[name[url]]] if compare[name[response].status_code not_equal[!=] constant[200]] begin[:] <ast.Raise object at 0x7da1b13cdde0>
keyword[def] identifier[delete_job] ( identifier[job_id] , identifier[deployment_name] , identifier[token_manager] = keyword[None] , identifier[app_url] = identifier[defaults] . identifier[APP_URL] ): literal[string] identifier[headers] = identifier[token_manager] . identifier[get_access_token_headers] () identifier[data_url] = identifier[get_data_url_for_job] ( identifier[job_id] , identifier[deployment_name] , identifier[token_manager] = identifier[token_manager] , identifier[app_url] = identifier[app_url] ) identifier[url] = literal[string] %( identifier[data_url] , identifier[job_id] ) identifier[response] = identifier[requests] . identifier[delete] ( identifier[url] , identifier[headers] = identifier[headers] ) keyword[if] identifier[response] . identifier[status_code] != literal[int] : keyword[raise] identifier[JutException] ( literal[string] %( identifier[response] . identifier[status_code] , identifier[response] . identifier[text] ))
def delete_job(job_id, deployment_name, token_manager=None, app_url=defaults.APP_URL): """ delete a job with a specific job id """ headers = token_manager.get_access_token_headers() data_url = get_data_url_for_job(job_id, deployment_name, token_manager=token_manager, app_url=app_url) url = '%s/api/v1/jobs/%s' % (data_url, job_id) response = requests.delete(url, headers=headers) if response.status_code != 200: raise JutException('Error %s: %s' % (response.status_code, response.text)) # depends on [control=['if'], data=[]]