code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def start_slaves(slave_dir, exe_rel_path, pst_rel_path, num_slaves=None,
                 slave_root="..", port=4004, rel_path=None, local=True,
                 cleanup=True, master_dir=None, verbose=False,
                 silent_master=False):
    """Deprecated: start a group of pest(++) slaves on the local machine.

    This helper has moved to ``pyemu.os_utils.start_slaves``; this wrapper
    only emits a ``PyemuWarning`` and forwards every argument unchanged.

    Parameters
    ----------
    slave_dir : str
        path to a complete set of input files
    exe_rel_path : str
        relative path to the pest(++) executable from within the slave_dir
    pst_rel_path : str
        relative path to the pst file from within the slave_dir
    num_slaves : int
        number of slaves to start; defaults to the number of cores
    slave_root : str
        root directory in which the new slave directories are made
    port : int
        TCP port the master listens on
    rel_path : str
        relative path within slave_dir from which pest(++) should be run;
        defaults to the uppermost level of the slave dir
    local : bool
        use "localhost" instead of the hostname on the slave command line
    cleanup : bool
        remove slave directories once the processes exit
    master_dir : str
        directory for the master instance; removed first if it already
        exists. If None, no master instance is started
    verbose : bool
        echo useful information to stdout
    silent_master : bool
        suppress master console output

    Note
    ----
    if all slaves (and optionally the master) exit gracefully, the slave
    dirs are removed unless cleanup is False

    Example
    -------
    ``>>>import pyemu``

    start 10 slaves using the directory "template" as the base case and
    also start a master instance in a directory "master".

    ``>>>pyemu.helpers.start_slaves("template","pestpp","pest.pst",10,master_dir="master")``

    """
    warnings.warn("start_slaves has moved to pyemu.os_utils", PyemuWarning)
    # Collect the forwarded arguments once, then delegate wholesale.
    forwarded = dict(slave_dir=slave_dir, exe_rel_path=exe_rel_path,
                     pst_rel_path=pst_rel_path, num_slaves=num_slaves,
                     slave_root=slave_root, port=port, rel_path=rel_path,
                     local=local, cleanup=cleanup, master_dir=master_dir,
                     verbose=verbose, silent_master=silent_master)
    pyemu.os_utils.start_slaves(**forwarded)
def function[start_slaves, parameter[slave_dir, exe_rel_path, pst_rel_path, num_slaves, slave_root, port, rel_path, local, cleanup, master_dir, verbose, silent_master]]: constant[ start a group of pest(++) slaves on the local machine Parameters ---------- slave_dir : str the path to a complete set of input files exe_rel_path : str the relative path to the pest(++) executable from within the slave_dir pst_rel_path : str the relative path to the pst file from within the slave_dir num_slaves : int number of slaves to start. defaults to number of cores slave_root : str the root to make the new slave directories in rel_path: str the relative path to where pest(++) should be run from within the slave_dir, defaults to the uppermost level of the slave dir local: bool flag for using "localhost" instead of hostname on slave command line cleanup: bool flag to remove slave directories once processes exit master_dir: str name of directory for master instance. If master_dir exists, then it will be removed. If master_dir is None, no master instance will be started verbose : bool flag to echo useful information to stdout Note ---- if all slaves (and optionally master) exit gracefully, then the slave dirs will be removed unless cleanup is false Example ------- ``>>>import pyemu`` start 10 slaves using the directory "template" as the base case and also start a master instance in a directory "master". ``>>>pyemu.helpers.start_slaves("template","pestpp","pest.pst",10,master_dir="master")`` ] call[name[warnings].warn, parameter[constant[start_slaves has moved to pyemu.os_utils], name[PyemuWarning]]] call[name[pyemu].os_utils.start_slaves, parameter[]]
keyword[def] identifier[start_slaves] ( identifier[slave_dir] , identifier[exe_rel_path] , identifier[pst_rel_path] , identifier[num_slaves] = keyword[None] , identifier[slave_root] = literal[string] , identifier[port] = literal[int] , identifier[rel_path] = keyword[None] , identifier[local] = keyword[True] , identifier[cleanup] = keyword[True] , identifier[master_dir] = keyword[None] , identifier[verbose] = keyword[False] , identifier[silent_master] = keyword[False] ): literal[string] identifier[warnings] . identifier[warn] ( literal[string] , identifier[PyemuWarning] ) identifier[pyemu] . identifier[os_utils] . identifier[start_slaves] ( identifier[slave_dir] = identifier[slave_dir] , identifier[exe_rel_path] = identifier[exe_rel_path] , identifier[pst_rel_path] = identifier[pst_rel_path] , identifier[num_slaves] = identifier[num_slaves] , identifier[slave_root] = identifier[slave_root] , identifier[port] = identifier[port] , identifier[rel_path] = identifier[rel_path] , identifier[local] = identifier[local] , identifier[cleanup] = identifier[cleanup] , identifier[master_dir] = identifier[master_dir] , identifier[verbose] = identifier[verbose] , identifier[silent_master] = identifier[silent_master] )
def start_slaves(slave_dir, exe_rel_path, pst_rel_path, num_slaves=None, slave_root='..', port=4004, rel_path=None, local=True, cleanup=True, master_dir=None, verbose=False, silent_master=False): """ start a group of pest(++) slaves on the local machine Parameters ---------- slave_dir : str the path to a complete set of input files exe_rel_path : str the relative path to the pest(++) executable from within the slave_dir pst_rel_path : str the relative path to the pst file from within the slave_dir num_slaves : int number of slaves to start. defaults to number of cores slave_root : str the root to make the new slave directories in rel_path: str the relative path to where pest(++) should be run from within the slave_dir, defaults to the uppermost level of the slave dir local: bool flag for using "localhost" instead of hostname on slave command line cleanup: bool flag to remove slave directories once processes exit master_dir: str name of directory for master instance. If master_dir exists, then it will be removed. If master_dir is None, no master instance will be started verbose : bool flag to echo useful information to stdout Note ---- if all slaves (and optionally master) exit gracefully, then the slave dirs will be removed unless cleanup is false Example ------- ``>>>import pyemu`` start 10 slaves using the directory "template" as the base case and also start a master instance in a directory "master". ``>>>pyemu.helpers.start_slaves("template","pestpp","pest.pst",10,master_dir="master")`` """ warnings.warn('start_slaves has moved to pyemu.os_utils', PyemuWarning) pyemu.os_utils.start_slaves(slave_dir=slave_dir, exe_rel_path=exe_rel_path, pst_rel_path=pst_rel_path, num_slaves=num_slaves, slave_root=slave_root, port=port, rel_path=rel_path, local=local, cleanup=cleanup, master_dir=master_dir, verbose=verbose, silent_master=silent_master)
def get_events(self, from_=None, to=None):
    """Query a slice of the events.

    Events are always returned in the order they were added.

    Parameters:
    from_ -- if not None, return only events added after the event with
             id `from_`. If None, return from the start of history.
    to    -- if not None, return only events added before, and
             including, the event with event id `to`. If None, return
             up to, and including, the last added event.

    returns -- an iterable of (event id, eventdata) tuples.

    raises -- EventStore.EventKeyDoesNotExistError if `from_` or `to`
              is not a known event id; EventOrderError if `from_` was
              added after `to`.
    """
    # Explicit `is not None` checks so that falsy-but-valid event ids
    # (e.g. 0 or "") are not silently treated as "unbounded".
    if from_ is not None and (from_ not in self.keys or
                              from_ not in self.events):
        raise EventStore.EventKeyDoesNotExistError(
            "Could not find the from_ key: {0}".format(from_))
    if to is not None and (to not in self.keys or to not in self.events):
        # Bug fix: this message previously (and wrongly) named `from_`
        # as the missing key; the missing key here is `to`.
        raise EventStore.EventKeyDoesNotExistError(
            "Could not find the to key: {0}".format(to))

    # +1 here because we have already seen the event we are asking for:
    # `from_` itself is excluded, `to` is included (hence its own +1).
    fromindex = self.keys.index(from_) + 1 if from_ is not None else 0
    toindex = self.keys.index(to) + 1 if to is not None else len(self.events)

    if fromindex > toindex:
        msg = ("'From' index came after 'To'."
               " Keys: ({0}, {1})"
               " Indices: ({2}, {3})").format(from_, to, fromindex,
                                              toindex)
        raise EventOrderError(msg)

    # Lazily yield (key, data) pairs in insertion order.
    return ((key, self.events[key]) for key in
            self.keys[fromindex:toindex])
def function[get_events, parameter[self, from_, to]]: constant[Query a slice of the events. Events are always returned in the order the were added. Parameters: from_ -- if not None, return only events added after the event with id `from_`. If None, return from the start of history. to -- if not None, return only events added before, and including, the event with event id `to`. If None, return up to, and including, the last added event. returns -- an iterable of (event id, eventdata) tuples. ] if <ast.BoolOp object at 0x7da1b25ee3e0> begin[:] <ast.Raise object at 0x7da1b25ef7c0> if <ast.BoolOp object at 0x7da1b25ef4c0> begin[:] <ast.Raise object at 0x7da1b25edd50> variable[fromindex] assign[=] <ast.IfExp object at 0x7da1b25ee6b0> variable[toindex] assign[=] <ast.IfExp object at 0x7da1b25ec0d0> if compare[name[fromindex] greater[>] name[toindex]] begin[:] variable[msg] assign[=] call[constant['From' index came after 'To'. Keys: ({0}, {1}) Indices: ({2}, {3})].format, parameter[name[from_], name[to], name[fromindex], name[toindex]]] <ast.Raise object at 0x7da1b2487b80> return[<ast.GeneratorExp object at 0x7da1b2486890>]
keyword[def] identifier[get_events] ( identifier[self] , identifier[from_] = keyword[None] , identifier[to] = keyword[None] ): literal[string] keyword[if] identifier[from_] keyword[and] ( identifier[from_] keyword[not] keyword[in] identifier[self] . identifier[keys] keyword[or] identifier[from_] keyword[not] keyword[in] identifier[self] . identifier[events] ): keyword[raise] identifier[EventStore] . identifier[EventKeyDoesNotExistError] ( literal[string] . identifier[format] ( identifier[from_] )) keyword[if] identifier[to] keyword[and] ( identifier[to] keyword[not] keyword[in] identifier[self] . identifier[keys] keyword[or] identifier[to] keyword[not] keyword[in] identifier[self] . identifier[events] ): keyword[raise] identifier[EventStore] . identifier[EventKeyDoesNotExistError] ( literal[string] . identifier[format] ( identifier[to] )) identifier[fromindex] = identifier[self] . identifier[keys] . identifier[index] ( identifier[from_] )+ literal[int] keyword[if] identifier[from_] keyword[else] literal[int] identifier[toindex] = identifier[self] . identifier[keys] . identifier[index] ( identifier[to] )+ literal[int] keyword[if] identifier[to] keyword[else] identifier[len] ( identifier[self] . identifier[events] ) keyword[if] identifier[fromindex] > identifier[toindex] : identifier[msg] =( literal[string] literal[string] literal[string] ). identifier[format] ( identifier[from_] , identifier[to] , identifier[fromindex] , identifier[toindex] ) keyword[raise] identifier[EventOrderError] ( identifier[msg] ) keyword[return] (( identifier[key] , identifier[self] . identifier[events] [ identifier[key] ]) keyword[for] identifier[key] keyword[in] identifier[self] . identifier[keys] [ identifier[fromindex] : identifier[toindex] ])
def get_events(self, from_=None, to=None): """Query a slice of the events. Events are always returned in the order the were added. Parameters: from_ -- if not None, return only events added after the event with id `from_`. If None, return from the start of history. to -- if not None, return only events added before, and including, the event with event id `to`. If None, return up to, and including, the last added event. returns -- an iterable of (event id, eventdata) tuples. """ if from_ and (from_ not in self.keys or from_ not in self.events): raise EventStore.EventKeyDoesNotExistError('Could not find the from_ key: {0}'.format(from_)) # depends on [control=['if'], data=[]] if to and (to not in self.keys or to not in self.events): raise EventStore.EventKeyDoesNotExistError('Could not find the from_ key: {0}'.format(to)) # depends on [control=['if'], data=[]] # +1 here because we have already seen the event we are asking for fromindex = self.keys.index(from_) + 1 if from_ else 0 toindex = self.keys.index(to) + 1 if to else len(self.events) if fromindex > toindex: msg = "'From' index came after 'To'. Keys: ({0}, {1}) Indices: ({2}, {3})".format(from_, to, fromindex, toindex) raise EventOrderError(msg) # depends on [control=['if'], data=['fromindex', 'toindex']] return ((key, self.events[key]) for key in self.keys[fromindex:toindex])
def saveget(self, con):
    """save, return old value. todo: make the expire() atomic with a redis script"""
    key, packed = self.kv()
    # GETSET atomically stores the new value and hands back the previous one.
    previous = con.getset(key, packed)
    ttl = self.TTL
    if ttl is not None:
        con.expire(key, ttl)
    if previous is None:
        return None
    return msgpack.loads(previous)
def function[saveget, parameter[self, con]]: constant[save, return old value. todo: make the expire() atomic with a redis script] <ast.Tuple object at 0x7da204564d00> assign[=] call[name[self].kv, parameter[]] variable[oldv] assign[=] call[name[con].getset, parameter[name[k], name[v]]] if compare[name[self].TTL is_not constant[None]] begin[:] call[name[con].expire, parameter[name[k], name[self].TTL]] return[<ast.IfExp object at 0x7da204564a00>]
keyword[def] identifier[saveget] ( identifier[self] , identifier[con] ): literal[string] identifier[k] , identifier[v] = identifier[self] . identifier[kv] () identifier[oldv] = identifier[con] . identifier[getset] ( identifier[k] , identifier[v] ) keyword[if] identifier[self] . identifier[TTL] keyword[is] keyword[not] keyword[None] : identifier[con] . identifier[expire] ( identifier[k] , identifier[self] . identifier[TTL] ) keyword[return] keyword[None] keyword[if] identifier[oldv] keyword[is] keyword[None] keyword[else] identifier[msgpack] . identifier[loads] ( identifier[oldv] )
def saveget(self, con): """save, return old value. todo: make the expire() atomic with a redis script""" (k, v) = self.kv() oldv = con.getset(k, v) if self.TTL is not None: con.expire(k, self.TTL) # depends on [control=['if'], data=[]] return None if oldv is None else msgpack.loads(oldv)
def build_extension(self, ext):
    """build clrmagic.dll using csc or mcs"""
    # Pick the CLR compiler for the platform: Microsoft's csc on Windows,
    # Mono's mcs everywhere else.
    if sys.platform == "win32":
        compiler = "C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe"
    else:
        compiler = "mcs"
    command = "{0} /target:library clrmagic.cs".format(compiler)
    check_call(command, shell=True)
def function[build_extension, parameter[self, ext]]: constant[ build clrmagic.dll using csc or mcs ] if compare[name[sys].platform equal[==] constant[win32]] begin[:] variable[_clr_compiler] assign[=] constant[C:\Windows\Microsoft.NET\Framework\v4.0.30319\csc.exe] variable[cmd] assign[=] list[[<ast.Name object at 0x7da1b0bd4190>, <ast.Constant object at 0x7da1b0bd4c40>, <ast.Constant object at 0x7da1b0bd5bd0>]] call[name[check_call], parameter[call[constant[ ].join, parameter[name[cmd]]]]]
keyword[def] identifier[build_extension] ( identifier[self] , identifier[ext] ): literal[string] keyword[if] identifier[sys] . identifier[platform] == literal[string] : identifier[_clr_compiler] = literal[string] keyword[else] : identifier[_clr_compiler] = literal[string] identifier[cmd] =[ identifier[_clr_compiler] , literal[string] , literal[string] ] identifier[check_call] ( literal[string] . identifier[join] ( identifier[cmd] ), identifier[shell] = keyword[True] )
def build_extension(self, ext): """ build clrmagic.dll using csc or mcs """ if sys.platform == 'win32': _clr_compiler = 'C:\\Windows\\Microsoft.NET\\Framework\\v4.0.30319\\csc.exe' # depends on [control=['if'], data=[]] else: _clr_compiler = 'mcs' cmd = [_clr_compiler, '/target:library', 'clrmagic.cs'] check_call(' '.join(cmd), shell=True)
def create_stemmer(self, isDev=False):
    """ Returns Stemmer instance """
    # Wrap the base dictionary-backed stemmer in a result cache so
    # repeated words are only stemmed once.
    wordDictionary = ArrayDictionary(self.get_words(isDev))
    baseStemmer = Stemmer(wordDictionary)
    return CachedStemmer(ArrayCache(), baseStemmer)
def function[create_stemmer, parameter[self, isDev]]: constant[ Returns Stemmer instance ] variable[words] assign[=] call[name[self].get_words, parameter[name[isDev]]] variable[dictionary] assign[=] call[name[ArrayDictionary], parameter[name[words]]] variable[stemmer] assign[=] call[name[Stemmer], parameter[name[dictionary]]] variable[resultCache] assign[=] call[name[ArrayCache], parameter[]] variable[cachedStemmer] assign[=] call[name[CachedStemmer], parameter[name[resultCache], name[stemmer]]] return[name[cachedStemmer]]
keyword[def] identifier[create_stemmer] ( identifier[self] , identifier[isDev] = keyword[False] ): literal[string] identifier[words] = identifier[self] . identifier[get_words] ( identifier[isDev] ) identifier[dictionary] = identifier[ArrayDictionary] ( identifier[words] ) identifier[stemmer] = identifier[Stemmer] ( identifier[dictionary] ) identifier[resultCache] = identifier[ArrayCache] () identifier[cachedStemmer] = identifier[CachedStemmer] ( identifier[resultCache] , identifier[stemmer] ) keyword[return] identifier[cachedStemmer]
def create_stemmer(self, isDev=False): """ Returns Stemmer instance """ words = self.get_words(isDev) dictionary = ArrayDictionary(words) stemmer = Stemmer(dictionary) resultCache = ArrayCache() cachedStemmer = CachedStemmer(resultCache, stemmer) return cachedStemmer
def display_fitsfile(self, chname, fitspath, dowait):
    """Load (``fitspath``) into channel (``chname``).

    (The parameter ``dowait`` is currently ignored.)
    """
    # TEMP: dowait ignored
    viewer = self.fv
    # Hand the open off to the GUI thread via gui_do.
    viewer.gui_do(viewer.open_uris, [fitspath], chname=chname)
    return 0
def function[display_fitsfile, parameter[self, chname, fitspath, dowait]]: constant[Load (``fitspath``) into channel (``chname``). (The parameter ``dowait`` is currently ignored.) ] call[name[self].fv.gui_do, parameter[name[self].fv.open_uris, list[[<ast.Name object at 0x7da207f98520>]]]] return[constant[0]]
keyword[def] identifier[display_fitsfile] ( identifier[self] , identifier[chname] , identifier[fitspath] , identifier[dowait] ): literal[string] identifier[self] . identifier[fv] . identifier[gui_do] ( identifier[self] . identifier[fv] . identifier[open_uris] ,[ identifier[fitspath] ], identifier[chname] = identifier[chname] ) keyword[return] literal[int]
def display_fitsfile(self, chname, fitspath, dowait): """Load (``fitspath``) into channel (``chname``). (The parameter ``dowait`` is currently ignored.) """ # TEMP: dowait ignored self.fv.gui_do(self.fv.open_uris, [fitspath], chname=chname) return 0
def create_datastore(self, schema=None, primary_key=None, delete_first=0, path=None):
    # type: (Optional[List[Dict]], Optional[str], int, Optional[str]) -> None
    """For tabular data, create a resource in the HDX datastore which enables data preview in HDX. If no schema is provided all fields are assumed to be text. If path is not supplied, the file is first downloaded from HDX.

    Args:
        schema (List[Dict]): List of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}. Defaults to None.
        primary_key (Optional[str]): Primary key of schema. Defaults to None.
        delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.
        path (Optional[str]): Local path to file that was uploaded. Defaults to None.

    Returns:
        None
    """
    # Optionally clear out any pre-existing datastore before recreating it.
    if delete_first == 0:
        pass
    elif delete_first == 1:
        self.delete_datastore()
    elif delete_first == 2:
        # Mode 2: only delete when there is no primary key (upserts with a
        # primary key can update an existing datastore in place).
        if primary_key is None:
            self.delete_datastore()
    else:
        raise HDXError('delete_first must be 0, 1 or 2! (0 = No, 1 = Yes, 2 = Delete if no primary key)')
    if path is None:
        # Download the resource
        url, path = self.download()
        # We created the local file, so we are responsible for removing it.
        delete_after_download = True
    else:
        # Caller supplied the file; use its path as the "url" for logging
        # and leave the file in place afterwards.
        url = path
        delete_after_download = False

    def convert_to_text(extended_rows):
        # Post-parse hook: coerce every cell to str, since the datastore
        # schema below declares all auto-detected fields as 'text'.
        for number, headers, row in extended_rows:
            for i, val in enumerate(row):
                row[i] = str(val)
            yield (number, headers, row)

    with Download(full_agent=self.configuration.get_user_agent()) as downloader:
        try:
            stream = downloader.get_tabular_stream(path, headers=1, post_parse=[convert_to_text], bytes_sample_size=1000000)
            # Tracks whether the header row contained a None (unnamed)
            # column, which must be stripped from each record before upsert.
            nonefieldname = False
            if schema is None:
                # No schema given: build one from the header row, typing
                # every named column as text.
                schema = list()
                for fieldname in stream.headers:
                    if fieldname is not None:
                        schema.append({'id': fieldname, 'type': 'text'})
                    else:
                        nonefieldname = True
            data = {'resource_id': self.data['id'], 'force': True, 'fields': schema, 'primary_key': primary_key}
            self._write_to_hdx('datastore_create', data, 'resource_id')
            # With a primary key, use upsert so re-runs update rows instead
            # of duplicating them; otherwise plain insert.
            if primary_key is None:
                method = 'insert'
            else:
                method = 'upsert'
            logger.debug('Uploading data from %s to datastore' % url)
            offset = 0
            chunksize = 100
            # Upload the rows in chunks of `chunksize` keyed records.
            rowset = stream.read(keyed=True, limit=chunksize)
            while len(rowset) != 0:
                if nonefieldname:
                    # Drop cells from the unnamed column in each record.
                    for row in rowset:
                        del row[None]
                data = {'resource_id': self.data['id'], 'force': True, 'method': method, 'records': rowset}
                self._write_to_hdx('datastore_upsert', data, 'resource_id')
                rowset = stream.read(keyed=True, limit=chunksize)
                # NOTE(review): offset is logged before being advanced, so
                # this reports the start of the chunk just uploaded.
                logger.debug('Uploading: %s' % offset)
                offset += chunksize
        except Exception as e:
            # Wrap any failure in HDXError, chaining the original cause.
            raisefrom(HDXError, 'Upload to datastore of %s failed!' % url, e)
        finally:
            # Clean up the temporary download if we created it.
            if delete_after_download:
                remove(path)
def function[create_datastore, parameter[self, schema, primary_key, delete_first, path]]: constant[For tabular data, create a resource in the HDX datastore which enables data preview in HDX. If no schema is provided all fields are assumed to be text. If path is not supplied, the file is first downloaded from HDX. Args: schema (List[Dict]): List of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}. Defaults to None. primary_key (Optional[str]): Primary key of schema. Defaults to None. delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0. path (Optional[str]): Local path to file that was uploaded. Defaults to None. Returns: None ] if compare[name[delete_first] equal[==] constant[0]] begin[:] pass if compare[name[path] is constant[None]] begin[:] <ast.Tuple object at 0x7da20e955ab0> assign[=] call[name[self].download, parameter[]] variable[delete_after_download] assign[=] constant[True] def function[convert_to_text, parameter[extended_rows]]: for taget[tuple[[<ast.Name object at 0x7da20e954250>, <ast.Name object at 0x7da20e954550>, <ast.Name object at 0x7da20e956920>]]] in starred[name[extended_rows]] begin[:] for taget[tuple[[<ast.Name object at 0x7da20e954310>, <ast.Name object at 0x7da20e956950>]]] in starred[call[name[enumerate], parameter[name[row]]]] begin[:] call[name[row]][name[i]] assign[=] call[name[str], parameter[name[val]]] <ast.Yield object at 0x7da20e955930> with call[name[Download], parameter[]] begin[:] <ast.Try object at 0x7da20e957430>
keyword[def] identifier[create_datastore] ( identifier[self] , identifier[schema] = keyword[None] , identifier[primary_key] = keyword[None] , identifier[delete_first] = literal[int] , identifier[path] = keyword[None] ): literal[string] keyword[if] identifier[delete_first] == literal[int] : keyword[pass] keyword[elif] identifier[delete_first] == literal[int] : identifier[self] . identifier[delete_datastore] () keyword[elif] identifier[delete_first] == literal[int] : keyword[if] identifier[primary_key] keyword[is] keyword[None] : identifier[self] . identifier[delete_datastore] () keyword[else] : keyword[raise] identifier[HDXError] ( literal[string] ) keyword[if] identifier[path] keyword[is] keyword[None] : identifier[url] , identifier[path] = identifier[self] . identifier[download] () identifier[delete_after_download] = keyword[True] keyword[else] : identifier[url] = identifier[path] identifier[delete_after_download] = keyword[False] keyword[def] identifier[convert_to_text] ( identifier[extended_rows] ): keyword[for] identifier[number] , identifier[headers] , identifier[row] keyword[in] identifier[extended_rows] : keyword[for] identifier[i] , identifier[val] keyword[in] identifier[enumerate] ( identifier[row] ): identifier[row] [ identifier[i] ]= identifier[str] ( identifier[val] ) keyword[yield] ( identifier[number] , identifier[headers] , identifier[row] ) keyword[with] identifier[Download] ( identifier[full_agent] = identifier[self] . identifier[configuration] . identifier[get_user_agent] ()) keyword[as] identifier[downloader] : keyword[try] : identifier[stream] = identifier[downloader] . 
identifier[get_tabular_stream] ( identifier[path] , identifier[headers] = literal[int] , identifier[post_parse] =[ identifier[convert_to_text] ], identifier[bytes_sample_size] = literal[int] ) identifier[nonefieldname] = keyword[False] keyword[if] identifier[schema] keyword[is] keyword[None] : identifier[schema] = identifier[list] () keyword[for] identifier[fieldname] keyword[in] identifier[stream] . identifier[headers] : keyword[if] identifier[fieldname] keyword[is] keyword[not] keyword[None] : identifier[schema] . identifier[append] ({ literal[string] : identifier[fieldname] , literal[string] : literal[string] }) keyword[else] : identifier[nonefieldname] = keyword[True] identifier[data] ={ literal[string] : identifier[self] . identifier[data] [ literal[string] ], literal[string] : keyword[True] , literal[string] : identifier[schema] , literal[string] : identifier[primary_key] } identifier[self] . identifier[_write_to_hdx] ( literal[string] , identifier[data] , literal[string] ) keyword[if] identifier[primary_key] keyword[is] keyword[None] : identifier[method] = literal[string] keyword[else] : identifier[method] = literal[string] identifier[logger] . identifier[debug] ( literal[string] % identifier[url] ) identifier[offset] = literal[int] identifier[chunksize] = literal[int] identifier[rowset] = identifier[stream] . identifier[read] ( identifier[keyed] = keyword[True] , identifier[limit] = identifier[chunksize] ) keyword[while] identifier[len] ( identifier[rowset] )!= literal[int] : keyword[if] identifier[nonefieldname] : keyword[for] identifier[row] keyword[in] identifier[rowset] : keyword[del] identifier[row] [ keyword[None] ] identifier[data] ={ literal[string] : identifier[self] . identifier[data] [ literal[string] ], literal[string] : keyword[True] , literal[string] : identifier[method] , literal[string] : identifier[rowset] } identifier[self] . 
identifier[_write_to_hdx] ( literal[string] , identifier[data] , literal[string] ) identifier[rowset] = identifier[stream] . identifier[read] ( identifier[keyed] = keyword[True] , identifier[limit] = identifier[chunksize] ) identifier[logger] . identifier[debug] ( literal[string] % identifier[offset] ) identifier[offset] += identifier[chunksize] keyword[except] identifier[Exception] keyword[as] identifier[e] : identifier[raisefrom] ( identifier[HDXError] , literal[string] % identifier[url] , identifier[e] ) keyword[finally] : keyword[if] identifier[delete_after_download] : identifier[remove] ( identifier[path] )
def create_datastore(self, schema=None, primary_key=None, delete_first=0, path=None): # type: (Optional[List[Dict]], Optional[str], int, Optional[str]) -> None "For tabular data, create a resource in the HDX datastore which enables data preview in HDX. If no schema is provided\n all fields are assumed to be text. If path is not supplied, the file is first downloaded from HDX.\n\n Args:\n schema (List[Dict]): List of fields and types of form {'id': 'FIELD', 'type': 'TYPE'}. Defaults to None.\n primary_key (Optional[str]): Primary key of schema. Defaults to None.\n delete_first (int): Delete datastore before creation. 0 = No, 1 = Yes, 2 = If no primary key. Defaults to 0.\n path (Optional[str]): Local path to file that was uploaded. Defaults to None.\n\n Returns:\n None\n " if delete_first == 0: pass # depends on [control=['if'], data=[]] elif delete_first == 1: self.delete_datastore() # depends on [control=['if'], data=[]] elif delete_first == 2: if primary_key is None: self.delete_datastore() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise HDXError('delete_first must be 0, 1 or 2! 
(0 = No, 1 = Yes, 2 = Delete if no primary key)') if path is None: # Download the resource (url, path) = self.download() delete_after_download = True # depends on [control=['if'], data=['path']] else: url = path delete_after_download = False def convert_to_text(extended_rows): for (number, headers, row) in extended_rows: for (i, val) in enumerate(row): row[i] = str(val) # depends on [control=['for'], data=[]] yield (number, headers, row) # depends on [control=['for'], data=[]] with Download(full_agent=self.configuration.get_user_agent()) as downloader: try: stream = downloader.get_tabular_stream(path, headers=1, post_parse=[convert_to_text], bytes_sample_size=1000000) nonefieldname = False if schema is None: schema = list() for fieldname in stream.headers: if fieldname is not None: schema.append({'id': fieldname, 'type': 'text'}) # depends on [control=['if'], data=['fieldname']] else: nonefieldname = True # depends on [control=['for'], data=['fieldname']] # depends on [control=['if'], data=['schema']] data = {'resource_id': self.data['id'], 'force': True, 'fields': schema, 'primary_key': primary_key} self._write_to_hdx('datastore_create', data, 'resource_id') if primary_key is None: method = 'insert' # depends on [control=['if'], data=[]] else: method = 'upsert' logger.debug('Uploading data from %s to datastore' % url) offset = 0 chunksize = 100 rowset = stream.read(keyed=True, limit=chunksize) while len(rowset) != 0: if nonefieldname: for row in rowset: del row[None] # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]] data = {'resource_id': self.data['id'], 'force': True, 'method': method, 'records': rowset} self._write_to_hdx('datastore_upsert', data, 'resource_id') rowset = stream.read(keyed=True, limit=chunksize) logger.debug('Uploading: %s' % offset) offset += chunksize # depends on [control=['while'], data=[]] # depends on [control=['try'], data=[]] except Exception as e: raisefrom(HDXError, 'Upload to datastore of %s failed!' 
% url, e) # depends on [control=['except'], data=['e']] finally: if delete_after_download: remove(path) # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['downloader']]
def public_ip_address_create_or_update(name, resource_group, **kwargs):
    '''
    .. versionadded:: 2019.2.0

    Create or update a public IP address within a specified resource group.

    :param name: The name of the public IP address to create.

    :param resource_group: The resource group name assigned to the
        public IP address.

    CLI Example:

    .. code-block:: bash

        salt-call azurearm_network.public_ip_address_create_or_update test-ip-0 testgroup

    '''
    # Default the location to that of the resource group when the caller
    # did not pass one explicitly.
    if 'location' not in kwargs:
        rg_props = __salt__['azurearm_resource.resource_group_get'](
            resource_group, **kwargs
        )

        if 'error' in rg_props:
            log.error(
                'Unable to determine location from resource group specified.'
            )
            return False
        kwargs['location'] = rg_props['location']

    netconn = __utils__['azurearm.get_client']('network', **kwargs)

    try:
        # Build the PublicIPAddress object model from the remaining kwargs.
        pub_ip_model = __utils__['azurearm.create_object_model']('network', 'PublicIPAddress', **kwargs)
    except TypeError as exc:
        result = {'error': 'The object model could not be built. ({0})'.format(str(exc))}
        return result

    try:
        # create_or_update returns a long-running-operation poller; wait for
        # completion before reading the final resource state.
        ip = netconn.public_ip_addresses.create_or_update(
            resource_group_name=resource_group,
            public_ip_address_name=name,
            parameters=pub_ip_model
        )
        ip.wait()
        ip_result = ip.result()
        result = ip_result.as_dict()
    except CloudError as exc:
        __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs)
        result = {'error': str(exc)}
    except SerializationError as exc:
        result = {'error': 'The object model could not be parsed. ({0})'.format(str(exc))}

    return result
def function[public_ip_address_create_or_update, parameter[name, resource_group]]: constant[ .. versionadded:: 2019.2.0 Create or update a public IP address within a specified resource group. :param name: The name of the public IP address to create. :param resource_group: The resource group name assigned to the public IP address. CLI Example: .. code-block:: bash salt-call azurearm_network.public_ip_address_create_or_update test-ip-0 testgroup ] if compare[constant[location] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] variable[rg_props] assign[=] call[call[name[__salt__]][constant[azurearm_resource.resource_group_get]], parameter[name[resource_group]]] if compare[constant[error] in name[rg_props]] begin[:] call[name[log].error, parameter[constant[Unable to determine location from resource group specified.]]] return[constant[False]] call[name[kwargs]][constant[location]] assign[=] call[name[rg_props]][constant[location]] variable[netconn] assign[=] call[call[name[__utils__]][constant[azurearm.get_client]], parameter[constant[network]]] <ast.Try object at 0x7da1b2346e00> <ast.Try object at 0x7da1b2345450> return[name[result]]
keyword[def] identifier[public_ip_address_create_or_update] ( identifier[name] , identifier[resource_group] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[rg_props] = identifier[__salt__] [ literal[string] ]( identifier[resource_group] ,** identifier[kwargs] ) keyword[if] literal[string] keyword[in] identifier[rg_props] : identifier[log] . identifier[error] ( literal[string] ) keyword[return] keyword[False] identifier[kwargs] [ literal[string] ]= identifier[rg_props] [ literal[string] ] identifier[netconn] = identifier[__utils__] [ literal[string] ]( literal[string] ,** identifier[kwargs] ) keyword[try] : identifier[pub_ip_model] = identifier[__utils__] [ literal[string] ]( literal[string] , literal[string] ,** identifier[kwargs] ) keyword[except] identifier[TypeError] keyword[as] identifier[exc] : identifier[result] ={ literal[string] : literal[string] . identifier[format] ( identifier[str] ( identifier[exc] ))} keyword[return] identifier[result] keyword[try] : identifier[ip] = identifier[netconn] . identifier[public_ip_addresses] . identifier[create_or_update] ( identifier[resource_group_name] = identifier[resource_group] , identifier[public_ip_address_name] = identifier[name] , identifier[parameters] = identifier[pub_ip_model] ) identifier[ip] . identifier[wait] () identifier[ip_result] = identifier[ip] . identifier[result] () identifier[result] = identifier[ip_result] . identifier[as_dict] () keyword[except] identifier[CloudError] keyword[as] identifier[exc] : identifier[__utils__] [ literal[string] ]( literal[string] , identifier[str] ( identifier[exc] ),** identifier[kwargs] ) identifier[result] ={ literal[string] : identifier[str] ( identifier[exc] )} keyword[except] identifier[SerializationError] keyword[as] identifier[exc] : identifier[result] ={ literal[string] : literal[string] . identifier[format] ( identifier[str] ( identifier[exc] ))} keyword[return] identifier[result]
def public_ip_address_create_or_update(name, resource_group, **kwargs): """ .. versionadded:: 2019.2.0 Create or update a public IP address within a specified resource group. :param name: The name of the public IP address to create. :param resource_group: The resource group name assigned to the public IP address. CLI Example: .. code-block:: bash salt-call azurearm_network.public_ip_address_create_or_update test-ip-0 testgroup """ if 'location' not in kwargs: rg_props = __salt__['azurearm_resource.resource_group_get'](resource_group, **kwargs) if 'error' in rg_props: log.error('Unable to determine location from resource group specified.') return False # depends on [control=['if'], data=[]] kwargs['location'] = rg_props['location'] # depends on [control=['if'], data=['kwargs']] netconn = __utils__['azurearm.get_client']('network', **kwargs) try: pub_ip_model = __utils__['azurearm.create_object_model']('network', 'PublicIPAddress', **kwargs) # depends on [control=['try'], data=[]] except TypeError as exc: result = {'error': 'The object model could not be built. ({0})'.format(str(exc))} return result # depends on [control=['except'], data=['exc']] try: ip = netconn.public_ip_addresses.create_or_update(resource_group_name=resource_group, public_ip_address_name=name, parameters=pub_ip_model) ip.wait() ip_result = ip.result() result = ip_result.as_dict() # depends on [control=['try'], data=[]] except CloudError as exc: __utils__['azurearm.log_cloud_error']('network', str(exc), **kwargs) result = {'error': str(exc)} # depends on [control=['except'], data=['exc']] except SerializationError as exc: result = {'error': 'The object model could not be parsed. ({0})'.format(str(exc))} # depends on [control=['except'], data=['exc']] return result
def colors(self, value): """ Converts color strings into a color listing. """ if isinstance(value, str): # Must import here to avoid recursive import from .palettes import PALETTES if value not in PALETTES: raise YellowbrickValueError( "'{}' is not a registered color palette".format(value) ) self._colors = copy(PALETTES[value]) elif isinstance(value, list): self._colors = value else: self._colors = list(value)
def function[colors, parameter[self, value]]: constant[ Converts color strings into a color listing. ] if call[name[isinstance], parameter[name[value], name[str]]] begin[:] from relative_module[palettes] import module[PALETTES] if compare[name[value] <ast.NotIn object at 0x7da2590d7190> name[PALETTES]] begin[:] <ast.Raise object at 0x7da18c4cd2d0> name[self]._colors assign[=] call[name[copy], parameter[call[name[PALETTES]][name[value]]]]
keyword[def] identifier[colors] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[str] ): keyword[from] . identifier[palettes] keyword[import] identifier[PALETTES] keyword[if] identifier[value] keyword[not] keyword[in] identifier[PALETTES] : keyword[raise] identifier[YellowbrickValueError] ( literal[string] . identifier[format] ( identifier[value] ) ) identifier[self] . identifier[_colors] = identifier[copy] ( identifier[PALETTES] [ identifier[value] ]) keyword[elif] identifier[isinstance] ( identifier[value] , identifier[list] ): identifier[self] . identifier[_colors] = identifier[value] keyword[else] : identifier[self] . identifier[_colors] = identifier[list] ( identifier[value] )
def colors(self, value): """ Converts color strings into a color listing. """ if isinstance(value, str): # Must import here to avoid recursive import from .palettes import PALETTES if value not in PALETTES: raise YellowbrickValueError("'{}' is not a registered color palette".format(value)) # depends on [control=['if'], data=['value']] self._colors = copy(PALETTES[value]) # depends on [control=['if'], data=[]] elif isinstance(value, list): self._colors = value # depends on [control=['if'], data=[]] else: self._colors = list(value)
def onchange(self, new_value): """Called when the user changes the TextInput content. With single_line=True it fires in case of focus lost and Enter key pressed. With single_line=False it fires at each key released. Args: new_value (str): the new string content of the TextInput. """ self.disable_refresh() self.set_value(new_value) self.enable_refresh() return (new_value, )
def function[onchange, parameter[self, new_value]]: constant[Called when the user changes the TextInput content. With single_line=True it fires in case of focus lost and Enter key pressed. With single_line=False it fires at each key released. Args: new_value (str): the new string content of the TextInput. ] call[name[self].disable_refresh, parameter[]] call[name[self].set_value, parameter[name[new_value]]] call[name[self].enable_refresh, parameter[]] return[tuple[[<ast.Name object at 0x7da18dc9ad40>]]]
keyword[def] identifier[onchange] ( identifier[self] , identifier[new_value] ): literal[string] identifier[self] . identifier[disable_refresh] () identifier[self] . identifier[set_value] ( identifier[new_value] ) identifier[self] . identifier[enable_refresh] () keyword[return] ( identifier[new_value] ,)
def onchange(self, new_value): """Called when the user changes the TextInput content. With single_line=True it fires in case of focus lost and Enter key pressed. With single_line=False it fires at each key released. Args: new_value (str): the new string content of the TextInput. """ self.disable_refresh() self.set_value(new_value) self.enable_refresh() return (new_value,)
def print_overwrite(*args, **kwargs): """ Move to the beginning of the current line, and print some text. Arguments: Same as `print()`. Keyword Arguments: Same as `print()`, except `end` defaults to '' (empty str), and these: delay : Time in seconds between character writes. """ kwargs.setdefault('file', sys.stdout) kwargs.setdefault('end', '') delay = None with suppress(KeyError): delay = kwargs.pop('delay') erase_line() # Move to the beginning of the line. move_column(1, file=kwargs['file']) if delay is None: print(*args, **kwargs) else: for c in kwargs.get('sep', ' ').join(str(a) for a in args): kwargs['file'].write(c) kwargs['file'].flush() sleep(delay) if kwargs['end']: kwargs['file'].write(kwargs['end']) kwargs['file'].flush()
def function[print_overwrite, parameter[]]: constant[ Move to the beginning of the current line, and print some text. Arguments: Same as `print()`. Keyword Arguments: Same as `print()`, except `end` defaults to '' (empty str), and these: delay : Time in seconds between character writes. ] call[name[kwargs].setdefault, parameter[constant[file], name[sys].stdout]] call[name[kwargs].setdefault, parameter[constant[end], constant[]]] variable[delay] assign[=] constant[None] with call[name[suppress], parameter[name[KeyError]]] begin[:] variable[delay] assign[=] call[name[kwargs].pop, parameter[constant[delay]]] call[name[erase_line], parameter[]] call[name[move_column], parameter[constant[1]]] if compare[name[delay] is constant[None]] begin[:] call[name[print], parameter[<ast.Starred object at 0x7da1b03da7a0>]] call[call[name[kwargs]][constant[file]].flush, parameter[]]
keyword[def] identifier[print_overwrite] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[kwargs] . identifier[setdefault] ( literal[string] , identifier[sys] . identifier[stdout] ) identifier[kwargs] . identifier[setdefault] ( literal[string] , literal[string] ) identifier[delay] = keyword[None] keyword[with] identifier[suppress] ( identifier[KeyError] ): identifier[delay] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[erase_line] () identifier[move_column] ( literal[int] , identifier[file] = identifier[kwargs] [ literal[string] ]) keyword[if] identifier[delay] keyword[is] keyword[None] : identifier[print] (* identifier[args] ,** identifier[kwargs] ) keyword[else] : keyword[for] identifier[c] keyword[in] identifier[kwargs] . identifier[get] ( literal[string] , literal[string] ). identifier[join] ( identifier[str] ( identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[args] ): identifier[kwargs] [ literal[string] ]. identifier[write] ( identifier[c] ) identifier[kwargs] [ literal[string] ]. identifier[flush] () identifier[sleep] ( identifier[delay] ) keyword[if] identifier[kwargs] [ literal[string] ]: identifier[kwargs] [ literal[string] ]. identifier[write] ( identifier[kwargs] [ literal[string] ]) identifier[kwargs] [ literal[string] ]. identifier[flush] ()
def print_overwrite(*args, **kwargs): """ Move to the beginning of the current line, and print some text. Arguments: Same as `print()`. Keyword Arguments: Same as `print()`, except `end` defaults to '' (empty str), and these: delay : Time in seconds between character writes. """ kwargs.setdefault('file', sys.stdout) kwargs.setdefault('end', '') delay = None with suppress(KeyError): delay = kwargs.pop('delay') # depends on [control=['with'], data=[]] erase_line() # Move to the beginning of the line. move_column(1, file=kwargs['file']) if delay is None: print(*args, **kwargs) # depends on [control=['if'], data=[]] else: for c in kwargs.get('sep', ' ').join((str(a) for a in args)): kwargs['file'].write(c) kwargs['file'].flush() sleep(delay) # depends on [control=['for'], data=['c']] if kwargs['end']: kwargs['file'].write(kwargs['end']) # depends on [control=['if'], data=[]] kwargs['file'].flush()
def get_commit_message(self, commit_sha): """ Return the commit message for the current commit hash, replace #<PRID> with GH-<PRID> """ cmd = ["git", "show", "-s", "--format=%B", commit_sha] output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) message = output.strip().decode("utf-8") if self.config["fix_commit_msg"]: return message.replace("#", "GH-") else: return message
def function[get_commit_message, parameter[self, commit_sha]]: constant[ Return the commit message for the current commit hash, replace #<PRID> with GH-<PRID> ] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da1b2309900>, <ast.Constant object at 0x7da1b230a7a0>, <ast.Constant object at 0x7da1b23094b0>, <ast.Constant object at 0x7da1b23097e0>, <ast.Name object at 0x7da1b2309810>]] variable[output] assign[=] call[name[subprocess].check_output, parameter[name[cmd]]] variable[message] assign[=] call[call[name[output].strip, parameter[]].decode, parameter[constant[utf-8]]] if call[name[self].config][constant[fix_commit_msg]] begin[:] return[call[name[message].replace, parameter[constant[#], constant[GH-]]]]
keyword[def] identifier[get_commit_message] ( identifier[self] , identifier[commit_sha] ): literal[string] identifier[cmd] =[ literal[string] , literal[string] , literal[string] , literal[string] , identifier[commit_sha] ] identifier[output] = identifier[subprocess] . identifier[check_output] ( identifier[cmd] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] ) identifier[message] = identifier[output] . identifier[strip] (). identifier[decode] ( literal[string] ) keyword[if] identifier[self] . identifier[config] [ literal[string] ]: keyword[return] identifier[message] . identifier[replace] ( literal[string] , literal[string] ) keyword[else] : keyword[return] identifier[message]
def get_commit_message(self, commit_sha): """ Return the commit message for the current commit hash, replace #<PRID> with GH-<PRID> """ cmd = ['git', 'show', '-s', '--format=%B', commit_sha] output = subprocess.check_output(cmd, stderr=subprocess.STDOUT) message = output.strip().decode('utf-8') if self.config['fix_commit_msg']: return message.replace('#', 'GH-') # depends on [control=['if'], data=[]] else: return message
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container["ID"] name = container["Names"] image_name = container["Image"] try: image_name, image_tag = parse_reference(image_name) except (IndexError, TypeError): image_name, image_tag = None, None image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) return containers
def function[list_containers, parameter[self]]: constant[ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` ] variable[containers] assign[=] list[[]] for taget[name[container]] in starred[call[name[self]._list_podman_containers, parameter[]]] begin[:] variable[identifier] assign[=] call[name[container]][constant[ID]] variable[name] assign[=] call[name[container]][constant[Names]] variable[image_name] assign[=] call[name[container]][constant[Image]] <ast.Try object at 0x7da1b1196ef0> variable[image] assign[=] call[name[PodmanImage], parameter[name[image_name]]] variable[container] assign[=] call[name[PodmanContainer], parameter[name[image], name[identifier]]] call[name[containers].append, parameter[name[container]]] return[name[containers]]
keyword[def] identifier[list_containers] ( identifier[self] ): literal[string] identifier[containers] =[] keyword[for] identifier[container] keyword[in] identifier[self] . identifier[_list_podman_containers] (): identifier[identifier] = identifier[container] [ literal[string] ] identifier[name] = identifier[container] [ literal[string] ] identifier[image_name] = identifier[container] [ literal[string] ] keyword[try] : identifier[image_name] , identifier[image_tag] = identifier[parse_reference] ( identifier[image_name] ) keyword[except] ( identifier[IndexError] , identifier[TypeError] ): identifier[image_name] , identifier[image_tag] = keyword[None] , keyword[None] identifier[image] = identifier[PodmanImage] ( identifier[image_name] , identifier[tag] = identifier[image_tag] , identifier[identifier] = keyword[None] ) identifier[container] = identifier[PodmanContainer] ( identifier[image] , identifier[identifier] , identifier[name] = identifier[name] ) identifier[containers] . identifier[append] ( identifier[container] ) keyword[return] identifier[containers]
def list_containers(self): """ List all available podman containers. :return: collection of instances of :class:`conu.PodmanContainer` """ containers = [] for container in self._list_podman_containers(): identifier = container['ID'] name = container['Names'] image_name = container['Image'] try: (image_name, image_tag) = parse_reference(image_name) # depends on [control=['try'], data=[]] except (IndexError, TypeError): (image_name, image_tag) = (None, None) # depends on [control=['except'], data=[]] image = PodmanImage(image_name, tag=image_tag, identifier=None) container = PodmanContainer(image, identifier, name=name) containers.append(container) # depends on [control=['for'], data=['container']] return containers
def moveTo(self, newX=0, newY=0): """! \~english Move vertex of rectangles to new point (x,y) @param newX: Coordinated X value @param newY: Coordinated Y value \~chinese 移动矩形到新坐标点 (x,y) @param newX: 坐标 X @param newY: 坐标 Y """ self.x = newX self.y = newY
def function[moveTo, parameter[self, newX, newY]]: constant[! \~english Move vertex of rectangles to new point (x,y) @param newX: Coordinated X value @param newY: Coordinated Y value \~chinese 移动矩形到新坐标点 (x,y) @param newX: 坐标 X @param newY: 坐标 Y ] name[self].x assign[=] name[newX] name[self].y assign[=] name[newY]
keyword[def] identifier[moveTo] ( identifier[self] , identifier[newX] = literal[int] , identifier[newY] = literal[int] ): literal[string] identifier[self] . identifier[x] = identifier[newX] identifier[self] . identifier[y] = identifier[newY]
def moveTo(self, newX=0, newY=0): """! \\~english Move vertex of rectangles to new point (x,y) @param newX: Coordinated X value @param newY: Coordinated Y value \\~chinese 移动矩形到新坐标点 (x,y) @param newX: 坐标 X @param newY: 坐标 Y """ self.x = newX self.y = newY
def _aload16(ins): ''' Loads a 16 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. ''' output = _addr(ins.quad[2]) output.append('ld e, (hl)') output.append('inc hl') output.append('ld d, (hl)') output.append('ex de, hl') output.append('push hl') return output
def function[_aload16, parameter[ins]]: constant[ Loads a 16 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. ] variable[output] assign[=] call[name[_addr], parameter[call[name[ins].quad][constant[2]]]] call[name[output].append, parameter[constant[ld e, (hl)]]] call[name[output].append, parameter[constant[inc hl]]] call[name[output].append, parameter[constant[ld d, (hl)]]] call[name[output].append, parameter[constant[ex de, hl]]] call[name[output].append, parameter[constant[push hl]]] return[name[output]]
keyword[def] identifier[_aload16] ( identifier[ins] ): literal[string] identifier[output] = identifier[_addr] ( identifier[ins] . identifier[quad] [ literal[int] ]) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) keyword[return] identifier[output]
def _aload16(ins): """ Loads a 16 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. """ output = _addr(ins.quad[2]) output.append('ld e, (hl)') output.append('inc hl') output.append('ld d, (hl)') output.append('ex de, hl') output.append('push hl') return output
def app(*args, **kwargs): """Create a vaex app, the QApplication mainloop must be started. In ipython notebook/jupyter do the following: >>> import vaex.ui.main # this causes the qt api level to be set properly >>> import vaex Next cell: >>> %gui qt Next cell: >>> app = vaex.app() From now on, you can run the app along with jupyter """ import vaex.ui.main return vaex.ui.main.VaexApp()
def function[app, parameter[]]: constant[Create a vaex app, the QApplication mainloop must be started. In ipython notebook/jupyter do the following: >>> import vaex.ui.main # this causes the qt api level to be set properly >>> import vaex Next cell: >>> %gui qt Next cell: >>> app = vaex.app() From now on, you can run the app along with jupyter ] import module[vaex.ui.main] return[call[name[vaex].ui.main.VaexApp, parameter[]]]
keyword[def] identifier[app] (* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[import] identifier[vaex] . identifier[ui] . identifier[main] keyword[return] identifier[vaex] . identifier[ui] . identifier[main] . identifier[VaexApp] ()
def app(*args, **kwargs): """Create a vaex app, the QApplication mainloop must be started. In ipython notebook/jupyter do the following: >>> import vaex.ui.main # this causes the qt api level to be set properly >>> import vaex Next cell: >>> %gui qt Next cell: >>> app = vaex.app() From now on, you can run the app along with jupyter """ import vaex.ui.main return vaex.ui.main.VaexApp()
def obtain_to(filename): """ Return the digital elevation map projected to the lat lon matrix coordenates. Keyword arguments: filename -- the name of a netcdf file. """ root, _ = nc.open(filename) lat, lon = nc.getvar(root, 'lat')[0,:], nc.getvar(root, 'lon')[0,:] nc.close(root) return obtain(lat, lon)
def function[obtain_to, parameter[filename]]: constant[ Return the digital elevation map projected to the lat lon matrix coordenates. Keyword arguments: filename -- the name of a netcdf file. ] <ast.Tuple object at 0x7da18f00cbb0> assign[=] call[name[nc].open, parameter[name[filename]]] <ast.Tuple object at 0x7da18f00ed40> assign[=] tuple[[<ast.Subscript object at 0x7da18f00f580>, <ast.Subscript object at 0x7da18f00dff0>]] call[name[nc].close, parameter[name[root]]] return[call[name[obtain], parameter[name[lat], name[lon]]]]
keyword[def] identifier[obtain_to] ( identifier[filename] ): literal[string] identifier[root] , identifier[_] = identifier[nc] . identifier[open] ( identifier[filename] ) identifier[lat] , identifier[lon] = identifier[nc] . identifier[getvar] ( identifier[root] , literal[string] )[ literal[int] ,:], identifier[nc] . identifier[getvar] ( identifier[root] , literal[string] )[ literal[int] ,:] identifier[nc] . identifier[close] ( identifier[root] ) keyword[return] identifier[obtain] ( identifier[lat] , identifier[lon] )
def obtain_to(filename): """ Return the digital elevation map projected to the lat lon matrix coordenates. Keyword arguments: filename -- the name of a netcdf file. """ (root, _) = nc.open(filename) (lat, lon) = (nc.getvar(root, 'lat')[0, :], nc.getvar(root, 'lon')[0, :]) nc.close(root) return obtain(lat, lon)
def next(self, verifyPad=False): """Manually iterate through the data loaded in Instrument object. Bounds of iteration and iteration type (day/file) are set by `bounds` attribute. Note ---- If there were no previous calls to load then the first day(default)/file will be loaded. """ if self._iter_type == 'date': if self.date is not None: idx, = np.where(self._iter_list == self.date) if (len(idx) == 0): raise StopIteration('File list is empty. Nothing to be done.') elif idx[-1]+1 >= len(self._iter_list): raise StopIteration('Outside the set date boundaries.') else: idx += 1 self.load(date=self._iter_list[idx[0]], verifyPad=verifyPad) else: self.load(date=self._iter_list[0], verifyPad=verifyPad) elif self._iter_type == 'file': if self._fid is not None: first = self.files.get_index(self._iter_list[0]) last = self.files.get_index(self._iter_list[-1]) if (self._fid < first) | (self._fid+1 > last): raise StopIteration('Outside the set file boundaries.') else: self.load(fname=self._iter_list[self._fid+1-first], verifyPad=verifyPad) else: self.load(fname=self._iter_list[0], verifyPad=verifyPad)
def function[next, parameter[self, verifyPad]]: constant[Manually iterate through the data loaded in Instrument object. Bounds of iteration and iteration type (day/file) are set by `bounds` attribute. Note ---- If there were no previous calls to load then the first day(default)/file will be loaded. ] if compare[name[self]._iter_type equal[==] constant[date]] begin[:] if compare[name[self].date is_not constant[None]] begin[:] <ast.Tuple object at 0x7da20c7c8f70> assign[=] call[name[np].where, parameter[compare[name[self]._iter_list equal[==] name[self].date]]] if compare[call[name[len], parameter[name[idx]]] equal[==] constant[0]] begin[:] <ast.Raise object at 0x7da20c7cabf0>
keyword[def] identifier[next] ( identifier[self] , identifier[verifyPad] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[_iter_type] == literal[string] : keyword[if] identifier[self] . identifier[date] keyword[is] keyword[not] keyword[None] : identifier[idx] ,= identifier[np] . identifier[where] ( identifier[self] . identifier[_iter_list] == identifier[self] . identifier[date] ) keyword[if] ( identifier[len] ( identifier[idx] )== literal[int] ): keyword[raise] identifier[StopIteration] ( literal[string] ) keyword[elif] identifier[idx] [- literal[int] ]+ literal[int] >= identifier[len] ( identifier[self] . identifier[_iter_list] ): keyword[raise] identifier[StopIteration] ( literal[string] ) keyword[else] : identifier[idx] += literal[int] identifier[self] . identifier[load] ( identifier[date] = identifier[self] . identifier[_iter_list] [ identifier[idx] [ literal[int] ]], identifier[verifyPad] = identifier[verifyPad] ) keyword[else] : identifier[self] . identifier[load] ( identifier[date] = identifier[self] . identifier[_iter_list] [ literal[int] ], identifier[verifyPad] = identifier[verifyPad] ) keyword[elif] identifier[self] . identifier[_iter_type] == literal[string] : keyword[if] identifier[self] . identifier[_fid] keyword[is] keyword[not] keyword[None] : identifier[first] = identifier[self] . identifier[files] . identifier[get_index] ( identifier[self] . identifier[_iter_list] [ literal[int] ]) identifier[last] = identifier[self] . identifier[files] . identifier[get_index] ( identifier[self] . identifier[_iter_list] [- literal[int] ]) keyword[if] ( identifier[self] . identifier[_fid] < identifier[first] )|( identifier[self] . identifier[_fid] + literal[int] > identifier[last] ): keyword[raise] identifier[StopIteration] ( literal[string] ) keyword[else] : identifier[self] . identifier[load] ( identifier[fname] = identifier[self] . identifier[_iter_list] [ identifier[self] . 
identifier[_fid] + literal[int] - identifier[first] ], identifier[verifyPad] = identifier[verifyPad] ) keyword[else] : identifier[self] . identifier[load] ( identifier[fname] = identifier[self] . identifier[_iter_list] [ literal[int] ], identifier[verifyPad] = identifier[verifyPad] )
def next(self, verifyPad=False): """Manually iterate through the data loaded in Instrument object. Bounds of iteration and iteration type (day/file) are set by `bounds` attribute. Note ---- If there were no previous calls to load then the first day(default)/file will be loaded. """ if self._iter_type == 'date': if self.date is not None: (idx,) = np.where(self._iter_list == self.date) if len(idx) == 0: raise StopIteration('File list is empty. Nothing to be done.') # depends on [control=['if'], data=[]] elif idx[-1] + 1 >= len(self._iter_list): raise StopIteration('Outside the set date boundaries.') # depends on [control=['if'], data=[]] else: idx += 1 self.load(date=self._iter_list[idx[0]], verifyPad=verifyPad) # depends on [control=['if'], data=[]] else: self.load(date=self._iter_list[0], verifyPad=verifyPad) # depends on [control=['if'], data=[]] elif self._iter_type == 'file': if self._fid is not None: first = self.files.get_index(self._iter_list[0]) last = self.files.get_index(self._iter_list[-1]) if (self._fid < first) | (self._fid + 1 > last): raise StopIteration('Outside the set file boundaries.') # depends on [control=['if'], data=[]] else: self.load(fname=self._iter_list[self._fid + 1 - first], verifyPad=verifyPad) # depends on [control=['if'], data=[]] else: self.load(fname=self._iter_list[0], verifyPad=verifyPad) # depends on [control=['if'], data=[]]
def prepare_hmet(self): """ Prepare HMET data for simulation """ if self._prepare_lsm_hmet: netcdf_file_path = None hmet_ascii_output_folder = None if self.output_netcdf: netcdf_file_path = '{0}_hmet.nc'.format(self.project_manager.name) if self.hotstart_minimal_mode: netcdf_file_path = '{0}_hmet_hotstart.nc'.format(self.project_manager.name) else: hmet_ascii_output_folder = 'hmet_data_{0}to{1}' if self.hotstart_minimal_mode: hmet_ascii_output_folder += "_hotstart" self.event_manager.prepare_hmet_lsm(self.lsm_data_var_map_array, hmet_ascii_output_folder, netcdf_file_path) self.simulation_modified_input_cards += ["HMET_NETCDF", "HMET_ASCII"] else: log.info("HMET preparation skipped due to missing parameters ...")
def function[prepare_hmet, parameter[self]]: constant[ Prepare HMET data for simulation ] if name[self]._prepare_lsm_hmet begin[:] variable[netcdf_file_path] assign[=] constant[None] variable[hmet_ascii_output_folder] assign[=] constant[None] if name[self].output_netcdf begin[:] variable[netcdf_file_path] assign[=] call[constant[{0}_hmet.nc].format, parameter[name[self].project_manager.name]] if name[self].hotstart_minimal_mode begin[:] variable[netcdf_file_path] assign[=] call[constant[{0}_hmet_hotstart.nc].format, parameter[name[self].project_manager.name]] call[name[self].event_manager.prepare_hmet_lsm, parameter[name[self].lsm_data_var_map_array, name[hmet_ascii_output_folder], name[netcdf_file_path]]] <ast.AugAssign object at 0x7da20c992f50>
keyword[def] identifier[prepare_hmet] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_prepare_lsm_hmet] : identifier[netcdf_file_path] = keyword[None] identifier[hmet_ascii_output_folder] = keyword[None] keyword[if] identifier[self] . identifier[output_netcdf] : identifier[netcdf_file_path] = literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ) keyword[if] identifier[self] . identifier[hotstart_minimal_mode] : identifier[netcdf_file_path] = literal[string] . identifier[format] ( identifier[self] . identifier[project_manager] . identifier[name] ) keyword[else] : identifier[hmet_ascii_output_folder] = literal[string] keyword[if] identifier[self] . identifier[hotstart_minimal_mode] : identifier[hmet_ascii_output_folder] += literal[string] identifier[self] . identifier[event_manager] . identifier[prepare_hmet_lsm] ( identifier[self] . identifier[lsm_data_var_map_array] , identifier[hmet_ascii_output_folder] , identifier[netcdf_file_path] ) identifier[self] . identifier[simulation_modified_input_cards] +=[ literal[string] , literal[string] ] keyword[else] : identifier[log] . identifier[info] ( literal[string] )
def prepare_hmet(self): """ Prepare HMET data for simulation """ if self._prepare_lsm_hmet: netcdf_file_path = None hmet_ascii_output_folder = None if self.output_netcdf: netcdf_file_path = '{0}_hmet.nc'.format(self.project_manager.name) if self.hotstart_minimal_mode: netcdf_file_path = '{0}_hmet_hotstart.nc'.format(self.project_manager.name) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: hmet_ascii_output_folder = 'hmet_data_{0}to{1}' if self.hotstart_minimal_mode: hmet_ascii_output_folder += '_hotstart' # depends on [control=['if'], data=[]] self.event_manager.prepare_hmet_lsm(self.lsm_data_var_map_array, hmet_ascii_output_folder, netcdf_file_path) self.simulation_modified_input_cards += ['HMET_NETCDF', 'HMET_ASCII'] # depends on [control=['if'], data=[]] else: log.info('HMET preparation skipped due to missing parameters ...')
def has_ssd(self): """Return true if any of the drive under ArrayControllers is ssd""" for member in self.get_members(): if member.physical_drives.has_ssd: return True return False
def function[has_ssd, parameter[self]]: constant[Return true if any of the drive under ArrayControllers is ssd] for taget[name[member]] in starred[call[name[self].get_members, parameter[]]] begin[:] if name[member].physical_drives.has_ssd begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[has_ssd] ( identifier[self] ): literal[string] keyword[for] identifier[member] keyword[in] identifier[self] . identifier[get_members] (): keyword[if] identifier[member] . identifier[physical_drives] . identifier[has_ssd] : keyword[return] keyword[True] keyword[return] keyword[False]
def has_ssd(self): """Return true if any of the drive under ArrayControllers is ssd""" for member in self.get_members(): if member.physical_drives.has_ssd: return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['member']] return False
def criar_ip(self, id_vlan, id_equipamento, descricao): """Aloca um IP em uma VLAN para um equipamento. Insere um novo IP para a VLAN e o associa ao equipamento. :param id_vlan: Identificador da vlan. :param id_equipamento: Identificador do equipamento. :param descricao: Descriçao do IP. :return: Dicionário com a seguinte estrutura: :: {'ip': {'id': < id_ip >, 'id_network_ipv4': < id_network_ipv4 >, 'oct1’: < oct1 >, 'oct2': < oct2 >, 'oct3': < oct3 >, 'oct4': < oct4 >, 'descricao': < descricao >}} :raise InvalidParameterError: O identificador da VLAN e/ou do equipamento são nulos ou inválidos. :raise EquipamentoNaoExisteError: Equipamento não cadastrado. :raise VlanNaoExisteError: VLAN não cadastrada. :raise IPNaoDisponivelError: Não existe IP disponível para a VLAN informada. :raise DataBaseError: Falha na networkapi ao acessar o banco de dados. :raise XMLError: Falha na networkapi ao ler o XML de requisição ou gerar o XML de resposta. """ ip_map = dict() ip_map['id_vlan'] = id_vlan ip_map['descricao'] = descricao ip_map['id_equipamento'] = id_equipamento code, xml = self.submit({'ip': ip_map}, 'POST', 'ip/') return self.response(code, xml)
def function[criar_ip, parameter[self, id_vlan, id_equipamento, descricao]]: constant[Aloca um IP em uma VLAN para um equipamento. Insere um novo IP para a VLAN e o associa ao equipamento. :param id_vlan: Identificador da vlan. :param id_equipamento: Identificador do equipamento. :param descricao: Descriçao do IP. :return: Dicionário com a seguinte estrutura: :: {'ip': {'id': < id_ip >, 'id_network_ipv4': < id_network_ipv4 >, 'oct1’: < oct1 >, 'oct2': < oct2 >, 'oct3': < oct3 >, 'oct4': < oct4 >, 'descricao': < descricao >}} :raise InvalidParameterError: O identificador da VLAN e/ou do equipamento são nulos ou inválidos. :raise EquipamentoNaoExisteError: Equipamento não cadastrado. :raise VlanNaoExisteError: VLAN não cadastrada. :raise IPNaoDisponivelError: Não existe IP disponível para a VLAN informada. :raise DataBaseError: Falha na networkapi ao acessar o banco de dados. :raise XMLError: Falha na networkapi ao ler o XML de requisição ou gerar o XML de resposta. ] variable[ip_map] assign[=] call[name[dict], parameter[]] call[name[ip_map]][constant[id_vlan]] assign[=] name[id_vlan] call[name[ip_map]][constant[descricao]] assign[=] name[descricao] call[name[ip_map]][constant[id_equipamento]] assign[=] name[id_equipamento] <ast.Tuple object at 0x7da20c6a9b70> assign[=] call[name[self].submit, parameter[dictionary[[<ast.Constant object at 0x7da1b23462f0>], [<ast.Name object at 0x7da1b2347700>]], constant[POST], constant[ip/]]] return[call[name[self].response, parameter[name[code], name[xml]]]]
keyword[def] identifier[criar_ip] ( identifier[self] , identifier[id_vlan] , identifier[id_equipamento] , identifier[descricao] ): literal[string] identifier[ip_map] = identifier[dict] () identifier[ip_map] [ literal[string] ]= identifier[id_vlan] identifier[ip_map] [ literal[string] ]= identifier[descricao] identifier[ip_map] [ literal[string] ]= identifier[id_equipamento] identifier[code] , identifier[xml] = identifier[self] . identifier[submit] ({ literal[string] : identifier[ip_map] }, literal[string] , literal[string] ) keyword[return] identifier[self] . identifier[response] ( identifier[code] , identifier[xml] )
def criar_ip(self, id_vlan, id_equipamento, descricao): """Aloca um IP em uma VLAN para um equipamento. Insere um novo IP para a VLAN e o associa ao equipamento. :param id_vlan: Identificador da vlan. :param id_equipamento: Identificador do equipamento. :param descricao: Descriçao do IP. :return: Dicionário com a seguinte estrutura: :: {'ip': {'id': < id_ip >, 'id_network_ipv4': < id_network_ipv4 >, 'oct1’: < oct1 >, 'oct2': < oct2 >, 'oct3': < oct3 >, 'oct4': < oct4 >, 'descricao': < descricao >}} :raise InvalidParameterError: O identificador da VLAN e/ou do equipamento são nulos ou inválidos. :raise EquipamentoNaoExisteError: Equipamento não cadastrado. :raise VlanNaoExisteError: VLAN não cadastrada. :raise IPNaoDisponivelError: Não existe IP disponível para a VLAN informada. :raise DataBaseError: Falha na networkapi ao acessar o banco de dados. :raise XMLError: Falha na networkapi ao ler o XML de requisição ou gerar o XML de resposta. """ ip_map = dict() ip_map['id_vlan'] = id_vlan ip_map['descricao'] = descricao ip_map['id_equipamento'] = id_equipamento (code, xml) = self.submit({'ip': ip_map}, 'POST', 'ip/') return self.response(code, xml)
def create_item(self, name): """ create a new todo list item """ elem = self.controlled_list.create_item(name) if elem: return TodoElementUX(parent=self, controlled_element=elem)
def function[create_item, parameter[self, name]]: constant[ create a new todo list item ] variable[elem] assign[=] call[name[self].controlled_list.create_item, parameter[name[name]]] if name[elem] begin[:] return[call[name[TodoElementUX], parameter[]]]
keyword[def] identifier[create_item] ( identifier[self] , identifier[name] ): literal[string] identifier[elem] = identifier[self] . identifier[controlled_list] . identifier[create_item] ( identifier[name] ) keyword[if] identifier[elem] : keyword[return] identifier[TodoElementUX] ( identifier[parent] = identifier[self] , identifier[controlled_element] = identifier[elem] )
def create_item(self, name): """ create a new todo list item """ elem = self.controlled_list.create_item(name) if elem: return TodoElementUX(parent=self, controlled_element=elem) # depends on [control=['if'], data=[]]
def write_log_file(namespace, document): """Writes a line to a log file Arguments: namespace {str} -- namespace of document document {dict} -- document to write to the logs """ log_timestamp = asctime(gmtime(document[TS])) with open("{}{}.{}.log".format(LOG_DIR, namespace, DAY_STRING), "a") as f: log_string = dumps({ "datetime": log_timestamp.upper(), "namespace": namespace, "log": document[LOG_KEY] }) f.write("{}\n".format(log_string))
def function[write_log_file, parameter[namespace, document]]: constant[Writes a line to a log file Arguments: namespace {str} -- namespace of document document {dict} -- document to write to the logs ] variable[log_timestamp] assign[=] call[name[asctime], parameter[call[name[gmtime], parameter[call[name[document]][name[TS]]]]]] with call[name[open], parameter[call[constant[{}{}.{}.log].format, parameter[name[LOG_DIR], name[namespace], name[DAY_STRING]]], constant[a]]] begin[:] variable[log_string] assign[=] call[name[dumps], parameter[dictionary[[<ast.Constant object at 0x7da1b15d57b0>, <ast.Constant object at 0x7da1b15d51e0>, <ast.Constant object at 0x7da1b15d6b00>], [<ast.Call object at 0x7da1b15d6290>, <ast.Name object at 0x7da1b15d48b0>, <ast.Subscript object at 0x7da1b15d4d60>]]]] call[name[f].write, parameter[call[constant[{} ].format, parameter[name[log_string]]]]]
keyword[def] identifier[write_log_file] ( identifier[namespace] , identifier[document] ): literal[string] identifier[log_timestamp] = identifier[asctime] ( identifier[gmtime] ( identifier[document] [ identifier[TS] ])) keyword[with] identifier[open] ( literal[string] . identifier[format] ( identifier[LOG_DIR] , identifier[namespace] , identifier[DAY_STRING] ), literal[string] ) keyword[as] identifier[f] : identifier[log_string] = identifier[dumps] ({ literal[string] : identifier[log_timestamp] . identifier[upper] (), literal[string] : identifier[namespace] , literal[string] : identifier[document] [ identifier[LOG_KEY] ] }) identifier[f] . identifier[write] ( literal[string] . identifier[format] ( identifier[log_string] ))
def write_log_file(namespace, document): """Writes a line to a log file Arguments: namespace {str} -- namespace of document document {dict} -- document to write to the logs """ log_timestamp = asctime(gmtime(document[TS])) with open('{}{}.{}.log'.format(LOG_DIR, namespace, DAY_STRING), 'a') as f: log_string = dumps({'datetime': log_timestamp.upper(), 'namespace': namespace, 'log': document[LOG_KEY]}) f.write('{}\n'.format(log_string)) # depends on [control=['with'], data=['f']]
def remaining_time(self): """ estimates the time remaining until script is finished """ elapsed_time = (datetime.datetime.now() - self.start_time).total_seconds() # safety to avoid devision by zero if self.progress == 0: self.progress = 1 estimated_total_time = 100. / self.progress * elapsed_time return datetime.timedelta(seconds = max(estimated_total_time - elapsed_time, 0))
def function[remaining_time, parameter[self]]: constant[ estimates the time remaining until script is finished ] variable[elapsed_time] assign[=] call[binary_operation[call[name[datetime].datetime.now, parameter[]] - name[self].start_time].total_seconds, parameter[]] if compare[name[self].progress equal[==] constant[0]] begin[:] name[self].progress assign[=] constant[1] variable[estimated_total_time] assign[=] binary_operation[binary_operation[constant[100.0] / name[self].progress] * name[elapsed_time]] return[call[name[datetime].timedelta, parameter[]]]
keyword[def] identifier[remaining_time] ( identifier[self] ): literal[string] identifier[elapsed_time] =( identifier[datetime] . identifier[datetime] . identifier[now] ()- identifier[self] . identifier[start_time] ). identifier[total_seconds] () keyword[if] identifier[self] . identifier[progress] == literal[int] : identifier[self] . identifier[progress] = literal[int] identifier[estimated_total_time] = literal[int] / identifier[self] . identifier[progress] * identifier[elapsed_time] keyword[return] identifier[datetime] . identifier[timedelta] ( identifier[seconds] = identifier[max] ( identifier[estimated_total_time] - identifier[elapsed_time] , literal[int] ))
def remaining_time(self): """ estimates the time remaining until script is finished """ elapsed_time = (datetime.datetime.now() - self.start_time).total_seconds() # safety to avoid devision by zero if self.progress == 0: self.progress = 1 # depends on [control=['if'], data=[]] estimated_total_time = 100.0 / self.progress * elapsed_time return datetime.timedelta(seconds=max(estimated_total_time - elapsed_time, 0))
def get_caller(stack_index=2, root_dir=None): ''' Returns file.py:lineno of your caller. A stack_index of 2 will provide the caller of the function calling this function. Notice that stack_index of 2 or more will fail if called from global scope. ''' caller = inspect.getframeinfo(inspect.stack()[stack_index][0]) # Trim the filenames for readability. filename = caller.filename if root_dir is not None: filename = re.sub("^" + root_dir + "/", "", filename) return "%s:%d" % (filename, caller.lineno)
def function[get_caller, parameter[stack_index, root_dir]]: constant[ Returns file.py:lineno of your caller. A stack_index of 2 will provide the caller of the function calling this function. Notice that stack_index of 2 or more will fail if called from global scope. ] variable[caller] assign[=] call[name[inspect].getframeinfo, parameter[call[call[call[name[inspect].stack, parameter[]]][name[stack_index]]][constant[0]]]] variable[filename] assign[=] name[caller].filename if compare[name[root_dir] is_not constant[None]] begin[:] variable[filename] assign[=] call[name[re].sub, parameter[binary_operation[binary_operation[constant[^] + name[root_dir]] + constant[/]], constant[], name[filename]]] return[binary_operation[constant[%s:%d] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f00fa30>, <ast.Attribute object at 0x7da18f00c340>]]]]
keyword[def] identifier[get_caller] ( identifier[stack_index] = literal[int] , identifier[root_dir] = keyword[None] ): literal[string] identifier[caller] = identifier[inspect] . identifier[getframeinfo] ( identifier[inspect] . identifier[stack] ()[ identifier[stack_index] ][ literal[int] ]) identifier[filename] = identifier[caller] . identifier[filename] keyword[if] identifier[root_dir] keyword[is] keyword[not] keyword[None] : identifier[filename] = identifier[re] . identifier[sub] ( literal[string] + identifier[root_dir] + literal[string] , literal[string] , identifier[filename] ) keyword[return] literal[string] %( identifier[filename] , identifier[caller] . identifier[lineno] )
def get_caller(stack_index=2, root_dir=None): """ Returns file.py:lineno of your caller. A stack_index of 2 will provide the caller of the function calling this function. Notice that stack_index of 2 or more will fail if called from global scope. """ caller = inspect.getframeinfo(inspect.stack()[stack_index][0]) # Trim the filenames for readability. filename = caller.filename if root_dir is not None: filename = re.sub('^' + root_dir + '/', '', filename) # depends on [control=['if'], data=['root_dir']] return '%s:%d' % (filename, caller.lineno)
def delayed_burst_run(self, target_cycles_per_sec): """ Run CPU not faster than given speedlimit """ old_cycles = self.cycles start_time = time.time() self.burst_run() is_duration = time.time() - start_time new_cycles = self.cycles - old_cycles try: is_cycles_per_sec = new_cycles / is_duration except ZeroDivisionError: pass else: should_burst_duration = is_cycles_per_sec / target_cycles_per_sec target_duration = should_burst_duration * is_duration delay = target_duration - is_duration if delay > 0: if delay > self.max_delay: self.delay = self.max_delay else: self.delay = delay time.sleep(self.delay) self.call_sync_callbacks()
def function[delayed_burst_run, parameter[self, target_cycles_per_sec]]: constant[ Run CPU not faster than given speedlimit ] variable[old_cycles] assign[=] name[self].cycles variable[start_time] assign[=] call[name[time].time, parameter[]] call[name[self].burst_run, parameter[]] variable[is_duration] assign[=] binary_operation[call[name[time].time, parameter[]] - name[start_time]] variable[new_cycles] assign[=] binary_operation[name[self].cycles - name[old_cycles]] <ast.Try object at 0x7da1b26ac220> call[name[self].call_sync_callbacks, parameter[]]
keyword[def] identifier[delayed_burst_run] ( identifier[self] , identifier[target_cycles_per_sec] ): literal[string] identifier[old_cycles] = identifier[self] . identifier[cycles] identifier[start_time] = identifier[time] . identifier[time] () identifier[self] . identifier[burst_run] () identifier[is_duration] = identifier[time] . identifier[time] ()- identifier[start_time] identifier[new_cycles] = identifier[self] . identifier[cycles] - identifier[old_cycles] keyword[try] : identifier[is_cycles_per_sec] = identifier[new_cycles] / identifier[is_duration] keyword[except] identifier[ZeroDivisionError] : keyword[pass] keyword[else] : identifier[should_burst_duration] = identifier[is_cycles_per_sec] / identifier[target_cycles_per_sec] identifier[target_duration] = identifier[should_burst_duration] * identifier[is_duration] identifier[delay] = identifier[target_duration] - identifier[is_duration] keyword[if] identifier[delay] > literal[int] : keyword[if] identifier[delay] > identifier[self] . identifier[max_delay] : identifier[self] . identifier[delay] = identifier[self] . identifier[max_delay] keyword[else] : identifier[self] . identifier[delay] = identifier[delay] identifier[time] . identifier[sleep] ( identifier[self] . identifier[delay] ) identifier[self] . identifier[call_sync_callbacks] ()
def delayed_burst_run(self, target_cycles_per_sec): """ Run CPU not faster than given speedlimit """ old_cycles = self.cycles start_time = time.time() self.burst_run() is_duration = time.time() - start_time new_cycles = self.cycles - old_cycles try: is_cycles_per_sec = new_cycles / is_duration # depends on [control=['try'], data=[]] except ZeroDivisionError: pass # depends on [control=['except'], data=[]] else: should_burst_duration = is_cycles_per_sec / target_cycles_per_sec target_duration = should_burst_duration * is_duration delay = target_duration - is_duration if delay > 0: if delay > self.max_delay: self.delay = self.max_delay # depends on [control=['if'], data=[]] else: self.delay = delay time.sleep(self.delay) # depends on [control=['if'], data=['delay']] self.call_sync_callbacks()
def base64_encodestring(instr): ''' Encode a string as base64 using the "legacy" Python interface. Among other possible differences, the "legacy" encoder includes a newline ('\\n') character after every 76 characters and always at the end of the encoded string. ''' return salt.utils.stringutils.to_unicode( base64.encodestring(salt.utils.stringutils.to_bytes(instr)), encoding='utf8' if salt.utils.platform.is_windows() else None )
def function[base64_encodestring, parameter[instr]]: constant[ Encode a string as base64 using the "legacy" Python interface. Among other possible differences, the "legacy" encoder includes a newline ('\n') character after every 76 characters and always at the end of the encoded string. ] return[call[name[salt].utils.stringutils.to_unicode, parameter[call[name[base64].encodestring, parameter[call[name[salt].utils.stringutils.to_bytes, parameter[name[instr]]]]]]]]
keyword[def] identifier[base64_encodestring] ( identifier[instr] ): literal[string] keyword[return] identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[base64] . identifier[encodestring] ( identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_bytes] ( identifier[instr] )), identifier[encoding] = literal[string] keyword[if] identifier[salt] . identifier[utils] . identifier[platform] . identifier[is_windows] () keyword[else] keyword[None] )
def base64_encodestring(instr): """ Encode a string as base64 using the "legacy" Python interface. Among other possible differences, the "legacy" encoder includes a newline ('\\n') character after every 76 characters and always at the end of the encoded string. """ return salt.utils.stringutils.to_unicode(base64.encodestring(salt.utils.stringutils.to_bytes(instr)), encoding='utf8' if salt.utils.platform.is_windows() else None)
def check_important_variables(self): """ Check all the variables needed are defined """ if len(self.important_variables - set(self.args.keys())): raise TypeError("Some important variables are not set")
def function[check_important_variables, parameter[self]]: constant[ Check all the variables needed are defined ] if call[name[len], parameter[binary_operation[name[self].important_variables - call[name[set], parameter[call[name[self].args.keys, parameter[]]]]]]] begin[:] <ast.Raise object at 0x7da1b1352200>
keyword[def] identifier[check_important_variables] ( identifier[self] ): literal[string] keyword[if] identifier[len] ( identifier[self] . identifier[important_variables] - identifier[set] ( identifier[self] . identifier[args] . identifier[keys] ())): keyword[raise] identifier[TypeError] ( literal[string] )
def check_important_variables(self): """ Check all the variables needed are defined """ if len(self.important_variables - set(self.args.keys())): raise TypeError('Some important variables are not set') # depends on [control=['if'], data=[]]
def find_globals_and_nonlocals(node, globs, nonlocals, code, version): """search a node of parse tree to find variable names that need a either 'global' or 'nonlocal' statements added.""" for n in node: if isinstance(n, SyntaxTree): globs, nonlocals = find_globals_and_nonlocals(n, globs, nonlocals, code, version) elif n.kind in read_global_ops: globs.add(n.pattr) elif (version >= 3.0 and n.kind in nonglobal_ops and n.pattr in code.co_freevars and n.pattr != code.co_name and code.co_name != '<lambda>'): nonlocals.add(n.pattr) return globs, nonlocals
def function[find_globals_and_nonlocals, parameter[node, globs, nonlocals, code, version]]: constant[search a node of parse tree to find variable names that need a either 'global' or 'nonlocal' statements added.] for taget[name[n]] in starred[name[node]] begin[:] if call[name[isinstance], parameter[name[n], name[SyntaxTree]]] begin[:] <ast.Tuple object at 0x7da1b21c7190> assign[=] call[name[find_globals_and_nonlocals], parameter[name[n], name[globs], name[nonlocals], name[code], name[version]]] return[tuple[[<ast.Name object at 0x7da18c4cd750>, <ast.Name object at 0x7da18c4cfa60>]]]
keyword[def] identifier[find_globals_and_nonlocals] ( identifier[node] , identifier[globs] , identifier[nonlocals] , identifier[code] , identifier[version] ): literal[string] keyword[for] identifier[n] keyword[in] identifier[node] : keyword[if] identifier[isinstance] ( identifier[n] , identifier[SyntaxTree] ): identifier[globs] , identifier[nonlocals] = identifier[find_globals_and_nonlocals] ( identifier[n] , identifier[globs] , identifier[nonlocals] , identifier[code] , identifier[version] ) keyword[elif] identifier[n] . identifier[kind] keyword[in] identifier[read_global_ops] : identifier[globs] . identifier[add] ( identifier[n] . identifier[pattr] ) keyword[elif] ( identifier[version] >= literal[int] keyword[and] identifier[n] . identifier[kind] keyword[in] identifier[nonglobal_ops] keyword[and] identifier[n] . identifier[pattr] keyword[in] identifier[code] . identifier[co_freevars] keyword[and] identifier[n] . identifier[pattr] != identifier[code] . identifier[co_name] keyword[and] identifier[code] . identifier[co_name] != literal[string] ): identifier[nonlocals] . identifier[add] ( identifier[n] . identifier[pattr] ) keyword[return] identifier[globs] , identifier[nonlocals]
def find_globals_and_nonlocals(node, globs, nonlocals, code, version): """search a node of parse tree to find variable names that need a either 'global' or 'nonlocal' statements added.""" for n in node: if isinstance(n, SyntaxTree): (globs, nonlocals) = find_globals_and_nonlocals(n, globs, nonlocals, code, version) # depends on [control=['if'], data=[]] elif n.kind in read_global_ops: globs.add(n.pattr) # depends on [control=['if'], data=[]] elif version >= 3.0 and n.kind in nonglobal_ops and (n.pattr in code.co_freevars) and (n.pattr != code.co_name) and (code.co_name != '<lambda>'): nonlocals.add(n.pattr) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['n']] return (globs, nonlocals)
def ip_address(): """Get the IP address used for public connections.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 8.8.8.8 is the google public DNS s.connect(("8.8.8.8", 53)) ip = s.getsockname()[0] s.close() return ip
def function[ip_address, parameter[]]: constant[Get the IP address used for public connections.] variable[s] assign[=] call[name[socket].socket, parameter[name[socket].AF_INET, name[socket].SOCK_DGRAM]] call[name[s].connect, parameter[tuple[[<ast.Constant object at 0x7da1b1971120>, <ast.Constant object at 0x7da1b1971150>]]]] variable[ip] assign[=] call[call[name[s].getsockname, parameter[]]][constant[0]] call[name[s].close, parameter[]] return[name[ip]]
keyword[def] identifier[ip_address] (): literal[string] identifier[s] = identifier[socket] . identifier[socket] ( identifier[socket] . identifier[AF_INET] , identifier[socket] . identifier[SOCK_DGRAM] ) identifier[s] . identifier[connect] (( literal[string] , literal[int] )) identifier[ip] = identifier[s] . identifier[getsockname] ()[ literal[int] ] identifier[s] . identifier[close] () keyword[return] identifier[ip]
def ip_address(): """Get the IP address used for public connections.""" s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) # 8.8.8.8 is the google public DNS s.connect(('8.8.8.8', 53)) ip = s.getsockname()[0] s.close() return ip
def get_table_keys_name(self, table_name, keys): """ Given a set of keys, extracts the key and range key """ table = self.tables.get(table_name) if not table: return None, None else: if len(keys) == 1: for key in keys: if key in table.hash_key_names: return key, None # for potential_hash, potential_range in zip(table.hash_key_names, table.range_key_names): # if set([potential_hash, potential_range]) == set(keys): # return potential_hash, potential_range potential_hash, potential_range = None, None for key in set(keys): if key in table.hash_key_names: potential_hash = key elif key in table.range_key_names: potential_range = key return potential_hash, potential_range
def function[get_table_keys_name, parameter[self, table_name, keys]]: constant[ Given a set of keys, extracts the key and range key ] variable[table] assign[=] call[name[self].tables.get, parameter[name[table_name]]] if <ast.UnaryOp object at 0x7da1b1980130> begin[:] return[tuple[[<ast.Constant object at 0x7da1b19826e0>, <ast.Constant object at 0x7da1b1981f30>]]]
keyword[def] identifier[get_table_keys_name] ( identifier[self] , identifier[table_name] , identifier[keys] ): literal[string] identifier[table] = identifier[self] . identifier[tables] . identifier[get] ( identifier[table_name] ) keyword[if] keyword[not] identifier[table] : keyword[return] keyword[None] , keyword[None] keyword[else] : keyword[if] identifier[len] ( identifier[keys] )== literal[int] : keyword[for] identifier[key] keyword[in] identifier[keys] : keyword[if] identifier[key] keyword[in] identifier[table] . identifier[hash_key_names] : keyword[return] identifier[key] , keyword[None] identifier[potential_hash] , identifier[potential_range] = keyword[None] , keyword[None] keyword[for] identifier[key] keyword[in] identifier[set] ( identifier[keys] ): keyword[if] identifier[key] keyword[in] identifier[table] . identifier[hash_key_names] : identifier[potential_hash] = identifier[key] keyword[elif] identifier[key] keyword[in] identifier[table] . identifier[range_key_names] : identifier[potential_range] = identifier[key] keyword[return] identifier[potential_hash] , identifier[potential_range]
def get_table_keys_name(self, table_name, keys): """ Given a set of keys, extracts the key and range key """ table = self.tables.get(table_name) if not table: return (None, None) # depends on [control=['if'], data=[]] else: if len(keys) == 1: for key in keys: if key in table.hash_key_names: return (key, None) # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] # depends on [control=['if'], data=[]] # for potential_hash, potential_range in zip(table.hash_key_names, table.range_key_names): # if set([potential_hash, potential_range]) == set(keys): # return potential_hash, potential_range (potential_hash, potential_range) = (None, None) for key in set(keys): if key in table.hash_key_names: potential_hash = key # depends on [control=['if'], data=['key']] elif key in table.range_key_names: potential_range = key # depends on [control=['if'], data=['key']] # depends on [control=['for'], data=['key']] return (potential_hash, potential_range)
def play_alert(zones, alert_uri, alert_volume=20, alert_duration=0, fade_back=False): """ Demo function using soco.snapshot across multiple Sonos players. Args: zones (set): a set of SoCo objects alert_uri (str): uri that Sonos can play as an alert alert_volume (int): volume level for playing alert (0 tp 100) alert_duration (int): length of alert (if zero then length of track) fade_back (bool): on reinstating the zones fade up the sound? """ # Use soco.snapshot to capture current state of each zone to allow restore for zone in zones: zone.snap = Snapshot(zone) zone.snap.snapshot() print('snapshot of zone: {}'.format(zone.player_name)) # prepare all zones for playing the alert for zone in zones: # Each Sonos group has one coordinator only these can play, pause, etc. if zone.is_coordinator: if not zone.is_playing_tv: # can't pause TV - so don't try! # pause music for each coordinators if playing trans_state = zone.get_current_transport_info() if trans_state['current_transport_state'] == 'PLAYING': zone.pause() # For every Sonos player set volume and mute for every zone zone.volume = alert_volume zone.mute = False # play the sound (uri) on each sonos coordinator print('will play: {} on all coordinators'.format(alert_uri)) for zone in zones: if zone.is_coordinator: zone.play_uri(uri=alert_uri, title='Sonos Alert') # wait for alert_duration time.sleep(alert_duration) # restore each zone to previous state for zone in zones: print('restoring {}'.format(zone.player_name)) zone.snap.restore(fade=fade_back)
def function[play_alert, parameter[zones, alert_uri, alert_volume, alert_duration, fade_back]]: constant[ Demo function using soco.snapshot across multiple Sonos players. Args: zones (set): a set of SoCo objects alert_uri (str): uri that Sonos can play as an alert alert_volume (int): volume level for playing alert (0 tp 100) alert_duration (int): length of alert (if zero then length of track) fade_back (bool): on reinstating the zones fade up the sound? ] for taget[name[zone]] in starred[name[zones]] begin[:] name[zone].snap assign[=] call[name[Snapshot], parameter[name[zone]]] call[name[zone].snap.snapshot, parameter[]] call[name[print], parameter[call[constant[snapshot of zone: {}].format, parameter[name[zone].player_name]]]] for taget[name[zone]] in starred[name[zones]] begin[:] if name[zone].is_coordinator begin[:] if <ast.UnaryOp object at 0x7da18f722560> begin[:] variable[trans_state] assign[=] call[name[zone].get_current_transport_info, parameter[]] if compare[call[name[trans_state]][constant[current_transport_state]] equal[==] constant[PLAYING]] begin[:] call[name[zone].pause, parameter[]] name[zone].volume assign[=] name[alert_volume] name[zone].mute assign[=] constant[False] call[name[print], parameter[call[constant[will play: {} on all coordinators].format, parameter[name[alert_uri]]]]] for taget[name[zone]] in starred[name[zones]] begin[:] if name[zone].is_coordinator begin[:] call[name[zone].play_uri, parameter[]] call[name[time].sleep, parameter[name[alert_duration]]] for taget[name[zone]] in starred[name[zones]] begin[:] call[name[print], parameter[call[constant[restoring {}].format, parameter[name[zone].player_name]]]] call[name[zone].snap.restore, parameter[]]
keyword[def] identifier[play_alert] ( identifier[zones] , identifier[alert_uri] , identifier[alert_volume] = literal[int] , identifier[alert_duration] = literal[int] , identifier[fade_back] = keyword[False] ): literal[string] keyword[for] identifier[zone] keyword[in] identifier[zones] : identifier[zone] . identifier[snap] = identifier[Snapshot] ( identifier[zone] ) identifier[zone] . identifier[snap] . identifier[snapshot] () identifier[print] ( literal[string] . identifier[format] ( identifier[zone] . identifier[player_name] )) keyword[for] identifier[zone] keyword[in] identifier[zones] : keyword[if] identifier[zone] . identifier[is_coordinator] : keyword[if] keyword[not] identifier[zone] . identifier[is_playing_tv] : identifier[trans_state] = identifier[zone] . identifier[get_current_transport_info] () keyword[if] identifier[trans_state] [ literal[string] ]== literal[string] : identifier[zone] . identifier[pause] () identifier[zone] . identifier[volume] = identifier[alert_volume] identifier[zone] . identifier[mute] = keyword[False] identifier[print] ( literal[string] . identifier[format] ( identifier[alert_uri] )) keyword[for] identifier[zone] keyword[in] identifier[zones] : keyword[if] identifier[zone] . identifier[is_coordinator] : identifier[zone] . identifier[play_uri] ( identifier[uri] = identifier[alert_uri] , identifier[title] = literal[string] ) identifier[time] . identifier[sleep] ( identifier[alert_duration] ) keyword[for] identifier[zone] keyword[in] identifier[zones] : identifier[print] ( literal[string] . identifier[format] ( identifier[zone] . identifier[player_name] )) identifier[zone] . identifier[snap] . identifier[restore] ( identifier[fade] = identifier[fade_back] )
def play_alert(zones, alert_uri, alert_volume=20, alert_duration=0, fade_back=False): """ Demo function using soco.snapshot across multiple Sonos players. Args: zones (set): a set of SoCo objects alert_uri (str): uri that Sonos can play as an alert alert_volume (int): volume level for playing alert (0 tp 100) alert_duration (int): length of alert (if zero then length of track) fade_back (bool): on reinstating the zones fade up the sound? """ # Use soco.snapshot to capture current state of each zone to allow restore for zone in zones: zone.snap = Snapshot(zone) zone.snap.snapshot() print('snapshot of zone: {}'.format(zone.player_name)) # depends on [control=['for'], data=['zone']] # prepare all zones for playing the alert for zone in zones: # Each Sonos group has one coordinator only these can play, pause, etc. if zone.is_coordinator: if not zone.is_playing_tv: # can't pause TV - so don't try! # pause music for each coordinators if playing trans_state = zone.get_current_transport_info() if trans_state['current_transport_state'] == 'PLAYING': zone.pause() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # For every Sonos player set volume and mute for every zone zone.volume = alert_volume zone.mute = False # depends on [control=['for'], data=['zone']] # play the sound (uri) on each sonos coordinator print('will play: {} on all coordinators'.format(alert_uri)) for zone in zones: if zone.is_coordinator: zone.play_uri(uri=alert_uri, title='Sonos Alert') # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['zone']] # wait for alert_duration time.sleep(alert_duration) # restore each zone to previous state for zone in zones: print('restoring {}'.format(zone.player_name)) zone.snap.restore(fade=fade_back) # depends on [control=['for'], data=['zone']]
def create_cluster(self, node_list, cluster_attrs={}, node_attrs={}): ''' API: create_cluster(self, node_list, cluster_attrs, node_attrs) Description: Creates a cluster from the node given in the node list. Input: node_list: List of nodes in the cluster. cluster_attrs: Dictionary of cluster attributes, see Dot language grammer documentation for details. node_attrs: Dictionary of node attributes. It will overwrite previous attributes of the nodes in the cluster. Post: A cluster will be created. Attributes of the nodes in the cluster may change. ''' if 'name' in cluster_attrs: if 'name' in self.cluster: raise Exception('A cluster with name %s already exists!' %cluster_attrs['name']) else: name = cluster_attrs['name'] else: name = 'c%d' %self.attr['cluster_count'] self.attr['cluster_count'] += 1 cluster_attrs['name'] = name #cluster_attrs['name'] = self.cluster[name] = {'node_list':node_list, 'attrs':copy.deepcopy(cluster_attrs), 'node_attrs':copy.deepcopy(node_attrs)}
def function[create_cluster, parameter[self, node_list, cluster_attrs, node_attrs]]: constant[ API: create_cluster(self, node_list, cluster_attrs, node_attrs) Description: Creates a cluster from the node given in the node list. Input: node_list: List of nodes in the cluster. cluster_attrs: Dictionary of cluster attributes, see Dot language grammer documentation for details. node_attrs: Dictionary of node attributes. It will overwrite previous attributes of the nodes in the cluster. Post: A cluster will be created. Attributes of the nodes in the cluster may change. ] if compare[constant[name] in name[cluster_attrs]] begin[:] if compare[constant[name] in name[self].cluster] begin[:] <ast.Raise object at 0x7da1b0535c60> call[name[self].cluster][name[name]] assign[=] dictionary[[<ast.Constant object at 0x7da1b0534550>, <ast.Constant object at 0x7da1b0534460>, <ast.Constant object at 0x7da1b0536aa0>], [<ast.Name object at 0x7da1b0536da0>, <ast.Call object at 0x7da1b0535570>, <ast.Call object at 0x7da1b0535690>]]
keyword[def] identifier[create_cluster] ( identifier[self] , identifier[node_list] , identifier[cluster_attrs] ={}, identifier[node_attrs] ={}): literal[string] keyword[if] literal[string] keyword[in] identifier[cluster_attrs] : keyword[if] literal[string] keyword[in] identifier[self] . identifier[cluster] : keyword[raise] identifier[Exception] ( literal[string] % identifier[cluster_attrs] [ literal[string] ]) keyword[else] : identifier[name] = identifier[cluster_attrs] [ literal[string] ] keyword[else] : identifier[name] = literal[string] % identifier[self] . identifier[attr] [ literal[string] ] identifier[self] . identifier[attr] [ literal[string] ]+= literal[int] identifier[cluster_attrs] [ literal[string] ]= identifier[name] identifier[self] . identifier[cluster] [ identifier[name] ]={ literal[string] : identifier[node_list] , literal[string] : identifier[copy] . identifier[deepcopy] ( identifier[cluster_attrs] ), literal[string] : identifier[copy] . identifier[deepcopy] ( identifier[node_attrs] )}
def create_cluster(self, node_list, cluster_attrs={}, node_attrs={}): """ API: create_cluster(self, node_list, cluster_attrs, node_attrs) Description: Creates a cluster from the node given in the node list. Input: node_list: List of nodes in the cluster. cluster_attrs: Dictionary of cluster attributes, see Dot language grammer documentation for details. node_attrs: Dictionary of node attributes. It will overwrite previous attributes of the nodes in the cluster. Post: A cluster will be created. Attributes of the nodes in the cluster may change. """ if 'name' in cluster_attrs: if 'name' in self.cluster: raise Exception('A cluster with name %s already exists!' % cluster_attrs['name']) # depends on [control=['if'], data=[]] else: name = cluster_attrs['name'] # depends on [control=['if'], data=['cluster_attrs']] else: name = 'c%d' % self.attr['cluster_count'] self.attr['cluster_count'] += 1 cluster_attrs['name'] = name #cluster_attrs['name'] = self.cluster[name] = {'node_list': node_list, 'attrs': copy.deepcopy(cluster_attrs), 'node_attrs': copy.deepcopy(node_attrs)}
def check_page_for_warnings(html: str) -> None: """ Checks if is any warnings on page if so raises an exception """ soup = BeautifulSoup(html, 'html.parser') warnings = soup.find_all('div', {'class': 'service_msg_warning'}) if warnings: exception_msg = '; '.join((warning.get_text() for warning in warnings)) raise VVKPageWarningException(exception_msg)
def function[check_page_for_warnings, parameter[html]]: constant[ Checks if is any warnings on page if so raises an exception ] variable[soup] assign[=] call[name[BeautifulSoup], parameter[name[html], constant[html.parser]]] variable[warnings] assign[=] call[name[soup].find_all, parameter[constant[div], dictionary[[<ast.Constant object at 0x7da18f09ee60>], [<ast.Constant object at 0x7da18f09c4f0>]]]] if name[warnings] begin[:] variable[exception_msg] assign[=] call[constant[; ].join, parameter[<ast.GeneratorExp object at 0x7da18f09d9c0>]] <ast.Raise object at 0x7da2045667a0>
keyword[def] identifier[check_page_for_warnings] ( identifier[html] : identifier[str] )-> keyword[None] : literal[string] identifier[soup] = identifier[BeautifulSoup] ( identifier[html] , literal[string] ) identifier[warnings] = identifier[soup] . identifier[find_all] ( literal[string] ,{ literal[string] : literal[string] }) keyword[if] identifier[warnings] : identifier[exception_msg] = literal[string] . identifier[join] (( identifier[warning] . identifier[get_text] () keyword[for] identifier[warning] keyword[in] identifier[warnings] )) keyword[raise] identifier[VVKPageWarningException] ( identifier[exception_msg] )
def check_page_for_warnings(html: str) -> None: """ Checks if is any warnings on page if so raises an exception """ soup = BeautifulSoup(html, 'html.parser') warnings = soup.find_all('div', {'class': 'service_msg_warning'}) if warnings: exception_msg = '; '.join((warning.get_text() for warning in warnings)) raise VVKPageWarningException(exception_msg) # depends on [control=['if'], data=[]]
def load(self): """Loads configuration file""" # Config files prior to 0.2.4 dor not have config version keys old_config = not self.cfg_file.Exists("config_version") # Reset data self.data.__dict__.update(self.defaults.__dict__) for key in self.defaults.__dict__: if self.cfg_file.Exists(key): setattr(self.data, key, self.cfg_file.Read(key)) # Reset keys that should be reset on version upgrades if old_config or self.version != self.data.config_version: for key in self.reset_on_version_change: setattr(self.data, key, getattr(DefaultConfig(), key)) self.data.config_version = self.version # Delete gpg_key_uid and insert fingerprint key if hasattr(self.data, "gpg_key_uid"): oldkey = "gpg_key_uid" delattr(self.data, oldkey) newkey = "gpg_key_fingerprint" setattr(self.data, newkey, getattr(DefaultConfig(), newkey))
def function[load, parameter[self]]: constant[Loads configuration file] variable[old_config] assign[=] <ast.UnaryOp object at 0x7da1b16fc700> call[name[self].data.__dict__.update, parameter[name[self].defaults.__dict__]] for taget[name[key]] in starred[name[self].defaults.__dict__] begin[:] if call[name[self].cfg_file.Exists, parameter[name[key]]] begin[:] call[name[setattr], parameter[name[self].data, name[key], call[name[self].cfg_file.Read, parameter[name[key]]]]] if <ast.BoolOp object at 0x7da1b16fc340> begin[:] for taget[name[key]] in starred[name[self].reset_on_version_change] begin[:] call[name[setattr], parameter[name[self].data, name[key], call[name[getattr], parameter[call[name[DefaultConfig], parameter[]], name[key]]]]] name[self].data.config_version assign[=] name[self].version if call[name[hasattr], parameter[name[self].data, constant[gpg_key_uid]]] begin[:] variable[oldkey] assign[=] constant[gpg_key_uid] call[name[delattr], parameter[name[self].data, name[oldkey]]] variable[newkey] assign[=] constant[gpg_key_fingerprint] call[name[setattr], parameter[name[self].data, name[newkey], call[name[getattr], parameter[call[name[DefaultConfig], parameter[]], name[newkey]]]]]
keyword[def] identifier[load] ( identifier[self] ): literal[string] identifier[old_config] = keyword[not] identifier[self] . identifier[cfg_file] . identifier[Exists] ( literal[string] ) identifier[self] . identifier[data] . identifier[__dict__] . identifier[update] ( identifier[self] . identifier[defaults] . identifier[__dict__] ) keyword[for] identifier[key] keyword[in] identifier[self] . identifier[defaults] . identifier[__dict__] : keyword[if] identifier[self] . identifier[cfg_file] . identifier[Exists] ( identifier[key] ): identifier[setattr] ( identifier[self] . identifier[data] , identifier[key] , identifier[self] . identifier[cfg_file] . identifier[Read] ( identifier[key] )) keyword[if] identifier[old_config] keyword[or] identifier[self] . identifier[version] != identifier[self] . identifier[data] . identifier[config_version] : keyword[for] identifier[key] keyword[in] identifier[self] . identifier[reset_on_version_change] : identifier[setattr] ( identifier[self] . identifier[data] , identifier[key] , identifier[getattr] ( identifier[DefaultConfig] (), identifier[key] )) identifier[self] . identifier[data] . identifier[config_version] = identifier[self] . identifier[version] keyword[if] identifier[hasattr] ( identifier[self] . identifier[data] , literal[string] ): identifier[oldkey] = literal[string] identifier[delattr] ( identifier[self] . identifier[data] , identifier[oldkey] ) identifier[newkey] = literal[string] identifier[setattr] ( identifier[self] . identifier[data] , identifier[newkey] , identifier[getattr] ( identifier[DefaultConfig] (), identifier[newkey] ))
def load(self): """Loads configuration file""" # Config files prior to 0.2.4 dor not have config version keys old_config = not self.cfg_file.Exists('config_version') # Reset data self.data.__dict__.update(self.defaults.__dict__) for key in self.defaults.__dict__: if self.cfg_file.Exists(key): setattr(self.data, key, self.cfg_file.Read(key)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['key']] # Reset keys that should be reset on version upgrades if old_config or self.version != self.data.config_version: for key in self.reset_on_version_change: setattr(self.data, key, getattr(DefaultConfig(), key)) # depends on [control=['for'], data=['key']] self.data.config_version = self.version # depends on [control=['if'], data=[]] # Delete gpg_key_uid and insert fingerprint key if hasattr(self.data, 'gpg_key_uid'): oldkey = 'gpg_key_uid' delattr(self.data, oldkey) newkey = 'gpg_key_fingerprint' setattr(self.data, newkey, getattr(DefaultConfig(), newkey)) # depends on [control=['if'], data=[]]
def bar(self, height='thickness', sort=False, reverse=False, legend=None, ax=None, figsize=None, **kwargs): """ Make a bar plot of thickness per interval. Args: height (str): The property of the primary component to plot. sort (bool or function): Either pass a boolean indicating whether to reverse sort by thickness, or pass a function to be used as the sort key. reverse (bool): Reverses the sort order. legend (Legend): The legend to plot with. ax (axis): Optional axis to plot to. figsize (tuple): A figure size, (width, height), optional. **kwargs: passed to the matplotlib bar plot command, ax.bar(). Returns: axis: If you sent an axis in, you get it back. """ if sort: if sort is True: def func(x): return x.thickness reverse = True data = sorted(self, key=func, reverse=reverse) else: data = self[:] if ax is None: fig, ax = plt.subplots(figsize=figsize) heights = [getattr(i, height) for i in data] comps = [i[0] for i in self.unique] if legend is None: legend = Legend.random(comps) colors = [legend.get_colour(i.primary) for i in data] bars = ax.bar(range(len(data)), height=heights, color=colors, **kwargs) # Legend. colourables = [i.primary.summary() for i in data] unique_bars = dict(zip(colourables, bars)) ax.legend(unique_bars.values(), unique_bars.keys()) ax.set_ylabel(height.title()) return ax
def function[bar, parameter[self, height, sort, reverse, legend, ax, figsize]]: constant[ Make a bar plot of thickness per interval. Args: height (str): The property of the primary component to plot. sort (bool or function): Either pass a boolean indicating whether to reverse sort by thickness, or pass a function to be used as the sort key. reverse (bool): Reverses the sort order. legend (Legend): The legend to plot with. ax (axis): Optional axis to plot to. figsize (tuple): A figure size, (width, height), optional. **kwargs: passed to the matplotlib bar plot command, ax.bar(). Returns: axis: If you sent an axis in, you get it back. ] if name[sort] begin[:] if compare[name[sort] is constant[True]] begin[:] def function[func, parameter[x]]: return[name[x].thickness] variable[reverse] assign[=] constant[True] variable[data] assign[=] call[name[sorted], parameter[name[self]]] if compare[name[ax] is constant[None]] begin[:] <ast.Tuple object at 0x7da20c990700> assign[=] call[name[plt].subplots, parameter[]] variable[heights] assign[=] <ast.ListComp object at 0x7da20c9911e0> variable[comps] assign[=] <ast.ListComp object at 0x7da18c4cc730> if compare[name[legend] is constant[None]] begin[:] variable[legend] assign[=] call[name[Legend].random, parameter[name[comps]]] variable[colors] assign[=] <ast.ListComp object at 0x7da18c4ce230> variable[bars] assign[=] call[name[ax].bar, parameter[call[name[range], parameter[call[name[len], parameter[name[data]]]]]]] variable[colourables] assign[=] <ast.ListComp object at 0x7da18c4cd7b0> variable[unique_bars] assign[=] call[name[dict], parameter[call[name[zip], parameter[name[colourables], name[bars]]]]] call[name[ax].legend, parameter[call[name[unique_bars].values, parameter[]], call[name[unique_bars].keys, parameter[]]]] call[name[ax].set_ylabel, parameter[call[name[height].title, parameter[]]]] return[name[ax]]
keyword[def] identifier[bar] ( identifier[self] , identifier[height] = literal[string] , identifier[sort] = keyword[False] , identifier[reverse] = keyword[False] , identifier[legend] = keyword[None] , identifier[ax] = keyword[None] , identifier[figsize] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[sort] : keyword[if] identifier[sort] keyword[is] keyword[True] : keyword[def] identifier[func] ( identifier[x] ): keyword[return] identifier[x] . identifier[thickness] identifier[reverse] = keyword[True] identifier[data] = identifier[sorted] ( identifier[self] , identifier[key] = identifier[func] , identifier[reverse] = identifier[reverse] ) keyword[else] : identifier[data] = identifier[self] [:] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[fig] , identifier[ax] = identifier[plt] . identifier[subplots] ( identifier[figsize] = identifier[figsize] ) identifier[heights] =[ identifier[getattr] ( identifier[i] , identifier[height] ) keyword[for] identifier[i] keyword[in] identifier[data] ] identifier[comps] =[ identifier[i] [ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[unique] ] keyword[if] identifier[legend] keyword[is] keyword[None] : identifier[legend] = identifier[Legend] . identifier[random] ( identifier[comps] ) identifier[colors] =[ identifier[legend] . identifier[get_colour] ( identifier[i] . identifier[primary] ) keyword[for] identifier[i] keyword[in] identifier[data] ] identifier[bars] = identifier[ax] . identifier[bar] ( identifier[range] ( identifier[len] ( identifier[data] )), identifier[height] = identifier[heights] , identifier[color] = identifier[colors] ,** identifier[kwargs] ) identifier[colourables] =[ identifier[i] . identifier[primary] . identifier[summary] () keyword[for] identifier[i] keyword[in] identifier[data] ] identifier[unique_bars] = identifier[dict] ( identifier[zip] ( identifier[colourables] , identifier[bars] )) identifier[ax] . 
identifier[legend] ( identifier[unique_bars] . identifier[values] (), identifier[unique_bars] . identifier[keys] ()) identifier[ax] . identifier[set_ylabel] ( identifier[height] . identifier[title] ()) keyword[return] identifier[ax]
def bar(self, height='thickness', sort=False, reverse=False, legend=None, ax=None, figsize=None, **kwargs): """ Make a bar plot of thickness per interval. Args: height (str): The property of the primary component to plot. sort (bool or function): Either pass a boolean indicating whether to reverse sort by thickness, or pass a function to be used as the sort key. reverse (bool): Reverses the sort order. legend (Legend): The legend to plot with. ax (axis): Optional axis to plot to. figsize (tuple): A figure size, (width, height), optional. **kwargs: passed to the matplotlib bar plot command, ax.bar(). Returns: axis: If you sent an axis in, you get it back. """ if sort: if sort is True: def func(x): return x.thickness reverse = True # depends on [control=['if'], data=[]] data = sorted(self, key=func, reverse=reverse) # depends on [control=['if'], data=[]] else: data = self[:] if ax is None: (fig, ax) = plt.subplots(figsize=figsize) # depends on [control=['if'], data=['ax']] heights = [getattr(i, height) for i in data] comps = [i[0] for i in self.unique] if legend is None: legend = Legend.random(comps) # depends on [control=['if'], data=['legend']] colors = [legend.get_colour(i.primary) for i in data] bars = ax.bar(range(len(data)), height=heights, color=colors, **kwargs) # Legend. colourables = [i.primary.summary() for i in data] unique_bars = dict(zip(colourables, bars)) ax.legend(unique_bars.values(), unique_bars.keys()) ax.set_ylabel(height.title()) return ax
def table_formatter(self, dataframe, inc_header=1, inc_index=1): """Return a table formatter for the dataframe. Saves the user the need to import this class""" return TableFormatter(dataframe, inc_header=inc_header, inc_index=inc_index)
def function[table_formatter, parameter[self, dataframe, inc_header, inc_index]]: constant[Return a table formatter for the dataframe. Saves the user the need to import this class] return[call[name[TableFormatter], parameter[name[dataframe]]]]
keyword[def] identifier[table_formatter] ( identifier[self] , identifier[dataframe] , identifier[inc_header] = literal[int] , identifier[inc_index] = literal[int] ): literal[string] keyword[return] identifier[TableFormatter] ( identifier[dataframe] , identifier[inc_header] = identifier[inc_header] , identifier[inc_index] = identifier[inc_index] )
def table_formatter(self, dataframe, inc_header=1, inc_index=1): """Return a table formatter for the dataframe. Saves the user the need to import this class""" return TableFormatter(dataframe, inc_header=inc_header, inc_index=inc_index)
def _configure_app(app_): """Configure the Flask WSGI app.""" app_.url_map.strict_slashes = False app_.config.from_object(default_settings) app_.config.from_envvar('JOB_CONFIG', silent=True) db_url = app_.config.get('SQLALCHEMY_DATABASE_URI') if not db_url: raise Exception('No db_url in config') app_.wsgi_app = ProxyFix(app_.wsgi_app) global SSL_VERIFY if app_.config.get('SSL_VERIFY') in ['False', 'FALSE', '0', False, 0]: SSL_VERIFY = False else: SSL_VERIFY = True return app_
def function[_configure_app, parameter[app_]]: constant[Configure the Flask WSGI app.] name[app_].url_map.strict_slashes assign[=] constant[False] call[name[app_].config.from_object, parameter[name[default_settings]]] call[name[app_].config.from_envvar, parameter[constant[JOB_CONFIG]]] variable[db_url] assign[=] call[name[app_].config.get, parameter[constant[SQLALCHEMY_DATABASE_URI]]] if <ast.UnaryOp object at 0x7da18bcc8490> begin[:] <ast.Raise object at 0x7da18bcc8700> name[app_].wsgi_app assign[=] call[name[ProxyFix], parameter[name[app_].wsgi_app]] <ast.Global object at 0x7da18bcc8ee0> if compare[call[name[app_].config.get, parameter[constant[SSL_VERIFY]]] in list[[<ast.Constant object at 0x7da18bcc81f0>, <ast.Constant object at 0x7da18bcc9c60>, <ast.Constant object at 0x7da2047eb8e0>, <ast.Constant object at 0x7da2047e8310>, <ast.Constant object at 0x7da2047e93f0>]]] begin[:] variable[SSL_VERIFY] assign[=] constant[False] return[name[app_]]
keyword[def] identifier[_configure_app] ( identifier[app_] ): literal[string] identifier[app_] . identifier[url_map] . identifier[strict_slashes] = keyword[False] identifier[app_] . identifier[config] . identifier[from_object] ( identifier[default_settings] ) identifier[app_] . identifier[config] . identifier[from_envvar] ( literal[string] , identifier[silent] = keyword[True] ) identifier[db_url] = identifier[app_] . identifier[config] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[db_url] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[app_] . identifier[wsgi_app] = identifier[ProxyFix] ( identifier[app_] . identifier[wsgi_app] ) keyword[global] identifier[SSL_VERIFY] keyword[if] identifier[app_] . identifier[config] . identifier[get] ( literal[string] ) keyword[in] [ literal[string] , literal[string] , literal[string] , keyword[False] , literal[int] ]: identifier[SSL_VERIFY] = keyword[False] keyword[else] : identifier[SSL_VERIFY] = keyword[True] keyword[return] identifier[app_]
def _configure_app(app_): """Configure the Flask WSGI app.""" app_.url_map.strict_slashes = False app_.config.from_object(default_settings) app_.config.from_envvar('JOB_CONFIG', silent=True) db_url = app_.config.get('SQLALCHEMY_DATABASE_URI') if not db_url: raise Exception('No db_url in config') # depends on [control=['if'], data=[]] app_.wsgi_app = ProxyFix(app_.wsgi_app) global SSL_VERIFY if app_.config.get('SSL_VERIFY') in ['False', 'FALSE', '0', False, 0]: SSL_VERIFY = False # depends on [control=['if'], data=[]] else: SSL_VERIFY = True return app_
def update_campaign_archive(self, campaign_id, **kwargs): # noqa: E501 """Archive a campaign. # noqa: E501 This command will archive a campaign. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.update_campaign_archive(campaign_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str campaign_id: The campaign ID (required) :return: None If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.update_campaign_archive_with_http_info(campaign_id, **kwargs) # noqa: E501 else: (data) = self.update_campaign_archive_with_http_info(campaign_id, **kwargs) # noqa: E501 return data
def function[update_campaign_archive, parameter[self, campaign_id]]: constant[Archive a campaign. # noqa: E501 This command will archive a campaign. # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass asynchronous=True >>> thread = api.update_campaign_archive(campaign_id, asynchronous=True) >>> result = thread.get() :param asynchronous bool :param str campaign_id: The campaign ID (required) :return: None If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[asynchronous]]] begin[:] return[call[name[self].update_campaign_archive_with_http_info, parameter[name[campaign_id]]]]
keyword[def] identifier[update_campaign_archive] ( identifier[self] , identifier[campaign_id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[update_campaign_archive_with_http_info] ( identifier[campaign_id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[update_campaign_archive_with_http_info] ( identifier[campaign_id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def update_campaign_archive(self, campaign_id, **kwargs): # noqa: E501 'Archive a campaign. # noqa: E501\n\n This command will archive a campaign. # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass asynchronous=True\n >>> thread = api.update_campaign_archive(campaign_id, asynchronous=True)\n >>> result = thread.get()\n\n :param asynchronous bool\n :param str campaign_id: The campaign ID (required)\n :return: None\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('asynchronous'): return self.update_campaign_archive_with_http_info(campaign_id, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.update_campaign_archive_with_http_info(campaign_id, **kwargs) # noqa: E501 return data
def image(self, render_mode): """ Get the image associated with a particular render mode """ if render_mode == RenderMode.SEGMASK: return self.query_im elif render_mode == RenderMode.COLOR: return self.color_im elif render_mode == RenderMode.DEPTH: return self.depth_im else: raise ValueError('Render mode %s not supported' %(render_mode))
def function[image, parameter[self, render_mode]]: constant[ Get the image associated with a particular render mode ] if compare[name[render_mode] equal[==] name[RenderMode].SEGMASK] begin[:] return[name[self].query_im]
keyword[def] identifier[image] ( identifier[self] , identifier[render_mode] ): literal[string] keyword[if] identifier[render_mode] == identifier[RenderMode] . identifier[SEGMASK] : keyword[return] identifier[self] . identifier[query_im] keyword[elif] identifier[render_mode] == identifier[RenderMode] . identifier[COLOR] : keyword[return] identifier[self] . identifier[color_im] keyword[elif] identifier[render_mode] == identifier[RenderMode] . identifier[DEPTH] : keyword[return] identifier[self] . identifier[depth_im] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] %( identifier[render_mode] ))
def image(self, render_mode): """ Get the image associated with a particular render mode """ if render_mode == RenderMode.SEGMASK: return self.query_im # depends on [control=['if'], data=[]] elif render_mode == RenderMode.COLOR: return self.color_im # depends on [control=['if'], data=[]] elif render_mode == RenderMode.DEPTH: return self.depth_im # depends on [control=['if'], data=[]] else: raise ValueError('Render mode %s not supported' % render_mode)
def validate_enum_attribute(fully_qualified_name: str, spec: Dict[str, Any], attribute: str, candidates: Set[Union[str, int, float]]) -> Optional[InvalidValueError]: """ Validates to ensure that the value of an attribute lies within an allowed set of candidates """ if attribute not in spec: return if spec[attribute] not in candidates: return InvalidValueError(fully_qualified_name, spec, attribute, candidates)
def function[validate_enum_attribute, parameter[fully_qualified_name, spec, attribute, candidates]]: constant[ Validates to ensure that the value of an attribute lies within an allowed set of candidates ] if compare[name[attribute] <ast.NotIn object at 0x7da2590d7190> name[spec]] begin[:] return[None] if compare[call[name[spec]][name[attribute]] <ast.NotIn object at 0x7da2590d7190> name[candidates]] begin[:] return[call[name[InvalidValueError], parameter[name[fully_qualified_name], name[spec], name[attribute], name[candidates]]]]
keyword[def] identifier[validate_enum_attribute] ( identifier[fully_qualified_name] : identifier[str] , identifier[spec] : identifier[Dict] [ identifier[str] , identifier[Any] ], identifier[attribute] : identifier[str] , identifier[candidates] : identifier[Set] [ identifier[Union] [ identifier[str] , identifier[int] , identifier[float] ]])-> identifier[Optional] [ identifier[InvalidValueError] ]: literal[string] keyword[if] identifier[attribute] keyword[not] keyword[in] identifier[spec] : keyword[return] keyword[if] identifier[spec] [ identifier[attribute] ] keyword[not] keyword[in] identifier[candidates] : keyword[return] identifier[InvalidValueError] ( identifier[fully_qualified_name] , identifier[spec] , identifier[attribute] , identifier[candidates] )
def validate_enum_attribute(fully_qualified_name: str, spec: Dict[str, Any], attribute: str, candidates: Set[Union[str, int, float]]) -> Optional[InvalidValueError]: """ Validates to ensure that the value of an attribute lies within an allowed set of candidates """ if attribute not in spec: return # depends on [control=['if'], data=[]] if spec[attribute] not in candidates: return InvalidValueError(fully_qualified_name, spec, attribute, candidates) # depends on [control=['if'], data=['candidates']]
def create_plugin(plugin_data, verify_plugin=True, conn=None): """ :param plugin_data: <dict> dict matching Plugin() :param verify_plugin: <bool> :param conn: <rethinkdb.DefaultConnection> :return: <dict> rethinkdb insert response value """ assert isinstance(plugin_data, dict) if verify_plugin and not verify(plugin_data, Plugin()): raise ValueError("Invalid Plugin entry") current = find_plugin(plugin_data[SERVICE_KEY], SERVICE_KEY, conn) if not current: success = RPC.insert(plugin_data, conflict="update").run(conn) else: success = MOCK_ERROR_DICT error_msg = DUPLICATE_SERVICE_STRING.format(plugin_data[SERVICE_KEY]) MOCK_ERROR_DICT[FIRST_ERROR] = error_msg return success
def function[create_plugin, parameter[plugin_data, verify_plugin, conn]]: constant[ :param plugin_data: <dict> dict matching Plugin() :param verify_plugin: <bool> :param conn: <rethinkdb.DefaultConnection> :return: <dict> rethinkdb insert response value ] assert[call[name[isinstance], parameter[name[plugin_data], name[dict]]]] if <ast.BoolOp object at 0x7da1b1589690> begin[:] <ast.Raise object at 0x7da1b1589b10> variable[current] assign[=] call[name[find_plugin], parameter[call[name[plugin_data]][name[SERVICE_KEY]], name[SERVICE_KEY], name[conn]]] if <ast.UnaryOp object at 0x7da1b15f1660> begin[:] variable[success] assign[=] call[call[name[RPC].insert, parameter[name[plugin_data]]].run, parameter[name[conn]]] return[name[success]]
keyword[def] identifier[create_plugin] ( identifier[plugin_data] , identifier[verify_plugin] = keyword[True] , identifier[conn] = keyword[None] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[plugin_data] , identifier[dict] ) keyword[if] identifier[verify_plugin] keyword[and] keyword[not] identifier[verify] ( identifier[plugin_data] , identifier[Plugin] ()): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[current] = identifier[find_plugin] ( identifier[plugin_data] [ identifier[SERVICE_KEY] ], identifier[SERVICE_KEY] , identifier[conn] ) keyword[if] keyword[not] identifier[current] : identifier[success] = identifier[RPC] . identifier[insert] ( identifier[plugin_data] , identifier[conflict] = literal[string] ). identifier[run] ( identifier[conn] ) keyword[else] : identifier[success] = identifier[MOCK_ERROR_DICT] identifier[error_msg] = identifier[DUPLICATE_SERVICE_STRING] . identifier[format] ( identifier[plugin_data] [ identifier[SERVICE_KEY] ]) identifier[MOCK_ERROR_DICT] [ identifier[FIRST_ERROR] ]= identifier[error_msg] keyword[return] identifier[success]
def create_plugin(plugin_data, verify_plugin=True, conn=None): """ :param plugin_data: <dict> dict matching Plugin() :param verify_plugin: <bool> :param conn: <rethinkdb.DefaultConnection> :return: <dict> rethinkdb insert response value """ assert isinstance(plugin_data, dict) if verify_plugin and (not verify(plugin_data, Plugin())): raise ValueError('Invalid Plugin entry') # depends on [control=['if'], data=[]] current = find_plugin(plugin_data[SERVICE_KEY], SERVICE_KEY, conn) if not current: success = RPC.insert(plugin_data, conflict='update').run(conn) # depends on [control=['if'], data=[]] else: success = MOCK_ERROR_DICT error_msg = DUPLICATE_SERVICE_STRING.format(plugin_data[SERVICE_KEY]) MOCK_ERROR_DICT[FIRST_ERROR] = error_msg return success
def default(self, obj): """ Convert QuerySet objects to their list counter-parts """ if isinstance(obj, models.Model): return self.encode(model_to_dict(obj)) elif isinstance(obj, models.query.QuerySet): return serializers.serialize('json', obj) else: return super(JsonResponseEncoder, self).default(obj)
def function[default, parameter[self, obj]]: constant[ Convert QuerySet objects to their list counter-parts ] if call[name[isinstance], parameter[name[obj], name[models].Model]] begin[:] return[call[name[self].encode, parameter[call[name[model_to_dict], parameter[name[obj]]]]]]
keyword[def] identifier[default] ( identifier[self] , identifier[obj] ): literal[string] keyword[if] identifier[isinstance] ( identifier[obj] , identifier[models] . identifier[Model] ): keyword[return] identifier[self] . identifier[encode] ( identifier[model_to_dict] ( identifier[obj] )) keyword[elif] identifier[isinstance] ( identifier[obj] , identifier[models] . identifier[query] . identifier[QuerySet] ): keyword[return] identifier[serializers] . identifier[serialize] ( literal[string] , identifier[obj] ) keyword[else] : keyword[return] identifier[super] ( identifier[JsonResponseEncoder] , identifier[self] ). identifier[default] ( identifier[obj] )
def default(self, obj): """ Convert QuerySet objects to their list counter-parts """ if isinstance(obj, models.Model): return self.encode(model_to_dict(obj)) # depends on [control=['if'], data=[]] elif isinstance(obj, models.query.QuerySet): return serializers.serialize('json', obj) # depends on [control=['if'], data=[]] else: return super(JsonResponseEncoder, self).default(obj)
def run(vrn_info, calls_by_name, somatic_info, do_plots=True, handle_failures=True): """Run BubbleTree given variant calls, CNVs and somatic """ if "seq2c" in calls_by_name: cnv_info = calls_by_name["seq2c"] elif "cnvkit" in calls_by_name: cnv_info = calls_by_name["cnvkit"] else: raise ValueError("BubbleTree only currently support CNVkit and Seq2c: %s" % ", ".join(calls_by_name.keys())) work_dir = _cur_workdir(somatic_info.tumor_data) class OutWriter: def __init__(self, out_handle): self.writer = csv.writer(out_handle) def write_header(self): self.writer.writerow(["chrom", "start", "end", "freq"]) def write_row(self, rec, stats): self.writer.writerow([_to_ucsc_style(rec.chrom), rec.start, rec.stop, stats["tumor"]["freq"]]) vcf_csv = prep_vrn_file(vrn_info["vrn_file"], vrn_info["variantcaller"], work_dir, somatic_info, OutWriter, cnv_info["cns"]) cnv_csv = _prep_cnv_file(cnv_info["cns"], cnv_info["variantcaller"], work_dir, somatic_info.tumor_data) wide_lrr = cnv_info["variantcaller"] == "cnvkit" and somatic_info.normal_bam is None return _run_bubbletree(vcf_csv, cnv_csv, somatic_info.tumor_data, wide_lrr, do_plots, handle_failures)
def function[run, parameter[vrn_info, calls_by_name, somatic_info, do_plots, handle_failures]]: constant[Run BubbleTree given variant calls, CNVs and somatic ] if compare[constant[seq2c] in name[calls_by_name]] begin[:] variable[cnv_info] assign[=] call[name[calls_by_name]][constant[seq2c]] variable[work_dir] assign[=] call[name[_cur_workdir], parameter[name[somatic_info].tumor_data]] class class[OutWriter, parameter[]] begin[:] def function[__init__, parameter[self, out_handle]]: name[self].writer assign[=] call[name[csv].writer, parameter[name[out_handle]]] def function[write_header, parameter[self]]: call[name[self].writer.writerow, parameter[list[[<ast.Constant object at 0x7da1b18a9840>, <ast.Constant object at 0x7da1b18a97e0>, <ast.Constant object at 0x7da1b18a9720>, <ast.Constant object at 0x7da1b18ab490>]]]] def function[write_row, parameter[self, rec, stats]]: call[name[self].writer.writerow, parameter[list[[<ast.Call object at 0x7da1b18aa7d0>, <ast.Attribute object at 0x7da1b18a8a00>, <ast.Attribute object at 0x7da1b18a8a30>, <ast.Subscript object at 0x7da1b18aaa40>]]]] variable[vcf_csv] assign[=] call[name[prep_vrn_file], parameter[call[name[vrn_info]][constant[vrn_file]], call[name[vrn_info]][constant[variantcaller]], name[work_dir], name[somatic_info], name[OutWriter], call[name[cnv_info]][constant[cns]]]] variable[cnv_csv] assign[=] call[name[_prep_cnv_file], parameter[call[name[cnv_info]][constant[cns]], call[name[cnv_info]][constant[variantcaller]], name[work_dir], name[somatic_info].tumor_data]] variable[wide_lrr] assign[=] <ast.BoolOp object at 0x7da1b18fe7d0> return[call[name[_run_bubbletree], parameter[name[vcf_csv], name[cnv_csv], name[somatic_info].tumor_data, name[wide_lrr], name[do_plots], name[handle_failures]]]]
keyword[def] identifier[run] ( identifier[vrn_info] , identifier[calls_by_name] , identifier[somatic_info] , identifier[do_plots] = keyword[True] , identifier[handle_failures] = keyword[True] ): literal[string] keyword[if] literal[string] keyword[in] identifier[calls_by_name] : identifier[cnv_info] = identifier[calls_by_name] [ literal[string] ] keyword[elif] literal[string] keyword[in] identifier[calls_by_name] : identifier[cnv_info] = identifier[calls_by_name] [ literal[string] ] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] % literal[string] . identifier[join] ( identifier[calls_by_name] . identifier[keys] ())) identifier[work_dir] = identifier[_cur_workdir] ( identifier[somatic_info] . identifier[tumor_data] ) keyword[class] identifier[OutWriter] : keyword[def] identifier[__init__] ( identifier[self] , identifier[out_handle] ): identifier[self] . identifier[writer] = identifier[csv] . identifier[writer] ( identifier[out_handle] ) keyword[def] identifier[write_header] ( identifier[self] ): identifier[self] . identifier[writer] . identifier[writerow] ([ literal[string] , literal[string] , literal[string] , literal[string] ]) keyword[def] identifier[write_row] ( identifier[self] , identifier[rec] , identifier[stats] ): identifier[self] . identifier[writer] . identifier[writerow] ([ identifier[_to_ucsc_style] ( identifier[rec] . identifier[chrom] ), identifier[rec] . identifier[start] , identifier[rec] . identifier[stop] , identifier[stats] [ literal[string] ][ literal[string] ]]) identifier[vcf_csv] = identifier[prep_vrn_file] ( identifier[vrn_info] [ literal[string] ], identifier[vrn_info] [ literal[string] ], identifier[work_dir] , identifier[somatic_info] , identifier[OutWriter] , identifier[cnv_info] [ literal[string] ]) identifier[cnv_csv] = identifier[_prep_cnv_file] ( identifier[cnv_info] [ literal[string] ], identifier[cnv_info] [ literal[string] ], identifier[work_dir] , identifier[somatic_info] . 
identifier[tumor_data] ) identifier[wide_lrr] = identifier[cnv_info] [ literal[string] ]== literal[string] keyword[and] identifier[somatic_info] . identifier[normal_bam] keyword[is] keyword[None] keyword[return] identifier[_run_bubbletree] ( identifier[vcf_csv] , identifier[cnv_csv] , identifier[somatic_info] . identifier[tumor_data] , identifier[wide_lrr] , identifier[do_plots] , identifier[handle_failures] )
def run(vrn_info, calls_by_name, somatic_info, do_plots=True, handle_failures=True): """Run BubbleTree given variant calls, CNVs and somatic """ if 'seq2c' in calls_by_name: cnv_info = calls_by_name['seq2c'] # depends on [control=['if'], data=['calls_by_name']] elif 'cnvkit' in calls_by_name: cnv_info = calls_by_name['cnvkit'] # depends on [control=['if'], data=['calls_by_name']] else: raise ValueError('BubbleTree only currently support CNVkit and Seq2c: %s' % ', '.join(calls_by_name.keys())) work_dir = _cur_workdir(somatic_info.tumor_data) class OutWriter: def __init__(self, out_handle): self.writer = csv.writer(out_handle) def write_header(self): self.writer.writerow(['chrom', 'start', 'end', 'freq']) def write_row(self, rec, stats): self.writer.writerow([_to_ucsc_style(rec.chrom), rec.start, rec.stop, stats['tumor']['freq']]) vcf_csv = prep_vrn_file(vrn_info['vrn_file'], vrn_info['variantcaller'], work_dir, somatic_info, OutWriter, cnv_info['cns']) cnv_csv = _prep_cnv_file(cnv_info['cns'], cnv_info['variantcaller'], work_dir, somatic_info.tumor_data) wide_lrr = cnv_info['variantcaller'] == 'cnvkit' and somatic_info.normal_bam is None return _run_bubbletree(vcf_csv, cnv_csv, somatic_info.tumor_data, wide_lrr, do_plots, handle_failures)
def close(self): """ Closes this QEMU VM. """ if not (yield from super().close()): return False self.acpi_shutdown = False yield from self.stop() for adapter in self._ethernet_adapters: if adapter is not None: for nio in adapter.ports.values(): if nio and isinstance(nio, NIOUDP): self.manager.port_manager.release_udp_port(nio.lport, self._project) for udp_tunnel in self._local_udp_tunnels.values(): self.manager.port_manager.release_udp_port(udp_tunnel[0].lport, self._project) self.manager.port_manager.release_udp_port(udp_tunnel[1].lport, self._project) self._local_udp_tunnels = {}
def function[close, parameter[self]]: constant[ Closes this QEMU VM. ] if <ast.UnaryOp object at 0x7da18eb56320> begin[:] return[constant[False]] name[self].acpi_shutdown assign[=] constant[False] <ast.YieldFrom object at 0x7da18eb54490> for taget[name[adapter]] in starred[name[self]._ethernet_adapters] begin[:] if compare[name[adapter] is_not constant[None]] begin[:] for taget[name[nio]] in starred[call[name[adapter].ports.values, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da18eb54f10> begin[:] call[name[self].manager.port_manager.release_udp_port, parameter[name[nio].lport, name[self]._project]] for taget[name[udp_tunnel]] in starred[call[name[self]._local_udp_tunnels.values, parameter[]]] begin[:] call[name[self].manager.port_manager.release_udp_port, parameter[call[name[udp_tunnel]][constant[0]].lport, name[self]._project]] call[name[self].manager.port_manager.release_udp_port, parameter[call[name[udp_tunnel]][constant[1]].lport, name[self]._project]] name[self]._local_udp_tunnels assign[=] dictionary[[], []]
keyword[def] identifier[close] ( identifier[self] ): literal[string] keyword[if] keyword[not] ( keyword[yield] keyword[from] identifier[super] (). identifier[close] ()): keyword[return] keyword[False] identifier[self] . identifier[acpi_shutdown] = keyword[False] keyword[yield] keyword[from] identifier[self] . identifier[stop] () keyword[for] identifier[adapter] keyword[in] identifier[self] . identifier[_ethernet_adapters] : keyword[if] identifier[adapter] keyword[is] keyword[not] keyword[None] : keyword[for] identifier[nio] keyword[in] identifier[adapter] . identifier[ports] . identifier[values] (): keyword[if] identifier[nio] keyword[and] identifier[isinstance] ( identifier[nio] , identifier[NIOUDP] ): identifier[self] . identifier[manager] . identifier[port_manager] . identifier[release_udp_port] ( identifier[nio] . identifier[lport] , identifier[self] . identifier[_project] ) keyword[for] identifier[udp_tunnel] keyword[in] identifier[self] . identifier[_local_udp_tunnels] . identifier[values] (): identifier[self] . identifier[manager] . identifier[port_manager] . identifier[release_udp_port] ( identifier[udp_tunnel] [ literal[int] ]. identifier[lport] , identifier[self] . identifier[_project] ) identifier[self] . identifier[manager] . identifier[port_manager] . identifier[release_udp_port] ( identifier[udp_tunnel] [ literal[int] ]. identifier[lport] , identifier[self] . identifier[_project] ) identifier[self] . identifier[_local_udp_tunnels] ={}
def close(self): """ Closes this QEMU VM. """ if not (yield from super().close()): return False # depends on [control=['if'], data=[]] self.acpi_shutdown = False yield from self.stop() for adapter in self._ethernet_adapters: if adapter is not None: for nio in adapter.ports.values(): if nio and isinstance(nio, NIOUDP): self.manager.port_manager.release_udp_port(nio.lport, self._project) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['nio']] # depends on [control=['if'], data=['adapter']] # depends on [control=['for'], data=['adapter']] for udp_tunnel in self._local_udp_tunnels.values(): self.manager.port_manager.release_udp_port(udp_tunnel[0].lport, self._project) self.manager.port_manager.release_udp_port(udp_tunnel[1].lport, self._project) # depends on [control=['for'], data=['udp_tunnel']] self._local_udp_tunnels = {}
def requirements(fname): """ Generator to parse requirements.txt file Supports bits of extended pip format (git urls) """ with open(fname) as f: for line in f: match = re.search('#egg=(.*)$', line) if match: yield match.groups()[0] else: yield line.strip()
def function[requirements, parameter[fname]]: constant[ Generator to parse requirements.txt file Supports bits of extended pip format (git urls) ] with call[name[open], parameter[name[fname]]] begin[:] for taget[name[line]] in starred[name[f]] begin[:] variable[match] assign[=] call[name[re].search, parameter[constant[#egg=(.*)$], name[line]]] if name[match] begin[:] <ast.Yield object at 0x7da1b0ebdd20>
keyword[def] identifier[requirements] ( identifier[fname] ): literal[string] keyword[with] identifier[open] ( identifier[fname] ) keyword[as] identifier[f] : keyword[for] identifier[line] keyword[in] identifier[f] : identifier[match] = identifier[re] . identifier[search] ( literal[string] , identifier[line] ) keyword[if] identifier[match] : keyword[yield] identifier[match] . identifier[groups] ()[ literal[int] ] keyword[else] : keyword[yield] identifier[line] . identifier[strip] ()
def requirements(fname): """ Generator to parse requirements.txt file Supports bits of extended pip format (git urls) """ with open(fname) as f: for line in f: match = re.search('#egg=(.*)$', line) if match: yield match.groups()[0] # depends on [control=['if'], data=[]] else: yield line.strip() # depends on [control=['for'], data=['line']] # depends on [control=['with'], data=['f']]
def read(*paths): """Build a file path from *paths* and return the contents.""" with open(os.path.join(*paths), 'r') as file_handler: return file_handler.read()
def function[read, parameter[]]: constant[Build a file path from *paths* and return the contents.] with call[name[open], parameter[call[name[os].path.join, parameter[<ast.Starred object at 0x7da2041d9a20>]], constant[r]]] begin[:] return[call[name[file_handler].read, parameter[]]]
keyword[def] identifier[read] (* identifier[paths] ): literal[string] keyword[with] identifier[open] ( identifier[os] . identifier[path] . identifier[join] (* identifier[paths] ), literal[string] ) keyword[as] identifier[file_handler] : keyword[return] identifier[file_handler] . identifier[read] ()
def read(*paths): """Build a file path from *paths* and return the contents.""" with open(os.path.join(*paths), 'r') as file_handler: return file_handler.read() # depends on [control=['with'], data=['file_handler']]
def setObsoletedByResponse( self, pid, obsoletedByPid, serialVersion, vendorSpecific=None ): """CNCore.setObsoletedBy(session, pid, obsoletedByPid, serialVersion) → boolean https://releases.dataone.org/online/api- documentation-v2.0.1/apis/CN_APIs.html#CNCore.setObsoletedBy. Args: pid: obsoletedByPid: serialVersion: vendorSpecific: Returns: """ mmp_dict = { 'obsoletedByPid': obsoletedByPid, 'serialVersion': str(serialVersion), } return self.PUT(['obsoletedBy', pid], fields=mmp_dict, headers=vendorSpecific)
def function[setObsoletedByResponse, parameter[self, pid, obsoletedByPid, serialVersion, vendorSpecific]]: constant[CNCore.setObsoletedBy(session, pid, obsoletedByPid, serialVersion) → boolean https://releases.dataone.org/online/api- documentation-v2.0.1/apis/CN_APIs.html#CNCore.setObsoletedBy. Args: pid: obsoletedByPid: serialVersion: vendorSpecific: Returns: ] variable[mmp_dict] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a2f310>, <ast.Constant object at 0x7da1b1a2e410>], [<ast.Name object at 0x7da1b1a2ca60>, <ast.Call object at 0x7da1b1a2c6a0>]] return[call[name[self].PUT, parameter[list[[<ast.Constant object at 0x7da1b1a2e6b0>, <ast.Name object at 0x7da1b1a2e1a0>]]]]]
keyword[def] identifier[setObsoletedByResponse] ( identifier[self] , identifier[pid] , identifier[obsoletedByPid] , identifier[serialVersion] , identifier[vendorSpecific] = keyword[None] ): literal[string] identifier[mmp_dict] ={ literal[string] : identifier[obsoletedByPid] , literal[string] : identifier[str] ( identifier[serialVersion] ), } keyword[return] identifier[self] . identifier[PUT] ([ literal[string] , identifier[pid] ], identifier[fields] = identifier[mmp_dict] , identifier[headers] = identifier[vendorSpecific] )
def setObsoletedByResponse(self, pid, obsoletedByPid, serialVersion, vendorSpecific=None): """CNCore.setObsoletedBy(session, pid, obsoletedByPid, serialVersion) → boolean https://releases.dataone.org/online/api- documentation-v2.0.1/apis/CN_APIs.html#CNCore.setObsoletedBy. Args: pid: obsoletedByPid: serialVersion: vendorSpecific: Returns: """ mmp_dict = {'obsoletedByPid': obsoletedByPid, 'serialVersion': str(serialVersion)} return self.PUT(['obsoletedBy', pid], fields=mmp_dict, headers=vendorSpecific)
def _simple_new(cls, values, name=None, dtype=None, **kwargs): """ We require that we have a dtype compat for the values. If we are passed a non-dtype compat, then coerce using the constructor. Must be careful not to recurse. """ if not hasattr(values, 'dtype'): if (values is None or not len(values)) and dtype is not None: values = np.empty(0, dtype=dtype) else: values = np.array(values, copy=False) if is_object_dtype(values): values = cls(values, name=name, dtype=dtype, **kwargs)._ndarray_values if isinstance(values, (ABCSeries, ABCIndexClass)): # Index._data must always be an ndarray. # This is no-copy for when _values is an ndarray, # which should be always at this point. values = np.asarray(values._values) result = object.__new__(cls) result._data = values # _index_data is a (temporary?) fix to ensure that the direct data # manipulation we do in `_libs/reduction.pyx` continues to work. # We need access to the actual ndarray, since we're messing with # data buffers and strides. We don't re-use `_ndarray_values`, since # we actually set this value too. result._index_data = values result.name = name for k, v in kwargs.items(): setattr(result, k, v) return result._reset_identity()
def function[_simple_new, parameter[cls, values, name, dtype]]: constant[ We require that we have a dtype compat for the values. If we are passed a non-dtype compat, then coerce using the constructor. Must be careful not to recurse. ] if <ast.UnaryOp object at 0x7da1b23459c0> begin[:] if <ast.BoolOp object at 0x7da1b2345fc0> begin[:] variable[values] assign[=] call[name[np].empty, parameter[constant[0]]] if call[name[isinstance], parameter[name[values], tuple[[<ast.Name object at 0x7da18fe921a0>, <ast.Name object at 0x7da18fe903d0>]]]] begin[:] variable[values] assign[=] call[name[np].asarray, parameter[name[values]._values]] variable[result] assign[=] call[name[object].__new__, parameter[name[cls]]] name[result]._data assign[=] name[values] name[result]._index_data assign[=] name[values] name[result].name assign[=] name[name] for taget[tuple[[<ast.Name object at 0x7da18fe90220>, <ast.Name object at 0x7da18fe93730>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] call[name[setattr], parameter[name[result], name[k], name[v]]] return[call[name[result]._reset_identity, parameter[]]]
keyword[def] identifier[_simple_new] ( identifier[cls] , identifier[values] , identifier[name] = keyword[None] , identifier[dtype] = keyword[None] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[values] , literal[string] ): keyword[if] ( identifier[values] keyword[is] keyword[None] keyword[or] keyword[not] identifier[len] ( identifier[values] )) keyword[and] identifier[dtype] keyword[is] keyword[not] keyword[None] : identifier[values] = identifier[np] . identifier[empty] ( literal[int] , identifier[dtype] = identifier[dtype] ) keyword[else] : identifier[values] = identifier[np] . identifier[array] ( identifier[values] , identifier[copy] = keyword[False] ) keyword[if] identifier[is_object_dtype] ( identifier[values] ): identifier[values] = identifier[cls] ( identifier[values] , identifier[name] = identifier[name] , identifier[dtype] = identifier[dtype] , ** identifier[kwargs] ). identifier[_ndarray_values] keyword[if] identifier[isinstance] ( identifier[values] ,( identifier[ABCSeries] , identifier[ABCIndexClass] )): identifier[values] = identifier[np] . identifier[asarray] ( identifier[values] . identifier[_values] ) identifier[result] = identifier[object] . identifier[__new__] ( identifier[cls] ) identifier[result] . identifier[_data] = identifier[values] identifier[result] . identifier[_index_data] = identifier[values] identifier[result] . identifier[name] = identifier[name] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] (): identifier[setattr] ( identifier[result] , identifier[k] , identifier[v] ) keyword[return] identifier[result] . identifier[_reset_identity] ()
def _simple_new(cls, values, name=None, dtype=None, **kwargs): """ We require that we have a dtype compat for the values. If we are passed a non-dtype compat, then coerce using the constructor. Must be careful not to recurse. """ if not hasattr(values, 'dtype'): if (values is None or not len(values)) and dtype is not None: values = np.empty(0, dtype=dtype) # depends on [control=['if'], data=[]] else: values = np.array(values, copy=False) if is_object_dtype(values): values = cls(values, name=name, dtype=dtype, **kwargs)._ndarray_values # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if isinstance(values, (ABCSeries, ABCIndexClass)): # Index._data must always be an ndarray. # This is no-copy for when _values is an ndarray, # which should be always at this point. values = np.asarray(values._values) # depends on [control=['if'], data=[]] result = object.__new__(cls) result._data = values # _index_data is a (temporary?) fix to ensure that the direct data # manipulation we do in `_libs/reduction.pyx` continues to work. # We need access to the actual ndarray, since we're messing with # data buffers and strides. We don't re-use `_ndarray_values`, since # we actually set this value too. result._index_data = values result.name = name for (k, v) in kwargs.items(): setattr(result, k, v) # depends on [control=['for'], data=[]] return result._reset_identity()
def from_file(cls, filename, **kwargs): """Create a reddening law from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Reddening law filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- redlaw : `ReddeningLaw` Empirical reddening law. """ if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit if ((filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs): kwargs['flux_col'] = 'Av/E(B-V)' header, wavelengths, rvs = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=rvs, meta={'header': header})
def function[from_file, parameter[cls, filename]]: constant[Create a reddening law from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Reddening law filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- redlaw : `ReddeningLaw` Empirical reddening law. ] if compare[constant[flux_unit] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] call[name[kwargs]][constant[flux_unit]] assign[=] name[cls]._internal_flux_unit if <ast.BoolOp object at 0x7da18f58e140> begin[:] call[name[kwargs]][constant[flux_col]] assign[=] constant[Av/E(B-V)] <ast.Tuple object at 0x7da18f58f8e0> assign[=] call[name[specio].read_spec, parameter[name[filename]]] return[call[name[cls], parameter[name[Empirical1D]]]]
keyword[def] identifier[from_file] ( identifier[cls] , identifier[filename] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[kwargs] [ literal[string] ]= identifier[cls] . identifier[_internal_flux_unit] keyword[if] (( identifier[filename] . identifier[endswith] ( literal[string] ) keyword[or] identifier[filename] . identifier[endswith] ( literal[string] )) keyword[and] literal[string] keyword[not] keyword[in] identifier[kwargs] ): identifier[kwargs] [ literal[string] ]= literal[string] identifier[header] , identifier[wavelengths] , identifier[rvs] = identifier[specio] . identifier[read_spec] ( identifier[filename] ,** identifier[kwargs] ) keyword[return] identifier[cls] ( identifier[Empirical1D] , identifier[points] = identifier[wavelengths] , identifier[lookup_table] = identifier[rvs] , identifier[meta] ={ literal[string] : identifier[header] })
def from_file(cls, filename, **kwargs): """Create a reddening law from file. If filename has 'fits' or 'fit' suffix, it is read as FITS. Otherwise, it is read as ASCII. Parameters ---------- filename : str Reddening law filename. kwargs : dict Keywords acceptable by :func:`~synphot.specio.read_fits_spec` (if FITS) or :func:`~synphot.specio.read_ascii_spec` (if ASCII). Returns ------- redlaw : `ReddeningLaw` Empirical reddening law. """ if 'flux_unit' not in kwargs: kwargs['flux_unit'] = cls._internal_flux_unit # depends on [control=['if'], data=['kwargs']] if (filename.endswith('fits') or filename.endswith('fit')) and 'flux_col' not in kwargs: kwargs['flux_col'] = 'Av/E(B-V)' # depends on [control=['if'], data=[]] (header, wavelengths, rvs) = specio.read_spec(filename, **kwargs) return cls(Empirical1D, points=wavelengths, lookup_table=rvs, meta={'header': header})
def sleep_here(count, t): """simple function that takes args, prints a short message, sleeps for a time, and returns the same args""" import time,sys print("hi from engine %i" % id) sys.stdout.flush() time.sleep(t) return count,t
def function[sleep_here, parameter[count, t]]: constant[simple function that takes args, prints a short message, sleeps for a time, and returns the same args] import module[time], module[sys] call[name[print], parameter[binary_operation[constant[hi from engine %i] <ast.Mod object at 0x7da2590d6920> name[id]]]] call[name[sys].stdout.flush, parameter[]] call[name[time].sleep, parameter[name[t]]] return[tuple[[<ast.Name object at 0x7da1b021ffa0>, <ast.Name object at 0x7da1b021ddb0>]]]
keyword[def] identifier[sleep_here] ( identifier[count] , identifier[t] ): literal[string] keyword[import] identifier[time] , identifier[sys] identifier[print] ( literal[string] % identifier[id] ) identifier[sys] . identifier[stdout] . identifier[flush] () identifier[time] . identifier[sleep] ( identifier[t] ) keyword[return] identifier[count] , identifier[t]
def sleep_here(count, t): """simple function that takes args, prints a short message, sleeps for a time, and returns the same args""" import time, sys print('hi from engine %i' % id) sys.stdout.flush() time.sleep(t) return (count, t)
def load_logs(optimizer, logs): """Load previous ... """ import json if isinstance(logs, str): logs = [logs] for log in logs: with open(log, "r") as j: while True: try: iteration = next(j) except StopIteration: break iteration = json.loads(iteration) try: optimizer.register( params=iteration["params"], target=iteration["target"], ) except KeyError: pass return optimizer
def function[load_logs, parameter[optimizer, logs]]: constant[Load previous ... ] import module[json] if call[name[isinstance], parameter[name[logs], name[str]]] begin[:] variable[logs] assign[=] list[[<ast.Name object at 0x7da1b21e38e0>]] for taget[name[log]] in starred[name[logs]] begin[:] with call[name[open], parameter[name[log], constant[r]]] begin[:] while constant[True] begin[:] <ast.Try object at 0x7da1b212e410> variable[iteration] assign[=] call[name[json].loads, parameter[name[iteration]]] <ast.Try object at 0x7da1b212c3d0> return[name[optimizer]]
keyword[def] identifier[load_logs] ( identifier[optimizer] , identifier[logs] ): literal[string] keyword[import] identifier[json] keyword[if] identifier[isinstance] ( identifier[logs] , identifier[str] ): identifier[logs] =[ identifier[logs] ] keyword[for] identifier[log] keyword[in] identifier[logs] : keyword[with] identifier[open] ( identifier[log] , literal[string] ) keyword[as] identifier[j] : keyword[while] keyword[True] : keyword[try] : identifier[iteration] = identifier[next] ( identifier[j] ) keyword[except] identifier[StopIteration] : keyword[break] identifier[iteration] = identifier[json] . identifier[loads] ( identifier[iteration] ) keyword[try] : identifier[optimizer] . identifier[register] ( identifier[params] = identifier[iteration] [ literal[string] ], identifier[target] = identifier[iteration] [ literal[string] ], ) keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[optimizer]
def load_logs(optimizer, logs): """Load previous ... """ import json if isinstance(logs, str): logs = [logs] # depends on [control=['if'], data=[]] for log in logs: with open(log, 'r') as j: while True: try: iteration = next(j) # depends on [control=['try'], data=[]] except StopIteration: break # depends on [control=['except'], data=[]] iteration = json.loads(iteration) try: optimizer.register(params=iteration['params'], target=iteration['target']) # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] # depends on [control=['with'], data=['j']] # depends on [control=['for'], data=['log']] return optimizer
def contains_parent_dir(fpath, dirs): """ Returns true if paths in dirs start with fpath. Precondition: dirs and fpath should be normalized before calling this function. """ # Note: this function is used nowhere in pygccxml but is used # at least by pypluplus; so it should stay here. return bool([x for x in dirs if _f(fpath, x)])
def function[contains_parent_dir, parameter[fpath, dirs]]: constant[ Returns true if paths in dirs start with fpath. Precondition: dirs and fpath should be normalized before calling this function. ] return[call[name[bool], parameter[<ast.ListComp object at 0x7da18dc98f10>]]]
keyword[def] identifier[contains_parent_dir] ( identifier[fpath] , identifier[dirs] ): literal[string] keyword[return] identifier[bool] ([ identifier[x] keyword[for] identifier[x] keyword[in] identifier[dirs] keyword[if] identifier[_f] ( identifier[fpath] , identifier[x] )])
def contains_parent_dir(fpath, dirs): """ Returns true if paths in dirs start with fpath. Precondition: dirs and fpath should be normalized before calling this function. """ # Note: this function is used nowhere in pygccxml but is used # at least by pypluplus; so it should stay here. return bool([x for x in dirs if _f(fpath, x)])
def Add(self, category, label, age): """Adds another instance of this category into the active_days counter. We automatically count the event towards all relevant active_days. For example, if the category "Windows" was seen 8 days ago it will be counted towards the 30 day active, 14 day active but not against the 7 and 1 day actives. Args: category: The category name to account this instance against. label: Client label to which this should be applied. age: When this instance occurred. """ now = rdfvalue.RDFDatetime.Now() category = utils.SmartUnicode(category) for active_time in self.active_days: self.categories[active_time].setdefault(label, {}) if (now - age).seconds < active_time * 24 * 60 * 60: self.categories[active_time][label][ category] = self.categories[active_time][label].get(category, 0) + 1
def function[Add, parameter[self, category, label, age]]: constant[Adds another instance of this category into the active_days counter. We automatically count the event towards all relevant active_days. For example, if the category "Windows" was seen 8 days ago it will be counted towards the 30 day active, 14 day active but not against the 7 and 1 day actives. Args: category: The category name to account this instance against. label: Client label to which this should be applied. age: When this instance occurred. ] variable[now] assign[=] call[name[rdfvalue].RDFDatetime.Now, parameter[]] variable[category] assign[=] call[name[utils].SmartUnicode, parameter[name[category]]] for taget[name[active_time]] in starred[name[self].active_days] begin[:] call[call[name[self].categories][name[active_time]].setdefault, parameter[name[label], dictionary[[], []]]] if compare[binary_operation[name[now] - name[age]].seconds less[<] binary_operation[binary_operation[binary_operation[name[active_time] * constant[24]] * constant[60]] * constant[60]]] begin[:] call[call[call[name[self].categories][name[active_time]]][name[label]]][name[category]] assign[=] binary_operation[call[call[call[name[self].categories][name[active_time]]][name[label]].get, parameter[name[category], constant[0]]] + constant[1]]
keyword[def] identifier[Add] ( identifier[self] , identifier[category] , identifier[label] , identifier[age] ): literal[string] identifier[now] = identifier[rdfvalue] . identifier[RDFDatetime] . identifier[Now] () identifier[category] = identifier[utils] . identifier[SmartUnicode] ( identifier[category] ) keyword[for] identifier[active_time] keyword[in] identifier[self] . identifier[active_days] : identifier[self] . identifier[categories] [ identifier[active_time] ]. identifier[setdefault] ( identifier[label] ,{}) keyword[if] ( identifier[now] - identifier[age] ). identifier[seconds] < identifier[active_time] * literal[int] * literal[int] * literal[int] : identifier[self] . identifier[categories] [ identifier[active_time] ][ identifier[label] ][ identifier[category] ]= identifier[self] . identifier[categories] [ identifier[active_time] ][ identifier[label] ]. identifier[get] ( identifier[category] , literal[int] )+ literal[int]
def Add(self, category, label, age): """Adds another instance of this category into the active_days counter. We automatically count the event towards all relevant active_days. For example, if the category "Windows" was seen 8 days ago it will be counted towards the 30 day active, 14 day active but not against the 7 and 1 day actives. Args: category: The category name to account this instance against. label: Client label to which this should be applied. age: When this instance occurred. """ now = rdfvalue.RDFDatetime.Now() category = utils.SmartUnicode(category) for active_time in self.active_days: self.categories[active_time].setdefault(label, {}) if (now - age).seconds < active_time * 24 * 60 * 60: self.categories[active_time][label][category] = self.categories[active_time][label].get(category, 0) + 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['active_time']]
def remove_nonspeech_fragments(self, zero_length_only=False): """ Remove ``NONSPEECH`` fragments from the list. If ``zero_length_only`` is ``True``, remove only those fragments with zero length, and make all the others ``REGULAR``. :param bool zero_length_only: remove only zero length NONSPEECH fragments """ self.log(u"Removing nonspeech fragments...") nonspeech = list(self.nonspeech_fragments) if zero_length_only: nonspeech = [(i, f) for i, f in nonspeech if f.has_zero_length] nonspeech_indices = [i for i, f in nonspeech] self.remove(nonspeech_indices) if zero_length_only: for i, f in list(self.nonspeech_fragments): f.fragment_type = SyncMapFragment.REGULAR self.log(u"Removing nonspeech fragments... done")
def function[remove_nonspeech_fragments, parameter[self, zero_length_only]]: constant[ Remove ``NONSPEECH`` fragments from the list. If ``zero_length_only`` is ``True``, remove only those fragments with zero length, and make all the others ``REGULAR``. :param bool zero_length_only: remove only zero length NONSPEECH fragments ] call[name[self].log, parameter[constant[Removing nonspeech fragments...]]] variable[nonspeech] assign[=] call[name[list], parameter[name[self].nonspeech_fragments]] if name[zero_length_only] begin[:] variable[nonspeech] assign[=] <ast.ListComp object at 0x7da1b18fa470> variable[nonspeech_indices] assign[=] <ast.ListComp object at 0x7da204963d00> call[name[self].remove, parameter[name[nonspeech_indices]]] if name[zero_length_only] begin[:] for taget[tuple[[<ast.Name object at 0x7da2049612a0>, <ast.Name object at 0x7da2049606d0>]]] in starred[call[name[list], parameter[name[self].nonspeech_fragments]]] begin[:] name[f].fragment_type assign[=] name[SyncMapFragment].REGULAR call[name[self].log, parameter[constant[Removing nonspeech fragments... done]]]
keyword[def] identifier[remove_nonspeech_fragments] ( identifier[self] , identifier[zero_length_only] = keyword[False] ): literal[string] identifier[self] . identifier[log] ( literal[string] ) identifier[nonspeech] = identifier[list] ( identifier[self] . identifier[nonspeech_fragments] ) keyword[if] identifier[zero_length_only] : identifier[nonspeech] =[( identifier[i] , identifier[f] ) keyword[for] identifier[i] , identifier[f] keyword[in] identifier[nonspeech] keyword[if] identifier[f] . identifier[has_zero_length] ] identifier[nonspeech_indices] =[ identifier[i] keyword[for] identifier[i] , identifier[f] keyword[in] identifier[nonspeech] ] identifier[self] . identifier[remove] ( identifier[nonspeech_indices] ) keyword[if] identifier[zero_length_only] : keyword[for] identifier[i] , identifier[f] keyword[in] identifier[list] ( identifier[self] . identifier[nonspeech_fragments] ): identifier[f] . identifier[fragment_type] = identifier[SyncMapFragment] . identifier[REGULAR] identifier[self] . identifier[log] ( literal[string] )
def remove_nonspeech_fragments(self, zero_length_only=False): """ Remove ``NONSPEECH`` fragments from the list. If ``zero_length_only`` is ``True``, remove only those fragments with zero length, and make all the others ``REGULAR``. :param bool zero_length_only: remove only zero length NONSPEECH fragments """ self.log(u'Removing nonspeech fragments...') nonspeech = list(self.nonspeech_fragments) if zero_length_only: nonspeech = [(i, f) for (i, f) in nonspeech if f.has_zero_length] # depends on [control=['if'], data=[]] nonspeech_indices = [i for (i, f) in nonspeech] self.remove(nonspeech_indices) if zero_length_only: for (i, f) in list(self.nonspeech_fragments): f.fragment_type = SyncMapFragment.REGULAR # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]] self.log(u'Removing nonspeech fragments... done')
def _ReadData(self, file_object, file_offset, data_size): """Reads data. Args: file_object (dvfvs.FileIO): a file-like object to read. file_offset (int): offset of the data relative to the start of the file-like object. data_size (int): size of the data. The resulting data size much match the requested data size so that dtFabric can map the data type definitions onto the byte stream. Returns: bytes: byte stream containing the data. Raises: ParseError: if the data cannot be read. ValueError: if the file-like object is missing. """ if not file_object: raise ValueError('Missing file-like object.') file_object.seek(file_offset, os.SEEK_SET) read_error = '' try: data = file_object.read(data_size) if len(data) != data_size: read_error = 'missing data' except IOError as exception: read_error = '{0!s}'.format(exception) if read_error: raise errors.ParseError( 'Unable to read data at offset: 0x{0:08x} with error: {1:s}'.format( file_offset, read_error)) return data
def function[_ReadData, parameter[self, file_object, file_offset, data_size]]: constant[Reads data. Args: file_object (dvfvs.FileIO): a file-like object to read. file_offset (int): offset of the data relative to the start of the file-like object. data_size (int): size of the data. The resulting data size much match the requested data size so that dtFabric can map the data type definitions onto the byte stream. Returns: bytes: byte stream containing the data. Raises: ParseError: if the data cannot be read. ValueError: if the file-like object is missing. ] if <ast.UnaryOp object at 0x7da20c795180> begin[:] <ast.Raise object at 0x7da20c796ad0> call[name[file_object].seek, parameter[name[file_offset], name[os].SEEK_SET]] variable[read_error] assign[=] constant[] <ast.Try object at 0x7da20c794580> if name[read_error] begin[:] <ast.Raise object at 0x7da20c794100> return[name[data]]
keyword[def] identifier[_ReadData] ( identifier[self] , identifier[file_object] , identifier[file_offset] , identifier[data_size] ): literal[string] keyword[if] keyword[not] identifier[file_object] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[file_object] . identifier[seek] ( identifier[file_offset] , identifier[os] . identifier[SEEK_SET] ) identifier[read_error] = literal[string] keyword[try] : identifier[data] = identifier[file_object] . identifier[read] ( identifier[data_size] ) keyword[if] identifier[len] ( identifier[data] )!= identifier[data_size] : identifier[read_error] = literal[string] keyword[except] identifier[IOError] keyword[as] identifier[exception] : identifier[read_error] = literal[string] . identifier[format] ( identifier[exception] ) keyword[if] identifier[read_error] : keyword[raise] identifier[errors] . identifier[ParseError] ( literal[string] . identifier[format] ( identifier[file_offset] , identifier[read_error] )) keyword[return] identifier[data]
def _ReadData(self, file_object, file_offset, data_size): """Reads data. Args: file_object (dvfvs.FileIO): a file-like object to read. file_offset (int): offset of the data relative to the start of the file-like object. data_size (int): size of the data. The resulting data size much match the requested data size so that dtFabric can map the data type definitions onto the byte stream. Returns: bytes: byte stream containing the data. Raises: ParseError: if the data cannot be read. ValueError: if the file-like object is missing. """ if not file_object: raise ValueError('Missing file-like object.') # depends on [control=['if'], data=[]] file_object.seek(file_offset, os.SEEK_SET) read_error = '' try: data = file_object.read(data_size) if len(data) != data_size: read_error = 'missing data' # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IOError as exception: read_error = '{0!s}'.format(exception) # depends on [control=['except'], data=['exception']] if read_error: raise errors.ParseError('Unable to read data at offset: 0x{0:08x} with error: {1:s}'.format(file_offset, read_error)) # depends on [control=['if'], data=[]] return data
def compute_hkdf(ikm, salt): """ Standard hkdf algorithm :param {Buffer} ikm Input key material. :param {Buffer} salt Salt value. :return {Buffer} Strong key material. @private """ prk = hmac.new(salt, ikm, hashlib.sha256).digest() info_bits_update = info_bits + bytearray(chr(1), 'utf-8') hmac_hash = hmac.new(prk, info_bits_update, hashlib.sha256).digest() return hmac_hash[:16]
def function[compute_hkdf, parameter[ikm, salt]]: constant[ Standard hkdf algorithm :param {Buffer} ikm Input key material. :param {Buffer} salt Salt value. :return {Buffer} Strong key material. @private ] variable[prk] assign[=] call[call[name[hmac].new, parameter[name[salt], name[ikm], name[hashlib].sha256]].digest, parameter[]] variable[info_bits_update] assign[=] binary_operation[name[info_bits] + call[name[bytearray], parameter[call[name[chr], parameter[constant[1]]], constant[utf-8]]]] variable[hmac_hash] assign[=] call[call[name[hmac].new, parameter[name[prk], name[info_bits_update], name[hashlib].sha256]].digest, parameter[]] return[call[name[hmac_hash]][<ast.Slice object at 0x7da1b22e9180>]]
keyword[def] identifier[compute_hkdf] ( identifier[ikm] , identifier[salt] ): literal[string] identifier[prk] = identifier[hmac] . identifier[new] ( identifier[salt] , identifier[ikm] , identifier[hashlib] . identifier[sha256] ). identifier[digest] () identifier[info_bits_update] = identifier[info_bits] + identifier[bytearray] ( identifier[chr] ( literal[int] ), literal[string] ) identifier[hmac_hash] = identifier[hmac] . identifier[new] ( identifier[prk] , identifier[info_bits_update] , identifier[hashlib] . identifier[sha256] ). identifier[digest] () keyword[return] identifier[hmac_hash] [: literal[int] ]
def compute_hkdf(ikm, salt): """ Standard hkdf algorithm :param {Buffer} ikm Input key material. :param {Buffer} salt Salt value. :return {Buffer} Strong key material. @private """ prk = hmac.new(salt, ikm, hashlib.sha256).digest() info_bits_update = info_bits + bytearray(chr(1), 'utf-8') hmac_hash = hmac.new(prk, info_bits_update, hashlib.sha256).digest() return hmac_hash[:16]
def job_listener(event): '''Listens to completed job''' job_id = event.job.args[0] if event.code == events.EVENT_JOB_MISSED: db.mark_job_as_missed(job_id) elif event.exception: if isinstance(event.exception, util.JobError): error_object = event.exception.as_dict() else: error_object = "\n".join(traceback.format_tb(event.traceback) + [repr(event.exception)]) db.mark_job_as_errored(job_id, error_object) else: db.mark_job_as_completed(job_id, event.retval) api_key = db.get_job(job_id)["api_key"] result_ok = send_result(job_id, api_key) if not result_ok: db.mark_job_as_failed_to_post_result(job_id) # Optionally notify tests that job_listener() has finished. if "_TEST_CALLBACK_URL" in app.config: requests.get(app.config["_TEST_CALLBACK_URL"])
def function[job_listener, parameter[event]]: constant[Listens to completed job] variable[job_id] assign[=] call[name[event].job.args][constant[0]] if compare[name[event].code equal[==] name[events].EVENT_JOB_MISSED] begin[:] call[name[db].mark_job_as_missed, parameter[name[job_id]]] variable[api_key] assign[=] call[call[name[db].get_job, parameter[name[job_id]]]][constant[api_key]] variable[result_ok] assign[=] call[name[send_result], parameter[name[job_id], name[api_key]]] if <ast.UnaryOp object at 0x7da204567580> begin[:] call[name[db].mark_job_as_failed_to_post_result, parameter[name[job_id]]] if compare[constant[_TEST_CALLBACK_URL] in name[app].config] begin[:] call[name[requests].get, parameter[call[name[app].config][constant[_TEST_CALLBACK_URL]]]]
keyword[def] identifier[job_listener] ( identifier[event] ): literal[string] identifier[job_id] = identifier[event] . identifier[job] . identifier[args] [ literal[int] ] keyword[if] identifier[event] . identifier[code] == identifier[events] . identifier[EVENT_JOB_MISSED] : identifier[db] . identifier[mark_job_as_missed] ( identifier[job_id] ) keyword[elif] identifier[event] . identifier[exception] : keyword[if] identifier[isinstance] ( identifier[event] . identifier[exception] , identifier[util] . identifier[JobError] ): identifier[error_object] = identifier[event] . identifier[exception] . identifier[as_dict] () keyword[else] : identifier[error_object] = literal[string] . identifier[join] ( identifier[traceback] . identifier[format_tb] ( identifier[event] . identifier[traceback] )+ [ identifier[repr] ( identifier[event] . identifier[exception] )]) identifier[db] . identifier[mark_job_as_errored] ( identifier[job_id] , identifier[error_object] ) keyword[else] : identifier[db] . identifier[mark_job_as_completed] ( identifier[job_id] , identifier[event] . identifier[retval] ) identifier[api_key] = identifier[db] . identifier[get_job] ( identifier[job_id] )[ literal[string] ] identifier[result_ok] = identifier[send_result] ( identifier[job_id] , identifier[api_key] ) keyword[if] keyword[not] identifier[result_ok] : identifier[db] . identifier[mark_job_as_failed_to_post_result] ( identifier[job_id] ) keyword[if] literal[string] keyword[in] identifier[app] . identifier[config] : identifier[requests] . identifier[get] ( identifier[app] . identifier[config] [ literal[string] ])
def job_listener(event): """Listens to completed job""" job_id = event.job.args[0] if event.code == events.EVENT_JOB_MISSED: db.mark_job_as_missed(job_id) # depends on [control=['if'], data=[]] elif event.exception: if isinstance(event.exception, util.JobError): error_object = event.exception.as_dict() # depends on [control=['if'], data=[]] else: error_object = '\n'.join(traceback.format_tb(event.traceback) + [repr(event.exception)]) db.mark_job_as_errored(job_id, error_object) # depends on [control=['if'], data=[]] else: db.mark_job_as_completed(job_id, event.retval) api_key = db.get_job(job_id)['api_key'] result_ok = send_result(job_id, api_key) if not result_ok: db.mark_job_as_failed_to_post_result(job_id) # depends on [control=['if'], data=[]] # Optionally notify tests that job_listener() has finished. if '_TEST_CALLBACK_URL' in app.config: requests.get(app.config['_TEST_CALLBACK_URL']) # depends on [control=['if'], data=[]]
def key_click(self, key): """ 为自定义菜单 ``(click)`` 事件添加 handler 的简便方法。 **@key_click('KEYNAME')** 用来为特定 key 的点击事件添加 handler 方法。 """ def wraps(f): argc = len(signature(f).parameters.keys()) @self.click def onclick(message, session=None): if message.key == key: return f(*[message, session][:argc]) return f return wraps
def function[key_click, parameter[self, key]]: constant[ 为自定义菜单 ``(click)`` 事件添加 handler 的简便方法。 **@key_click('KEYNAME')** 用来为特定 key 的点击事件添加 handler 方法。 ] def function[wraps, parameter[f]]: variable[argc] assign[=] call[name[len], parameter[call[call[name[signature], parameter[name[f]]].parameters.keys, parameter[]]]] def function[onclick, parameter[message, session]]: if compare[name[message].key equal[==] name[key]] begin[:] return[call[name[f], parameter[<ast.Starred object at 0x7da1b216c550>]]] return[name[f]] return[name[wraps]]
keyword[def] identifier[key_click] ( identifier[self] , identifier[key] ): literal[string] keyword[def] identifier[wraps] ( identifier[f] ): identifier[argc] = identifier[len] ( identifier[signature] ( identifier[f] ). identifier[parameters] . identifier[keys] ()) @ identifier[self] . identifier[click] keyword[def] identifier[onclick] ( identifier[message] , identifier[session] = keyword[None] ): keyword[if] identifier[message] . identifier[key] == identifier[key] : keyword[return] identifier[f] (*[ identifier[message] , identifier[session] ][: identifier[argc] ]) keyword[return] identifier[f] keyword[return] identifier[wraps]
def key_click(self, key): """ 为自定义菜单 ``(click)`` 事件添加 handler 的简便方法。 **@key_click('KEYNAME')** 用来为特定 key 的点击事件添加 handler 方法。 """ def wraps(f): argc = len(signature(f).parameters.keys()) @self.click def onclick(message, session=None): if message.key == key: return f(*[message, session][:argc]) # depends on [control=['if'], data=[]] return f return wraps
def saveFormatFile(self, filename, format): """Dump an XML document to a file. Will use compression if compiled in and enabled. If @filename is "-" the stdout file is used. If @format is set then the document will be indented on output. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called """ ret = libxml2mod.xmlSaveFormatFile(filename, self._o, format) return ret
def function[saveFormatFile, parameter[self, filename, format]]: constant[Dump an XML document to a file. Will use compression if compiled in and enabled. If @filename is "-" the stdout file is used. If @format is set then the document will be indented on output. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called ] variable[ret] assign[=] call[name[libxml2mod].xmlSaveFormatFile, parameter[name[filename], name[self]._o, name[format]]] return[name[ret]]
keyword[def] identifier[saveFormatFile] ( identifier[self] , identifier[filename] , identifier[format] ): literal[string] identifier[ret] = identifier[libxml2mod] . identifier[xmlSaveFormatFile] ( identifier[filename] , identifier[self] . identifier[_o] , identifier[format] ) keyword[return] identifier[ret]
def saveFormatFile(self, filename, format): """Dump an XML document to a file. Will use compression if compiled in and enabled. If @filename is "-" the stdout file is used. If @format is set then the document will be indented on output. Note that @format = 1 provide node indenting only if xmlIndentTreeOutput = 1 or xmlKeepBlanksDefault(0) was called """ ret = libxml2mod.xmlSaveFormatFile(filename, self._o, format) return ret
def list_user_topics(self, start=0): """ 发表的话题 :param start: 翻页 :return: 带下一页的列表 """ xml = self.api.xml(API_GROUP_LIST_USER_PUBLISHED_TOPICS % self.api.user_alias, params={'start': start}) return build_list_result(self._parse_topic_table(xml, 'title,comment,created,group'), xml)
def function[list_user_topics, parameter[self, start]]: constant[ 发表的话题 :param start: 翻页 :return: 带下一页的列表 ] variable[xml] assign[=] call[name[self].api.xml, parameter[binary_operation[name[API_GROUP_LIST_USER_PUBLISHED_TOPICS] <ast.Mod object at 0x7da2590d6920> name[self].api.user_alias]]] return[call[name[build_list_result], parameter[call[name[self]._parse_topic_table, parameter[name[xml], constant[title,comment,created,group]]], name[xml]]]]
keyword[def] identifier[list_user_topics] ( identifier[self] , identifier[start] = literal[int] ): literal[string] identifier[xml] = identifier[self] . identifier[api] . identifier[xml] ( identifier[API_GROUP_LIST_USER_PUBLISHED_TOPICS] % identifier[self] . identifier[api] . identifier[user_alias] , identifier[params] ={ literal[string] : identifier[start] }) keyword[return] identifier[build_list_result] ( identifier[self] . identifier[_parse_topic_table] ( identifier[xml] , literal[string] ), identifier[xml] )
def list_user_topics(self, start=0): """ 发表的话题 :param start: 翻页 :return: 带下一页的列表 """ xml = self.api.xml(API_GROUP_LIST_USER_PUBLISHED_TOPICS % self.api.user_alias, params={'start': start}) return build_list_result(self._parse_topic_table(xml, 'title,comment,created,group'), xml)
def alter(self, operation, timeout=None, metadata=None, credentials=None): """Runs alter operation.""" return self.stub.Alter(operation, timeout=timeout, metadata=metadata, credentials=credentials)
def function[alter, parameter[self, operation, timeout, metadata, credentials]]: constant[Runs alter operation.] return[call[name[self].stub.Alter, parameter[name[operation]]]]
keyword[def] identifier[alter] ( identifier[self] , identifier[operation] , identifier[timeout] = keyword[None] , identifier[metadata] = keyword[None] , identifier[credentials] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[stub] . identifier[Alter] ( identifier[operation] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] , identifier[credentials] = identifier[credentials] )
def alter(self, operation, timeout=None, metadata=None, credentials=None): """Runs alter operation.""" return self.stub.Alter(operation, timeout=timeout, metadata=metadata, credentials=credentials)
def unique_list(input_list): r""" For a given list (of points) remove any duplicates """ output_list = [] if len(input_list) > 0: dim = _sp.shape(input_list)[1] for i in input_list: match = False for j in output_list: if dim == 3: if i[0] == j[0] and i[1] == j[1] and i[2] == j[2]: match = True elif dim == 2: if i[0] == j[0] and i[1] == j[1]: match = True elif dim == 1: if i[0] == j[0]: match = True if match is False: output_list.append(i) return output_list
def function[unique_list, parameter[input_list]]: constant[ For a given list (of points) remove any duplicates ] variable[output_list] assign[=] list[[]] if compare[call[name[len], parameter[name[input_list]]] greater[>] constant[0]] begin[:] variable[dim] assign[=] call[call[name[_sp].shape, parameter[name[input_list]]]][constant[1]] for taget[name[i]] in starred[name[input_list]] begin[:] variable[match] assign[=] constant[False] for taget[name[j]] in starred[name[output_list]] begin[:] if compare[name[dim] equal[==] constant[3]] begin[:] if <ast.BoolOp object at 0x7da18f58d540> begin[:] variable[match] assign[=] constant[True] if compare[name[match] is constant[False]] begin[:] call[name[output_list].append, parameter[name[i]]] return[name[output_list]]
keyword[def] identifier[unique_list] ( identifier[input_list] ): literal[string] identifier[output_list] =[] keyword[if] identifier[len] ( identifier[input_list] )> literal[int] : identifier[dim] = identifier[_sp] . identifier[shape] ( identifier[input_list] )[ literal[int] ] keyword[for] identifier[i] keyword[in] identifier[input_list] : identifier[match] = keyword[False] keyword[for] identifier[j] keyword[in] identifier[output_list] : keyword[if] identifier[dim] == literal[int] : keyword[if] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ] keyword[and] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ] keyword[and] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ]: identifier[match] = keyword[True] keyword[elif] identifier[dim] == literal[int] : keyword[if] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ] keyword[and] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ]: identifier[match] = keyword[True] keyword[elif] identifier[dim] == literal[int] : keyword[if] identifier[i] [ literal[int] ]== identifier[j] [ literal[int] ]: identifier[match] = keyword[True] keyword[if] identifier[match] keyword[is] keyword[False] : identifier[output_list] . identifier[append] ( identifier[i] ) keyword[return] identifier[output_list]
def unique_list(input_list): """ For a given list (of points) remove any duplicates """ output_list = [] if len(input_list) > 0: dim = _sp.shape(input_list)[1] for i in input_list: match = False for j in output_list: if dim == 3: if i[0] == j[0] and i[1] == j[1] and (i[2] == j[2]): match = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif dim == 2: if i[0] == j[0] and i[1] == j[1]: match = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif dim == 1: if i[0] == j[0]: match = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['j']] if match is False: output_list.append(i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] return output_list
def execute_code_block(elem, doc): """Executes a code block by passing it to the executor. Args: elem The AST element. doc The document. Returns: The output of the command. """ command = select_executor(elem, doc).split(' ') code = elem.text if 'plt' in elem.attributes or 'plt' in elem.classes: code = save_plot(code, elem) command.append(code) if 'args' in elem.attributes: for arg in elem.attributes['args'].split(): command.append(arg) cwd = elem.attributes['wd'] if 'wd' in elem.attributes else None return subprocess.run(command, encoding='utf8', stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=cwd).stdout
def function[execute_code_block, parameter[elem, doc]]: constant[Executes a code block by passing it to the executor. Args: elem The AST element. doc The document. Returns: The output of the command. ] variable[command] assign[=] call[call[name[select_executor], parameter[name[elem], name[doc]]].split, parameter[constant[ ]]] variable[code] assign[=] name[elem].text if <ast.BoolOp object at 0x7da1b1605690> begin[:] variable[code] assign[=] call[name[save_plot], parameter[name[code], name[elem]]] call[name[command].append, parameter[name[code]]] if compare[constant[args] in name[elem].attributes] begin[:] for taget[name[arg]] in starred[call[call[name[elem].attributes][constant[args]].split, parameter[]]] begin[:] call[name[command].append, parameter[name[arg]]] variable[cwd] assign[=] <ast.IfExp object at 0x7da1b16a9300> return[call[name[subprocess].run, parameter[name[command]]].stdout]
keyword[def] identifier[execute_code_block] ( identifier[elem] , identifier[doc] ): literal[string] identifier[command] = identifier[select_executor] ( identifier[elem] , identifier[doc] ). identifier[split] ( literal[string] ) identifier[code] = identifier[elem] . identifier[text] keyword[if] literal[string] keyword[in] identifier[elem] . identifier[attributes] keyword[or] literal[string] keyword[in] identifier[elem] . identifier[classes] : identifier[code] = identifier[save_plot] ( identifier[code] , identifier[elem] ) identifier[command] . identifier[append] ( identifier[code] ) keyword[if] literal[string] keyword[in] identifier[elem] . identifier[attributes] : keyword[for] identifier[arg] keyword[in] identifier[elem] . identifier[attributes] [ literal[string] ]. identifier[split] (): identifier[command] . identifier[append] ( identifier[arg] ) identifier[cwd] = identifier[elem] . identifier[attributes] [ literal[string] ] keyword[if] literal[string] keyword[in] identifier[elem] . identifier[attributes] keyword[else] keyword[None] keyword[return] identifier[subprocess] . identifier[run] ( identifier[command] , identifier[encoding] = literal[string] , identifier[stdout] = identifier[subprocess] . identifier[PIPE] , identifier[stderr] = identifier[subprocess] . identifier[STDOUT] , identifier[cwd] = identifier[cwd] ). identifier[stdout]
def execute_code_block(elem, doc): """Executes a code block by passing it to the executor. Args: elem The AST element. doc The document. Returns: The output of the command. """ command = select_executor(elem, doc).split(' ') code = elem.text if 'plt' in elem.attributes or 'plt' in elem.classes: code = save_plot(code, elem) # depends on [control=['if'], data=[]] command.append(code) if 'args' in elem.attributes: for arg in elem.attributes['args'].split(): command.append(arg) # depends on [control=['for'], data=['arg']] # depends on [control=['if'], data=[]] cwd = elem.attributes['wd'] if 'wd' in elem.attributes else None return subprocess.run(command, encoding='utf8', stdout=subprocess.PIPE, stderr=subprocess.STDOUT, cwd=cwd).stdout
def joint_sfs_folded(ac1, ac2, n1=None, n2=None): """Compute the joint folded site frequency spectrum between two populations. Parameters ---------- ac1 : array_like, int, shape (n_variants, 2) Allele counts for the first population. ac2 : array_like, int, shape (n_variants, 2) Allele counts for the second population. n1, n2 : int, optional The total number of chromosomes called in each population. Returns ------- joint_sfs_folded : ndarray, int, shape (n1//2 + 1, n2//2 + 1) Array where the (i, j)th element is the number of variant sites with a minor allele count of i in the first population and j in the second population. """ # check inputs ac1, n1 = _check_ac_n(ac1, n1) ac2, n2 = _check_ac_n(ac2, n2) # compute minor allele counts mac1 = np.amin(ac1, axis=1) mac2 = np.amin(ac2, axis=1) # compute site frequency spectrum x = n1//2 + 1 y = n2//2 + 1 tmp = (mac1 * y + mac2).astype(int, copy=False) s = np.bincount(tmp) s.resize(x, y) return s
def function[joint_sfs_folded, parameter[ac1, ac2, n1, n2]]: constant[Compute the joint folded site frequency spectrum between two populations. Parameters ---------- ac1 : array_like, int, shape (n_variants, 2) Allele counts for the first population. ac2 : array_like, int, shape (n_variants, 2) Allele counts for the second population. n1, n2 : int, optional The total number of chromosomes called in each population. Returns ------- joint_sfs_folded : ndarray, int, shape (n1//2 + 1, n2//2 + 1) Array where the (i, j)th element is the number of variant sites with a minor allele count of i in the first population and j in the second population. ] <ast.Tuple object at 0x7da2054a4970> assign[=] call[name[_check_ac_n], parameter[name[ac1], name[n1]]] <ast.Tuple object at 0x7da2054a52d0> assign[=] call[name[_check_ac_n], parameter[name[ac2], name[n2]]] variable[mac1] assign[=] call[name[np].amin, parameter[name[ac1]]] variable[mac2] assign[=] call[name[np].amin, parameter[name[ac2]]] variable[x] assign[=] binary_operation[binary_operation[name[n1] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] + constant[1]] variable[y] assign[=] binary_operation[binary_operation[name[n2] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] + constant[1]] variable[tmp] assign[=] call[binary_operation[binary_operation[name[mac1] * name[y]] + name[mac2]].astype, parameter[name[int]]] variable[s] assign[=] call[name[np].bincount, parameter[name[tmp]]] call[name[s].resize, parameter[name[x], name[y]]] return[name[s]]
keyword[def] identifier[joint_sfs_folded] ( identifier[ac1] , identifier[ac2] , identifier[n1] = keyword[None] , identifier[n2] = keyword[None] ): literal[string] identifier[ac1] , identifier[n1] = identifier[_check_ac_n] ( identifier[ac1] , identifier[n1] ) identifier[ac2] , identifier[n2] = identifier[_check_ac_n] ( identifier[ac2] , identifier[n2] ) identifier[mac1] = identifier[np] . identifier[amin] ( identifier[ac1] , identifier[axis] = literal[int] ) identifier[mac2] = identifier[np] . identifier[amin] ( identifier[ac2] , identifier[axis] = literal[int] ) identifier[x] = identifier[n1] // literal[int] + literal[int] identifier[y] = identifier[n2] // literal[int] + literal[int] identifier[tmp] =( identifier[mac1] * identifier[y] + identifier[mac2] ). identifier[astype] ( identifier[int] , identifier[copy] = keyword[False] ) identifier[s] = identifier[np] . identifier[bincount] ( identifier[tmp] ) identifier[s] . identifier[resize] ( identifier[x] , identifier[y] ) keyword[return] identifier[s]
def joint_sfs_folded(ac1, ac2, n1=None, n2=None): """Compute the joint folded site frequency spectrum between two populations. Parameters ---------- ac1 : array_like, int, shape (n_variants, 2) Allele counts for the first population. ac2 : array_like, int, shape (n_variants, 2) Allele counts for the second population. n1, n2 : int, optional The total number of chromosomes called in each population. Returns ------- joint_sfs_folded : ndarray, int, shape (n1//2 + 1, n2//2 + 1) Array where the (i, j)th element is the number of variant sites with a minor allele count of i in the first population and j in the second population. """ # check inputs (ac1, n1) = _check_ac_n(ac1, n1) (ac2, n2) = _check_ac_n(ac2, n2) # compute minor allele counts mac1 = np.amin(ac1, axis=1) mac2 = np.amin(ac2, axis=1) # compute site frequency spectrum x = n1 // 2 + 1 y = n2 // 2 + 1 tmp = (mac1 * y + mac2).astype(int, copy=False) s = np.bincount(tmp) s.resize(x, y) return s
def findObjects(self, template=()): """ find the objects matching the template pattern :param template: list of attributes tuples (attribute,value). The default value is () and all the objects are returned :type template: list :return: a list of object ids :rtype: list """ t = self._template2ckattrlist(template) # we search for 10 objects by default. speed/memory tradeoff result = PyKCS11.LowLevel.ckobjlist(10) rv = self.lib.C_FindObjectsInit(self.session, t) if rv != CKR_OK: raise PyKCS11Error(rv) res = [] while True: rv = self.lib.C_FindObjects(self.session, result) if rv != CKR_OK: raise PyKCS11Error(rv) for x in result: # make a copy of the handle: the original value get # corrupted (!!) a = CK_OBJECT_HANDLE(self) a.assign(x.value()) res.append(a) if len(result) == 0: break rv = self.lib.C_FindObjectsFinal(self.session) if rv != CKR_OK: raise PyKCS11Error(rv) return res
def function[findObjects, parameter[self, template]]: constant[ find the objects matching the template pattern :param template: list of attributes tuples (attribute,value). The default value is () and all the objects are returned :type template: list :return: a list of object ids :rtype: list ] variable[t] assign[=] call[name[self]._template2ckattrlist, parameter[name[template]]] variable[result] assign[=] call[name[PyKCS11].LowLevel.ckobjlist, parameter[constant[10]]] variable[rv] assign[=] call[name[self].lib.C_FindObjectsInit, parameter[name[self].session, name[t]]] if compare[name[rv] not_equal[!=] name[CKR_OK]] begin[:] <ast.Raise object at 0x7da1b2346bc0> variable[res] assign[=] list[[]] while constant[True] begin[:] variable[rv] assign[=] call[name[self].lib.C_FindObjects, parameter[name[self].session, name[result]]] if compare[name[rv] not_equal[!=] name[CKR_OK]] begin[:] <ast.Raise object at 0x7da1b2347070> for taget[name[x]] in starred[name[result]] begin[:] variable[a] assign[=] call[name[CK_OBJECT_HANDLE], parameter[name[self]]] call[name[a].assign, parameter[call[name[x].value, parameter[]]]] call[name[res].append, parameter[name[a]]] if compare[call[name[len], parameter[name[result]]] equal[==] constant[0]] begin[:] break variable[rv] assign[=] call[name[self].lib.C_FindObjectsFinal, parameter[name[self].session]] if compare[name[rv] not_equal[!=] name[CKR_OK]] begin[:] <ast.Raise object at 0x7da1b2347f10> return[name[res]]
keyword[def] identifier[findObjects] ( identifier[self] , identifier[template] =()): literal[string] identifier[t] = identifier[self] . identifier[_template2ckattrlist] ( identifier[template] ) identifier[result] = identifier[PyKCS11] . identifier[LowLevel] . identifier[ckobjlist] ( literal[int] ) identifier[rv] = identifier[self] . identifier[lib] . identifier[C_FindObjectsInit] ( identifier[self] . identifier[session] , identifier[t] ) keyword[if] identifier[rv] != identifier[CKR_OK] : keyword[raise] identifier[PyKCS11Error] ( identifier[rv] ) identifier[res] =[] keyword[while] keyword[True] : identifier[rv] = identifier[self] . identifier[lib] . identifier[C_FindObjects] ( identifier[self] . identifier[session] , identifier[result] ) keyword[if] identifier[rv] != identifier[CKR_OK] : keyword[raise] identifier[PyKCS11Error] ( identifier[rv] ) keyword[for] identifier[x] keyword[in] identifier[result] : identifier[a] = identifier[CK_OBJECT_HANDLE] ( identifier[self] ) identifier[a] . identifier[assign] ( identifier[x] . identifier[value] ()) identifier[res] . identifier[append] ( identifier[a] ) keyword[if] identifier[len] ( identifier[result] )== literal[int] : keyword[break] identifier[rv] = identifier[self] . identifier[lib] . identifier[C_FindObjectsFinal] ( identifier[self] . identifier[session] ) keyword[if] identifier[rv] != identifier[CKR_OK] : keyword[raise] identifier[PyKCS11Error] ( identifier[rv] ) keyword[return] identifier[res]
def findObjects(self, template=()): """ find the objects matching the template pattern :param template: list of attributes tuples (attribute,value). The default value is () and all the objects are returned :type template: list :return: a list of object ids :rtype: list """ t = self._template2ckattrlist(template) # we search for 10 objects by default. speed/memory tradeoff result = PyKCS11.LowLevel.ckobjlist(10) rv = self.lib.C_FindObjectsInit(self.session, t) if rv != CKR_OK: raise PyKCS11Error(rv) # depends on [control=['if'], data=['rv']] res = [] while True: rv = self.lib.C_FindObjects(self.session, result) if rv != CKR_OK: raise PyKCS11Error(rv) # depends on [control=['if'], data=['rv']] for x in result: # make a copy of the handle: the original value get # corrupted (!!) a = CK_OBJECT_HANDLE(self) a.assign(x.value()) res.append(a) # depends on [control=['for'], data=['x']] if len(result) == 0: break # depends on [control=['if'], data=[]] # depends on [control=['while'], data=[]] rv = self.lib.C_FindObjectsFinal(self.session) if rv != CKR_OK: raise PyKCS11Error(rv) # depends on [control=['if'], data=['rv']] return res
def _toggleSectionActiveState(self, sectionName, state, skipList): """ Make an entire section (minus skipList items) either active or inactive. sectionName is the same as the param's scope. """ # Get model data, the list of pars theParamList = self._taskParsObj.getParList() # Loop over their assoc. entries for i in range(self.numParams): if theParamList[i].scope == sectionName: if skipList and theParamList[i].name in skipList: # self.entryNo[i].setActiveState(True) # these always active pass # if it started active, we don't need to reactivate it else: self.entryNo[i].setActiveState(state)
def function[_toggleSectionActiveState, parameter[self, sectionName, state, skipList]]: constant[ Make an entire section (minus skipList items) either active or inactive. sectionName is the same as the param's scope. ] variable[theParamList] assign[=] call[name[self]._taskParsObj.getParList, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[name[self].numParams]]] begin[:] if compare[call[name[theParamList]][name[i]].scope equal[==] name[sectionName]] begin[:] if <ast.BoolOp object at 0x7da18f8136d0> begin[:] pass
keyword[def] identifier[_toggleSectionActiveState] ( identifier[self] , identifier[sectionName] , identifier[state] , identifier[skipList] ): literal[string] identifier[theParamList] = identifier[self] . identifier[_taskParsObj] . identifier[getParList] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[self] . identifier[numParams] ): keyword[if] identifier[theParamList] [ identifier[i] ]. identifier[scope] == identifier[sectionName] : keyword[if] identifier[skipList] keyword[and] identifier[theParamList] [ identifier[i] ]. identifier[name] keyword[in] identifier[skipList] : keyword[pass] keyword[else] : identifier[self] . identifier[entryNo] [ identifier[i] ]. identifier[setActiveState] ( identifier[state] )
def _toggleSectionActiveState(self, sectionName, state, skipList): """ Make an entire section (minus skipList items) either active or inactive. sectionName is the same as the param's scope. """ # Get model data, the list of pars theParamList = self._taskParsObj.getParList() # Loop over their assoc. entries for i in range(self.numParams): if theParamList[i].scope == sectionName: if skipList and theParamList[i].name in skipList: # self.entryNo[i].setActiveState(True) # these always active pass # if it started active, we don't need to reactivate it # depends on [control=['if'], data=[]] else: self.entryNo[i].setActiveState(state) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
def fut_ticker(gen_ticker: str, dt, freq: str, log=logs.LOG_LEVEL) -> str: """ Get proper ticker from generic ticker Args: gen_ticker: generic ticker dt: date freq: futures contract frequency log: level of logs Returns: str: exact futures ticker """ logger = logs.get_logger(fut_ticker, level=log) dt = pd.Timestamp(dt) t_info = gen_ticker.split() asset = t_info[-1] if asset in ['Index', 'Curncy', 'Comdty']: ticker = ' '.join(t_info[:-1]) prefix, idx, postfix = ticker[:-1], int(ticker[-1]) - 1, asset elif asset == 'Equity': ticker = t_info[0] prefix, idx, postfix = ticker[:-1], int(ticker[-1]) - 1, ' '.join(t_info[1:]) else: logger.error(f'unkonwn asset type for ticker: {gen_ticker}') return '' month_ext = 4 if asset == 'Comdty' else 2 months = pd.date_range(start=dt, periods=max(idx + month_ext, 3), freq=freq) logger.debug(f'pulling expiry dates for months: {months}') def to_fut(month): return prefix + const.Futures[month.strftime('%b')] + \ month.strftime('%y')[-1] + ' ' + postfix fut = [to_fut(m) for m in months] logger.debug(f'trying futures: {fut}') # noinspection PyBroadException try: fut_matu = bdp(tickers=fut, flds='last_tradeable_dt', cache=True) except Exception as e1: logger.error(f'error downloading futures contracts (1st trial) {e1}:\n{fut}') # noinspection PyBroadException try: fut = fut[:-1] logger.debug(f'trying futures (2nd trial): {fut}') fut_matu = bdp(tickers=fut, flds='last_tradeable_dt', cache=True) except Exception as e2: logger.error(f'error downloading futures contracts (2nd trial) {e2}:\n{fut}') return '' sub_fut = fut_matu[pd.DatetimeIndex(fut_matu.last_tradeable_dt) > dt] logger.debug(f'futures full chain:\n{fut_matu.to_string()}') logger.debug(f'getting index {idx} from:\n{sub_fut.to_string()}') return sub_fut.index.values[idx]
def function[fut_ticker, parameter[gen_ticker, dt, freq, log]]: constant[ Get proper ticker from generic ticker Args: gen_ticker: generic ticker dt: date freq: futures contract frequency log: level of logs Returns: str: exact futures ticker ] variable[logger] assign[=] call[name[logs].get_logger, parameter[name[fut_ticker]]] variable[dt] assign[=] call[name[pd].Timestamp, parameter[name[dt]]] variable[t_info] assign[=] call[name[gen_ticker].split, parameter[]] variable[asset] assign[=] call[name[t_info]][<ast.UnaryOp object at 0x7da1b013c0a0>] if compare[name[asset] in list[[<ast.Constant object at 0x7da1b013d300>, <ast.Constant object at 0x7da1b013d4b0>, <ast.Constant object at 0x7da1b013fa30>]]] begin[:] variable[ticker] assign[=] call[constant[ ].join, parameter[call[name[t_info]][<ast.Slice object at 0x7da1b013ef50>]]] <ast.Tuple object at 0x7da1b013d2d0> assign[=] tuple[[<ast.Subscript object at 0x7da1b013cfa0>, <ast.BinOp object at 0x7da1b013dba0>, <ast.Name object at 0x7da1b013fb20>]] variable[month_ext] assign[=] <ast.IfExp object at 0x7da1b013ff70> variable[months] assign[=] call[name[pd].date_range, parameter[]] call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b013ee60>]] def function[to_fut, parameter[month]]: return[binary_operation[binary_operation[binary_operation[binary_operation[name[prefix] + call[name[const].Futures][call[name[month].strftime, parameter[constant[%b]]]]] + call[call[name[month].strftime, parameter[constant[%y]]]][<ast.UnaryOp object at 0x7da1b016cd30>]] + constant[ ]] + name[postfix]]] variable[fut] assign[=] <ast.ListComp object at 0x7da1b016f0a0> call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b0140d30>]] <ast.Try object at 0x7da1b0140190> variable[sub_fut] assign[=] call[name[fut_matu]][compare[call[name[pd].DatetimeIndex, parameter[name[fut_matu].last_tradeable_dt]] greater[>] name[dt]]] call[name[logger].debug, parameter[<ast.JoinedStr object at 0x7da1b01411e0>]] call[name[logger].debug, 
parameter[<ast.JoinedStr object at 0x7da1b0143be0>]] return[call[name[sub_fut].index.values][name[idx]]]
keyword[def] identifier[fut_ticker] ( identifier[gen_ticker] : identifier[str] , identifier[dt] , identifier[freq] : identifier[str] , identifier[log] = identifier[logs] . identifier[LOG_LEVEL] )-> identifier[str] : literal[string] identifier[logger] = identifier[logs] . identifier[get_logger] ( identifier[fut_ticker] , identifier[level] = identifier[log] ) identifier[dt] = identifier[pd] . identifier[Timestamp] ( identifier[dt] ) identifier[t_info] = identifier[gen_ticker] . identifier[split] () identifier[asset] = identifier[t_info] [- literal[int] ] keyword[if] identifier[asset] keyword[in] [ literal[string] , literal[string] , literal[string] ]: identifier[ticker] = literal[string] . identifier[join] ( identifier[t_info] [:- literal[int] ]) identifier[prefix] , identifier[idx] , identifier[postfix] = identifier[ticker] [:- literal[int] ], identifier[int] ( identifier[ticker] [- literal[int] ])- literal[int] , identifier[asset] keyword[elif] identifier[asset] == literal[string] : identifier[ticker] = identifier[t_info] [ literal[int] ] identifier[prefix] , identifier[idx] , identifier[postfix] = identifier[ticker] [:- literal[int] ], identifier[int] ( identifier[ticker] [- literal[int] ])- literal[int] , literal[string] . identifier[join] ( identifier[t_info] [ literal[int] :]) keyword[else] : identifier[logger] . identifier[error] ( literal[string] ) keyword[return] literal[string] identifier[month_ext] = literal[int] keyword[if] identifier[asset] == literal[string] keyword[else] literal[int] identifier[months] = identifier[pd] . identifier[date_range] ( identifier[start] = identifier[dt] , identifier[periods] = identifier[max] ( identifier[idx] + identifier[month_ext] , literal[int] ), identifier[freq] = identifier[freq] ) identifier[logger] . identifier[debug] ( literal[string] ) keyword[def] identifier[to_fut] ( identifier[month] ): keyword[return] identifier[prefix] + identifier[const] . identifier[Futures] [ identifier[month] . 
identifier[strftime] ( literal[string] )]+ identifier[month] . identifier[strftime] ( literal[string] )[- literal[int] ]+ literal[string] + identifier[postfix] identifier[fut] =[ identifier[to_fut] ( identifier[m] ) keyword[for] identifier[m] keyword[in] identifier[months] ] identifier[logger] . identifier[debug] ( literal[string] ) keyword[try] : identifier[fut_matu] = identifier[bdp] ( identifier[tickers] = identifier[fut] , identifier[flds] = literal[string] , identifier[cache] = keyword[True] ) keyword[except] identifier[Exception] keyword[as] identifier[e1] : identifier[logger] . identifier[error] ( literal[string] ) keyword[try] : identifier[fut] = identifier[fut] [:- literal[int] ] identifier[logger] . identifier[debug] ( literal[string] ) identifier[fut_matu] = identifier[bdp] ( identifier[tickers] = identifier[fut] , identifier[flds] = literal[string] , identifier[cache] = keyword[True] ) keyword[except] identifier[Exception] keyword[as] identifier[e2] : identifier[logger] . identifier[error] ( literal[string] ) keyword[return] literal[string] identifier[sub_fut] = identifier[fut_matu] [ identifier[pd] . identifier[DatetimeIndex] ( identifier[fut_matu] . identifier[last_tradeable_dt] )> identifier[dt] ] identifier[logger] . identifier[debug] ( literal[string] ) identifier[logger] . identifier[debug] ( literal[string] ) keyword[return] identifier[sub_fut] . identifier[index] . identifier[values] [ identifier[idx] ]
def fut_ticker(gen_ticker: str, dt, freq: str, log=logs.LOG_LEVEL) -> str: """ Get proper ticker from generic ticker Args: gen_ticker: generic ticker dt: date freq: futures contract frequency log: level of logs Returns: str: exact futures ticker """ logger = logs.get_logger(fut_ticker, level=log) dt = pd.Timestamp(dt) t_info = gen_ticker.split() asset = t_info[-1] if asset in ['Index', 'Curncy', 'Comdty']: ticker = ' '.join(t_info[:-1]) (prefix, idx, postfix) = (ticker[:-1], int(ticker[-1]) - 1, asset) # depends on [control=['if'], data=['asset']] elif asset == 'Equity': ticker = t_info[0] (prefix, idx, postfix) = (ticker[:-1], int(ticker[-1]) - 1, ' '.join(t_info[1:])) # depends on [control=['if'], data=[]] else: logger.error(f'unkonwn asset type for ticker: {gen_ticker}') return '' month_ext = 4 if asset == 'Comdty' else 2 months = pd.date_range(start=dt, periods=max(idx + month_ext, 3), freq=freq) logger.debug(f'pulling expiry dates for months: {months}') def to_fut(month): return prefix + const.Futures[month.strftime('%b')] + month.strftime('%y')[-1] + ' ' + postfix fut = [to_fut(m) for m in months] logger.debug(f'trying futures: {fut}') # noinspection PyBroadException try: fut_matu = bdp(tickers=fut, flds='last_tradeable_dt', cache=True) # depends on [control=['try'], data=[]] except Exception as e1: logger.error(f'error downloading futures contracts (1st trial) {e1}:\n{fut}') # noinspection PyBroadException try: fut = fut[:-1] logger.debug(f'trying futures (2nd trial): {fut}') fut_matu = bdp(tickers=fut, flds='last_tradeable_dt', cache=True) # depends on [control=['try'], data=[]] except Exception as e2: logger.error(f'error downloading futures contracts (2nd trial) {e2}:\n{fut}') return '' # depends on [control=['except'], data=['e2']] # depends on [control=['except'], data=['e1']] sub_fut = fut_matu[pd.DatetimeIndex(fut_matu.last_tradeable_dt) > dt] logger.debug(f'futures full chain:\n{fut_matu.to_string()}') logger.debug(f'getting index {idx} 
from:\n{sub_fut.to_string()}') return sub_fut.index.values[idx]
def columns_dataset(self): """ Generate the columns and the whole dataset. """ data = {} words_total = {} for instance, words in self.raw_dataset.items(): words_item_total = {} for word in words: words_total.setdefault(word, 0) words_item_total.setdefault(word, 0) words_total[word] += 1 words_item_total[word] += 1 data[instance] = words_item_total columns = sorted(words_total.keys(), key=lambda w: words_total[w], reverse=True)[:250] columns = sorted(columns) dataset = {} for instance in data.keys(): dataset[instance] = [data[instance].get(word, 0) for word in columns] return columns, dataset
def function[columns_dataset, parameter[self]]: constant[ Generate the columns and the whole dataset. ] variable[data] assign[=] dictionary[[], []] variable[words_total] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b1d99960>, <ast.Name object at 0x7da1b1d99390>]]] in starred[call[name[self].raw_dataset.items, parameter[]]] begin[:] variable[words_item_total] assign[=] dictionary[[], []] for taget[name[word]] in starred[name[words]] begin[:] call[name[words_total].setdefault, parameter[name[word], constant[0]]] call[name[words_item_total].setdefault, parameter[name[word], constant[0]]] <ast.AugAssign object at 0x7da2044c1ab0> <ast.AugAssign object at 0x7da2044c05b0> call[name[data]][name[instance]] assign[=] name[words_item_total] variable[columns] assign[=] call[call[name[sorted], parameter[call[name[words_total].keys, parameter[]]]]][<ast.Slice object at 0x7da1b1d9e500>] variable[columns] assign[=] call[name[sorted], parameter[name[columns]]] variable[dataset] assign[=] dictionary[[], []] for taget[name[instance]] in starred[call[name[data].keys, parameter[]]] begin[:] call[name[dataset]][name[instance]] assign[=] <ast.ListComp object at 0x7da1b1d9e320> return[tuple[[<ast.Name object at 0x7da1b1d9da80>, <ast.Name object at 0x7da1b1d9dba0>]]]
keyword[def] identifier[columns_dataset] ( identifier[self] ): literal[string] identifier[data] ={} identifier[words_total] ={} keyword[for] identifier[instance] , identifier[words] keyword[in] identifier[self] . identifier[raw_dataset] . identifier[items] (): identifier[words_item_total] ={} keyword[for] identifier[word] keyword[in] identifier[words] : identifier[words_total] . identifier[setdefault] ( identifier[word] , literal[int] ) identifier[words_item_total] . identifier[setdefault] ( identifier[word] , literal[int] ) identifier[words_total] [ identifier[word] ]+= literal[int] identifier[words_item_total] [ identifier[word] ]+= literal[int] identifier[data] [ identifier[instance] ]= identifier[words_item_total] identifier[columns] = identifier[sorted] ( identifier[words_total] . identifier[keys] (), identifier[key] = keyword[lambda] identifier[w] : identifier[words_total] [ identifier[w] ], identifier[reverse] = keyword[True] )[: literal[int] ] identifier[columns] = identifier[sorted] ( identifier[columns] ) identifier[dataset] ={} keyword[for] identifier[instance] keyword[in] identifier[data] . identifier[keys] (): identifier[dataset] [ identifier[instance] ]=[ identifier[data] [ identifier[instance] ]. identifier[get] ( identifier[word] , literal[int] ) keyword[for] identifier[word] keyword[in] identifier[columns] ] keyword[return] identifier[columns] , identifier[dataset]
def columns_dataset(self): """ Generate the columns and the whole dataset. """ data = {} words_total = {} for (instance, words) in self.raw_dataset.items(): words_item_total = {} for word in words: words_total.setdefault(word, 0) words_item_total.setdefault(word, 0) words_total[word] += 1 words_item_total[word] += 1 # depends on [control=['for'], data=['word']] data[instance] = words_item_total # depends on [control=['for'], data=[]] columns = sorted(words_total.keys(), key=lambda w: words_total[w], reverse=True)[:250] columns = sorted(columns) dataset = {} for instance in data.keys(): dataset[instance] = [data[instance].get(word, 0) for word in columns] # depends on [control=['for'], data=['instance']] return (columns, dataset)
def database_caller_creator(self, name=None): '''creates a sqlite3 db returns the related connection object which will be later used to spawn the cursor ''' try: if name: database = name + '.db' else: database = 'sqlite_' + str_generator(self) + '.db' conn = sqlite3.connect(database) logger.warning('Database created and opened succesfully: %s' % database, extra=d) except Exception: logger.error('Failed to connect or create database / sqlite3', extra=d) raise DbConnException return conn
def function[database_caller_creator, parameter[self, name]]: constant[creates a sqlite3 db returns the related connection object which will be later used to spawn the cursor ] <ast.Try object at 0x7da1b08e4280> return[name[conn]]
keyword[def] identifier[database_caller_creator] ( identifier[self] , identifier[name] = keyword[None] ): literal[string] keyword[try] : keyword[if] identifier[name] : identifier[database] = identifier[name] + literal[string] keyword[else] : identifier[database] = literal[string] + identifier[str_generator] ( identifier[self] )+ literal[string] identifier[conn] = identifier[sqlite3] . identifier[connect] ( identifier[database] ) identifier[logger] . identifier[warning] ( literal[string] % identifier[database] , identifier[extra] = identifier[d] ) keyword[except] identifier[Exception] : identifier[logger] . identifier[error] ( literal[string] , identifier[extra] = identifier[d] ) keyword[raise] identifier[DbConnException] keyword[return] identifier[conn]
def database_caller_creator(self, name=None): """creates a sqlite3 db returns the related connection object which will be later used to spawn the cursor """ try: if name: database = name + '.db' # depends on [control=['if'], data=[]] else: database = 'sqlite_' + str_generator(self) + '.db' conn = sqlite3.connect(database) logger.warning('Database created and opened succesfully: %s' % database, extra=d) # depends on [control=['try'], data=[]] except Exception: logger.error('Failed to connect or create database / sqlite3', extra=d) raise DbConnException # depends on [control=['except'], data=[]] return conn
def start_collecting_data(self, queues=None, edge=None, edge_type=None): """Tells the queues to collect data on agents' arrival, service start, and departure times. If none of the parameters are given then every :class:`.QueueServer` will start collecting data. Parameters ---------- queues : :any:`int`, *array_like* (optional) The edge index (or an iterable of edge indices) identifying the :class:`QueueServer(s)<.QueueServer>` that will start collecting data. edge : 2-tuple of int or *array_like* (optional) Explicitly specify which queues will collect data. Must be either: * A 2-tuple of the edge's source and target vertex indices, or * An iterable of 2-tuples of the edge's source and target vertex indices. edge_type : int or an iterable of int (optional) A integer, or a collection of integers identifying which edge types will be set active. """ queues = _get_queues(self.g, queues, edge, edge_type) for k in queues: self.edge2queue[k].collect_data = True
def function[start_collecting_data, parameter[self, queues, edge, edge_type]]: constant[Tells the queues to collect data on agents' arrival, service start, and departure times. If none of the parameters are given then every :class:`.QueueServer` will start collecting data. Parameters ---------- queues : :any:`int`, *array_like* (optional) The edge index (or an iterable of edge indices) identifying the :class:`QueueServer(s)<.QueueServer>` that will start collecting data. edge : 2-tuple of int or *array_like* (optional) Explicitly specify which queues will collect data. Must be either: * A 2-tuple of the edge's source and target vertex indices, or * An iterable of 2-tuples of the edge's source and target vertex indices. edge_type : int or an iterable of int (optional) A integer, or a collection of integers identifying which edge types will be set active. ] variable[queues] assign[=] call[name[_get_queues], parameter[name[self].g, name[queues], name[edge], name[edge_type]]] for taget[name[k]] in starred[name[queues]] begin[:] call[name[self].edge2queue][name[k]].collect_data assign[=] constant[True]
keyword[def] identifier[start_collecting_data] ( identifier[self] , identifier[queues] = keyword[None] , identifier[edge] = keyword[None] , identifier[edge_type] = keyword[None] ): literal[string] identifier[queues] = identifier[_get_queues] ( identifier[self] . identifier[g] , identifier[queues] , identifier[edge] , identifier[edge_type] ) keyword[for] identifier[k] keyword[in] identifier[queues] : identifier[self] . identifier[edge2queue] [ identifier[k] ]. identifier[collect_data] = keyword[True]
def start_collecting_data(self, queues=None, edge=None, edge_type=None): """Tells the queues to collect data on agents' arrival, service start, and departure times. If none of the parameters are given then every :class:`.QueueServer` will start collecting data. Parameters ---------- queues : :any:`int`, *array_like* (optional) The edge index (or an iterable of edge indices) identifying the :class:`QueueServer(s)<.QueueServer>` that will start collecting data. edge : 2-tuple of int or *array_like* (optional) Explicitly specify which queues will collect data. Must be either: * A 2-tuple of the edge's source and target vertex indices, or * An iterable of 2-tuples of the edge's source and target vertex indices. edge_type : int or an iterable of int (optional) A integer, or a collection of integers identifying which edge types will be set active. """ queues = _get_queues(self.g, queues, edge, edge_type) for k in queues: self.edge2queue[k].collect_data = True # depends on [control=['for'], data=['k']]
def merge(polylines, mx_dist=4): """ point by line segment comparison merge polylines if points are close """ l = len(polylines) to_remove = set() for n in range(l - 1, -1, -1): if n not in to_remove: c = polylines[n] for p0, p1 in zip(c[:-1], c[1:]): # create a line from any subsegment: l0 = p0[0], p0[1], p1[0], p1[1] # for every other polyline: for m in range(l - 1, -1, -1): if m not in to_remove: if n == m: continue remove = False cc = polylines[m] ind = np.zeros(shape=cc.shape[0], dtype=bool) # for every point p in this polyline: for o in range(len(cc) - 1, -1, -1): p = cc[o] if line.segmentDistance(l0, p) < mx_dist: remove = True ind[o] = True if remove: polylines[n] = np.append(c, cc[ind], axis=0) ind = ~ind s = ind.sum() if s < 2: to_remove.add(m) else: polylines[m] = cc[ind] to_remove = sorted(to_remove) to_remove.reverse() for i in to_remove: polylines.pop(i)
def function[merge, parameter[polylines, mx_dist]]: constant[ point by line segment comparison merge polylines if points are close ] variable[l] assign[=] call[name[len], parameter[name[polylines]]] variable[to_remove] assign[=] call[name[set], parameter[]] for taget[name[n]] in starred[call[name[range], parameter[binary_operation[name[l] - constant[1]], <ast.UnaryOp object at 0x7da18ede65f0>, <ast.UnaryOp object at 0x7da18ede4340>]]] begin[:] if compare[name[n] <ast.NotIn object at 0x7da2590d7190> name[to_remove]] begin[:] variable[c] assign[=] call[name[polylines]][name[n]] for taget[tuple[[<ast.Name object at 0x7da18dc98b80>, <ast.Name object at 0x7da18dc981f0>]]] in starred[call[name[zip], parameter[call[name[c]][<ast.Slice object at 0x7da18dc9b1c0>], call[name[c]][<ast.Slice object at 0x7da18dc98b50>]]]] begin[:] variable[l0] assign[=] tuple[[<ast.Subscript object at 0x7da18dc99660>, <ast.Subscript object at 0x7da18dc99d80>, <ast.Subscript object at 0x7da18dc99e10>, <ast.Subscript object at 0x7da18dc99750>]] for taget[name[m]] in starred[call[name[range], parameter[binary_operation[name[l] - constant[1]], <ast.UnaryOp object at 0x7da18dc99300>, <ast.UnaryOp object at 0x7da18dc99a50>]]] begin[:] if compare[name[m] <ast.NotIn object at 0x7da2590d7190> name[to_remove]] begin[:] if compare[name[n] equal[==] name[m]] begin[:] continue variable[remove] assign[=] constant[False] variable[cc] assign[=] call[name[polylines]][name[m]] variable[ind] assign[=] call[name[np].zeros, parameter[]] for taget[name[o]] in starred[call[name[range], parameter[binary_operation[call[name[len], parameter[name[cc]]] - constant[1]], <ast.UnaryOp object at 0x7da18dc9a1a0>, <ast.UnaryOp object at 0x7da18dc993f0>]]] begin[:] variable[p] assign[=] call[name[cc]][name[o]] if compare[call[name[line].segmentDistance, parameter[name[l0], name[p]]] less[<] name[mx_dist]] begin[:] variable[remove] assign[=] constant[True] call[name[ind]][name[o]] assign[=] constant[True] if name[remove] begin[:] 
call[name[polylines]][name[n]] assign[=] call[name[np].append, parameter[name[c], call[name[cc]][name[ind]]]] variable[ind] assign[=] <ast.UnaryOp object at 0x7da18dc99570> variable[s] assign[=] call[name[ind].sum, parameter[]] if compare[name[s] less[<] constant[2]] begin[:] call[name[to_remove].add, parameter[name[m]]] variable[to_remove] assign[=] call[name[sorted], parameter[name[to_remove]]] call[name[to_remove].reverse, parameter[]] for taget[name[i]] in starred[name[to_remove]] begin[:] call[name[polylines].pop, parameter[name[i]]]
keyword[def] identifier[merge] ( identifier[polylines] , identifier[mx_dist] = literal[int] ): literal[string] identifier[l] = identifier[len] ( identifier[polylines] ) identifier[to_remove] = identifier[set] () keyword[for] identifier[n] keyword[in] identifier[range] ( identifier[l] - literal[int] ,- literal[int] ,- literal[int] ): keyword[if] identifier[n] keyword[not] keyword[in] identifier[to_remove] : identifier[c] = identifier[polylines] [ identifier[n] ] keyword[for] identifier[p0] , identifier[p1] keyword[in] identifier[zip] ( identifier[c] [:- literal[int] ], identifier[c] [ literal[int] :]): identifier[l0] = identifier[p0] [ literal[int] ], identifier[p0] [ literal[int] ], identifier[p1] [ literal[int] ], identifier[p1] [ literal[int] ] keyword[for] identifier[m] keyword[in] identifier[range] ( identifier[l] - literal[int] ,- literal[int] ,- literal[int] ): keyword[if] identifier[m] keyword[not] keyword[in] identifier[to_remove] : keyword[if] identifier[n] == identifier[m] : keyword[continue] identifier[remove] = keyword[False] identifier[cc] = identifier[polylines] [ identifier[m] ] identifier[ind] = identifier[np] . identifier[zeros] ( identifier[shape] = identifier[cc] . identifier[shape] [ literal[int] ], identifier[dtype] = identifier[bool] ) keyword[for] identifier[o] keyword[in] identifier[range] ( identifier[len] ( identifier[cc] )- literal[int] ,- literal[int] ,- literal[int] ): identifier[p] = identifier[cc] [ identifier[o] ] keyword[if] identifier[line] . identifier[segmentDistance] ( identifier[l0] , identifier[p] )< identifier[mx_dist] : identifier[remove] = keyword[True] identifier[ind] [ identifier[o] ]= keyword[True] keyword[if] identifier[remove] : identifier[polylines] [ identifier[n] ]= identifier[np] . identifier[append] ( identifier[c] , identifier[cc] [ identifier[ind] ], identifier[axis] = literal[int] ) identifier[ind] =~ identifier[ind] identifier[s] = identifier[ind] . 
identifier[sum] () keyword[if] identifier[s] < literal[int] : identifier[to_remove] . identifier[add] ( identifier[m] ) keyword[else] : identifier[polylines] [ identifier[m] ]= identifier[cc] [ identifier[ind] ] identifier[to_remove] = identifier[sorted] ( identifier[to_remove] ) identifier[to_remove] . identifier[reverse] () keyword[for] identifier[i] keyword[in] identifier[to_remove] : identifier[polylines] . identifier[pop] ( identifier[i] )
def merge(polylines, mx_dist=4): """ point by line segment comparison merge polylines if points are close """ l = len(polylines) to_remove = set() for n in range(l - 1, -1, -1): if n not in to_remove: c = polylines[n] for (p0, p1) in zip(c[:-1], c[1:]): # create a line from any subsegment: l0 = (p0[0], p0[1], p1[0], p1[1]) # for every other polyline: for m in range(l - 1, -1, -1): if m not in to_remove: if n == m: continue # depends on [control=['if'], data=[]] remove = False cc = polylines[m] ind = np.zeros(shape=cc.shape[0], dtype=bool) # for every point p in this polyline: for o in range(len(cc) - 1, -1, -1): p = cc[o] if line.segmentDistance(l0, p) < mx_dist: remove = True ind[o] = True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['o']] if remove: polylines[n] = np.append(c, cc[ind], axis=0) ind = ~ind s = ind.sum() if s < 2: to_remove.add(m) # depends on [control=['if'], data=[]] else: polylines[m] = cc[ind] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['m', 'to_remove']] # depends on [control=['for'], data=['m']] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=['n', 'to_remove']] # depends on [control=['for'], data=['n']] to_remove = sorted(to_remove) to_remove.reverse() for i in to_remove: polylines.pop(i) # depends on [control=['for'], data=['i']]
def get_md5hash(self, bucket_name, object_name): """ Gets the MD5 hash of an object in Google Cloud Storage. :param bucket_name: The Google cloud storage bucket where the blob_name is. :type bucket_name: str :param object_name: The name of the object to check in the Google cloud storage bucket_name. :type object_name: str """ self.log.info('Retrieving the MD5 hash of ' 'object: %s in bucket: %s', object_name, bucket_name) client = self.get_conn() bucket = client.get_bucket(bucket_name=bucket_name) blob = bucket.get_blob(blob_name=object_name) blob.reload() blob_md5hash = blob.md5_hash self.log.info('The md5Hash of %s is %s', object_name, blob_md5hash) return blob_md5hash
def function[get_md5hash, parameter[self, bucket_name, object_name]]: constant[ Gets the MD5 hash of an object in Google Cloud Storage. :param bucket_name: The Google cloud storage bucket where the blob_name is. :type bucket_name: str :param object_name: The name of the object to check in the Google cloud storage bucket_name. :type object_name: str ] call[name[self].log.info, parameter[constant[Retrieving the MD5 hash of object: %s in bucket: %s], name[object_name], name[bucket_name]]] variable[client] assign[=] call[name[self].get_conn, parameter[]] variable[bucket] assign[=] call[name[client].get_bucket, parameter[]] variable[blob] assign[=] call[name[bucket].get_blob, parameter[]] call[name[blob].reload, parameter[]] variable[blob_md5hash] assign[=] name[blob].md5_hash call[name[self].log.info, parameter[constant[The md5Hash of %s is %s], name[object_name], name[blob_md5hash]]] return[name[blob_md5hash]]
keyword[def] identifier[get_md5hash] ( identifier[self] , identifier[bucket_name] , identifier[object_name] ): literal[string] identifier[self] . identifier[log] . identifier[info] ( literal[string] literal[string] , identifier[object_name] , identifier[bucket_name] ) identifier[client] = identifier[self] . identifier[get_conn] () identifier[bucket] = identifier[client] . identifier[get_bucket] ( identifier[bucket_name] = identifier[bucket_name] ) identifier[blob] = identifier[bucket] . identifier[get_blob] ( identifier[blob_name] = identifier[object_name] ) identifier[blob] . identifier[reload] () identifier[blob_md5hash] = identifier[blob] . identifier[md5_hash] identifier[self] . identifier[log] . identifier[info] ( literal[string] , identifier[object_name] , identifier[blob_md5hash] ) keyword[return] identifier[blob_md5hash]
def get_md5hash(self, bucket_name, object_name): """ Gets the MD5 hash of an object in Google Cloud Storage. :param bucket_name: The Google cloud storage bucket where the blob_name is. :type bucket_name: str :param object_name: The name of the object to check in the Google cloud storage bucket_name. :type object_name: str """ self.log.info('Retrieving the MD5 hash of object: %s in bucket: %s', object_name, bucket_name) client = self.get_conn() bucket = client.get_bucket(bucket_name=bucket_name) blob = bucket.get_blob(blob_name=object_name) blob.reload() blob_md5hash = blob.md5_hash self.log.info('The md5Hash of %s is %s', object_name, blob_md5hash) return blob_md5hash
def _get_api_events(self, function): """ Method to return a dictionary of API Events on the function :param SamResource function: Function Resource object :return dict: Dictionary of API events along with any other configuration passed to it. Example: { FooEvent: {Path: "/foo", Method: "post", RestApiId: blah, MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}, BarEvent: {Path: "/bar", Method: "any", MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}" } """ if not (function.valid() and isinstance(function.properties, dict) and isinstance(function.properties.get("Events"), dict) ): # Function resource structure is invalid. return {} api_events = {} for event_id, event in function.properties["Events"].items(): if event and isinstance(event, dict) and event.get("Type") == "Api": api_events[event_id] = event return api_events
def function[_get_api_events, parameter[self, function]]: constant[ Method to return a dictionary of API Events on the function :param SamResource function: Function Resource object :return dict: Dictionary of API events along with any other configuration passed to it. Example: { FooEvent: {Path: "/foo", Method: "post", RestApiId: blah, MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}, BarEvent: {Path: "/bar", Method: "any", MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}" } ] if <ast.UnaryOp object at 0x7da1b1e16140> begin[:] return[dictionary[[], []]] variable[api_events] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20c76ec80>, <ast.Name object at 0x7da20c76d630>]]] in starred[call[call[name[function].properties][constant[Events]].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da20e957100> begin[:] call[name[api_events]][name[event_id]] assign[=] name[event] return[name[api_events]]
keyword[def] identifier[_get_api_events] ( identifier[self] , identifier[function] ): literal[string] keyword[if] keyword[not] ( identifier[function] . identifier[valid] () keyword[and] identifier[isinstance] ( identifier[function] . identifier[properties] , identifier[dict] ) keyword[and] identifier[isinstance] ( identifier[function] . identifier[properties] . identifier[get] ( literal[string] ), identifier[dict] ) ): keyword[return] {} identifier[api_events] ={} keyword[for] identifier[event_id] , identifier[event] keyword[in] identifier[function] . identifier[properties] [ literal[string] ]. identifier[items] (): keyword[if] identifier[event] keyword[and] identifier[isinstance] ( identifier[event] , identifier[dict] ) keyword[and] identifier[event] . identifier[get] ( literal[string] )== literal[string] : identifier[api_events] [ identifier[event_id] ]= identifier[event] keyword[return] identifier[api_events]
def _get_api_events(self, function): """ Method to return a dictionary of API Events on the function :param SamResource function: Function Resource object :return dict: Dictionary of API events along with any other configuration passed to it. Example: { FooEvent: {Path: "/foo", Method: "post", RestApiId: blah, MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}, BarEvent: {Path: "/bar", Method: "any", MethodSettings: {<something>}, Cors: {<something>}, Auth: {<something>}}" } """ if not (function.valid() and isinstance(function.properties, dict) and isinstance(function.properties.get('Events'), dict)): # Function resource structure is invalid. return {} # depends on [control=['if'], data=[]] api_events = {} for (event_id, event) in function.properties['Events'].items(): if event and isinstance(event, dict) and (event.get('Type') == 'Api'): api_events[event_id] = event # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return api_events
def _aux_type(self, i): """Data-type of the array's ith aux data. Returns ------- numpy.dtype This BaseSparseNDArray's aux data type. """ aux_type = ctypes.c_int() check_call(_LIB.MXNDArrayGetAuxType(self.handle, i, ctypes.byref(aux_type))) return _DTYPE_MX_TO_NP[aux_type.value]
def function[_aux_type, parameter[self, i]]: constant[Data-type of the array's ith aux data. Returns ------- numpy.dtype This BaseSparseNDArray's aux data type. ] variable[aux_type] assign[=] call[name[ctypes].c_int, parameter[]] call[name[check_call], parameter[call[name[_LIB].MXNDArrayGetAuxType, parameter[name[self].handle, name[i], call[name[ctypes].byref, parameter[name[aux_type]]]]]]] return[call[name[_DTYPE_MX_TO_NP]][name[aux_type].value]]
keyword[def] identifier[_aux_type] ( identifier[self] , identifier[i] ): literal[string] identifier[aux_type] = identifier[ctypes] . identifier[c_int] () identifier[check_call] ( identifier[_LIB] . identifier[MXNDArrayGetAuxType] ( identifier[self] . identifier[handle] , identifier[i] , identifier[ctypes] . identifier[byref] ( identifier[aux_type] ))) keyword[return] identifier[_DTYPE_MX_TO_NP] [ identifier[aux_type] . identifier[value] ]
def _aux_type(self, i): """Data-type of the array's ith aux data. Returns ------- numpy.dtype This BaseSparseNDArray's aux data type. """ aux_type = ctypes.c_int() check_call(_LIB.MXNDArrayGetAuxType(self.handle, i, ctypes.byref(aux_type))) return _DTYPE_MX_TO_NP[aux_type.value]
def register_type(env_type, alias=None): """Registers environment type. :param str|unicode|Environment env_type: Environment type or its alias (for already registered types). :param str|unicode alias: Alias to register type under. If not set type name is used. :rtype: Environment """ if isinstance(env_type, string_types): env_type = TYPES[env_type] if alias is None: alias = env_type.name TYPES[alias] = env_type for alias in env_type.aliases: TYPES[alias] = env_type return env_type
def function[register_type, parameter[env_type, alias]]: constant[Registers environment type. :param str|unicode|Environment env_type: Environment type or its alias (for already registered types). :param str|unicode alias: Alias to register type under. If not set type name is used. :rtype: Environment ] if call[name[isinstance], parameter[name[env_type], name[string_types]]] begin[:] variable[env_type] assign[=] call[name[TYPES]][name[env_type]] if compare[name[alias] is constant[None]] begin[:] variable[alias] assign[=] name[env_type].name call[name[TYPES]][name[alias]] assign[=] name[env_type] for taget[name[alias]] in starred[name[env_type].aliases] begin[:] call[name[TYPES]][name[alias]] assign[=] name[env_type] return[name[env_type]]
keyword[def] identifier[register_type] ( identifier[env_type] , identifier[alias] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[env_type] , identifier[string_types] ): identifier[env_type] = identifier[TYPES] [ identifier[env_type] ] keyword[if] identifier[alias] keyword[is] keyword[None] : identifier[alias] = identifier[env_type] . identifier[name] identifier[TYPES] [ identifier[alias] ]= identifier[env_type] keyword[for] identifier[alias] keyword[in] identifier[env_type] . identifier[aliases] : identifier[TYPES] [ identifier[alias] ]= identifier[env_type] keyword[return] identifier[env_type]
def register_type(env_type, alias=None): """Registers environment type. :param str|unicode|Environment env_type: Environment type or its alias (for already registered types). :param str|unicode alias: Alias to register type under. If not set type name is used. :rtype: Environment """ if isinstance(env_type, string_types): env_type = TYPES[env_type] # depends on [control=['if'], data=[]] if alias is None: alias = env_type.name # depends on [control=['if'], data=['alias']] TYPES[alias] = env_type for alias in env_type.aliases: TYPES[alias] = env_type # depends on [control=['for'], data=['alias']] return env_type
def has_method(obj, name): """ Checks if object has a method with specified name. :param obj: an object to introspect. :param name: a name of the method to check. :return: true if the object has the method and false if it doesn't. """ if obj == None: raise Exception("Object cannot be null") if name == None: raise Exception("Method name cannot be null") name = name.lower() for method_name in dir(obj): if method_name.lower() != name: continue method = getattr(obj, method_name) if MethodReflector._is_method(method, method_name): return True return False
def function[has_method, parameter[obj, name]]: constant[ Checks if object has a method with specified name. :param obj: an object to introspect. :param name: a name of the method to check. :return: true if the object has the method and false if it doesn't. ] if compare[name[obj] equal[==] constant[None]] begin[:] <ast.Raise object at 0x7da20e9b2740> if compare[name[name] equal[==] constant[None]] begin[:] <ast.Raise object at 0x7da20e9b2b90> variable[name] assign[=] call[name[name].lower, parameter[]] for taget[name[method_name]] in starred[call[name[dir], parameter[name[obj]]]] begin[:] if compare[call[name[method_name].lower, parameter[]] not_equal[!=] name[name]] begin[:] continue variable[method] assign[=] call[name[getattr], parameter[name[obj], name[method_name]]] if call[name[MethodReflector]._is_method, parameter[name[method], name[method_name]]] begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[has_method] ( identifier[obj] , identifier[name] ): literal[string] keyword[if] identifier[obj] == keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) keyword[if] identifier[name] == keyword[None] : keyword[raise] identifier[Exception] ( literal[string] ) identifier[name] = identifier[name] . identifier[lower] () keyword[for] identifier[method_name] keyword[in] identifier[dir] ( identifier[obj] ): keyword[if] identifier[method_name] . identifier[lower] ()!= identifier[name] : keyword[continue] identifier[method] = identifier[getattr] ( identifier[obj] , identifier[method_name] ) keyword[if] identifier[MethodReflector] . identifier[_is_method] ( identifier[method] , identifier[method_name] ): keyword[return] keyword[True] keyword[return] keyword[False]
def has_method(obj, name): """ Checks if object has a method with specified name. :param obj: an object to introspect. :param name: a name of the method to check. :return: true if the object has the method and false if it doesn't. """ if obj == None: raise Exception('Object cannot be null') # depends on [control=['if'], data=[]] if name == None: raise Exception('Method name cannot be null') # depends on [control=['if'], data=[]] name = name.lower() for method_name in dir(obj): if method_name.lower() != name: continue # depends on [control=['if'], data=[]] method = getattr(obj, method_name) if MethodReflector._is_method(method, method_name): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['method_name']] return False
def evaluate(self, sequence): """ Compute the lineage on the sequence. :param sequence: Sequence to compute :return: Evaluated sequence """ last_cache_index = self.cache_scan() transformations = self.transformations[last_cache_index:] return self.engine.evaluate(sequence, transformations)
def function[evaluate, parameter[self, sequence]]: constant[ Compute the lineage on the sequence. :param sequence: Sequence to compute :return: Evaluated sequence ] variable[last_cache_index] assign[=] call[name[self].cache_scan, parameter[]] variable[transformations] assign[=] call[name[self].transformations][<ast.Slice object at 0x7da20c6e6dd0>] return[call[name[self].engine.evaluate, parameter[name[sequence], name[transformations]]]]
keyword[def] identifier[evaluate] ( identifier[self] , identifier[sequence] ): literal[string] identifier[last_cache_index] = identifier[self] . identifier[cache_scan] () identifier[transformations] = identifier[self] . identifier[transformations] [ identifier[last_cache_index] :] keyword[return] identifier[self] . identifier[engine] . identifier[evaluate] ( identifier[sequence] , identifier[transformations] )
def evaluate(self, sequence): """ Compute the lineage on the sequence. :param sequence: Sequence to compute :return: Evaluated sequence """ last_cache_index = self.cache_scan() transformations = self.transformations[last_cache_index:] return self.engine.evaluate(sequence, transformations)
def setParseAction( self, *fns, **kwargs ): """Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{parseString}<parseString>} for more information on parsing strings containing C{<TAB>}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ self.parseAction = list(map(_trim_arity, list(fns))) self.callDuringTry = ("callDuringTry" in kwargs and kwargs["callDuringTry"]) return self
def function[setParseAction, parameter[self]]: constant[Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{parseString}<parseString>} for more information on parsing strings containing C{<TAB>}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. ] name[self].parseAction assign[=] call[name[list], parameter[call[name[map], parameter[name[_trim_arity], call[name[list], parameter[name[fns]]]]]]] name[self].callDuringTry assign[=] <ast.BoolOp object at 0x7da18c4cfd30> return[name[self]]
keyword[def] identifier[setParseAction] ( identifier[self] ,* identifier[fns] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[parseAction] = identifier[list] ( identifier[map] ( identifier[_trim_arity] , identifier[list] ( identifier[fns] ))) identifier[self] . identifier[callDuringTry] =( literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ]) keyword[return] identifier[self]
def setParseAction(self, *fns, **kwargs): """Define action to perform when successfully matching parse element definition. Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: - s = the original string being parsed (see note below) - loc = the location of the matching substring - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object If the functions in fns modify the tokens, they can return them as the return value from fn, and the modified list of tokens will replace the original. Otherwise, fn does not need to return any value. Note: the default parsing behavior is to expand tabs in the input string before starting the parsing process. See L{I{parseString}<parseString>} for more information on parsing strings containing C{<TAB>}s, and suggested methods to maintain a consistent view of the parsed string, the parse location, and line and column positions within the parsed string. """ self.parseAction = list(map(_trim_arity, list(fns))) self.callDuringTry = 'callDuringTry' in kwargs and kwargs['callDuringTry'] return self
def rgb_2_hex(self, r, g, b): """ convert a rgb color to hex """ return "#{:02X}{:02X}{:02X}".format(int(r * 255), int(g * 255), int(b * 255))
def function[rgb_2_hex, parameter[self, r, g, b]]: constant[ convert a rgb color to hex ] return[call[constant[#{:02X}{:02X}{:02X}].format, parameter[call[name[int], parameter[binary_operation[name[r] * constant[255]]]], call[name[int], parameter[binary_operation[name[g] * constant[255]]]], call[name[int], parameter[binary_operation[name[b] * constant[255]]]]]]]
keyword[def] identifier[rgb_2_hex] ( identifier[self] , identifier[r] , identifier[g] , identifier[b] ): literal[string] keyword[return] literal[string] . identifier[format] ( identifier[int] ( identifier[r] * literal[int] ), identifier[int] ( identifier[g] * literal[int] ), identifier[int] ( identifier[b] * literal[int] ))
def rgb_2_hex(self, r, g, b): """ convert a rgb color to hex """ return '#{:02X}{:02X}{:02X}'.format(int(r * 255), int(g * 255), int(b * 255))
def rectangle(self, x1, y1, x2, y2, color="black", outline=False, outline_color="black"): """ Draws a rectangle between 2 points :param int x1: The x position of the starting point. :param int y1: The y position of the starting point. :param int x2: The x position of the end point. :param int y2: The y position of the end point. :param str color: The color of the shape. Defaults to `"black"`. :param int outline: `0` or `False` is no outline. `True` or value > 1 sets an outline. Defaults to `False`. :param str outline_color: The color of the outline. Defaults to `"black"`. :return: The id of the shape. """ return self.tk.create_rectangle( x1, y1, x2, y2, outline = utils.convert_color(outline_color) if outline else "", width = int(outline), fill = "" if color is None else utils.convert_color(color) )
def function[rectangle, parameter[self, x1, y1, x2, y2, color, outline, outline_color]]: constant[ Draws a rectangle between 2 points :param int x1: The x position of the starting point. :param int y1: The y position of the starting point. :param int x2: The x position of the end point. :param int y2: The y position of the end point. :param str color: The color of the shape. Defaults to `"black"`. :param int outline: `0` or `False` is no outline. `True` or value > 1 sets an outline. Defaults to `False`. :param str outline_color: The color of the outline. Defaults to `"black"`. :return: The id of the shape. ] return[call[name[self].tk.create_rectangle, parameter[name[x1], name[y1], name[x2], name[y2]]]]
keyword[def] identifier[rectangle] ( identifier[self] , identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] , identifier[color] = literal[string] , identifier[outline] = keyword[False] , identifier[outline_color] = literal[string] ): literal[string] keyword[return] identifier[self] . identifier[tk] . identifier[create_rectangle] ( identifier[x1] , identifier[y1] , identifier[x2] , identifier[y2] , identifier[outline] = identifier[utils] . identifier[convert_color] ( identifier[outline_color] ) keyword[if] identifier[outline] keyword[else] literal[string] , identifier[width] = identifier[int] ( identifier[outline] ), identifier[fill] = literal[string] keyword[if] identifier[color] keyword[is] keyword[None] keyword[else] identifier[utils] . identifier[convert_color] ( identifier[color] ) )
def rectangle(self, x1, y1, x2, y2, color='black', outline=False, outline_color='black'): """ Draws a rectangle between 2 points :param int x1: The x position of the starting point. :param int y1: The y position of the starting point. :param int x2: The x position of the end point. :param int y2: The y position of the end point. :param str color: The color of the shape. Defaults to `"black"`. :param int outline: `0` or `False` is no outline. `True` or value > 1 sets an outline. Defaults to `False`. :param str outline_color: The color of the outline. Defaults to `"black"`. :return: The id of the shape. """ return self.tk.create_rectangle(x1, y1, x2, y2, outline=utils.convert_color(outline_color) if outline else '', width=int(outline), fill='' if color is None else utils.convert_color(color))
def consume(self, callback, queue, previous_consumer=None): """ Register a message consumer that executes the provided callback when messages are received. The queue must exist prior to calling this method. If a consumer already exists for the given queue, the callback is simply updated and any new messages for that consumer use the new callback. Args: callback (callable): The callback to invoke when a message is received. queue (str): The name of the queue to consume from. previous_consumer (ConsumerV2): If this is the resumption of a prior consumer, you can provide the previous consumer so its result deferred can be re-used. Returns: Deferred: A Deferred that fires when the consumer is successfully registered with the message broker. The callback receives a :class:`.ConsumerV2` object that represents the AMQP consumer. The Deferred may error back with a :class:`PermissionException` if the user cannot read from the queue, a :class:`NoFreeChannels` if this connection has hit its channel limit, or a :class:`ConnectionException` if the connection dies before the consumer is successfully registered. NoFreeChannels: If there are no available channels on this connection. If this occurs, you can either reduce the number of consumers on this connection or create an additional connection. 
""" if queue in self._consumers: self._consumers[queue].callback = callback defer.returnValue(self._consumers[queue]) if previous_consumer is not None: consumer = previous_consumer else: consumer = ConsumerV2(queue=queue, callback=callback) consumer._protocol = self consumer._channel = yield self._allocate_channel() try: queue_object, _ = yield consumer._channel.basic_consume( queue=consumer.queue, consumer_tag=consumer._tag ) except pika.exceptions.ChannelClosed as exc: if exc.args[0] == 403: raise PermissionException( obj_type="queue", description=queue, reason=exc.args[1] ) else: raise ConnectionException(reason=exc) def on_cancel_callback(frame): """ Called when the consumer is canceled server-side. This can happen, for example, when the queue is deleted. To handle this, we do the necessary book-keeping to remove the consumer and then fire the errback on the consumer so the caller of :func:`fedora_messaging.api.consume` can decide what to do. Args: frame (pika.frame.Method): The cancel method from the server, unused here because we already know what consumer is being canceled. """ _std_log.error("%r was canceled by the AMQP broker!", consumer) # If client and server are racing to cancel it might already be gone which # is why both are marked as no cover. try: del self._consumers[consumer.queue] except KeyError: # pragma: no cover pass try: del self.factory._consumers[consumer.queue] except KeyError: # pragma: no cover pass consumer._running = False consumer.result.errback(fail=ConsumerCanceled()) try: consumer._channel.add_on_cancel_callback(on_cancel_callback) except AttributeError: pass # pika 1.0.0+ def read_loop_errback(failure): """ Handle errors coming out of the read loop. 
There are two basic categories of errors: ones where the ``consumer.result`` Deferred needs to be fired because the error is not recoverable, ones where we can recover from by letting the connection restart, and ones which are fatal for this consumer only (the queue was deleted by an administrator). Args: failure (twisted.python.failure.Failure): The exception raised by the read loop encapsulated in a Failure. """ exc = failure.value if failure.check(pika.exceptions.ConsumerCancelled): # Pika 1.0.0+ raises this exception. To support previous versions # we register a callback (called below) ourselves with the channel. on_cancel_callback(None) elif failure.check(pika.exceptions.ChannelClosed): if exc.args[0] == 403: # This is a mis-configuration, the consumer can register itself, # but it doesn't have permissions to read from the queue, # so no amount of restarting will help. e = PermissionException( obj_type="queue", description=queue, reason=failure.value.args[1], ) consumer.result.errback(Failure(e, PermissionException)) consumer.cancel() else: _std_log.exception( "Consumer halted (%r) unexpectedly; " "the connection should restart.", failure, ) elif failure.check(error.ConnectionDone, error.ConnectionLost): _std_log.warning( "The connection to the broker was lost (%r), consumer halted; " "the connection should restart and consuming will resume.", exc, ) elif failure.check(pika.exceptions.AMQPError): _std_log.exception( "An unexpected AMQP error occurred; the connection should " "restart, but please report this as a bug." ) else: consumer.result.errback(failure) consumer.cancel() consumer._read_loop = self._read(queue_object, consumer) consumer._read_loop.addErrback(read_loop_errback) self._consumers[queue] = consumer _std_log.info("Successfully registered AMQP consumer %r", consumer) defer.returnValue(consumer)
def function[consume, parameter[self, callback, queue, previous_consumer]]: constant[ Register a message consumer that executes the provided callback when messages are received. The queue must exist prior to calling this method. If a consumer already exists for the given queue, the callback is simply updated and any new messages for that consumer use the new callback. Args: callback (callable): The callback to invoke when a message is received. queue (str): The name of the queue to consume from. previous_consumer (ConsumerV2): If this is the resumption of a prior consumer, you can provide the previous consumer so its result deferred can be re-used. Returns: Deferred: A Deferred that fires when the consumer is successfully registered with the message broker. The callback receives a :class:`.ConsumerV2` object that represents the AMQP consumer. The Deferred may error back with a :class:`PermissionException` if the user cannot read from the queue, a :class:`NoFreeChannels` if this connection has hit its channel limit, or a :class:`ConnectionException` if the connection dies before the consumer is successfully registered. NoFreeChannels: If there are no available channels on this connection. If this occurs, you can either reduce the number of consumers on this connection or create an additional connection. ] if compare[name[queue] in name[self]._consumers] begin[:] call[name[self]._consumers][name[queue]].callback assign[=] name[callback] call[name[defer].returnValue, parameter[call[name[self]._consumers][name[queue]]]] if compare[name[previous_consumer] is_not constant[None]] begin[:] variable[consumer] assign[=] name[previous_consumer] name[consumer]._protocol assign[=] name[self] name[consumer]._channel assign[=] <ast.Yield object at 0x7da1b0464ca0> <ast.Try object at 0x7da1b0464760> def function[on_cancel_callback, parameter[frame]]: constant[ Called when the consumer is canceled server-side. This can happen, for example, when the queue is deleted. 
To handle this, we do the necessary book-keeping to remove the consumer and then fire the errback on the consumer so the caller of :func:`fedora_messaging.api.consume` can decide what to do. Args: frame (pika.frame.Method): The cancel method from the server, unused here because we already know what consumer is being canceled. ] call[name[_std_log].error, parameter[constant[%r was canceled by the AMQP broker!], name[consumer]]] <ast.Try object at 0x7da1b0416410> <ast.Try object at 0x7da1b0416e60> name[consumer]._running assign[=] constant[False] call[name[consumer].result.errback, parameter[]] <ast.Try object at 0x7da1b0416ec0> def function[read_loop_errback, parameter[failure]]: constant[ Handle errors coming out of the read loop. There are two basic categories of errors: ones where the ``consumer.result`` Deferred needs to be fired because the error is not recoverable, ones where we can recover from by letting the connection restart, and ones which are fatal for this consumer only (the queue was deleted by an administrator). Args: failure (twisted.python.failure.Failure): The exception raised by the read loop encapsulated in a Failure. ] variable[exc] assign[=] name[failure].value if call[name[failure].check, parameter[name[pika].exceptions.ConsumerCancelled]] begin[:] call[name[on_cancel_callback], parameter[constant[None]]] name[consumer]._read_loop assign[=] call[name[self]._read, parameter[name[queue_object], name[consumer]]] call[name[consumer]._read_loop.addErrback, parameter[name[read_loop_errback]]] call[name[self]._consumers][name[queue]] assign[=] name[consumer] call[name[_std_log].info, parameter[constant[Successfully registered AMQP consumer %r], name[consumer]]] call[name[defer].returnValue, parameter[name[consumer]]]
keyword[def] identifier[consume] ( identifier[self] , identifier[callback] , identifier[queue] , identifier[previous_consumer] = keyword[None] ): literal[string] keyword[if] identifier[queue] keyword[in] identifier[self] . identifier[_consumers] : identifier[self] . identifier[_consumers] [ identifier[queue] ]. identifier[callback] = identifier[callback] identifier[defer] . identifier[returnValue] ( identifier[self] . identifier[_consumers] [ identifier[queue] ]) keyword[if] identifier[previous_consumer] keyword[is] keyword[not] keyword[None] : identifier[consumer] = identifier[previous_consumer] keyword[else] : identifier[consumer] = identifier[ConsumerV2] ( identifier[queue] = identifier[queue] , identifier[callback] = identifier[callback] ) identifier[consumer] . identifier[_protocol] = identifier[self] identifier[consumer] . identifier[_channel] = keyword[yield] identifier[self] . identifier[_allocate_channel] () keyword[try] : identifier[queue_object] , identifier[_] = keyword[yield] identifier[consumer] . identifier[_channel] . identifier[basic_consume] ( identifier[queue] = identifier[consumer] . identifier[queue] , identifier[consumer_tag] = identifier[consumer] . identifier[_tag] ) keyword[except] identifier[pika] . identifier[exceptions] . identifier[ChannelClosed] keyword[as] identifier[exc] : keyword[if] identifier[exc] . identifier[args] [ literal[int] ]== literal[int] : keyword[raise] identifier[PermissionException] ( identifier[obj_type] = literal[string] , identifier[description] = identifier[queue] , identifier[reason] = identifier[exc] . identifier[args] [ literal[int] ] ) keyword[else] : keyword[raise] identifier[ConnectionException] ( identifier[reason] = identifier[exc] ) keyword[def] identifier[on_cancel_callback] ( identifier[frame] ): literal[string] identifier[_std_log] . identifier[error] ( literal[string] , identifier[consumer] ) keyword[try] : keyword[del] identifier[self] . identifier[_consumers] [ identifier[consumer] . 
identifier[queue] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[try] : keyword[del] identifier[self] . identifier[factory] . identifier[_consumers] [ identifier[consumer] . identifier[queue] ] keyword[except] identifier[KeyError] : keyword[pass] identifier[consumer] . identifier[_running] = keyword[False] identifier[consumer] . identifier[result] . identifier[errback] ( identifier[fail] = identifier[ConsumerCanceled] ()) keyword[try] : identifier[consumer] . identifier[_channel] . identifier[add_on_cancel_callback] ( identifier[on_cancel_callback] ) keyword[except] identifier[AttributeError] : keyword[pass] keyword[def] identifier[read_loop_errback] ( identifier[failure] ): literal[string] identifier[exc] = identifier[failure] . identifier[value] keyword[if] identifier[failure] . identifier[check] ( identifier[pika] . identifier[exceptions] . identifier[ConsumerCancelled] ): identifier[on_cancel_callback] ( keyword[None] ) keyword[elif] identifier[failure] . identifier[check] ( identifier[pika] . identifier[exceptions] . identifier[ChannelClosed] ): keyword[if] identifier[exc] . identifier[args] [ literal[int] ]== literal[int] : identifier[e] = identifier[PermissionException] ( identifier[obj_type] = literal[string] , identifier[description] = identifier[queue] , identifier[reason] = identifier[failure] . identifier[value] . identifier[args] [ literal[int] ], ) identifier[consumer] . identifier[result] . identifier[errback] ( identifier[Failure] ( identifier[e] , identifier[PermissionException] )) identifier[consumer] . identifier[cancel] () keyword[else] : identifier[_std_log] . identifier[exception] ( literal[string] literal[string] , identifier[failure] , ) keyword[elif] identifier[failure] . identifier[check] ( identifier[error] . identifier[ConnectionDone] , identifier[error] . identifier[ConnectionLost] ): identifier[_std_log] . identifier[warning] ( literal[string] literal[string] , identifier[exc] , ) keyword[elif] identifier[failure] . 
identifier[check] ( identifier[pika] . identifier[exceptions] . identifier[AMQPError] ): identifier[_std_log] . identifier[exception] ( literal[string] literal[string] ) keyword[else] : identifier[consumer] . identifier[result] . identifier[errback] ( identifier[failure] ) identifier[consumer] . identifier[cancel] () identifier[consumer] . identifier[_read_loop] = identifier[self] . identifier[_read] ( identifier[queue_object] , identifier[consumer] ) identifier[consumer] . identifier[_read_loop] . identifier[addErrback] ( identifier[read_loop_errback] ) identifier[self] . identifier[_consumers] [ identifier[queue] ]= identifier[consumer] identifier[_std_log] . identifier[info] ( literal[string] , identifier[consumer] ) identifier[defer] . identifier[returnValue] ( identifier[consumer] )
def consume(self, callback, queue, previous_consumer=None): """ Register a message consumer that executes the provided callback when messages are received. The queue must exist prior to calling this method. If a consumer already exists for the given queue, the callback is simply updated and any new messages for that consumer use the new callback. Args: callback (callable): The callback to invoke when a message is received. queue (str): The name of the queue to consume from. previous_consumer (ConsumerV2): If this is the resumption of a prior consumer, you can provide the previous consumer so its result deferred can be re-used. Returns: Deferred: A Deferred that fires when the consumer is successfully registered with the message broker. The callback receives a :class:`.ConsumerV2` object that represents the AMQP consumer. The Deferred may error back with a :class:`PermissionException` if the user cannot read from the queue, a :class:`NoFreeChannels` if this connection has hit its channel limit, or a :class:`ConnectionException` if the connection dies before the consumer is successfully registered. NoFreeChannels: If there are no available channels on this connection. If this occurs, you can either reduce the number of consumers on this connection or create an additional connection. 
""" if queue in self._consumers: self._consumers[queue].callback = callback defer.returnValue(self._consumers[queue]) # depends on [control=['if'], data=['queue']] if previous_consumer is not None: consumer = previous_consumer # depends on [control=['if'], data=['previous_consumer']] else: consumer = ConsumerV2(queue=queue, callback=callback) consumer._protocol = self consumer._channel = (yield self._allocate_channel()) try: (queue_object, _) = (yield consumer._channel.basic_consume(queue=consumer.queue, consumer_tag=consumer._tag)) # depends on [control=['try'], data=[]] except pika.exceptions.ChannelClosed as exc: if exc.args[0] == 403: raise PermissionException(obj_type='queue', description=queue, reason=exc.args[1]) # depends on [control=['if'], data=[]] else: raise ConnectionException(reason=exc) # depends on [control=['except'], data=['exc']] def on_cancel_callback(frame): """ Called when the consumer is canceled server-side. This can happen, for example, when the queue is deleted. To handle this, we do the necessary book-keeping to remove the consumer and then fire the errback on the consumer so the caller of :func:`fedora_messaging.api.consume` can decide what to do. Args: frame (pika.frame.Method): The cancel method from the server, unused here because we already know what consumer is being canceled. """ _std_log.error('%r was canceled by the AMQP broker!', consumer) # If client and server are racing to cancel it might already be gone which # is why both are marked as no cover. 
try: del self._consumers[consumer.queue] # depends on [control=['try'], data=[]] except KeyError: # pragma: no cover pass # depends on [control=['except'], data=[]] try: del self.factory._consumers[consumer.queue] # depends on [control=['try'], data=[]] except KeyError: # pragma: no cover pass # depends on [control=['except'], data=[]] consumer._running = False consumer.result.errback(fail=ConsumerCanceled()) try: consumer._channel.add_on_cancel_callback(on_cancel_callback) # depends on [control=['try'], data=[]] except AttributeError: pass # pika 1.0.0+ # depends on [control=['except'], data=[]] def read_loop_errback(failure): """ Handle errors coming out of the read loop. There are two basic categories of errors: ones where the ``consumer.result`` Deferred needs to be fired because the error is not recoverable, ones where we can recover from by letting the connection restart, and ones which are fatal for this consumer only (the queue was deleted by an administrator). Args: failure (twisted.python.failure.Failure): The exception raised by the read loop encapsulated in a Failure. """ exc = failure.value if failure.check(pika.exceptions.ConsumerCancelled): # Pika 1.0.0+ raises this exception. To support previous versions # we register a callback (called below) ourselves with the channel. on_cancel_callback(None) # depends on [control=['if'], data=[]] elif failure.check(pika.exceptions.ChannelClosed): if exc.args[0] == 403: # This is a mis-configuration, the consumer can register itself, # but it doesn't have permissions to read from the queue, # so no amount of restarting will help. 
e = PermissionException(obj_type='queue', description=queue, reason=failure.value.args[1]) consumer.result.errback(Failure(e, PermissionException)) consumer.cancel() # depends on [control=['if'], data=[]] else: _std_log.exception('Consumer halted (%r) unexpectedly; the connection should restart.', failure) # depends on [control=['if'], data=[]] elif failure.check(error.ConnectionDone, error.ConnectionLost): _std_log.warning('The connection to the broker was lost (%r), consumer halted; the connection should restart and consuming will resume.', exc) # depends on [control=['if'], data=[]] elif failure.check(pika.exceptions.AMQPError): _std_log.exception('An unexpected AMQP error occurred; the connection should restart, but please report this as a bug.') # depends on [control=['if'], data=[]] else: consumer.result.errback(failure) consumer.cancel() consumer._read_loop = self._read(queue_object, consumer) consumer._read_loop.addErrback(read_loop_errback) self._consumers[queue] = consumer _std_log.info('Successfully registered AMQP consumer %r', consumer) defer.returnValue(consumer)
def monitor():
    """Connect to receiver and show events as they occur.

    Pulls the following arguments from the command line:

    :param device: Unix device where the PLM is attached
        (default ``/dev/ttyUSB0``)
    :param verbose: Show debug logging (repeatable ``-v`` flag)
    :param logfile: Log file name (empty string disables file logging)
    :param workdir: Working directory for reading and saving device
        information
    """
    parser = argparse.ArgumentParser(description=__doc__)
    parser.add_argument('--device', default='/dev/ttyUSB0', help='Path to PLM device') parser.add_argument('--verbose', '-v', action='count', help='Set logging level to verbose') parser.add_argument('-l', '--logfile', default='', help='Log file name') parser.add_argument('--workdir', default='', help='Working directory for reading and saving ' 'device information.') args = parser.parse_args() # The Tools helper owns the PLM connection; monitor_mode() is scheduled # on the loop and runs until the user interrupts with Ctrl-C. loop = asyncio.get_event_loop() monTool = Tools(loop, args) asyncio.ensure_future(monTool.monitor_mode()) try: loop.run_forever() except KeyboardInterrupt: # Graceful shutdown: close the PLM transport (if one was opened), # stop the loop, then cancel and drain every pending task so the # loop can be closed cleanly. if monTool.plm: if monTool.plm.transport: _LOGGING.info('Closing the session') # NOTE(review): this assumes transport.close() returns an # awaitable; a standard asyncio transport's close() returns # None, which ensure_future would reject -- confirm against # the PLM transport implementation. asyncio.ensure_future(monTool.plm.transport.close(), loop=loop) loop.stop() # NOTE(review): asyncio.Task.all_tasks(loop=...) is deprecated in # Python 3.7+ and removed in 3.9; presumably this targets an older # runtime -- verify the supported Python versions. pending = asyncio.Task.all_tasks(loop=loop) for task in pending: task.cancel() try: # Run each cancelled task to completion so its cleanup # (finally blocks) executes before the loop is closed. loop.run_until_complete(task) except asyncio.CancelledError: pass except KeyboardInterrupt: # A second Ctrl-C during shutdown is ignored so cleanup # still finishes. pass loop.close()
def function[monitor, parameter[]]: constant[Connect to receiver and show events as they occur. Pulls the following arguments from the command line: :param device: Unix device where the PLM is attached :param address: Insteon address of the device to link with :param group: Insteon group for the link :param: linkcode Link direction: 0 - PLM is responder 1 - PLM is controller 3 - IM is responder or controller' :param verbose: Show debug logging. ] variable[parser] assign[=] call[name[argparse].ArgumentParser, parameter[]] call[name[parser].add_argument, parameter[constant[--device]]] call[name[parser].add_argument, parameter[constant[--verbose], constant[-v]]] call[name[parser].add_argument, parameter[constant[-l], constant[--logfile]]] call[name[parser].add_argument, parameter[constant[--workdir]]] variable[args] assign[=] call[name[parser].parse_args, parameter[]] variable[loop] assign[=] call[name[asyncio].get_event_loop, parameter[]] variable[monTool] assign[=] call[name[Tools], parameter[name[loop], name[args]]] call[name[asyncio].ensure_future, parameter[call[name[monTool].monitor_mode, parameter[]]]] <ast.Try object at 0x7da1b1a12920>
keyword[def] identifier[monitor] (): literal[string] identifier[parser] = identifier[argparse] . identifier[ArgumentParser] ( identifier[description] = identifier[__doc__] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[action] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] ) identifier[parser] . identifier[add_argument] ( literal[string] , identifier[default] = literal[string] , identifier[help] = literal[string] literal[string] ) identifier[args] = identifier[parser] . identifier[parse_args] () identifier[loop] = identifier[asyncio] . identifier[get_event_loop] () identifier[monTool] = identifier[Tools] ( identifier[loop] , identifier[args] ) identifier[asyncio] . identifier[ensure_future] ( identifier[monTool] . identifier[monitor_mode] ()) keyword[try] : identifier[loop] . identifier[run_forever] () keyword[except] identifier[KeyboardInterrupt] : keyword[if] identifier[monTool] . identifier[plm] : keyword[if] identifier[monTool] . identifier[plm] . identifier[transport] : identifier[_LOGGING] . identifier[info] ( literal[string] ) identifier[asyncio] . identifier[ensure_future] ( identifier[monTool] . identifier[plm] . identifier[transport] . identifier[close] (), identifier[loop] = identifier[loop] ) identifier[loop] . identifier[stop] () identifier[pending] = identifier[asyncio] . identifier[Task] . identifier[all_tasks] ( identifier[loop] = identifier[loop] ) keyword[for] identifier[task] keyword[in] identifier[pending] : identifier[task] . identifier[cancel] () keyword[try] : identifier[loop] . identifier[run_until_complete] ( identifier[task] ) keyword[except] identifier[asyncio] . 
identifier[CancelledError] : keyword[pass] keyword[except] identifier[KeyboardInterrupt] : keyword[pass] identifier[loop] . identifier[close] ()
def monitor(): """Connect to receiver and show events as they occur. Pulls the following arguments from the command line: :param device: Unix device where the PLM is attached :param address: Insteon address of the device to link with :param group: Insteon group for the link :param: linkcode Link direction: 0 - PLM is responder 1 - PLM is controller 3 - IM is responder or controller' :param verbose: Show debug logging. """ parser = argparse.ArgumentParser(description=__doc__) parser.add_argument('--device', default='/dev/ttyUSB0', help='Path to PLM device') parser.add_argument('--verbose', '-v', action='count', help='Set logging level to verbose') parser.add_argument('-l', '--logfile', default='', help='Log file name') parser.add_argument('--workdir', default='', help='Working directory for reading and saving device information.') args = parser.parse_args() loop = asyncio.get_event_loop() monTool = Tools(loop, args) asyncio.ensure_future(monTool.monitor_mode()) try: loop.run_forever() # depends on [control=['try'], data=[]] except KeyboardInterrupt: if monTool.plm: if monTool.plm.transport: _LOGGING.info('Closing the session') asyncio.ensure_future(monTool.plm.transport.close(), loop=loop) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] loop.stop() pending = asyncio.Task.all_tasks(loop=loop) for task in pending: task.cancel() try: loop.run_until_complete(task) # depends on [control=['try'], data=[]] except asyncio.CancelledError: pass # depends on [control=['except'], data=[]] except KeyboardInterrupt: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['task']] loop.close() # depends on [control=['except'], data=[]]
def setCurrentRecord( self, record ):
    """
    Sets the current record for this browser to the inputed record.

    In Detail mode the record is forwarded to the detail widget; in
    Thumbnail mode the thumbnail list is scanned for the item whose
    record matches and that item is selected.

    :param      record | <orb.Table> || None
    """
    mode = self.currentMode()
    if mode == XOrbBrowserWidget.Mode.Detail:
        self.detailWidget().setCurrentRecord(record)
    elif mode == XOrbBrowserWidget.Mode.Thumbnail:
        thumbs = self.thumbnailWidget()
        for row in range(thumbs.count()):
            item = thumbs.item(row)
            # Bug fix: the original compared item.record() == item,
            # which can never be true, so no thumbnail was ever
            # selected.  Compare against the requested record instead.
            if isinstance(item, RecordListWidgetItem) and \
               item.record() == record:
                thumbs.setCurrentItem(item)
                break
def function[setCurrentRecord, parameter[self, record]]: constant[ Sets the current record for this browser to the inputed record. :param record | <orb.Table> || None ] variable[mode] assign[=] call[name[self].currentMode, parameter[]] if compare[name[mode] equal[==] name[XOrbBrowserWidget].Mode.Detail] begin[:] call[call[name[self].detailWidget, parameter[]].setCurrentRecord, parameter[name[record]]]
keyword[def] identifier[setCurrentRecord] ( identifier[self] , identifier[record] ): literal[string] identifier[mode] = identifier[self] . identifier[currentMode] () keyword[if] ( identifier[mode] == identifier[XOrbBrowserWidget] . identifier[Mode] . identifier[Detail] ): identifier[self] . identifier[detailWidget] (). identifier[setCurrentRecord] ( identifier[record] ) keyword[elif] ( identifier[mode] == identifier[XOrbBrowserWidget] . identifier[Mode] . identifier[Thumbnail] ): identifier[thumbs] = identifier[self] . identifier[thumbnailWidget] () keyword[for] identifier[row] keyword[in] identifier[range] ( identifier[thumbs] . identifier[count] ()): identifier[item] = identifier[thumbs] . identifier[item] ( identifier[row] ) keyword[if] ( identifier[isinstance] ( identifier[item] , identifier[RecordListWidgetItem] ) keyword[and] identifier[item] . identifier[record] ()== identifier[item] ): identifier[thumbs] . identifier[setCurrentItem] ( identifier[item] ) keyword[break]
def setCurrentRecord(self, record): """ Sets the current record for this browser to the inputed record. :param record | <orb.Table> || None """ mode = self.currentMode() if mode == XOrbBrowserWidget.Mode.Detail: self.detailWidget().setCurrentRecord(record) # depends on [control=['if'], data=[]] elif mode == XOrbBrowserWidget.Mode.Thumbnail: thumbs = self.thumbnailWidget() for row in range(thumbs.count()): item = thumbs.item(row) if isinstance(item, RecordListWidgetItem) and item.record() == item: thumbs.setCurrentItem(item) break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['row']] # depends on [control=['if'], data=[]]
def unregister(self, model):
    """
    Remove the permission handler previously registered for ``model``.

    Parameters
    ----------
    model : django model class
        A django model class

    Raises
    ------
    KeyError
        Raised when no permission handler has been registered for the
        model yet.
    """
    if model in self._registry:
        # Drop the handler entry for this model.
        del self._registry[model]
    else:
        raise KeyError("A permission handler class have not been "
                       "registered for '%s' yet" % model)
def function[unregister, parameter[self, model]]: constant[ Unregister a permission handler from the model Parameters ---------- model : django model class A django model class Raises ------ KeyError Raise when the model have not registered in registry yet. ] if compare[name[model] <ast.NotIn object at 0x7da2590d7190> name[self]._registry] begin[:] <ast.Raise object at 0x7da1b060bcd0> <ast.Delete object at 0x7da1b2345300>
keyword[def] identifier[unregister] ( identifier[self] , identifier[model] ): literal[string] keyword[if] identifier[model] keyword[not] keyword[in] identifier[self] . identifier[_registry] : keyword[raise] identifier[KeyError] ( literal[string] literal[string] % identifier[model] ) keyword[del] identifier[self] . identifier[_registry] [ identifier[model] ]
def unregister(self, model): """ Unregister a permission handler from the model Parameters ---------- model : django model class A django model class Raises ------ KeyError Raise when the model have not registered in registry yet. """ if model not in self._registry: raise KeyError("A permission handler class have not been registered for '%s' yet" % model) # depends on [control=['if'], data=['model']] # remove from registry del self._registry[model]
def ticket(self, handler, arg):
    """
    Register a ticket timer and return a handle to it.

    Ticket timers stay fast even with thousands of timers that are
    frequently added and removed -- their cost is constant regardless
    of count, unlike normal timers which degrade as timers accumulate.
    The typical use is per-client expiry timers that are reset on every
    message received.  The ticket delay must have been set beforehand
    with zloop_set_ticket_delay.  Pass the returned handle to
    zloop_ticket_reset and zloop_ticket_delete.
    """
    raw_handle = lib.zloop_ticket(self._as_parameter_, handler, arg)
    return c_void_p(raw_handle)
def function[ticket, parameter[self, handler, arg]]: constant[ Register a ticket timer. Ticket timers are very fast in the case where you use a lot of timers (thousands), and frequently remove and add them. The main use case is expiry timers for servers that handle many clients, and which reset the expiry timer for each message received from a client. Whereas normal timers perform poorly as the number of clients grows, the cost of ticket timers is constant, no matter the number of clients. You must set the ticket delay using zloop_set_ticket_delay before creating a ticket. Returns a handle to the timer that you should use in zloop_ticket_reset and zloop_ticket_delete. ] return[call[name[c_void_p], parameter[call[name[lib].zloop_ticket, parameter[name[self]._as_parameter_, name[handler], name[arg]]]]]]
keyword[def] identifier[ticket] ( identifier[self] , identifier[handler] , identifier[arg] ): literal[string] keyword[return] identifier[c_void_p] ( identifier[lib] . identifier[zloop_ticket] ( identifier[self] . identifier[_as_parameter_] , identifier[handler] , identifier[arg] ))
def ticket(self, handler, arg): """ Register a ticket timer. Ticket timers are very fast in the case where you use a lot of timers (thousands), and frequently remove and add them. The main use case is expiry timers for servers that handle many clients, and which reset the expiry timer for each message received from a client. Whereas normal timers perform poorly as the number of clients grows, the cost of ticket timers is constant, no matter the number of clients. You must set the ticket delay using zloop_set_ticket_delay before creating a ticket. Returns a handle to the timer that you should use in zloop_ticket_reset and zloop_ticket_delete. """ return c_void_p(lib.zloop_ticket(self._as_parameter_, handler, arg))
def concatenate(samplesets, defaults=None):
    """Combine SampleSets.

    Args:
        samplesets (iterable[:obj:`.SampleSet`):
            An iterable of sample sets.

        defaults (dict, optional):
            Dictionary mapping data vector names to the corresponding
            default values.

    Returns:
        :obj:`.SampleSet`: A sample set with the same vartype and
        variable order as the first given in `samplesets`.

    Examples:
        >>> a = dimod.SampleSet.from_samples(([-1, +1], 'ab'), dimod.SPIN, energy=-1)
        >>> b = dimod.SampleSet.from_samples(([-1, +1], 'ba'), dimod.SPIN, energy=-1)
        >>> ab = dimod.concatenate((a, b))
        >>> ab.record.sample
        array([[-1,  1],
               [ 1, -1]], dtype=int8)

    """
    iterator = iter(samplesets)

    # The first sample set fixes the vartype and the variable order that
    # every subsequent record is converted to match.
    try:
        head = next(iterator)
    except StopIteration:
        raise ValueError("samplesets must contain at least one SampleSet")

    vartype = head.vartype
    variables = head.variables

    all_records = [head.record]
    all_records.extend(_iter_records(iterator, vartype, variables))

    # dev note: I was able to get ~2x performance boost when trying to
    # implement the same functionality here by hand (I didn't know that
    # this function existed then). However I think it is better to use
    # numpy's function and rely on their testing etc. If however this becomes
    # a performance bottleneck in the future, it might be worth changing.
    merged = recfunctions.stack_arrays(all_records,
                                       defaults=defaults,
                                       asrecarray=True,
                                       usemask=False)

    return SampleSet(merged, variables, {}, vartype)
def function[concatenate, parameter[samplesets, defaults]]: constant[Combine SampleSets. Args: samplesets (iterable[:obj:`.SampleSet`): An iterable of sample sets. defaults (dict, optional): Dictionary mapping data vector names to the corresponding default values. Returns: :obj:`.SampleSet`: A sample set with the same vartype and variable order as the first given in `samplesets`. Examples: >>> a = dimod.SampleSet.from_samples(([-1, +1], 'ab'), dimod.SPIN, energy=-1) >>> b = dimod.SampleSet.from_samples(([-1, +1], 'ba'), dimod.SPIN, energy=-1) >>> ab = dimod.concatenate((a, b)) >>> ab.record.sample array([[-1, 1], [ 1, -1]], dtype=int8) ] variable[itertup] assign[=] call[name[iter], parameter[name[samplesets]]] <ast.Try object at 0x7da1b06303a0> variable[vartype] assign[=] name[first].vartype variable[variables] assign[=] name[first].variables variable[records] assign[=] list[[<ast.Attribute object at 0x7da1b07925c0>]] call[name[records].extend, parameter[call[name[_iter_records], parameter[name[itertup], name[vartype], name[variables]]]]] variable[record] assign[=] call[name[recfunctions].stack_arrays, parameter[name[records]]] return[call[name[SampleSet], parameter[name[record], name[variables], dictionary[[], []], name[vartype]]]]
keyword[def] identifier[concatenate] ( identifier[samplesets] , identifier[defaults] = keyword[None] ): literal[string] identifier[itertup] = identifier[iter] ( identifier[samplesets] ) keyword[try] : identifier[first] = identifier[next] ( identifier[itertup] ) keyword[except] identifier[StopIteration] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[vartype] = identifier[first] . identifier[vartype] identifier[variables] = identifier[first] . identifier[variables] identifier[records] =[ identifier[first] . identifier[record] ] identifier[records] . identifier[extend] ( identifier[_iter_records] ( identifier[itertup] , identifier[vartype] , identifier[variables] )) identifier[record] = identifier[recfunctions] . identifier[stack_arrays] ( identifier[records] , identifier[defaults] = identifier[defaults] , identifier[asrecarray] = keyword[True] , identifier[usemask] = keyword[False] ) keyword[return] identifier[SampleSet] ( identifier[record] , identifier[variables] ,{}, identifier[vartype] )
def concatenate(samplesets, defaults=None): """Combine SampleSets. Args: samplesets (iterable[:obj:`.SampleSet`): An iterable of sample sets. defaults (dict, optional): Dictionary mapping data vector names to the corresponding default values. Returns: :obj:`.SampleSet`: A sample set with the same vartype and variable order as the first given in `samplesets`. Examples: >>> a = dimod.SampleSet.from_samples(([-1, +1], 'ab'), dimod.SPIN, energy=-1) >>> b = dimod.SampleSet.from_samples(([-1, +1], 'ba'), dimod.SPIN, energy=-1) >>> ab = dimod.concatenate((a, b)) >>> ab.record.sample array([[-1, 1], [ 1, -1]], dtype=int8) """ itertup = iter(samplesets) try: first = next(itertup) # depends on [control=['try'], data=[]] except StopIteration: raise ValueError('samplesets must contain at least one SampleSet') # depends on [control=['except'], data=[]] vartype = first.vartype variables = first.variables records = [first.record] records.extend(_iter_records(itertup, vartype, variables)) # dev note: I was able to get ~2x performance boost when trying to # implement the same functionality here by hand (I didn't know that # this function existed then). However I think it is better to use # numpy's function and rely on their testing etc. If however this becomes # a performance bottleneck in the future, it might be worth changing. record = recfunctions.stack_arrays(records, defaults=defaults, asrecarray=True, usemask=False) return SampleSet(record, variables, {}, vartype)
def file_md5sum(filename): """ :param filename: The filename of the file to process :returns: The MD5 hash of the file """ hash_md5 = hashlib.md5() with open(filename, 'rb') as f: for chunk in iter(lambda: f.read(1024 * 4), b''): hash_md5.update(chunk) return hash_md5.hexdigest()
def function[file_md5sum, parameter[filename]]: constant[ :param filename: The filename of the file to process :returns: The MD5 hash of the file ] variable[hash_md5] assign[=] call[name[hashlib].md5, parameter[]] with call[name[open], parameter[name[filename], constant[rb]]] begin[:] for taget[name[chunk]] in starred[call[name[iter], parameter[<ast.Lambda object at 0x7da2043464a0>, constant[b'']]]] begin[:] call[name[hash_md5].update, parameter[name[chunk]]] return[call[name[hash_md5].hexdigest, parameter[]]]
keyword[def] identifier[file_md5sum] ( identifier[filename] ): literal[string] identifier[hash_md5] = identifier[hashlib] . identifier[md5] () keyword[with] identifier[open] ( identifier[filename] , literal[string] ) keyword[as] identifier[f] : keyword[for] identifier[chunk] keyword[in] identifier[iter] ( keyword[lambda] : identifier[f] . identifier[read] ( literal[int] * literal[int] ), literal[string] ): identifier[hash_md5] . identifier[update] ( identifier[chunk] ) keyword[return] identifier[hash_md5] . identifier[hexdigest] ()
def file_md5sum(filename): """ :param filename: The filename of the file to process :returns: The MD5 hash of the file """ hash_md5 = hashlib.md5() with open(filename, 'rb') as f: for chunk in iter(lambda : f.read(1024 * 4), b''): hash_md5.update(chunk) # depends on [control=['for'], data=['chunk']] # depends on [control=['with'], data=['f']] return hash_md5.hexdigest()
def lazy_load_modules(*modules): """ Decorator to load module to perform related operation for specific function and delete the module from imports once the task is done. GC frees the memory related to module during clean-up. """ def decorator(function): def wrapper(*args, **kwargs): module_dict = {} for module_string in modules: module = __import__(module_string) # Add `module` entry in `sys.modules`. After deleting the module # from `sys.modules` and re-importing the module don't update # the module entry in `sys.modules` dict sys.modules[module.__package__] = module reload_module(module) module_dict[module_string] = module func_response = function(*args, **kwargs) for module_string, module in module_dict.items(): # delete idna module delete_module(module_string) del module # delete reference to idna return func_response return wrapper return decorator
def function[lazy_load_modules, parameter[]]: constant[ Decorator to load module to perform related operation for specific function and delete the module from imports once the task is done. GC frees the memory related to module during clean-up. ] def function[decorator, parameter[function]]: def function[wrapper, parameter[]]: variable[module_dict] assign[=] dictionary[[], []] for taget[name[module_string]] in starred[name[modules]] begin[:] variable[module] assign[=] call[name[__import__], parameter[name[module_string]]] call[name[sys].modules][name[module].__package__] assign[=] name[module] call[name[reload_module], parameter[name[module]]] call[name[module_dict]][name[module_string]] assign[=] name[module] variable[func_response] assign[=] call[name[function], parameter[<ast.Starred object at 0x7da1b1ec3010>]] for taget[tuple[[<ast.Name object at 0x7da1b1ec3d60>, <ast.Name object at 0x7da1b1ec10c0>]]] in starred[call[name[module_dict].items, parameter[]]] begin[:] call[name[delete_module], parameter[name[module_string]]] <ast.Delete object at 0x7da1b1ec1630> return[name[func_response]] return[name[wrapper]] return[name[decorator]]
keyword[def] identifier[lazy_load_modules] (* identifier[modules] ): literal[string] keyword[def] identifier[decorator] ( identifier[function] ): keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): identifier[module_dict] ={} keyword[for] identifier[module_string] keyword[in] identifier[modules] : identifier[module] = identifier[__import__] ( identifier[module_string] ) identifier[sys] . identifier[modules] [ identifier[module] . identifier[__package__] ]= identifier[module] identifier[reload_module] ( identifier[module] ) identifier[module_dict] [ identifier[module_string] ]= identifier[module] identifier[func_response] = identifier[function] (* identifier[args] ,** identifier[kwargs] ) keyword[for] identifier[module_string] , identifier[module] keyword[in] identifier[module_dict] . identifier[items] (): identifier[delete_module] ( identifier[module_string] ) keyword[del] identifier[module] keyword[return] identifier[func_response] keyword[return] identifier[wrapper] keyword[return] identifier[decorator]
def lazy_load_modules(*modules): """ Decorator to load module to perform related operation for specific function and delete the module from imports once the task is done. GC frees the memory related to module during clean-up. """ def decorator(function): def wrapper(*args, **kwargs): module_dict = {} for module_string in modules: module = __import__(module_string) # Add `module` entry in `sys.modules`. After deleting the module # from `sys.modules` and re-importing the module don't update # the module entry in `sys.modules` dict sys.modules[module.__package__] = module reload_module(module) module_dict[module_string] = module # depends on [control=['for'], data=['module_string']] func_response = function(*args, **kwargs) for (module_string, module) in module_dict.items(): # delete idna module delete_module(module_string) del module # delete reference to idna # depends on [control=['for'], data=[]] return func_response return wrapper return decorator
def WriteInit(self, out): """Write a simple __init__.py for the generated client.""" printer = self._GetPrinter(out) if self.__init_wildcards_file: printer('"""Common imports for generated %s client library."""', self.__client_info.package) printer('# pylint:disable=wildcard-import') else: printer('"""Package marker file."""') printer() printer('import pkgutil') printer() if self.__init_wildcards_file: printer('from %s import *', self.__base_files_package) if self.__root_package == '.': import_prefix = '' else: import_prefix = '%s.' % self.__root_package printer('from %s%s import *', import_prefix, self.__client_info.client_rule_name) printer('from %s%s import *', import_prefix, self.__client_info.messages_rule_name) printer() printer('__path__ = pkgutil.extend_path(__path__, __name__)')
def function[WriteInit, parameter[self, out]]: constant[Write a simple __init__.py for the generated client.] variable[printer] assign[=] call[name[self]._GetPrinter, parameter[name[out]]] if name[self].__init_wildcards_file begin[:] call[name[printer], parameter[constant["""Common imports for generated %s client library."""], name[self].__client_info.package]] call[name[printer], parameter[constant[# pylint:disable=wildcard-import]]] call[name[printer], parameter[]] call[name[printer], parameter[constant[import pkgutil]]] call[name[printer], parameter[]] if name[self].__init_wildcards_file begin[:] call[name[printer], parameter[constant[from %s import *], name[self].__base_files_package]] if compare[name[self].__root_package equal[==] constant[.]] begin[:] variable[import_prefix] assign[=] constant[] call[name[printer], parameter[constant[from %s%s import *], name[import_prefix], name[self].__client_info.client_rule_name]] call[name[printer], parameter[constant[from %s%s import *], name[import_prefix], name[self].__client_info.messages_rule_name]] call[name[printer], parameter[]] call[name[printer], parameter[constant[__path__ = pkgutil.extend_path(__path__, __name__)]]]
keyword[def] identifier[WriteInit] ( identifier[self] , identifier[out] ): literal[string] identifier[printer] = identifier[self] . identifier[_GetPrinter] ( identifier[out] ) keyword[if] identifier[self] . identifier[__init_wildcards_file] : identifier[printer] ( literal[string] , identifier[self] . identifier[__client_info] . identifier[package] ) identifier[printer] ( literal[string] ) keyword[else] : identifier[printer] ( literal[string] ) identifier[printer] () identifier[printer] ( literal[string] ) identifier[printer] () keyword[if] identifier[self] . identifier[__init_wildcards_file] : identifier[printer] ( literal[string] , identifier[self] . identifier[__base_files_package] ) keyword[if] identifier[self] . identifier[__root_package] == literal[string] : identifier[import_prefix] = literal[string] keyword[else] : identifier[import_prefix] = literal[string] % identifier[self] . identifier[__root_package] identifier[printer] ( literal[string] , identifier[import_prefix] , identifier[self] . identifier[__client_info] . identifier[client_rule_name] ) identifier[printer] ( literal[string] , identifier[import_prefix] , identifier[self] . identifier[__client_info] . identifier[messages_rule_name] ) identifier[printer] () identifier[printer] ( literal[string] )
def WriteInit(self, out): """Write a simple __init__.py for the generated client.""" printer = self._GetPrinter(out) if self.__init_wildcards_file: printer('"""Common imports for generated %s client library."""', self.__client_info.package) printer('# pylint:disable=wildcard-import') # depends on [control=['if'], data=[]] else: printer('"""Package marker file."""') printer() printer('import pkgutil') printer() if self.__init_wildcards_file: printer('from %s import *', self.__base_files_package) if self.__root_package == '.': import_prefix = '' # depends on [control=['if'], data=[]] else: import_prefix = '%s.' % self.__root_package printer('from %s%s import *', import_prefix, self.__client_info.client_rule_name) printer('from %s%s import *', import_prefix, self.__client_info.messages_rule_name) printer() # depends on [control=['if'], data=[]] printer('__path__ = pkgutil.extend_path(__path__, __name__)')
def request(self, name, *args, **kwargs): r"""Send an API request or notification to nvim. It is rarely needed to call this function directly, as most API functions have python wrapper functions. The `api` object can be also be used to call API functions as methods: vim.api.err_write('ERROR\n', async_=True) vim.current.buffer.api.get_mark('.') is equivalent to vim.request('nvim_err_write', 'ERROR\n', async_=True) vim.request('nvim_buf_get_mark', vim.current.buffer, '.') Normally a blocking request will be sent. If the `async_` flag is present and True, a asynchronous notification is sent instead. This will never block, and the return value or error is ignored. """ if (self._session._loop_thread is not None and threading.current_thread() != self._session._loop_thread): msg = ("Request from non-main thread.\n" "Requests from different threads should be wrapped " "with nvim.async_call(cb, ...) \n{}\n" .format('\n'.join(format_stack(None, 5)[:-1]))) self.async_call(self._err_cb, msg) raise NvimError("request from non-main thread") decode = kwargs.pop('decode', self._decode) args = walk(self._to_nvim, args) res = self._session.request(name, *args, **kwargs) return walk(self._from_nvim, res, decode=decode)
def function[request, parameter[self, name]]: constant[Send an API request or notification to nvim. It is rarely needed to call this function directly, as most API functions have python wrapper functions. The `api` object can be also be used to call API functions as methods: vim.api.err_write('ERROR\n', async_=True) vim.current.buffer.api.get_mark('.') is equivalent to vim.request('nvim_err_write', 'ERROR\n', async_=True) vim.request('nvim_buf_get_mark', vim.current.buffer, '.') Normally a blocking request will be sent. If the `async_` flag is present and True, a asynchronous notification is sent instead. This will never block, and the return value or error is ignored. ] if <ast.BoolOp object at 0x7da1b22ac490> begin[:] variable[msg] assign[=] call[constant[Request from non-main thread. Requests from different threads should be wrapped with nvim.async_call(cb, ...) {} ].format, parameter[call[constant[ ].join, parameter[call[call[name[format_stack], parameter[constant[None], constant[5]]]][<ast.Slice object at 0x7da1b22aecb0>]]]]] call[name[self].async_call, parameter[name[self]._err_cb, name[msg]]] <ast.Raise object at 0x7da1b22ad600> variable[decode] assign[=] call[name[kwargs].pop, parameter[constant[decode], name[self]._decode]] variable[args] assign[=] call[name[walk], parameter[name[self]._to_nvim, name[args]]] variable[res] assign[=] call[name[self]._session.request, parameter[name[name], <ast.Starred object at 0x7da1b22ad150>]] return[call[name[walk], parameter[name[self]._from_nvim, name[res]]]]
keyword[def] identifier[request] ( identifier[self] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[if] ( identifier[self] . identifier[_session] . identifier[_loop_thread] keyword[is] keyword[not] keyword[None] keyword[and] identifier[threading] . identifier[current_thread] ()!= identifier[self] . identifier[_session] . identifier[_loop_thread] ): identifier[msg] =( literal[string] literal[string] literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[format_stack] ( keyword[None] , literal[int] )[:- literal[int] ]))) identifier[self] . identifier[async_call] ( identifier[self] . identifier[_err_cb] , identifier[msg] ) keyword[raise] identifier[NvimError] ( literal[string] ) identifier[decode] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_decode] ) identifier[args] = identifier[walk] ( identifier[self] . identifier[_to_nvim] , identifier[args] ) identifier[res] = identifier[self] . identifier[_session] . identifier[request] ( identifier[name] ,* identifier[args] ,** identifier[kwargs] ) keyword[return] identifier[walk] ( identifier[self] . identifier[_from_nvim] , identifier[res] , identifier[decode] = identifier[decode] )
def request(self, name, *args, **kwargs): """Send an API request or notification to nvim. It is rarely needed to call this function directly, as most API functions have python wrapper functions. The `api` object can be also be used to call API functions as methods: vim.api.err_write('ERROR\\n', async_=True) vim.current.buffer.api.get_mark('.') is equivalent to vim.request('nvim_err_write', 'ERROR\\n', async_=True) vim.request('nvim_buf_get_mark', vim.current.buffer, '.') Normally a blocking request will be sent. If the `async_` flag is present and True, a asynchronous notification is sent instead. This will never block, and the return value or error is ignored. """ if self._session._loop_thread is not None and threading.current_thread() != self._session._loop_thread: msg = 'Request from non-main thread.\nRequests from different threads should be wrapped with nvim.async_call(cb, ...) \n{}\n'.format('\n'.join(format_stack(None, 5)[:-1])) self.async_call(self._err_cb, msg) raise NvimError('request from non-main thread') # depends on [control=['if'], data=[]] decode = kwargs.pop('decode', self._decode) args = walk(self._to_nvim, args) res = self._session.request(name, *args, **kwargs) return walk(self._from_nvim, res, decode=decode)
def add_virtual_columns_cartesian_velocities_to_spherical(self, x="x", y="y", z="z", vx="vx", vy="vy", vz="vz", vr="vr", vlong="vlong", vlat="vlat", distance=None): """Concert velocities from a cartesian to a spherical coordinate system TODO: errors :param x: name of x column (input) :param y: y :param z: z :param vx: vx :param vy: vy :param vz: vz :param vr: name of the column for the radial velocity in the r direction (output) :param vlong: name of the column for the velocity component in the longitude direction (output) :param vlat: name of the column for the velocity component in the latitude direction, positive points to the north pole (output) :param distance: Expression for distance, if not given defaults to sqrt(x**2+y**2+z**2), but if this column already exists, passing this expression may lead to a better performance :return: """ # see http://www.astrosurf.com/jephem/library/li110spherCart_en.htm if distance is None: distance = "sqrt({x}**2+{y}**2+{z}**2)".format(**locals()) self.add_virtual_column(vr, "({x}*{vx}+{y}*{vy}+{z}*{vz})/{distance}".format(**locals())) self.add_virtual_column(vlong, "-({vx}*{y}-{x}*{vy})/sqrt({x}**2+{y}**2)".format(**locals())) self.add_virtual_column(vlat, "-({z}*({x}*{vx}+{y}*{vy}) - ({x}**2+{y}**2)*{vz})/( {distance}*sqrt({x}**2+{y}**2) )".format(**locals()))
def function[add_virtual_columns_cartesian_velocities_to_spherical, parameter[self, x, y, z, vx, vy, vz, vr, vlong, vlat, distance]]: constant[Concert velocities from a cartesian to a spherical coordinate system TODO: errors :param x: name of x column (input) :param y: y :param z: z :param vx: vx :param vy: vy :param vz: vz :param vr: name of the column for the radial velocity in the r direction (output) :param vlong: name of the column for the velocity component in the longitude direction (output) :param vlat: name of the column for the velocity component in the latitude direction, positive points to the north pole (output) :param distance: Expression for distance, if not given defaults to sqrt(x**2+y**2+z**2), but if this column already exists, passing this expression may lead to a better performance :return: ] if compare[name[distance] is constant[None]] begin[:] variable[distance] assign[=] call[constant[sqrt({x}**2+{y}**2+{z}**2)].format, parameter[]] call[name[self].add_virtual_column, parameter[name[vr], call[constant[({x}*{vx}+{y}*{vy}+{z}*{vz})/{distance}].format, parameter[]]]] call[name[self].add_virtual_column, parameter[name[vlong], call[constant[-({vx}*{y}-{x}*{vy})/sqrt({x}**2+{y}**2)].format, parameter[]]]] call[name[self].add_virtual_column, parameter[name[vlat], call[constant[-({z}*({x}*{vx}+{y}*{vy}) - ({x}**2+{y}**2)*{vz})/( {distance}*sqrt({x}**2+{y}**2) )].format, parameter[]]]]
keyword[def] identifier[add_virtual_columns_cartesian_velocities_to_spherical] ( identifier[self] , identifier[x] = literal[string] , identifier[y] = literal[string] , identifier[z] = literal[string] , identifier[vx] = literal[string] , identifier[vy] = literal[string] , identifier[vz] = literal[string] , identifier[vr] = literal[string] , identifier[vlong] = literal[string] , identifier[vlat] = literal[string] , identifier[distance] = keyword[None] ): literal[string] keyword[if] identifier[distance] keyword[is] keyword[None] : identifier[distance] = literal[string] . identifier[format] (** identifier[locals] ()) identifier[self] . identifier[add_virtual_column] ( identifier[vr] , literal[string] . identifier[format] (** identifier[locals] ())) identifier[self] . identifier[add_virtual_column] ( identifier[vlong] , literal[string] . identifier[format] (** identifier[locals] ())) identifier[self] . identifier[add_virtual_column] ( identifier[vlat] , literal[string] . identifier[format] (** identifier[locals] ()))
def add_virtual_columns_cartesian_velocities_to_spherical(self, x='x', y='y', z='z', vx='vx', vy='vy', vz='vz', vr='vr', vlong='vlong', vlat='vlat', distance=None): """Concert velocities from a cartesian to a spherical coordinate system TODO: errors :param x: name of x column (input) :param y: y :param z: z :param vx: vx :param vy: vy :param vz: vz :param vr: name of the column for the radial velocity in the r direction (output) :param vlong: name of the column for the velocity component in the longitude direction (output) :param vlat: name of the column for the velocity component in the latitude direction, positive points to the north pole (output) :param distance: Expression for distance, if not given defaults to sqrt(x**2+y**2+z**2), but if this column already exists, passing this expression may lead to a better performance :return: """ # see http://www.astrosurf.com/jephem/library/li110spherCart_en.htm if distance is None: distance = 'sqrt({x}**2+{y}**2+{z}**2)'.format(**locals()) # depends on [control=['if'], data=['distance']] self.add_virtual_column(vr, '({x}*{vx}+{y}*{vy}+{z}*{vz})/{distance}'.format(**locals())) self.add_virtual_column(vlong, '-({vx}*{y}-{x}*{vy})/sqrt({x}**2+{y}**2)'.format(**locals())) self.add_virtual_column(vlat, '-({z}*({x}*{vx}+{y}*{vy}) - ({x}**2+{y}**2)*{vz})/( {distance}*sqrt({x}**2+{y}**2) )'.format(**locals()))
def update_phase(self, environment, data, prediction, user, item, correct, time, answer_id, **kwargs): """ After the prediction update the environment and persist some information for the predictive model. Args: environment (proso.models.environment.Environment): environment where all the important data are persist data (object): data from the prepare phase user (int): identifier of the user answering the question item (int): identifier of the question item correct (bool): corretness of the answer """ pass
def function[update_phase, parameter[self, environment, data, prediction, user, item, correct, time, answer_id]]: constant[ After the prediction update the environment and persist some information for the predictive model. Args: environment (proso.models.environment.Environment): environment where all the important data are persist data (object): data from the prepare phase user (int): identifier of the user answering the question item (int): identifier of the question item correct (bool): corretness of the answer ] pass
keyword[def] identifier[update_phase] ( identifier[self] , identifier[environment] , identifier[data] , identifier[prediction] , identifier[user] , identifier[item] , identifier[correct] , identifier[time] , identifier[answer_id] ,** identifier[kwargs] ): literal[string] keyword[pass]
def update_phase(self, environment, data, prediction, user, item, correct, time, answer_id, **kwargs): """ After the prediction update the environment and persist some information for the predictive model. Args: environment (proso.models.environment.Environment): environment where all the important data are persist data (object): data from the prepare phase user (int): identifier of the user answering the question item (int): identifier of the question item correct (bool): corretness of the answer """ pass
def read_with_selection(func): """Decorate a Table read method to apply ``selection`` keyword """ def wrapper(*args, **kwargs): """Execute a function, then apply a selection filter """ # parse selection selection = kwargs.pop('selection', None) or [] # read table tab = func(*args, **kwargs) # apply selection if selection: return filter_table(tab, selection) return tab return _safe_wraps(wrapper, func)
def function[read_with_selection, parameter[func]]: constant[Decorate a Table read method to apply ``selection`` keyword ] def function[wrapper, parameter[]]: constant[Execute a function, then apply a selection filter ] variable[selection] assign[=] <ast.BoolOp object at 0x7da20e962470> variable[tab] assign[=] call[name[func], parameter[<ast.Starred object at 0x7da20e963970>]] if name[selection] begin[:] return[call[name[filter_table], parameter[name[tab], name[selection]]]] return[name[tab]] return[call[name[_safe_wraps], parameter[name[wrapper], name[func]]]]
keyword[def] identifier[read_with_selection] ( identifier[func] ): literal[string] keyword[def] identifier[wrapper] (* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[selection] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[None] ) keyword[or] [] identifier[tab] = identifier[func] (* identifier[args] ,** identifier[kwargs] ) keyword[if] identifier[selection] : keyword[return] identifier[filter_table] ( identifier[tab] , identifier[selection] ) keyword[return] identifier[tab] keyword[return] identifier[_safe_wraps] ( identifier[wrapper] , identifier[func] )
def read_with_selection(func): """Decorate a Table read method to apply ``selection`` keyword """ def wrapper(*args, **kwargs): """Execute a function, then apply a selection filter """ # parse selection selection = kwargs.pop('selection', None) or [] # read table tab = func(*args, **kwargs) # apply selection if selection: return filter_table(tab, selection) # depends on [control=['if'], data=[]] return tab return _safe_wraps(wrapper, func)
def send_signature_reminder(self, signature_id): """ Send a reminder email @signature_id: Id of signature @document_id: Id of document """ connection = Connection(self.token) connection.set_url(self.production, self.SIGNS_SEND_REMINDER_URL % signature_id) return connection.post_request()
def function[send_signature_reminder, parameter[self, signature_id]]: constant[ Send a reminder email @signature_id: Id of signature @document_id: Id of document ] variable[connection] assign[=] call[name[Connection], parameter[name[self].token]] call[name[connection].set_url, parameter[name[self].production, binary_operation[name[self].SIGNS_SEND_REMINDER_URL <ast.Mod object at 0x7da2590d6920> name[signature_id]]]] return[call[name[connection].post_request, parameter[]]]
keyword[def] identifier[send_signature_reminder] ( identifier[self] , identifier[signature_id] ): literal[string] identifier[connection] = identifier[Connection] ( identifier[self] . identifier[token] ) identifier[connection] . identifier[set_url] ( identifier[self] . identifier[production] , identifier[self] . identifier[SIGNS_SEND_REMINDER_URL] % identifier[signature_id] ) keyword[return] identifier[connection] . identifier[post_request] ()
def send_signature_reminder(self, signature_id): """ Send a reminder email @signature_id: Id of signature @document_id: Id of document """ connection = Connection(self.token) connection.set_url(self.production, self.SIGNS_SEND_REMINDER_URL % signature_id) return connection.post_request()
def get_tmhmm_predictions(self, tmhmm_results, custom_gene_mapping=None): """Parse TMHMM results and store in the representative sequences. This is a basic function to parse pre-run TMHMM results. Run TMHMM from the web service (http://www.cbs.dtu.dk/services/TMHMM/) by doing the following: 1. Write all representative sequences in the GEM-PRO using the function ``write_representative_sequences_file`` 2. Upload the file to http://www.cbs.dtu.dk/services/TMHMM/ and choose "Extensive, no graphics" as the output 3. Copy and paste the results (ignoring the top header and above "HELP with output formats") into a file and save it 4. Run this function on that file Args: tmhmm_results (str): Path to TMHMM results (long format) custom_gene_mapping (dict): Default parsing of TMHMM output is to look for the model gene IDs. If your output file contains IDs which differ from the model gene IDs, use this dictionary to map model gene IDs to result file IDs. Dictionary keys must match model genes. """ # TODO: refactor to Protein class? 
tmhmm_dict = ssbio.protein.sequence.properties.tmhmm.parse_tmhmm_long(tmhmm_results) counter = 0 for g in tqdm(self.genes_with_a_representative_sequence): if custom_gene_mapping: g_id = custom_gene_mapping[g.id] else: g_id = g.id if g_id in tmhmm_dict: log.debug('{}: loading TMHMM results'.format(g.id)) if not tmhmm_dict[g_id]: log.error("{}: missing TMHMM results".format(g.id)) g.protein.representative_sequence.annotations['num_tm_helix-tmhmm'] = tmhmm_dict[g_id]['num_tm_helices'] try: g.protein.representative_sequence.letter_annotations['TM-tmhmm'] = tmhmm_dict[g_id]['sequence'] counter += 1 except TypeError: log.error('Gene {}, SeqProp {}: sequence length mismatch between TMHMM results and representative ' 'sequence, unable to set letter annotation'.format(g_id, g.protein.representative_sequence.id)) else: log.error("{}: missing TMHMM results".format(g.id)) log.info('{}/{}: number of genes with TMHMM predictions loaded'.format(counter, len(self.genes)))
def function[get_tmhmm_predictions, parameter[self, tmhmm_results, custom_gene_mapping]]: constant[Parse TMHMM results and store in the representative sequences. This is a basic function to parse pre-run TMHMM results. Run TMHMM from the web service (http://www.cbs.dtu.dk/services/TMHMM/) by doing the following: 1. Write all representative sequences in the GEM-PRO using the function ``write_representative_sequences_file`` 2. Upload the file to http://www.cbs.dtu.dk/services/TMHMM/ and choose "Extensive, no graphics" as the output 3. Copy and paste the results (ignoring the top header and above "HELP with output formats") into a file and save it 4. Run this function on that file Args: tmhmm_results (str): Path to TMHMM results (long format) custom_gene_mapping (dict): Default parsing of TMHMM output is to look for the model gene IDs. If your output file contains IDs which differ from the model gene IDs, use this dictionary to map model gene IDs to result file IDs. Dictionary keys must match model genes. 
] variable[tmhmm_dict] assign[=] call[name[ssbio].protein.sequence.properties.tmhmm.parse_tmhmm_long, parameter[name[tmhmm_results]]] variable[counter] assign[=] constant[0] for taget[name[g]] in starred[call[name[tqdm], parameter[name[self].genes_with_a_representative_sequence]]] begin[:] if name[custom_gene_mapping] begin[:] variable[g_id] assign[=] call[name[custom_gene_mapping]][name[g].id] if compare[name[g_id] in name[tmhmm_dict]] begin[:] call[name[log].debug, parameter[call[constant[{}: loading TMHMM results].format, parameter[name[g].id]]]] if <ast.UnaryOp object at 0x7da2046217b0> begin[:] call[name[log].error, parameter[call[constant[{}: missing TMHMM results].format, parameter[name[g].id]]]] call[name[g].protein.representative_sequence.annotations][constant[num_tm_helix-tmhmm]] assign[=] call[call[name[tmhmm_dict]][name[g_id]]][constant[num_tm_helices]] <ast.Try object at 0x7da204620c70> call[name[log].info, parameter[call[constant[{}/{}: number of genes with TMHMM predictions loaded].format, parameter[name[counter], call[name[len], parameter[name[self].genes]]]]]]
keyword[def] identifier[get_tmhmm_predictions] ( identifier[self] , identifier[tmhmm_results] , identifier[custom_gene_mapping] = keyword[None] ): literal[string] identifier[tmhmm_dict] = identifier[ssbio] . identifier[protein] . identifier[sequence] . identifier[properties] . identifier[tmhmm] . identifier[parse_tmhmm_long] ( identifier[tmhmm_results] ) identifier[counter] = literal[int] keyword[for] identifier[g] keyword[in] identifier[tqdm] ( identifier[self] . identifier[genes_with_a_representative_sequence] ): keyword[if] identifier[custom_gene_mapping] : identifier[g_id] = identifier[custom_gene_mapping] [ identifier[g] . identifier[id] ] keyword[else] : identifier[g_id] = identifier[g] . identifier[id] keyword[if] identifier[g_id] keyword[in] identifier[tmhmm_dict] : identifier[log] . identifier[debug] ( literal[string] . identifier[format] ( identifier[g] . identifier[id] )) keyword[if] keyword[not] identifier[tmhmm_dict] [ identifier[g_id] ]: identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[g] . identifier[id] )) identifier[g] . identifier[protein] . identifier[representative_sequence] . identifier[annotations] [ literal[string] ]= identifier[tmhmm_dict] [ identifier[g_id] ][ literal[string] ] keyword[try] : identifier[g] . identifier[protein] . identifier[representative_sequence] . identifier[letter_annotations] [ literal[string] ]= identifier[tmhmm_dict] [ identifier[g_id] ][ literal[string] ] identifier[counter] += literal[int] keyword[except] identifier[TypeError] : identifier[log] . identifier[error] ( literal[string] literal[string] . identifier[format] ( identifier[g_id] , identifier[g] . identifier[protein] . identifier[representative_sequence] . identifier[id] )) keyword[else] : identifier[log] . identifier[error] ( literal[string] . identifier[format] ( identifier[g] . identifier[id] )) identifier[log] . identifier[info] ( literal[string] . 
identifier[format] ( identifier[counter] , identifier[len] ( identifier[self] . identifier[genes] )))
def get_tmhmm_predictions(self, tmhmm_results, custom_gene_mapping=None): """Parse TMHMM results and store in the representative sequences. This is a basic function to parse pre-run TMHMM results. Run TMHMM from the web service (http://www.cbs.dtu.dk/services/TMHMM/) by doing the following: 1. Write all representative sequences in the GEM-PRO using the function ``write_representative_sequences_file`` 2. Upload the file to http://www.cbs.dtu.dk/services/TMHMM/ and choose "Extensive, no graphics" as the output 3. Copy and paste the results (ignoring the top header and above "HELP with output formats") into a file and save it 4. Run this function on that file Args: tmhmm_results (str): Path to TMHMM results (long format) custom_gene_mapping (dict): Default parsing of TMHMM output is to look for the model gene IDs. If your output file contains IDs which differ from the model gene IDs, use this dictionary to map model gene IDs to result file IDs. Dictionary keys must match model genes. """ # TODO: refactor to Protein class? 
tmhmm_dict = ssbio.protein.sequence.properties.tmhmm.parse_tmhmm_long(tmhmm_results) counter = 0 for g in tqdm(self.genes_with_a_representative_sequence): if custom_gene_mapping: g_id = custom_gene_mapping[g.id] # depends on [control=['if'], data=[]] else: g_id = g.id if g_id in tmhmm_dict: log.debug('{}: loading TMHMM results'.format(g.id)) if not tmhmm_dict[g_id]: log.error('{}: missing TMHMM results'.format(g.id)) # depends on [control=['if'], data=[]] g.protein.representative_sequence.annotations['num_tm_helix-tmhmm'] = tmhmm_dict[g_id]['num_tm_helices'] try: g.protein.representative_sequence.letter_annotations['TM-tmhmm'] = tmhmm_dict[g_id]['sequence'] counter += 1 # depends on [control=['try'], data=[]] except TypeError: log.error('Gene {}, SeqProp {}: sequence length mismatch between TMHMM results and representative sequence, unable to set letter annotation'.format(g_id, g.protein.representative_sequence.id)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['g_id', 'tmhmm_dict']] else: log.error('{}: missing TMHMM results'.format(g.id)) # depends on [control=['for'], data=['g']] log.info('{}/{}: number of genes with TMHMM predictions loaded'.format(counter, len(self.genes)))
def absolute_path(user_path): """ Some paths must be made absolute, this will attempt to convert them. """ if os.path.abspath(user_path): return unix_path_coercion(user_path) else: try: openaccess_epub.utils.evaluate_relative_path(relative=user_path) except: raise ValidationError('This path could not be rendered as absolute')
def function[absolute_path, parameter[user_path]]: constant[ Some paths must be made absolute, this will attempt to convert them. ] if call[name[os].path.abspath, parameter[name[user_path]]] begin[:] return[call[name[unix_path_coercion], parameter[name[user_path]]]]
keyword[def] identifier[absolute_path] ( identifier[user_path] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[abspath] ( identifier[user_path] ): keyword[return] identifier[unix_path_coercion] ( identifier[user_path] ) keyword[else] : keyword[try] : identifier[openaccess_epub] . identifier[utils] . identifier[evaluate_relative_path] ( identifier[relative] = identifier[user_path] ) keyword[except] : keyword[raise] identifier[ValidationError] ( literal[string] )
def absolute_path(user_path): """ Some paths must be made absolute, this will attempt to convert them. """ if os.path.abspath(user_path): return unix_path_coercion(user_path) # depends on [control=['if'], data=[]] else: try: openaccess_epub.utils.evaluate_relative_path(relative=user_path) # depends on [control=['try'], data=[]] except: raise ValidationError('This path could not be rendered as absolute') # depends on [control=['except'], data=[]]
def locate(self, point, _verify=True): r"""Find a point on the current surface. Solves for :math:`s` and :math:`t` in :math:`B(s, t) = p`. This method acts as a (partial) inverse to :meth:`evaluate_cartesian`. .. warning:: A unique solution is only guaranteed if the current surface is valid. This code assumes a valid surface, but doesn't check. .. image:: ../../images/surface_locate.png :align: center .. doctest:: surface-locate >>> nodes = np.asfortranarray([ ... [0.0, 0.5 , 1.0, 0.25, 0.75, 0.0], ... [0.0, -0.25, 0.0, 0.5 , 0.75, 1.0], ... ]) >>> surface = bezier.Surface(nodes, degree=2) >>> point = np.asfortranarray([ ... [0.59375], ... [0.25 ], ... ]) >>> s, t = surface.locate(point) >>> s 0.5 >>> t 0.25 .. testcleanup:: surface-locate import make_images make_images.surface_locate(surface, point) Args: point (numpy.ndarray): A (``D x 1``) point on the surface, where :math:`D` is the dimension of the surface. _verify (Optional[bool]): Indicates if extra caution should be used to verify assumptions about the inputs. Can be disabled to speed up execution time. Defaults to :data:`True`. Returns: Optional[Tuple[float, float]]: The :math:`s` and :math:`t` values corresponding to ``point`` or :data:`None` if the point is not on the surface. Raises: NotImplementedError: If the surface isn't in :math:`\mathbf{R}^2`. ValueError: If the dimension of the ``point`` doesn't match the dimension of the current surface. """ if _verify: if self._dimension != 2: raise NotImplementedError("Only 2D surfaces supported.") if point.shape != (self._dimension, 1): point_dimensions = " x ".join( str(dimension) for dimension in point.shape ) msg = _LOCATE_ERROR_TEMPLATE.format( self._dimension, self._dimension, point, point_dimensions ) raise ValueError(msg) return _surface_intersection.locate_point( self._nodes, self._degree, point[0, 0], point[1, 0] )
def function[locate, parameter[self, point, _verify]]: constant[Find a point on the current surface. Solves for :math:`s` and :math:`t` in :math:`B(s, t) = p`. This method acts as a (partial) inverse to :meth:`evaluate_cartesian`. .. warning:: A unique solution is only guaranteed if the current surface is valid. This code assumes a valid surface, but doesn't check. .. image:: ../../images/surface_locate.png :align: center .. doctest:: surface-locate >>> nodes = np.asfortranarray([ ... [0.0, 0.5 , 1.0, 0.25, 0.75, 0.0], ... [0.0, -0.25, 0.0, 0.5 , 0.75, 1.0], ... ]) >>> surface = bezier.Surface(nodes, degree=2) >>> point = np.asfortranarray([ ... [0.59375], ... [0.25 ], ... ]) >>> s, t = surface.locate(point) >>> s 0.5 >>> t 0.25 .. testcleanup:: surface-locate import make_images make_images.surface_locate(surface, point) Args: point (numpy.ndarray): A (``D x 1``) point on the surface, where :math:`D` is the dimension of the surface. _verify (Optional[bool]): Indicates if extra caution should be used to verify assumptions about the inputs. Can be disabled to speed up execution time. Defaults to :data:`True`. Returns: Optional[Tuple[float, float]]: The :math:`s` and :math:`t` values corresponding to ``point`` or :data:`None` if the point is not on the surface. Raises: NotImplementedError: If the surface isn't in :math:`\mathbf{R}^2`. ValueError: If the dimension of the ``point`` doesn't match the dimension of the current surface. 
] if name[_verify] begin[:] if compare[name[self]._dimension not_equal[!=] constant[2]] begin[:] <ast.Raise object at 0x7da204620ca0> if compare[name[point].shape not_equal[!=] tuple[[<ast.Attribute object at 0x7da204621f60>, <ast.Constant object at 0x7da1b1849240>]]] begin[:] variable[point_dimensions] assign[=] call[constant[ x ].join, parameter[<ast.GeneratorExp object at 0x7da1b184a0e0>]] variable[msg] assign[=] call[name[_LOCATE_ERROR_TEMPLATE].format, parameter[name[self]._dimension, name[self]._dimension, name[point], name[point_dimensions]]] <ast.Raise object at 0x7da1b184bc40> return[call[name[_surface_intersection].locate_point, parameter[name[self]._nodes, name[self]._degree, call[name[point]][tuple[[<ast.Constant object at 0x7da1b184a500>, <ast.Constant object at 0x7da1b184b8b0>]]], call[name[point]][tuple[[<ast.Constant object at 0x7da1b1848220>, <ast.Constant object at 0x7da1b1849660>]]]]]]
keyword[def] identifier[locate] ( identifier[self] , identifier[point] , identifier[_verify] = keyword[True] ): literal[string] keyword[if] identifier[_verify] : keyword[if] identifier[self] . identifier[_dimension] != literal[int] : keyword[raise] identifier[NotImplementedError] ( literal[string] ) keyword[if] identifier[point] . identifier[shape] !=( identifier[self] . identifier[_dimension] , literal[int] ): identifier[point_dimensions] = literal[string] . identifier[join] ( identifier[str] ( identifier[dimension] ) keyword[for] identifier[dimension] keyword[in] identifier[point] . identifier[shape] ) identifier[msg] = identifier[_LOCATE_ERROR_TEMPLATE] . identifier[format] ( identifier[self] . identifier[_dimension] , identifier[self] . identifier[_dimension] , identifier[point] , identifier[point_dimensions] ) keyword[raise] identifier[ValueError] ( identifier[msg] ) keyword[return] identifier[_surface_intersection] . identifier[locate_point] ( identifier[self] . identifier[_nodes] , identifier[self] . identifier[_degree] , identifier[point] [ literal[int] , literal[int] ], identifier[point] [ literal[int] , literal[int] ] )
def locate(self, point, _verify=True): """Find a point on the current surface. Solves for :math:`s` and :math:`t` in :math:`B(s, t) = p`. This method acts as a (partial) inverse to :meth:`evaluate_cartesian`. .. warning:: A unique solution is only guaranteed if the current surface is valid. This code assumes a valid surface, but doesn't check. .. image:: ../../images/surface_locate.png :align: center .. doctest:: surface-locate >>> nodes = np.asfortranarray([ ... [0.0, 0.5 , 1.0, 0.25, 0.75, 0.0], ... [0.0, -0.25, 0.0, 0.5 , 0.75, 1.0], ... ]) >>> surface = bezier.Surface(nodes, degree=2) >>> point = np.asfortranarray([ ... [0.59375], ... [0.25 ], ... ]) >>> s, t = surface.locate(point) >>> s 0.5 >>> t 0.25 .. testcleanup:: surface-locate import make_images make_images.surface_locate(surface, point) Args: point (numpy.ndarray): A (``D x 1``) point on the surface, where :math:`D` is the dimension of the surface. _verify (Optional[bool]): Indicates if extra caution should be used to verify assumptions about the inputs. Can be disabled to speed up execution time. Defaults to :data:`True`. Returns: Optional[Tuple[float, float]]: The :math:`s` and :math:`t` values corresponding to ``point`` or :data:`None` if the point is not on the surface. Raises: NotImplementedError: If the surface isn't in :math:`\\mathbf{R}^2`. ValueError: If the dimension of the ``point`` doesn't match the dimension of the current surface. """ if _verify: if self._dimension != 2: raise NotImplementedError('Only 2D surfaces supported.') # depends on [control=['if'], data=[]] if point.shape != (self._dimension, 1): point_dimensions = ' x '.join((str(dimension) for dimension in point.shape)) msg = _LOCATE_ERROR_TEMPLATE.format(self._dimension, self._dimension, point, point_dimensions) raise ValueError(msg) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return _surface_intersection.locate_point(self._nodes, self._degree, point[0, 0], point[1, 0])
def remove_accessibility_type(self, accessibility_type=None): """Removes an accessibility type. :param accessibility_type: accessibility type to remove :type accessibility_type: ``osid.type.Type`` :raise: ``NoAccess`` -- ``Metadata.isReadOnly()`` is ``true`` :raise: ``NotFound`` -- acessibility type not found :raise: ``NullArgument`` -- ``accessibility_type`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ if accessibility_type is None: raise NullArgument metadata = Metadata(**settings.METADATA['accessibility_type']) if metadata.is_read_only() or metadata.is_required(): raise NoAccess() if (accessibility_type._my_map['id']) not in self._my_map['accessibility_type']: raise NotFound() self._my_map['accessibility_types'].remove(accessibility_type._my_map['id'])
def function[remove_accessibility_type, parameter[self, accessibility_type]]: constant[Removes an accessibility type. :param accessibility_type: accessibility type to remove :type accessibility_type: ``osid.type.Type`` :raise: ``NoAccess`` -- ``Metadata.isReadOnly()`` is ``true`` :raise: ``NotFound`` -- acessibility type not found :raise: ``NullArgument`` -- ``accessibility_type`` is ``null`` *compliance: mandatory -- This method must be implemented.* ] if compare[name[accessibility_type] is constant[None]] begin[:] <ast.Raise object at 0x7da20c6e7e80> variable[metadata] assign[=] call[name[Metadata], parameter[]] if <ast.BoolOp object at 0x7da20c6e5810> begin[:] <ast.Raise object at 0x7da20c6e5480> if compare[call[name[accessibility_type]._my_map][constant[id]] <ast.NotIn object at 0x7da2590d7190> call[name[self]._my_map][constant[accessibility_type]]] begin[:] <ast.Raise object at 0x7da20c6e4e50> call[call[name[self]._my_map][constant[accessibility_types]].remove, parameter[call[name[accessibility_type]._my_map][constant[id]]]]
keyword[def] identifier[remove_accessibility_type] ( identifier[self] , identifier[accessibility_type] = keyword[None] ): literal[string] keyword[if] identifier[accessibility_type] keyword[is] keyword[None] : keyword[raise] identifier[NullArgument] identifier[metadata] = identifier[Metadata] (** identifier[settings] . identifier[METADATA] [ literal[string] ]) keyword[if] identifier[metadata] . identifier[is_read_only] () keyword[or] identifier[metadata] . identifier[is_required] (): keyword[raise] identifier[NoAccess] () keyword[if] ( identifier[accessibility_type] . identifier[_my_map] [ literal[string] ]) keyword[not] keyword[in] identifier[self] . identifier[_my_map] [ literal[string] ]: keyword[raise] identifier[NotFound] () identifier[self] . identifier[_my_map] [ literal[string] ]. identifier[remove] ( identifier[accessibility_type] . identifier[_my_map] [ literal[string] ])
def remove_accessibility_type(self, accessibility_type=None): """Removes an accessibility type. :param accessibility_type: accessibility type to remove :type accessibility_type: ``osid.type.Type`` :raise: ``NoAccess`` -- ``Metadata.isReadOnly()`` is ``true`` :raise: ``NotFound`` -- acessibility type not found :raise: ``NullArgument`` -- ``accessibility_type`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ if accessibility_type is None: raise NullArgument # depends on [control=['if'], data=[]] metadata = Metadata(**settings.METADATA['accessibility_type']) if metadata.is_read_only() or metadata.is_required(): raise NoAccess() # depends on [control=['if'], data=[]] if accessibility_type._my_map['id'] not in self._my_map['accessibility_type']: raise NotFound() # depends on [control=['if'], data=[]] self._my_map['accessibility_types'].remove(accessibility_type._my_map['id'])
def bdib(self, ticker, start_datetime, end_datetime, event_type, interval, elms=None): """ Get Open, High, Low, Close, Volume, and numEvents for a ticker. Return pandas DataFrame Parameters ---------- ticker: string String corresponding to ticker start_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS end_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS event_type: string {TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK} Requested data event type interval: int {1... 1440} Length of time bars elms: list of tuples List of tuples where each tuple corresponds to the other elements to be set. Refer to the IntradayBarRequest section in the 'Services & schemas reference guide' for more info on these values """ elms = [] if not elms else elms # flush event queue in case previous call errored out logger = _get_logger(self.debug) while(self._session.tryNextEvent()): pass # Create and fill the request for the historical data request = self.refDataService.createRequest('IntradayBarRequest') request.set('security', ticker) request.set('eventType', event_type) request.set('interval', interval) # bar interval in minutes request.set('startDateTime', start_datetime) request.set('endDateTime', end_datetime) for name, val in elms: request.set(name, val) logger.info('Sending Request:\n{}'.format(request)) # Send the request self._session.sendRequest(request, identity=self._identity) # Process received events data = [] flds = ['open', 'high', 'low', 'close', 'volume', 'numEvents'] for msg in self._receive_events(): d = msg['element']['IntradayBarResponse'] for bar in d['barData']['barTickData']: data.append(bar['barTickData']) data = pd.DataFrame(data).set_index('time').sort_index().loc[:, flds] return data
def function[bdib, parameter[self, ticker, start_datetime, end_datetime, event_type, interval, elms]]: constant[ Get Open, High, Low, Close, Volume, and numEvents for a ticker. Return pandas DataFrame Parameters ---------- ticker: string String corresponding to ticker start_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS end_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS event_type: string {TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK} Requested data event type interval: int {1... 1440} Length of time bars elms: list of tuples List of tuples where each tuple corresponds to the other elements to be set. Refer to the IntradayBarRequest section in the 'Services & schemas reference guide' for more info on these values ] variable[elms] assign[=] <ast.IfExp object at 0x7da1b0061db0> variable[logger] assign[=] call[name[_get_logger], parameter[name[self].debug]] while call[name[self]._session.tryNextEvent, parameter[]] begin[:] pass variable[request] assign[=] call[name[self].refDataService.createRequest, parameter[constant[IntradayBarRequest]]] call[name[request].set, parameter[constant[security], name[ticker]]] call[name[request].set, parameter[constant[eventType], name[event_type]]] call[name[request].set, parameter[constant[interval], name[interval]]] call[name[request].set, parameter[constant[startDateTime], name[start_datetime]]] call[name[request].set, parameter[constant[endDateTime], name[end_datetime]]] for taget[tuple[[<ast.Name object at 0x7da1b0062ce0>, <ast.Name object at 0x7da1b0062230>]]] in starred[name[elms]] begin[:] call[name[request].set, parameter[name[name], name[val]]] call[name[logger].info, parameter[call[constant[Sending Request: {}].format, parameter[name[request]]]]] call[name[self]._session.sendRequest, parameter[name[request]]] variable[data] assign[=] list[[]] variable[flds] assign[=] list[[<ast.Constant object at 0x7da1b00615d0>, <ast.Constant object at 0x7da1b0061ea0>, <ast.Constant object at 
0x7da1b00629b0>, <ast.Constant object at 0x7da1b00624d0>, <ast.Constant object at 0x7da1b0061060>, <ast.Constant object at 0x7da1b00633d0>]] for taget[name[msg]] in starred[call[name[self]._receive_events, parameter[]]] begin[:] variable[d] assign[=] call[call[name[msg]][constant[element]]][constant[IntradayBarResponse]] for taget[name[bar]] in starred[call[call[name[d]][constant[barData]]][constant[barTickData]]] begin[:] call[name[data].append, parameter[call[name[bar]][constant[barTickData]]]] variable[data] assign[=] call[call[call[call[name[pd].DataFrame, parameter[name[data]]].set_index, parameter[constant[time]]].sort_index, parameter[]].loc][tuple[[<ast.Slice object at 0x7da1b009c910>, <ast.Name object at 0x7da1b009f340>]]] return[name[data]]
keyword[def] identifier[bdib] ( identifier[self] , identifier[ticker] , identifier[start_datetime] , identifier[end_datetime] , identifier[event_type] , identifier[interval] , identifier[elms] = keyword[None] ): literal[string] identifier[elms] =[] keyword[if] keyword[not] identifier[elms] keyword[else] identifier[elms] identifier[logger] = identifier[_get_logger] ( identifier[self] . identifier[debug] ) keyword[while] ( identifier[self] . identifier[_session] . identifier[tryNextEvent] ()): keyword[pass] identifier[request] = identifier[self] . identifier[refDataService] . identifier[createRequest] ( literal[string] ) identifier[request] . identifier[set] ( literal[string] , identifier[ticker] ) identifier[request] . identifier[set] ( literal[string] , identifier[event_type] ) identifier[request] . identifier[set] ( literal[string] , identifier[interval] ) identifier[request] . identifier[set] ( literal[string] , identifier[start_datetime] ) identifier[request] . identifier[set] ( literal[string] , identifier[end_datetime] ) keyword[for] identifier[name] , identifier[val] keyword[in] identifier[elms] : identifier[request] . identifier[set] ( identifier[name] , identifier[val] ) identifier[logger] . identifier[info] ( literal[string] . identifier[format] ( identifier[request] )) identifier[self] . identifier[_session] . identifier[sendRequest] ( identifier[request] , identifier[identity] = identifier[self] . identifier[_identity] ) identifier[data] =[] identifier[flds] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[msg] keyword[in] identifier[self] . identifier[_receive_events] (): identifier[d] = identifier[msg] [ literal[string] ][ literal[string] ] keyword[for] identifier[bar] keyword[in] identifier[d] [ literal[string] ][ literal[string] ]: identifier[data] . identifier[append] ( identifier[bar] [ literal[string] ]) identifier[data] = identifier[pd] . 
identifier[DataFrame] ( identifier[data] ). identifier[set_index] ( literal[string] ). identifier[sort_index] (). identifier[loc] [:, identifier[flds] ] keyword[return] identifier[data]
def bdib(self, ticker, start_datetime, end_datetime, event_type, interval, elms=None): """ Get Open, High, Low, Close, Volume, and numEvents for a ticker. Return pandas DataFrame Parameters ---------- ticker: string String corresponding to ticker start_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS end_datetime: string UTC datetime in format YYYY-mm-ddTHH:MM:SS event_type: string {TRADE, BID, ASK, BID_BEST, ASK_BEST, BEST_BID, BEST_ASK} Requested data event type interval: int {1... 1440} Length of time bars elms: list of tuples List of tuples where each tuple corresponds to the other elements to be set. Refer to the IntradayBarRequest section in the 'Services & schemas reference guide' for more info on these values """ elms = [] if not elms else elms # flush event queue in case previous call errored out logger = _get_logger(self.debug) while self._session.tryNextEvent(): pass # depends on [control=['while'], data=[]] # Create and fill the request for the historical data request = self.refDataService.createRequest('IntradayBarRequest') request.set('security', ticker) request.set('eventType', event_type) request.set('interval', interval) # bar interval in minutes request.set('startDateTime', start_datetime) request.set('endDateTime', end_datetime) for (name, val) in elms: request.set(name, val) # depends on [control=['for'], data=[]] logger.info('Sending Request:\n{}'.format(request)) # Send the request self._session.sendRequest(request, identity=self._identity) # Process received events data = [] flds = ['open', 'high', 'low', 'close', 'volume', 'numEvents'] for msg in self._receive_events(): d = msg['element']['IntradayBarResponse'] for bar in d['barData']['barTickData']: data.append(bar['barTickData']) # depends on [control=['for'], data=['bar']] # depends on [control=['for'], data=['msg']] data = pd.DataFrame(data).set_index('time').sort_index().loc[:, flds] return data