code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def find_args(self, text, start=None):
    """Locate the argument span of the next call-like construct in *text*.

    Scans forward from *start* (default 0) for the configured opening
    delimiter, then walks separator hits until the matching closing
    delimiter is seen.

    :returns: ``(open_index, close_index)`` on success, otherwise
              ``self.NOT_FOUND``.
    """
    if start is None:
        start = 0
    opening = text.find(self.__begin, start)
    if opening == -1:
        return self.NOT_FOUND
    # Walk separator positions until we land on the closing delimiter.
    cursor = opening + 1
    while True:
        hit = self.__find_args_separator(text, cursor)
        if hit == -1:
            return self.NOT_FOUND
        if text[hit] == self.__end:
            return opening, hit
        cursor = hit + 1
def function[find_args, parameter[self, text, start]]: constant[implementation details] if compare[name[start] is constant[None]] begin[:] variable[start] assign[=] constant[0] variable[first_occurance] assign[=] call[name[text].find, parameter[name[self].__begin, name[start]]] if compare[name[first_occurance] equal[==] <ast.UnaryOp object at 0x7da1b26af2b0>] begin[:] return[name[self].NOT_FOUND] <ast.Tuple object at 0x7da1b26ad600> assign[=] tuple[[<ast.BinOp object at 0x7da1b26ae4a0>, <ast.Constant object at 0x7da1b26acaf0>]] while constant[True] begin[:] variable[found] assign[=] call[name[self].__find_args_separator, parameter[name[text], name[previous_found]]] if compare[name[found] equal[==] <ast.UnaryOp object at 0x7da1b26ae3b0>] begin[:] return[name[self].NOT_FOUND]
keyword[def] identifier[find_args] ( identifier[self] , identifier[text] , identifier[start] = keyword[None] ): literal[string] keyword[if] identifier[start] keyword[is] keyword[None] : identifier[start] = literal[int] identifier[first_occurance] = identifier[text] . identifier[find] ( identifier[self] . identifier[__begin] , identifier[start] ) keyword[if] identifier[first_occurance] ==- literal[int] : keyword[return] identifier[self] . identifier[NOT_FOUND] identifier[previous_found] , identifier[found] = identifier[first_occurance] + literal[int] , literal[int] keyword[while] keyword[True] : identifier[found] = identifier[self] . identifier[__find_args_separator] ( identifier[text] , identifier[previous_found] ) keyword[if] identifier[found] ==- literal[int] : keyword[return] identifier[self] . identifier[NOT_FOUND] keyword[elif] identifier[text] [ identifier[found] ]== identifier[self] . identifier[__end] : keyword[return] identifier[first_occurance] , identifier[found] keyword[else] : identifier[previous_found] = identifier[found] + literal[int]
def find_args(self, text, start=None): """implementation details""" if start is None: start = 0 # depends on [control=['if'], data=['start']] first_occurance = text.find(self.__begin, start) if first_occurance == -1: return self.NOT_FOUND # depends on [control=['if'], data=[]] (previous_found, found) = (first_occurance + 1, 0) while True: found = self.__find_args_separator(text, previous_found) if found == -1: return self.NOT_FOUND # depends on [control=['if'], data=[]] elif text[found] == self.__end: return (first_occurance, found) # depends on [control=['if'], data=[]] else: previous_found = found + 1 # depends on [control=['while'], data=[]]
def reload(self, message):
    """Restore this account's state from a serialized ``message`` mapping.

    Arguments:
        message {[type]} -- [description]
    """
    # (attribute on self, key in message) — order preserved from the
    # original assignment sequence.
    field_map = (
        ('phone', 'phone'),
        ('level', 'level'),
        ('utype', 'utype'),
        ('coins', 'coins'),
        ('wechat_id', 'wechat_id'),
        ('coins_history', 'coins_history'),
        ('money', 'money'),
        # NOTE(review): 'subuscribed_strategy' looks like a typo but is a
        # persisted key — kept byte-identical for data compatibility.
        ('_subscribed_strategy', 'subuscribed_strategy'),
        ('_subscribed_code', 'subscribed_code'),
        ('username', 'username'),
        ('password', 'password'),
        ('user_cookie', 'user_cookie'),
    )
    for attr, key in field_map:
        setattr(self, attr, message.get(key))
    # Rebuild the portfolio map from the portfolio collection rather than
    # from the message itself.
    cursor = DATABASE.portfolio.find(
        {'user_cookie': self.user_cookie},
        {'portfolio_cookie': 1, '_id': 0},
    )
    cookies = [doc['portfolio_cookie'] for doc in cursor]
    if len(cookies) > 0:
        self.portfolio_list = {
            cookie: QA_Portfolio(
                user_cookie=self.user_cookie,
                portfolio_cookie=cookie,
            )
            for cookie in cookies
        }
    else:
        self.portfolio_list = {}
def function[reload, parameter[self, message]]: constant[恢复方法 Arguments: message {[type]} -- [description] ] name[self].phone assign[=] call[name[message].get, parameter[constant[phone]]] name[self].level assign[=] call[name[message].get, parameter[constant[level]]] name[self].utype assign[=] call[name[message].get, parameter[constant[utype]]] name[self].coins assign[=] call[name[message].get, parameter[constant[coins]]] name[self].wechat_id assign[=] call[name[message].get, parameter[constant[wechat_id]]] name[self].coins_history assign[=] call[name[message].get, parameter[constant[coins_history]]] name[self].money assign[=] call[name[message].get, parameter[constant[money]]] name[self]._subscribed_strategy assign[=] call[name[message].get, parameter[constant[subuscribed_strategy]]] name[self]._subscribed_code assign[=] call[name[message].get, parameter[constant[subscribed_code]]] name[self].username assign[=] call[name[message].get, parameter[constant[username]]] name[self].password assign[=] call[name[message].get, parameter[constant[password]]] name[self].user_cookie assign[=] call[name[message].get, parameter[constant[user_cookie]]] variable[portfolio_list] assign[=] <ast.ListComp object at 0x7da1b20c9960> if compare[call[name[len], parameter[name[portfolio_list]]] greater[>] constant[0]] begin[:] name[self].portfolio_list assign[=] call[name[dict], parameter[call[name[zip], parameter[name[portfolio_list], <ast.ListComp object at 0x7da1b20c8cd0>]]]]
keyword[def] identifier[reload] ( identifier[self] , identifier[message] ): literal[string] identifier[self] . identifier[phone] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[level] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[utype] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[coins] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[wechat_id] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[coins_history] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[money] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[_subscribed_strategy] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[_subscribed_code] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[username] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[password] = identifier[message] . identifier[get] ( literal[string] ) identifier[self] . identifier[user_cookie] = identifier[message] . identifier[get] ( literal[string] ) identifier[portfolio_list] =[ identifier[item] [ literal[string] ] keyword[for] identifier[item] keyword[in] identifier[DATABASE] . identifier[portfolio] . identifier[find] ( { literal[string] : identifier[self] . identifier[user_cookie] },{ literal[string] : literal[int] , literal[string] : literal[int] })] keyword[if] identifier[len] ( identifier[portfolio_list] )> literal[int] : identifier[self] . identifier[portfolio_list] = identifier[dict] ( identifier[zip] ( identifier[portfolio_list] , [ identifier[QA_Portfolio] ( identifier[user_cookie] = identifier[self] . 
identifier[user_cookie] , identifier[portfolio_cookie] = identifier[item] ) keyword[for] identifier[item] keyword[in] identifier[portfolio_list] ] ) ) keyword[else] : identifier[self] . identifier[portfolio_list] ={}
def reload(self, message): """恢复方法 Arguments: message {[type]} -- [description] """ self.phone = message.get('phone') self.level = message.get('level') self.utype = message.get('utype') self.coins = message.get('coins') self.wechat_id = message.get('wechat_id') self.coins_history = message.get('coins_history') self.money = message.get('money') self._subscribed_strategy = message.get('subuscribed_strategy') self._subscribed_code = message.get('subscribed_code') self.username = message.get('username') self.password = message.get('password') self.user_cookie = message.get('user_cookie') # portfolio_list = [item['portfolio_cookie'] for item in DATABASE.portfolio.find({'user_cookie': self.user_cookie}, {'portfolio_cookie': 1, '_id': 0})] # portfolio_list = message.get('portfolio_list') if len(portfolio_list) > 0: self.portfolio_list = dict(zip(portfolio_list, [QA_Portfolio(user_cookie=self.user_cookie, portfolio_cookie=item) for item in portfolio_list])) # depends on [control=['if'], data=[]] else: self.portfolio_list = {}
def fingerprint(self):
    """The fingerprint of the current process.

    This can either read the current fingerprint from the running
    process's psutil.Process.cmdline (if the managed process supports
    that) or from the `ProcessManager` metadata.

    :returns: The fingerprint of the running process as read from the
              process table, ProcessManager metadata or `None`.
    :rtype: string
    """
    # Prefer the live process table; fall back to stored metadata.
    from_cmdline = self.parse_fingerprint(self.cmdline)
    if from_cmdline:
        return from_cmdline
    return self.read_metadata_by_name(self.name, self.FINGERPRINT_KEY)
def function[fingerprint, parameter[self]]: constant[The fingerprint of the current process. This can either read the current fingerprint from the running process's psutil.Process.cmdline (if the managed process supports that) or from the `ProcessManager` metadata. :returns: The fingerprint of the running process as read from the process table, ProcessManager metadata or `None`. :rtype: string ] return[<ast.BoolOp object at 0x7da1b22d1360>]
keyword[def] identifier[fingerprint] ( identifier[self] ): literal[string] keyword[return] ( identifier[self] . identifier[parse_fingerprint] ( identifier[self] . identifier[cmdline] ) keyword[or] identifier[self] . identifier[read_metadata_by_name] ( identifier[self] . identifier[name] , identifier[self] . identifier[FINGERPRINT_KEY] ) )
def fingerprint(self): """The fingerprint of the current process. This can either read the current fingerprint from the running process's psutil.Process.cmdline (if the managed process supports that) or from the `ProcessManager` metadata. :returns: The fingerprint of the running process as read from the process table, ProcessManager metadata or `None`. :rtype: string """ return self.parse_fingerprint(self.cmdline) or self.read_metadata_by_name(self.name, self.FINGERPRINT_KEY)
def _emit(self, event):
    """
    If the given event is a stat event, send a I{StatUpdate} command.
    """
    # Python 2 code (iteritems/unicode) — preserved as-is.
    is_stat = (
        event.get('interface') is iaxiom.IStatEvent
        or 'athena_send_messages' in event
        or 'athena_received_messages' in event
    )
    if not is_stat:
        return
    skipped = ('system', 'message', 'interface', 'isError')
    data = []
    for key, value in event.iteritems():
        if key in skipped:
            continue
        # Normalize every value to unicode for the wire format.
        if not isinstance(value, unicode):
            value = str(value).decode('ascii')
        data.append(dict(key=key.decode('ascii'), value=value))
    self.callRemote(StatUpdate, data=data)
def function[_emit, parameter[self, event]]: constant[ If the given event is a stat event, send a I{StatUpdate} command. ] if <ast.BoolOp object at 0x7da1b0a23010> begin[:] return[None] variable[out] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0a214e0>, <ast.Name object at 0x7da1b0a21c90>]]] in starred[call[name[event].iteritems, parameter[]]] begin[:] if compare[name[k] in tuple[[<ast.Constant object at 0x7da1b0a21570>, <ast.Constant object at 0x7da1b0a23190>, <ast.Constant object at 0x7da1b0a21fc0>, <ast.Constant object at 0x7da1b0a234f0>]]] begin[:] continue if <ast.UnaryOp object at 0x7da1b0a22680> begin[:] variable[v] assign[=] call[call[name[str], parameter[name[v]]].decode, parameter[constant[ascii]]] call[name[out].append, parameter[call[name[dict], parameter[]]]] call[name[self].callRemote, parameter[name[StatUpdate]]]
keyword[def] identifier[_emit] ( identifier[self] , identifier[event] ): literal[string] keyword[if] ( identifier[event] . identifier[get] ( literal[string] ) keyword[is] keyword[not] identifier[iaxiom] . identifier[IStatEvent] keyword[and] literal[string] keyword[not] keyword[in] identifier[event] keyword[and] literal[string] keyword[not] keyword[in] identifier[event] ): keyword[return] identifier[out] =[] keyword[for] identifier[k] , identifier[v] keyword[in] identifier[event] . identifier[iteritems] (): keyword[if] identifier[k] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): keyword[continue] keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[unicode] ): identifier[v] = identifier[str] ( identifier[v] ). identifier[decode] ( literal[string] ) identifier[out] . identifier[append] ( identifier[dict] ( identifier[key] = identifier[k] . identifier[decode] ( literal[string] ), identifier[value] = identifier[v] )) identifier[self] . identifier[callRemote] ( identifier[StatUpdate] , identifier[data] = identifier[out] )
def _emit(self, event): """ If the given event is a stat event, send a I{StatUpdate} command. """ if event.get('interface') is not iaxiom.IStatEvent and 'athena_send_messages' not in event and ('athena_received_messages' not in event): return # depends on [control=['if'], data=[]] out = [] for (k, v) in event.iteritems(): if k in ('system', 'message', 'interface', 'isError'): continue # depends on [control=['if'], data=[]] if not isinstance(v, unicode): v = str(v).decode('ascii') # depends on [control=['if'], data=[]] out.append(dict(key=k.decode('ascii'), value=v)) # depends on [control=['for'], data=[]] self.callRemote(StatUpdate, data=out)
def contents(self, path=None):
    """Get _all_ metadata for this device.

    Call this method if you have the lite/abbreviated device info
    from e.g. <user/>.

    :param path: a path string, ``None`` for the device root, or an
        object exposing a ``name`` attribute (its name is used as the
        path component).
    :returns: the parsed API response from ``self._jfs.get``.
    """
    # The original also checked `isinstance(path, object)`, which is
    # always True in Python — the hasattr() probe is the real test.
    if hasattr(path, 'name'):
        log.debug("passed an object, use .'name' as path value")
        path = '/%s' % path.name
    # Fall back to the root path when `path` is None or empty.
    return self._jfs.get('%s%s' % (self.path, path or '/'))
def function[contents, parameter[self, path]]: constant[Get _all_ metadata for this device. Call this method if you have the lite/abbreviated device info from e.g. <user/>. ] if <ast.BoolOp object at 0x7da20c7c8eb0> begin[:] call[name[log].debug, parameter[constant[passed an object, use .'name' as path value]]] variable[path] assign[=] binary_operation[constant[/%s] <ast.Mod object at 0x7da2590d6920> name[path].name] variable[c] assign[=] call[name[self]._jfs.get, parameter[binary_operation[constant[%s%s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da18f7202e0>, <ast.BoolOp object at 0x7da18f723640>]]]]] return[name[c]]
keyword[def] identifier[contents] ( identifier[self] , identifier[path] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[path] , identifier[object] ) keyword[and] identifier[hasattr] ( identifier[path] , literal[string] ): identifier[log] . identifier[debug] ( literal[string] ) identifier[path] = literal[string] % identifier[path] . identifier[name] identifier[c] = identifier[self] . identifier[_jfs] . identifier[get] ( literal[string] %( identifier[self] . identifier[path] , identifier[path] keyword[or] literal[string] )) keyword[return] identifier[c]
def contents(self, path=None): """Get _all_ metadata for this device. Call this method if you have the lite/abbreviated device info from e.g. <user/>. """ if isinstance(path, object) and hasattr(path, 'name'): log.debug("passed an object, use .'name' as path value") # passed an object, use .'name' as path value path = '/%s' % path.name # depends on [control=['if'], data=[]] c = self._jfs.get('%s%s' % (self.path, path or '/')) return c
def _get_fields(self, fields, excludes, hotfixes):
    '''
    Given any explicit fields to include and fields to exclude, add
    additional fields based on the associated model. If the field needs a
    hotfix, apply it.
    '''
    fields = fields or []
    excludes = excludes or []
    indexed = {}
    for model_field in self.model._meta.fields:
        name = model_field.name
        # Skip: already declared, not whitelisted, blacklisted, or a relation.
        if name in self.fields:
            continue
        if fields and name not in fields:
            continue
        if excludes and name in excludes:
            continue
        if getattr(model_field, 'rel'):
            continue
        options = {'model_attr': name}
        if model_field.has_default():
            options['null_value'] = model_field.default
        # Apply any per-field hotfix overrides last so they win.
        options.update(hotfixes.get(name, {}))
        indexed[name] = django_field_to_index(model_field, **options)
    return indexed
def function[_get_fields, parameter[self, fields, excludes, hotfixes]]: constant[ Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it. ] variable[final_fields] assign[=] dictionary[[], []] variable[fields] assign[=] <ast.BoolOp object at 0x7da1b1a1ec80> variable[excludes] assign[=] <ast.BoolOp object at 0x7da1b1a1c4f0> for taget[name[f]] in starred[name[self].model._meta.fields] begin[:] if compare[name[f].name in name[self].fields] begin[:] continue if <ast.BoolOp object at 0x7da1b1a1ed70> begin[:] continue if <ast.BoolOp object at 0x7da1b1a1d2d0> begin[:] continue if call[name[getattr], parameter[name[f], constant[rel]]] begin[:] continue variable[attr] assign[=] dictionary[[<ast.Constant object at 0x7da1b1a1dae0>], [<ast.Attribute object at 0x7da1b1a1f520>]] if call[name[f].has_default, parameter[]] begin[:] call[name[attr]][constant[null_value]] assign[=] name[f].default if compare[name[f].name in name[hotfixes]] begin[:] call[name[attr].update, parameter[call[name[hotfixes]][name[f].name]]] call[name[final_fields]][name[f].name] assign[=] call[name[django_field_to_index], parameter[name[f]]] return[name[final_fields]]
keyword[def] identifier[_get_fields] ( identifier[self] , identifier[fields] , identifier[excludes] , identifier[hotfixes] ): literal[string] identifier[final_fields] ={} identifier[fields] = identifier[fields] keyword[or] [] identifier[excludes] = identifier[excludes] keyword[or] [] keyword[for] identifier[f] keyword[in] identifier[self] . identifier[model] . identifier[_meta] . identifier[fields] : keyword[if] identifier[f] . identifier[name] keyword[in] identifier[self] . identifier[fields] : keyword[continue] keyword[if] identifier[fields] keyword[and] identifier[f] . identifier[name] keyword[not] keyword[in] identifier[fields] : keyword[continue] keyword[if] identifier[excludes] keyword[and] identifier[f] . identifier[name] keyword[in] identifier[excludes] : keyword[continue] keyword[if] identifier[getattr] ( identifier[f] , literal[string] ): keyword[continue] identifier[attr] ={ literal[string] : identifier[f] . identifier[name] } keyword[if] identifier[f] . identifier[has_default] (): identifier[attr] [ literal[string] ]= identifier[f] . identifier[default] keyword[if] identifier[f] . identifier[name] keyword[in] identifier[hotfixes] : identifier[attr] . identifier[update] ( identifier[hotfixes] [ identifier[f] . identifier[name] ]) identifier[final_fields] [ identifier[f] . identifier[name] ]= identifier[django_field_to_index] ( identifier[f] ,** identifier[attr] ) keyword[return] identifier[final_fields]
def _get_fields(self, fields, excludes, hotfixes): """ Given any explicit fields to include and fields to exclude, add additional fields based on the associated model. If the field needs a hotfix, apply it. """ final_fields = {} fields = fields or [] excludes = excludes or [] for f in self.model._meta.fields: # If the field name is already present, skip if f.name in self.fields: continue # depends on [control=['if'], data=[]] # If field is not present in explicit field listing, skip if fields and f.name not in fields: continue # depends on [control=['if'], data=[]] # If field is in exclude list, skip if excludes and f.name in excludes: continue # depends on [control=['if'], data=[]] # If field is a relation, skip. if getattr(f, 'rel'): continue # depends on [control=['if'], data=[]] attr = {'model_attr': f.name} if f.has_default(): attr['null_value'] = f.default # depends on [control=['if'], data=[]] if f.name in hotfixes: attr.update(hotfixes[f.name]) # depends on [control=['if'], data=['hotfixes']] final_fields[f.name] = django_field_to_index(f, **attr) # depends on [control=['for'], data=['f']] return final_fields
def _local_upload(self, filepath, remove=False):
    """Local plugin package processing"""
    # Guard clauses: reject missing files and bad names up front.
    if not os.path.isfile(filepath):
        raise InstallError("Invalid Filepath")
    abspath = os.path.abspath(filepath)
    filename = os.path.basename(abspath)
    if not (filename and self.__isValidFilename(filename)):
        raise InstallError("Invalid Filename")
    suffix = self.__getFilenameSuffix(filename)
    try:
        if self.__isValidTGZ(suffix):
            self.__unpack_tgz(abspath)
        else:
            self.__unpack_zip(abspath)
    finally:
        # Remove the source package even if unpacking failed.
        if remove is True:
            os.remove(filepath)
def function[_local_upload, parameter[self, filepath, remove]]: constant[Local plugin package processing] if call[name[os].path.isfile, parameter[name[filepath]]] begin[:] variable[filename] assign[=] call[name[os].path.basename, parameter[call[name[os].path.abspath, parameter[name[filepath]]]]] if <ast.BoolOp object at 0x7da1b0cb8310> begin[:] variable[suffix] assign[=] call[name[self].__getFilenameSuffix, parameter[name[filename]]] <ast.Try object at 0x7da1b0cb93c0>
keyword[def] identifier[_local_upload] ( identifier[self] , identifier[filepath] , identifier[remove] = keyword[False] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filepath] ): identifier[filename] = identifier[os] . identifier[path] . identifier[basename] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filepath] )) keyword[if] identifier[filename] keyword[and] identifier[self] . identifier[__isValidFilename] ( identifier[filename] ): identifier[suffix] = identifier[self] . identifier[__getFilenameSuffix] ( identifier[filename] ) keyword[try] : identifier[self] . identifier[__unpack_tgz] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filepath] )) keyword[if] identifier[self] . identifier[__isValidTGZ] ( identifier[suffix] ) keyword[else] identifier[self] . identifier[__unpack_zip] ( identifier[os] . identifier[path] . identifier[abspath] ( identifier[filepath] )) keyword[finally] : keyword[if] identifier[remove] keyword[is] keyword[True] : identifier[os] . identifier[remove] ( identifier[filepath] ) keyword[else] : keyword[raise] identifier[InstallError] ( literal[string] ) keyword[else] : keyword[raise] identifier[InstallError] ( literal[string] )
def _local_upload(self, filepath, remove=False): """Local plugin package processing""" if os.path.isfile(filepath): filename = os.path.basename(os.path.abspath(filepath)) if filename and self.__isValidFilename(filename): suffix = self.__getFilenameSuffix(filename) try: self.__unpack_tgz(os.path.abspath(filepath)) if self.__isValidTGZ(suffix) else self.__unpack_zip(os.path.abspath(filepath)) # depends on [control=['try'], data=[]] finally: if remove is True: os.remove(filepath) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: raise InstallError('Invalid Filename') # depends on [control=['if'], data=[]] else: raise InstallError('Invalid Filepath')
def dumps(obj):
    """
    Dumps a serializable object to JSON. This API maps to the Python built-in
    json dumps method, with a few differences:

    * The return value is always valid JSON according to RFC 7159.
    * The input can be any of the following types:
        - SFrame
        - SArray
        - SGraph
        - single flexible_type (Image, int, long, float, datetime.datetime)
        - recursive flexible_type (list, dict, array.array)
        - recursive variant_type (list or dict of all of the above)
    * Serialized result includes both data and schema. Deserialization
      requires valid schema information to disambiguate various other wrapped
      types (like Image) from dict.
    """
    data, schema = to_serializable(obj)
    payload = {'data': data, 'schema': schema}
    return _json.dumps(payload)
def function[dumps, parameter[obj]]: constant[ Dumps a serializable object to JSON. This API maps to the Python built-in json dumps method, with a few differences: * The return value is always valid JSON according to RFC 7159. * The input can be any of the following types: - SFrame - SArray - SGraph - single flexible_type (Image, int, long, float, datetime.datetime) - recursive flexible_type (list, dict, array.array) - recursive variant_type (list or dict of all of the above) * Serialized result includes both data and schema. Deserialization requires valid schema information to disambiguate various other wrapped types (like Image) from dict. ] <ast.Tuple object at 0x7da1b1ec4430> assign[=] call[name[to_serializable], parameter[name[obj]]] return[call[name[_json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b1ec4c70>, <ast.Constant object at 0x7da1b1ec7b50>], [<ast.Name object at 0x7da1b1ec5630>, <ast.Name object at 0x7da1b1ec4880>]]]]]
keyword[def] identifier[dumps] ( identifier[obj] ): literal[string] ( identifier[data] , identifier[schema] )= identifier[to_serializable] ( identifier[obj] ) keyword[return] identifier[_json] . identifier[dumps] ({ literal[string] : identifier[data] , literal[string] : identifier[schema] })
def dumps(obj): """ Dumps a serializable object to JSON. This API maps to the Python built-in json dumps method, with a few differences: * The return value is always valid JSON according to RFC 7159. * The input can be any of the following types: - SFrame - SArray - SGraph - single flexible_type (Image, int, long, float, datetime.datetime) - recursive flexible_type (list, dict, array.array) - recursive variant_type (list or dict of all of the above) * Serialized result includes both data and schema. Deserialization requires valid schema information to disambiguate various other wrapped types (like Image) from dict. """ (data, schema) = to_serializable(obj) return _json.dumps({'data': data, 'schema': schema})
def get_qualification_requests(self, qualification_type_id, sort_by='Expiration', sort_direction='Ascending', page_size=10, page_number=1):
    """Retrieve pending requests for the given qualification type.

    Issues a ``GetQualificationRequests`` API call and parses each
    ``QualificationRequest`` element in the response.
    """
    return self._process_request(
        'GetQualificationRequests',
        {
            'QualificationTypeId': qualification_type_id,
            'SortProperty': sort_by,
            'SortDirection': sort_direction,
            'PageSize': page_size,
            'PageNumber': page_number,
        },
        [('QualificationRequest', QualificationRequest)],
    )
def function[get_qualification_requests, parameter[self, qualification_type_id, sort_by, sort_direction, page_size, page_number]]: constant[TODO: Document.] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b265b190>, <ast.Constant object at 0x7da1b26589a0>, <ast.Constant object at 0x7da1b26586a0>, <ast.Constant object at 0x7da1b265b160>, <ast.Constant object at 0x7da1b265b040>], [<ast.Name object at 0x7da1b2658d00>, <ast.Name object at 0x7da1b26582b0>, <ast.Name object at 0x7da1b2659bd0>, <ast.Name object at 0x7da1b2658a60>, <ast.Name object at 0x7da1b2659b10>]] return[call[name[self]._process_request, parameter[constant[GetQualificationRequests], name[params], list[[<ast.Tuple object at 0x7da1b265ac50>]]]]]
keyword[def] identifier[get_qualification_requests] ( identifier[self] , identifier[qualification_type_id] , identifier[sort_by] = literal[string] , identifier[sort_direction] = literal[string] , identifier[page_size] = literal[int] , identifier[page_number] = literal[int] ): literal[string] identifier[params] ={ literal[string] : identifier[qualification_type_id] , literal[string] : identifier[sort_by] , literal[string] : identifier[sort_direction] , literal[string] : identifier[page_size] , literal[string] : identifier[page_number] } keyword[return] identifier[self] . identifier[_process_request] ( literal[string] , identifier[params] , [( literal[string] , identifier[QualificationRequest] ),])
def get_qualification_requests(self, qualification_type_id, sort_by='Expiration', sort_direction='Ascending', page_size=10, page_number=1): """TODO: Document.""" params = {'QualificationTypeId': qualification_type_id, 'SortProperty': sort_by, 'SortDirection': sort_direction, 'PageSize': page_size, 'PageNumber': page_number} return self._process_request('GetQualificationRequests', params, [('QualificationRequest', QualificationRequest)])
def db_dp990(self, value=None):
    """
    Corresponds to IDD Field `db_dp990`
    mean coincident drybulb temperature corresponding to
    Dew-point temperature corresponding to 90.0% annual cumulative
    frequency of occurrence (cold conditions)

    Args:
        value (float): value for IDD Field `db_dp990`
            Unit: C
            if `value` is None it will not be checked against the
            specification and is assumed to be a missing value

    Raises:
        ValueError: if `value` is not a valid value
    """
    # None means "missing value" — stored as-is, no conversion attempted.
    if value is None:
        self._db_dp990 = None
        return
    try:
        converted = float(value)
    except ValueError:
        raise ValueError('value {} need to be of type float for field `db_dp990`'.format(value))
    self._db_dp990 = converted
def function[db_dp990, parameter[self, value]]: constant[ Corresponds to IDD Field `db_dp990` mean coincident drybulb temperature corresponding to Dew-point temperature corresponding to 90.0% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `db_dp990` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value ] if compare[name[value] is_not constant[None]] begin[:] <ast.Try object at 0x7da18c4cfac0> name[self]._db_dp990 assign[=] name[value]
keyword[def] identifier[db_dp990] ( identifier[self] , identifier[value] = keyword[None] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[try] : identifier[value] = identifier[float] ( identifier[value] ) keyword[except] identifier[ValueError] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[value] )) identifier[self] . identifier[_db_dp990] = identifier[value]
def db_dp990(self, value=None): """ Corresponds to IDD Field `db_dp990` mean coincident drybulb temperature corresponding to Dew-point temperature corresponding to 90.0% annual cumulative frequency of occurrence (cold conditions) Args: value (float): value for IDD Field `db_dp990` Unit: C if `value` is None it will not be checked against the specification and is assumed to be a missing value Raises: ValueError: if `value` is not a valid value """ if value is not None: try: value = float(value) # depends on [control=['try'], data=[]] except ValueError: raise ValueError('value {} need to be of type float for field `db_dp990`'.format(value)) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=['value']] self._db_dp990 = value
def on_redraw(self):
    """
    Called when the Layer should be redrawn.

    If a subclass uses the :py:meth:`initialize()` Method, it is very
    important to also call the Super Class Method to prevent crashes.
    """
    super(WidgetLayer, self).on_redraw()
    # Lazily run one-time setup on the first redraw only.
    if self._initialized:
        return
    self.initialize()
    self._initialized = True
def function[on_redraw, parameter[self]]: constant[ Called when the Layer should be redrawn. If a subclass uses the :py:meth:`initialize()` Method, it is very important to also call the Super Class Method to prevent crashes. ] call[call[name[super], parameter[name[WidgetLayer], name[self]]].on_redraw, parameter[]] if <ast.UnaryOp object at 0x7da1b01ffd30> begin[:] call[name[self].initialize, parameter[]] name[self]._initialized assign[=] constant[True]
keyword[def] identifier[on_redraw] ( identifier[self] ): literal[string] identifier[super] ( identifier[WidgetLayer] , identifier[self] ). identifier[on_redraw] () keyword[if] keyword[not] identifier[self] . identifier[_initialized] : identifier[self] . identifier[initialize] () identifier[self] . identifier[_initialized] = keyword[True]
def on_redraw(self): """ Called when the Layer should be redrawn. If a subclass uses the :py:meth:`initialize()` Method, it is very important to also call the Super Class Method to prevent crashes. """ super(WidgetLayer, self).on_redraw() if not self._initialized: self.initialize() self._initialized = True # depends on [control=['if'], data=[]]
def setTerms( self, terms ): """ Sets the term options for this widget. :param terms | [<str>, ..] """ self.uiTermDDL.blockSignals(True) term = self.uiTermDDL.currentText() self.uiTermDDL.clear() self.uiTermDDL.addItems(terms) self.uiTermDDL.setCurrentIndex(self.uiTermDDL.findText(term)) self.applyRule() self.uiTermDDL.blockSignals(False)
def function[setTerms, parameter[self, terms]]: constant[ Sets the term options for this widget. :param terms | [<str>, ..] ] call[name[self].uiTermDDL.blockSignals, parameter[constant[True]]] variable[term] assign[=] call[name[self].uiTermDDL.currentText, parameter[]] call[name[self].uiTermDDL.clear, parameter[]] call[name[self].uiTermDDL.addItems, parameter[name[terms]]] call[name[self].uiTermDDL.setCurrentIndex, parameter[call[name[self].uiTermDDL.findText, parameter[name[term]]]]] call[name[self].applyRule, parameter[]] call[name[self].uiTermDDL.blockSignals, parameter[constant[False]]]
keyword[def] identifier[setTerms] ( identifier[self] , identifier[terms] ): literal[string] identifier[self] . identifier[uiTermDDL] . identifier[blockSignals] ( keyword[True] ) identifier[term] = identifier[self] . identifier[uiTermDDL] . identifier[currentText] () identifier[self] . identifier[uiTermDDL] . identifier[clear] () identifier[self] . identifier[uiTermDDL] . identifier[addItems] ( identifier[terms] ) identifier[self] . identifier[uiTermDDL] . identifier[setCurrentIndex] ( identifier[self] . identifier[uiTermDDL] . identifier[findText] ( identifier[term] )) identifier[self] . identifier[applyRule] () identifier[self] . identifier[uiTermDDL] . identifier[blockSignals] ( keyword[False] )
def setTerms(self, terms): """ Sets the term options for this widget. :param terms | [<str>, ..] """ self.uiTermDDL.blockSignals(True) term = self.uiTermDDL.currentText() self.uiTermDDL.clear() self.uiTermDDL.addItems(terms) self.uiTermDDL.setCurrentIndex(self.uiTermDDL.findText(term)) self.applyRule() self.uiTermDDL.blockSignals(False)
def parse_tibiacom_content(content, *, html_class="BoxContent", tag="div", builder="lxml"): """Parses HTML content from Tibia.com into a BeautifulSoup object. Parameters ---------- content: :class:`str` The raw HTML content from Tibia.com html_class: :class:`str` The HTML class of the parsed element. The default value is ``BoxContent``. tag: :class:`str` The HTML tag select. The default value is ``div``. builder: :class:`str` The builder to use. The default value is ``lxml``. Returns ------- :class:`bs4.BeautifulSoup`, optional The parsed content. """ return bs4.BeautifulSoup(content.replace('ISO-8859-1', 'utf-8'), builder, parse_only=bs4.SoupStrainer(tag, class_=html_class))
def function[parse_tibiacom_content, parameter[content]]: constant[Parses HTML content from Tibia.com into a BeautifulSoup object. Parameters ---------- content: :class:`str` The raw HTML content from Tibia.com html_class: :class:`str` The HTML class of the parsed element. The default value is ``BoxContent``. tag: :class:`str` The HTML tag select. The default value is ``div``. builder: :class:`str` The builder to use. The default value is ``lxml``. Returns ------- :class:`bs4.BeautifulSoup`, optional The parsed content. ] return[call[name[bs4].BeautifulSoup, parameter[call[name[content].replace, parameter[constant[ISO-8859-1], constant[utf-8]]], name[builder]]]]
keyword[def] identifier[parse_tibiacom_content] ( identifier[content] ,*, identifier[html_class] = literal[string] , identifier[tag] = literal[string] , identifier[builder] = literal[string] ): literal[string] keyword[return] identifier[bs4] . identifier[BeautifulSoup] ( identifier[content] . identifier[replace] ( literal[string] , literal[string] ), identifier[builder] , identifier[parse_only] = identifier[bs4] . identifier[SoupStrainer] ( identifier[tag] , identifier[class_] = identifier[html_class] ))
def parse_tibiacom_content(content, *, html_class='BoxContent', tag='div', builder='lxml'): """Parses HTML content from Tibia.com into a BeautifulSoup object. Parameters ---------- content: :class:`str` The raw HTML content from Tibia.com html_class: :class:`str` The HTML class of the parsed element. The default value is ``BoxContent``. tag: :class:`str` The HTML tag select. The default value is ``div``. builder: :class:`str` The builder to use. The default value is ``lxml``. Returns ------- :class:`bs4.BeautifulSoup`, optional The parsed content. """ return bs4.BeautifulSoup(content.replace('ISO-8859-1', 'utf-8'), builder, parse_only=bs4.SoupStrainer(tag, class_=html_class))
def file_segment(filename): """Return the data segment for a filename following T050017 Parameters --------- filename : `str`, :class:`~lal.utils.CacheEntry` the path name of a file Returns ------- segment : `~gwpy.segments.Segment` the ``[start, stop)`` GPS segment covered by the given file Notes ----- |LIGO-T050017|_ declares a filenaming convention that includes documenting the GPS start integer and integer duration of a file, see that document for more details. """ from ..segments import Segment try: # CacheEntry return Segment(filename.segment) except AttributeError: # file path (str) return filename_metadata(filename)[2]
def function[file_segment, parameter[filename]]: constant[Return the data segment for a filename following T050017 Parameters --------- filename : `str`, :class:`~lal.utils.CacheEntry` the path name of a file Returns ------- segment : `~gwpy.segments.Segment` the ``[start, stop)`` GPS segment covered by the given file Notes ----- |LIGO-T050017|_ declares a filenaming convention that includes documenting the GPS start integer and integer duration of a file, see that document for more details. ] from relative_module[segments] import module[Segment] <ast.Try object at 0x7da18f09e560>
keyword[def] identifier[file_segment] ( identifier[filename] ): literal[string] keyword[from] .. identifier[segments] keyword[import] identifier[Segment] keyword[try] : keyword[return] identifier[Segment] ( identifier[filename] . identifier[segment] ) keyword[except] identifier[AttributeError] : keyword[return] identifier[filename_metadata] ( identifier[filename] )[ literal[int] ]
def file_segment(filename): """Return the data segment for a filename following T050017 Parameters --------- filename : `str`, :class:`~lal.utils.CacheEntry` the path name of a file Returns ------- segment : `~gwpy.segments.Segment` the ``[start, stop)`` GPS segment covered by the given file Notes ----- |LIGO-T050017|_ declares a filenaming convention that includes documenting the GPS start integer and integer duration of a file, see that document for more details. """ from ..segments import Segment try: # CacheEntry return Segment(filename.segment) # depends on [control=['try'], data=[]] except AttributeError: # file path (str) return filename_metadata(filename)[2] # depends on [control=['except'], data=[]]
def get_key(key_name, region=None, key=None, keyid=None, profile=None): ''' Check to see if a key exists. Returns fingerprint and name if it does and False if it doesn't CLI Example: .. code-block:: bash salt myminion boto_ec2.get_key mykey ''' conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: key = conn.get_key_pair(key_name) log.debug("the key to return is : %s", key) if key is None: return False return key.name, key.fingerprint except boto.exception.BotoServerError as e: log.debug(e) return False
def function[get_key, parameter[key_name, region, key, keyid, profile]]: constant[ Check to see if a key exists. Returns fingerprint and name if it does and False if it doesn't CLI Example: .. code-block:: bash salt myminion boto_ec2.get_key mykey ] variable[conn] assign[=] call[name[_get_conn], parameter[]] <ast.Try object at 0x7da18ede6230>
keyword[def] identifier[get_key] ( identifier[key_name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[try] : identifier[key] = identifier[conn] . identifier[get_key_pair] ( identifier[key_name] ) identifier[log] . identifier[debug] ( literal[string] , identifier[key] ) keyword[if] identifier[key] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[return] identifier[key] . identifier[name] , identifier[key] . identifier[fingerprint] keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : identifier[log] . identifier[debug] ( identifier[e] ) keyword[return] keyword[False]
def get_key(key_name, region=None, key=None, keyid=None, profile=None): """ Check to see if a key exists. Returns fingerprint and name if it does and False if it doesn't CLI Example: .. code-block:: bash salt myminion boto_ec2.get_key mykey """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) try: key = conn.get_key_pair(key_name) log.debug('the key to return is : %s', key) if key is None: return False # depends on [control=['if'], data=[]] return (key.name, key.fingerprint) # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: log.debug(e) return False # depends on [control=['except'], data=['e']]
def open(self): """ Opens the port. :returns: Deferred that callbacks when we are ready to make and receive calls. """ logging.debug("Opening rpc system") d = self._connectionpool.open(self._packet_received) def opened(_): logging.debug("RPC system is open") self._opened = True logging.debug("Starting ping loop") self._ping_loop.start(self._ping_interval, now=False) d.addCallback(opened) return d
def function[open, parameter[self]]: constant[ Opens the port. :returns: Deferred that callbacks when we are ready to make and receive calls. ] call[name[logging].debug, parameter[constant[Opening rpc system]]] variable[d] assign[=] call[name[self]._connectionpool.open, parameter[name[self]._packet_received]] def function[opened, parameter[_]]: call[name[logging].debug, parameter[constant[RPC system is open]]] name[self]._opened assign[=] constant[True] call[name[logging].debug, parameter[constant[Starting ping loop]]] call[name[self]._ping_loop.start, parameter[name[self]._ping_interval]] call[name[d].addCallback, parameter[name[opened]]] return[name[d]]
keyword[def] identifier[open] ( identifier[self] ): literal[string] identifier[logging] . identifier[debug] ( literal[string] ) identifier[d] = identifier[self] . identifier[_connectionpool] . identifier[open] ( identifier[self] . identifier[_packet_received] ) keyword[def] identifier[opened] ( identifier[_] ): identifier[logging] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_opened] = keyword[True] identifier[logging] . identifier[debug] ( literal[string] ) identifier[self] . identifier[_ping_loop] . identifier[start] ( identifier[self] . identifier[_ping_interval] , identifier[now] = keyword[False] ) identifier[d] . identifier[addCallback] ( identifier[opened] ) keyword[return] identifier[d]
def open(self): """ Opens the port. :returns: Deferred that callbacks when we are ready to make and receive calls. """ logging.debug('Opening rpc system') d = self._connectionpool.open(self._packet_received) def opened(_): logging.debug('RPC system is open') self._opened = True logging.debug('Starting ping loop') self._ping_loop.start(self._ping_interval, now=False) d.addCallback(opened) return d
def clean_inconcs(self): """ Check if there are any inconclusives or uknowns that were not subsequently retried. :return: Boolean """ for item in self.data: if (item.inconclusive or item.get_verdict() == "unknown") and not item.retries_left > 0: return True return False
def function[clean_inconcs, parameter[self]]: constant[ Check if there are any inconclusives or uknowns that were not subsequently retried. :return: Boolean ] for taget[name[item]] in starred[name[self].data] begin[:] if <ast.BoolOp object at 0x7da1b0c35f30> begin[:] return[constant[True]] return[constant[False]]
keyword[def] identifier[clean_inconcs] ( identifier[self] ): literal[string] keyword[for] identifier[item] keyword[in] identifier[self] . identifier[data] : keyword[if] ( identifier[item] . identifier[inconclusive] keyword[or] identifier[item] . identifier[get_verdict] ()== literal[string] ) keyword[and] keyword[not] identifier[item] . identifier[retries_left] > literal[int] : keyword[return] keyword[True] keyword[return] keyword[False]
def clean_inconcs(self): """ Check if there are any inconclusives or uknowns that were not subsequently retried. :return: Boolean """ for item in self.data: if (item.inconclusive or item.get_verdict() == 'unknown') and (not item.retries_left > 0): return True # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] return False
def _cleanup(self, domains): """Remove the temporary '.pot' files that were created for the domains.""" for option in domains.values(): try: os.remove(option['pot']) except (IOError, OSError): # It is not a problem if we can't actually remove the temporary file pass
def function[_cleanup, parameter[self, domains]]: constant[Remove the temporary '.pot' files that were created for the domains.] for taget[name[option]] in starred[call[name[domains].values, parameter[]]] begin[:] <ast.Try object at 0x7da20e961ed0>
keyword[def] identifier[_cleanup] ( identifier[self] , identifier[domains] ): literal[string] keyword[for] identifier[option] keyword[in] identifier[domains] . identifier[values] (): keyword[try] : identifier[os] . identifier[remove] ( identifier[option] [ literal[string] ]) keyword[except] ( identifier[IOError] , identifier[OSError] ): keyword[pass]
def _cleanup(self, domains): """Remove the temporary '.pot' files that were created for the domains.""" for option in domains.values(): try: os.remove(option['pot']) # depends on [control=['try'], data=[]] except (IOError, OSError): # It is not a problem if we can't actually remove the temporary file pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['option']]
async def play_now(self, requester: int, track: dict): """ Add track and play it. """ self.add_next(requester, track) await self.play(ignore_shuffle=True)
<ast.AsyncFunctionDef object at 0x7da1b015bd30>
keyword[async] keyword[def] identifier[play_now] ( identifier[self] , identifier[requester] : identifier[int] , identifier[track] : identifier[dict] ): literal[string] identifier[self] . identifier[add_next] ( identifier[requester] , identifier[track] ) keyword[await] identifier[self] . identifier[play] ( identifier[ignore_shuffle] = keyword[True] )
async def play_now(self, requester: int, track: dict): """ Add track and play it. """ self.add_next(requester, track) await self.play(ignore_shuffle=True)
def select_header_accept(self, accepts): """ Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). """ if not accepts: return accepts = [x.lower() for x in accepts] if 'application/json' in accepts: return 'application/json' else: return ', '.join(accepts)
def function[select_header_accept, parameter[self, accepts]]: constant[ Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). ] if <ast.UnaryOp object at 0x7da1b1f611e0> begin[:] return[None] variable[accepts] assign[=] <ast.ListComp object at 0x7da1b1f61570> if compare[constant[application/json] in name[accepts]] begin[:] return[constant[application/json]]
keyword[def] identifier[select_header_accept] ( identifier[self] , identifier[accepts] ): literal[string] keyword[if] keyword[not] identifier[accepts] : keyword[return] identifier[accepts] =[ identifier[x] . identifier[lower] () keyword[for] identifier[x] keyword[in] identifier[accepts] ] keyword[if] literal[string] keyword[in] identifier[accepts] : keyword[return] literal[string] keyword[else] : keyword[return] literal[string] . identifier[join] ( identifier[accepts] )
def select_header_accept(self, accepts): """ Returns `Accept` based on an array of accepts provided. :param accepts: List of headers. :return: Accept (e.g. application/json). """ if not accepts: return # depends on [control=['if'], data=[]] accepts = [x.lower() for x in accepts] if 'application/json' in accepts: return 'application/json' # depends on [control=['if'], data=[]] else: return ', '.join(accepts)
def perform_create(self, serializer): """ determine user when node is added """ if serializer.instance is None: serializer.save(user=self.request.user)
def function[perform_create, parameter[self, serializer]]: constant[ determine user when node is added ] if compare[name[serializer].instance is constant[None]] begin[:] call[name[serializer].save, parameter[]]
keyword[def] identifier[perform_create] ( identifier[self] , identifier[serializer] ): literal[string] keyword[if] identifier[serializer] . identifier[instance] keyword[is] keyword[None] : identifier[serializer] . identifier[save] ( identifier[user] = identifier[self] . identifier[request] . identifier[user] )
def perform_create(self, serializer): """ determine user when node is added """ if serializer.instance is None: serializer.save(user=self.request.user) # depends on [control=['if'], data=[]]
def atlas_init_peer_info( peer_table, peer_hostport, blacklisted=False, whitelisted=False ): """ Initialize peer info table entry """ peer_table[peer_hostport] = { "time": [], "zonefile_inv": "", "blacklisted": blacklisted, "whitelisted": whitelisted }
def function[atlas_init_peer_info, parameter[peer_table, peer_hostport, blacklisted, whitelisted]]: constant[ Initialize peer info table entry ] call[name[peer_table]][name[peer_hostport]] assign[=] dictionary[[<ast.Constant object at 0x7da20e963b80>, <ast.Constant object at 0x7da20e9611b0>, <ast.Constant object at 0x7da20e961e40>, <ast.Constant object at 0x7da20e961cc0>], [<ast.List object at 0x7da20e963ac0>, <ast.Constant object at 0x7da20e963370>, <ast.Name object at 0x7da20e9638e0>, <ast.Name object at 0x7da20e9600d0>]]
keyword[def] identifier[atlas_init_peer_info] ( identifier[peer_table] , identifier[peer_hostport] , identifier[blacklisted] = keyword[False] , identifier[whitelisted] = keyword[False] ): literal[string] identifier[peer_table] [ identifier[peer_hostport] ]={ literal[string] :[], literal[string] : literal[string] , literal[string] : identifier[blacklisted] , literal[string] : identifier[whitelisted] }
def atlas_init_peer_info(peer_table, peer_hostport, blacklisted=False, whitelisted=False): """ Initialize peer info table entry """ peer_table[peer_hostport] = {'time': [], 'zonefile_inv': '', 'blacklisted': blacklisted, 'whitelisted': whitelisted}
def splitquery(url): """splitquery('/path?query') --> '/path', 'query'.""" global _queryprog if _queryprog is None: import re _queryprog = re.compile('^(.*)\?([^?]*)$') match = _queryprog.match(url) if match: return match.group(1, 2) return url, None
def function[splitquery, parameter[url]]: constant[splitquery('/path?query') --> '/path', 'query'.] <ast.Global object at 0x7da1b2347610> if compare[name[_queryprog] is constant[None]] begin[:] import module[re] variable[_queryprog] assign[=] call[name[re].compile, parameter[constant[^(.*)\?([^?]*)$]]] variable[match] assign[=] call[name[_queryprog].match, parameter[name[url]]] if name[match] begin[:] return[call[name[match].group, parameter[constant[1], constant[2]]]] return[tuple[[<ast.Name object at 0x7da1b2345900>, <ast.Constant object at 0x7da1b2344250>]]]
keyword[def] identifier[splitquery] ( identifier[url] ): literal[string] keyword[global] identifier[_queryprog] keyword[if] identifier[_queryprog] keyword[is] keyword[None] : keyword[import] identifier[re] identifier[_queryprog] = identifier[re] . identifier[compile] ( literal[string] ) identifier[match] = identifier[_queryprog] . identifier[match] ( identifier[url] ) keyword[if] identifier[match] : keyword[return] identifier[match] . identifier[group] ( literal[int] , literal[int] ) keyword[return] identifier[url] , keyword[None]
def splitquery(url): """splitquery('/path?query') --> '/path', 'query'.""" global _queryprog if _queryprog is None: import re _queryprog = re.compile('^(.*)\\?([^?]*)$') # depends on [control=['if'], data=['_queryprog']] match = _queryprog.match(url) if match: return match.group(1, 2) # depends on [control=['if'], data=[]] return (url, None)
def get_groups(self, **kwargs): """Obtain line types and details. Args: lang (str): Language code (*es* or *en*). Returns: Status boolean and parsed response (list[GeoGroupItem]), or message string in case of error. """ # Endpoint parameters params = { 'cultureInfo': util.language_code(kwargs.get('lang')) } # Request result = self.make_request('geo', 'get_groups', **params) if not util.check_result(result): return False, result.get('resultDescription', 'UNKNOWN ERROR') # Parse values = util.response_list(result, 'resultValues') return True, [emtype.GeoGroupItem(**a) for a in values]
def function[get_groups, parameter[self]]: constant[Obtain line types and details. Args: lang (str): Language code (*es* or *en*). Returns: Status boolean and parsed response (list[GeoGroupItem]), or message string in case of error. ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da1b0bd9780>], [<ast.Call object at 0x7da1b0bd8b20>]] variable[result] assign[=] call[name[self].make_request, parameter[constant[geo], constant[get_groups]]] if <ast.UnaryOp object at 0x7da1b0bdaf50> begin[:] return[tuple[[<ast.Constant object at 0x7da1b0bda890>, <ast.Call object at 0x7da1b0bdb400>]]] variable[values] assign[=] call[name[util].response_list, parameter[name[result], constant[resultValues]]] return[tuple[[<ast.Constant object at 0x7da1b0bdb850>, <ast.ListComp object at 0x7da1b0bda8f0>]]]
keyword[def] identifier[get_groups] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[params] ={ literal[string] : identifier[util] . identifier[language_code] ( identifier[kwargs] . identifier[get] ( literal[string] )) } identifier[result] = identifier[self] . identifier[make_request] ( literal[string] , literal[string] ,** identifier[params] ) keyword[if] keyword[not] identifier[util] . identifier[check_result] ( identifier[result] ): keyword[return] keyword[False] , identifier[result] . identifier[get] ( literal[string] , literal[string] ) identifier[values] = identifier[util] . identifier[response_list] ( identifier[result] , literal[string] ) keyword[return] keyword[True] ,[ identifier[emtype] . identifier[GeoGroupItem] (** identifier[a] ) keyword[for] identifier[a] keyword[in] identifier[values] ]
def get_groups(self, **kwargs): """Obtain line types and details. Args: lang (str): Language code (*es* or *en*). Returns: Status boolean and parsed response (list[GeoGroupItem]), or message string in case of error. """ # Endpoint parameters params = {'cultureInfo': util.language_code(kwargs.get('lang'))} # Request result = self.make_request('geo', 'get_groups', **params) if not util.check_result(result): return (False, result.get('resultDescription', 'UNKNOWN ERROR')) # depends on [control=['if'], data=[]] # Parse values = util.response_list(result, 'resultValues') return (True, [emtype.GeoGroupItem(**a) for a in values])
def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ # qtconsole doesn't report its line width, and also # behaves badly when outputting an HTML table # that doesn't fit the window, so disable it. # XXX: In IPython 3.x and above, the Qt console will not attempt to # display HTML, so this check can be removed when support for IPython 2.x # is no longer needed. if self._pandas and options.display.notebook_repr_widget: from .. import DataFrame from ..ui import show_df_widget show_df_widget(DataFrame(self._values, schema=self.schema)) if self._pandas: return self._values._repr_html_() if in_qtconsole(): # 'HTML output is disabled in QtConsole' return None if options.display.notebook_repr_html: max_rows = options.display.max_rows max_cols = options.display.max_columns show_dimensions = options.display.show_dimensions return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) else: return None
def function[_repr_html_, parameter[self]]: constant[ Return a html representation for a particular DataFrame. Mainly for IPython notebook. ] if <ast.BoolOp object at 0x7da20cabfb50> begin[:] from relative_module[None] import module[DataFrame] from relative_module[ui] import module[show_df_widget] call[name[show_df_widget], parameter[call[name[DataFrame], parameter[name[self]._values]]]] if name[self]._pandas begin[:] return[call[name[self]._values._repr_html_, parameter[]]] if call[name[in_qtconsole], parameter[]] begin[:] return[constant[None]] if name[options].display.notebook_repr_html begin[:] variable[max_rows] assign[=] name[options].display.max_rows variable[max_cols] assign[=] name[options].display.max_columns variable[show_dimensions] assign[=] name[options].display.show_dimensions return[call[name[self].to_html, parameter[]]]
keyword[def] identifier[_repr_html_] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_pandas] keyword[and] identifier[options] . identifier[display] . identifier[notebook_repr_widget] : keyword[from] .. keyword[import] identifier[DataFrame] keyword[from] .. identifier[ui] keyword[import] identifier[show_df_widget] identifier[show_df_widget] ( identifier[DataFrame] ( identifier[self] . identifier[_values] , identifier[schema] = identifier[self] . identifier[schema] )) keyword[if] identifier[self] . identifier[_pandas] : keyword[return] identifier[self] . identifier[_values] . identifier[_repr_html_] () keyword[if] identifier[in_qtconsole] (): keyword[return] keyword[None] keyword[if] identifier[options] . identifier[display] . identifier[notebook_repr_html] : identifier[max_rows] = identifier[options] . identifier[display] . identifier[max_rows] identifier[max_cols] = identifier[options] . identifier[display] . identifier[max_columns] identifier[show_dimensions] = identifier[options] . identifier[display] . identifier[show_dimensions] keyword[return] identifier[self] . identifier[to_html] ( identifier[max_rows] = identifier[max_rows] , identifier[max_cols] = identifier[max_cols] , identifier[show_dimensions] = identifier[show_dimensions] , identifier[notebook] = keyword[True] ) keyword[else] : keyword[return] keyword[None]
def _repr_html_(self): """ Return a html representation for a particular DataFrame. Mainly for IPython notebook. """ # qtconsole doesn't report its line width, and also # behaves badly when outputting an HTML table # that doesn't fit the window, so disable it. # XXX: In IPython 3.x and above, the Qt console will not attempt to # display HTML, so this check can be removed when support for IPython 2.x # is no longer needed. if self._pandas and options.display.notebook_repr_widget: from .. import DataFrame from ..ui import show_df_widget show_df_widget(DataFrame(self._values, schema=self.schema)) # depends on [control=['if'], data=[]] if self._pandas: return self._values._repr_html_() # depends on [control=['if'], data=[]] if in_qtconsole(): # 'HTML output is disabled in QtConsole' return None # depends on [control=['if'], data=[]] if options.display.notebook_repr_html: max_rows = options.display.max_rows max_cols = options.display.max_columns show_dimensions = options.display.show_dimensions return self.to_html(max_rows=max_rows, max_cols=max_cols, show_dimensions=show_dimensions, notebook=True) # depends on [control=['if'], data=[]] else: return None
def get_channel_comment(self, name=None, group=None, index=None): """Gets channel comment. Channel can be specified in two ways: * using the first positional argument *name* * if there are multiple occurrences for this channel then the *group* and *index* arguments can be used to select a specific group. * if there are multiple occurrences for this channel and either the *group* or *index* arguments is None then a warning is issued * using the group number (keyword argument *group*) and the channel number (keyword argument *index*). Use *info* method for group and channel numbers If the *raster* keyword argument is not *None* the output is interpolated accordingly. Parameters ---------- name : string name of channel group : int 0-based group index index : int 0-based channel index Returns ------- comment : str found channel comment """ gp_nr, ch_nr = self._validate_channel_selection(name, group, index) grp = self.groups[gp_nr] channel = grp.channels[ch_nr] return extract_cncomment_xml(channel.comment)
def function[get_channel_comment, parameter[self, name, group, index]]: constant[Gets channel comment. Channel can be specified in two ways: * using the first positional argument *name* * if there are multiple occurrences for this channel then the *group* and *index* arguments can be used to select a specific group. * if there are multiple occurrences for this channel and either the *group* or *index* arguments is None then a warning is issued * using the group number (keyword argument *group*) and the channel number (keyword argument *index*). Use *info* method for group and channel numbers If the *raster* keyword argument is not *None* the output is interpolated accordingly. Parameters ---------- name : string name of channel group : int 0-based group index index : int 0-based channel index Returns ------- comment : str found channel comment ] <ast.Tuple object at 0x7da1b1897f40> assign[=] call[name[self]._validate_channel_selection, parameter[name[name], name[group], name[index]]] variable[grp] assign[=] call[name[self].groups][name[gp_nr]] variable[channel] assign[=] call[name[grp].channels][name[ch_nr]] return[call[name[extract_cncomment_xml], parameter[name[channel].comment]]]
keyword[def] identifier[get_channel_comment] ( identifier[self] , identifier[name] = keyword[None] , identifier[group] = keyword[None] , identifier[index] = keyword[None] ): literal[string] identifier[gp_nr] , identifier[ch_nr] = identifier[self] . identifier[_validate_channel_selection] ( identifier[name] , identifier[group] , identifier[index] ) identifier[grp] = identifier[self] . identifier[groups] [ identifier[gp_nr] ] identifier[channel] = identifier[grp] . identifier[channels] [ identifier[ch_nr] ] keyword[return] identifier[extract_cncomment_xml] ( identifier[channel] . identifier[comment] )
def get_channel_comment(self, name=None, group=None, index=None): """Gets channel comment. Channel can be specified in two ways: * using the first positional argument *name* * if there are multiple occurrences for this channel then the *group* and *index* arguments can be used to select a specific group. * if there are multiple occurrences for this channel and either the *group* or *index* arguments is None then a warning is issued * using the group number (keyword argument *group*) and the channel number (keyword argument *index*). Use *info* method for group and channel numbers If the *raster* keyword argument is not *None* the output is interpolated accordingly. Parameters ---------- name : string name of channel group : int 0-based group index index : int 0-based channel index Returns ------- comment : str found channel comment """ (gp_nr, ch_nr) = self._validate_channel_selection(name, group, index) grp = self.groups[gp_nr] channel = grp.channels[ch_nr] return extract_cncomment_xml(channel.comment)
def event_later(self, delay, data_tuple): """ Schedule an event to be emitted after a delay. :param delay: number of seconds :param data_tuple: a 2-tuple (flavor, data) :return: an event object, useful for cancelling. """ return self._base.event_later(delay, self.make_event_data(*data_tuple))
def function[event_later, parameter[self, delay, data_tuple]]: constant[ Schedule an event to be emitted after a delay. :param delay: number of seconds :param data_tuple: a 2-tuple (flavor, data) :return: an event object, useful for cancelling. ] return[call[name[self]._base.event_later, parameter[name[delay], call[name[self].make_event_data, parameter[<ast.Starred object at 0x7da1b1baedd0>]]]]]
keyword[def] identifier[event_later] ( identifier[self] , identifier[delay] , identifier[data_tuple] ): literal[string] keyword[return] identifier[self] . identifier[_base] . identifier[event_later] ( identifier[delay] , identifier[self] . identifier[make_event_data] (* identifier[data_tuple] ))
def event_later(self, delay, data_tuple): """ Schedule an event to be emitted after a delay. :param delay: number of seconds :param data_tuple: a 2-tuple (flavor, data) :return: an event object, useful for cancelling. """ return self._base.event_later(delay, self.make_event_data(*data_tuple))
def Split(axis, a, n): """ Split op with n splits. """ return tuple(np.split(np.copy(a), n, axis=axis))
def function[Split, parameter[axis, a, n]]: constant[ Split op with n splits. ] return[call[name[tuple], parameter[call[name[np].split, parameter[call[name[np].copy, parameter[name[a]]], name[n]]]]]]
keyword[def] identifier[Split] ( identifier[axis] , identifier[a] , identifier[n] ): literal[string] keyword[return] identifier[tuple] ( identifier[np] . identifier[split] ( identifier[np] . identifier[copy] ( identifier[a] ), identifier[n] , identifier[axis] = identifier[axis] ))
def Split(axis, a, n): """ Split op with n splits. """ return tuple(np.split(np.copy(a), n, axis=axis))
def create(self, **kwargs): """Custom creation logic to handle edge cases This shouldn't be needed, but ASM has a tendency to raise various errors that are painful to handle from a customer point-of-view The error itself are described in their exception handler To address these failure, we try a number of exception handling cases to catch and reliably deal with the error. :param kwargs: :return: """ for x in range(0, 30): try: return self._create(**kwargs) except iControlUnexpectedHTTPError as ex: if self._check_exception(ex): continue else: raise
def function[create, parameter[self]]: constant[Custom creation logic to handle edge cases This shouldn't be needed, but ASM has a tendency to raise various errors that are painful to handle from a customer point-of-view The error itself are described in their exception handler To address these failure, we try a number of exception handling cases to catch and reliably deal with the error. :param kwargs: :return: ] for taget[name[x]] in starred[call[name[range], parameter[constant[0], constant[30]]]] begin[:] <ast.Try object at 0x7da1b26ac760>
keyword[def] identifier[create] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] , literal[int] ): keyword[try] : keyword[return] identifier[self] . identifier[_create] (** identifier[kwargs] ) keyword[except] identifier[iControlUnexpectedHTTPError] keyword[as] identifier[ex] : keyword[if] identifier[self] . identifier[_check_exception] ( identifier[ex] ): keyword[continue] keyword[else] : keyword[raise]
def create(self, **kwargs): """Custom creation logic to handle edge cases This shouldn't be needed, but ASM has a tendency to raise various errors that are painful to handle from a customer point-of-view The error itself are described in their exception handler To address these failure, we try a number of exception handling cases to catch and reliably deal with the error. :param kwargs: :return: """ for x in range(0, 30): try: return self._create(**kwargs) # depends on [control=['try'], data=[]] except iControlUnexpectedHTTPError as ex: if self._check_exception(ex): continue # depends on [control=['if'], data=[]] else: raise # depends on [control=['except'], data=['ex']] # depends on [control=['for'], data=[]]
def record_is_valid(record): "Checks if a record is valid for processing." # No random contigs if record.CHROM.startswith('GL'): return False # Skip results with a read depth < 5. If no read depth is specified then # we have no choice but to consider this record as being valid. if 'DP' in record.INFO and record.INFO['DP'] < 5: return False return True
def function[record_is_valid, parameter[record]]: constant[Checks if a record is valid for processing.] if call[name[record].CHROM.startswith, parameter[constant[GL]]] begin[:] return[constant[False]] if <ast.BoolOp object at 0x7da1b0925540> begin[:] return[constant[False]] return[constant[True]]
keyword[def] identifier[record_is_valid] ( identifier[record] ): literal[string] keyword[if] identifier[record] . identifier[CHROM] . identifier[startswith] ( literal[string] ): keyword[return] keyword[False] keyword[if] literal[string] keyword[in] identifier[record] . identifier[INFO] keyword[and] identifier[record] . identifier[INFO] [ literal[string] ]< literal[int] : keyword[return] keyword[False] keyword[return] keyword[True]
def record_is_valid(record): """Checks if a record is valid for processing.""" # No random contigs if record.CHROM.startswith('GL'): return False # depends on [control=['if'], data=[]] # Skip results with a read depth < 5. If no read depth is specified then # we have no choice but to consider this record as being valid. if 'DP' in record.INFO and record.INFO['DP'] < 5: return False # depends on [control=['if'], data=[]] return True
def process(self, event): """ Send events as push notification via Google Cloud Messaging. Expected settings as follows: # https://developers.google.com/mobile/add WALDUR_CORE['GOOGLE_API'] = { 'NOTIFICATION_TITLE': "Waldur notification", 'Android': { 'server_key': 'AIzaSyA2_7UaVIxXfKeFvxTjQNZbrzkXG9OTCkg', }, 'iOS': { 'server_key': 'AIzaSyA34zlG_y5uHOe2FmcJKwfk2vG-3RW05vk', } } """ conf = settings.WALDUR_CORE.get('GOOGLE_API') or {} keys = conf.get(dict(self.Type.CHOICES)[self.type]) if not keys or not self.token: return endpoint = 'https://gcm-http.googleapis.com/gcm/send' headers = { 'Content-Type': 'application/json', 'Authorization': 'key=%s' % keys['server_key'], } payload = { 'to': self.token, 'notification': { 'body': event.get('message', 'New event'), 'title': conf.get('NOTIFICATION_TITLE', 'Waldur notification'), 'image': 'icon', }, 'data': { 'event': event }, } if self.type == self.Type.IOS: payload['content-available'] = '1' logger.debug('Submitting GCM push notification with headers %s, payload: %s' % (headers, payload)) requests.post(endpoint, json=payload, headers=headers)
def function[process, parameter[self, event]]: constant[ Send events as push notification via Google Cloud Messaging. Expected settings as follows: # https://developers.google.com/mobile/add WALDUR_CORE['GOOGLE_API'] = { 'NOTIFICATION_TITLE': "Waldur notification", 'Android': { 'server_key': 'AIzaSyA2_7UaVIxXfKeFvxTjQNZbrzkXG9OTCkg', }, 'iOS': { 'server_key': 'AIzaSyA34zlG_y5uHOe2FmcJKwfk2vG-3RW05vk', } } ] variable[conf] assign[=] <ast.BoolOp object at 0x7da1b0f51a80> variable[keys] assign[=] call[name[conf].get, parameter[call[call[name[dict], parameter[name[self].Type.CHOICES]]][name[self].type]]] if <ast.BoolOp object at 0x7da1b0f518a0> begin[:] return[None] variable[endpoint] assign[=] constant[https://gcm-http.googleapis.com/gcm/send] variable[headers] assign[=] dictionary[[<ast.Constant object at 0x7da1b0f508b0>, <ast.Constant object at 0x7da1b0f50b20>], [<ast.Constant object at 0x7da1b0f51a20>, <ast.BinOp object at 0x7da1b0e30130>]] variable[payload] assign[=] dictionary[[<ast.Constant object at 0x7da1b0e338b0>, <ast.Constant object at 0x7da1b0e32b00>, <ast.Constant object at 0x7da1b0e30670>], [<ast.Attribute object at 0x7da1b0e31f60>, <ast.Dict object at 0x7da1b0e33e50>, <ast.Dict object at 0x7da1b0e31c00>]] if compare[name[self].type equal[==] name[self].Type.IOS] begin[:] call[name[payload]][constant[content-available]] assign[=] constant[1] call[name[logger].debug, parameter[binary_operation[constant[Submitting GCM push notification with headers %s, payload: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b0e332e0>, <ast.Name object at 0x7da1b0e321a0>]]]]] call[name[requests].post, parameter[name[endpoint]]]
keyword[def] identifier[process] ( identifier[self] , identifier[event] ): literal[string] identifier[conf] = identifier[settings] . identifier[WALDUR_CORE] . identifier[get] ( literal[string] ) keyword[or] {} identifier[keys] = identifier[conf] . identifier[get] ( identifier[dict] ( identifier[self] . identifier[Type] . identifier[CHOICES] )[ identifier[self] . identifier[type] ]) keyword[if] keyword[not] identifier[keys] keyword[or] keyword[not] identifier[self] . identifier[token] : keyword[return] identifier[endpoint] = literal[string] identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] % identifier[keys] [ literal[string] ], } identifier[payload] ={ literal[string] : identifier[self] . identifier[token] , literal[string] :{ literal[string] : identifier[event] . identifier[get] ( literal[string] , literal[string] ), literal[string] : identifier[conf] . identifier[get] ( literal[string] , literal[string] ), literal[string] : literal[string] , }, literal[string] :{ literal[string] : identifier[event] }, } keyword[if] identifier[self] . identifier[type] == identifier[self] . identifier[Type] . identifier[IOS] : identifier[payload] [ literal[string] ]= literal[string] identifier[logger] . identifier[debug] ( literal[string] %( identifier[headers] , identifier[payload] )) identifier[requests] . identifier[post] ( identifier[endpoint] , identifier[json] = identifier[payload] , identifier[headers] = identifier[headers] )
def process(self, event): """ Send events as push notification via Google Cloud Messaging. Expected settings as follows: # https://developers.google.com/mobile/add WALDUR_CORE['GOOGLE_API'] = { 'NOTIFICATION_TITLE': "Waldur notification", 'Android': { 'server_key': 'AIzaSyA2_7UaVIxXfKeFvxTjQNZbrzkXG9OTCkg', }, 'iOS': { 'server_key': 'AIzaSyA34zlG_y5uHOe2FmcJKwfk2vG-3RW05vk', } } """ conf = settings.WALDUR_CORE.get('GOOGLE_API') or {} keys = conf.get(dict(self.Type.CHOICES)[self.type]) if not keys or not self.token: return # depends on [control=['if'], data=[]] endpoint = 'https://gcm-http.googleapis.com/gcm/send' headers = {'Content-Type': 'application/json', 'Authorization': 'key=%s' % keys['server_key']} payload = {'to': self.token, 'notification': {'body': event.get('message', 'New event'), 'title': conf.get('NOTIFICATION_TITLE', 'Waldur notification'), 'image': 'icon'}, 'data': {'event': event}} if self.type == self.Type.IOS: payload['content-available'] = '1' # depends on [control=['if'], data=[]] logger.debug('Submitting GCM push notification with headers %s, payload: %s' % (headers, payload)) requests.post(endpoint, json=payload, headers=headers)
def show_vcs_output_vcs_nodes_vcs_node_info_node_internal_ip_address(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") show_vcs = ET.Element("show_vcs") config = show_vcs output = ET.SubElement(show_vcs, "output") vcs_nodes = ET.SubElement(output, "vcs-nodes") vcs_node_info = ET.SubElement(vcs_nodes, "vcs-node-info") node_internal_ip_address = ET.SubElement(vcs_node_info, "node-internal-ip-address") node_internal_ip_address.text = kwargs.pop('node_internal_ip_address') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[show_vcs_output_vcs_nodes_vcs_node_info_node_internal_ip_address, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[show_vcs] assign[=] call[name[ET].Element, parameter[constant[show_vcs]]] variable[config] assign[=] name[show_vcs] variable[output] assign[=] call[name[ET].SubElement, parameter[name[show_vcs], constant[output]]] variable[vcs_nodes] assign[=] call[name[ET].SubElement, parameter[name[output], constant[vcs-nodes]]] variable[vcs_node_info] assign[=] call[name[ET].SubElement, parameter[name[vcs_nodes], constant[vcs-node-info]]] variable[node_internal_ip_address] assign[=] call[name[ET].SubElement, parameter[name[vcs_node_info], constant[node-internal-ip-address]]] name[node_internal_ip_address].text assign[=] call[name[kwargs].pop, parameter[constant[node_internal_ip_address]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[show_vcs_output_vcs_nodes_vcs_node_info_node_internal_ip_address] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[show_vcs] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[show_vcs] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[show_vcs] , literal[string] ) identifier[vcs_nodes] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[vcs_node_info] = identifier[ET] . identifier[SubElement] ( identifier[vcs_nodes] , literal[string] ) identifier[node_internal_ip_address] = identifier[ET] . identifier[SubElement] ( identifier[vcs_node_info] , literal[string] ) identifier[node_internal_ip_address] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def show_vcs_output_vcs_nodes_vcs_node_info_node_internal_ip_address(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') show_vcs = ET.Element('show_vcs') config = show_vcs output = ET.SubElement(show_vcs, 'output') vcs_nodes = ET.SubElement(output, 'vcs-nodes') vcs_node_info = ET.SubElement(vcs_nodes, 'vcs-node-info') node_internal_ip_address = ET.SubElement(vcs_node_info, 'node-internal-ip-address') node_internal_ip_address.text = kwargs.pop('node_internal_ip_address') callback = kwargs.pop('callback', self._callback) return callback(config)
def incidence_matrix(network, branch_components=None, busorder=None): """ Construct a sparse incidence matrix (directed) Parameters ---------- branch_components : iterable sublist of `branch_components` Buses connected by any of the selected branches are adjacent (default: branch_components (network) or passive_branch_components (sub_network)) busorder : pd.Index subset of network.buses.index Basis to use for the matrix representation of the adjacency matrix (default: buses.index (network) or buses_i() (sub_network)) Returns ------- incidence_matrix : sp.sparse.csr_matrix Directed incidence matrix """ from . import components if isinstance(network, components.Network): if branch_components is None: branch_components = network.branch_components if busorder is None: busorder = network.buses.index elif isinstance(network, components.SubNetwork): if branch_components is None: branch_components = network.network.passive_branch_components if busorder is None: busorder = network.buses_i() else: raise TypeError(" must be called with a Network or a SubNetwork") no_buses = len(busorder) no_branches = 0 bus0_inds = [] bus1_inds = [] for c in network.iterate_components(branch_components): if c.ind is None: sel = slice(None) no_branches += len(c.df) else: sel = c.ind no_branches += len(c.ind) bus0_inds.append(busorder.get_indexer(c.df.loc[sel, "bus0"])) bus1_inds.append(busorder.get_indexer(c.df.loc[sel, "bus1"])) bus0_inds = np.concatenate(bus0_inds) bus1_inds = np.concatenate(bus1_inds) return sp.sparse.csr_matrix((np.r_[np.ones(no_branches), -np.ones(no_branches)], (np.r_[bus0_inds, bus1_inds], np.r_[:no_branches, :no_branches])), (no_buses, no_branches))
def function[incidence_matrix, parameter[network, branch_components, busorder]]: constant[ Construct a sparse incidence matrix (directed) Parameters ---------- branch_components : iterable sublist of `branch_components` Buses connected by any of the selected branches are adjacent (default: branch_components (network) or passive_branch_components (sub_network)) busorder : pd.Index subset of network.buses.index Basis to use for the matrix representation of the adjacency matrix (default: buses.index (network) or buses_i() (sub_network)) Returns ------- incidence_matrix : sp.sparse.csr_matrix Directed incidence matrix ] from relative_module[None] import module[components] if call[name[isinstance], parameter[name[network], name[components].Network]] begin[:] if compare[name[branch_components] is constant[None]] begin[:] variable[branch_components] assign[=] name[network].branch_components if compare[name[busorder] is constant[None]] begin[:] variable[busorder] assign[=] name[network].buses.index variable[no_buses] assign[=] call[name[len], parameter[name[busorder]]] variable[no_branches] assign[=] constant[0] variable[bus0_inds] assign[=] list[[]] variable[bus1_inds] assign[=] list[[]] for taget[name[c]] in starred[call[name[network].iterate_components, parameter[name[branch_components]]]] begin[:] if compare[name[c].ind is constant[None]] begin[:] variable[sel] assign[=] call[name[slice], parameter[constant[None]]] <ast.AugAssign object at 0x7da207f9bb50> call[name[bus0_inds].append, parameter[call[name[busorder].get_indexer, parameter[call[name[c].df.loc][tuple[[<ast.Name object at 0x7da207f9ba30>, <ast.Constant object at 0x7da207f9b7c0>]]]]]]] call[name[bus1_inds].append, parameter[call[name[busorder].get_indexer, parameter[call[name[c].df.loc][tuple[[<ast.Name object at 0x7da207f9aa10>, <ast.Constant object at 0x7da207f98910>]]]]]]] variable[bus0_inds] assign[=] call[name[np].concatenate, parameter[name[bus0_inds]]] variable[bus1_inds] assign[=] 
call[name[np].concatenate, parameter[name[bus1_inds]]] return[call[name[sp].sparse.csr_matrix, parameter[tuple[[<ast.Subscript object at 0x7da207f9b040>, <ast.Tuple object at 0x7da207f99ed0>]], tuple[[<ast.Name object at 0x7da20e960bb0>, <ast.Name object at 0x7da20e960280>]]]]]
keyword[def] identifier[incidence_matrix] ( identifier[network] , identifier[branch_components] = keyword[None] , identifier[busorder] = keyword[None] ): literal[string] keyword[from] . keyword[import] identifier[components] keyword[if] identifier[isinstance] ( identifier[network] , identifier[components] . identifier[Network] ): keyword[if] identifier[branch_components] keyword[is] keyword[None] : identifier[branch_components] = identifier[network] . identifier[branch_components] keyword[if] identifier[busorder] keyword[is] keyword[None] : identifier[busorder] = identifier[network] . identifier[buses] . identifier[index] keyword[elif] identifier[isinstance] ( identifier[network] , identifier[components] . identifier[SubNetwork] ): keyword[if] identifier[branch_components] keyword[is] keyword[None] : identifier[branch_components] = identifier[network] . identifier[network] . identifier[passive_branch_components] keyword[if] identifier[busorder] keyword[is] keyword[None] : identifier[busorder] = identifier[network] . identifier[buses_i] () keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) identifier[no_buses] = identifier[len] ( identifier[busorder] ) identifier[no_branches] = literal[int] identifier[bus0_inds] =[] identifier[bus1_inds] =[] keyword[for] identifier[c] keyword[in] identifier[network] . identifier[iterate_components] ( identifier[branch_components] ): keyword[if] identifier[c] . identifier[ind] keyword[is] keyword[None] : identifier[sel] = identifier[slice] ( keyword[None] ) identifier[no_branches] += identifier[len] ( identifier[c] . identifier[df] ) keyword[else] : identifier[sel] = identifier[c] . identifier[ind] identifier[no_branches] += identifier[len] ( identifier[c] . identifier[ind] ) identifier[bus0_inds] . identifier[append] ( identifier[busorder] . identifier[get_indexer] ( identifier[c] . identifier[df] . identifier[loc] [ identifier[sel] , literal[string] ])) identifier[bus1_inds] . 
identifier[append] ( identifier[busorder] . identifier[get_indexer] ( identifier[c] . identifier[df] . identifier[loc] [ identifier[sel] , literal[string] ])) identifier[bus0_inds] = identifier[np] . identifier[concatenate] ( identifier[bus0_inds] ) identifier[bus1_inds] = identifier[np] . identifier[concatenate] ( identifier[bus1_inds] ) keyword[return] identifier[sp] . identifier[sparse] . identifier[csr_matrix] (( identifier[np] . identifier[r_] [ identifier[np] . identifier[ones] ( identifier[no_branches] ),- identifier[np] . identifier[ones] ( identifier[no_branches] )], ( identifier[np] . identifier[r_] [ identifier[bus0_inds] , identifier[bus1_inds] ], identifier[np] . identifier[r_] [: identifier[no_branches] ,: identifier[no_branches] ])), ( identifier[no_buses] , identifier[no_branches] ))
def incidence_matrix(network, branch_components=None, busorder=None): """ Construct a sparse incidence matrix (directed) Parameters ---------- branch_components : iterable sublist of `branch_components` Buses connected by any of the selected branches are adjacent (default: branch_components (network) or passive_branch_components (sub_network)) busorder : pd.Index subset of network.buses.index Basis to use for the matrix representation of the adjacency matrix (default: buses.index (network) or buses_i() (sub_network)) Returns ------- incidence_matrix : sp.sparse.csr_matrix Directed incidence matrix """ from . import components if isinstance(network, components.Network): if branch_components is None: branch_components = network.branch_components # depends on [control=['if'], data=['branch_components']] if busorder is None: busorder = network.buses.index # depends on [control=['if'], data=['busorder']] # depends on [control=['if'], data=[]] elif isinstance(network, components.SubNetwork): if branch_components is None: branch_components = network.network.passive_branch_components # depends on [control=['if'], data=['branch_components']] if busorder is None: busorder = network.buses_i() # depends on [control=['if'], data=['busorder']] # depends on [control=['if'], data=[]] else: raise TypeError(' must be called with a Network or a SubNetwork') no_buses = len(busorder) no_branches = 0 bus0_inds = [] bus1_inds = [] for c in network.iterate_components(branch_components): if c.ind is None: sel = slice(None) no_branches += len(c.df) # depends on [control=['if'], data=[]] else: sel = c.ind no_branches += len(c.ind) bus0_inds.append(busorder.get_indexer(c.df.loc[sel, 'bus0'])) bus1_inds.append(busorder.get_indexer(c.df.loc[sel, 'bus1'])) # depends on [control=['for'], data=['c']] bus0_inds = np.concatenate(bus0_inds) bus1_inds = np.concatenate(bus1_inds) return sp.sparse.csr_matrix((np.r_[np.ones(no_branches), -np.ones(no_branches)], (np.r_[bus0_inds, bus1_inds], 
np.r_[:no_branches, :no_branches])), (no_buses, no_branches))
def default(self, value): """Return dictionary instance from a message object. Args: value: Value to get dictionary for. If not encodable, will call superclasses default method. """ if isinstance(value, messages.Enum): return str(value) if six.PY3 and isinstance(value, bytes): return value.decode('utf8') if isinstance(value, messages.Message): result = {} for field in value.all_fields(): item = value.get_assigned_value(field.name) if item not in (None, [], ()): result[field.name] = ( self.__protojson_protocol.encode_field(field, item)) # Handle unrecognized fields, so they're included when a message is # decoded then encoded. for unknown_key in value.all_unrecognized_fields(): unrecognized_field, _ = value.get_unrecognized_field_info( unknown_key) # Unknown fields are not encoded as they should have been # processed before we get to here. result[unknown_key] = unrecognized_field return result return super(MessageJSONEncoder, self).default(value)
def function[default, parameter[self, value]]: constant[Return dictionary instance from a message object. Args: value: Value to get dictionary for. If not encodable, will call superclasses default method. ] if call[name[isinstance], parameter[name[value], name[messages].Enum]] begin[:] return[call[name[str], parameter[name[value]]]] if <ast.BoolOp object at 0x7da1b07f7100> begin[:] return[call[name[value].decode, parameter[constant[utf8]]]] if call[name[isinstance], parameter[name[value], name[messages].Message]] begin[:] variable[result] assign[=] dictionary[[], []] for taget[name[field]] in starred[call[name[value].all_fields, parameter[]]] begin[:] variable[item] assign[=] call[name[value].get_assigned_value, parameter[name[field].name]] if compare[name[item] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b07f7670>, <ast.List object at 0x7da1b07f76d0>, <ast.Tuple object at 0x7da1b07f8520>]]] begin[:] call[name[result]][name[field].name] assign[=] call[name[self].__protojson_protocol.encode_field, parameter[name[field], name[item]]] for taget[name[unknown_key]] in starred[call[name[value].all_unrecognized_fields, parameter[]]] begin[:] <ast.Tuple object at 0x7da1b07f9a80> assign[=] call[name[value].get_unrecognized_field_info, parameter[name[unknown_key]]] call[name[result]][name[unknown_key]] assign[=] name[unrecognized_field] return[name[result]] return[call[call[name[super], parameter[name[MessageJSONEncoder], name[self]]].default, parameter[name[value]]]]
keyword[def] identifier[default] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[isinstance] ( identifier[value] , identifier[messages] . identifier[Enum] ): keyword[return] identifier[str] ( identifier[value] ) keyword[if] identifier[six] . identifier[PY3] keyword[and] identifier[isinstance] ( identifier[value] , identifier[bytes] ): keyword[return] identifier[value] . identifier[decode] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[value] , identifier[messages] . identifier[Message] ): identifier[result] ={} keyword[for] identifier[field] keyword[in] identifier[value] . identifier[all_fields] (): identifier[item] = identifier[value] . identifier[get_assigned_value] ( identifier[field] . identifier[name] ) keyword[if] identifier[item] keyword[not] keyword[in] ( keyword[None] ,[],()): identifier[result] [ identifier[field] . identifier[name] ]=( identifier[self] . identifier[__protojson_protocol] . identifier[encode_field] ( identifier[field] , identifier[item] )) keyword[for] identifier[unknown_key] keyword[in] identifier[value] . identifier[all_unrecognized_fields] (): identifier[unrecognized_field] , identifier[_] = identifier[value] . identifier[get_unrecognized_field_info] ( identifier[unknown_key] ) identifier[result] [ identifier[unknown_key] ]= identifier[unrecognized_field] keyword[return] identifier[result] keyword[return] identifier[super] ( identifier[MessageJSONEncoder] , identifier[self] ). identifier[default] ( identifier[value] )
def default(self, value): """Return dictionary instance from a message object. Args: value: Value to get dictionary for. If not encodable, will call superclasses default method. """ if isinstance(value, messages.Enum): return str(value) # depends on [control=['if'], data=[]] if six.PY3 and isinstance(value, bytes): return value.decode('utf8') # depends on [control=['if'], data=[]] if isinstance(value, messages.Message): result = {} for field in value.all_fields(): item = value.get_assigned_value(field.name) if item not in (None, [], ()): result[field.name] = self.__protojson_protocol.encode_field(field, item) # depends on [control=['if'], data=['item']] # depends on [control=['for'], data=['field']] # Handle unrecognized fields, so they're included when a message is # decoded then encoded. for unknown_key in value.all_unrecognized_fields(): (unrecognized_field, _) = value.get_unrecognized_field_info(unknown_key) # Unknown fields are not encoded as they should have been # processed before we get to here. result[unknown_key] = unrecognized_field # depends on [control=['for'], data=['unknown_key']] return result # depends on [control=['if'], data=[]] return super(MessageJSONEncoder, self).default(value)
def add_missing(C): """Add arrays with zeros for missing Wilson coefficient keys""" C_out = C.copy() for k in (set(WC_keys) - set(C.keys())): C_out[k] = np.zeros(C_keys_shape[k]) return C_out
def function[add_missing, parameter[C]]: constant[Add arrays with zeros for missing Wilson coefficient keys] variable[C_out] assign[=] call[name[C].copy, parameter[]] for taget[name[k]] in starred[binary_operation[call[name[set], parameter[name[WC_keys]]] - call[name[set], parameter[call[name[C].keys, parameter[]]]]]] begin[:] call[name[C_out]][name[k]] assign[=] call[name[np].zeros, parameter[call[name[C_keys_shape]][name[k]]]] return[name[C_out]]
keyword[def] identifier[add_missing] ( identifier[C] ): literal[string] identifier[C_out] = identifier[C] . identifier[copy] () keyword[for] identifier[k] keyword[in] ( identifier[set] ( identifier[WC_keys] )- identifier[set] ( identifier[C] . identifier[keys] ())): identifier[C_out] [ identifier[k] ]= identifier[np] . identifier[zeros] ( identifier[C_keys_shape] [ identifier[k] ]) keyword[return] identifier[C_out]
def add_missing(C): """Add arrays with zeros for missing Wilson coefficient keys""" C_out = C.copy() for k in set(WC_keys) - set(C.keys()): C_out[k] = np.zeros(C_keys_shape[k]) # depends on [control=['for'], data=['k']] return C_out
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) if encoding == 'identity': encoding = None # te encoding te_encoding = payload.headers.get( CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}' ''.format(te_encoding)) if te_encoding == 'binary': te_encoding = None # size size = payload.size if size is not None and not (encoding or te_encoding): payload.headers[CONTENT_LENGTH] = str(size) self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
def function[append_payload, parameter[self, payload]]: constant[Adds a new body part to multipart writer.] variable[encoding] assign[=] call[call[name[payload].headers.get, parameter[name[CONTENT_ENCODING], constant[]]].lower, parameter[]] if <ast.BoolOp object at 0x7da1b1f42f50> begin[:] <ast.Raise object at 0x7da1b1f43070> if compare[name[encoding] equal[==] constant[identity]] begin[:] variable[encoding] assign[=] constant[None] variable[te_encoding] assign[=] call[call[name[payload].headers.get, parameter[name[CONTENT_TRANSFER_ENCODING], constant[]]].lower, parameter[]] if compare[name[te_encoding] <ast.NotIn object at 0x7da2590d7190> tuple[[<ast.Constant object at 0x7da1b1f40880>, <ast.Constant object at 0x7da1b1f43580>, <ast.Constant object at 0x7da1b1f40970>, <ast.Constant object at 0x7da1b1f41e70>]]] begin[:] <ast.Raise object at 0x7da1b1f43d30> if compare[name[te_encoding] equal[==] constant[binary]] begin[:] variable[te_encoding] assign[=] constant[None] variable[size] assign[=] name[payload].size if <ast.BoolOp object at 0x7da1b1f42920> begin[:] call[name[payload].headers][name[CONTENT_LENGTH]] assign[=] call[name[str], parameter[name[size]]] call[name[self]._parts.append, parameter[tuple[[<ast.Name object at 0x7da1b1f410c0>, <ast.Name object at 0x7da1b1f419f0>, <ast.Name object at 0x7da1b1f430d0>]]]] return[name[payload]]
keyword[def] identifier[append_payload] ( identifier[self] , identifier[payload] : identifier[Payload] )-> identifier[Payload] : literal[string] identifier[encoding] = identifier[payload] . identifier[headers] . identifier[get] ( identifier[CONTENT_ENCODING] , literal[string] ). identifier[lower] () keyword[if] identifier[encoding] keyword[and] identifier[encoding] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] ): keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[encoding] )) keyword[if] identifier[encoding] == literal[string] : identifier[encoding] = keyword[None] identifier[te_encoding] = identifier[payload] . identifier[headers] . identifier[get] ( identifier[CONTENT_TRANSFER_ENCODING] , literal[string] ). identifier[lower] () keyword[if] identifier[te_encoding] keyword[not] keyword[in] ( literal[string] , literal[string] , literal[string] , literal[string] ): keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] . identifier[format] ( identifier[te_encoding] )) keyword[if] identifier[te_encoding] == literal[string] : identifier[te_encoding] = keyword[None] identifier[size] = identifier[payload] . identifier[size] keyword[if] identifier[size] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] ( identifier[encoding] keyword[or] identifier[te_encoding] ): identifier[payload] . identifier[headers] [ identifier[CONTENT_LENGTH] ]= identifier[str] ( identifier[size] ) identifier[self] . identifier[_parts] . identifier[append] (( identifier[payload] , identifier[encoding] , identifier[te_encoding] )) keyword[return] identifier[payload]
def append_payload(self, payload: Payload) -> Payload: """Adds a new body part to multipart writer.""" # compression encoding = payload.headers.get(CONTENT_ENCODING, '').lower() # type: Optional[str] # noqa if encoding and encoding not in ('deflate', 'gzip', 'identity'): raise RuntimeError('unknown content encoding: {}'.format(encoding)) # depends on [control=['if'], data=[]] if encoding == 'identity': encoding = None # depends on [control=['if'], data=['encoding']] # te encoding te_encoding = payload.headers.get(CONTENT_TRANSFER_ENCODING, '').lower() # type: Optional[str] # noqa if te_encoding not in ('', 'base64', 'quoted-printable', 'binary'): raise RuntimeError('unknown content transfer encoding: {}'.format(te_encoding)) # depends on [control=['if'], data=['te_encoding']] if te_encoding == 'binary': te_encoding = None # depends on [control=['if'], data=['te_encoding']] # size size = payload.size if size is not None and (not (encoding or te_encoding)): payload.headers[CONTENT_LENGTH] = str(size) # depends on [control=['if'], data=[]] self._parts.append((payload, encoding, te_encoding)) # type: ignore return payload
def apply_config(self, applicator): """ Replace any config tokens in the file's path with values from the config. """ if type(self._fpath) == str: self._fpath = applicator.apply(self._fpath)
def function[apply_config, parameter[self, applicator]]: constant[ Replace any config tokens in the file's path with values from the config. ] if compare[call[name[type], parameter[name[self]._fpath]] equal[==] name[str]] begin[:] name[self]._fpath assign[=] call[name[applicator].apply, parameter[name[self]._fpath]]
keyword[def] identifier[apply_config] ( identifier[self] , identifier[applicator] ): literal[string] keyword[if] identifier[type] ( identifier[self] . identifier[_fpath] )== identifier[str] : identifier[self] . identifier[_fpath] = identifier[applicator] . identifier[apply] ( identifier[self] . identifier[_fpath] )
def apply_config(self, applicator): """ Replace any config tokens in the file's path with values from the config. """ if type(self._fpath) == str: self._fpath = applicator.apply(self._fpath) # depends on [control=['if'], data=[]]
def _send_rtm_message(self, channel_id, text): """Send a Slack message to a channel over RTM. :param channel_id: a slack channel id. :param text: a slack message. Serverside formatting is done in a similar way to normal user message; see `Slack's docs <https://api.slack.com/docs/formatting>`__. """ message = { 'id': self._current_message_id, 'type': 'message', 'channel': channel_id, 'text': text, } self.ws.send(json.dumps(message)) self._current_message_id += 1
def function[_send_rtm_message, parameter[self, channel_id, text]]: constant[Send a Slack message to a channel over RTM. :param channel_id: a slack channel id. :param text: a slack message. Serverside formatting is done in a similar way to normal user message; see `Slack's docs <https://api.slack.com/docs/formatting>`__. ] variable[message] assign[=] dictionary[[<ast.Constant object at 0x7da1b191d6f0>, <ast.Constant object at 0x7da1b191ca30>, <ast.Constant object at 0x7da1b191eb00>, <ast.Constant object at 0x7da1b191df30>], [<ast.Attribute object at 0x7da1b191d570>, <ast.Constant object at 0x7da1b191c130>, <ast.Name object at 0x7da1b191d330>, <ast.Name object at 0x7da1b191fc40>]] call[name[self].ws.send, parameter[call[name[json].dumps, parameter[name[message]]]]] <ast.AugAssign object at 0x7da1b191c4c0>
keyword[def] identifier[_send_rtm_message] ( identifier[self] , identifier[channel_id] , identifier[text] ): literal[string] identifier[message] ={ literal[string] : identifier[self] . identifier[_current_message_id] , literal[string] : literal[string] , literal[string] : identifier[channel_id] , literal[string] : identifier[text] , } identifier[self] . identifier[ws] . identifier[send] ( identifier[json] . identifier[dumps] ( identifier[message] )) identifier[self] . identifier[_current_message_id] += literal[int]
def _send_rtm_message(self, channel_id, text): """Send a Slack message to a channel over RTM. :param channel_id: a slack channel id. :param text: a slack message. Serverside formatting is done in a similar way to normal user message; see `Slack's docs <https://api.slack.com/docs/formatting>`__. """ message = {'id': self._current_message_id, 'type': 'message', 'channel': channel_id, 'text': text} self.ws.send(json.dumps(message)) self._current_message_id += 1
def transformations(self, type=None, failed=False): """ Get Transformations done by this Node. type must be a type of Transformation (defaults to Transformation) Failed can be True, False or "all" """ if failed not in ["all", False, True]: raise ValueError("{} is not a valid transmission failed".format(failed)) if type is None: type = Transformation if failed == "all": return type.query.filter_by(node_id=self.id).all() else: return type.query.filter_by(node_id=self.id, failed=failed).all()
def function[transformations, parameter[self, type, failed]]: constant[ Get Transformations done by this Node. type must be a type of Transformation (defaults to Transformation) Failed can be True, False or "all" ] if compare[name[failed] <ast.NotIn object at 0x7da2590d7190> list[[<ast.Constant object at 0x7da1b030a8f0>, <ast.Constant object at 0x7da1b03090c0>, <ast.Constant object at 0x7da1b03087f0>]]] begin[:] <ast.Raise object at 0x7da1b030a0b0> if compare[name[type] is constant[None]] begin[:] variable[type] assign[=] name[Transformation] if compare[name[failed] equal[==] constant[all]] begin[:] return[call[call[name[type].query.filter_by, parameter[]].all, parameter[]]]
keyword[def] identifier[transformations] ( identifier[self] , identifier[type] = keyword[None] , identifier[failed] = keyword[False] ): literal[string] keyword[if] identifier[failed] keyword[not] keyword[in] [ literal[string] , keyword[False] , keyword[True] ]: keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[failed] )) keyword[if] identifier[type] keyword[is] keyword[None] : identifier[type] = identifier[Transformation] keyword[if] identifier[failed] == literal[string] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[node_id] = identifier[self] . identifier[id] ). identifier[all] () keyword[else] : keyword[return] identifier[type] . identifier[query] . identifier[filter_by] ( identifier[node_id] = identifier[self] . identifier[id] , identifier[failed] = identifier[failed] ). identifier[all] ()
def transformations(self, type=None, failed=False): """ Get Transformations done by this Node. type must be a type of Transformation (defaults to Transformation) Failed can be True, False or "all" """ if failed not in ['all', False, True]: raise ValueError('{} is not a valid transmission failed'.format(failed)) # depends on [control=['if'], data=['failed']] if type is None: type = Transformation # depends on [control=['if'], data=['type']] if failed == 'all': return type.query.filter_by(node_id=self.id).all() # depends on [control=['if'], data=[]] else: return type.query.filter_by(node_id=self.id, failed=failed).all()
def _close_connection(self, frame_in): """Connection Close. :param specification.Connection.Close frame_in: Amqp frame. :return: """ self._set_connection_state(Stateful.CLOSED) if frame_in.reply_code != 200: reply_text = try_utf8_decode(frame_in.reply_text) message = ( 'Connection was closed by remote server: %s' % reply_text ) exception = AMQPConnectionError(message, reply_code=frame_in.reply_code) self._connection.exceptions.append(exception)
def function[_close_connection, parameter[self, frame_in]]: constant[Connection Close. :param specification.Connection.Close frame_in: Amqp frame. :return: ] call[name[self]._set_connection_state, parameter[name[Stateful].CLOSED]] if compare[name[frame_in].reply_code not_equal[!=] constant[200]] begin[:] variable[reply_text] assign[=] call[name[try_utf8_decode], parameter[name[frame_in].reply_text]] variable[message] assign[=] binary_operation[constant[Connection was closed by remote server: %s] <ast.Mod object at 0x7da2590d6920> name[reply_text]] variable[exception] assign[=] call[name[AMQPConnectionError], parameter[name[message]]] call[name[self]._connection.exceptions.append, parameter[name[exception]]]
keyword[def] identifier[_close_connection] ( identifier[self] , identifier[frame_in] ): literal[string] identifier[self] . identifier[_set_connection_state] ( identifier[Stateful] . identifier[CLOSED] ) keyword[if] identifier[frame_in] . identifier[reply_code] != literal[int] : identifier[reply_text] = identifier[try_utf8_decode] ( identifier[frame_in] . identifier[reply_text] ) identifier[message] =( literal[string] % identifier[reply_text] ) identifier[exception] = identifier[AMQPConnectionError] ( identifier[message] , identifier[reply_code] = identifier[frame_in] . identifier[reply_code] ) identifier[self] . identifier[_connection] . identifier[exceptions] . identifier[append] ( identifier[exception] )
def _close_connection(self, frame_in): """Connection Close. :param specification.Connection.Close frame_in: Amqp frame. :return: """ self._set_connection_state(Stateful.CLOSED) if frame_in.reply_code != 200: reply_text = try_utf8_decode(frame_in.reply_text) message = 'Connection was closed by remote server: %s' % reply_text exception = AMQPConnectionError(message, reply_code=frame_in.reply_code) self._connection.exceptions.append(exception) # depends on [control=['if'], data=[]]
async def ban(self, user, *, reason=None, delete_message_days=1): """|coro| Bans a user from the guild. The user must meet the :class:`abc.Snowflake` abc. You must have the :attr:`~Permissions.ban_members` permission to do this. Parameters ----------- user: :class:`abc.Snowflake` The user to ban from their guild. delete_message_days: :class:`int` The number of days worth of messages to delete from the user in the guild. The minimum is 0 and the maximum is 7. reason: Optional[:class:`str`] The reason the user got banned. Raises ------- Forbidden You do not have the proper permissions to ban. HTTPException Banning failed. """ await self._state.http.ban(user.id, self.id, delete_message_days, reason=reason)
<ast.AsyncFunctionDef object at 0x7da1b1fe5180>
keyword[async] keyword[def] identifier[ban] ( identifier[self] , identifier[user] ,*, identifier[reason] = keyword[None] , identifier[delete_message_days] = literal[int] ): literal[string] keyword[await] identifier[self] . identifier[_state] . identifier[http] . identifier[ban] ( identifier[user] . identifier[id] , identifier[self] . identifier[id] , identifier[delete_message_days] , identifier[reason] = identifier[reason] )
async def ban(self, user, *, reason=None, delete_message_days=1): """|coro| Bans a user from the guild. The user must meet the :class:`abc.Snowflake` abc. You must have the :attr:`~Permissions.ban_members` permission to do this. Parameters ----------- user: :class:`abc.Snowflake` The user to ban from their guild. delete_message_days: :class:`int` The number of days worth of messages to delete from the user in the guild. The minimum is 0 and the maximum is 7. reason: Optional[:class:`str`] The reason the user got banned. Raises ------- Forbidden You do not have the proper permissions to ban. HTTPException Banning failed. """ await self._state.http.ban(user.id, self.id, delete_message_days, reason=reason)
def _construct_pillar(top_dir, follow_dir_links, keep_newline=False, render_default=None, renderer_blacklist=None, renderer_whitelist=None, template=False): ''' Construct pillar from file tree. ''' pillar = {} renderers = salt.loader.render(__opts__, __salt__) norm_top_dir = os.path.normpath(top_dir) for dir_path, dir_names, file_names in salt.utils.path.os_walk( top_dir, topdown=True, onerror=_on_walk_error, followlinks=follow_dir_links): # Find current path in pillar tree pillar_node = pillar norm_dir_path = os.path.normpath(dir_path) prefix = os.path.relpath(norm_dir_path, norm_top_dir) if norm_dir_path != norm_top_dir: path_parts = [] head = prefix while head: head, tail = os.path.split(head) path_parts.insert(0, tail) while path_parts: pillar_node = pillar_node[path_parts.pop(0)] # Create dicts for subdirectories for dir_name in dir_names: pillar_node[dir_name] = {} # Add files for file_name in file_names: file_path = os.path.join(dir_path, file_name) if not os.path.isfile(file_path): log.error('file_tree: %s: not a regular file', file_path) continue contents = b'' try: with salt.utils.files.fopen(file_path, 'rb') as fhr: buf = fhr.read(__opts__['file_buffer_size']) while buf: contents += buf buf = fhr.read(__opts__['file_buffer_size']) if contents.endswith(b'\n') \ and _check_newline(prefix, file_name, keep_newline): contents = contents[:-1] except (IOError, OSError) as exc: log.error('file_tree: Error reading %s: %s', file_path, exc.strerror) else: data = contents if template is True: data = salt.template.compile_template_str(template=salt.utils.stringutils.to_unicode(contents), renderers=renderers, default=render_default, blacklist=renderer_blacklist, whitelist=renderer_whitelist) if salt.utils.stringio.is_readable(data): pillar_node[file_name] = data.getvalue() else: pillar_node[file_name] = data return pillar
def function[_construct_pillar, parameter[top_dir, follow_dir_links, keep_newline, render_default, renderer_blacklist, renderer_whitelist, template]]: constant[ Construct pillar from file tree. ] variable[pillar] assign[=] dictionary[[], []] variable[renderers] assign[=] call[name[salt].loader.render, parameter[name[__opts__], name[__salt__]]] variable[norm_top_dir] assign[=] call[name[os].path.normpath, parameter[name[top_dir]]] for taget[tuple[[<ast.Name object at 0x7da1b1c211e0>, <ast.Name object at 0x7da1b1c23eb0>, <ast.Name object at 0x7da1b1c21ae0>]]] in starred[call[name[salt].utils.path.os_walk, parameter[name[top_dir]]]] begin[:] variable[pillar_node] assign[=] name[pillar] variable[norm_dir_path] assign[=] call[name[os].path.normpath, parameter[name[dir_path]]] variable[prefix] assign[=] call[name[os].path.relpath, parameter[name[norm_dir_path], name[norm_top_dir]]] if compare[name[norm_dir_path] not_equal[!=] name[norm_top_dir]] begin[:] variable[path_parts] assign[=] list[[]] variable[head] assign[=] name[prefix] while name[head] begin[:] <ast.Tuple object at 0x7da1b1c1b580> assign[=] call[name[os].path.split, parameter[name[head]]] call[name[path_parts].insert, parameter[constant[0], name[tail]]] while name[path_parts] begin[:] variable[pillar_node] assign[=] call[name[pillar_node]][call[name[path_parts].pop, parameter[constant[0]]]] for taget[name[dir_name]] in starred[name[dir_names]] begin[:] call[name[pillar_node]][name[dir_name]] assign[=] dictionary[[], []] for taget[name[file_name]] in starred[name[file_names]] begin[:] variable[file_path] assign[=] call[name[os].path.join, parameter[name[dir_path], name[file_name]]] if <ast.UnaryOp object at 0x7da1b1c1b040> begin[:] call[name[log].error, parameter[constant[file_tree: %s: not a regular file], name[file_path]]] continue variable[contents] assign[=] constant[b''] <ast.Try object at 0x7da1b1c1ac20> return[name[pillar]]
keyword[def] identifier[_construct_pillar] ( identifier[top_dir] , identifier[follow_dir_links] , identifier[keep_newline] = keyword[False] , identifier[render_default] = keyword[None] , identifier[renderer_blacklist] = keyword[None] , identifier[renderer_whitelist] = keyword[None] , identifier[template] = keyword[False] ): literal[string] identifier[pillar] ={} identifier[renderers] = identifier[salt] . identifier[loader] . identifier[render] ( identifier[__opts__] , identifier[__salt__] ) identifier[norm_top_dir] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[top_dir] ) keyword[for] identifier[dir_path] , identifier[dir_names] , identifier[file_names] keyword[in] identifier[salt] . identifier[utils] . identifier[path] . identifier[os_walk] ( identifier[top_dir] , identifier[topdown] = keyword[True] , identifier[onerror] = identifier[_on_walk_error] , identifier[followlinks] = identifier[follow_dir_links] ): identifier[pillar_node] = identifier[pillar] identifier[norm_dir_path] = identifier[os] . identifier[path] . identifier[normpath] ( identifier[dir_path] ) identifier[prefix] = identifier[os] . identifier[path] . identifier[relpath] ( identifier[norm_dir_path] , identifier[norm_top_dir] ) keyword[if] identifier[norm_dir_path] != identifier[norm_top_dir] : identifier[path_parts] =[] identifier[head] = identifier[prefix] keyword[while] identifier[head] : identifier[head] , identifier[tail] = identifier[os] . identifier[path] . identifier[split] ( identifier[head] ) identifier[path_parts] . identifier[insert] ( literal[int] , identifier[tail] ) keyword[while] identifier[path_parts] : identifier[pillar_node] = identifier[pillar_node] [ identifier[path_parts] . identifier[pop] ( literal[int] )] keyword[for] identifier[dir_name] keyword[in] identifier[dir_names] : identifier[pillar_node] [ identifier[dir_name] ]={} keyword[for] identifier[file_name] keyword[in] identifier[file_names] : identifier[file_path] = identifier[os] . 
identifier[path] . identifier[join] ( identifier[dir_path] , identifier[file_name] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isfile] ( identifier[file_path] ): identifier[log] . identifier[error] ( literal[string] , identifier[file_path] ) keyword[continue] identifier[contents] = literal[string] keyword[try] : keyword[with] identifier[salt] . identifier[utils] . identifier[files] . identifier[fopen] ( identifier[file_path] , literal[string] ) keyword[as] identifier[fhr] : identifier[buf] = identifier[fhr] . identifier[read] ( identifier[__opts__] [ literal[string] ]) keyword[while] identifier[buf] : identifier[contents] += identifier[buf] identifier[buf] = identifier[fhr] . identifier[read] ( identifier[__opts__] [ literal[string] ]) keyword[if] identifier[contents] . identifier[endswith] ( literal[string] ) keyword[and] identifier[_check_newline] ( identifier[prefix] , identifier[file_name] , identifier[keep_newline] ): identifier[contents] = identifier[contents] [:- literal[int] ] keyword[except] ( identifier[IOError] , identifier[OSError] ) keyword[as] identifier[exc] : identifier[log] . identifier[error] ( literal[string] , identifier[file_path] , identifier[exc] . identifier[strerror] ) keyword[else] : identifier[data] = identifier[contents] keyword[if] identifier[template] keyword[is] keyword[True] : identifier[data] = identifier[salt] . identifier[template] . identifier[compile_template_str] ( identifier[template] = identifier[salt] . identifier[utils] . identifier[stringutils] . identifier[to_unicode] ( identifier[contents] ), identifier[renderers] = identifier[renderers] , identifier[default] = identifier[render_default] , identifier[blacklist] = identifier[renderer_blacklist] , identifier[whitelist] = identifier[renderer_whitelist] ) keyword[if] identifier[salt] . identifier[utils] . identifier[stringio] . identifier[is_readable] ( identifier[data] ): identifier[pillar_node] [ identifier[file_name] ]= identifier[data] . 
identifier[getvalue] () keyword[else] : identifier[pillar_node] [ identifier[file_name] ]= identifier[data] keyword[return] identifier[pillar]
def _construct_pillar(top_dir, follow_dir_links, keep_newline=False, render_default=None, renderer_blacklist=None, renderer_whitelist=None, template=False): """ Construct pillar from file tree. """ pillar = {} renderers = salt.loader.render(__opts__, __salt__) norm_top_dir = os.path.normpath(top_dir) for (dir_path, dir_names, file_names) in salt.utils.path.os_walk(top_dir, topdown=True, onerror=_on_walk_error, followlinks=follow_dir_links): # Find current path in pillar tree pillar_node = pillar norm_dir_path = os.path.normpath(dir_path) prefix = os.path.relpath(norm_dir_path, norm_top_dir) if norm_dir_path != norm_top_dir: path_parts = [] head = prefix while head: (head, tail) = os.path.split(head) path_parts.insert(0, tail) # depends on [control=['while'], data=[]] while path_parts: pillar_node = pillar_node[path_parts.pop(0)] # depends on [control=['while'], data=[]] # depends on [control=['if'], data=[]] # Create dicts for subdirectories for dir_name in dir_names: pillar_node[dir_name] = {} # depends on [control=['for'], data=['dir_name']] # Add files for file_name in file_names: file_path = os.path.join(dir_path, file_name) if not os.path.isfile(file_path): log.error('file_tree: %s: not a regular file', file_path) continue # depends on [control=['if'], data=[]] contents = b'' try: with salt.utils.files.fopen(file_path, 'rb') as fhr: buf = fhr.read(__opts__['file_buffer_size']) while buf: contents += buf buf = fhr.read(__opts__['file_buffer_size']) # depends on [control=['while'], data=[]] if contents.endswith(b'\n') and _check_newline(prefix, file_name, keep_newline): contents = contents[:-1] # depends on [control=['if'], data=[]] # depends on [control=['with'], data=['fhr']] # depends on [control=['try'], data=[]] except (IOError, OSError) as exc: log.error('file_tree: Error reading %s: %s', file_path, exc.strerror) # depends on [control=['except'], data=['exc']] else: data = contents if template is True: data = 
salt.template.compile_template_str(template=salt.utils.stringutils.to_unicode(contents), renderers=renderers, default=render_default, blacklist=renderer_blacklist, whitelist=renderer_whitelist) # depends on [control=['if'], data=[]] if salt.utils.stringio.is_readable(data): pillar_node[file_name] = data.getvalue() # depends on [control=['if'], data=[]] else: pillar_node[file_name] = data # depends on [control=['for'], data=['file_name']] # depends on [control=['for'], data=[]] return pillar
def quasi_random_indices(number_of_total_items, number_of_desired_items = None): """quasi_random_indices(number_of_total_items, [number_of_desired_items]) -> index Yields an iterator to a quasi-random list of indices that will contain exactly the number of desired indices (or the number of total items in the list, if this is smaller). This function can be used to retrieve a consistent and reproducible list of indices of the data, in case the ``number_of_total_items`` is lower that the given ``number_of_desired_items``. **Parameters:** ``number_of_total_items`` : int The total number of elements in the collection, which should be sub-sampled ``number_of_desired_items`` : int or ``None`` The number of items that should be used; if ``None`` or greater than ``number_of_total_items``, all indices are yielded **Yields:** ``index`` : int An iterator to indices, which will span ``number_of_total_items`` evenly. """ # check if we need to compute a sublist at all if number_of_desired_items is None or number_of_desired_items >= number_of_total_items or number_of_desired_items < 0: for i in range(number_of_total_items): yield i else: increase = float(number_of_total_items)/float(number_of_desired_items) # generate a regular quasi-random index list for i in range(number_of_desired_items): yield int((i +.5)*increase)
def function[quasi_random_indices, parameter[number_of_total_items, number_of_desired_items]]: constant[quasi_random_indices(number_of_total_items, [number_of_desired_items]) -> index Yields an iterator to a quasi-random list of indices that will contain exactly the number of desired indices (or the number of total items in the list, if this is smaller). This function can be used to retrieve a consistent and reproducible list of indices of the data, in case the ``number_of_total_items`` is lower that the given ``number_of_desired_items``. **Parameters:** ``number_of_total_items`` : int The total number of elements in the collection, which should be sub-sampled ``number_of_desired_items`` : int or ``None`` The number of items that should be used; if ``None`` or greater than ``number_of_total_items``, all indices are yielded **Yields:** ``index`` : int An iterator to indices, which will span ``number_of_total_items`` evenly. ] if <ast.BoolOp object at 0x7da204565240> begin[:] for taget[name[i]] in starred[call[name[range], parameter[name[number_of_total_items]]]] begin[:] <ast.Yield object at 0x7da2045645b0>
keyword[def] identifier[quasi_random_indices] ( identifier[number_of_total_items] , identifier[number_of_desired_items] = keyword[None] ): literal[string] keyword[if] identifier[number_of_desired_items] keyword[is] keyword[None] keyword[or] identifier[number_of_desired_items] >= identifier[number_of_total_items] keyword[or] identifier[number_of_desired_items] < literal[int] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[number_of_total_items] ): keyword[yield] identifier[i] keyword[else] : identifier[increase] = identifier[float] ( identifier[number_of_total_items] )/ identifier[float] ( identifier[number_of_desired_items] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[number_of_desired_items] ): keyword[yield] identifier[int] (( identifier[i] + literal[int] )* identifier[increase] )
def quasi_random_indices(number_of_total_items, number_of_desired_items=None): """quasi_random_indices(number_of_total_items, [number_of_desired_items]) -> index Yields an iterator to a quasi-random list of indices that will contain exactly the number of desired indices (or the number of total items in the list, if this is smaller). This function can be used to retrieve a consistent and reproducible list of indices of the data, in case the ``number_of_total_items`` is lower that the given ``number_of_desired_items``. **Parameters:** ``number_of_total_items`` : int The total number of elements in the collection, which should be sub-sampled ``number_of_desired_items`` : int or ``None`` The number of items that should be used; if ``None`` or greater than ``number_of_total_items``, all indices are yielded **Yields:** ``index`` : int An iterator to indices, which will span ``number_of_total_items`` evenly. """ # check if we need to compute a sublist at all if number_of_desired_items is None or number_of_desired_items >= number_of_total_items or number_of_desired_items < 0: for i in range(number_of_total_items): yield i # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: increase = float(number_of_total_items) / float(number_of_desired_items) # generate a regular quasi-random index list for i in range(number_of_desired_items): yield int((i + 0.5) * increase) # depends on [control=['for'], data=['i']]
async def get_sleep_timer_settings(self) -> List[Setting]: """Get sleep timer settings.""" return [ Setting.make(**x) for x in await self.services["system"]["getSleepTimerSettings"]({}) ]
<ast.AsyncFunctionDef object at 0x7da18f00f880>
keyword[async] keyword[def] identifier[get_sleep_timer_settings] ( identifier[self] )-> identifier[List] [ identifier[Setting] ]: literal[string] keyword[return] [ identifier[Setting] . identifier[make] (** identifier[x] ) keyword[for] identifier[x] keyword[in] keyword[await] identifier[self] . identifier[services] [ literal[string] ][ literal[string] ]({}) ]
async def get_sleep_timer_settings(self) -> List[Setting]: """Get sleep timer settings.""" return [Setting.make(**x) for x in await self.services['system']['getSleepTimerSettings']({})]
def record(session_file, shell, prompt, alias, envvar): """Record a session file. If no argument is passed, commands are written to ./session.sh. When you are finished recording, run the "stop" command. """ if os.path.exists(session_file): click.confirm( 'File "{0}" already exists. Overwrite?'.format(session_file), abort=True, default=False, ) secho("We'll do it live!", fg="red", bold=True) filename = click.format_filename(session_file) secho("RECORDING SESSION: {}".format(filename), fg="yellow", bold=True) print_recorder_instructions() click.pause() click.clear() cwd = os.getcwd() # Save cwd # Run the recorder commands = run_recorder(shell, prompt, aliases=alias, envvars=envvar) os.chdir(cwd) # Reset cwd secho("FINISHED RECORDING SESSION", fg="yellow", bold=True) secho("Writing to {0}...".format(filename), fg="cyan") with open(session_file, "w", encoding="utf-8") as fp: fp.write(HEADER_TEMPLATE.format(shell=shell, prompt=prompt)) write_directives(fp, "alias", alias) write_directives(fp, "env", envvar) fp.write("\n") fp.write("".join(commands)) fp.write("\n") play_cmd = style("doitlive play {}".format(filename), bold=True) echo("Done. Run {} to play back your session.".format(play_cmd))
def function[record, parameter[session_file, shell, prompt, alias, envvar]]: constant[Record a session file. If no argument is passed, commands are written to ./session.sh. When you are finished recording, run the "stop" command. ] if call[name[os].path.exists, parameter[name[session_file]]] begin[:] call[name[click].confirm, parameter[call[constant[File "{0}" already exists. Overwrite?].format, parameter[name[session_file]]]]] call[name[secho], parameter[constant[We'll do it live!]]] variable[filename] assign[=] call[name[click].format_filename, parameter[name[session_file]]] call[name[secho], parameter[call[constant[RECORDING SESSION: {}].format, parameter[name[filename]]]]] call[name[print_recorder_instructions], parameter[]] call[name[click].pause, parameter[]] call[name[click].clear, parameter[]] variable[cwd] assign[=] call[name[os].getcwd, parameter[]] variable[commands] assign[=] call[name[run_recorder], parameter[name[shell], name[prompt]]] call[name[os].chdir, parameter[name[cwd]]] call[name[secho], parameter[constant[FINISHED RECORDING SESSION]]] call[name[secho], parameter[call[constant[Writing to {0}...].format, parameter[name[filename]]]]] with call[name[open], parameter[name[session_file], constant[w]]] begin[:] call[name[fp].write, parameter[call[name[HEADER_TEMPLATE].format, parameter[]]]] call[name[write_directives], parameter[name[fp], constant[alias], name[alias]]] call[name[write_directives], parameter[name[fp], constant[env], name[envvar]]] call[name[fp].write, parameter[constant[ ]]] call[name[fp].write, parameter[call[constant[].join, parameter[name[commands]]]]] call[name[fp].write, parameter[constant[ ]]] variable[play_cmd] assign[=] call[name[style], parameter[call[constant[doitlive play {}].format, parameter[name[filename]]]]] call[name[echo], parameter[call[constant[Done. Run {} to play back your session.].format, parameter[name[play_cmd]]]]]
keyword[def] identifier[record] ( identifier[session_file] , identifier[shell] , identifier[prompt] , identifier[alias] , identifier[envvar] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[session_file] ): identifier[click] . identifier[confirm] ( literal[string] . identifier[format] ( identifier[session_file] ), identifier[abort] = keyword[True] , identifier[default] = keyword[False] , ) identifier[secho] ( literal[string] , identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) identifier[filename] = identifier[click] . identifier[format_filename] ( identifier[session_file] ) identifier[secho] ( literal[string] . identifier[format] ( identifier[filename] ), identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) identifier[print_recorder_instructions] () identifier[click] . identifier[pause] () identifier[click] . identifier[clear] () identifier[cwd] = identifier[os] . identifier[getcwd] () identifier[commands] = identifier[run_recorder] ( identifier[shell] , identifier[prompt] , identifier[aliases] = identifier[alias] , identifier[envvars] = identifier[envvar] ) identifier[os] . identifier[chdir] ( identifier[cwd] ) identifier[secho] ( literal[string] , identifier[fg] = literal[string] , identifier[bold] = keyword[True] ) identifier[secho] ( literal[string] . identifier[format] ( identifier[filename] ), identifier[fg] = literal[string] ) keyword[with] identifier[open] ( identifier[session_file] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[fp] : identifier[fp] . identifier[write] ( identifier[HEADER_TEMPLATE] . identifier[format] ( identifier[shell] = identifier[shell] , identifier[prompt] = identifier[prompt] )) identifier[write_directives] ( identifier[fp] , literal[string] , identifier[alias] ) identifier[write_directives] ( identifier[fp] , literal[string] , identifier[envvar] ) identifier[fp] . 
identifier[write] ( literal[string] ) identifier[fp] . identifier[write] ( literal[string] . identifier[join] ( identifier[commands] )) identifier[fp] . identifier[write] ( literal[string] ) identifier[play_cmd] = identifier[style] ( literal[string] . identifier[format] ( identifier[filename] ), identifier[bold] = keyword[True] ) identifier[echo] ( literal[string] . identifier[format] ( identifier[play_cmd] ))
def record(session_file, shell, prompt, alias, envvar): """Record a session file. If no argument is passed, commands are written to ./session.sh. When you are finished recording, run the "stop" command. """ if os.path.exists(session_file): click.confirm('File "{0}" already exists. Overwrite?'.format(session_file), abort=True, default=False) # depends on [control=['if'], data=[]] secho("We'll do it live!", fg='red', bold=True) filename = click.format_filename(session_file) secho('RECORDING SESSION: {}'.format(filename), fg='yellow', bold=True) print_recorder_instructions() click.pause() click.clear() cwd = os.getcwd() # Save cwd # Run the recorder commands = run_recorder(shell, prompt, aliases=alias, envvars=envvar) os.chdir(cwd) # Reset cwd secho('FINISHED RECORDING SESSION', fg='yellow', bold=True) secho('Writing to {0}...'.format(filename), fg='cyan') with open(session_file, 'w', encoding='utf-8') as fp: fp.write(HEADER_TEMPLATE.format(shell=shell, prompt=prompt)) write_directives(fp, 'alias', alias) write_directives(fp, 'env', envvar) fp.write('\n') fp.write(''.join(commands)) fp.write('\n') # depends on [control=['with'], data=['fp']] play_cmd = style('doitlive play {}'.format(filename), bold=True) echo('Done. Run {} to play back your session.'.format(play_cmd))
def colored(cls, color, message): """ Small function to wrap a string around a color Args: color (str): name of the color to wrap the string with, must be one of the class properties message (str): String to wrap with the color Returns: str: the colored string """ return getattr(cls, color.upper()) + message + cls.DEFAULT
def function[colored, parameter[cls, color, message]]: constant[ Small function to wrap a string around a color Args: color (str): name of the color to wrap the string with, must be one of the class properties message (str): String to wrap with the color Returns: str: the colored string ] return[binary_operation[binary_operation[call[name[getattr], parameter[name[cls], call[name[color].upper, parameter[]]]] + name[message]] + name[cls].DEFAULT]]
keyword[def] identifier[colored] ( identifier[cls] , identifier[color] , identifier[message] ): literal[string] keyword[return] identifier[getattr] ( identifier[cls] , identifier[color] . identifier[upper] ())+ identifier[message] + identifier[cls] . identifier[DEFAULT]
def colored(cls, color, message): """ Small function to wrap a string around a color Args: color (str): name of the color to wrap the string with, must be one of the class properties message (str): String to wrap with the color Returns: str: the colored string """ return getattr(cls, color.upper()) + message + cls.DEFAULT
def _validate_item(self, validator, data_item, position, includes): """ Validates a single data item against validator. Returns an array of errors. """ errors = [] # Optional field with optional value? Who cares. if data_item is None and validator.is_optional and validator.can_be_none: return errors errors += self._validate_primitive(validator, data_item, position) if errors: return errors if isinstance(validator, val.Include): errors += self._validate_include(validator, data_item, includes, position) elif isinstance(validator, (val.Map, val.List)): errors += self._validate_map_list(validator, data_item, includes, position) elif isinstance(validator, val.Any): errors += self._validate_any(validator, data_item, includes, position) return errors
def function[_validate_item, parameter[self, validator, data_item, position, includes]]: constant[ Validates a single data item against validator. Returns an array of errors. ] variable[errors] assign[=] list[[]] if <ast.BoolOp object at 0x7da2054a55a0> begin[:] return[name[errors]] <ast.AugAssign object at 0x7da2054a6b00> if name[errors] begin[:] return[name[errors]] if call[name[isinstance], parameter[name[validator], name[val].Include]] begin[:] <ast.AugAssign object at 0x7da2054a6a70> return[name[errors]]
keyword[def] identifier[_validate_item] ( identifier[self] , identifier[validator] , identifier[data_item] , identifier[position] , identifier[includes] ): literal[string] identifier[errors] =[] keyword[if] identifier[data_item] keyword[is] keyword[None] keyword[and] identifier[validator] . identifier[is_optional] keyword[and] identifier[validator] . identifier[can_be_none] : keyword[return] identifier[errors] identifier[errors] += identifier[self] . identifier[_validate_primitive] ( identifier[validator] , identifier[data_item] , identifier[position] ) keyword[if] identifier[errors] : keyword[return] identifier[errors] keyword[if] identifier[isinstance] ( identifier[validator] , identifier[val] . identifier[Include] ): identifier[errors] += identifier[self] . identifier[_validate_include] ( identifier[validator] , identifier[data_item] , identifier[includes] , identifier[position] ) keyword[elif] identifier[isinstance] ( identifier[validator] ,( identifier[val] . identifier[Map] , identifier[val] . identifier[List] )): identifier[errors] += identifier[self] . identifier[_validate_map_list] ( identifier[validator] , identifier[data_item] , identifier[includes] , identifier[position] ) keyword[elif] identifier[isinstance] ( identifier[validator] , identifier[val] . identifier[Any] ): identifier[errors] += identifier[self] . identifier[_validate_any] ( identifier[validator] , identifier[data_item] , identifier[includes] , identifier[position] ) keyword[return] identifier[errors]
def _validate_item(self, validator, data_item, position, includes): """ Validates a single data item against validator. Returns an array of errors. """ errors = [] # Optional field with optional value? Who cares. if data_item is None and validator.is_optional and validator.can_be_none: return errors # depends on [control=['if'], data=[]] errors += self._validate_primitive(validator, data_item, position) if errors: return errors # depends on [control=['if'], data=[]] if isinstance(validator, val.Include): errors += self._validate_include(validator, data_item, includes, position) # depends on [control=['if'], data=[]] elif isinstance(validator, (val.Map, val.List)): errors += self._validate_map_list(validator, data_item, includes, position) # depends on [control=['if'], data=[]] elif isinstance(validator, val.Any): errors += self._validate_any(validator, data_item, includes, position) # depends on [control=['if'], data=[]] return errors
def is_tuple_type(tp): """Test if the type is a generic tuple type, including subclasses excluding non-generic classes. Examples:: is_tuple_type(int) == False is_tuple_type(tuple) == False is_tuple_type(Tuple) == True is_tuple_type(Tuple[str, int]) == True class MyClass(Tuple[str, int]): ... is_tuple_type(MyClass) == True For more general tests use issubclass(..., tuple), for more precise test (excluding subclasses) use:: get_origin(tp) is tuple # Tuple prior to Python 3.7 """ if NEW_TYPING: return (tp is Tuple or isinstance(tp, _GenericAlias) and tp.__origin__ is tuple or isinstance(tp, type) and issubclass(tp, Generic) and issubclass(tp, tuple)) return type(tp) is TupleMeta
def function[is_tuple_type, parameter[tp]]: constant[Test if the type is a generic tuple type, including subclasses excluding non-generic classes. Examples:: is_tuple_type(int) == False is_tuple_type(tuple) == False is_tuple_type(Tuple) == True is_tuple_type(Tuple[str, int]) == True class MyClass(Tuple[str, int]): ... is_tuple_type(MyClass) == True For more general tests use issubclass(..., tuple), for more precise test (excluding subclasses) use:: get_origin(tp) is tuple # Tuple prior to Python 3.7 ] if name[NEW_TYPING] begin[:] return[<ast.BoolOp object at 0x7da1b07ac430>] return[compare[call[name[type], parameter[name[tp]]] is name[TupleMeta]]]
keyword[def] identifier[is_tuple_type] ( identifier[tp] ): literal[string] keyword[if] identifier[NEW_TYPING] : keyword[return] ( identifier[tp] keyword[is] identifier[Tuple] keyword[or] identifier[isinstance] ( identifier[tp] , identifier[_GenericAlias] ) keyword[and] identifier[tp] . identifier[__origin__] keyword[is] identifier[tuple] keyword[or] identifier[isinstance] ( identifier[tp] , identifier[type] ) keyword[and] identifier[issubclass] ( identifier[tp] , identifier[Generic] ) keyword[and] identifier[issubclass] ( identifier[tp] , identifier[tuple] )) keyword[return] identifier[type] ( identifier[tp] ) keyword[is] identifier[TupleMeta]
def is_tuple_type(tp): """Test if the type is a generic tuple type, including subclasses excluding non-generic classes. Examples:: is_tuple_type(int) == False is_tuple_type(tuple) == False is_tuple_type(Tuple) == True is_tuple_type(Tuple[str, int]) == True class MyClass(Tuple[str, int]): ... is_tuple_type(MyClass) == True For more general tests use issubclass(..., tuple), for more precise test (excluding subclasses) use:: get_origin(tp) is tuple # Tuple prior to Python 3.7 """ if NEW_TYPING: return tp is Tuple or (isinstance(tp, _GenericAlias) and tp.__origin__ is tuple) or (isinstance(tp, type) and issubclass(tp, Generic) and issubclass(tp, tuple)) # depends on [control=['if'], data=[]] return type(tp) is TupleMeta
def inject_nulls(data: Mapping, field_names) -> dict: """Insert None as value for missing fields.""" record = dict() for field in field_names: record[field] = data.get(field, None) return record
def function[inject_nulls, parameter[data, field_names]]: constant[Insert None as value for missing fields.] variable[record] assign[=] call[name[dict], parameter[]] for taget[name[field]] in starred[name[field_names]] begin[:] call[name[record]][name[field]] assign[=] call[name[data].get, parameter[name[field], constant[None]]] return[name[record]]
keyword[def] identifier[inject_nulls] ( identifier[data] : identifier[Mapping] , identifier[field_names] )-> identifier[dict] : literal[string] identifier[record] = identifier[dict] () keyword[for] identifier[field] keyword[in] identifier[field_names] : identifier[record] [ identifier[field] ]= identifier[data] . identifier[get] ( identifier[field] , keyword[None] ) keyword[return] identifier[record]
def inject_nulls(data: Mapping, field_names) -> dict: """Insert None as value for missing fields.""" record = dict() for field in field_names: record[field] = data.get(field, None) # depends on [control=['for'], data=['field']] return record
def rowget(self,tables_dict,row_list,index): "row_list in self.row_order" tmp=row_list for i in self.index_tuple(tables_dict,index,False): tmp=tmp[i] return tmp
def function[rowget, parameter[self, tables_dict, row_list, index]]: constant[row_list in self.row_order] variable[tmp] assign[=] name[row_list] for taget[name[i]] in starred[call[name[self].index_tuple, parameter[name[tables_dict], name[index], constant[False]]]] begin[:] variable[tmp] assign[=] call[name[tmp]][name[i]] return[name[tmp]]
keyword[def] identifier[rowget] ( identifier[self] , identifier[tables_dict] , identifier[row_list] , identifier[index] ): literal[string] identifier[tmp] = identifier[row_list] keyword[for] identifier[i] keyword[in] identifier[self] . identifier[index_tuple] ( identifier[tables_dict] , identifier[index] , keyword[False] ): identifier[tmp] = identifier[tmp] [ identifier[i] ] keyword[return] identifier[tmp]
def rowget(self, tables_dict, row_list, index): """row_list in self.row_order""" tmp = row_list for i in self.index_tuple(tables_dict, index, False): tmp = tmp[i] # depends on [control=['for'], data=['i']] return tmp
def cat(self, multihash, offset=0, length=-1, **kwargs): r"""Retrieves the contents of a file identified by hash. .. code-block:: python >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'<!DOCTYPE html>\n<html>\n\n<head>\n<title>ipfs example viewer</…' Parameters ---------- multihash : str The path to the IPFS object(s) to be retrieved offset : int Byte offset to begin reading from length : int Maximum number of bytes to read(-1 for all) Returns ------- str : File contents """ opts = {} if offset != 0: opts['offset'] = offset if length != -1: opts['length'] = length args = (multihash,) return self._client.request('/cat', args, opts=opts, **kwargs)
def function[cat, parameter[self, multihash, offset, length]]: constant[Retrieves the contents of a file identified by hash. .. code-block:: python >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'<!DOCTYPE html>\n<html>\n\n<head>\n<title>ipfs example viewer</…' Parameters ---------- multihash : str The path to the IPFS object(s) to be retrieved offset : int Byte offset to begin reading from length : int Maximum number of bytes to read(-1 for all) Returns ------- str : File contents ] variable[opts] assign[=] dictionary[[], []] if compare[name[offset] not_equal[!=] constant[0]] begin[:] call[name[opts]][constant[offset]] assign[=] name[offset] if compare[name[length] not_equal[!=] <ast.UnaryOp object at 0x7da18f720bb0>] begin[:] call[name[opts]][constant[length]] assign[=] name[length] variable[args] assign[=] tuple[[<ast.Name object at 0x7da18f723a00>]] return[call[name[self]._client.request, parameter[constant[/cat], name[args]]]]
keyword[def] identifier[cat] ( identifier[self] , identifier[multihash] , identifier[offset] = literal[int] , identifier[length] =- literal[int] ,** identifier[kwargs] ): literal[string] identifier[opts] ={} keyword[if] identifier[offset] != literal[int] : identifier[opts] [ literal[string] ]= identifier[offset] keyword[if] identifier[length] !=- literal[int] : identifier[opts] [ literal[string] ]= identifier[length] identifier[args] =( identifier[multihash] ,) keyword[return] identifier[self] . identifier[_client] . identifier[request] ( literal[string] , identifier[args] , identifier[opts] = identifier[opts] ,** identifier[kwargs] )
def cat(self, multihash, offset=0, length=-1, **kwargs): """Retrieves the contents of a file identified by hash. .. code-block:: python >>> c.cat('QmTkzDwWqPbnAh5YiV5VwcTLnGdwSNsNTn2aDxdXBFca7D') Traceback (most recent call last): ... ipfsapi.exceptions.Error: this dag node is a directory >>> c.cat('QmeKozNssnkJ4NcyRidYgDY2jfRZqVEoRGfipkgath71bX') b'<!DOCTYPE html>\\n<html>\\n\\n<head>\\n<title>ipfs example viewer</…' Parameters ---------- multihash : str The path to the IPFS object(s) to be retrieved offset : int Byte offset to begin reading from length : int Maximum number of bytes to read(-1 for all) Returns ------- str : File contents """ opts = {} if offset != 0: opts['offset'] = offset # depends on [control=['if'], data=['offset']] if length != -1: opts['length'] = length # depends on [control=['if'], data=['length']] args = (multihash,) return self._client.request('/cat', args, opts=opts, **kwargs)
def ps(ui, repo, *pats, **opts): """alias for hg p --short """ opts['short'] = True return pending(ui, repo, *pats, **opts)
def function[ps, parameter[ui, repo]]: constant[alias for hg p --short ] call[name[opts]][constant[short]] assign[=] constant[True] return[call[name[pending], parameter[name[ui], name[repo], <ast.Starred object at 0x7da207f02680>]]]
keyword[def] identifier[ps] ( identifier[ui] , identifier[repo] ,* identifier[pats] ,** identifier[opts] ): literal[string] identifier[opts] [ literal[string] ]= keyword[True] keyword[return] identifier[pending] ( identifier[ui] , identifier[repo] ,* identifier[pats] ,** identifier[opts] )
def ps(ui, repo, *pats, **opts): """alias for hg p --short """ opts['short'] = True return pending(ui, repo, *pats, **opts)
def get(name: str, required: bool=False, default: Union[Type[empty], T]=empty, type: Type[T]=None) -> T: """Generic getter for environment variables. Handles defaults, required-ness, and what type to expect. :param name: The name of the environment variable be pulled :type name: str :param required: Whether the environment variable is required. If ``True`` and the variable is not present, a ``KeyError`` is raised. :type required: bool :param default: The value to return if the environment variable is not present. (Providing a default alongside setting ``required=True`` will raise a ``ValueError``) :type default: bool :param type: The type of variable expected. :param type: str or type """ fns = { 'int': env_int, int: env_int, # 'float': env_float, # float: env_float, 'bool': env_bool, bool: env_bool, 'string': env_string, str: env_string, 'list': env_list, list: env_list, } # type: Dict[Union[str, Type[Any]], Callable[..., Any]] fn = fns.get(type, env_string) return fn(name, default=default, required=required)
def function[get, parameter[name, required, default, type]]: constant[Generic getter for environment variables. Handles defaults, required-ness, and what type to expect. :param name: The name of the environment variable be pulled :type name: str :param required: Whether the environment variable is required. If ``True`` and the variable is not present, a ``KeyError`` is raised. :type required: bool :param default: The value to return if the environment variable is not present. (Providing a default alongside setting ``required=True`` will raise a ``ValueError``) :type default: bool :param type: The type of variable expected. :param type: str or type ] variable[fns] assign[=] dictionary[[<ast.Constant object at 0x7da1b17fb520>, <ast.Name object at 0x7da1b17fa1a0>, <ast.Constant object at 0x7da1b17f8c10>, <ast.Name object at 0x7da1b17f9d50>, <ast.Constant object at 0x7da1b17f8490>, <ast.Name object at 0x7da1b17fbf70>, <ast.Constant object at 0x7da1b17fa050>, <ast.Name object at 0x7da1b17fb550>], [<ast.Name object at 0x7da1b17fafe0>, <ast.Name object at 0x7da1b17f8ee0>, <ast.Name object at 0x7da1b17faef0>, <ast.Name object at 0x7da1b17fa290>, <ast.Name object at 0x7da1b17f8460>, <ast.Name object at 0x7da1b17face0>, <ast.Name object at 0x7da1b17fa920>, <ast.Name object at 0x7da1b17fa890>]] variable[fn] assign[=] call[name[fns].get, parameter[name[type], name[env_string]]] return[call[name[fn], parameter[name[name]]]]
keyword[def] identifier[get] ( identifier[name] : identifier[str] , identifier[required] : identifier[bool] = keyword[False] , identifier[default] : identifier[Union] [ identifier[Type] [ identifier[empty] ], identifier[T] ]= identifier[empty] , identifier[type] : identifier[Type] [ identifier[T] ]= keyword[None] )-> identifier[T] : literal[string] identifier[fns] ={ literal[string] : identifier[env_int] , identifier[int] : identifier[env_int] , literal[string] : identifier[env_bool] , identifier[bool] : identifier[env_bool] , literal[string] : identifier[env_string] , identifier[str] : identifier[env_string] , literal[string] : identifier[env_list] , identifier[list] : identifier[env_list] , } identifier[fn] = identifier[fns] . identifier[get] ( identifier[type] , identifier[env_string] ) keyword[return] identifier[fn] ( identifier[name] , identifier[default] = identifier[default] , identifier[required] = identifier[required] )
def get(name: str, required: bool=False, default: Union[Type[empty], T]=empty, type: Type[T]=None) -> T: """Generic getter for environment variables. Handles defaults, required-ness, and what type to expect. :param name: The name of the environment variable be pulled :type name: str :param required: Whether the environment variable is required. If ``True`` and the variable is not present, a ``KeyError`` is raised. :type required: bool :param default: The value to return if the environment variable is not present. (Providing a default alongside setting ``required=True`` will raise a ``ValueError``) :type default: bool :param type: The type of variable expected. :param type: str or type """ # 'float': env_float, # float: env_float, fns = {'int': env_int, int: env_int, 'bool': env_bool, bool: env_bool, 'string': env_string, str: env_string, 'list': env_list, list: env_list} # type: Dict[Union[str, Type[Any]], Callable[..., Any]] fn = fns.get(type, env_string) return fn(name, default=default, required=required)
def _get_retry_delay(cause): """Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` :param exc: exception for aborted transaction :rtype: float :returns: seconds to wait before retrying the transaction. """ metadata = dict(cause.trailing_metadata()) retry_info_pb = metadata.get("google.rpc.retryinfo-bin") if retry_info_pb is not None: retry_info = RetryInfo() retry_info.ParseFromString(retry_info_pb) nanos = retry_info.retry_delay.nanos return retry_info.retry_delay.seconds + nanos / 1.0e9
def function[_get_retry_delay, parameter[cause]]: constant[Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` :param exc: exception for aborted transaction :rtype: float :returns: seconds to wait before retrying the transaction. ] variable[metadata] assign[=] call[name[dict], parameter[call[name[cause].trailing_metadata, parameter[]]]] variable[retry_info_pb] assign[=] call[name[metadata].get, parameter[constant[google.rpc.retryinfo-bin]]] if compare[name[retry_info_pb] is_not constant[None]] begin[:] variable[retry_info] assign[=] call[name[RetryInfo], parameter[]] call[name[retry_info].ParseFromString, parameter[name[retry_info_pb]]] variable[nanos] assign[=] name[retry_info].retry_delay.nanos return[binary_operation[name[retry_info].retry_delay.seconds + binary_operation[name[nanos] / constant[1000000000.0]]]]
keyword[def] identifier[_get_retry_delay] ( identifier[cause] ): literal[string] identifier[metadata] = identifier[dict] ( identifier[cause] . identifier[trailing_metadata] ()) identifier[retry_info_pb] = identifier[metadata] . identifier[get] ( literal[string] ) keyword[if] identifier[retry_info_pb] keyword[is] keyword[not] keyword[None] : identifier[retry_info] = identifier[RetryInfo] () identifier[retry_info] . identifier[ParseFromString] ( identifier[retry_info_pb] ) identifier[nanos] = identifier[retry_info] . identifier[retry_delay] . identifier[nanos] keyword[return] identifier[retry_info] . identifier[retry_delay] . identifier[seconds] + identifier[nanos] / literal[int]
def _get_retry_delay(cause): """Helper for :func:`_delay_until_retry`. :type exc: :class:`grpc.Call` :param exc: exception for aborted transaction :rtype: float :returns: seconds to wait before retrying the transaction. """ metadata = dict(cause.trailing_metadata()) retry_info_pb = metadata.get('google.rpc.retryinfo-bin') if retry_info_pb is not None: retry_info = RetryInfo() retry_info.ParseFromString(retry_info_pb) nanos = retry_info.retry_delay.nanos return retry_info.retry_delay.seconds + nanos / 1000000000.0 # depends on [control=['if'], data=['retry_info_pb']]
def UCRTLibraries(self): """ Microsoft Universal C Runtime SDK Libraries """ if self.vc_ver < 14.0: return [] arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') ucrtver = self._ucrt_subdir return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
def function[UCRTLibraries, parameter[self]]: constant[ Microsoft Universal C Runtime SDK Libraries ] if compare[name[self].vc_ver less[<] constant[14.0]] begin[:] return[list[[]]] variable[arch_subdir] assign[=] call[name[self].pi.target_dir, parameter[]] variable[lib] assign[=] call[name[os].path.join, parameter[name[self].si.UniversalCRTSdkDir, constant[lib]]] variable[ucrtver] assign[=] name[self]._ucrt_subdir return[list[[<ast.Call object at 0x7da1b1b111b0>]]]
keyword[def] identifier[UCRTLibraries] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[vc_ver] < literal[int] : keyword[return] [] identifier[arch_subdir] = identifier[self] . identifier[pi] . identifier[target_dir] ( identifier[x64] = keyword[True] ) identifier[lib] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[si] . identifier[UniversalCRTSdkDir] , literal[string] ) identifier[ucrtver] = identifier[self] . identifier[_ucrt_subdir] keyword[return] [ identifier[os] . identifier[path] . identifier[join] ( identifier[lib] , literal[string] %( identifier[ucrtver] , identifier[arch_subdir] ))]
def UCRTLibraries(self): """ Microsoft Universal C Runtime SDK Libraries """ if self.vc_ver < 14.0: return [] # depends on [control=['if'], data=[]] arch_subdir = self.pi.target_dir(x64=True) lib = os.path.join(self.si.UniversalCRTSdkDir, 'lib') ucrtver = self._ucrt_subdir return [os.path.join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
def _shape_array(array1, array2): """ Function that equalises the input arrays by zero-padding the shortest one. ---------- Parameters ---------- array1: list or numpy.array Array array2: list or numpy.array Array Return ------ arrays: numpy.array Array containing the equal-length arrays. """ if len(array1) > len(array2): new_array = array2 old_array = array1 else: new_array = array1 old_array = array2 length = len(old_array) - len(new_array) for i in range(length): n = new_array[-1].copy() n[0::3] += 1 n[2::3] = 0 new_array = np.vstack([new_array, [n]]) arrays = np.hstack([old_array, new_array]) return arrays
def function[_shape_array, parameter[array1, array2]]: constant[ Function that equalises the input arrays by zero-padding the shortest one. ---------- Parameters ---------- array1: list or numpy.array Array array2: list or numpy.array Array Return ------ arrays: numpy.array Array containing the equal-length arrays. ] if compare[call[name[len], parameter[name[array1]]] greater[>] call[name[len], parameter[name[array2]]]] begin[:] variable[new_array] assign[=] name[array2] variable[old_array] assign[=] name[array1] variable[length] assign[=] binary_operation[call[name[len], parameter[name[old_array]]] - call[name[len], parameter[name[new_array]]]] for taget[name[i]] in starred[call[name[range], parameter[name[length]]]] begin[:] variable[n] assign[=] call[call[name[new_array]][<ast.UnaryOp object at 0x7da20c6c7f10>].copy, parameter[]] <ast.AugAssign object at 0x7da20c6c6b30> call[name[n]][<ast.Slice object at 0x7da20c6c5030>] assign[=] constant[0] variable[new_array] assign[=] call[name[np].vstack, parameter[list[[<ast.Name object at 0x7da20c6c5cf0>, <ast.List object at 0x7da20c6c4190>]]]] variable[arrays] assign[=] call[name[np].hstack, parameter[list[[<ast.Name object at 0x7da20e963e80>, <ast.Name object at 0x7da20e962020>]]]] return[name[arrays]]
keyword[def] identifier[_shape_array] ( identifier[array1] , identifier[array2] ): literal[string] keyword[if] identifier[len] ( identifier[array1] )> identifier[len] ( identifier[array2] ): identifier[new_array] = identifier[array2] identifier[old_array] = identifier[array1] keyword[else] : identifier[new_array] = identifier[array1] identifier[old_array] = identifier[array2] identifier[length] = identifier[len] ( identifier[old_array] )- identifier[len] ( identifier[new_array] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[length] ): identifier[n] = identifier[new_array] [- literal[int] ]. identifier[copy] () identifier[n] [ literal[int] :: literal[int] ]+= literal[int] identifier[n] [ literal[int] :: literal[int] ]= literal[int] identifier[new_array] = identifier[np] . identifier[vstack] ([ identifier[new_array] ,[ identifier[n] ]]) identifier[arrays] = identifier[np] . identifier[hstack] ([ identifier[old_array] , identifier[new_array] ]) keyword[return] identifier[arrays]
def _shape_array(array1, array2): """ Function that equalises the input arrays by zero-padding the shortest one. ---------- Parameters ---------- array1: list or numpy.array Array array2: list or numpy.array Array Return ------ arrays: numpy.array Array containing the equal-length arrays. """ if len(array1) > len(array2): new_array = array2 old_array = array1 # depends on [control=['if'], data=[]] else: new_array = array1 old_array = array2 length = len(old_array) - len(new_array) for i in range(length): n = new_array[-1].copy() n[0::3] += 1 n[2::3] = 0 new_array = np.vstack([new_array, [n]]) # depends on [control=['for'], data=[]] arrays = np.hstack([old_array, new_array]) return arrays
def locations(self): """ Available locations to be used when creating a new machine. :returns: A list of available locations. """ req = self.request(self.mist_client.uri+'/clouds/'+self.id+'/locations') locations = req.get().json() return locations
def function[locations, parameter[self]]: constant[ Available locations to be used when creating a new machine. :returns: A list of available locations. ] variable[req] assign[=] call[name[self].request, parameter[binary_operation[binary_operation[binary_operation[name[self].mist_client.uri + constant[/clouds/]] + name[self].id] + constant[/locations]]]] variable[locations] assign[=] call[call[name[req].get, parameter[]].json, parameter[]] return[name[locations]]
keyword[def] identifier[locations] ( identifier[self] ): literal[string] identifier[req] = identifier[self] . identifier[request] ( identifier[self] . identifier[mist_client] . identifier[uri] + literal[string] + identifier[self] . identifier[id] + literal[string] ) identifier[locations] = identifier[req] . identifier[get] (). identifier[json] () keyword[return] identifier[locations]
def locations(self): """ Available locations to be used when creating a new machine. :returns: A list of available locations. """ req = self.request(self.mist_client.uri + '/clouds/' + self.id + '/locations') locations = req.get().json() return locations
def save_lastnode_id(): """Save the id of the last node created.""" init_counter() with FileLock(_COUNTER_FILE): with AtomicFile(_COUNTER_FILE, mode="w") as fh: fh.write("%d\n" % _COUNTER)
def function[save_lastnode_id, parameter[]]: constant[Save the id of the last node created.] call[name[init_counter], parameter[]] with call[name[FileLock], parameter[name[_COUNTER_FILE]]] begin[:] with call[name[AtomicFile], parameter[name[_COUNTER_FILE]]] begin[:] call[name[fh].write, parameter[binary_operation[constant[%d ] <ast.Mod object at 0x7da2590d6920> name[_COUNTER]]]]
keyword[def] identifier[save_lastnode_id] (): literal[string] identifier[init_counter] () keyword[with] identifier[FileLock] ( identifier[_COUNTER_FILE] ): keyword[with] identifier[AtomicFile] ( identifier[_COUNTER_FILE] , identifier[mode] = literal[string] ) keyword[as] identifier[fh] : identifier[fh] . identifier[write] ( literal[string] % identifier[_COUNTER] )
def save_lastnode_id(): """Save the id of the last node created.""" init_counter() with FileLock(_COUNTER_FILE): with AtomicFile(_COUNTER_FILE, mode='w') as fh: fh.write('%d\n' % _COUNTER) # depends on [control=['with'], data=['fh']] # depends on [control=['with'], data=[]]
def string(html, start_on=None, ignore=(), use_short=True, **queries): '''Returns a blox template from an html string''' if use_short: html = grow_short(html) return _to_template(fromstring(html), start_on=start_on, ignore=ignore, **queries)
def function[string, parameter[html, start_on, ignore, use_short]]: constant[Returns a blox template from an html string] if name[use_short] begin[:] variable[html] assign[=] call[name[grow_short], parameter[name[html]]] return[call[name[_to_template], parameter[call[name[fromstring], parameter[name[html]]]]]]
keyword[def] identifier[string] ( identifier[html] , identifier[start_on] = keyword[None] , identifier[ignore] =(), identifier[use_short] = keyword[True] ,** identifier[queries] ): literal[string] keyword[if] identifier[use_short] : identifier[html] = identifier[grow_short] ( identifier[html] ) keyword[return] identifier[_to_template] ( identifier[fromstring] ( identifier[html] ), identifier[start_on] = identifier[start_on] , identifier[ignore] = identifier[ignore] ,** identifier[queries] )
def string(html, start_on=None, ignore=(), use_short=True, **queries): """Returns a blox template from an html string""" if use_short: html = grow_short(html) # depends on [control=['if'], data=[]] return _to_template(fromstring(html), start_on=start_on, ignore=ignore, **queries)
def _build_tmp_access_args(method, ip, ttl, port, direction, comment): ''' Builds the cmd args for temporary access/deny opts. ''' opt = _get_opt(method) args = '{0} {1} {2}'.format(opt, ip, ttl) if port: args += ' -p {0}'.format(port) if direction: args += ' -d {0}'.format(direction) if comment: args += ' #{0}'.format(comment) return args
def function[_build_tmp_access_args, parameter[method, ip, ttl, port, direction, comment]]: constant[ Builds the cmd args for temporary access/deny opts. ] variable[opt] assign[=] call[name[_get_opt], parameter[name[method]]] variable[args] assign[=] call[constant[{0} {1} {2}].format, parameter[name[opt], name[ip], name[ttl]]] if name[port] begin[:] <ast.AugAssign object at 0x7da1b200b9d0> if name[direction] begin[:] <ast.AugAssign object at 0x7da1b200ba30> if name[comment] begin[:] <ast.AugAssign object at 0x7da1b2108490> return[name[args]]
keyword[def] identifier[_build_tmp_access_args] ( identifier[method] , identifier[ip] , identifier[ttl] , identifier[port] , identifier[direction] , identifier[comment] ): literal[string] identifier[opt] = identifier[_get_opt] ( identifier[method] ) identifier[args] = literal[string] . identifier[format] ( identifier[opt] , identifier[ip] , identifier[ttl] ) keyword[if] identifier[port] : identifier[args] += literal[string] . identifier[format] ( identifier[port] ) keyword[if] identifier[direction] : identifier[args] += literal[string] . identifier[format] ( identifier[direction] ) keyword[if] identifier[comment] : identifier[args] += literal[string] . identifier[format] ( identifier[comment] ) keyword[return] identifier[args]
def _build_tmp_access_args(method, ip, ttl, port, direction, comment): """ Builds the cmd args for temporary access/deny opts. """ opt = _get_opt(method) args = '{0} {1} {2}'.format(opt, ip, ttl) if port: args += ' -p {0}'.format(port) # depends on [control=['if'], data=[]] if direction: args += ' -d {0}'.format(direction) # depends on [control=['if'], data=[]] if comment: args += ' #{0}'.format(comment) # depends on [control=['if'], data=[]] return args
def flush(self, stats, cs_status=None): """Clear and update the screen. stats: Stats database to display cs_status: "None": standalone or server mode "Connected": Client is connected to the server "Disconnected": Client is disconnected from the server """ self.erase() self.display(stats, cs_status=cs_status)
def function[flush, parameter[self, stats, cs_status]]: constant[Clear and update the screen. stats: Stats database to display cs_status: "None": standalone or server mode "Connected": Client is connected to the server "Disconnected": Client is disconnected from the server ] call[name[self].erase, parameter[]] call[name[self].display, parameter[name[stats]]]
keyword[def] identifier[flush] ( identifier[self] , identifier[stats] , identifier[cs_status] = keyword[None] ): literal[string] identifier[self] . identifier[erase] () identifier[self] . identifier[display] ( identifier[stats] , identifier[cs_status] = identifier[cs_status] )
def flush(self, stats, cs_status=None): """Clear and update the screen. stats: Stats database to display cs_status: "None": standalone or server mode "Connected": Client is connected to the server "Disconnected": Client is disconnected from the server """ self.erase() self.display(stats, cs_status=cs_status)
def parts(self, *args, **kwargs): """Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters. """ return self._client.parts(*args, bucket=self.bucket.get('id'), **kwargs)
def function[parts, parameter[self]]: constant[Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters. ] return[call[name[self]._client.parts, parameter[<ast.Starred object at 0x7da204622bc0>]]]
keyword[def] identifier[parts] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[self] . identifier[_client] . identifier[parts] (* identifier[args] , identifier[bucket] = identifier[self] . identifier[bucket] . identifier[get] ( literal[string] ),** identifier[kwargs] )
def parts(self, *args, **kwargs): """Retrieve parts belonging to this scope. See :class:`pykechain.Client.parts` for available parameters. """ return self._client.parts(*args, bucket=self.bucket.get('id'), **kwargs)
def _uniquewords(*args): """Dictionary of words to their indices. Helper function to `encode.`""" words = {} n = 0 for word in itertools.chain(*args): if word not in words: words[word] = n n += 1 return words
def function[_uniquewords, parameter[]]: constant[Dictionary of words to their indices. Helper function to `encode.`] variable[words] assign[=] dictionary[[], []] variable[n] assign[=] constant[0] for taget[name[word]] in starred[call[name[itertools].chain, parameter[<ast.Starred object at 0x7da1b07e8eb0>]]] begin[:] if compare[name[word] <ast.NotIn object at 0x7da2590d7190> name[words]] begin[:] call[name[words]][name[word]] assign[=] name[n] <ast.AugAssign object at 0x7da1b07e84f0> return[name[words]]
keyword[def] identifier[_uniquewords] (* identifier[args] ): literal[string] identifier[words] ={} identifier[n] = literal[int] keyword[for] identifier[word] keyword[in] identifier[itertools] . identifier[chain] (* identifier[args] ): keyword[if] identifier[word] keyword[not] keyword[in] identifier[words] : identifier[words] [ identifier[word] ]= identifier[n] identifier[n] += literal[int] keyword[return] identifier[words]
def _uniquewords(*args): """Dictionary of words to their indices. Helper function to `encode.`""" words = {} n = 0 for word in itertools.chain(*args): if word not in words: words[word] = n n += 1 # depends on [control=['if'], data=['word', 'words']] # depends on [control=['for'], data=['word']] return words
def parse_ggKbase_tables(tables, id_type): """ convert ggKbase genome info tables to dictionary """ g2info = {} for table in tables: for line in open(table): line = line.strip().split('\t') if line[0].startswith('name'): header = line header[4] = 'genome size (bp)' header[12] = '#SCGs' header[13] = '#SCG duplicates' continue name, code, info = line[0], line[1], line info = [to_int(i) for i in info] if id_type is False: # try to use name and code ID if 'UNK' in code or 'unknown' in code: code = name if (name != code) and (name and code in g2info): print('# duplicate name or code in table(s)', file=sys.stderr) print('# %s and/or %s' % (name, code), file=sys.stderr) exit() if name not in g2info: g2info[name] = {item:stat for item, stat in zip(header, info)} if code not in g2info: g2info[code] = {item:stat for item, stat in zip(header, info)} else: if id_type == 'name': ID = name elif id_type == 'code': ID = code else: print('# specify name or code column using -id', file=sys.stderr) exit() ID = ID.replace(' ', '') g2info[ID] = {item:stat for item, stat in zip(header, info)} if g2info[ID]['genome size (bp)'] == '': g2info[ID]['genome size (bp)'] = 0 return g2info
def function[parse_ggKbase_tables, parameter[tables, id_type]]: constant[ convert ggKbase genome info tables to dictionary ] variable[g2info] assign[=] dictionary[[], []] for taget[name[table]] in starred[name[tables]] begin[:] for taget[name[line]] in starred[call[name[open], parameter[name[table]]]] begin[:] variable[line] assign[=] call[call[name[line].strip, parameter[]].split, parameter[constant[ ]]] if call[call[name[line]][constant[0]].startswith, parameter[constant[name]]] begin[:] variable[header] assign[=] name[line] call[name[header]][constant[4]] assign[=] constant[genome size (bp)] call[name[header]][constant[12]] assign[=] constant[#SCGs] call[name[header]][constant[13]] assign[=] constant[#SCG duplicates] continue <ast.Tuple object at 0x7da18f720f10> assign[=] tuple[[<ast.Subscript object at 0x7da18f721fc0>, <ast.Subscript object at 0x7da18f7228c0>, <ast.Name object at 0x7da18f720ca0>]] variable[info] assign[=] <ast.ListComp object at 0x7da18f723ac0> if compare[name[id_type] is constant[False]] begin[:] if <ast.BoolOp object at 0x7da18f7209a0> begin[:] variable[code] assign[=] name[name] if <ast.BoolOp object at 0x7da18f720c40> begin[:] call[name[print], parameter[constant[# duplicate name or code in table(s)]]] call[name[print], parameter[binary_operation[constant[# %s and/or %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f7206a0>, <ast.Name object at 0x7da18f7220b0>]]]]] call[name[exit], parameter[]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[g2info]] begin[:] call[name[g2info]][name[name]] assign[=] <ast.DictComp object at 0x7da18f722c50> if compare[name[code] <ast.NotIn object at 0x7da2590d7190> name[g2info]] begin[:] call[name[g2info]][name[code]] assign[=] <ast.DictComp object at 0x7da18f7226e0> return[name[g2info]]
keyword[def] identifier[parse_ggKbase_tables] ( identifier[tables] , identifier[id_type] ): literal[string] identifier[g2info] ={} keyword[for] identifier[table] keyword[in] identifier[tables] : keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[table] ): identifier[line] = identifier[line] . identifier[strip] (). identifier[split] ( literal[string] ) keyword[if] identifier[line] [ literal[int] ]. identifier[startswith] ( literal[string] ): identifier[header] = identifier[line] identifier[header] [ literal[int] ]= literal[string] identifier[header] [ literal[int] ]= literal[string] identifier[header] [ literal[int] ]= literal[string] keyword[continue] identifier[name] , identifier[code] , identifier[info] = identifier[line] [ literal[int] ], identifier[line] [ literal[int] ], identifier[line] identifier[info] =[ identifier[to_int] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[info] ] keyword[if] identifier[id_type] keyword[is] keyword[False] : keyword[if] literal[string] keyword[in] identifier[code] keyword[or] literal[string] keyword[in] identifier[code] : identifier[code] = identifier[name] keyword[if] ( identifier[name] != identifier[code] ) keyword[and] ( identifier[name] keyword[and] identifier[code] keyword[in] identifier[g2info] ): identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) identifier[print] ( literal[string] %( identifier[name] , identifier[code] ), identifier[file] = identifier[sys] . 
identifier[stderr] ) identifier[exit] () keyword[if] identifier[name] keyword[not] keyword[in] identifier[g2info] : identifier[g2info] [ identifier[name] ]={ identifier[item] : identifier[stat] keyword[for] identifier[item] , identifier[stat] keyword[in] identifier[zip] ( identifier[header] , identifier[info] )} keyword[if] identifier[code] keyword[not] keyword[in] identifier[g2info] : identifier[g2info] [ identifier[code] ]={ identifier[item] : identifier[stat] keyword[for] identifier[item] , identifier[stat] keyword[in] identifier[zip] ( identifier[header] , identifier[info] )} keyword[else] : keyword[if] identifier[id_type] == literal[string] : identifier[ID] = identifier[name] keyword[elif] identifier[id_type] == literal[string] : identifier[ID] = identifier[code] keyword[else] : identifier[print] ( literal[string] , identifier[file] = identifier[sys] . identifier[stderr] ) identifier[exit] () identifier[ID] = identifier[ID] . identifier[replace] ( literal[string] , literal[string] ) identifier[g2info] [ identifier[ID] ]={ identifier[item] : identifier[stat] keyword[for] identifier[item] , identifier[stat] keyword[in] identifier[zip] ( identifier[header] , identifier[info] )} keyword[if] identifier[g2info] [ identifier[ID] ][ literal[string] ]== literal[string] : identifier[g2info] [ identifier[ID] ][ literal[string] ]= literal[int] keyword[return] identifier[g2info]
def parse_ggKbase_tables(tables, id_type): """ convert ggKbase genome info tables to dictionary """ g2info = {} for table in tables: for line in open(table): line = line.strip().split('\t') if line[0].startswith('name'): header = line header[4] = 'genome size (bp)' header[12] = '#SCGs' header[13] = '#SCG duplicates' continue # depends on [control=['if'], data=[]] (name, code, info) = (line[0], line[1], line) info = [to_int(i) for i in info] if id_type is False: # try to use name and code ID if 'UNK' in code or 'unknown' in code: code = name # depends on [control=['if'], data=[]] if name != code and (name and code in g2info): print('# duplicate name or code in table(s)', file=sys.stderr) print('# %s and/or %s' % (name, code), file=sys.stderr) exit() # depends on [control=['if'], data=[]] if name not in g2info: g2info[name] = {item: stat for (item, stat) in zip(header, info)} # depends on [control=['if'], data=['name', 'g2info']] if code not in g2info: g2info[code] = {item: stat for (item, stat) in zip(header, info)} # depends on [control=['if'], data=['code', 'g2info']] # depends on [control=['if'], data=[]] else: if id_type == 'name': ID = name # depends on [control=['if'], data=[]] elif id_type == 'code': ID = code # depends on [control=['if'], data=[]] else: print('# specify name or code column using -id', file=sys.stderr) exit() ID = ID.replace(' ', '') g2info[ID] = {item: stat for (item, stat) in zip(header, info)} if g2info[ID]['genome size (bp)'] == '': g2info[ID]['genome size (bp)'] = 0 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] # depends on [control=['for'], data=['table']] return g2info
def update(self): """Get a repository git or update it""" if not os.path.isdir(os.path.join(self.path)): os.makedirs(self.path) if not os.path.isdir(os.path.join(self.path, 'refs')): subprocess.check_output([ 'git', 'clone', '--bare', self.repo_git, self.path ]) self.run(['gc', '--auto', '--prune=all']) self.run(['fetch', '-p', 'origin', '+refs/heads/*:refs/heads/*']) # github support self.run(['fetch', 'origin', '+refs/pull/*/head:refs/pull/*']) # gitlab support self.run([ 'fetch', 'origin', '+refs/merge-requests/*/head:refs/pull/*'])
def function[update, parameter[self]]: constant[Get a repository git or update it] if <ast.UnaryOp object at 0x7da18bc70d90> begin[:] call[name[os].makedirs, parameter[name[self].path]] if <ast.UnaryOp object at 0x7da18bc711b0> begin[:] call[name[subprocess].check_output, parameter[list[[<ast.Constant object at 0x7da18bc72320>, <ast.Constant object at 0x7da18bc73670>, <ast.Constant object at 0x7da18bc70430>, <ast.Attribute object at 0x7da18bc73e50>, <ast.Attribute object at 0x7da18bc723b0>]]]] call[name[self].run, parameter[list[[<ast.Constant object at 0x7da18bc71c00>, <ast.Constant object at 0x7da18bc73100>, <ast.Constant object at 0x7da18bc733d0>]]]] call[name[self].run, parameter[list[[<ast.Constant object at 0x7da18bc71cf0>, <ast.Constant object at 0x7da18bc70d30>, <ast.Constant object at 0x7da18bc71990>, <ast.Constant object at 0x7da18bc720b0>]]]] call[name[self].run, parameter[list[[<ast.Constant object at 0x7da18bc704c0>, <ast.Constant object at 0x7da18bc725c0>, <ast.Constant object at 0x7da18bc71e40>]]]] call[name[self].run, parameter[list[[<ast.Constant object at 0x7da18bc72950>, <ast.Constant object at 0x7da18bc738e0>, <ast.Constant object at 0x7da18bc710c0>]]]]
keyword[def] identifier[update] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] )): identifier[os] . identifier[makedirs] ( identifier[self] . identifier[path] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[isdir] ( identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[path] , literal[string] )): identifier[subprocess] . identifier[check_output] ([ literal[string] , literal[string] , literal[string] , identifier[self] . identifier[repo_git] , identifier[self] . identifier[path] ]) identifier[self] . identifier[run] ([ literal[string] , literal[string] , literal[string] ]) identifier[self] . identifier[run] ([ literal[string] , literal[string] , literal[string] , literal[string] ]) identifier[self] . identifier[run] ([ literal[string] , literal[string] , literal[string] ]) identifier[self] . identifier[run] ([ literal[string] , literal[string] , literal[string] ])
def update(self): """Get a repository git or update it""" if not os.path.isdir(os.path.join(self.path)): os.makedirs(self.path) # depends on [control=['if'], data=[]] if not os.path.isdir(os.path.join(self.path, 'refs')): subprocess.check_output(['git', 'clone', '--bare', self.repo_git, self.path]) # depends on [control=['if'], data=[]] self.run(['gc', '--auto', '--prune=all']) self.run(['fetch', '-p', 'origin', '+refs/heads/*:refs/heads/*']) # github support self.run(['fetch', 'origin', '+refs/pull/*/head:refs/pull/*']) # gitlab support self.run(['fetch', 'origin', '+refs/merge-requests/*/head:refs/pull/*'])
def poisson_cluster(data, k, init=None, max_iters=100): """ Performs Poisson hard EM on the given data. Args: data (array): A 2d array- genes x cells. Can be dense or sparse; for best performance, sparse matrices should be in CSC format. k (int): Number of clusters init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++ max_iters (int, optional): Maximum number of iterations. Default: 100 Returns: a tuple of two arrays: a cells x 1 vector of cluster assignments, and a genes x k array of cluster means. """ # TODO: be able to use a combination of fixed and unknown starting points # e.g., have init values only for certain genes, have a row of all # zeros indicating that kmeans++ should be used for that row. genes, cells = data.shape #print 'starting: ', centers if sparse.issparse(data) and not sparse.isspmatrix_csc(data): data = sparse.csc_matrix(data) init, assignments = kmeans_pp(data, k, centers=init) centers = np.copy(init) assignments = np.zeros(cells) for it in range(max_iters): lls = poisson_ll(data, centers) #cluster_dists = np.zeros((cells, k)) new_assignments = np.argmax(lls, 1) if np.equal(assignments, new_assignments).all(): #print 'ending: ', centers return new_assignments, centers for c in range(k): if sparse.issparse(data): if data[:,new_assignments==c].shape[0]==0: # re-initialize centers? new_c, _ = kmeans_pp(data, k, centers[:,:c]) centers[:,c] = new_c[:,c] else: centers[:,c] = np.asarray(data[:,new_assignments==c].mean(1)).flatten() else: if len(data[:,new_assignments==c])==0: new_c, _ = kmeans_pp(data, k, centers[:,:c]) centers[:,c] = new_c[:,c] else: centers[:,c] = np.mean(data[:,new_assignments==c], 1) assignments = new_assignments return assignments, centers
def function[poisson_cluster, parameter[data, k, init, max_iters]]: constant[ Performs Poisson hard EM on the given data. Args: data (array): A 2d array- genes x cells. Can be dense or sparse; for best performance, sparse matrices should be in CSC format. k (int): Number of clusters init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++ max_iters (int, optional): Maximum number of iterations. Default: 100 Returns: a tuple of two arrays: a cells x 1 vector of cluster assignments, and a genes x k array of cluster means. ] <ast.Tuple object at 0x7da1b1a2da80> assign[=] name[data].shape if <ast.BoolOp object at 0x7da1b1a2cc10> begin[:] variable[data] assign[=] call[name[sparse].csc_matrix, parameter[name[data]]] <ast.Tuple object at 0x7da1b1a2d000> assign[=] call[name[kmeans_pp], parameter[name[data], name[k]]] variable[centers] assign[=] call[name[np].copy, parameter[name[init]]] variable[assignments] assign[=] call[name[np].zeros, parameter[name[cells]]] for taget[name[it]] in starred[call[name[range], parameter[name[max_iters]]]] begin[:] variable[lls] assign[=] call[name[poisson_ll], parameter[name[data], name[centers]]] variable[new_assignments] assign[=] call[name[np].argmax, parameter[name[lls], constant[1]]] if call[call[name[np].equal, parameter[name[assignments], name[new_assignments]]].all, parameter[]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1a2c250>, <ast.Name object at 0x7da1b1a2f0a0>]]] for taget[name[c]] in starred[call[name[range], parameter[name[k]]]] begin[:] if call[name[sparse].issparse, parameter[name[data]]] begin[:] if compare[call[call[name[data]][tuple[[<ast.Slice object at 0x7da18ede76d0>, <ast.Compare object at 0x7da18ede7df0>]]].shape][constant[0]] equal[==] constant[0]] begin[:] <ast.Tuple object at 0x7da18ede77c0> assign[=] call[name[kmeans_pp], parameter[name[data], name[k], call[name[centers]][tuple[[<ast.Slice object at 0x7da18ede71c0>, <ast.Slice object at 0x7da18ede55d0>]]]]] 
call[name[centers]][tuple[[<ast.Slice object at 0x7da18ede5de0>, <ast.Name object at 0x7da18ede4100>]]] assign[=] call[name[new_c]][tuple[[<ast.Slice object at 0x7da18ede54e0>, <ast.Name object at 0x7da18ede5930>]]] variable[assignments] assign[=] name[new_assignments] return[tuple[[<ast.Name object at 0x7da1b26afc70>, <ast.Name object at 0x7da1b26af2e0>]]]
keyword[def] identifier[poisson_cluster] ( identifier[data] , identifier[k] , identifier[init] = keyword[None] , identifier[max_iters] = literal[int] ): literal[string] identifier[genes] , identifier[cells] = identifier[data] . identifier[shape] keyword[if] identifier[sparse] . identifier[issparse] ( identifier[data] ) keyword[and] keyword[not] identifier[sparse] . identifier[isspmatrix_csc] ( identifier[data] ): identifier[data] = identifier[sparse] . identifier[csc_matrix] ( identifier[data] ) identifier[init] , identifier[assignments] = identifier[kmeans_pp] ( identifier[data] , identifier[k] , identifier[centers] = identifier[init] ) identifier[centers] = identifier[np] . identifier[copy] ( identifier[init] ) identifier[assignments] = identifier[np] . identifier[zeros] ( identifier[cells] ) keyword[for] identifier[it] keyword[in] identifier[range] ( identifier[max_iters] ): identifier[lls] = identifier[poisson_ll] ( identifier[data] , identifier[centers] ) identifier[new_assignments] = identifier[np] . identifier[argmax] ( identifier[lls] , literal[int] ) keyword[if] identifier[np] . identifier[equal] ( identifier[assignments] , identifier[new_assignments] ). identifier[all] (): keyword[return] identifier[new_assignments] , identifier[centers] keyword[for] identifier[c] keyword[in] identifier[range] ( identifier[k] ): keyword[if] identifier[sparse] . identifier[issparse] ( identifier[data] ): keyword[if] identifier[data] [:, identifier[new_assignments] == identifier[c] ]. identifier[shape] [ literal[int] ]== literal[int] : identifier[new_c] , identifier[_] = identifier[kmeans_pp] ( identifier[data] , identifier[k] , identifier[centers] [:,: identifier[c] ]) identifier[centers] [:, identifier[c] ]= identifier[new_c] [:, identifier[c] ] keyword[else] : identifier[centers] [:, identifier[c] ]= identifier[np] . identifier[asarray] ( identifier[data] [:, identifier[new_assignments] == identifier[c] ]. identifier[mean] ( literal[int] )). 
identifier[flatten] () keyword[else] : keyword[if] identifier[len] ( identifier[data] [:, identifier[new_assignments] == identifier[c] ])== literal[int] : identifier[new_c] , identifier[_] = identifier[kmeans_pp] ( identifier[data] , identifier[k] , identifier[centers] [:,: identifier[c] ]) identifier[centers] [:, identifier[c] ]= identifier[new_c] [:, identifier[c] ] keyword[else] : identifier[centers] [:, identifier[c] ]= identifier[np] . identifier[mean] ( identifier[data] [:, identifier[new_assignments] == identifier[c] ], literal[int] ) identifier[assignments] = identifier[new_assignments] keyword[return] identifier[assignments] , identifier[centers]
def poisson_cluster(data, k, init=None, max_iters=100): """ Performs Poisson hard EM on the given data. Args: data (array): A 2d array- genes x cells. Can be dense or sparse; for best performance, sparse matrices should be in CSC format. k (int): Number of clusters init (array, optional): Initial centers - genes x k array. Default: None, use kmeans++ max_iters (int, optional): Maximum number of iterations. Default: 100 Returns: a tuple of two arrays: a cells x 1 vector of cluster assignments, and a genes x k array of cluster means. """ # TODO: be able to use a combination of fixed and unknown starting points # e.g., have init values only for certain genes, have a row of all # zeros indicating that kmeans++ should be used for that row. (genes, cells) = data.shape #print 'starting: ', centers if sparse.issparse(data) and (not sparse.isspmatrix_csc(data)): data = sparse.csc_matrix(data) # depends on [control=['if'], data=[]] (init, assignments) = kmeans_pp(data, k, centers=init) centers = np.copy(init) assignments = np.zeros(cells) for it in range(max_iters): lls = poisson_ll(data, centers) #cluster_dists = np.zeros((cells, k)) new_assignments = np.argmax(lls, 1) if np.equal(assignments, new_assignments).all(): #print 'ending: ', centers return (new_assignments, centers) # depends on [control=['if'], data=[]] for c in range(k): if sparse.issparse(data): if data[:, new_assignments == c].shape[0] == 0: # re-initialize centers? 
(new_c, _) = kmeans_pp(data, k, centers[:, :c]) centers[:, c] = new_c[:, c] # depends on [control=['if'], data=[]] else: centers[:, c] = np.asarray(data[:, new_assignments == c].mean(1)).flatten() # depends on [control=['if'], data=[]] elif len(data[:, new_assignments == c]) == 0: (new_c, _) = kmeans_pp(data, k, centers[:, :c]) centers[:, c] = new_c[:, c] # depends on [control=['if'], data=[]] else: centers[:, c] = np.mean(data[:, new_assignments == c], 1) # depends on [control=['for'], data=['c']] assignments = new_assignments # depends on [control=['for'], data=[]] return (assignments, centers)
def _aload32(ins): ''' Load a 32 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. ''' output = _addr(ins.quad[2]) output.append('call __ILOAD32') output.append('push de') output.append('push hl') REQUIRES.add('iload32.asm') return output
def function[_aload32, parameter[ins]]: constant[ Load a 32 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. ] variable[output] assign[=] call[name[_addr], parameter[call[name[ins].quad][constant[2]]]] call[name[output].append, parameter[constant[call __ILOAD32]]] call[name[output].append, parameter[constant[push de]]] call[name[output].append, parameter[constant[push hl]]] call[name[REQUIRES].add, parameter[constant[iload32.asm]]] return[name[output]]
keyword[def] identifier[_aload32] ( identifier[ins] ): literal[string] identifier[output] = identifier[_addr] ( identifier[ins] . identifier[quad] [ literal[int] ]) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[output] . identifier[append] ( literal[string] ) identifier[REQUIRES] . identifier[add] ( literal[string] ) keyword[return] identifier[output]
def _aload32(ins): """ Load a 32 bit value from a memory address If 2nd arg. start with '*', it is always treated as an indirect value. """ output = _addr(ins.quad[2]) output.append('call __ILOAD32') output.append('push de') output.append('push hl') REQUIRES.add('iload32.asm') return output
def sub_path(self, path): """ If this redirect is a regular expression, it will return a rewritten version of `path`; otherwise returns the `new_path`. """ if not self.regular_expression: return self.new_path return re.sub(self.old_path, self.new_path, path)
def function[sub_path, parameter[self, path]]: constant[ If this redirect is a regular expression, it will return a rewritten version of `path`; otherwise returns the `new_path`. ] if <ast.UnaryOp object at 0x7da1b1648340> begin[:] return[name[self].new_path] return[call[name[re].sub, parameter[name[self].old_path, name[self].new_path, name[path]]]]
keyword[def] identifier[sub_path] ( identifier[self] , identifier[path] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[regular_expression] : keyword[return] identifier[self] . identifier[new_path] keyword[return] identifier[re] . identifier[sub] ( identifier[self] . identifier[old_path] , identifier[self] . identifier[new_path] , identifier[path] )
def sub_path(self, path): """ If this redirect is a regular expression, it will return a rewritten version of `path`; otherwise returns the `new_path`. """ if not self.regular_expression: return self.new_path # depends on [control=['if'], data=[]] return re.sub(self.old_path, self.new_path, path)
def entry_point(context, block_name): """include an snippet at the bottom of a block, if it exists For example, if the plugin with slug 'attachments' is registered waliki/attachments_edit_content.html will be included with {% entry_point 'edit_content' %} which is declared at the bottom of the block 'content' in edit.html """ from waliki.plugins import get_plugins includes = [] for plugin in get_plugins(): template_name = 'waliki/%s_%s.html' % (plugin.slug, block_name) try: # template exists template.loader.get_template(template_name) includes.append(template_name) except template.TemplateDoesNotExist: continue context.update({'includes': includes}) return context
def function[entry_point, parameter[context, block_name]]: constant[include an snippet at the bottom of a block, if it exists For example, if the plugin with slug 'attachments' is registered waliki/attachments_edit_content.html will be included with {% entry_point 'edit_content' %} which is declared at the bottom of the block 'content' in edit.html ] from relative_module[waliki.plugins] import module[get_plugins] variable[includes] assign[=] list[[]] for taget[name[plugin]] in starred[call[name[get_plugins], parameter[]]] begin[:] variable[template_name] assign[=] binary_operation[constant[waliki/%s_%s.html] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Attribute object at 0x7da20c6e5ab0>, <ast.Name object at 0x7da20c6e6380>]]] <ast.Try object at 0x7da20c6e5870> call[name[context].update, parameter[dictionary[[<ast.Constant object at 0x7da18c4cd960>], [<ast.Name object at 0x7da18c4cd6c0>]]]] return[name[context]]
keyword[def] identifier[entry_point] ( identifier[context] , identifier[block_name] ): literal[string] keyword[from] identifier[waliki] . identifier[plugins] keyword[import] identifier[get_plugins] identifier[includes] =[] keyword[for] identifier[plugin] keyword[in] identifier[get_plugins] (): identifier[template_name] = literal[string] %( identifier[plugin] . identifier[slug] , identifier[block_name] ) keyword[try] : identifier[template] . identifier[loader] . identifier[get_template] ( identifier[template_name] ) identifier[includes] . identifier[append] ( identifier[template_name] ) keyword[except] identifier[template] . identifier[TemplateDoesNotExist] : keyword[continue] identifier[context] . identifier[update] ({ literal[string] : identifier[includes] }) keyword[return] identifier[context]
def entry_point(context, block_name): """include an snippet at the bottom of a block, if it exists For example, if the plugin with slug 'attachments' is registered waliki/attachments_edit_content.html will be included with {% entry_point 'edit_content' %} which is declared at the bottom of the block 'content' in edit.html """ from waliki.plugins import get_plugins includes = [] for plugin in get_plugins(): template_name = 'waliki/%s_%s.html' % (plugin.slug, block_name) try: # template exists template.loader.get_template(template_name) includes.append(template_name) # depends on [control=['try'], data=[]] except template.TemplateDoesNotExist: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['plugin']] context.update({'includes': includes}) return context
def _call(self, resource, params): """Retrive the given resource. :param resource: resource to retrieve :param params: dict with the HTTP parameters needed to retrieve the given resource """ url = self.URL % {'base': self.base_url, 'resource': resource} logger.debug("Confluence client requests: %s params: %s", resource, str(params)) while True: r = self.fetch(url, payload=params) yield r.text # Pagination is available when 'next' link exists j = r.json() if '_links' not in j: break if 'next' not in j['_links']: break url = urijoin(self.base_url, j['_links']['next']) params = {}
def function[_call, parameter[self, resource, params]]: constant[Retrive the given resource. :param resource: resource to retrieve :param params: dict with the HTTP parameters needed to retrieve the given resource ] variable[url] assign[=] binary_operation[name[self].URL <ast.Mod object at 0x7da2590d6920> dictionary[[<ast.Constant object at 0x7da1b020f6d0>, <ast.Constant object at 0x7da1b020dd50>], [<ast.Attribute object at 0x7da1b020dcf0>, <ast.Name object at 0x7da1b020e860>]]] call[name[logger].debug, parameter[constant[Confluence client requests: %s params: %s], name[resource], call[name[str], parameter[name[params]]]]] while constant[True] begin[:] variable[r] assign[=] call[name[self].fetch, parameter[name[url]]] <ast.Yield object at 0x7da1b020e350> variable[j] assign[=] call[name[r].json, parameter[]] if compare[constant[_links] <ast.NotIn object at 0x7da2590d7190> name[j]] begin[:] break if compare[constant[next] <ast.NotIn object at 0x7da2590d7190> call[name[j]][constant[_links]]] begin[:] break variable[url] assign[=] call[name[urijoin], parameter[name[self].base_url, call[call[name[j]][constant[_links]]][constant[next]]]] variable[params] assign[=] dictionary[[], []]
keyword[def] identifier[_call] ( identifier[self] , identifier[resource] , identifier[params] ): literal[string] identifier[url] = identifier[self] . identifier[URL] %{ literal[string] : identifier[self] . identifier[base_url] , literal[string] : identifier[resource] } identifier[logger] . identifier[debug] ( literal[string] , identifier[resource] , identifier[str] ( identifier[params] )) keyword[while] keyword[True] : identifier[r] = identifier[self] . identifier[fetch] ( identifier[url] , identifier[payload] = identifier[params] ) keyword[yield] identifier[r] . identifier[text] identifier[j] = identifier[r] . identifier[json] () keyword[if] literal[string] keyword[not] keyword[in] identifier[j] : keyword[break] keyword[if] literal[string] keyword[not] keyword[in] identifier[j] [ literal[string] ]: keyword[break] identifier[url] = identifier[urijoin] ( identifier[self] . identifier[base_url] , identifier[j] [ literal[string] ][ literal[string] ]) identifier[params] ={}
def _call(self, resource, params): """Retrive the given resource. :param resource: resource to retrieve :param params: dict with the HTTP parameters needed to retrieve the given resource """ url = self.URL % {'base': self.base_url, 'resource': resource} logger.debug('Confluence client requests: %s params: %s', resource, str(params)) while True: r = self.fetch(url, payload=params) yield r.text # Pagination is available when 'next' link exists j = r.json() if '_links' not in j: break # depends on [control=['if'], data=[]] if 'next' not in j['_links']: break # depends on [control=['if'], data=[]] url = urijoin(self.base_url, j['_links']['next']) params = {} # depends on [control=['while'], data=[]]
def invalidate_model_cache(sender, instance, **kwargs): """ Signal receiver for models to invalidate model cache of sender and related models. Model cache is invalidated by generating new key for each model. Parameters ~~~~~~~~~~ sender The model class instance The actual instance being saved. """ logger.debug('Received post_save/post_delete signal from sender {0}'.format(sender)) if django.VERSION >= (1, 8): related_tables = set( [f.related_model._meta.db_table for f in sender._meta.get_fields() if f.related_model is not None and (((f.one_to_many or f.one_to_one) and f.auto_created) or f.many_to_one or (f.many_to_many and not f.auto_created))]) else: related_tables = set([rel.model._meta.db_table for rel in sender._meta.get_all_related_objects()]) # temporary fix for m2m relations with an intermediate model, goes away after better join caching related_tables |= set([field.rel.to._meta.db_table for field in sender._meta.fields if issubclass(type(field), RelatedField)]) logger.debug('Related tables of sender {0} are {1}'.format(sender, related_tables)) update_model_cache(sender._meta.db_table) for related_table in related_tables: update_model_cache(related_table)
def function[invalidate_model_cache, parameter[sender, instance]]: constant[ Signal receiver for models to invalidate model cache of sender and related models. Model cache is invalidated by generating new key for each model. Parameters ~~~~~~~~~~ sender The model class instance The actual instance being saved. ] call[name[logger].debug, parameter[call[constant[Received post_save/post_delete signal from sender {0}].format, parameter[name[sender]]]]] if compare[name[django].VERSION greater_or_equal[>=] tuple[[<ast.Constant object at 0x7da1b27b6bc0>, <ast.Constant object at 0x7da1b27b6bf0>]]] begin[:] variable[related_tables] assign[=] call[name[set], parameter[<ast.ListComp object at 0x7da1b27b69e0>]] call[name[logger].debug, parameter[call[constant[Related tables of sender {0} are {1}].format, parameter[name[sender], name[related_tables]]]]] call[name[update_model_cache], parameter[name[sender]._meta.db_table]] for taget[name[related_table]] in starred[name[related_tables]] begin[:] call[name[update_model_cache], parameter[name[related_table]]]
keyword[def] identifier[invalidate_model_cache] ( identifier[sender] , identifier[instance] ,** identifier[kwargs] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sender] )) keyword[if] identifier[django] . identifier[VERSION] >=( literal[int] , literal[int] ): identifier[related_tables] = identifier[set] ( [ identifier[f] . identifier[related_model] . identifier[_meta] . identifier[db_table] keyword[for] identifier[f] keyword[in] identifier[sender] . identifier[_meta] . identifier[get_fields] () keyword[if] identifier[f] . identifier[related_model] keyword[is] keyword[not] keyword[None] keyword[and] ((( identifier[f] . identifier[one_to_many] keyword[or] identifier[f] . identifier[one_to_one] ) keyword[and] identifier[f] . identifier[auto_created] ) keyword[or] identifier[f] . identifier[many_to_one] keyword[or] ( identifier[f] . identifier[many_to_many] keyword[and] keyword[not] identifier[f] . identifier[auto_created] ))]) keyword[else] : identifier[related_tables] = identifier[set] ([ identifier[rel] . identifier[model] . identifier[_meta] . identifier[db_table] keyword[for] identifier[rel] keyword[in] identifier[sender] . identifier[_meta] . identifier[get_all_related_objects] ()]) identifier[related_tables] |= identifier[set] ([ identifier[field] . identifier[rel] . identifier[to] . identifier[_meta] . identifier[db_table] keyword[for] identifier[field] keyword[in] identifier[sender] . identifier[_meta] . identifier[fields] keyword[if] identifier[issubclass] ( identifier[type] ( identifier[field] ), identifier[RelatedField] )]) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[sender] , identifier[related_tables] )) identifier[update_model_cache] ( identifier[sender] . identifier[_meta] . identifier[db_table] ) keyword[for] identifier[related_table] keyword[in] identifier[related_tables] : identifier[update_model_cache] ( identifier[related_table] )
def invalidate_model_cache(sender, instance, **kwargs): """ Signal receiver for models to invalidate model cache of sender and related models. Model cache is invalidated by generating new key for each model. Parameters ~~~~~~~~~~ sender The model class instance The actual instance being saved. """ logger.debug('Received post_save/post_delete signal from sender {0}'.format(sender)) if django.VERSION >= (1, 8): related_tables = set([f.related_model._meta.db_table for f in sender._meta.get_fields() if f.related_model is not None and ((f.one_to_many or f.one_to_one) and f.auto_created or f.many_to_one or (f.many_to_many and (not f.auto_created)))]) # depends on [control=['if'], data=[]] else: related_tables = set([rel.model._meta.db_table for rel in sender._meta.get_all_related_objects()]) # temporary fix for m2m relations with an intermediate model, goes away after better join caching related_tables |= set([field.rel.to._meta.db_table for field in sender._meta.fields if issubclass(type(field), RelatedField)]) logger.debug('Related tables of sender {0} are {1}'.format(sender, related_tables)) update_model_cache(sender._meta.db_table) for related_table in related_tables: update_model_cache(related_table) # depends on [control=['for'], data=['related_table']]
def has_no_title(self, title, **kwargs): """ Checks if the page doesn't have the given title. Args: title (str | RegexObject): The string that the title should include. **kwargs: Arbitrary keyword arguments for :class:`TitleQuery`. Returns: bool: Whether it doesn't match. """ try: self.assert_no_title(title, **kwargs) return True except ExpectationNotMet: return False
def function[has_no_title, parameter[self, title]]: constant[ Checks if the page doesn't have the given title. Args: title (str | RegexObject): The string that the title should include. **kwargs: Arbitrary keyword arguments for :class:`TitleQuery`. Returns: bool: Whether it doesn't match. ] <ast.Try object at 0x7da1b0210eb0>
keyword[def] identifier[has_no_title] ( identifier[self] , identifier[title] ,** identifier[kwargs] ): literal[string] keyword[try] : identifier[self] . identifier[assert_no_title] ( identifier[title] ,** identifier[kwargs] ) keyword[return] keyword[True] keyword[except] identifier[ExpectationNotMet] : keyword[return] keyword[False]
def has_no_title(self, title, **kwargs): """ Checks if the page doesn't have the given title. Args: title (str | RegexObject): The string that the title should include. **kwargs: Arbitrary keyword arguments for :class:`TitleQuery`. Returns: bool: Whether it doesn't match. """ try: self.assert_no_title(title, **kwargs) return True # depends on [control=['try'], data=[]] except ExpectationNotMet: return False # depends on [control=['except'], data=[]]
def bool(self, name): """parse a boolean frame""" self._assert_is_string(name) frame = self._next_frame() if len(frame) != 1: raise MessageParserError("Expected exacty 1 byte for boolean value") val = frame != b"\x00" self.results.__dict__[name] = val return self
def function[bool, parameter[self, name]]: constant[parse a boolean frame] call[name[self]._assert_is_string, parameter[name[name]]] variable[frame] assign[=] call[name[self]._next_frame, parameter[]] if compare[call[name[len], parameter[name[frame]]] not_equal[!=] constant[1]] begin[:] <ast.Raise object at 0x7da1b13a1bd0> variable[val] assign[=] compare[name[frame] not_equal[!=] constant[b'\x00']] call[name[self].results.__dict__][name[name]] assign[=] name[val] return[name[self]]
keyword[def] identifier[bool] ( identifier[self] , identifier[name] ): literal[string] identifier[self] . identifier[_assert_is_string] ( identifier[name] ) identifier[frame] = identifier[self] . identifier[_next_frame] () keyword[if] identifier[len] ( identifier[frame] )!= literal[int] : keyword[raise] identifier[MessageParserError] ( literal[string] ) identifier[val] = identifier[frame] != literal[string] identifier[self] . identifier[results] . identifier[__dict__] [ identifier[name] ]= identifier[val] keyword[return] identifier[self]
def bool(self, name): """parse a boolean frame""" self._assert_is_string(name) frame = self._next_frame() if len(frame) != 1: raise MessageParserError('Expected exacty 1 byte for boolean value') # depends on [control=['if'], data=[]] val = frame != b'\x00' self.results.__dict__[name] = val return self
def delete_tables(self, **kwargs): """ removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method """ if not kwargs.get('disable_protection', False): raise ValueError('In order to delete all the tables, pass in disable_protection=True') with self.connection(**kwargs) as connection: kwargs['connection'] = connection self._delete_tables(**kwargs)
def function[delete_tables, parameter[self]]: constant[ removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method ] if <ast.UnaryOp object at 0x7da18f09f820> begin[:] <ast.Raise object at 0x7da18f09ecb0> with call[name[self].connection, parameter[]] begin[:] call[name[kwargs]][constant[connection]] assign[=] name[connection] call[name[self]._delete_tables, parameter[]]
keyword[def] identifier[delete_tables] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[if] keyword[not] identifier[kwargs] . identifier[get] ( literal[string] , keyword[False] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[with] identifier[self] . identifier[connection] (** identifier[kwargs] ) keyword[as] identifier[connection] : identifier[kwargs] [ literal[string] ]= identifier[connection] identifier[self] . identifier[_delete_tables] (** identifier[kwargs] )
def delete_tables(self, **kwargs): """ removes all the tables from the db this is, obviously, very bad if you didn't mean to call this, because of that, you have to pass in disable_protection=True, if it doesn't get that passed in, it won't run this method """ if not kwargs.get('disable_protection', False): raise ValueError('In order to delete all the tables, pass in disable_protection=True') # depends on [control=['if'], data=[]] with self.connection(**kwargs) as connection: kwargs['connection'] = connection self._delete_tables(**kwargs) # depends on [control=['with'], data=['connection']]
def update_resource(resource, incoming_request): """Replace the contents of a resource with *data* and return an appropriate *Response*. :param resource: :class:`sandman.model.Model` to be updated :param data: New values for the fields in *resource* """ resource.from_dict(get_resource_data(incoming_request)) _perform_database_action('merge', resource) return no_content_response()
def function[update_resource, parameter[resource, incoming_request]]: constant[Replace the contents of a resource with *data* and return an appropriate *Response*. :param resource: :class:`sandman.model.Model` to be updated :param data: New values for the fields in *resource* ] call[name[resource].from_dict, parameter[call[name[get_resource_data], parameter[name[incoming_request]]]]] call[name[_perform_database_action], parameter[constant[merge], name[resource]]] return[call[name[no_content_response], parameter[]]]
keyword[def] identifier[update_resource] ( identifier[resource] , identifier[incoming_request] ): literal[string] identifier[resource] . identifier[from_dict] ( identifier[get_resource_data] ( identifier[incoming_request] )) identifier[_perform_database_action] ( literal[string] , identifier[resource] ) keyword[return] identifier[no_content_response] ()
def update_resource(resource, incoming_request): """Replace the contents of a resource with *data* and return an appropriate *Response*. :param resource: :class:`sandman.model.Model` to be updated :param data: New values for the fields in *resource* """ resource.from_dict(get_resource_data(incoming_request)) _perform_database_action('merge', resource) return no_content_response()
def next(self): ''' Returns next image for same content_object and None if image is the last. ''' try: return self.__class__.objects.for_model(self.content_object, self.content_type).\ filter(order__lt=self.order).order_by('-order')[0] except IndexError: return None
def function[next, parameter[self]]: constant[ Returns next image for same content_object and None if image is the last. ] <ast.Try object at 0x7da1b09bbc40>
keyword[def] identifier[next] ( identifier[self] ): literal[string] keyword[try] : keyword[return] identifier[self] . identifier[__class__] . identifier[objects] . identifier[for_model] ( identifier[self] . identifier[content_object] , identifier[self] . identifier[content_type] ). identifier[filter] ( identifier[order__lt] = identifier[self] . identifier[order] ). identifier[order_by] ( literal[string] )[ literal[int] ] keyword[except] identifier[IndexError] : keyword[return] keyword[None]
def next(self): """ Returns next image for same content_object and None if image is the last. """ try: return self.__class__.objects.for_model(self.content_object, self.content_type).filter(order__lt=self.order).order_by('-order')[0] # depends on [control=['try'], data=[]] except IndexError: return None # depends on [control=['except'], data=[]]
def init(**kwargs): """Initialize the specified names in the specified databases. The general process is as follows: - Ensure the database in question exists - Ensure all tables exist in the database. """ # TODO: Iterate through all engines in name set. database = kwargs.pop('database', False) if database and not database_exists(engine['default'].url): create_database(engine['default'].url, encoding='utf8') clear_cache() expression = lambda target, table: table.create(target) test = lambda target, table: table.exists(target) op(expression, test=test, primary='init', secondary='create', **kwargs)
def function[init, parameter[]]: constant[Initialize the specified names in the specified databases. The general process is as follows: - Ensure the database in question exists - Ensure all tables exist in the database. ] variable[database] assign[=] call[name[kwargs].pop, parameter[constant[database], constant[False]]] if <ast.BoolOp object at 0x7da18dc9a440> begin[:] call[name[create_database], parameter[call[name[engine]][constant[default]].url]] call[name[clear_cache], parameter[]] variable[expression] assign[=] <ast.Lambda object at 0x7da18dc9aaa0> variable[test] assign[=] <ast.Lambda object at 0x7da18dc9b850> call[name[op], parameter[name[expression]]]
keyword[def] identifier[init] (** identifier[kwargs] ): literal[string] identifier[database] = identifier[kwargs] . identifier[pop] ( literal[string] , keyword[False] ) keyword[if] identifier[database] keyword[and] keyword[not] identifier[database_exists] ( identifier[engine] [ literal[string] ]. identifier[url] ): identifier[create_database] ( identifier[engine] [ literal[string] ]. identifier[url] , identifier[encoding] = literal[string] ) identifier[clear_cache] () identifier[expression] = keyword[lambda] identifier[target] , identifier[table] : identifier[table] . identifier[create] ( identifier[target] ) identifier[test] = keyword[lambda] identifier[target] , identifier[table] : identifier[table] . identifier[exists] ( identifier[target] ) identifier[op] ( identifier[expression] , identifier[test] = identifier[test] , identifier[primary] = literal[string] , identifier[secondary] = literal[string] ,** identifier[kwargs] )
def init(**kwargs): """Initialize the specified names in the specified databases. The general process is as follows: - Ensure the database in question exists - Ensure all tables exist in the database. """ # TODO: Iterate through all engines in name set. database = kwargs.pop('database', False) if database and (not database_exists(engine['default'].url)): create_database(engine['default'].url, encoding='utf8') clear_cache() # depends on [control=['if'], data=[]] expression = lambda target, table: table.create(target) test = lambda target, table: table.exists(target) op(expression, test=test, primary='init', secondary='create', **kwargs)
def echo_event(data): """Echo a json dump of an object using click""" return click.echo(json.dumps(data, sort_keys=True, indent=2))
def function[echo_event, parameter[data]]: constant[Echo a json dump of an object using click] return[call[name[click].echo, parameter[call[name[json].dumps, parameter[name[data]]]]]]
keyword[def] identifier[echo_event] ( identifier[data] ): literal[string] keyword[return] identifier[click] . identifier[echo] ( identifier[json] . identifier[dumps] ( identifier[data] , identifier[sort_keys] = keyword[True] , identifier[indent] = literal[int] ))
def echo_event(data): """Echo a json dump of an object using click""" return click.echo(json.dumps(data, sort_keys=True, indent=2))
def record_factory(app, fields=None): """Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc. """ # pylint: disable=line-too-long record = Record(app, { '$type': Record._type, 'isNew': True, 'applicationId': app.id, 'comments': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib' }, 'values': { '$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib' } }) fields = fields or {} for name, value in six.iteritems(fields): record[name] = value # Pop off fields with None value to allow for saving empty fields copy_raw = copy.copy(record._raw) values_dict = {} for key, value in six.iteritems(copy_raw['values']): if value is not None: values_dict[key] = value record._raw['values'] = values_dict return record
def function[record_factory, parameter[app, fields]]: constant[Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc. ] variable[record] assign[=] call[name[Record], parameter[name[app], dictionary[[<ast.Constant object at 0x7da2044c30d0>, <ast.Constant object at 0x7da2044c1ab0>, <ast.Constant object at 0x7da2044c28f0>, <ast.Constant object at 0x7da2044c0d90>, <ast.Constant object at 0x7da2044c1660>], [<ast.Attribute object at 0x7da2044c22f0>, <ast.Constant object at 0x7da2044c08b0>, <ast.Attribute object at 0x7da2044c3460>, <ast.Dict object at 0x7da2044c3430>, <ast.Dict object at 0x7da2044c0100>]]]] variable[fields] assign[=] <ast.BoolOp object at 0x7da2044c19c0> for taget[tuple[[<ast.Name object at 0x7da2044c0520>, <ast.Name object at 0x7da2044c3820>]]] in starred[call[name[six].iteritems, parameter[name[fields]]]] begin[:] call[name[record]][name[name]] assign[=] name[value] variable[copy_raw] assign[=] call[name[copy].copy, parameter[name[record]._raw]] variable[values_dict] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b12c5000>, <ast.Name object at 0x7da1b12c42b0>]]] in starred[call[name[six].iteritems, parameter[call[name[copy_raw]][constant[values]]]]] begin[:] if compare[name[value] is_not constant[None]] begin[:] call[name[values_dict]][name[key]] assign[=] name[value] call[name[record]._raw][constant[values]] assign[=] name[values_dict] return[name[record]]
keyword[def] identifier[record_factory] ( identifier[app] , identifier[fields] = keyword[None] ): literal[string] identifier[record] = identifier[Record] ( identifier[app] ,{ literal[string] : identifier[Record] . identifier[_type] , literal[string] : keyword[True] , literal[string] : identifier[app] . identifier[id] , literal[string] :{ literal[string] : literal[string] }, literal[string] :{ literal[string] : literal[string] } }) identifier[fields] = identifier[fields] keyword[or] {} keyword[for] identifier[name] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[fields] ): identifier[record] [ identifier[name] ]= identifier[value] identifier[copy_raw] = identifier[copy] . identifier[copy] ( identifier[record] . identifier[_raw] ) identifier[values_dict] ={} keyword[for] identifier[key] , identifier[value] keyword[in] identifier[six] . identifier[iteritems] ( identifier[copy_raw] [ literal[string] ]): keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[values_dict] [ identifier[key] ]= identifier[value] identifier[record] . identifier[_raw] [ literal[string] ]= identifier[values_dict] keyword[return] identifier[record]
def record_factory(app, fields=None): """Return a temporary Record instance to be used for field validation and value parsing Args: app (App): Target App to create a transient Record instance for fields (dict): Optional dict of fields and values to set on new Record instance before returning Returns: Record: Unsaved Record instance to be used for validation, creation, etc. """ # pylint: disable=line-too-long record = Record(app, {'$type': Record._type, 'isNew': True, 'applicationId': app.id, 'comments': {'$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Collections.Generic.List`1[[Core.Models.Record.Comments, Core]], mscorlib]], mscorlib'}, 'values': {'$type': 'System.Collections.Generic.Dictionary`2[[System.String, mscorlib],[System.Object, mscorlib]], mscorlib'}}) fields = fields or {} for (name, value) in six.iteritems(fields): record[name] = value # depends on [control=['for'], data=[]] # Pop off fields with None value to allow for saving empty fields copy_raw = copy.copy(record._raw) values_dict = {} for (key, value) in six.iteritems(copy_raw['values']): if value is not None: values_dict[key] = value # depends on [control=['if'], data=['value']] # depends on [control=['for'], data=[]] record._raw['values'] = values_dict return record
def snapshot(model, filename, pytest_args, exclusive, skip, solver, experimental, custom_tests, custom_config): """ Take a snapshot of a model's state and generate a report. MODEL: Path to model file. Can also be supplied via the environment variable MEMOTE_MODEL or configured in 'setup.cfg' or 'memote.ini'. """ model_obj, sbml_ver, notifications = api.validate_model( model) if model_obj is None: LOGGER.critical( "The model could not be loaded due to the following SBML errors.") utils.stdout_notifications(notifications) api.validation_report(model, notifications, filename) sys.exit(1) if not any(a.startswith("--tb") for a in pytest_args): pytest_args = ["--tb", "no"] + pytest_args # Add further directories to search for tests. pytest_args.extend(custom_tests) config = ReportConfiguration.load() # Update the default test configuration with custom ones (if any). for custom in custom_config: config.merge(ReportConfiguration.load(custom)) model_obj.solver = solver _, results = api.test_model(model_obj, sbml_version=sbml_ver, results=True, pytest_args=pytest_args, skip=skip, exclusive=exclusive, experimental=experimental) with open(filename, "w", encoding="utf-8") as file_handle: LOGGER.info("Writing snapshot report to '%s'.", filename) file_handle.write(api.snapshot_report(results, config))
def function[snapshot, parameter[model, filename, pytest_args, exclusive, skip, solver, experimental, custom_tests, custom_config]]: constant[ Take a snapshot of a model's state and generate a report. MODEL: Path to model file. Can also be supplied via the environment variable MEMOTE_MODEL or configured in 'setup.cfg' or 'memote.ini'. ] <ast.Tuple object at 0x7da20c76dea0> assign[=] call[name[api].validate_model, parameter[name[model]]] if compare[name[model_obj] is constant[None]] begin[:] call[name[LOGGER].critical, parameter[constant[The model could not be loaded due to the following SBML errors.]]] call[name[utils].stdout_notifications, parameter[name[notifications]]] call[name[api].validation_report, parameter[name[model], name[notifications], name[filename]]] call[name[sys].exit, parameter[constant[1]]] if <ast.UnaryOp object at 0x7da1b0547190> begin[:] variable[pytest_args] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b06d1060>, <ast.Constant object at 0x7da1b06d2c20>]] + name[pytest_args]] call[name[pytest_args].extend, parameter[name[custom_tests]]] variable[config] assign[=] call[name[ReportConfiguration].load, parameter[]] for taget[name[custom]] in starred[name[custom_config]] begin[:] call[name[config].merge, parameter[call[name[ReportConfiguration].load, parameter[name[custom]]]]] name[model_obj].solver assign[=] name[solver] <ast.Tuple object at 0x7da1b0626d40> assign[=] call[name[api].test_model, parameter[name[model_obj]]] with call[name[open], parameter[name[filename], constant[w]]] begin[:] call[name[LOGGER].info, parameter[constant[Writing snapshot report to '%s'.], name[filename]]] call[name[file_handle].write, parameter[call[name[api].snapshot_report, parameter[name[results], name[config]]]]]
keyword[def] identifier[snapshot] ( identifier[model] , identifier[filename] , identifier[pytest_args] , identifier[exclusive] , identifier[skip] , identifier[solver] , identifier[experimental] , identifier[custom_tests] , identifier[custom_config] ): literal[string] identifier[model_obj] , identifier[sbml_ver] , identifier[notifications] = identifier[api] . identifier[validate_model] ( identifier[model] ) keyword[if] identifier[model_obj] keyword[is] keyword[None] : identifier[LOGGER] . identifier[critical] ( literal[string] ) identifier[utils] . identifier[stdout_notifications] ( identifier[notifications] ) identifier[api] . identifier[validation_report] ( identifier[model] , identifier[notifications] , identifier[filename] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[if] keyword[not] identifier[any] ( identifier[a] . identifier[startswith] ( literal[string] ) keyword[for] identifier[a] keyword[in] identifier[pytest_args] ): identifier[pytest_args] =[ literal[string] , literal[string] ]+ identifier[pytest_args] identifier[pytest_args] . identifier[extend] ( identifier[custom_tests] ) identifier[config] = identifier[ReportConfiguration] . identifier[load] () keyword[for] identifier[custom] keyword[in] identifier[custom_config] : identifier[config] . identifier[merge] ( identifier[ReportConfiguration] . identifier[load] ( identifier[custom] )) identifier[model_obj] . identifier[solver] = identifier[solver] identifier[_] , identifier[results] = identifier[api] . 
identifier[test_model] ( identifier[model_obj] , identifier[sbml_version] = identifier[sbml_ver] , identifier[results] = keyword[True] , identifier[pytest_args] = identifier[pytest_args] , identifier[skip] = identifier[skip] , identifier[exclusive] = identifier[exclusive] , identifier[experimental] = identifier[experimental] ) keyword[with] identifier[open] ( identifier[filename] , literal[string] , identifier[encoding] = literal[string] ) keyword[as] identifier[file_handle] : identifier[LOGGER] . identifier[info] ( literal[string] , identifier[filename] ) identifier[file_handle] . identifier[write] ( identifier[api] . identifier[snapshot_report] ( identifier[results] , identifier[config] ))
def snapshot(model, filename, pytest_args, exclusive, skip, solver, experimental, custom_tests, custom_config): """ Take a snapshot of a model's state and generate a report. MODEL: Path to model file. Can also be supplied via the environment variable MEMOTE_MODEL or configured in 'setup.cfg' or 'memote.ini'. """ (model_obj, sbml_ver, notifications) = api.validate_model(model) if model_obj is None: LOGGER.critical('The model could not be loaded due to the following SBML errors.') utils.stdout_notifications(notifications) api.validation_report(model, notifications, filename) sys.exit(1) # depends on [control=['if'], data=[]] if not any((a.startswith('--tb') for a in pytest_args)): pytest_args = ['--tb', 'no'] + pytest_args # depends on [control=['if'], data=[]] # Add further directories to search for tests. pytest_args.extend(custom_tests) config = ReportConfiguration.load() # Update the default test configuration with custom ones (if any). for custom in custom_config: config.merge(ReportConfiguration.load(custom)) # depends on [control=['for'], data=['custom']] model_obj.solver = solver (_, results) = api.test_model(model_obj, sbml_version=sbml_ver, results=True, pytest_args=pytest_args, skip=skip, exclusive=exclusive, experimental=experimental) with open(filename, 'w', encoding='utf-8') as file_handle: LOGGER.info("Writing snapshot report to '%s'.", filename) file_handle.write(api.snapshot_report(results, config)) # depends on [control=['with'], data=['file_handle']]
def dSbus_dV(Y, V): """ Computes the partial derivative of power injection w.r.t. voltage. References: Ray Zimmerman, "dSbus_dV.m", MATPOWER, version 3.2, PSERC (Cornell), http://www.pserc.cornell.edu/matpower/ """ I = Y * V diagV = spdiag(V) diagIbus = spdiag(I) diagVnorm = spdiag(div(V, abs(V))) # Element-wise division. dS_dVm = diagV * conj(Y * diagVnorm) + conj(diagIbus) * diagVnorm dS_dVa = 1j * diagV * conj(diagIbus - Y * diagV) return dS_dVm, dS_dVa
def function[dSbus_dV, parameter[Y, V]]: constant[ Computes the partial derivative of power injection w.r.t. voltage. References: Ray Zimmerman, "dSbus_dV.m", MATPOWER, version 3.2, PSERC (Cornell), http://www.pserc.cornell.edu/matpower/ ] variable[I] assign[=] binary_operation[name[Y] * name[V]] variable[diagV] assign[=] call[name[spdiag], parameter[name[V]]] variable[diagIbus] assign[=] call[name[spdiag], parameter[name[I]]] variable[diagVnorm] assign[=] call[name[spdiag], parameter[call[name[div], parameter[name[V], call[name[abs], parameter[name[V]]]]]]] variable[dS_dVm] assign[=] binary_operation[binary_operation[name[diagV] * call[name[conj], parameter[binary_operation[name[Y] * name[diagVnorm]]]]] + binary_operation[call[name[conj], parameter[name[diagIbus]]] * name[diagVnorm]]] variable[dS_dVa] assign[=] binary_operation[binary_operation[constant[1j] * name[diagV]] * call[name[conj], parameter[binary_operation[name[diagIbus] - binary_operation[name[Y] * name[diagV]]]]]] return[tuple[[<ast.Name object at 0x7da1b25d2500>, <ast.Name object at 0x7da1b25d2200>]]]
keyword[def] identifier[dSbus_dV] ( identifier[Y] , identifier[V] ): literal[string] identifier[I] = identifier[Y] * identifier[V] identifier[diagV] = identifier[spdiag] ( identifier[V] ) identifier[diagIbus] = identifier[spdiag] ( identifier[I] ) identifier[diagVnorm] = identifier[spdiag] ( identifier[div] ( identifier[V] , identifier[abs] ( identifier[V] ))) identifier[dS_dVm] = identifier[diagV] * identifier[conj] ( identifier[Y] * identifier[diagVnorm] )+ identifier[conj] ( identifier[diagIbus] )* identifier[diagVnorm] identifier[dS_dVa] = literal[int] * identifier[diagV] * identifier[conj] ( identifier[diagIbus] - identifier[Y] * identifier[diagV] ) keyword[return] identifier[dS_dVm] , identifier[dS_dVa]
def dSbus_dV(Y, V): """ Computes the partial derivative of power injection w.r.t. voltage. References: Ray Zimmerman, "dSbus_dV.m", MATPOWER, version 3.2, PSERC (Cornell), http://www.pserc.cornell.edu/matpower/ """ I = Y * V diagV = spdiag(V) diagIbus = spdiag(I) diagVnorm = spdiag(div(V, abs(V))) # Element-wise division. dS_dVm = diagV * conj(Y * diagVnorm) + conj(diagIbus) * diagVnorm dS_dVa = 1j * diagV * conj(diagIbus - Y * diagV) return (dS_dVm, dS_dVa)
def training_env(): # type: () -> _env.TrainingEnv """Create a TrainingEnv. Returns: TrainingEnv: an instance of TrainingEnv """ from sagemaker_containers import _env return _env.TrainingEnv( resource_config=_env.read_resource_config(), input_data_config=_env.read_input_data_config(), hyperparameters=_env.read_hyperparameters())
def function[training_env, parameter[]]: constant[Create a TrainingEnv. Returns: TrainingEnv: an instance of TrainingEnv ] from relative_module[sagemaker_containers] import module[_env] return[call[name[_env].TrainingEnv, parameter[]]]
keyword[def] identifier[training_env] (): literal[string] keyword[from] identifier[sagemaker_containers] keyword[import] identifier[_env] keyword[return] identifier[_env] . identifier[TrainingEnv] ( identifier[resource_config] = identifier[_env] . identifier[read_resource_config] (), identifier[input_data_config] = identifier[_env] . identifier[read_input_data_config] (), identifier[hyperparameters] = identifier[_env] . identifier[read_hyperparameters] ())
def training_env(): # type: () -> _env.TrainingEnv 'Create a TrainingEnv.\n\n Returns:\n TrainingEnv: an instance of TrainingEnv\n ' from sagemaker_containers import _env return _env.TrainingEnv(resource_config=_env.read_resource_config(), input_data_config=_env.read_input_data_config(), hyperparameters=_env.read_hyperparameters())
def validate_token(key, token, user_id, action_id="", current_time=None): """Validates that the given token authorizes the user for the action. Tokens are invalid if the time of issue is too old or if the token does not match what generateToken outputs (i.e. the token was forged). Args: key: secret key to use. token: a string of the token generated by generateToken. user_id: the user ID of the authenticated user. action_id: a string identifier of the action they requested authorization for. Returns: A boolean - True if the user is authorized for the action, False otherwise. """ if not token: return False try: decoded = base64.urlsafe_b64decode(token) token_time = int(decoded.split(DELIMITER)[-1]) except (TypeError, ValueError, binascii.Error): return False if current_time is None: current_time = time.time() # If the token is too old it's not valid. if current_time - token_time > DEFAULT_TIMEOUT_SECS: return False # The given token should match the generated one with the same time. expected_token = generate_token(key, user_id, action_id=action_id, when=token_time) if len(token) != len(expected_token): return False # Perform constant time comparison to avoid timing attacks different = 0 for x, y in zip(bytearray(token), bytearray(expected_token)): different |= x ^ y return not different
def function[validate_token, parameter[key, token, user_id, action_id, current_time]]: constant[Validates that the given token authorizes the user for the action. Tokens are invalid if the time of issue is too old or if the token does not match what generateToken outputs (i.e. the token was forged). Args: key: secret key to use. token: a string of the token generated by generateToken. user_id: the user ID of the authenticated user. action_id: a string identifier of the action they requested authorization for. Returns: A boolean - True if the user is authorized for the action, False otherwise. ] if <ast.UnaryOp object at 0x7da20e955540> begin[:] return[constant[False]] <ast.Try object at 0x7da20e955720> if compare[name[current_time] is constant[None]] begin[:] variable[current_time] assign[=] call[name[time].time, parameter[]] if compare[binary_operation[name[current_time] - name[token_time]] greater[>] name[DEFAULT_TIMEOUT_SECS]] begin[:] return[constant[False]] variable[expected_token] assign[=] call[name[generate_token], parameter[name[key], name[user_id]]] if compare[call[name[len], parameter[name[token]]] not_equal[!=] call[name[len], parameter[name[expected_token]]]] begin[:] return[constant[False]] variable[different] assign[=] constant[0] for taget[tuple[[<ast.Name object at 0x7da1b01e23b0>, <ast.Name object at 0x7da1b01e1de0>]]] in starred[call[name[zip], parameter[call[name[bytearray], parameter[name[token]]], call[name[bytearray], parameter[name[expected_token]]]]]] begin[:] <ast.AugAssign object at 0x7da1b01e1600> return[<ast.UnaryOp object at 0x7da1b01e3370>]
keyword[def] identifier[validate_token] ( identifier[key] , identifier[token] , identifier[user_id] , identifier[action_id] = literal[string] , identifier[current_time] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[token] : keyword[return] keyword[False] keyword[try] : identifier[decoded] = identifier[base64] . identifier[urlsafe_b64decode] ( identifier[token] ) identifier[token_time] = identifier[int] ( identifier[decoded] . identifier[split] ( identifier[DELIMITER] )[- literal[int] ]) keyword[except] ( identifier[TypeError] , identifier[ValueError] , identifier[binascii] . identifier[Error] ): keyword[return] keyword[False] keyword[if] identifier[current_time] keyword[is] keyword[None] : identifier[current_time] = identifier[time] . identifier[time] () keyword[if] identifier[current_time] - identifier[token_time] > identifier[DEFAULT_TIMEOUT_SECS] : keyword[return] keyword[False] identifier[expected_token] = identifier[generate_token] ( identifier[key] , identifier[user_id] , identifier[action_id] = identifier[action_id] , identifier[when] = identifier[token_time] ) keyword[if] identifier[len] ( identifier[token] )!= identifier[len] ( identifier[expected_token] ): keyword[return] keyword[False] identifier[different] = literal[int] keyword[for] identifier[x] , identifier[y] keyword[in] identifier[zip] ( identifier[bytearray] ( identifier[token] ), identifier[bytearray] ( identifier[expected_token] )): identifier[different] |= identifier[x] ^ identifier[y] keyword[return] keyword[not] identifier[different]
def validate_token(key, token, user_id, action_id='', current_time=None): """Validates that the given token authorizes the user for the action. Tokens are invalid if the time of issue is too old or if the token does not match what generateToken outputs (i.e. the token was forged). Args: key: secret key to use. token: a string of the token generated by generateToken. user_id: the user ID of the authenticated user. action_id: a string identifier of the action they requested authorization for. Returns: A boolean - True if the user is authorized for the action, False otherwise. """ if not token: return False # depends on [control=['if'], data=[]] try: decoded = base64.urlsafe_b64decode(token) token_time = int(decoded.split(DELIMITER)[-1]) # depends on [control=['try'], data=[]] except (TypeError, ValueError, binascii.Error): return False # depends on [control=['except'], data=[]] if current_time is None: current_time = time.time() # depends on [control=['if'], data=['current_time']] # If the token is too old it's not valid. if current_time - token_time > DEFAULT_TIMEOUT_SECS: return False # depends on [control=['if'], data=[]] # The given token should match the generated one with the same time. expected_token = generate_token(key, user_id, action_id=action_id, when=token_time) if len(token) != len(expected_token): return False # depends on [control=['if'], data=[]] # Perform constant time comparison to avoid timing attacks different = 0 for (x, y) in zip(bytearray(token), bytearray(expected_token)): different |= x ^ y # depends on [control=['for'], data=[]] return not different
def type_map(gtype, fn): """Map fn over all child types of gtype.""" cb = ffi.callback('VipsTypeMap2Fn', fn) return vips_lib.vips_type_map(gtype, cb, ffi.NULL, ffi.NULL)
def function[type_map, parameter[gtype, fn]]: constant[Map fn over all child types of gtype.] variable[cb] assign[=] call[name[ffi].callback, parameter[constant[VipsTypeMap2Fn], name[fn]]] return[call[name[vips_lib].vips_type_map, parameter[name[gtype], name[cb], name[ffi].NULL, name[ffi].NULL]]]
keyword[def] identifier[type_map] ( identifier[gtype] , identifier[fn] ): literal[string] identifier[cb] = identifier[ffi] . identifier[callback] ( literal[string] , identifier[fn] ) keyword[return] identifier[vips_lib] . identifier[vips_type_map] ( identifier[gtype] , identifier[cb] , identifier[ffi] . identifier[NULL] , identifier[ffi] . identifier[NULL] )
def type_map(gtype, fn): """Map fn over all child types of gtype.""" cb = ffi.callback('VipsTypeMap2Fn', fn) return vips_lib.vips_type_map(gtype, cb, ffi.NULL, ffi.NULL)
def runSearchRnaQuantifications(self, request): """ Returns a SearchRnaQuantificationResponse for the specified SearchRnaQuantificationRequest object. """ return self.runSearchRequest( request, protocol.SearchRnaQuantificationsRequest, protocol.SearchRnaQuantificationsResponse, self.rnaQuantificationsGenerator)
def function[runSearchRnaQuantifications, parameter[self, request]]: constant[ Returns a SearchRnaQuantificationResponse for the specified SearchRnaQuantificationRequest object. ] return[call[name[self].runSearchRequest, parameter[name[request], name[protocol].SearchRnaQuantificationsRequest, name[protocol].SearchRnaQuantificationsResponse, name[self].rnaQuantificationsGenerator]]]
keyword[def] identifier[runSearchRnaQuantifications] ( identifier[self] , identifier[request] ): literal[string] keyword[return] identifier[self] . identifier[runSearchRequest] ( identifier[request] , identifier[protocol] . identifier[SearchRnaQuantificationsRequest] , identifier[protocol] . identifier[SearchRnaQuantificationsResponse] , identifier[self] . identifier[rnaQuantificationsGenerator] )
def runSearchRnaQuantifications(self, request): """ Returns a SearchRnaQuantificationResponse for the specified SearchRnaQuantificationRequest object. """ return self.runSearchRequest(request, protocol.SearchRnaQuantificationsRequest, protocol.SearchRnaQuantificationsResponse, self.rnaQuantificationsGenerator)
def from_stat(stat): """ Return an instantiated geom object geoms should not override this method. Parameters ---------- stat : stat `stat` Returns ------- out : geom A geom object Raises ------ :class:`PlotnineError` if unable to create a `geom`. """ name = stat.params['geom'] if issubclass(type(name), geom): return name if isinstance(name, type) and issubclass(name, geom): klass = name elif is_string(name): if not name.startswith('geom_'): name = 'geom_{}'.format(name) klass = Registry[name] else: raise PlotnineError( 'Unknown geom of type {}'.format(type(name))) return klass(stat=stat, **stat._kwargs)
def function[from_stat, parameter[stat]]: constant[ Return an instantiated geom object geoms should not override this method. Parameters ---------- stat : stat `stat` Returns ------- out : geom A geom object Raises ------ :class:`PlotnineError` if unable to create a `geom`. ] variable[name] assign[=] call[name[stat].params][constant[geom]] if call[name[issubclass], parameter[call[name[type], parameter[name[name]]], name[geom]]] begin[:] return[name[name]] if <ast.BoolOp object at 0x7da18eb540d0> begin[:] variable[klass] assign[=] name[name] return[call[name[klass], parameter[]]]
keyword[def] identifier[from_stat] ( identifier[stat] ): literal[string] identifier[name] = identifier[stat] . identifier[params] [ literal[string] ] keyword[if] identifier[issubclass] ( identifier[type] ( identifier[name] ), identifier[geom] ): keyword[return] identifier[name] keyword[if] identifier[isinstance] ( identifier[name] , identifier[type] ) keyword[and] identifier[issubclass] ( identifier[name] , identifier[geom] ): identifier[klass] = identifier[name] keyword[elif] identifier[is_string] ( identifier[name] ): keyword[if] keyword[not] identifier[name] . identifier[startswith] ( literal[string] ): identifier[name] = literal[string] . identifier[format] ( identifier[name] ) identifier[klass] = identifier[Registry] [ identifier[name] ] keyword[else] : keyword[raise] identifier[PlotnineError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[name] ))) keyword[return] identifier[klass] ( identifier[stat] = identifier[stat] ,** identifier[stat] . identifier[_kwargs] )
def from_stat(stat): """ Return an instantiated geom object geoms should not override this method. Parameters ---------- stat : stat `stat` Returns ------- out : geom A geom object Raises ------ :class:`PlotnineError` if unable to create a `geom`. """ name = stat.params['geom'] if issubclass(type(name), geom): return name # depends on [control=['if'], data=[]] if isinstance(name, type) and issubclass(name, geom): klass = name # depends on [control=['if'], data=[]] elif is_string(name): if not name.startswith('geom_'): name = 'geom_{}'.format(name) # depends on [control=['if'], data=[]] klass = Registry[name] # depends on [control=['if'], data=[]] else: raise PlotnineError('Unknown geom of type {}'.format(type(name))) return klass(stat=stat, **stat._kwargs)
def run_bolts(command, parser, cl_args, unknown_args): """ run bolts subcommand """ cluster, role, env = cl_args['cluster'], cl_args['role'], cl_args['environ'] topology = cl_args['topology-name'] try: result = tracker_access.get_topology_info(cluster, env, topology, role) bolts = result['physical_plan']['bolts'].keys() bolt_name = cl_args['bolt'] if bolt_name: if bolt_name in bolts: bolts = [bolt_name] else: Log.error('Unknown bolt: \'%s\'' % bolt_name) raise except Exception: Log.error("Fail to connect to tracker: \'%s\'", cl_args["tracker_url"]) return False bolts_result = [] for bolt in bolts: try: metrics = tracker_access.get_component_metrics(bolt, cluster, env, topology, role) stat, header = to_table(metrics) bolts_result.append((bolt, stat, header)) except Exception: Log.error("Fail to connect to tracker: \'%s\'", cl_args["tracker_url"]) return False for i, (bolt, stat, header) in enumerate(bolts_result): if i != 0: print('') print('\'%s\' metrics:' % bolt) print(tabulate(stat, headers=header)) return True
def function[run_bolts, parameter[command, parser, cl_args, unknown_args]]: constant[ run bolts subcommand ] <ast.Tuple object at 0x7da18ede7a30> assign[=] tuple[[<ast.Subscript object at 0x7da18ede4310>, <ast.Subscript object at 0x7da18ede70d0>, <ast.Subscript object at 0x7da18ede5cc0>]] variable[topology] assign[=] call[name[cl_args]][constant[topology-name]] <ast.Try object at 0x7da18ede7e50> variable[bolts_result] assign[=] list[[]] for taget[name[bolt]] in starred[name[bolts]] begin[:] <ast.Try object at 0x7da18ede5f90> for taget[tuple[[<ast.Name object at 0x7da18ede59c0>, <ast.Tuple object at 0x7da18ede57e0>]]] in starred[call[name[enumerate], parameter[name[bolts_result]]]] begin[:] if compare[name[i] not_equal[!=] constant[0]] begin[:] call[name[print], parameter[constant[]]] call[name[print], parameter[binary_operation[constant['%s' metrics:] <ast.Mod object at 0x7da2590d6920> name[bolt]]]] call[name[print], parameter[call[name[tabulate], parameter[name[stat]]]]] return[constant[True]]
keyword[def] identifier[run_bolts] ( identifier[command] , identifier[parser] , identifier[cl_args] , identifier[unknown_args] ): literal[string] identifier[cluster] , identifier[role] , identifier[env] = identifier[cl_args] [ literal[string] ], identifier[cl_args] [ literal[string] ], identifier[cl_args] [ literal[string] ] identifier[topology] = identifier[cl_args] [ literal[string] ] keyword[try] : identifier[result] = identifier[tracker_access] . identifier[get_topology_info] ( identifier[cluster] , identifier[env] , identifier[topology] , identifier[role] ) identifier[bolts] = identifier[result] [ literal[string] ][ literal[string] ]. identifier[keys] () identifier[bolt_name] = identifier[cl_args] [ literal[string] ] keyword[if] identifier[bolt_name] : keyword[if] identifier[bolt_name] keyword[in] identifier[bolts] : identifier[bolts] =[ identifier[bolt_name] ] keyword[else] : identifier[Log] . identifier[error] ( literal[string] % identifier[bolt_name] ) keyword[raise] keyword[except] identifier[Exception] : identifier[Log] . identifier[error] ( literal[string] , identifier[cl_args] [ literal[string] ]) keyword[return] keyword[False] identifier[bolts_result] =[] keyword[for] identifier[bolt] keyword[in] identifier[bolts] : keyword[try] : identifier[metrics] = identifier[tracker_access] . identifier[get_component_metrics] ( identifier[bolt] , identifier[cluster] , identifier[env] , identifier[topology] , identifier[role] ) identifier[stat] , identifier[header] = identifier[to_table] ( identifier[metrics] ) identifier[bolts_result] . identifier[append] (( identifier[bolt] , identifier[stat] , identifier[header] )) keyword[except] identifier[Exception] : identifier[Log] . 
identifier[error] ( literal[string] , identifier[cl_args] [ literal[string] ]) keyword[return] keyword[False] keyword[for] identifier[i] ,( identifier[bolt] , identifier[stat] , identifier[header] ) keyword[in] identifier[enumerate] ( identifier[bolts_result] ): keyword[if] identifier[i] != literal[int] : identifier[print] ( literal[string] ) identifier[print] ( literal[string] % identifier[bolt] ) identifier[print] ( identifier[tabulate] ( identifier[stat] , identifier[headers] = identifier[header] )) keyword[return] keyword[True]
def run_bolts(command, parser, cl_args, unknown_args): """ run bolts subcommand """ (cluster, role, env) = (cl_args['cluster'], cl_args['role'], cl_args['environ']) topology = cl_args['topology-name'] try: result = tracker_access.get_topology_info(cluster, env, topology, role) bolts = result['physical_plan']['bolts'].keys() bolt_name = cl_args['bolt'] if bolt_name: if bolt_name in bolts: bolts = [bolt_name] # depends on [control=['if'], data=['bolt_name', 'bolts']] else: Log.error("Unknown bolt: '%s'" % bolt_name) raise # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except Exception: Log.error("Fail to connect to tracker: '%s'", cl_args['tracker_url']) return False # depends on [control=['except'], data=[]] bolts_result = [] for bolt in bolts: try: metrics = tracker_access.get_component_metrics(bolt, cluster, env, topology, role) (stat, header) = to_table(metrics) bolts_result.append((bolt, stat, header)) # depends on [control=['try'], data=[]] except Exception: Log.error("Fail to connect to tracker: '%s'", cl_args['tracker_url']) return False # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['bolt']] for (i, (bolt, stat, header)) in enumerate(bolts_result): if i != 0: print('') # depends on [control=['if'], data=[]] print("'%s' metrics:" % bolt) print(tabulate(stat, headers=header)) # depends on [control=['for'], data=[]] return True
def run(self): """ Run the shadowing computation with the values stored in ``self.arg_``. Precomputed information is stored in: * **.diff_t** (*ndarray*): ``np.diff(t)`` * **.integral** (*ndarray*): Trapezoidal data integration over time. The steps are: * :func:`get_sunpos` * :func:`Vonoroi_SH` * :func:`compute_shadows` * :func:`project_data` :retruns: None """ # Adapt series ## time if self.integral is None: if self.arg_t is not None: import datetime if type(self.arg_t[0]) == datetime.datetime: self.arg_t = self.to_minutes(time_obj=self.arg_t) else: self.arg_t = np.round(self.arg_t) elif self.arg_dt is not None: self.arg_dt = np.round(self.arg_dt) self.arg_t = self.to_minutes(dt=self.arg_dt) else: raise ValueError('At least one time parameter is needed.') self.diff_t = np.diff(self.arg_t) ## data if self.arg_data is None: self.arg_data = np.ones(self.arg_t.shape[0]) dt = self.diff_t/60 # hs rect = self.arg_data[:-1]/1000*dt # kilounits triang_side = np.diff(self.arg_data) triang = 0.5*triang_side*dt self.integral = rect + triang self.integral = np.hstack((0, self.integral)) # Computation if self.azimuth_zenit is None: self.get_sunpos(self.arg_t, self.arg_run_true_time) if self.vor_centers is None: self.Vonoroi_SH(self.arg_vor_size) self.compute_shadows() self.project_data()
def function[run, parameter[self]]: constant[ Run the shadowing computation with the values stored in ``self.arg_``. Precomputed information is stored in: * **.diff_t** (*ndarray*): ``np.diff(t)`` * **.integral** (*ndarray*): Trapezoidal data integration over time. The steps are: * :func:`get_sunpos` * :func:`Vonoroi_SH` * :func:`compute_shadows` * :func:`project_data` :retruns: None ] if compare[name[self].integral is constant[None]] begin[:] if compare[name[self].arg_t is_not constant[None]] begin[:] import module[datetime] if compare[call[name[type], parameter[call[name[self].arg_t][constant[0]]]] equal[==] name[datetime].datetime] begin[:] name[self].arg_t assign[=] call[name[self].to_minutes, parameter[]] name[self].diff_t assign[=] call[name[np].diff, parameter[name[self].arg_t]] if compare[name[self].arg_data is constant[None]] begin[:] name[self].arg_data assign[=] call[name[np].ones, parameter[call[name[self].arg_t.shape][constant[0]]]] variable[dt] assign[=] binary_operation[name[self].diff_t / constant[60]] variable[rect] assign[=] binary_operation[binary_operation[call[name[self].arg_data][<ast.Slice object at 0x7da1b2486f50>] / constant[1000]] * name[dt]] variable[triang_side] assign[=] call[name[np].diff, parameter[name[self].arg_data]] variable[triang] assign[=] binary_operation[binary_operation[constant[0.5] * name[triang_side]] * name[dt]] name[self].integral assign[=] binary_operation[name[rect] + name[triang]] name[self].integral assign[=] call[name[np].hstack, parameter[tuple[[<ast.Constant object at 0x7da1b2484e50>, <ast.Attribute object at 0x7da20e960d90>]]]] if compare[name[self].azimuth_zenit is constant[None]] begin[:] call[name[self].get_sunpos, parameter[name[self].arg_t, name[self].arg_run_true_time]] if compare[name[self].vor_centers is constant[None]] begin[:] call[name[self].Vonoroi_SH, parameter[name[self].arg_vor_size]] call[name[self].compute_shadows, parameter[]] call[name[self].project_data, parameter[]]
keyword[def] identifier[run] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[integral] keyword[is] keyword[None] : keyword[if] identifier[self] . identifier[arg_t] keyword[is] keyword[not] keyword[None] : keyword[import] identifier[datetime] keyword[if] identifier[type] ( identifier[self] . identifier[arg_t] [ literal[int] ])== identifier[datetime] . identifier[datetime] : identifier[self] . identifier[arg_t] = identifier[self] . identifier[to_minutes] ( identifier[time_obj] = identifier[self] . identifier[arg_t] ) keyword[else] : identifier[self] . identifier[arg_t] = identifier[np] . identifier[round] ( identifier[self] . identifier[arg_t] ) keyword[elif] identifier[self] . identifier[arg_dt] keyword[is] keyword[not] keyword[None] : identifier[self] . identifier[arg_dt] = identifier[np] . identifier[round] ( identifier[self] . identifier[arg_dt] ) identifier[self] . identifier[arg_t] = identifier[self] . identifier[to_minutes] ( identifier[dt] = identifier[self] . identifier[arg_dt] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[self] . identifier[diff_t] = identifier[np] . identifier[diff] ( identifier[self] . identifier[arg_t] ) keyword[if] identifier[self] . identifier[arg_data] keyword[is] keyword[None] : identifier[self] . identifier[arg_data] = identifier[np] . identifier[ones] ( identifier[self] . identifier[arg_t] . identifier[shape] [ literal[int] ]) identifier[dt] = identifier[self] . identifier[diff_t] / literal[int] identifier[rect] = identifier[self] . identifier[arg_data] [:- literal[int] ]/ literal[int] * identifier[dt] identifier[triang_side] = identifier[np] . identifier[diff] ( identifier[self] . identifier[arg_data] ) identifier[triang] = literal[int] * identifier[triang_side] * identifier[dt] identifier[self] . identifier[integral] = identifier[rect] + identifier[triang] identifier[self] . identifier[integral] = identifier[np] . 
identifier[hstack] (( literal[int] , identifier[self] . identifier[integral] )) keyword[if] identifier[self] . identifier[azimuth_zenit] keyword[is] keyword[None] : identifier[self] . identifier[get_sunpos] ( identifier[self] . identifier[arg_t] , identifier[self] . identifier[arg_run_true_time] ) keyword[if] identifier[self] . identifier[vor_centers] keyword[is] keyword[None] : identifier[self] . identifier[Vonoroi_SH] ( identifier[self] . identifier[arg_vor_size] ) identifier[self] . identifier[compute_shadows] () identifier[self] . identifier[project_data] ()
def run(self): """ Run the shadowing computation with the values stored in ``self.arg_``. Precomputed information is stored in: * **.diff_t** (*ndarray*): ``np.diff(t)`` * **.integral** (*ndarray*): Trapezoidal data integration over time. The steps are: * :func:`get_sunpos` * :func:`Vonoroi_SH` * :func:`compute_shadows` * :func:`project_data` :retruns: None """ # Adapt series ## time if self.integral is None: if self.arg_t is not None: import datetime if type(self.arg_t[0]) == datetime.datetime: self.arg_t = self.to_minutes(time_obj=self.arg_t) # depends on [control=['if'], data=[]] else: self.arg_t = np.round(self.arg_t) # depends on [control=['if'], data=[]] elif self.arg_dt is not None: self.arg_dt = np.round(self.arg_dt) self.arg_t = self.to_minutes(dt=self.arg_dt) # depends on [control=['if'], data=[]] else: raise ValueError('At least one time parameter is needed.') self.diff_t = np.diff(self.arg_t) # depends on [control=['if'], data=[]] ## data if self.arg_data is None: self.arg_data = np.ones(self.arg_t.shape[0]) # depends on [control=['if'], data=[]] dt = self.diff_t / 60 # hs rect = self.arg_data[:-1] / 1000 * dt # kilounits triang_side = np.diff(self.arg_data) triang = 0.5 * triang_side * dt self.integral = rect + triang self.integral = np.hstack((0, self.integral)) # Computation if self.azimuth_zenit is None: self.get_sunpos(self.arg_t, self.arg_run_true_time) # depends on [control=['if'], data=[]] if self.vor_centers is None: self.Vonoroi_SH(self.arg_vor_size) # depends on [control=['if'], data=[]] self.compute_shadows() self.project_data()
def get_data_port_by_id(self, data_port_id): """Search for the given data port id in the data ports of the state The method tries to find a data port in the input and output data ports as well as in the scoped variables. :param data_port_id: the unique id of the data port :return: the data port with the searched id or None if not found """ data_port = super(ContainerState, self).get_data_port_by_id(data_port_id) if data_port: return data_port if data_port_id in self.scoped_variables: return self.scoped_variables[data_port_id] return None
def function[get_data_port_by_id, parameter[self, data_port_id]]: constant[Search for the given data port id in the data ports of the state The method tries to find a data port in the input and output data ports as well as in the scoped variables. :param data_port_id: the unique id of the data port :return: the data port with the searched id or None if not found ] variable[data_port] assign[=] call[call[name[super], parameter[name[ContainerState], name[self]]].get_data_port_by_id, parameter[name[data_port_id]]] if name[data_port] begin[:] return[name[data_port]] if compare[name[data_port_id] in name[self].scoped_variables] begin[:] return[call[name[self].scoped_variables][name[data_port_id]]] return[constant[None]]
keyword[def] identifier[get_data_port_by_id] ( identifier[self] , identifier[data_port_id] ): literal[string] identifier[data_port] = identifier[super] ( identifier[ContainerState] , identifier[self] ). identifier[get_data_port_by_id] ( identifier[data_port_id] ) keyword[if] identifier[data_port] : keyword[return] identifier[data_port] keyword[if] identifier[data_port_id] keyword[in] identifier[self] . identifier[scoped_variables] : keyword[return] identifier[self] . identifier[scoped_variables] [ identifier[data_port_id] ] keyword[return] keyword[None]
def get_data_port_by_id(self, data_port_id): """Search for the given data port id in the data ports of the state The method tries to find a data port in the input and output data ports as well as in the scoped variables. :param data_port_id: the unique id of the data port :return: the data port with the searched id or None if not found """ data_port = super(ContainerState, self).get_data_port_by_id(data_port_id) if data_port: return data_port # depends on [control=['if'], data=[]] if data_port_id in self.scoped_variables: return self.scoped_variables[data_port_id] # depends on [control=['if'], data=['data_port_id']] return None
def gen_colors(img): """Format the output from imagemagick into a list of hex colors.""" magick_command = has_im() for i in range(0, 20, 1): raw_colors = imagemagick(16 + i, img, magick_command) if len(raw_colors) > 16: break elif i == 19: logging.error("Imagemagick couldn't generate a suitable palette.") sys.exit(1) else: logging.warning("Imagemagick couldn't generate a palette.") logging.warning("Trying a larger palette size %s", 16 + i) return [re.search("#.{6}", str(col)).group(0) for col in raw_colors[1:]]
def function[gen_colors, parameter[img]]: constant[Format the output from imagemagick into a list of hex colors.] variable[magick_command] assign[=] call[name[has_im], parameter[]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[20], constant[1]]]] begin[:] variable[raw_colors] assign[=] call[name[imagemagick], parameter[binary_operation[constant[16] + name[i]], name[img], name[magick_command]]] if compare[call[name[len], parameter[name[raw_colors]]] greater[>] constant[16]] begin[:] break return[<ast.ListComp object at 0x7da1b17aa6e0>]
keyword[def] identifier[gen_colors] ( identifier[img] ): literal[string] identifier[magick_command] = identifier[has_im] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] , literal[int] ): identifier[raw_colors] = identifier[imagemagick] ( literal[int] + identifier[i] , identifier[img] , identifier[magick_command] ) keyword[if] identifier[len] ( identifier[raw_colors] )> literal[int] : keyword[break] keyword[elif] identifier[i] == literal[int] : identifier[logging] . identifier[error] ( literal[string] ) identifier[sys] . identifier[exit] ( literal[int] ) keyword[else] : identifier[logging] . identifier[warning] ( literal[string] ) identifier[logging] . identifier[warning] ( literal[string] , literal[int] + identifier[i] ) keyword[return] [ identifier[re] . identifier[search] ( literal[string] , identifier[str] ( identifier[col] )). identifier[group] ( literal[int] ) keyword[for] identifier[col] keyword[in] identifier[raw_colors] [ literal[int] :]]
def gen_colors(img): """Format the output from imagemagick into a list of hex colors.""" magick_command = has_im() for i in range(0, 20, 1): raw_colors = imagemagick(16 + i, img, magick_command) if len(raw_colors) > 16: break # depends on [control=['if'], data=[]] elif i == 19: logging.error("Imagemagick couldn't generate a suitable palette.") sys.exit(1) # depends on [control=['if'], data=[]] else: logging.warning("Imagemagick couldn't generate a palette.") logging.warning('Trying a larger palette size %s', 16 + i) # depends on [control=['for'], data=['i']] return [re.search('#.{6}', str(col)).group(0) for col in raw_colors[1:]]
def snake_case(a_string):
    """Return a snake_cased version of a string.

    :param a_string: any :class:`str` object.

    Usage:

    >>> snake_case('FooBar')
    "foo_bar"
    """
    # Pass one: insert "_" before each capitalised word (capital followed
    # by lowercase letters). Pass two: split remaining lower/digit-to-upper
    # boundaries. Finally lowercase the whole result.
    words_split = re.sub(r'(.)([A-Z][a-z]+)', r'\1_\2', a_string)
    fully_split = re.sub(r'([a-z0-9])([A-Z])', r'\1_\2', words_split)
    return fully_split.lower()
def function[snake_case, parameter[a_string]]: constant[Returns a snake cased version of a string. :param a_string: any :class:`str` object. Usage: >>> snake_case('FooBar') "foo_bar" ] variable[partial] assign[=] call[name[re].sub, parameter[constant[(.)([A-Z][a-z]+)], constant[\1_\2], name[a_string]]] return[call[call[name[re].sub, parameter[constant[([a-z0-9])([A-Z])], constant[\1_\2], name[partial]]].lower, parameter[]]]
keyword[def] identifier[snake_case] ( identifier[a_string] ): literal[string] identifier[partial] = identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[a_string] ) keyword[return] identifier[re] . identifier[sub] ( literal[string] , literal[string] , identifier[partial] ). identifier[lower] ()
def snake_case(a_string): """Returns a snake cased version of a string. :param a_string: any :class:`str` object. Usage: >>> snake_case('FooBar') "foo_bar" """ partial = re.sub('(.)([A-Z][a-z]+)', '\\1_\\2', a_string) return re.sub('([a-z0-9])([A-Z])', '\\1_\\2', partial).lower()
def is_valid(self):
    """
    Tests if the dependency is in a valid state
    """
    base_valid = super(SimpleDependency, self).is_valid()
    # A dependency awaiting an immediate rebind to a pending reference is
    # still treated as valid.
    rebind_pending = (self.requirement.immediate_rebind
                      and self._pending_ref is not None)
    return base_valid or rebind_pending
def function[is_valid, parameter[self]]: constant[ Tests if the dependency is in a valid state ] return[<ast.BoolOp object at 0x7da1b033cf10>]
keyword[def] identifier[is_valid] ( identifier[self] ): literal[string] keyword[return] identifier[super] ( identifier[SimpleDependency] , identifier[self] ). identifier[is_valid] () keyword[or] ( identifier[self] . identifier[requirement] . identifier[immediate_rebind] keyword[and] identifier[self] . identifier[_pending_ref] keyword[is] keyword[not] keyword[None] )
def is_valid(self): """ Tests if the dependency is in a valid state """ return super(SimpleDependency, self).is_valid() or (self.requirement.immediate_rebind and self._pending_ref is not None)
def _clean(self, magic):
    """
    Given a magic string, remove the output tag designator.
    """
    head = magic[:2].lower()
    if magic.lower() == 'o':
        # Bare designator: the magic becomes empty.
        self.magic = ''
    elif head == 'o:':
        # "o:<magic>" form: drop the designator, keep the magic part.
        self.magic = magic[2:]
    elif head == 'o.':
        # "o.<ext>" form: record the extension, leading dot included.
        self.ext = magic[1:]
def function[_clean, parameter[self, magic]]: constant[ Given a magic string, remove the output tag designator. ] if compare[call[name[magic].lower, parameter[]] equal[==] constant[o]] begin[:] name[self].magic assign[=] constant[]
keyword[def] identifier[_clean] ( identifier[self] , identifier[magic] ): literal[string] keyword[if] identifier[magic] . identifier[lower] ()== literal[string] : identifier[self] . identifier[magic] = literal[string] keyword[elif] identifier[magic] [: literal[int] ]. identifier[lower] ()== literal[string] : identifier[self] . identifier[magic] = identifier[magic] [ literal[int] :] keyword[elif] identifier[magic] [: literal[int] ]. identifier[lower] ()== literal[string] : identifier[self] . identifier[ext] = identifier[magic] [ literal[int] :]
def _clean(self, magic): """ Given a magic string, remove the output tag designator. """ if magic.lower() == 'o': self.magic = '' # depends on [control=['if'], data=[]] elif magic[:2].lower() == 'o:': self.magic = magic[2:] # depends on [control=['if'], data=[]] elif magic[:2].lower() == 'o.': self.ext = magic[1:] # depends on [control=['if'], data=[]]
def p_contextualize_sentence(self, t):
    """contextualize_sentence : CONTEXTUALIZE LPAREN contextualize_items RPAREN
                              | CONTEXTUALIZE NUMBER LPAREN contextualize_items RPAREN"""
    # NOTE: the docstring above is the PLY grammar rule for this
    # production and must not be reworded.
    line = t.lineno(1)
    if len(t) == 5:
        # Un-numbered form: the items sit in slot 3.
        t[0] = contextualize(t[3], line=line)
    else:
        # Numbered form: NUMBER is in slot 2, the items in slot 4.
        t[0] = contextualize(t[4], t[2], line=line)
def function[p_contextualize_sentence, parameter[self, t]]: constant[contextualize_sentence : CONTEXTUALIZE LPAREN contextualize_items RPAREN | CONTEXTUALIZE NUMBER LPAREN contextualize_items RPAREN] if compare[call[name[len], parameter[name[t]]] equal[==] constant[5]] begin[:] call[name[t]][constant[0]] assign[=] call[name[contextualize], parameter[call[name[t]][constant[3]]]]
keyword[def] identifier[p_contextualize_sentence] ( identifier[self] , identifier[t] ): literal[string] keyword[if] identifier[len] ( identifier[t] )== literal[int] : identifier[t] [ literal[int] ]= identifier[contextualize] ( identifier[t] [ literal[int] ], identifier[line] = identifier[t] . identifier[lineno] ( literal[int] )) keyword[else] : identifier[t] [ literal[int] ]= identifier[contextualize] ( identifier[t] [ literal[int] ], identifier[t] [ literal[int] ], identifier[line] = identifier[t] . identifier[lineno] ( literal[int] ))
def p_contextualize_sentence(self, t): """contextualize_sentence : CONTEXTUALIZE LPAREN contextualize_items RPAREN | CONTEXTUALIZE NUMBER LPAREN contextualize_items RPAREN""" if len(t) == 5: t[0] = contextualize(t[3], line=t.lineno(1)) # depends on [control=['if'], data=[]] else: t[0] = contextualize(t[4], t[2], line=t.lineno(1))
def leaf(self, parent, parts):
    """
    Find the leaf.
    @param parts: A list of path parts.
    @type parts: [str,..]
    @param parent: The leaf's parent.
    @type parent: L{xsd.sxbase.SchemaObject}
    @return: The leaf.
    @rtype: L{xsd.sxbase.SchemaObject}
    """
    # Only the unqualified part of the last path component is relevant.
    name = splitPrefix(parts[-1])[1]
    if name.startswith('@'):
        # An '@'-prefixed name addresses an attribute of the parent.
        found = parent.get_attribute(name[1:])[0]
    else:
        found = parent.get_child(name)[0]
    if found is None:
        raise PathResolver.BadPath(name)
    return found
def function[leaf, parameter[self, parent, parts]]: constant[ Find the leaf. @param parts: A list of path parts. @type parts: [str,..] @param parent: The leaf's parent. @type parent: L{xsd.sxbase.SchemaObject} @return: The leaf. @rtype: L{xsd.sxbase.SchemaObject} ] variable[name] assign[=] call[call[name[splitPrefix], parameter[call[name[parts]][<ast.UnaryOp object at 0x7da2041d8280>]]]][constant[1]] if call[name[name].startswith, parameter[constant[@]]] begin[:] <ast.Tuple object at 0x7da2041dbca0> assign[=] call[name[parent].get_attribute, parameter[call[name[name]][<ast.Slice object at 0x7da1b084ee90>]]] if compare[name[result] is constant[None]] begin[:] <ast.Raise object at 0x7da1b084edd0> return[name[result]]
keyword[def] identifier[leaf] ( identifier[self] , identifier[parent] , identifier[parts] ): literal[string] identifier[name] = identifier[splitPrefix] ( identifier[parts] [- literal[int] ])[ literal[int] ] keyword[if] identifier[name] . identifier[startswith] ( literal[string] ): identifier[result] , identifier[path] = identifier[parent] . identifier[get_attribute] ( identifier[name] [ literal[int] :]) keyword[else] : identifier[result] , identifier[ancestry] = identifier[parent] . identifier[get_child] ( identifier[name] ) keyword[if] identifier[result] keyword[is] keyword[None] : keyword[raise] identifier[PathResolver] . identifier[BadPath] ( identifier[name] ) keyword[return] identifier[result]
def leaf(self, parent, parts): """ Find the leaf. @param parts: A list of path parts. @type parts: [str,..] @param parent: The leaf's parent. @type parent: L{xsd.sxbase.SchemaObject} @return: The leaf. @rtype: L{xsd.sxbase.SchemaObject} """ name = splitPrefix(parts[-1])[1] if name.startswith('@'): (result, path) = parent.get_attribute(name[1:]) # depends on [control=['if'], data=[]] else: (result, ancestry) = parent.get_child(name) if result is None: raise PathResolver.BadPath(name) # depends on [control=['if'], data=[]] return result
def raise_error(name=None, args=None, message=''):
    '''
    Raise an exception whose class is looked up by ``name``.

    The class is resolved first in ``salt.exceptions`` and then in the
    built-in ``exceptions`` module; when neither contains it,
    ``SaltException`` is used instead. The exception is constructed from
    ``args`` when given, otherwise from ``message``. An empty ``name``
    defaults to "Exception".
    '''
    name = name or 'Exception'
    # Search the candidate containers in priority order.
    for container in (salt.exceptions, exceptions):
        if hasattr(container, name):
            ex = getattr(container, name)
            break
    else:
        # Unknown exception name: fall back to the generic salt error.
        ex = getattr(salt.exceptions, 'SaltException')
    if args is None:
        raise ex(message)
    raise ex(*args)
def function[raise_error, parameter[name, args, message]]: constant[ Raise an exception with __name__ from name, args from args If args is None Otherwise message from message If name is empty then use "Exception" ] variable[name] assign[=] <ast.BoolOp object at 0x7da1b2034af0> if call[name[hasattr], parameter[name[salt].exceptions, name[name]]] begin[:] variable[ex] assign[=] call[name[getattr], parameter[name[salt].exceptions, name[name]]] if compare[name[args] is_not constant[None]] begin[:] <ast.Raise object at 0x7da207f9aa10>
keyword[def] identifier[raise_error] ( identifier[name] = keyword[None] , identifier[args] = keyword[None] , identifier[message] = literal[string] ): literal[string] identifier[name] = identifier[name] keyword[or] literal[string] keyword[if] identifier[hasattr] ( identifier[salt] . identifier[exceptions] , identifier[name] ): identifier[ex] = identifier[getattr] ( identifier[salt] . identifier[exceptions] , identifier[name] ) keyword[elif] identifier[hasattr] ( identifier[exceptions] , identifier[name] ): identifier[ex] = identifier[getattr] ( identifier[exceptions] , identifier[name] ) keyword[else] : identifier[name] = literal[string] identifier[ex] = identifier[getattr] ( identifier[salt] . identifier[exceptions] , identifier[name] ) keyword[if] identifier[args] keyword[is] keyword[not] keyword[None] : keyword[raise] identifier[ex] (* identifier[args] ) keyword[else] : keyword[raise] identifier[ex] ( identifier[message] )
def raise_error(name=None, args=None, message=''): """ Raise an exception with __name__ from name, args from args If args is None Otherwise message from message If name is empty then use "Exception" """ name = name or 'Exception' if hasattr(salt.exceptions, name): ex = getattr(salt.exceptions, name) # depends on [control=['if'], data=[]] elif hasattr(exceptions, name): ex = getattr(exceptions, name) # depends on [control=['if'], data=[]] else: name = 'SaltException' ex = getattr(salt.exceptions, name) if args is not None: raise ex(*args) # depends on [control=['if'], data=['args']] else: raise ex(message)
def _parse_or_match(self, text, pos, method_name):
    """Execute a parse or match on the default grammar, followed by a
    visitation.

    Raise RuntimeError if there is no default grammar specified.

    """
    if not self.grammar:
        # Without a grammar there is nothing to delegate the call to.
        raise RuntimeError(
            "The {cls}.{method}() shortcut won't work because {cls} was "
            "never associated with a specific grammar. Fill out its "
            "`grammar` attribute, and try again.".format(
                cls=self.__class__.__name__, method=method_name))
    grammar_method = getattr(self.grammar, method_name)
    return self.visit(grammar_method(text, pos=pos))
def function[_parse_or_match, parameter[self, text, pos, method_name]]: constant[Execute a parse or match on the default grammar, followed by a visitation. Raise RuntimeError if there is no default grammar specified. ] if <ast.UnaryOp object at 0x7da1b083c130> begin[:] <ast.Raise object at 0x7da1b083da50> return[call[name[self].visit, parameter[call[call[name[getattr], parameter[name[self].grammar, name[method_name]]], parameter[name[text]]]]]]
keyword[def] identifier[_parse_or_match] ( identifier[self] , identifier[text] , identifier[pos] , identifier[method_name] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[grammar] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] literal[string] literal[string] . identifier[format] ( identifier[cls] = identifier[self] . identifier[__class__] . identifier[__name__] , identifier[method] = identifier[method_name] )) keyword[return] identifier[self] . identifier[visit] ( identifier[getattr] ( identifier[self] . identifier[grammar] , identifier[method_name] )( identifier[text] , identifier[pos] = identifier[pos] ))
def _parse_or_match(self, text, pos, method_name): """Execute a parse or match on the default grammar, followed by a visitation. Raise RuntimeError if there is no default grammar specified. """ if not self.grammar: raise RuntimeError("The {cls}.{method}() shortcut won't work because {cls} was never associated with a specific grammar. Fill out its `grammar` attribute, and try again.".format(cls=self.__class__.__name__, method=method_name)) # depends on [control=['if'], data=[]] return self.visit(getattr(self.grammar, method_name)(text, pos=pos))
def get_behavior_for_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name):
    """GetBehaviorForWorkItemType.
    [Preview API] Returns a behavior for the work item type of the process.
    :param str process_id: The ID of the process
    :param str wit_ref_name_for_behaviors: Work item type reference name for the behavior
    :param str behavior_ref_name: The reference name of the behavior
    :rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v5_0.work_item_tracking_process.models.WorkItemTypeBehavior>`
    """
    # Map each route placeholder to its python parameter name and value;
    # placeholders whose value is None are left out of the route.
    route_params = (
        ('processId', 'process_id', process_id),
        ('witRefNameForBehaviors', 'wit_ref_name_for_behaviors', wit_ref_name_for_behaviors),
        ('behaviorRefName', 'behavior_ref_name', behavior_ref_name),
    )
    route_values = {}
    for placeholder, param_name, value in route_params:
        if value is not None:
            route_values[placeholder] = self._serialize.url(param_name, value, 'str')
    response = self._send(http_method='GET',
                          location_id='6d765a2e-4e1b-4b11-be93-f953be676024',
                          version='5.0-preview.1',
                          route_values=route_values)
    return self._deserialize('WorkItemTypeBehavior', response)
def function[get_behavior_for_work_item_type, parameter[self, process_id, wit_ref_name_for_behaviors, behavior_ref_name]]: constant[GetBehaviorForWorkItemType. [Preview API] Returns a behavior for the work item type of the process. :param str process_id: The ID of the process :param str wit_ref_name_for_behaviors: Work item type reference name for the behavior :param str behavior_ref_name: The reference name of the behavior :rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v5_0.work_item_tracking_process.models.WorkItemTypeBehavior>` ] variable[route_values] assign[=] dictionary[[], []] if compare[name[process_id] is_not constant[None]] begin[:] call[name[route_values]][constant[processId]] assign[=] call[name[self]._serialize.url, parameter[constant[process_id], name[process_id], constant[str]]] if compare[name[wit_ref_name_for_behaviors] is_not constant[None]] begin[:] call[name[route_values]][constant[witRefNameForBehaviors]] assign[=] call[name[self]._serialize.url, parameter[constant[wit_ref_name_for_behaviors], name[wit_ref_name_for_behaviors], constant[str]]] if compare[name[behavior_ref_name] is_not constant[None]] begin[:] call[name[route_values]][constant[behaviorRefName]] assign[=] call[name[self]._serialize.url, parameter[constant[behavior_ref_name], name[behavior_ref_name], constant[str]]] variable[response] assign[=] call[name[self]._send, parameter[]] return[call[name[self]._deserialize, parameter[constant[WorkItemTypeBehavior], name[response]]]]
keyword[def] identifier[get_behavior_for_work_item_type] ( identifier[self] , identifier[process_id] , identifier[wit_ref_name_for_behaviors] , identifier[behavior_ref_name] ): literal[string] identifier[route_values] ={} keyword[if] identifier[process_id] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[process_id] , literal[string] ) keyword[if] identifier[wit_ref_name_for_behaviors] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[wit_ref_name_for_behaviors] , literal[string] ) keyword[if] identifier[behavior_ref_name] keyword[is] keyword[not] keyword[None] : identifier[route_values] [ literal[string] ]= identifier[self] . identifier[_serialize] . identifier[url] ( literal[string] , identifier[behavior_ref_name] , literal[string] ) identifier[response] = identifier[self] . identifier[_send] ( identifier[http_method] = literal[string] , identifier[location_id] = literal[string] , identifier[version] = literal[string] , identifier[route_values] = identifier[route_values] ) keyword[return] identifier[self] . identifier[_deserialize] ( literal[string] , identifier[response] )
def get_behavior_for_work_item_type(self, process_id, wit_ref_name_for_behaviors, behavior_ref_name): """GetBehaviorForWorkItemType. [Preview API] Returns a behavior for the work item type of the process. :param str process_id: The ID of the process :param str wit_ref_name_for_behaviors: Work item type reference name for the behavior :param str behavior_ref_name: The reference name of the behavior :rtype: :class:`<WorkItemTypeBehavior> <azure.devops.v5_0.work_item_tracking_process.models.WorkItemTypeBehavior>` """ route_values = {} if process_id is not None: route_values['processId'] = self._serialize.url('process_id', process_id, 'str') # depends on [control=['if'], data=['process_id']] if wit_ref_name_for_behaviors is not None: route_values['witRefNameForBehaviors'] = self._serialize.url('wit_ref_name_for_behaviors', wit_ref_name_for_behaviors, 'str') # depends on [control=['if'], data=['wit_ref_name_for_behaviors']] if behavior_ref_name is not None: route_values['behaviorRefName'] = self._serialize.url('behavior_ref_name', behavior_ref_name, 'str') # depends on [control=['if'], data=['behavior_ref_name']] response = self._send(http_method='GET', location_id='6d765a2e-4e1b-4b11-be93-f953be676024', version='5.0-preview.1', route_values=route_values) return self._deserialize('WorkItemTypeBehavior', response)
def zscore(self, key, member):
    """Get the score associated with the given member in a sorted set."""
    # Issue ZSCORE and convert the raw reply (int/float string or nil)
    # once the future resolves.
    reply = self.execute(b'ZSCORE', key, member)
    return wait_convert(reply, optional_int_or_float)
def function[zscore, parameter[self, key, member]]: constant[Get the score associated with the given member in a sorted set.] variable[fut] assign[=] call[name[self].execute, parameter[constant[b'ZSCORE'], name[key], name[member]]] return[call[name[wait_convert], parameter[name[fut], name[optional_int_or_float]]]]
keyword[def] identifier[zscore] ( identifier[self] , identifier[key] , identifier[member] ): literal[string] identifier[fut] = identifier[self] . identifier[execute] ( literal[string] , identifier[key] , identifier[member] ) keyword[return] identifier[wait_convert] ( identifier[fut] , identifier[optional_int_or_float] )
def zscore(self, key, member): """Get the score associated with the given member in a sorted set.""" fut = self.execute(b'ZSCORE', key, member) return wait_convert(fut, optional_int_or_float)
def import_participant_element(diagram_graph, participants_dictionary, participant_element):
    """
    Adds 'participant' element to the collaboration dictionary.

    :param diagram_graph: NetworkX graph representing a BPMN process diagram,
    :param participants_dictionary: dictionary with participant element attributes. Key is participant ID,
       value is a dictionary of participant attributes,
    :param participant_element: object representing a BPMN XML 'participant' element.
    """
    participant_id = participant_element.getAttribute(consts.Consts.id)
    name = participant_element.getAttribute(consts.Consts.name)
    process_ref = participant_element.getAttribute(consts.Consts.process_ref)

    # Reuse the already-fetched attribute instead of querying the XML
    # element a second time (the original re-invoked getAttribute here).
    if process_ref == '':
        # A participant without a referenced process is modelled as a
        # graph node of its own, acting as its own process.
        diagram_graph.add_node(participant_id)
        diagram_graph.node[participant_id][consts.Consts.type] = consts.Consts.participant
        diagram_graph.node[participant_id][consts.Consts.process] = participant_id

    participants_dictionary[participant_id] = {consts.Consts.name: name,
                                               consts.Consts.process_ref: process_ref}
def function[import_participant_element, parameter[diagram_graph, participants_dictionary, participant_element]]: constant[ Adds 'participant' element to the collaboration dictionary. :param diagram_graph: NetworkX graph representing a BPMN process diagram, :param participants_dictionary: dictionary with participant element attributes. Key is participant ID, value is a dictionary of participant attributes, :param participant_element: object representing a BPMN XML 'participant' element. ] variable[participant_id] assign[=] call[name[participant_element].getAttribute, parameter[name[consts].Consts.id]] variable[name] assign[=] call[name[participant_element].getAttribute, parameter[name[consts].Consts.name]] variable[process_ref] assign[=] call[name[participant_element].getAttribute, parameter[name[consts].Consts.process_ref]] if compare[call[name[participant_element].getAttribute, parameter[name[consts].Consts.process_ref]] equal[==] constant[]] begin[:] call[name[diagram_graph].add_node, parameter[name[participant_id]]] call[call[name[diagram_graph].node][name[participant_id]]][name[consts].Consts.type] assign[=] name[consts].Consts.participant call[call[name[diagram_graph].node][name[participant_id]]][name[consts].Consts.process] assign[=] name[participant_id] call[name[participants_dictionary]][name[participant_id]] assign[=] dictionary[[<ast.Attribute object at 0x7da18c4cd990>, <ast.Attribute object at 0x7da18c4cc490>], [<ast.Name object at 0x7da18c4cc910>, <ast.Name object at 0x7da18c4cf0a0>]]
keyword[def] identifier[import_participant_element] ( identifier[diagram_graph] , identifier[participants_dictionary] , identifier[participant_element] ): literal[string] identifier[participant_id] = identifier[participant_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[id] ) identifier[name] = identifier[participant_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[name] ) identifier[process_ref] = identifier[participant_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[process_ref] ) keyword[if] identifier[participant_element] . identifier[getAttribute] ( identifier[consts] . identifier[Consts] . identifier[process_ref] )== literal[string] : identifier[diagram_graph] . identifier[add_node] ( identifier[participant_id] ) identifier[diagram_graph] . identifier[node] [ identifier[participant_id] ][ identifier[consts] . identifier[Consts] . identifier[type] ]= identifier[consts] . identifier[Consts] . identifier[participant] identifier[diagram_graph] . identifier[node] [ identifier[participant_id] ][ identifier[consts] . identifier[Consts] . identifier[process] ]= identifier[participant_id] identifier[participants_dictionary] [ identifier[participant_id] ]={ identifier[consts] . identifier[Consts] . identifier[name] : identifier[name] , identifier[consts] . identifier[Consts] . identifier[process_ref] : identifier[process_ref] }
def import_participant_element(diagram_graph, participants_dictionary, participant_element): """ Adds 'participant' element to the collaboration dictionary. :param diagram_graph: NetworkX graph representing a BPMN process diagram, :param participants_dictionary: dictionary with participant element attributes. Key is participant ID, value is a dictionary of participant attributes, :param participant_element: object representing a BPMN XML 'participant' element. """ participant_id = participant_element.getAttribute(consts.Consts.id) name = participant_element.getAttribute(consts.Consts.name) process_ref = participant_element.getAttribute(consts.Consts.process_ref) if participant_element.getAttribute(consts.Consts.process_ref) == '': diagram_graph.add_node(participant_id) diagram_graph.node[participant_id][consts.Consts.type] = consts.Consts.participant diagram_graph.node[participant_id][consts.Consts.process] = participant_id # depends on [control=['if'], data=[]] participants_dictionary[participant_id] = {consts.Consts.name: name, consts.Consts.process_ref: process_ref}