code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def from_name(cls, name, path=THEMES):
    """
    Search for the given theme on the filesystem and attempt to load it.

    Directories will be checked in a pre-determined order. If the name
    is provided as an absolute file path, it will be loaded directly.
    """
    # A name that is itself an existing file wins outright.
    if os.path.isfile(name):
        return cls.from_file(name, 'custom')

    # Otherwise probe the user directory first, then the bundled presets.
    for directory, source in ((path, 'installed'), (DEFAULT_THEMES, 'preset')):
        candidate = os.path.join(directory, '{0}.cfg'.format(name))
        if os.path.isfile(candidate):
            return cls.from_file(candidate, source)

    raise ConfigError('Could not find theme named "{0}"'.format(name))
def function[from_name, parameter[cls, name, path]]: constant[ Search for the given theme on the filesystem and attempt to load it. Directories will be checked in a pre-determined order. If the name is provided as an absolute file path, it will be loaded directly. ] if call[name[os].path.isfile, parameter[name[name]]] begin[:] return[call[name[cls].from_file, parameter[name[name], constant[custom]]]] variable[filename] assign[=] call[name[os].path.join, parameter[name[path], call[constant[{0}.cfg].format, parameter[name[name]]]]] if call[name[os].path.isfile, parameter[name[filename]]] begin[:] return[call[name[cls].from_file, parameter[name[filename], constant[installed]]]] variable[filename] assign[=] call[name[os].path.join, parameter[name[DEFAULT_THEMES], call[constant[{0}.cfg].format, parameter[name[name]]]]] if call[name[os].path.isfile, parameter[name[filename]]] begin[:] return[call[name[cls].from_file, parameter[name[filename], constant[preset]]]] <ast.Raise object at 0x7da2054a5540>
keyword[def] identifier[from_name] ( identifier[cls] , identifier[name] , identifier[path] = identifier[THEMES] ): literal[string] keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[name] ): keyword[return] identifier[cls] . identifier[from_file] ( identifier[name] , literal[string] ) identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[path] , literal[string] . identifier[format] ( identifier[name] )) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): keyword[return] identifier[cls] . identifier[from_file] ( identifier[filename] , literal[string] ) identifier[filename] = identifier[os] . identifier[path] . identifier[join] ( identifier[DEFAULT_THEMES] , literal[string] . identifier[format] ( identifier[name] )) keyword[if] identifier[os] . identifier[path] . identifier[isfile] ( identifier[filename] ): keyword[return] identifier[cls] . identifier[from_file] ( identifier[filename] , literal[string] ) keyword[raise] identifier[ConfigError] ( literal[string] . identifier[format] ( identifier[name] ))
def from_name(cls, name, path=THEMES): """ Search for the given theme on the filesystem and attempt to load it. Directories will be checked in a pre-determined order. If the name is provided as an absolute file path, it will be loaded directly. """ if os.path.isfile(name): return cls.from_file(name, 'custom') # depends on [control=['if'], data=[]] filename = os.path.join(path, '{0}.cfg'.format(name)) if os.path.isfile(filename): return cls.from_file(filename, 'installed') # depends on [control=['if'], data=[]] filename = os.path.join(DEFAULT_THEMES, '{0}.cfg'.format(name)) if os.path.isfile(filename): return cls.from_file(filename, 'preset') # depends on [control=['if'], data=[]] raise ConfigError('Could not find theme named "{0}"'.format(name))
async def mutual_friends(self):
    """|coro|

    Gets all mutual friends of this user.

    .. note::

        This only applies to non-bot accounts.

    Raises
    -------
    Forbidden
        Not allowed to get mutual friends of this user.
    HTTPException
        Getting mutual friends failed.

    Returns
    -------
    List[:class:`User`]
        The users that are mutual friends.
    """
    state = self._state
    raw_friends = await state.http.get_mutual_friends(self.id)
    # Wrap each raw payload entry in a full User model.
    return [User(state=state, data=payload) for payload in raw_friends]
<ast.AsyncFunctionDef object at 0x7da1b1f249a0>
keyword[async] keyword[def] identifier[mutual_friends] ( identifier[self] ): literal[string] identifier[state] = identifier[self] . identifier[_state] identifier[mutuals] = keyword[await] identifier[state] . identifier[http] . identifier[get_mutual_friends] ( identifier[self] . identifier[id] ) keyword[return] [ identifier[User] ( identifier[state] = identifier[state] , identifier[data] = identifier[friend] ) keyword[for] identifier[friend] keyword[in] identifier[mutuals] ]
async def mutual_friends(self): """|coro| Gets all mutual friends of this user. .. note:: This only applies to non-bot accounts. Raises ------- Forbidden Not allowed to get mutual friends of this user. HTTPException Getting mutual friends failed. Returns ------- List[:class:`User`] The users that are mutual friends. """ state = self._state mutuals = await state.http.get_mutual_friends(self.id) return [User(state=state, data=friend) for friend in mutuals]
def convert_time(obj):
    """Convert a TIME column value to a :class:`datetime.time` object:

    >>> convert_time('15:06:17')
    datetime.time(15, 6, 17)

    Values that do not parse (no regex match, or components out of range)
    are returned unchanged:

    >>> convert_time('random crap')
    'random crap'

    Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but
    can accept values as (+|-)DD HH:MM:SS. The latter format will not be
    parsed correctly by this function.

    Also note that MySQL's TIME column corresponds more closely to
    Python's timedelta and not time. However if you want TIME columns
    to be treated as time-of-day and not a time offset, then you can
    use set this function as the converter for FIELD_TYPE.TIME.
    """
    # Under Python 3 the value may arrive as raw bytes; TIME text is ASCII.
    if not PY2 and isinstance(obj, (bytes, bytearray)):
        obj = obj.decode('ascii')

    match = TIME_RE.match(obj)
    if not match:
        return obj

    try:
        hh, mm, ss, fraction = match.groups()
        micros = _convert_second_fraction(fraction)
        return datetime.time(hour=int(hh), minute=int(mm),
                             second=int(ss), microsecond=int(micros))
    except ValueError:
        # Out-of-range components (e.g. hour 25) fall through unchanged.
        return obj
def function[convert_time, parameter[obj]]: constant[Returns a TIME column as a time object: >>> time_or_None('15:06:17') datetime.time(15, 6, 17) Illegal values are returned as None: >>> time_or_None('-25:06:17') is None True >>> time_or_None('random crap') is None True Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but can accept values as (+|-)DD HH:MM:SS. The latter format will not be parsed correctly by this function. Also note that MySQL's TIME column corresponds more closely to Python's timedelta and not time. However if you want TIME columns to be treated as time-of-day and not a time offset, then you can use set this function as the converter for FIELD_TYPE.TIME. ] if <ast.BoolOp object at 0x7da20c7c8190> begin[:] variable[obj] assign[=] call[name[obj].decode, parameter[constant[ascii]]] variable[m] assign[=] call[name[TIME_RE].match, parameter[name[obj]]] if <ast.UnaryOp object at 0x7da20c7c95d0> begin[:] return[name[obj]] <ast.Try object at 0x7da20c7c98d0>
keyword[def] identifier[convert_time] ( identifier[obj] ): literal[string] keyword[if] keyword[not] identifier[PY2] keyword[and] identifier[isinstance] ( identifier[obj] ,( identifier[bytes] , identifier[bytearray] )): identifier[obj] = identifier[obj] . identifier[decode] ( literal[string] ) identifier[m] = identifier[TIME_RE] . identifier[match] ( identifier[obj] ) keyword[if] keyword[not] identifier[m] : keyword[return] identifier[obj] keyword[try] : identifier[groups] = identifier[list] ( identifier[m] . identifier[groups] ()) identifier[groups] [- literal[int] ]= identifier[_convert_second_fraction] ( identifier[groups] [- literal[int] ]) identifier[hours] , identifier[minutes] , identifier[seconds] , identifier[microseconds] = identifier[groups] keyword[return] identifier[datetime] . identifier[time] ( identifier[hour] = identifier[int] ( identifier[hours] ), identifier[minute] = identifier[int] ( identifier[minutes] ), identifier[second] = identifier[int] ( identifier[seconds] ), identifier[microsecond] = identifier[int] ( identifier[microseconds] )) keyword[except] identifier[ValueError] : keyword[return] identifier[obj]
def convert_time(obj): """Returns a TIME column as a time object: >>> time_or_None('15:06:17') datetime.time(15, 6, 17) Illegal values are returned as None: >>> time_or_None('-25:06:17') is None True >>> time_or_None('random crap') is None True Note that MySQL always returns TIME columns as (+|-)HH:MM:SS, but can accept values as (+|-)DD HH:MM:SS. The latter format will not be parsed correctly by this function. Also note that MySQL's TIME column corresponds more closely to Python's timedelta and not time. However if you want TIME columns to be treated as time-of-day and not a time offset, then you can use set this function as the converter for FIELD_TYPE.TIME. """ if not PY2 and isinstance(obj, (bytes, bytearray)): obj = obj.decode('ascii') # depends on [control=['if'], data=[]] m = TIME_RE.match(obj) if not m: return obj # depends on [control=['if'], data=[]] try: groups = list(m.groups()) groups[-1] = _convert_second_fraction(groups[-1]) (hours, minutes, seconds, microseconds) = groups return datetime.time(hour=int(hours), minute=int(minutes), second=int(seconds), microsecond=int(microseconds)) # depends on [control=['try'], data=[]] except ValueError: return obj # depends on [control=['except'], data=[]]
def _merge(options, name, bases, default=None): """Merges a named option collection.""" result = None for base in bases: if base is None: continue value = getattr(base, name, None) if value is None: continue result = utils.cons(result, value) value = options.get(name) if value is not None: result = utils.cons(result, value) return result or default
def function[_merge, parameter[options, name, bases, default]]: constant[Merges a named option collection.] variable[result] assign[=] constant[None] for taget[name[base]] in starred[name[bases]] begin[:] if compare[name[base] is constant[None]] begin[:] continue variable[value] assign[=] call[name[getattr], parameter[name[base], name[name], constant[None]]] if compare[name[value] is constant[None]] begin[:] continue variable[result] assign[=] call[name[utils].cons, parameter[name[result], name[value]]] variable[value] assign[=] call[name[options].get, parameter[name[name]]] if compare[name[value] is_not constant[None]] begin[:] variable[result] assign[=] call[name[utils].cons, parameter[name[result], name[value]]] return[<ast.BoolOp object at 0x7da1afe0f9d0>]
keyword[def] identifier[_merge] ( identifier[options] , identifier[name] , identifier[bases] , identifier[default] = keyword[None] ): literal[string] identifier[result] = keyword[None] keyword[for] identifier[base] keyword[in] identifier[bases] : keyword[if] identifier[base] keyword[is] keyword[None] : keyword[continue] identifier[value] = identifier[getattr] ( identifier[base] , identifier[name] , keyword[None] ) keyword[if] identifier[value] keyword[is] keyword[None] : keyword[continue] identifier[result] = identifier[utils] . identifier[cons] ( identifier[result] , identifier[value] ) identifier[value] = identifier[options] . identifier[get] ( identifier[name] ) keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : identifier[result] = identifier[utils] . identifier[cons] ( identifier[result] , identifier[value] ) keyword[return] identifier[result] keyword[or] identifier[default]
def _merge(options, name, bases, default=None): """Merges a named option collection.""" result = None for base in bases: if base is None: continue # depends on [control=['if'], data=[]] value = getattr(base, name, None) if value is None: continue # depends on [control=['if'], data=[]] result = utils.cons(result, value) # depends on [control=['for'], data=['base']] value = options.get(name) if value is not None: result = utils.cons(result, value) # depends on [control=['if'], data=['value']] return result or default
def _add_to_typedef(self, typedef_curr, line, lnum): """Add new fields to the current typedef.""" mtch = re.match(r'^(\S+):\s*(\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2).split('!')[0].rstrip() if field_name == "id": self._chk_none(typedef_curr.id, lnum) typedef_curr.id = field_value elif field_name == "name": self._chk_none(typedef_curr.name, lnum) typedef_curr.name = field_value elif field_name == "transitive_over": typedef_curr.transitive_over.append(field_value) elif field_name == "inverse_of": self._chk_none(typedef_curr.inverse_of, lnum) typedef_curr.inverse_of = field_value # Note: there are other tags that aren't imported here. else: self._die("UNEXPECTED FIELD CONTENT: {L}\n".format(L=line), lnum)
def function[_add_to_typedef, parameter[self, typedef_curr, line, lnum]]: constant[Add new fields to the current typedef.] variable[mtch] assign[=] call[name[re].match, parameter[constant[^(\S+):\s*(\S.*)$], name[line]]] if name[mtch] begin[:] variable[field_name] assign[=] call[name[mtch].group, parameter[constant[1]]] variable[field_value] assign[=] call[call[call[call[name[mtch].group, parameter[constant[2]]].split, parameter[constant[!]]]][constant[0]].rstrip, parameter[]] if compare[name[field_name] equal[==] constant[id]] begin[:] call[name[self]._chk_none, parameter[name[typedef_curr].id, name[lnum]]] name[typedef_curr].id assign[=] name[field_value]
keyword[def] identifier[_add_to_typedef] ( identifier[self] , identifier[typedef_curr] , identifier[line] , identifier[lnum] ): literal[string] identifier[mtch] = identifier[re] . identifier[match] ( literal[string] , identifier[line] ) keyword[if] identifier[mtch] : identifier[field_name] = identifier[mtch] . identifier[group] ( literal[int] ) identifier[field_value] = identifier[mtch] . identifier[group] ( literal[int] ). identifier[split] ( literal[string] )[ literal[int] ]. identifier[rstrip] () keyword[if] identifier[field_name] == literal[string] : identifier[self] . identifier[_chk_none] ( identifier[typedef_curr] . identifier[id] , identifier[lnum] ) identifier[typedef_curr] . identifier[id] = identifier[field_value] keyword[elif] identifier[field_name] == literal[string] : identifier[self] . identifier[_chk_none] ( identifier[typedef_curr] . identifier[name] , identifier[lnum] ) identifier[typedef_curr] . identifier[name] = identifier[field_value] keyword[elif] identifier[field_name] == literal[string] : identifier[typedef_curr] . identifier[transitive_over] . identifier[append] ( identifier[field_value] ) keyword[elif] identifier[field_name] == literal[string] : identifier[self] . identifier[_chk_none] ( identifier[typedef_curr] . identifier[inverse_of] , identifier[lnum] ) identifier[typedef_curr] . identifier[inverse_of] = identifier[field_value] keyword[else] : identifier[self] . identifier[_die] ( literal[string] . identifier[format] ( identifier[L] = identifier[line] ), identifier[lnum] )
def _add_to_typedef(self, typedef_curr, line, lnum): """Add new fields to the current typedef.""" mtch = re.match('^(\\S+):\\s*(\\S.*)$', line) if mtch: field_name = mtch.group(1) field_value = mtch.group(2).split('!')[0].rstrip() if field_name == 'id': self._chk_none(typedef_curr.id, lnum) typedef_curr.id = field_value # depends on [control=['if'], data=[]] elif field_name == 'name': self._chk_none(typedef_curr.name, lnum) typedef_curr.name = field_value # depends on [control=['if'], data=[]] elif field_name == 'transitive_over': typedef_curr.transitive_over.append(field_value) # depends on [control=['if'], data=[]] elif field_name == 'inverse_of': self._chk_none(typedef_curr.inverse_of, lnum) typedef_curr.inverse_of = field_value # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: # Note: there are other tags that aren't imported here. self._die('UNEXPECTED FIELD CONTENT: {L}\n'.format(L=line), lnum)
def multi_ops(data_stream, *funcs):
    """
    fork a generator with multiple operations/functions

    data_stream - an iterable data structure (ie: list/generator/tuple)
    funcs - every function that will be applied to the data_stream

    Yields one value per input item: ``funcs[0](item)`` when a single
    function is given, otherwise a tuple with one entry per function.

    Raises:
        TypeError: if no functions are given, or any of them is not callable.
    """
    # Validate eagerly with real exceptions instead of ``assert``:
    # asserts are stripped under ``python -O`` and, placed inside a
    # generator body, would not even run until the first item was requested.
    if not funcs:
        raise TypeError('multi_ops needs at least one function to apply to data_stream')
    if not all(callable(func) for func in funcs):
        raise TypeError('multi_ops can only apply functions to the first argument')

    def _generate():
        # Inner generator so the validation above happens at call time.
        if len(funcs) == 1:
            only = funcs[0]
            for item in data_stream:
                yield only(item)
        else:
            for item in data_stream:
                yield tuple(func(item) for func in funcs)

    return _generate()
def function[multi_ops, parameter[data_stream]]: constant[ fork a generator with multiple operations/functions data_stream - an iterable data structure (ie: list/generator/tuple) funcs - every function that will be applied to the data_stream ] assert[call[name[all], parameter[<ast.GeneratorExp object at 0x7da20e9575b0>]]] assert[call[name[len], parameter[name[funcs]]]] for taget[name[i]] in starred[name[data_stream]] begin[:] if compare[call[name[len], parameter[name[funcs]]] greater[>] constant[1]] begin[:] <ast.Yield object at 0x7da20e955b70>
keyword[def] identifier[multi_ops] ( identifier[data_stream] ,* identifier[funcs] ): literal[string] keyword[assert] identifier[all] ( identifier[callable] ( identifier[func] ) keyword[for] identifier[func] keyword[in] identifier[funcs] ), literal[string] keyword[assert] identifier[len] ( identifier[funcs] ), literal[string] keyword[for] identifier[i] keyword[in] identifier[data_stream] : keyword[if] identifier[len] ( identifier[funcs] )> literal[int] : keyword[yield] identifier[tuple] ( identifier[func] ( identifier[i] ) keyword[for] identifier[func] keyword[in] identifier[funcs] ) keyword[elif] identifier[len] ( identifier[funcs] )== literal[int] : keyword[yield] identifier[funcs] [ literal[int] ]( identifier[i] )
def multi_ops(data_stream, *funcs): """ fork a generator with multiple operations/functions data_stream - an iterable data structure (ie: list/generator/tuple) funcs - every function that will be applied to the data_stream """ assert all((callable(func) for func in funcs)), 'multi_ops can only apply functions to the first argument' assert len(funcs), 'multi_ops needs at least one function to apply to data_stream' for i in data_stream: if len(funcs) > 1: yield tuple((func(i) for func in funcs)) # depends on [control=['if'], data=[]] elif len(funcs) == 1: yield funcs[0](i) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
def register(cls, key, filename):
    """Register a image file using key"""
    # Warn when an existing resource is about to be overwritten.
    replacing = key in cls._stock
    if replacing:
        logger.info('Warning, replacing resource ' + str(key))
    entry = {'type': 'custom', 'filename': filename}
    cls._stock[key] = entry
    logger.info('%s registered as %s' % (filename, key))
def function[register, parameter[cls, key, filename]]: constant[Register a image file using key] if compare[name[key] in name[cls]._stock] begin[:] call[name[logger].info, parameter[binary_operation[constant[Warning, replacing resource ] + call[name[str], parameter[name[key]]]]]] call[name[cls]._stock][name[key]] assign[=] dictionary[[<ast.Constant object at 0x7da1b1736710>, <ast.Constant object at 0x7da1b17344c0>], [<ast.Constant object at 0x7da1b1737190>, <ast.Name object at 0x7da1b1735bd0>]] call[name[logger].info, parameter[binary_operation[constant[%s registered as %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b17367d0>, <ast.Name object at 0x7da1b17342b0>]]]]]
keyword[def] identifier[register] ( identifier[cls] , identifier[key] , identifier[filename] ): literal[string] keyword[if] identifier[key] keyword[in] identifier[cls] . identifier[_stock] : identifier[logger] . identifier[info] ( literal[string] + identifier[str] ( identifier[key] )) identifier[cls] . identifier[_stock] [ identifier[key] ]={ literal[string] : literal[string] , literal[string] : identifier[filename] } identifier[logger] . identifier[info] ( literal[string] %( identifier[filename] , identifier[key] ))
def register(cls, key, filename): """Register a image file using key""" if key in cls._stock: logger.info('Warning, replacing resource ' + str(key)) # depends on [control=['if'], data=['key']] cls._stock[key] = {'type': 'custom', 'filename': filename} logger.info('%s registered as %s' % (filename, key))
def validate(bo, error_level: str = "WARNING"):
    """Semantically validate BEL AST

    Adds errors and warnings to ``bo.validation_messages`` and clears
    ``bo.parse_valid`` when any ERROR message is present.

    Error Levels are similar to log levels - selecting WARNING includes
    both WARNING and ERROR, selecting ERROR just includes ERROR

    Args:
        bo: main BEL language object
        error_level: return ERRORs only or also WARNINGs

    Returns:
        the same ``bo`` object, mutated with validation results.
        NOTE(review): the previous annotation claimed
        ``Tuple[bool, List[Tuple[str, str]]]`` but the code has always
        returned ``bo``; the annotation was corrected to match.
    """
    if bo.ast:
        bo = validate_functions(bo.ast, bo)  # No WARNINGs generated in this function
        if error_level == "WARNING":
            bo = validate_arg_values(bo.ast, bo)  # validates NSArg and StrArg values
    else:
        bo.validation_messages.append(("ERROR", "Invalid BEL Statement - cannot parse"))

    # A single ERROR message invalidates the whole parse.
    for msg in bo.validation_messages:
        if msg[0] == "ERROR":
            bo.parse_valid = False
            break

    return bo
def function[validate, parameter[bo, error_level]]: constant[Semantically validate BEL AST Add errors and warnings to bel_obj.validation_messages Error Levels are similar to log levels - selecting WARNING includes both WARNING and ERROR, selecting ERROR just includes ERROR Args: bo: main BEL language object error_level: return ERRORs only or also WARNINGs Returns: Tuple[bool, List[Tuple[str, str]]]: (is_valid, messages) ] if name[bo].ast begin[:] variable[bo] assign[=] call[name[validate_functions], parameter[name[bo].ast, name[bo]]] if compare[name[error_level] equal[==] constant[WARNING]] begin[:] variable[bo] assign[=] call[name[validate_arg_values], parameter[name[bo].ast, name[bo]]] for taget[name[msg]] in starred[name[bo].validation_messages] begin[:] if compare[call[name[msg]][constant[0]] equal[==] constant[ERROR]] begin[:] name[bo].parse_valid assign[=] constant[False] break return[name[bo]]
keyword[def] identifier[validate] ( identifier[bo] , identifier[error_level] : identifier[str] = literal[string] )-> identifier[Tuple] [ identifier[bool] , identifier[List] [ identifier[Tuple] [ identifier[str] , identifier[str] ]]]: literal[string] keyword[if] identifier[bo] . identifier[ast] : identifier[bo] = identifier[validate_functions] ( identifier[bo] . identifier[ast] , identifier[bo] ) keyword[if] identifier[error_level] == literal[string] : identifier[bo] = identifier[validate_arg_values] ( identifier[bo] . identifier[ast] , identifier[bo] ) keyword[else] : identifier[bo] . identifier[validation_messages] . identifier[append] (( literal[string] , literal[string] )) keyword[for] identifier[msg] keyword[in] identifier[bo] . identifier[validation_messages] : keyword[if] identifier[msg] [ literal[int] ]== literal[string] : identifier[bo] . identifier[parse_valid] = keyword[False] keyword[break] keyword[return] identifier[bo]
def validate(bo, error_level: str='WARNING') -> Tuple[bool, List[Tuple[str, str]]]: """Semantically validate BEL AST Add errors and warnings to bel_obj.validation_messages Error Levels are similar to log levels - selecting WARNING includes both WARNING and ERROR, selecting ERROR just includes ERROR Args: bo: main BEL language object error_level: return ERRORs only or also WARNINGs Returns: Tuple[bool, List[Tuple[str, str]]]: (is_valid, messages) """ if bo.ast: bo = validate_functions(bo.ast, bo) # No WARNINGs generated in this function if error_level == 'WARNING': bo = validate_arg_values(bo.ast, bo) # validates NSArg and StrArg values # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] else: bo.validation_messages.append(('ERROR', 'Invalid BEL Statement - cannot parse')) for msg in bo.validation_messages: if msg[0] == 'ERROR': bo.parse_valid = False break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['msg']] return bo
def get_config(name, region=None, key=None, keyid=None, profile=None):
    '''
    Get the configuration for a cache cluster.

    CLI example::

        salt myminion boto_elasticache.get_config myelasticache
    '''
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    if not conn:
        return None
    try:
        cc = conn.describe_cache_clusters(name, show_cache_node_info=True)
    except boto.exception.BotoServerError as e:
        msg = 'Failed to get config for cache cluster {0}.'.format(name)
        log.error(msg)
        log.debug(e)
        return {}
    cc = cc['DescribeCacheClustersResponse']['DescribeCacheClustersResult']
    cc = cc['CacheClusters'][0]
    ret = odict.OrderedDict()
    # Attributes (pythonized) that we expose in the returned config.
    attrs = ['engine', 'cache_parameter_group', 'cache_cluster_id',
             'cache_security_groups', 'replication_group_id',
             'auto_minor_version_upgrade', 'num_cache_nodes',
             'preferred_availability_zone', 'security_groups',
             'cache_subnet_group_name', 'engine_version', 'cache_node_type',
             'notification_configuration', 'preferred_maintenance_window',
             'configuration_endpoint', 'cache_cluster_status', 'cache_nodes']
    # Loop variable renamed from ``key`` so it no longer shadows the AWS
    # secret-key parameter of the same name.
    for raw_key, val in six.iteritems(cc):
        _key = boto.utils.pythonize_name(raw_key)
        if _key not in attrs:
            continue
        if _key == 'cache_parameter_group':
            ret[_key] = val['CacheParameterGroupName'] if val else None
        elif _key == 'cache_nodes':
            ret[_key] = list(val) if val else []
        elif _key == 'cache_security_groups':
            ret[_key] = [k['CacheSecurityGroupName'] for k in val] if val else []
        elif _key == 'configuration_endpoint':
            # Flatten the endpoint into top-level port/address keys.
            if val:
                ret['port'] = val['Port']
                ret['address'] = val['Address']
            else:
                ret['port'] = None
                ret['address'] = None
        elif _key == 'notification_configuration':
            if val:
                ret['notification_topic_arn'] = val['TopicArn']
            else:
                ret['notification_topic_arn'] = None
        else:
            ret[_key] = val
    return ret
def function[get_config, parameter[name, region, key, keyid, profile]]: constant[ Get the configuration for a cache cluster. CLI example:: salt myminion boto_elasticache.get_config myelasticache ] variable[conn] assign[=] call[name[_get_conn], parameter[]] if <ast.UnaryOp object at 0x7da18dc05f30> begin[:] return[constant[None]] <ast.Try object at 0x7da18dc06500> variable[cc] assign[=] call[call[name[cc]][constant[DescribeCacheClustersResponse]]][constant[DescribeCacheClustersResult]] variable[cc] assign[=] call[call[name[cc]][constant[CacheClusters]]][constant[0]] variable[ret] assign[=] call[name[odict].OrderedDict, parameter[]] variable[attrs] assign[=] list[[<ast.Constant object at 0x7da1b2023d30>, <ast.Constant object at 0x7da1b2022620>, <ast.Constant object at 0x7da1b20237f0>, <ast.Constant object at 0x7da1b2021cf0>, <ast.Constant object at 0x7da1b2022ec0>, <ast.Constant object at 0x7da1b2021c30>, <ast.Constant object at 0x7da1b20232b0>, <ast.Constant object at 0x7da1b2022e30>, <ast.Constant object at 0x7da1b2021c60>, <ast.Constant object at 0x7da1b20202e0>, <ast.Constant object at 0x7da1b20208e0>, <ast.Constant object at 0x7da1b2022560>, <ast.Constant object at 0x7da1b20204c0>, <ast.Constant object at 0x7da1b2022da0>, <ast.Constant object at 0x7da1b2020550>, <ast.Constant object at 0x7da1b20216f0>, <ast.Constant object at 0x7da1b2021f00>]] for taget[tuple[[<ast.Name object at 0x7da1b1f76110>, <ast.Name object at 0x7da1b1f779d0>]]] in starred[call[name[six].iteritems, parameter[name[cc]]]] begin[:] variable[_key] assign[=] call[name[boto].utils.pythonize_name, parameter[name[key]]] if compare[name[_key] <ast.NotIn object at 0x7da2590d7190> name[attrs]] begin[:] continue if compare[name[_key] equal[==] constant[cache_parameter_group]] begin[:] if name[val] begin[:] call[name[ret]][name[_key]] assign[=] call[name[val]][constant[CacheParameterGroupName]] return[name[ret]]
keyword[def] identifier[get_config] ( identifier[name] , identifier[region] = keyword[None] , identifier[key] = keyword[None] , identifier[keyid] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_get_conn] ( identifier[region] = identifier[region] , identifier[key] = identifier[key] , identifier[keyid] = identifier[keyid] , identifier[profile] = identifier[profile] ) keyword[if] keyword[not] identifier[conn] : keyword[return] keyword[None] keyword[try] : identifier[cc] = identifier[conn] . identifier[describe_cache_clusters] ( identifier[name] , identifier[show_cache_node_info] = keyword[True] ) keyword[except] identifier[boto] . identifier[exception] . identifier[BotoServerError] keyword[as] identifier[e] : identifier[msg] = literal[string] . identifier[format] ( identifier[name] ) identifier[log] . identifier[error] ( identifier[msg] ) identifier[log] . identifier[debug] ( identifier[e] ) keyword[return] {} identifier[cc] = identifier[cc] [ literal[string] ][ literal[string] ] identifier[cc] = identifier[cc] [ literal[string] ][ literal[int] ] identifier[ret] = identifier[odict] . identifier[OrderedDict] () identifier[attrs] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] keyword[for] identifier[key] , identifier[val] keyword[in] identifier[six] . identifier[iteritems] ( identifier[cc] ): identifier[_key] = identifier[boto] . identifier[utils] . 
identifier[pythonize_name] ( identifier[key] ) keyword[if] identifier[_key] keyword[not] keyword[in] identifier[attrs] : keyword[continue] keyword[if] identifier[_key] == literal[string] : keyword[if] identifier[val] : identifier[ret] [ identifier[_key] ]= identifier[val] [ literal[string] ] keyword[else] : identifier[ret] [ identifier[_key] ]= keyword[None] keyword[elif] identifier[_key] == literal[string] : keyword[if] identifier[val] : identifier[ret] [ identifier[_key] ]=[ identifier[k] keyword[for] identifier[k] keyword[in] identifier[val] ] keyword[else] : identifier[ret] [ identifier[_key] ]=[] keyword[elif] identifier[_key] == literal[string] : keyword[if] identifier[val] : identifier[ret] [ identifier[_key] ]=[ identifier[k] [ literal[string] ] keyword[for] identifier[k] keyword[in] identifier[val] ] keyword[else] : identifier[ret] [ identifier[_key] ]=[] keyword[elif] identifier[_key] == literal[string] : keyword[if] identifier[val] : identifier[ret] [ literal[string] ]= identifier[val] [ literal[string] ] identifier[ret] [ literal[string] ]= identifier[val] [ literal[string] ] keyword[else] : identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= keyword[None] keyword[elif] identifier[_key] == literal[string] : keyword[if] identifier[val] : identifier[ret] [ literal[string] ]= identifier[val] [ literal[string] ] keyword[else] : identifier[ret] [ literal[string] ]= keyword[None] keyword[else] : identifier[ret] [ identifier[_key] ]= identifier[val] keyword[return] identifier[ret]
def get_config(name, region=None, key=None, keyid=None, profile=None): """ Get the configuration for a cache cluster. CLI example:: salt myminion boto_elasticache.get_config myelasticache """ conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile) if not conn: return None # depends on [control=['if'], data=[]] try: cc = conn.describe_cache_clusters(name, show_cache_node_info=True) # depends on [control=['try'], data=[]] except boto.exception.BotoServerError as e: msg = 'Failed to get config for cache cluster {0}.'.format(name) log.error(msg) log.debug(e) return {} # depends on [control=['except'], data=['e']] cc = cc['DescribeCacheClustersResponse']['DescribeCacheClustersResult'] cc = cc['CacheClusters'][0] ret = odict.OrderedDict() attrs = ['engine', 'cache_parameter_group', 'cache_cluster_id', 'cache_security_groups', 'replication_group_id', 'auto_minor_version_upgrade', 'num_cache_nodes', 'preferred_availability_zone', 'security_groups', 'cache_subnet_group_name', 'engine_version', 'cache_node_type', 'notification_configuration', 'preferred_maintenance_window', 'configuration_endpoint', 'cache_cluster_status', 'cache_nodes'] for (key, val) in six.iteritems(cc): _key = boto.utils.pythonize_name(key) if _key not in attrs: continue # depends on [control=['if'], data=[]] if _key == 'cache_parameter_group': if val: ret[_key] = val['CacheParameterGroupName'] # depends on [control=['if'], data=[]] else: ret[_key] = None # depends on [control=['if'], data=['_key']] elif _key == 'cache_nodes': if val: ret[_key] = [k for k in val] # depends on [control=['if'], data=[]] else: ret[_key] = [] # depends on [control=['if'], data=['_key']] elif _key == 'cache_security_groups': if val: ret[_key] = [k['CacheSecurityGroupName'] for k in val] # depends on [control=['if'], data=[]] else: ret[_key] = [] # depends on [control=['if'], data=['_key']] elif _key == 'configuration_endpoint': if val: ret['port'] = val['Port'] ret['address'] = val['Address'] # depends on 
[control=['if'], data=[]] else: ret['port'] = None ret['address'] = None # depends on [control=['if'], data=[]] elif _key == 'notification_configuration': if val: ret['notification_topic_arn'] = val['TopicArn'] # depends on [control=['if'], data=[]] else: ret['notification_topic_arn'] = None # depends on [control=['if'], data=[]] else: ret[_key] = val # depends on [control=['for'], data=[]] return ret
def _register_diff_order_book_channels(self): """ Registers the binding for the diff_order_book channels. :return: """ channels = {'diff_order_book': self.btcusd_dob_callback, 'diff_order_book_btceur': self.btceur_dob_callback, 'diff_order_book_eurusd': self.eurusd_dob_callback, 'diff_order_book_xrpusd': self.xrpusd_dob_callback, 'diff_order_book_xrpeur': self.xrpeur_dob_callback, 'diff_order_book_xrpbtc': self.xrpbtc_dob_callback} event = 'data' self._bind_channels(event, channels)
def function[_register_diff_order_book_channels, parameter[self]]: constant[ Registers the binding for the diff_order_book channels. :return: ] variable[channels] assign[=] dictionary[[<ast.Constant object at 0x7da204962380>, <ast.Constant object at 0x7da204963340>, <ast.Constant object at 0x7da18f58e980>, <ast.Constant object at 0x7da18f58d4e0>, <ast.Constant object at 0x7da18f58d900>, <ast.Constant object at 0x7da18f58d7b0>], [<ast.Attribute object at 0x7da18f58dd50>, <ast.Attribute object at 0x7da18f58c4c0>, <ast.Attribute object at 0x7da18f58c190>, <ast.Attribute object at 0x7da18f58e770>, <ast.Attribute object at 0x7da1b07bd210>, <ast.Attribute object at 0x7da1b07fa170>]] variable[event] assign[=] constant[data] call[name[self]._bind_channels, parameter[name[event], name[channels]]]
keyword[def] identifier[_register_diff_order_book_channels] ( identifier[self] ): literal[string] identifier[channels] ={ literal[string] : identifier[self] . identifier[btcusd_dob_callback] , literal[string] : identifier[self] . identifier[btceur_dob_callback] , literal[string] : identifier[self] . identifier[eurusd_dob_callback] , literal[string] : identifier[self] . identifier[xrpusd_dob_callback] , literal[string] : identifier[self] . identifier[xrpeur_dob_callback] , literal[string] : identifier[self] . identifier[xrpbtc_dob_callback] } identifier[event] = literal[string] identifier[self] . identifier[_bind_channels] ( identifier[event] , identifier[channels] )
def _register_diff_order_book_channels(self): """ Registers the binding for the diff_order_book channels. :return: """ channels = {'diff_order_book': self.btcusd_dob_callback, 'diff_order_book_btceur': self.btceur_dob_callback, 'diff_order_book_eurusd': self.eurusd_dob_callback, 'diff_order_book_xrpusd': self.xrpusd_dob_callback, 'diff_order_book_xrpeur': self.xrpeur_dob_callback, 'diff_order_book_xrpbtc': self.xrpbtc_dob_callback} event = 'data' self._bind_channels(event, channels)
def _read_check(self, filepath): """Returns the path of a file on the *local* system that can be read from. If the filepath is on a remote server, the file is first copied locally.""" if self.is_ssh(filepath): self._check_ftp() #First we need to generate a file path on the local system to #copy the file to. source = self._get_remote(filepath) target = self._get_hashed_path(filepath) self.ftp.get(source, target) #Now we can just read it with the normal python commands. else: target = filepath return target
def function[_read_check, parameter[self, filepath]]: constant[Returns the path of a file on the *local* system that can be read from. If the filepath is on a remote server, the file is first copied locally.] if call[name[self].is_ssh, parameter[name[filepath]]] begin[:] call[name[self]._check_ftp, parameter[]] variable[source] assign[=] call[name[self]._get_remote, parameter[name[filepath]]] variable[target] assign[=] call[name[self]._get_hashed_path, parameter[name[filepath]]] call[name[self].ftp.get, parameter[name[source], name[target]]] return[name[target]]
keyword[def] identifier[_read_check] ( identifier[self] , identifier[filepath] ): literal[string] keyword[if] identifier[self] . identifier[is_ssh] ( identifier[filepath] ): identifier[self] . identifier[_check_ftp] () identifier[source] = identifier[self] . identifier[_get_remote] ( identifier[filepath] ) identifier[target] = identifier[self] . identifier[_get_hashed_path] ( identifier[filepath] ) identifier[self] . identifier[ftp] . identifier[get] ( identifier[source] , identifier[target] ) keyword[else] : identifier[target] = identifier[filepath] keyword[return] identifier[target]
def _read_check(self, filepath): """Returns the path of a file on the *local* system that can be read from. If the filepath is on a remote server, the file is first copied locally.""" if self.is_ssh(filepath): self._check_ftp() #First we need to generate a file path on the local system to #copy the file to. source = self._get_remote(filepath) target = self._get_hashed_path(filepath) self.ftp.get(source, target) # depends on [control=['if'], data=[]] else: #Now we can just read it with the normal python commands. target = filepath return target
def create_packet(reqid, message):
    """Creates Outgoing Packet from a given reqid and message

    :param reqid: REQID object
    :param message: protocol buffer object
    """
    assert message.IsInitialized()
    typename = message.DESCRIPTOR.full_name
    # Total payload size: length-prefixed type string + reqid + the
    # length-prefixed serialized protobuf.
    datasize = (HeronProtocol.get_size_to_pack_string(typename) +
                REQID.REQID_SIZE +
                HeronProtocol.get_size_to_pack_message(message))
    pieces = [
        HeronProtocol.pack_int(datasize),            # header: total data size
        HeronProtocol.pack_int(len(typename)),       # type string, length first
        typename,
        reqid.pack(),                                # request id
        HeronProtocol.pack_int(message.ByteSize()),  # proto, length first
        message.SerializeToString(),
    ]
    return OutgoingPacket(''.join(pieces))
def function[create_packet, parameter[reqid, message]]: constant[Creates Outgoing Packet from a given reqid and message :param reqid: REQID object :param message: protocol buffer object ] assert[call[name[message].IsInitialized, parameter[]]] variable[packet] assign[=] constant[] variable[typename] assign[=] name[message].DESCRIPTOR.full_name variable[datasize] assign[=] binary_operation[binary_operation[call[name[HeronProtocol].get_size_to_pack_string, parameter[name[typename]]] + name[REQID].REQID_SIZE] + call[name[HeronProtocol].get_size_to_pack_message, parameter[name[message]]]] <ast.AugAssign object at 0x7da204345ae0> <ast.AugAssign object at 0x7da204346bc0> <ast.AugAssign object at 0x7da204346440> <ast.AugAssign object at 0x7da204345240> <ast.AugAssign object at 0x7da204345d50> <ast.AugAssign object at 0x7da2043471f0> return[call[name[OutgoingPacket], parameter[name[packet]]]]
keyword[def] identifier[create_packet] ( identifier[reqid] , identifier[message] ): literal[string] keyword[assert] identifier[message] . identifier[IsInitialized] () identifier[packet] = literal[string] identifier[typename] = identifier[message] . identifier[DESCRIPTOR] . identifier[full_name] identifier[datasize] = identifier[HeronProtocol] . identifier[get_size_to_pack_string] ( identifier[typename] )+ identifier[REQID] . identifier[REQID_SIZE] + identifier[HeronProtocol] . identifier[get_size_to_pack_message] ( identifier[message] ) identifier[packet] += identifier[HeronProtocol] . identifier[pack_int] ( identifier[datasize] ) identifier[packet] += identifier[HeronProtocol] . identifier[pack_int] ( identifier[len] ( identifier[typename] )) identifier[packet] += identifier[typename] identifier[packet] += identifier[reqid] . identifier[pack] () identifier[packet] += identifier[HeronProtocol] . identifier[pack_int] ( identifier[message] . identifier[ByteSize] ()) identifier[packet] += identifier[message] . identifier[SerializeToString] () keyword[return] identifier[OutgoingPacket] ( identifier[packet] )
def create_packet(reqid, message): """Creates Outgoing Packet from a given reqid and message :param reqid: REQID object :param message: protocol buffer object """ assert message.IsInitialized() packet = '' # calculate the totla size of the packet incl. header typename = message.DESCRIPTOR.full_name datasize = HeronProtocol.get_size_to_pack_string(typename) + REQID.REQID_SIZE + HeronProtocol.get_size_to_pack_message(message) # first write out how much data is there as the header packet += HeronProtocol.pack_int(datasize) # next write the type string packet += HeronProtocol.pack_int(len(typename)) packet += typename # reqid packet += reqid.pack() # add the proto packet += HeronProtocol.pack_int(message.ByteSize()) packet += message.SerializeToString() return OutgoingPacket(packet)
def get_dataset(self, owner, id, **kwargs):
    """
    Retrieve a dataset

    Return details on the dataset.
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, pass a `callback` function to be invoked
    when receiving the response; the request thread is then returned
    instead of the response data.

    :param callback function: The callback function for asynchronous request. (optional)
    :param str owner: User name and unique identifier of the creator of a dataset or project. (required)
    :param str id: Dataset unique identifier. (required)
    :return: DatasetSummaryResponse
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the payload alone, not the
    # full (data, status, headers) response.
    kwargs['_return_http_data_only'] = True
    # Synchronous and callback-driven requests go through the identical
    # call; when `callback` is supplied the underlying method hands back
    # the request thread, so no branching is needed here.
    return self.get_dataset_with_http_info(owner, id, **kwargs)
def function[get_dataset, parameter[self, owner, id]]: constant[ Retrieve a dataset Return details on the dataset. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_dataset(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a dataset or project. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required) :param str id: Dataset unique identifier. For example, in the URL:[https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), an-intro-to-dataworld-dataset is the unique identifier of the dataset. (required) :return: DatasetSummaryResponse If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[callback]]] begin[:] return[call[name[self].get_dataset_with_http_info, parameter[name[owner], name[id]]]]
keyword[def] identifier[get_dataset] ( identifier[self] , identifier[owner] , identifier[id] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[get_dataset_with_http_info] ( identifier[owner] , identifier[id] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[get_dataset_with_http_info] ( identifier[owner] , identifier[id] ,** identifier[kwargs] ) keyword[return] identifier[data]
def get_dataset(self, owner, id, **kwargs): """ Retrieve a dataset Return details on the dataset. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please define a `callback` function to be invoked when receiving the response. >>> def callback_function(response): >>> pprint(response) >>> >>> thread = api.get_dataset(owner, id, callback=callback_function) :param callback function: The callback function for asynchronous request. (optional) :param str owner: User name and unique identifier of the creator of a dataset or project. For example, in the URL: [https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), jonloyens is the unique identifier of the owner. (required) :param str id: Dataset unique identifier. For example, in the URL:[https://data.world/jonloyens/an-intro-to-dataworld-dataset](https://data.world/jonloyens/an-intro-to-dataworld-dataset), an-intro-to-dataworld-dataset is the unique identifier of the dataset. (required) :return: DatasetSummaryResponse If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('callback'): return self.get_dataset_with_http_info(owner, id, **kwargs) # depends on [control=['if'], data=[]] else: data = self.get_dataset_with_http_info(owner, id, **kwargs) return data
def get_water_level(cls):
    """Measure the water tank level via the control module's ADC.

    A simple voltage divider interfaces the sensor to the ADC.  Testing
    shows the sensor response is close to, but not exactly, linear, so a
    linear fit to measured data maps sensor resistance to depth.  The
    remaining fraction of the tank is stored on ``cls.water_remaining``
    and also returned (useful e.g. for automatic shut-off).
    """
    # ---------- system parameters: update to match the real hardware
    vref = 4.95          # ADC reference voltage
    tank_height = 17.5   # container height in centimeters
    rref = 2668          # reference (divider) resistor
    # ----------
    # Average five ADC readings (device 0x6c, channel 1) to reduce noise.
    total = 0
    for _ in range(5):
        total += get_ADC_value(cls.bus, 0x6c, 1)
    avg = total / 5
    # Voltage divider: R_sensor = Rref * Vout / (Vref - Vout)
    sensor_res = rref * avg / (vref - avg)
    # Linear fit (measured transfer, adjusted offset): resistance -> cm.
    depth_cm = sensor_res * (-.0163) + 28.127
    if depth_cm < 1.0:
        # Below 1 cm the readings are unreliable; treat as empty.
        depth_cm = 0
    cls.water_remaining = depth_cm / tank_height
    return depth_cm / tank_height
def function[get_water_level, parameter[cls]]: constant[ This method uses the ADC on the control module to measure the current water tank level and returns the water volume remaining in the tank. For this method, it is assumed that a simple voltage divider is used to interface the sensor to the ADC module. Testing shows that the sensor response is not completely linear, though it is quite close. To make the results more accurate, a mapping method approximated by a linear fit to data is used. ] variable[vref] assign[=] constant[4.95] variable[tank_height] assign[=] constant[17.5] variable[rref] assign[=] constant[2668] variable[val] assign[=] constant[0] for taget[name[i]] in starred[call[name[range], parameter[constant[5]]]] begin[:] variable[val] assign[=] binary_operation[call[name[get_ADC_value], parameter[name[cls].bus, constant[108], constant[1]]] + name[val]] variable[avg] assign[=] binary_operation[name[val] / constant[5]] variable[water_sensor_res] assign[=] binary_operation[binary_operation[name[rref] * name[avg]] / binary_operation[name[vref] - name[avg]]] variable[depth_cm] assign[=] binary_operation[binary_operation[name[water_sensor_res] * <ast.UnaryOp object at 0x7da2043473d0>] + constant[28.127]] if compare[name[depth_cm] less[<] constant[1.0]] begin[:] variable[depth_cm] assign[=] constant[0] name[cls].water_remaining assign[=] binary_operation[name[depth_cm] / name[tank_height]] return[binary_operation[name[depth_cm] / name[tank_height]]]
keyword[def] identifier[get_water_level] ( identifier[cls] ): literal[string] identifier[vref] = literal[int] identifier[tank_height] = literal[int] identifier[rref] = literal[int] identifier[val] = literal[int] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[val] = identifier[get_ADC_value] ( identifier[cls] . identifier[bus] , literal[int] , literal[int] )+ identifier[val] identifier[avg] = identifier[val] / literal[int] identifier[water_sensor_res] = identifier[rref] * identifier[avg] /( identifier[vref] - identifier[avg] ) identifier[depth_cm] = identifier[water_sensor_res] *(- literal[int] )+ literal[int] keyword[if] identifier[depth_cm] < literal[int] : identifier[depth_cm] = literal[int] identifier[cls] . identifier[water_remaining] = identifier[depth_cm] / identifier[tank_height] keyword[return] identifier[depth_cm] / identifier[tank_height]
def get_water_level(cls): """ This method uses the ADC on the control module to measure the current water tank level and returns the water volume remaining in the tank. For this method, it is assumed that a simple voltage divider is used to interface the sensor to the ADC module. Testing shows that the sensor response is not completely linear, though it is quite close. To make the results more accurate, a mapping method approximated by a linear fit to data is used. """ # ---------- # These values should be updated based on the real system parameters vref = 4.95 tank_height = 17.5 # in centimeters (height of container) rref = 2668 # Reference resistor # ---------- val = 0 for i in range(5): # Take five readings and do an average # Fetch value from ADC (0x69 - ch1) val = get_ADC_value(cls.bus, 108, 1) + val # depends on [control=['for'], data=[]] avg = val / 5 water_sensor_res = rref * avg / (vref - avg) depth_cm = water_sensor_res * -0.0163 + 28.127 # measured transfer adjusted offset if depth_cm < 1.0: # Below 1cm, the values should not be trusted. depth_cm = 0 # depends on [control=['if'], data=['depth_cm']] cls.water_remaining = depth_cm / tank_height # Return the current depth in case the user is interested in # that parameter alone. (IE for automatic shut-off) return depth_cm / tank_height
def parse_format_index(self, text):
    """Parse format index.

    Interprets ``text`` as an integer, honouring ``0b``/``0o``/``0x``
    prefixes (optionally preceded by a minus sign).  Anything that does
    not parse as an integer is returned unchanged.
    """
    # Inspect the two characters after an optional leading '-'.
    prefix = text[1:3] if text[0] == "-" else text[:2]
    if prefix.startswith("0"):
        base = {"b": 2, "o": 8, "x": 16}.get(prefix[-1:], 10)
    else:
        base = 10
    try:
        return int(text, base)
    except Exception:
        return text
def function[parse_format_index, parameter[self, text]]: constant[Parse format index.] variable[base] assign[=] constant[10] variable[prefix] assign[=] <ast.IfExp object at 0x7da1b0300370> if compare[call[name[prefix]][<ast.Slice object at 0x7da1b0300ee0>] equal[==] constant[0]] begin[:] variable[char] assign[=] call[name[prefix]][<ast.UnaryOp object at 0x7da1b0301060>] if compare[name[char] equal[==] constant[b]] begin[:] variable[base] assign[=] constant[2] <ast.Try object at 0x7da1b03016c0> return[name[text]]
keyword[def] identifier[parse_format_index] ( identifier[self] , identifier[text] ): literal[string] identifier[base] = literal[int] identifier[prefix] = identifier[text] [ literal[int] : literal[int] ] keyword[if] identifier[text] [ literal[int] ]== literal[string] keyword[else] identifier[text] [: literal[int] ] keyword[if] identifier[prefix] [ literal[int] : literal[int] ]== literal[string] : identifier[char] = identifier[prefix] [- literal[int] ] keyword[if] identifier[char] == literal[string] : identifier[base] = literal[int] keyword[elif] identifier[char] == literal[string] : identifier[base] = literal[int] keyword[elif] identifier[char] == literal[string] : identifier[base] = literal[int] keyword[try] : identifier[text] = identifier[int] ( identifier[text] , identifier[base] ) keyword[except] identifier[Exception] : keyword[pass] keyword[return] identifier[text]
def parse_format_index(self, text): """Parse format index.""" base = 10 prefix = text[1:3] if text[0] == '-' else text[:2] if prefix[0:1] == '0': char = prefix[-1] if char == 'b': base = 2 # depends on [control=['if'], data=[]] elif char == 'o': base = 8 # depends on [control=['if'], data=[]] elif char == 'x': base = 16 # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] try: text = int(text, base) # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] return text
def rename(self, image_name, path):
    '''rename performs a move, but ensures the path is maintained in storage

    Parameters
    ==========
    image_name: the image name (uri) to rename to.
    path: the name to rename (basename is taken)
    '''
    container = self.get(image_name, quiet=True)
    if container is not None and container.image is not None:
        # The container file stays in its original directory; only the
        # filename / uri portion derived from `path` changes.
        dirname = os.path.dirname(container.image)
        names = parse_image_name(remove_uri(path))
        # The collection folder inside storage must exist before moving.
        storage = os.path.join(self.storage, os.path.dirname(names['storage']))
        if not os.path.exists(storage):
            os.mkdir(storage)
        fullpath = os.path.abspath(os.path.join(dirname, names['storage']))
        moved = self.cp(move_to=fullpath, container=container, command="rename")
        if moved is not None:
            # File rename succeeded; record the new uri and persist.
            moved.uri = names['uri']
            self.session.commit()
            return moved
    # Reached when the container is missing, has no image file, or the
    # copy/rename step failed.
    bot.warning('%s not found' % (image_name))
def function[rename, parameter[self, image_name, path]]: constant[rename performs a move, but ensures the path is maintained in storage Parameters ========== image_name: the image name (uri) to rename to. path: the name to rename (basename is taken) ] variable[container] assign[=] call[name[self].get, parameter[name[image_name]]] if compare[name[container] is_not constant[None]] begin[:] if compare[name[container].image is_not constant[None]] begin[:] variable[dirname] assign[=] call[name[os].path.dirname, parameter[name[container].image]] variable[names] assign[=] call[name[parse_image_name], parameter[call[name[remove_uri], parameter[name[path]]]]] variable[storage] assign[=] call[name[os].path.join, parameter[name[self].storage, call[name[os].path.dirname, parameter[call[name[names]][constant[storage]]]]]] if <ast.UnaryOp object at 0x7da1b03fb550> begin[:] call[name[os].mkdir, parameter[name[storage]]] variable[fullpath] assign[=] call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[dirname], call[name[names]][constant[storage]]]]]] variable[container] assign[=] call[name[self].cp, parameter[]] if compare[name[container] is_not constant[None]] begin[:] name[container].uri assign[=] call[name[names]][constant[uri]] call[name[self].session.commit, parameter[]] return[name[container]] call[name[bot].warning, parameter[binary_operation[constant[%s not found] <ast.Mod object at 0x7da2590d6920> name[image_name]]]]
keyword[def] identifier[rename] ( identifier[self] , identifier[image_name] , identifier[path] ): literal[string] identifier[container] = identifier[self] . identifier[get] ( identifier[image_name] , identifier[quiet] = keyword[True] ) keyword[if] identifier[container] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[container] . identifier[image] keyword[is] keyword[not] keyword[None] : identifier[dirname] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[container] . identifier[image] ) identifier[names] = identifier[parse_image_name] ( identifier[remove_uri] ( identifier[path] )) identifier[storage] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[storage] , identifier[os] . identifier[path] . identifier[dirname] ( identifier[names] [ literal[string] ])) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[storage] ): identifier[os] . identifier[mkdir] ( identifier[storage] ) identifier[fullpath] = identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[dirname] , identifier[names] [ literal[string] ])) identifier[container] = identifier[self] . identifier[cp] ( identifier[move_to] = identifier[fullpath] , identifier[container] = identifier[container] , identifier[command] = literal[string] ) keyword[if] identifier[container] keyword[is] keyword[not] keyword[None] : identifier[container] . identifier[uri] = identifier[names] [ literal[string] ] identifier[self] . identifier[session] . identifier[commit] () keyword[return] identifier[container] identifier[bot] . identifier[warning] ( literal[string] %( identifier[image_name] ))
def rename(self, image_name, path): """rename performs a move, but ensures the path is maintained in storage Parameters ========== image_name: the image name (uri) to rename to. path: the name to rename (basename is taken) """ container = self.get(image_name, quiet=True) if container is not None: if container.image is not None: # The original directory for the container stays the same dirname = os.path.dirname(container.image) # But we derive a new filename and uri names = parse_image_name(remove_uri(path)) storage = os.path.join(self.storage, os.path.dirname(names['storage'])) # This is the collection folder if not os.path.exists(storage): os.mkdir(storage) # depends on [control=['if'], data=[]] # Here we get the new full path, rename the container file fullpath = os.path.abspath(os.path.join(dirname, names['storage'])) container = self.cp(move_to=fullpath, container=container, command='rename') # On successful rename of file, update the uri if container is not None: container.uri = names['uri'] self.session.commit() return container # depends on [control=['if'], data=['container']] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['container']] bot.warning('%s not found' % image_name)
def parse_doc_dict(text=None, split_character="::"):
    """
    Returns a dictionary of the parsed doc for example the following would
    return {'a':'A','b':'B'} ::

        a:A
        b:B

    :param split_character: str of the characters to split on in the doc string
    :param text: str of the text to parse, by default uses calling function doc
    :return: dict
    """
    # function_doc(2) looks two frames up, so it must be called directly
    # here (not from a helper) to see the caller's docstring.
    source = text or function_doc(2)
    # Keep only the portion after the split marker, and drop any
    # trailing :param / :return sections.
    body = source.split(split_character, 1)[-1]
    body = body.split(':param')[0].split(':return')[0]
    result = {}
    for line in body.strip().split('\n'):
        # Split each "key: value" line on the first colon only, so
        # values themselves may contain colons.
        parts = line.split(':', 1)
        result[parts[0].strip()] = parts[1].strip()
    return result
def function[parse_doc_dict, parameter[text, split_character]]: constant[ Returns a dictionary of the parsed doc for example the following would return {'a':'A','b':'B'} :: a:A b:B :param split_character: :param text: str of the text to parse, by default uses calling function doc :param split_character: str of the characters to split on in the doc string :return: dict ] variable[text] assign[=] <ast.BoolOp object at 0x7da18dc05e40> variable[text] assign[=] call[call[name[text].split, parameter[name[split_character], constant[1]]]][<ast.UnaryOp object at 0x7da18dc067a0>] variable[text] assign[=] call[call[call[call[name[text].split, parameter[constant[:param]]]][constant[0]].split, parameter[constant[:return]]]][constant[0]] variable[text] assign[=] call[call[name[text].strip, parameter[]].split, parameter[constant[ ]]] def function[clean, parameter[t]]: return[tuple[[<ast.Call object at 0x7da18dc04eb0>, <ast.Call object at 0x7da18dc05cc0>]]] return[call[name[dict], parameter[<ast.GeneratorExp object at 0x7da20c76e1a0>]]]
keyword[def] identifier[parse_doc_dict] ( identifier[text] = keyword[None] , identifier[split_character] = literal[string] ): literal[string] identifier[text] = identifier[text] keyword[or] identifier[function_doc] ( literal[int] ) identifier[text] = identifier[text] . identifier[split] ( identifier[split_character] , literal[int] )[- literal[int] ] identifier[text] = identifier[text] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[split] ( literal[string] )[ literal[int] ] identifier[text] = identifier[text] . identifier[strip] (). identifier[split] ( literal[string] ) keyword[def] identifier[clean] ( identifier[t] ): keyword[return] identifier[t] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[strip] (), identifier[t] . identifier[split] ( literal[string] , literal[int] )[ literal[int] ]. identifier[strip] () keyword[return] identifier[dict] ( identifier[clean] ( identifier[line] ) keyword[for] identifier[line] keyword[in] identifier[text] )
def parse_doc_dict(text=None, split_character='::'): """ Returns a dictionary of the parsed doc for example the following would return {'a':'A','b':'B'} :: a:A b:B :param split_character: :param text: str of the text to parse, by default uses calling function doc :param split_character: str of the characters to split on in the doc string :return: dict """ text = text or function_doc(2) text = text.split(split_character, 1)[-1] text = text.split(':param')[0].split(':return')[0] text = text.strip().split('\n') def clean(t): return (t.split(':', 1)[0].strip(), t.split(':', 1)[1].strip()) return dict((clean(line) for line in text))
def get_cacheable(cache_key, cache_ttl, calculate, recalculate=False):
    """
    Gets the result of a method call, using the given key and TTL as a cache

    Values are stored in the cache as JSON text, so `calculate` must
    return something JSON-serializable.  Pass recalculate=True to bypass
    the cached value and refresh it.
    """
    if not recalculate:
        hit = cache.get(cache_key)
        if hit is not None:
            # Cache holds JSON text; decode back to the original value.
            return json.loads(hit)
    # Miss (or forced recalculation): compute, store as JSON, return.
    value = calculate()
    cache.set(cache_key, json.dumps(value), cache_ttl)
    return value
def function[get_cacheable, parameter[cache_key, cache_ttl, calculate, recalculate]]: constant[ Gets the result of a method call, using the given key and TTL as a cache ] if <ast.UnaryOp object at 0x7da2045662c0> begin[:] variable[cached] assign[=] call[name[cache].get, parameter[name[cache_key]]] if compare[name[cached] is_not constant[None]] begin[:] return[call[name[json].loads, parameter[name[cached]]]] variable[calculated] assign[=] call[name[calculate], parameter[]] call[name[cache].set, parameter[name[cache_key], call[name[json].dumps, parameter[name[calculated]]], name[cache_ttl]]] return[name[calculated]]
keyword[def] identifier[get_cacheable] ( identifier[cache_key] , identifier[cache_ttl] , identifier[calculate] , identifier[recalculate] = keyword[False] ): literal[string] keyword[if] keyword[not] identifier[recalculate] : identifier[cached] = identifier[cache] . identifier[get] ( identifier[cache_key] ) keyword[if] identifier[cached] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[json] . identifier[loads] ( identifier[cached] ) identifier[calculated] = identifier[calculate] () identifier[cache] . identifier[set] ( identifier[cache_key] , identifier[json] . identifier[dumps] ( identifier[calculated] ), identifier[cache_ttl] ) keyword[return] identifier[calculated]
def get_cacheable(cache_key, cache_ttl, calculate, recalculate=False): """ Gets the result of a method call, using the given key and TTL as a cache """ if not recalculate: cached = cache.get(cache_key) if cached is not None: return json.loads(cached) # depends on [control=['if'], data=['cached']] # depends on [control=['if'], data=[]] calculated = calculate() cache.set(cache_key, json.dumps(calculated), cache_ttl) return calculated
def parse(self, data, path=None):
    """Parse raw specification text into definitions.

    Args:
        data (str): Raw specification text.
        path (Optional[str]): Path to specification on filesystem. Only
            used to tag tokens with the file they originated from.
    """
    assert not self.exhausted, 'Must call get_parser() to reset state.'
    self.path = path
    result = self.yacc.parse(data, lexer=self.lexer, debug=self.debug)
    # Lexer errors are prepended (in their original order) because they
    # often cause parser errors, and only one error is shown at a time.
    self.errors[:0] = [
        (message, line_no, self.path)
        for message, line_no in self.lexer.errors
    ]
    result.extend(self.anony_defs)
    self.exhausted = True
    return result
def function[parse, parameter[self, data, path]]: constant[ Args: data (str): Raw specification text. path (Optional[str]): Path to specification on filesystem. Only used to tag tokens with the file they originated from. ] assert[<ast.UnaryOp object at 0x7da20c7cb9a0>] name[self].path assign[=] name[path] variable[parsed_data] assign[=] call[name[self].yacc.parse, parameter[name[data]]] for taget[tuple[[<ast.Name object at 0x7da20c7cb1f0>, <ast.Name object at 0x7da20c7c8400>]]] in starred[call[name[self].lexer.errors][<ast.Slice object at 0x7da20c7c9570>]] begin[:] call[name[self].errors.insert, parameter[constant[0], tuple[[<ast.Name object at 0x7da20c7c82e0>, <ast.Name object at 0x7da20c7c9960>, <ast.Attribute object at 0x7da20c7c80a0>]]]] call[name[parsed_data].extend, parameter[name[self].anony_defs]] name[self].exhausted assign[=] constant[True] return[name[parsed_data]]
keyword[def] identifier[parse] ( identifier[self] , identifier[data] , identifier[path] = keyword[None] ): literal[string] keyword[assert] keyword[not] identifier[self] . identifier[exhausted] , literal[string] identifier[self] . identifier[path] = identifier[path] identifier[parsed_data] = identifier[self] . identifier[yacc] . identifier[parse] ( identifier[data] , identifier[lexer] = identifier[self] . identifier[lexer] , identifier[debug] = identifier[self] . identifier[debug] ) keyword[for] identifier[err_msg] , identifier[lineno] keyword[in] identifier[self] . identifier[lexer] . identifier[errors] [::- literal[int] ]: identifier[self] . identifier[errors] . identifier[insert] ( literal[int] ,( identifier[err_msg] , identifier[lineno] , identifier[self] . identifier[path] )) identifier[parsed_data] . identifier[extend] ( identifier[self] . identifier[anony_defs] ) identifier[self] . identifier[exhausted] = keyword[True] keyword[return] identifier[parsed_data]
def parse(self, data, path=None): """ Args: data (str): Raw specification text. path (Optional[str]): Path to specification on filesystem. Only used to tag tokens with the file they originated from. """ assert not self.exhausted, 'Must call get_parser() to reset state.' self.path = path parsed_data = self.yacc.parse(data, lexer=self.lexer, debug=self.debug) # It generally makes sense for lexer errors to come first, because # those can be the root of parser errors. Also, since we only show one # error max right now, it's best to show the lexing one. for (err_msg, lineno) in self.lexer.errors[::-1]: self.errors.insert(0, (err_msg, lineno, self.path)) # depends on [control=['for'], data=[]] parsed_data.extend(self.anony_defs) self.exhausted = True return parsed_data
def set_variable(section, value, create):
    """
    Set value of a variable in an environment file for the given section.
    If the variable is already defined, its value is replaced, otherwise,
    it is added to the end of the file.
    The value is given as "ENV_VAR_NAME=env_var_value", e.g.:

    s3conf set test ENV_VAR_NAME=env_var_value
    """
    # When only one positional argument was given, it is the value and
    # the default (None) section is used.
    if not value:
        value, section = section, None
    try:
        logger.debug('Running env command')
        conf = s3conf.S3Conf(settings=config.Settings(section=section))
        conf.get_envfile().set(value, create=create)
    except exceptions.EnvfilePathNotDefinedError:
        raise exceptions.EnvfilePathNotDefinedUsageError()
def function[set_variable, parameter[section, value, create]]: constant[ Set value of a variable in an environment file for the given section. If the variable is already defined, its value is replaced, otherwise, it is added to the end of the file. The value is given as "ENV_VAR_NAME=env_var_value", e.g.: s3conf set test ENV_VAR_NAME=env_var_value ] if <ast.UnaryOp object at 0x7da1b023e5c0> begin[:] variable[value] assign[=] name[section] variable[section] assign[=] constant[None] <ast.Try object at 0x7da1b023df00>
keyword[def] identifier[set_variable] ( identifier[section] , identifier[value] , identifier[create] ): literal[string] keyword[if] keyword[not] identifier[value] : identifier[value] = identifier[section] identifier[section] = keyword[None] keyword[try] : identifier[logger] . identifier[debug] ( literal[string] ) identifier[settings] = identifier[config] . identifier[Settings] ( identifier[section] = identifier[section] ) identifier[conf] = identifier[s3conf] . identifier[S3Conf] ( identifier[settings] = identifier[settings] ) identifier[env_vars] = identifier[conf] . identifier[get_envfile] () identifier[env_vars] . identifier[set] ( identifier[value] , identifier[create] = identifier[create] ) keyword[except] identifier[exceptions] . identifier[EnvfilePathNotDefinedError] : keyword[raise] identifier[exceptions] . identifier[EnvfilePathNotDefinedUsageError] ()
def set_variable(section, value, create): """ Set value of a variable in an environment file for the given section. If the variable is already defined, its value is replaced, otherwise, it is added to the end of the file. The value is given as "ENV_VAR_NAME=env_var_value", e.g.: s3conf set test ENV_VAR_NAME=env_var_value """ if not value: value = section section = None # depends on [control=['if'], data=[]] try: logger.debug('Running env command') settings = config.Settings(section=section) conf = s3conf.S3Conf(settings=settings) env_vars = conf.get_envfile() env_vars.set(value, create=create) # depends on [control=['try'], data=[]] except exceptions.EnvfilePathNotDefinedError: raise exceptions.EnvfilePathNotDefinedUsageError() # depends on [control=['except'], data=[]]
def from_shape(self, shape, rho, gm, nmax=7, lmax=None, lmax_grid=None,
               lmax_calc=None, omega=None):
    """
    Initialize a class of gravitational potential spherical harmonic
    coefficients by calculating the gravitational potential associated
    with relief along an interface.

    Usage
    -----
    x = SHGravCoeffs.from_shape(shape, rho, gm, [nmax, lmax, lmax_grid,
                                                 lmax_calc, omega])

    Returns
    -------
    x : SHGravCoeffs class instance.

    Parameters
    ----------
    shape : SHGrid or SHCoeffs class instance
        The shape of the interface. If the input is an SHCoeffs class
        instance, it will be expanded on a grid using the optional
        parameters lmax_grid and lmax_calc.
    rho : int, float, ndarray, or an SHGrid or SHCoeffs class instance
        The density contrast associated with the interface in kg / m3.
        A scalar gives a constant density contrast; an SHCoeffs or
        SHGrid instance gives a laterally varying one.
    gm : float
        The gravitational constant times the mass that is associated
        with the gravitational potential coefficients.
    nmax : integer, optional, default = 7
        The maximum order used in the Taylor-series expansion when
        calculating the potential coefficients.
    lmax : int, optional, default = shape.lmax
        The maximum spherical harmonic degree of the output coefficients.
    lmax_grid : int, optional, default = lmax
        If shape or rho is of type SHCoeffs, the maximum spherical
        harmonic degree resolvable when expanded onto a grid.
    lmax_calc : int, optional, default = lmax
        If shape or rho is of type SHCoeffs, the maximum spherical
        harmonic degree used when expanded onto a grid.
    omega : float, optional, default = None
        The angular rotation rate of the body.

    Description
    -----------
    The potential is calculated using the finite-amplitude technique of
    Wieczorek and Phillips (1998) for a constant density contrast and
    Wieczorek (2007) for a laterally varying one. The output
    coefficients are referenced to the mean radius of shape, and the
    potential is strictly valid only when evaluated at a radius greater
    than the maximum radius of shape. The input shape must correspond to
    absolute radii, as the degree-0 term determines the reference radius
    of the coefficients.

    Because the routine raises (shape - r0) to powers up to nmax, a
    shape bandlimited to degree L yields a function bandlimited to
    degree L*nmax. To avoid aliasing, the effective bandwidth of shape
    should in practice be about three times L; it can be increased with
    the pad() method (SHCoeffs input) or a larger lmax_grid (SHGrid
    input).
    """
    mass = gm / _G.value

    if type(shape) is not _SHRealCoeffs and type(shape) is not _DHRealGrid:
        raise ValueError('shape must be of type SHRealCoeffs '
                         'or DHRealGrid. Input type is {:s}'
                         .format(repr(type(shape))))

    # BUG FIX: the original condition read `type(rho is not _DHRealGrid)`,
    # which is the type of a bool (always truthy), so a valid DHRealGrid
    # rho was incorrectly rejected. The intended check is on type(rho).
    if (not issubclass(type(rho), float) and type(rho) is not int
            and type(rho) is not _np.ndarray
            and type(rho) is not _SHRealCoeffs
            and type(rho) is not _DHRealGrid):
        raise ValueError('rho must be of type float, int, ndarray, '
                         'SHRealCoeffs or DHRealGrid. Input type is {:s}'
                         .format(repr(type(rho))))

    # Expand SHCoeffs inputs onto grids before the finite-amplitude sums.
    if type(shape) is _SHRealCoeffs:
        shape = shape.expand(lmax=lmax_grid, lmax_calc=lmax_calc)

    if type(rho) is _SHRealCoeffs:
        rho = rho.expand(lmax=lmax_grid, lmax_calc=lmax_calc)

    if type(rho) is _DHRealGrid:
        # Laterally varying density: the two grids must be commensurate.
        if shape.lmax != rho.lmax:
            raise ValueError('The grids for shape and rho must have the '
                             'same size. '
                             'lmax of shape = {:d}, lmax of rho = {:d}'
                             .format(shape.lmax, rho.lmax))
        cilm, d = _CilmPlusRhoHDH(shape.data, nmax, mass, rho.data,
                                  lmax=lmax)
    else:
        # Constant density contrast.
        cilm, d = _CilmPlusDH(shape.data, nmax, mass, rho, lmax=lmax)

    clm = SHGravRealCoeffs(cilm, gm=gm, r0=d, omega=omega,
                           normalization='4pi', csphase=1)
    return clm
def function[from_shape, parameter[self, shape, rho, gm, nmax, lmax, lmax_grid, lmax_calc, omega]]: constant[ Initialize a class of gravitational potential spherical harmonic coefficients by calculuting the gravitational potential associatiated with relief along an interface. Usage ----- x = SHGravCoeffs.from_shape(shape, rho, gm, [nmax, lmax, lmax_grid, lmax_calc, omega]) Returns ------- x : SHGravCoeffs class instance. Parameters ---------- shape : SHGrid or SHCoeffs class instance The shape of the interface, either as an SHGrid or SHCoeffs class instance. If the input is an SHCoeffs class instance, this will be expaned on a grid using the optional parameters lmax_grid and lmax_calc. rho : int, float, or ndarray, or an SHGrid or SHCoeffs class instance The density contrast associated with the interface in kg / m3. If the input is a scalar, the density contrast is constant. If the input is an SHCoeffs or SHGrid class instance, the density contrast will vary laterally. gm : float The gravitational constant times the mass that is associated with the gravitational potential coefficients. nmax : integer, optional, default = 7 The maximum order used in the Taylor-series expansion when calculating the potential coefficients. lmax : int, optional, shape.lmax The maximum spherical harmonic degree of the output spherical harmonic coefficients. lmax_grid : int, optional, default = lmax If shape or rho is of type SHCoeffs, this parameter determines the maximum spherical harmonic degree that is resolvable when expanded onto a grid. lmax_calc : optional, integer, default = lmax If shape or rho is of type SHCoeffs, this parameter determines the maximum spherical harmonic degree that will be used when expanded onto a grid. omega : float, optional, default = None The angular rotation rate of the body. 
Description ----------- Initialize an SHGravCoeffs class instance by calculating the spherical harmonic coefficients of the gravitational potential associated with the shape of a density interface. The potential is calculated using the finite-amplitude technique of Wieczorek and Phillips (1998) for a constant density contrast and Wieczorek (2007) for a density contrast that varies laterally. The output coefficients are referenced to the mean radius of shape, and the potential is strictly valid only when it is evaluated at a radius greater than the maximum radius of shape. The input shape (and density contrast rho for variable density) can be either an SHGrid or SHCoeffs class instance. The routine makes direct use of gridded versions of these quantities, so if the input is of type SHCoeffs, it will first be expanded onto a grid. This exansion will be performed on a grid that can resolve degrees up to lmax_grid, with only the first lmax_calc coefficients being used. The input shape must correspond to absolute radii as the degree 0 term determines the reference radius of the coefficients. As an intermediate step, this routine calculates the spherical harmonic coefficients of the interface raised to the nth power, i.e., (shape-r0)**n, where r0 is the mean radius of shape. If the input shape is bandlimited to degree L, the resulting function will thus be bandlimited to degree L*nmax. This subroutine assumes implicitly that the maximum spherical harmonic degree of the input shape (when SHCoeffs) or maximum resolvable spherical harmonic degree of shape (when SHGrid) is greater or equal to this value. If this is not the case, aliasing will occur. In practice, for accurate results, the effective bandwidth needs only to be about three times the size of L, though this should be verified for each application. The effective bandwidth of shape (when SHCoeffs) can be increased by preprocessing with the method pad(), or by increaesing the value of lmax_grid (when SHGrid). 
] variable[mass] assign[=] binary_operation[name[gm] / name[_G].value] if <ast.BoolOp object at 0x7da18ede57e0> begin[:] <ast.Raise object at 0x7da18ede5d50> if <ast.BoolOp object at 0x7da18bccb490> begin[:] <ast.Raise object at 0x7da18bcc9060> if compare[call[name[type], parameter[name[shape]]] is name[_SHRealCoeffs]] begin[:] variable[shape] assign[=] call[name[shape].expand, parameter[]] if compare[call[name[type], parameter[name[rho]]] is name[_SHRealCoeffs]] begin[:] variable[rho] assign[=] call[name[rho].expand, parameter[]] if compare[call[name[type], parameter[name[rho]]] is name[_DHRealGrid]] begin[:] if compare[name[shape].lmax not_equal[!=] name[rho].lmax] begin[:] <ast.Raise object at 0x7da18bcc98a0> <ast.Tuple object at 0x7da18bccaad0> assign[=] call[name[_CilmPlusRhoHDH], parameter[name[shape].data, name[nmax], name[mass], name[rho].data]] variable[clm] assign[=] call[name[SHGravRealCoeffs], parameter[name[cilm]]] return[name[clm]]
keyword[def] identifier[from_shape] ( identifier[self] , identifier[shape] , identifier[rho] , identifier[gm] , identifier[nmax] = literal[int] , identifier[lmax] = keyword[None] , identifier[lmax_grid] = keyword[None] , identifier[lmax_calc] = keyword[None] , identifier[omega] = keyword[None] ): literal[string] identifier[mass] = identifier[gm] / identifier[_G] . identifier[value] keyword[if] identifier[type] ( identifier[shape] ) keyword[is] keyword[not] identifier[_SHRealCoeffs] keyword[and] identifier[type] ( identifier[shape] ) keyword[is] keyword[not] identifier[_DHRealGrid] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[repr] ( identifier[type] ( identifier[shape] )))) keyword[if] ( keyword[not] identifier[issubclass] ( identifier[type] ( identifier[rho] ), identifier[float] ) keyword[and] identifier[type] ( identifier[rho] ) keyword[is] keyword[not] identifier[int] keyword[and] identifier[type] ( identifier[rho] ) keyword[is] keyword[not] identifier[_np] . identifier[ndarray] keyword[and] identifier[type] ( identifier[rho] ) keyword[is] keyword[not] identifier[_SHRealCoeffs] keyword[and] identifier[type] ( identifier[rho] keyword[is] keyword[not] identifier[_DHRealGrid] )): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[repr] ( identifier[type] ( identifier[rho] )))) keyword[if] identifier[type] ( identifier[shape] ) keyword[is] identifier[_SHRealCoeffs] : identifier[shape] = identifier[shape] . identifier[expand] ( identifier[lmax] = identifier[lmax_grid] , identifier[lmax_calc] = identifier[lmax_calc] ) keyword[if] identifier[type] ( identifier[rho] ) keyword[is] identifier[_SHRealCoeffs] : identifier[rho] = identifier[rho] . 
identifier[expand] ( identifier[lmax] = identifier[lmax_grid] , identifier[lmax_calc] = identifier[lmax_calc] ) keyword[if] identifier[type] ( identifier[rho] ) keyword[is] identifier[_DHRealGrid] : keyword[if] identifier[shape] . identifier[lmax] != identifier[rho] . identifier[lmax] : keyword[raise] identifier[ValueError] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[shape] . identifier[lmax] , identifier[rho] . identifier[lmax] )) identifier[cilm] , identifier[d] = identifier[_CilmPlusRhoHDH] ( identifier[shape] . identifier[data] , identifier[nmax] , identifier[mass] , identifier[rho] . identifier[data] , identifier[lmax] = identifier[lmax] ) keyword[else] : identifier[cilm] , identifier[d] = identifier[_CilmPlusDH] ( identifier[shape] . identifier[data] , identifier[nmax] , identifier[mass] , identifier[rho] , identifier[lmax] = identifier[lmax] ) identifier[clm] = identifier[SHGravRealCoeffs] ( identifier[cilm] , identifier[gm] = identifier[gm] , identifier[r0] = identifier[d] , identifier[omega] = identifier[omega] , identifier[normalization] = literal[string] , identifier[csphase] = literal[int] ) keyword[return] identifier[clm]
def from_shape(self, shape, rho, gm, nmax=7, lmax=None, lmax_grid=None, lmax_calc=None, omega=None): """ Initialize a class of gravitational potential spherical harmonic coefficients by calculuting the gravitational potential associatiated with relief along an interface. Usage ----- x = SHGravCoeffs.from_shape(shape, rho, gm, [nmax, lmax, lmax_grid, lmax_calc, omega]) Returns ------- x : SHGravCoeffs class instance. Parameters ---------- shape : SHGrid or SHCoeffs class instance The shape of the interface, either as an SHGrid or SHCoeffs class instance. If the input is an SHCoeffs class instance, this will be expaned on a grid using the optional parameters lmax_grid and lmax_calc. rho : int, float, or ndarray, or an SHGrid or SHCoeffs class instance The density contrast associated with the interface in kg / m3. If the input is a scalar, the density contrast is constant. If the input is an SHCoeffs or SHGrid class instance, the density contrast will vary laterally. gm : float The gravitational constant times the mass that is associated with the gravitational potential coefficients. nmax : integer, optional, default = 7 The maximum order used in the Taylor-series expansion when calculating the potential coefficients. lmax : int, optional, shape.lmax The maximum spherical harmonic degree of the output spherical harmonic coefficients. lmax_grid : int, optional, default = lmax If shape or rho is of type SHCoeffs, this parameter determines the maximum spherical harmonic degree that is resolvable when expanded onto a grid. lmax_calc : optional, integer, default = lmax If shape or rho is of type SHCoeffs, this parameter determines the maximum spherical harmonic degree that will be used when expanded onto a grid. omega : float, optional, default = None The angular rotation rate of the body. 
Description ----------- Initialize an SHGravCoeffs class instance by calculating the spherical harmonic coefficients of the gravitational potential associated with the shape of a density interface. The potential is calculated using the finite-amplitude technique of Wieczorek and Phillips (1998) for a constant density contrast and Wieczorek (2007) for a density contrast that varies laterally. The output coefficients are referenced to the mean radius of shape, and the potential is strictly valid only when it is evaluated at a radius greater than the maximum radius of shape. The input shape (and density contrast rho for variable density) can be either an SHGrid or SHCoeffs class instance. The routine makes direct use of gridded versions of these quantities, so if the input is of type SHCoeffs, it will first be expanded onto a grid. This exansion will be performed on a grid that can resolve degrees up to lmax_grid, with only the first lmax_calc coefficients being used. The input shape must correspond to absolute radii as the degree 0 term determines the reference radius of the coefficients. As an intermediate step, this routine calculates the spherical harmonic coefficients of the interface raised to the nth power, i.e., (shape-r0)**n, where r0 is the mean radius of shape. If the input shape is bandlimited to degree L, the resulting function will thus be bandlimited to degree L*nmax. This subroutine assumes implicitly that the maximum spherical harmonic degree of the input shape (when SHCoeffs) or maximum resolvable spherical harmonic degree of shape (when SHGrid) is greater or equal to this value. If this is not the case, aliasing will occur. In practice, for accurate results, the effective bandwidth needs only to be about three times the size of L, though this should be verified for each application. The effective bandwidth of shape (when SHCoeffs) can be increased by preprocessing with the method pad(), or by increaesing the value of lmax_grid (when SHGrid). 
""" mass = gm / _G.value if type(shape) is not _SHRealCoeffs and type(shape) is not _DHRealGrid: raise ValueError('shape must be of type SHRealCoeffs or DHRealGrid. Input type is {:s}'.format(repr(type(shape)))) # depends on [control=['if'], data=[]] if not issubclass(type(rho), float) and type(rho) is not int and (type(rho) is not _np.ndarray) and (type(rho) is not _SHRealCoeffs) and type(rho is not _DHRealGrid): raise ValueError('rho must be of type float, int, ndarray, SHRealCoeffs or DHRealGrid. Input type is {:s}'.format(repr(type(rho)))) # depends on [control=['if'], data=[]] if type(shape) is _SHRealCoeffs: shape = shape.expand(lmax=lmax_grid, lmax_calc=lmax_calc) # depends on [control=['if'], data=[]] if type(rho) is _SHRealCoeffs: rho = rho.expand(lmax=lmax_grid, lmax_calc=lmax_calc) # depends on [control=['if'], data=[]] if type(rho) is _DHRealGrid: if shape.lmax != rho.lmax: raise ValueError('The grids for shape and rho must have the same size. lmax of shape = {:d}, lmax of rho = {:d}'.format(shape.lmax, rho.lmax)) # depends on [control=['if'], data=[]] (cilm, d) = _CilmPlusRhoHDH(shape.data, nmax, mass, rho.data, lmax=lmax) # depends on [control=['if'], data=[]] else: (cilm, d) = _CilmPlusDH(shape.data, nmax, mass, rho, lmax=lmax) clm = SHGravRealCoeffs(cilm, gm=gm, r0=d, omega=omega, normalization='4pi', csphase=1) return clm
def paint(self, painter, option, index): """Paint checkbox and text _________________________________________ | | label | duration | |toggle |_____________________| | | | families | | |_______|_____________________|___________| """ # Layout spacing = 10 metrics = painter.fontMetrics() body_rect = QtCore.QRectF(option.rect).adjusted(2, 2, -8, -2) content_rect = body_rect.adjusted(5, 5, -5, -5) toggle_rect = QtCore.QRectF(body_rect) toggle_rect.setWidth(7) toggle_rect.adjust(1, 1, 0, -1) icon_rect = QtCore.QRectF(content_rect) icon_rect.translate(toggle_rect.width() + spacing, 3) icon_rect.setWidth(35) icon_rect.setHeight(35) duration_rect = QtCore.QRectF(content_rect) duration_rect.translate(content_rect.width() - 50, 0) label_rect = QtCore.QRectF(content_rect) label_rect.translate(icon_rect.width() + spacing, 0) label_rect.setHeight(metrics.lineSpacing() + spacing) families_rect = QtCore.QRectF(label_rect) families_rect.translate(0, label_rect.height()) # Colors check_color = colors["idle"] if index.data(model.IsProcessing) is True: check_color = colors["active"] elif index.data(model.HasFailed) is True: check_color = colors["warning"] elif index.data(model.HasSucceeded) is True: check_color = colors["ok"] elif index.data(model.HasProcessed) is True: check_color = colors["ok"] icon = index.data(model.Icon) or icons["file"] label = index.data(model.Label) families = ", ".join(index.data(model.Families)) # Elide label = metrics.elidedText(label, QtCore.Qt.ElideRight, label_rect.width()) families = metrics.elidedText(families, QtCore.Qt.ElideRight, label_rect.width()) font_color = colors["idle"] if not index.data(model.IsChecked): font_color = colors["inactive"] # Maintan reference to state, so we can restore it once we're done painter.save() # Draw background painter.fillRect(body_rect, colors["hover"]) painter.setFont(fonts["largeAwesome"]) painter.setPen(QtGui.QPen(font_color)) painter.drawText(icon_rect, icon) # Draw label painter.setFont(fonts["h3"]) 
painter.drawText(label_rect, label) # Draw families painter.setFont(fonts["h5"]) painter.setPen(QtGui.QPen(colors["inactive"])) painter.drawText(families_rect, families) # Draw checkbox pen = QtGui.QPen(check_color, 1) painter.setPen(pen) if index.data(model.IsOptional): painter.drawRect(toggle_rect) if index.data(model.IsChecked): painter.fillRect(toggle_rect, check_color) elif not index.data(model.IsIdle) and index.data(model.IsChecked): painter.fillRect(toggle_rect, check_color) if option.state & QtWidgets.QStyle.State_MouseOver: painter.fillRect(body_rect, colors["hover"]) if option.state & QtWidgets.QStyle.State_Selected: painter.fillRect(body_rect, colors["selected"]) painter.setPen(colors["outline"]) painter.drawRect(body_rect) # Ok, we're done, tidy up. painter.restore()
def function[paint, parameter[self, painter, option, index]]: constant[Paint checkbox and text _________________________________________ | | label | duration | |toggle |_____________________| | | | families | | |_______|_____________________|___________| ] variable[spacing] assign[=] constant[10] variable[metrics] assign[=] call[name[painter].fontMetrics, parameter[]] variable[body_rect] assign[=] call[call[name[QtCore].QRectF, parameter[name[option].rect]].adjusted, parameter[constant[2], constant[2], <ast.UnaryOp object at 0x7da1b0c0ee60>, <ast.UnaryOp object at 0x7da1b0c0ee00>]] variable[content_rect] assign[=] call[name[body_rect].adjusted, parameter[constant[5], constant[5], <ast.UnaryOp object at 0x7da1b0c0ec50>, <ast.UnaryOp object at 0x7da1b0c0ebf0>]] variable[toggle_rect] assign[=] call[name[QtCore].QRectF, parameter[name[body_rect]]] call[name[toggle_rect].setWidth, parameter[constant[7]]] call[name[toggle_rect].adjust, parameter[constant[1], constant[1], constant[0], <ast.UnaryOp object at 0x7da1b0c0e800>]] variable[icon_rect] assign[=] call[name[QtCore].QRectF, parameter[name[content_rect]]] call[name[icon_rect].translate, parameter[binary_operation[call[name[toggle_rect].width, parameter[]] + name[spacing]], constant[3]]] call[name[icon_rect].setWidth, parameter[constant[35]]] call[name[icon_rect].setHeight, parameter[constant[35]]] variable[duration_rect] assign[=] call[name[QtCore].QRectF, parameter[name[content_rect]]] call[name[duration_rect].translate, parameter[binary_operation[call[name[content_rect].width, parameter[]] - constant[50]], constant[0]]] variable[label_rect] assign[=] call[name[QtCore].QRectF, parameter[name[content_rect]]] call[name[label_rect].translate, parameter[binary_operation[call[name[icon_rect].width, parameter[]] + name[spacing]], constant[0]]] call[name[label_rect].setHeight, parameter[binary_operation[call[name[metrics].lineSpacing, parameter[]] + name[spacing]]]] variable[families_rect] assign[=] 
call[name[QtCore].QRectF, parameter[name[label_rect]]] call[name[families_rect].translate, parameter[constant[0], call[name[label_rect].height, parameter[]]]] variable[check_color] assign[=] call[name[colors]][constant[idle]] if compare[call[name[index].data, parameter[name[model].IsProcessing]] is constant[True]] begin[:] variable[check_color] assign[=] call[name[colors]][constant[active]] variable[icon] assign[=] <ast.BoolOp object at 0x7da1b0c0cbe0> variable[label] assign[=] call[name[index].data, parameter[name[model].Label]] variable[families] assign[=] call[constant[, ].join, parameter[call[name[index].data, parameter[name[model].Families]]]] variable[label] assign[=] call[name[metrics].elidedText, parameter[name[label], name[QtCore].Qt.ElideRight, call[name[label_rect].width, parameter[]]]] variable[families] assign[=] call[name[metrics].elidedText, parameter[name[families], name[QtCore].Qt.ElideRight, call[name[label_rect].width, parameter[]]]] variable[font_color] assign[=] call[name[colors]][constant[idle]] if <ast.UnaryOp object at 0x7da1b0c0c160> begin[:] variable[font_color] assign[=] call[name[colors]][constant[inactive]] call[name[painter].save, parameter[]] call[name[painter].fillRect, parameter[name[body_rect], call[name[colors]][constant[hover]]]] call[name[painter].setFont, parameter[call[name[fonts]][constant[largeAwesome]]]] call[name[painter].setPen, parameter[call[name[QtGui].QPen, parameter[name[font_color]]]]] call[name[painter].drawText, parameter[name[icon_rect], name[icon]]] call[name[painter].setFont, parameter[call[name[fonts]][constant[h3]]]] call[name[painter].drawText, parameter[name[label_rect], name[label]]] call[name[painter].setFont, parameter[call[name[fonts]][constant[h5]]]] call[name[painter].setPen, parameter[call[name[QtGui].QPen, parameter[call[name[colors]][constant[inactive]]]]]] call[name[painter].drawText, parameter[name[families_rect], name[families]]] variable[pen] assign[=] call[name[QtGui].QPen, 
parameter[name[check_color], constant[1]]] call[name[painter].setPen, parameter[name[pen]]] if call[name[index].data, parameter[name[model].IsOptional]] begin[:] call[name[painter].drawRect, parameter[name[toggle_rect]]] if call[name[index].data, parameter[name[model].IsChecked]] begin[:] call[name[painter].fillRect, parameter[name[toggle_rect], name[check_color]]] if binary_operation[name[option].state <ast.BitAnd object at 0x7da2590d6b60> name[QtWidgets].QStyle.State_MouseOver] begin[:] call[name[painter].fillRect, parameter[name[body_rect], call[name[colors]][constant[hover]]]] if binary_operation[name[option].state <ast.BitAnd object at 0x7da2590d6b60> name[QtWidgets].QStyle.State_Selected] begin[:] call[name[painter].fillRect, parameter[name[body_rect], call[name[colors]][constant[selected]]]] call[name[painter].setPen, parameter[call[name[colors]][constant[outline]]]] call[name[painter].drawRect, parameter[name[body_rect]]] call[name[painter].restore, parameter[]]
keyword[def] identifier[paint] ( identifier[self] , identifier[painter] , identifier[option] , identifier[index] ): literal[string] identifier[spacing] = literal[int] identifier[metrics] = identifier[painter] . identifier[fontMetrics] () identifier[body_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[option] . identifier[rect] ). identifier[adjusted] ( literal[int] , literal[int] ,- literal[int] ,- literal[int] ) identifier[content_rect] = identifier[body_rect] . identifier[adjusted] ( literal[int] , literal[int] ,- literal[int] ,- literal[int] ) identifier[toggle_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[body_rect] ) identifier[toggle_rect] . identifier[setWidth] ( literal[int] ) identifier[toggle_rect] . identifier[adjust] ( literal[int] , literal[int] , literal[int] ,- literal[int] ) identifier[icon_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[content_rect] ) identifier[icon_rect] . identifier[translate] ( identifier[toggle_rect] . identifier[width] ()+ identifier[spacing] , literal[int] ) identifier[icon_rect] . identifier[setWidth] ( literal[int] ) identifier[icon_rect] . identifier[setHeight] ( literal[int] ) identifier[duration_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[content_rect] ) identifier[duration_rect] . identifier[translate] ( identifier[content_rect] . identifier[width] ()- literal[int] , literal[int] ) identifier[label_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[content_rect] ) identifier[label_rect] . identifier[translate] ( identifier[icon_rect] . identifier[width] ()+ identifier[spacing] , literal[int] ) identifier[label_rect] . identifier[setHeight] ( identifier[metrics] . identifier[lineSpacing] ()+ identifier[spacing] ) identifier[families_rect] = identifier[QtCore] . identifier[QRectF] ( identifier[label_rect] ) identifier[families_rect] . identifier[translate] ( literal[int] , identifier[label_rect] . 
identifier[height] ()) identifier[check_color] = identifier[colors] [ literal[string] ] keyword[if] identifier[index] . identifier[data] ( identifier[model] . identifier[IsProcessing] ) keyword[is] keyword[True] : identifier[check_color] = identifier[colors] [ literal[string] ] keyword[elif] identifier[index] . identifier[data] ( identifier[model] . identifier[HasFailed] ) keyword[is] keyword[True] : identifier[check_color] = identifier[colors] [ literal[string] ] keyword[elif] identifier[index] . identifier[data] ( identifier[model] . identifier[HasSucceeded] ) keyword[is] keyword[True] : identifier[check_color] = identifier[colors] [ literal[string] ] keyword[elif] identifier[index] . identifier[data] ( identifier[model] . identifier[HasProcessed] ) keyword[is] keyword[True] : identifier[check_color] = identifier[colors] [ literal[string] ] identifier[icon] = identifier[index] . identifier[data] ( identifier[model] . identifier[Icon] ) keyword[or] identifier[icons] [ literal[string] ] identifier[label] = identifier[index] . identifier[data] ( identifier[model] . identifier[Label] ) identifier[families] = literal[string] . identifier[join] ( identifier[index] . identifier[data] ( identifier[model] . identifier[Families] )) identifier[label] = identifier[metrics] . identifier[elidedText] ( identifier[label] , identifier[QtCore] . identifier[Qt] . identifier[ElideRight] , identifier[label_rect] . identifier[width] ()) identifier[families] = identifier[metrics] . identifier[elidedText] ( identifier[families] , identifier[QtCore] . identifier[Qt] . identifier[ElideRight] , identifier[label_rect] . identifier[width] ()) identifier[font_color] = identifier[colors] [ literal[string] ] keyword[if] keyword[not] identifier[index] . identifier[data] ( identifier[model] . identifier[IsChecked] ): identifier[font_color] = identifier[colors] [ literal[string] ] identifier[painter] . identifier[save] () identifier[painter] . 
identifier[fillRect] ( identifier[body_rect] , identifier[colors] [ literal[string] ]) identifier[painter] . identifier[setFont] ( identifier[fonts] [ literal[string] ]) identifier[painter] . identifier[setPen] ( identifier[QtGui] . identifier[QPen] ( identifier[font_color] )) identifier[painter] . identifier[drawText] ( identifier[icon_rect] , identifier[icon] ) identifier[painter] . identifier[setFont] ( identifier[fonts] [ literal[string] ]) identifier[painter] . identifier[drawText] ( identifier[label_rect] , identifier[label] ) identifier[painter] . identifier[setFont] ( identifier[fonts] [ literal[string] ]) identifier[painter] . identifier[setPen] ( identifier[QtGui] . identifier[QPen] ( identifier[colors] [ literal[string] ])) identifier[painter] . identifier[drawText] ( identifier[families_rect] , identifier[families] ) identifier[pen] = identifier[QtGui] . identifier[QPen] ( identifier[check_color] , literal[int] ) identifier[painter] . identifier[setPen] ( identifier[pen] ) keyword[if] identifier[index] . identifier[data] ( identifier[model] . identifier[IsOptional] ): identifier[painter] . identifier[drawRect] ( identifier[toggle_rect] ) keyword[if] identifier[index] . identifier[data] ( identifier[model] . identifier[IsChecked] ): identifier[painter] . identifier[fillRect] ( identifier[toggle_rect] , identifier[check_color] ) keyword[elif] keyword[not] identifier[index] . identifier[data] ( identifier[model] . identifier[IsIdle] ) keyword[and] identifier[index] . identifier[data] ( identifier[model] . identifier[IsChecked] ): identifier[painter] . identifier[fillRect] ( identifier[toggle_rect] , identifier[check_color] ) keyword[if] identifier[option] . identifier[state] & identifier[QtWidgets] . identifier[QStyle] . identifier[State_MouseOver] : identifier[painter] . identifier[fillRect] ( identifier[body_rect] , identifier[colors] [ literal[string] ]) keyword[if] identifier[option] . identifier[state] & identifier[QtWidgets] . identifier[QStyle] . 
identifier[State_Selected] : identifier[painter] . identifier[fillRect] ( identifier[body_rect] , identifier[colors] [ literal[string] ]) identifier[painter] . identifier[setPen] ( identifier[colors] [ literal[string] ]) identifier[painter] . identifier[drawRect] ( identifier[body_rect] ) identifier[painter] . identifier[restore] ()
def paint(self, painter, option, index): """Paint checkbox and text _________________________________________ | | label | duration | |toggle |_____________________| | | | families | | |_______|_____________________|___________| """ # Layout spacing = 10 metrics = painter.fontMetrics() body_rect = QtCore.QRectF(option.rect).adjusted(2, 2, -8, -2) content_rect = body_rect.adjusted(5, 5, -5, -5) toggle_rect = QtCore.QRectF(body_rect) toggle_rect.setWidth(7) toggle_rect.adjust(1, 1, 0, -1) icon_rect = QtCore.QRectF(content_rect) icon_rect.translate(toggle_rect.width() + spacing, 3) icon_rect.setWidth(35) icon_rect.setHeight(35) duration_rect = QtCore.QRectF(content_rect) duration_rect.translate(content_rect.width() - 50, 0) label_rect = QtCore.QRectF(content_rect) label_rect.translate(icon_rect.width() + spacing, 0) label_rect.setHeight(metrics.lineSpacing() + spacing) families_rect = QtCore.QRectF(label_rect) families_rect.translate(0, label_rect.height()) # Colors check_color = colors['idle'] if index.data(model.IsProcessing) is True: check_color = colors['active'] # depends on [control=['if'], data=[]] elif index.data(model.HasFailed) is True: check_color = colors['warning'] # depends on [control=['if'], data=[]] elif index.data(model.HasSucceeded) is True: check_color = colors['ok'] # depends on [control=['if'], data=[]] elif index.data(model.HasProcessed) is True: check_color = colors['ok'] # depends on [control=['if'], data=[]] icon = index.data(model.Icon) or icons['file'] label = index.data(model.Label) families = ', '.join(index.data(model.Families)) # Elide label = metrics.elidedText(label, QtCore.Qt.ElideRight, label_rect.width()) families = metrics.elidedText(families, QtCore.Qt.ElideRight, label_rect.width()) font_color = colors['idle'] if not index.data(model.IsChecked): font_color = colors['inactive'] # depends on [control=['if'], data=[]] # Maintan reference to state, so we can restore it once we're done painter.save() # Draw background 
painter.fillRect(body_rect, colors['hover']) painter.setFont(fonts['largeAwesome']) painter.setPen(QtGui.QPen(font_color)) painter.drawText(icon_rect, icon) # Draw label painter.setFont(fonts['h3']) painter.drawText(label_rect, label) # Draw families painter.setFont(fonts['h5']) painter.setPen(QtGui.QPen(colors['inactive'])) painter.drawText(families_rect, families) # Draw checkbox pen = QtGui.QPen(check_color, 1) painter.setPen(pen) if index.data(model.IsOptional): painter.drawRect(toggle_rect) if index.data(model.IsChecked): painter.fillRect(toggle_rect, check_color) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif not index.data(model.IsIdle) and index.data(model.IsChecked): painter.fillRect(toggle_rect, check_color) # depends on [control=['if'], data=[]] if option.state & QtWidgets.QStyle.State_MouseOver: painter.fillRect(body_rect, colors['hover']) # depends on [control=['if'], data=[]] if option.state & QtWidgets.QStyle.State_Selected: painter.fillRect(body_rect, colors['selected']) # depends on [control=['if'], data=[]] painter.setPen(colors['outline']) painter.drawRect(body_rect) # Ok, we're done, tidy up. painter.restore()
def _normal_model(self, beta): """ Creates the structure of the model (model matrices, etc) for a Normal family ARIMAX model. Parameters ---------- beta : np.ndarray Contains untransformed starting values for the latent variables Returns ---------- mu : np.ndarray Contains the predicted values (location) for the time series Y : np.ndarray Contains the length-adjusted time series (accounting for lags) """ Y = self.y[self.max_lag:] # Transform latent variables z = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) # Constant and AR terms if self.ar == 0: mu = np.transpose(self.ar_matrix) elif self.ar == 1: mu = np.transpose(self.ar_matrix)*z[:-self.family_z_no-self.ma-len(self.X_names)][0] else: mu = np.matmul(np.transpose(self.ar_matrix),z[:-self.family_z_no-self.ma-len(self.X_names)]) # X terms mu = mu + np.matmul(self.X[self.integ+self.max_lag:],z[self.ma+self.ar:(self.ma+self.ar+len(self.X_names))]) # MA terms if self.ma != 0: mu = arimax_recursion(z, mu, Y, self.max_lag, Y.shape[0], self.ar, self.ma) return mu, Y
def function[_normal_model, parameter[self, beta]]: constant[ Creates the structure of the model (model matrices, etc) for a Normal family ARIMAX model. Parameters ---------- beta : np.ndarray Contains untransformed starting values for the latent variables Returns ---------- mu : np.ndarray Contains the predicted values (location) for the time series Y : np.ndarray Contains the length-adjusted time series (accounting for lags) ] variable[Y] assign[=] call[name[self].y][<ast.Slice object at 0x7da207f9a2c0>] variable[z] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da207f99a50>]] if compare[name[self].ar equal[==] constant[0]] begin[:] variable[mu] assign[=] call[name[np].transpose, parameter[name[self].ar_matrix]] variable[mu] assign[=] binary_operation[name[mu] + call[name[np].matmul, parameter[call[name[self].X][<ast.Slice object at 0x7da207f99d80>], call[name[z]][<ast.Slice object at 0x7da207f98b20>]]]] if compare[name[self].ma not_equal[!=] constant[0]] begin[:] variable[mu] assign[=] call[name[arimax_recursion], parameter[name[z], name[mu], name[Y], name[self].max_lag, call[name[Y].shape][constant[0]], name[self].ar, name[self].ma]] return[tuple[[<ast.Name object at 0x7da207f9a6b0>, <ast.Name object at 0x7da207f9bac0>]]]
keyword[def] identifier[_normal_model] ( identifier[self] , identifier[beta] ): literal[string] identifier[Y] = identifier[self] . identifier[y] [ identifier[self] . identifier[max_lag] :] identifier[z] = identifier[np] . identifier[array] ([ identifier[self] . identifier[latent_variables] . identifier[z_list] [ identifier[k] ]. identifier[prior] . identifier[transform] ( identifier[beta] [ identifier[k] ]) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[beta] . identifier[shape] [ literal[int] ])]) keyword[if] identifier[self] . identifier[ar] == literal[int] : identifier[mu] = identifier[np] . identifier[transpose] ( identifier[self] . identifier[ar_matrix] ) keyword[elif] identifier[self] . identifier[ar] == literal[int] : identifier[mu] = identifier[np] . identifier[transpose] ( identifier[self] . identifier[ar_matrix] )* identifier[z] [:- identifier[self] . identifier[family_z_no] - identifier[self] . identifier[ma] - identifier[len] ( identifier[self] . identifier[X_names] )][ literal[int] ] keyword[else] : identifier[mu] = identifier[np] . identifier[matmul] ( identifier[np] . identifier[transpose] ( identifier[self] . identifier[ar_matrix] ), identifier[z] [:- identifier[self] . identifier[family_z_no] - identifier[self] . identifier[ma] - identifier[len] ( identifier[self] . identifier[X_names] )]) identifier[mu] = identifier[mu] + identifier[np] . identifier[matmul] ( identifier[self] . identifier[X] [ identifier[self] . identifier[integ] + identifier[self] . identifier[max_lag] :], identifier[z] [ identifier[self] . identifier[ma] + identifier[self] . identifier[ar] :( identifier[self] . identifier[ma] + identifier[self] . identifier[ar] + identifier[len] ( identifier[self] . identifier[X_names] ))]) keyword[if] identifier[self] . identifier[ma] != literal[int] : identifier[mu] = identifier[arimax_recursion] ( identifier[z] , identifier[mu] , identifier[Y] , identifier[self] . identifier[max_lag] , identifier[Y] . 
identifier[shape] [ literal[int] ], identifier[self] . identifier[ar] , identifier[self] . identifier[ma] ) keyword[return] identifier[mu] , identifier[Y]
def _normal_model(self, beta): """ Creates the structure of the model (model matrices, etc) for a Normal family ARIMAX model. Parameters ---------- beta : np.ndarray Contains untransformed starting values for the latent variables Returns ---------- mu : np.ndarray Contains the predicted values (location) for the time series Y : np.ndarray Contains the length-adjusted time series (accounting for lags) """ Y = self.y[self.max_lag:] # Transform latent variables z = np.array([self.latent_variables.z_list[k].prior.transform(beta[k]) for k in range(beta.shape[0])]) # Constant and AR terms if self.ar == 0: mu = np.transpose(self.ar_matrix) # depends on [control=['if'], data=[]] elif self.ar == 1: mu = np.transpose(self.ar_matrix) * z[:-self.family_z_no - self.ma - len(self.X_names)][0] # depends on [control=['if'], data=[]] else: mu = np.matmul(np.transpose(self.ar_matrix), z[:-self.family_z_no - self.ma - len(self.X_names)]) # X terms mu = mu + np.matmul(self.X[self.integ + self.max_lag:], z[self.ma + self.ar:self.ma + self.ar + len(self.X_names)]) # MA terms if self.ma != 0: mu = arimax_recursion(z, mu, Y, self.max_lag, Y.shape[0], self.ar, self.ma) # depends on [control=['if'], data=[]] return (mu, Y)
def get_scraperclasses(): """Find all comic scraper classes in the plugins directory. The result is cached. @return: list of Scraper classes @rtype: list of Scraper """ global _scraperclasses if _scraperclasses is None: out.debug(u"Loading comic modules...") modules = loader.get_modules('plugins') plugins = loader.get_plugins(modules, Scraper) _scraperclasses = list(plugins) check_scrapers() out.debug(u"... %d modules loaded." % len(_scraperclasses)) return _scraperclasses
def function[get_scraperclasses, parameter[]]: constant[Find all comic scraper classes in the plugins directory. The result is cached. @return: list of Scraper classes @rtype: list of Scraper ] <ast.Global object at 0x7da207f98ac0> if compare[name[_scraperclasses] is constant[None]] begin[:] call[name[out].debug, parameter[constant[Loading comic modules...]]] variable[modules] assign[=] call[name[loader].get_modules, parameter[constant[plugins]]] variable[plugins] assign[=] call[name[loader].get_plugins, parameter[name[modules], name[Scraper]]] variable[_scraperclasses] assign[=] call[name[list], parameter[name[plugins]]] call[name[check_scrapers], parameter[]] call[name[out].debug, parameter[binary_operation[constant[... %d modules loaded.] <ast.Mod object at 0x7da2590d6920> call[name[len], parameter[name[_scraperclasses]]]]]] return[name[_scraperclasses]]
keyword[def] identifier[get_scraperclasses] (): literal[string] keyword[global] identifier[_scraperclasses] keyword[if] identifier[_scraperclasses] keyword[is] keyword[None] : identifier[out] . identifier[debug] ( literal[string] ) identifier[modules] = identifier[loader] . identifier[get_modules] ( literal[string] ) identifier[plugins] = identifier[loader] . identifier[get_plugins] ( identifier[modules] , identifier[Scraper] ) identifier[_scraperclasses] = identifier[list] ( identifier[plugins] ) identifier[check_scrapers] () identifier[out] . identifier[debug] ( literal[string] % identifier[len] ( identifier[_scraperclasses] )) keyword[return] identifier[_scraperclasses]
def get_scraperclasses(): """Find all comic scraper classes in the plugins directory. The result is cached. @return: list of Scraper classes @rtype: list of Scraper """ global _scraperclasses if _scraperclasses is None: out.debug(u'Loading comic modules...') modules = loader.get_modules('plugins') plugins = loader.get_plugins(modules, Scraper) _scraperclasses = list(plugins) check_scrapers() out.debug(u'... %d modules loaded.' % len(_scraperclasses)) # depends on [control=['if'], data=['_scraperclasses']] return _scraperclasses
def _get_offset(self): """ Subclasses may override this method. """ sx, sxy, syx, sy, ox, oy = self.transformation return (ox, oy)
def function[_get_offset, parameter[self]]: constant[ Subclasses may override this method. ] <ast.Tuple object at 0x7da20c76c8b0> assign[=] name[self].transformation return[tuple[[<ast.Name object at 0x7da20c76de40>, <ast.Name object at 0x7da20c76e2f0>]]]
keyword[def] identifier[_get_offset] ( identifier[self] ): literal[string] identifier[sx] , identifier[sxy] , identifier[syx] , identifier[sy] , identifier[ox] , identifier[oy] = identifier[self] . identifier[transformation] keyword[return] ( identifier[ox] , identifier[oy] )
def _get_offset(self): """ Subclasses may override this method. """ (sx, sxy, syx, sy, ox, oy) = self.transformation return (ox, oy)
def eeg_select_channels(raw, channel_names): """ Select one or several channels by name and returns them in a dataframe. Parameters ---------- raw : mne.io.Raw Raw EEG data. channel_names : str or list Channel's name(s). Returns ---------- channels : pd.DataFrame Channel. Example ---------- >>> import neurokit as nk >>> raw = nk.eeg_select_channel(raw, "TP7") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - mne *See Also* - mne package: http://martinos.org/mne/dev/index.html """ if isinstance(channel_names, list) is False: channel_names = [channel_names] channels, time_index = raw.copy().pick_channels(channel_names)[:] if len(channel_names) > 1: channels = pd.DataFrame(channels.T, columns=channel_names) else: channels = pd.Series(channels[0]) channels.name = channel_names[0] return(channels)
def function[eeg_select_channels, parameter[raw, channel_names]]: constant[ Select one or several channels by name and returns them in a dataframe. Parameters ---------- raw : mne.io.Raw Raw EEG data. channel_names : str or list Channel's name(s). Returns ---------- channels : pd.DataFrame Channel. Example ---------- >>> import neurokit as nk >>> raw = nk.eeg_select_channel(raw, "TP7") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - mne *See Also* - mne package: http://martinos.org/mne/dev/index.html ] if compare[call[name[isinstance], parameter[name[channel_names], name[list]]] is constant[False]] begin[:] variable[channel_names] assign[=] list[[<ast.Name object at 0x7da1b26ad630>]] <ast.Tuple object at 0x7da1b26ae260> assign[=] call[call[call[name[raw].copy, parameter[]].pick_channels, parameter[name[channel_names]]]][<ast.Slice object at 0x7da204347880>] if compare[call[name[len], parameter[name[channel_names]]] greater[>] constant[1]] begin[:] variable[channels] assign[=] call[name[pd].DataFrame, parameter[name[channels].T]] return[name[channels]]
keyword[def] identifier[eeg_select_channels] ( identifier[raw] , identifier[channel_names] ): literal[string] keyword[if] identifier[isinstance] ( identifier[channel_names] , identifier[list] ) keyword[is] keyword[False] : identifier[channel_names] =[ identifier[channel_names] ] identifier[channels] , identifier[time_index] = identifier[raw] . identifier[copy] (). identifier[pick_channels] ( identifier[channel_names] )[:] keyword[if] identifier[len] ( identifier[channel_names] )> literal[int] : identifier[channels] = identifier[pd] . identifier[DataFrame] ( identifier[channels] . identifier[T] , identifier[columns] = identifier[channel_names] ) keyword[else] : identifier[channels] = identifier[pd] . identifier[Series] ( identifier[channels] [ literal[int] ]) identifier[channels] . identifier[name] = identifier[channel_names] [ literal[int] ] keyword[return] ( identifier[channels] )
def eeg_select_channels(raw, channel_names): """ Select one or several channels by name and returns them in a dataframe. Parameters ---------- raw : mne.io.Raw Raw EEG data. channel_names : str or list Channel's name(s). Returns ---------- channels : pd.DataFrame Channel. Example ---------- >>> import neurokit as nk >>> raw = nk.eeg_select_channel(raw, "TP7") Notes ---------- *Authors* - `Dominique Makowski <https://dominiquemakowski.github.io/>`_ *Dependencies* - mne *See Also* - mne package: http://martinos.org/mne/dev/index.html """ if isinstance(channel_names, list) is False: channel_names = [channel_names] # depends on [control=['if'], data=[]] (channels, time_index) = raw.copy().pick_channels(channel_names)[:] if len(channel_names) > 1: channels = pd.DataFrame(channels.T, columns=channel_names) # depends on [control=['if'], data=[]] else: channels = pd.Series(channels[0]) channels.name = channel_names[0] return channels
def dtype(self, byte_order='='): ''' Return the numpy dtype of the in-memory representation of the data. (If there are no list properties, and the PLY format is binary, then this also accurately describes the on-disk representation of the element.) ''' return _np.dtype([(prop.name, prop.dtype(byte_order)) for prop in self.properties])
def function[dtype, parameter[self, byte_order]]: constant[ Return the numpy dtype of the in-memory representation of the data. (If there are no list properties, and the PLY format is binary, then this also accurately describes the on-disk representation of the element.) ] return[call[name[_np].dtype, parameter[<ast.ListComp object at 0x7da1b0b46710>]]]
keyword[def] identifier[dtype] ( identifier[self] , identifier[byte_order] = literal[string] ): literal[string] keyword[return] identifier[_np] . identifier[dtype] ([( identifier[prop] . identifier[name] , identifier[prop] . identifier[dtype] ( identifier[byte_order] )) keyword[for] identifier[prop] keyword[in] identifier[self] . identifier[properties] ])
def dtype(self, byte_order='='): """ Return the numpy dtype of the in-memory representation of the data. (If there are no list properties, and the PLY format is binary, then this also accurately describes the on-disk representation of the element.) """ return _np.dtype([(prop.name, prop.dtype(byte_order)) for prop in self.properties])
def copy_data_ext(self, model, field, dest=None, idx=None, astype=None): """ Retrieve the field of another model and store it as a field. :param model: name of the source model being a model name or a group name :param field: name of the field to retrieve :param dest: name of the destination field in ``self`` :param idx: idx of elements to access :param astype: type cast :type model: str :type field: str :type dest: str :type idx: list, matrix :type astype: None, list, matrix :return: None """ # use default destination if not dest: dest = field assert dest not in self._states + self._algebs self.__dict__[dest] = self.read_data_ext( model, field, idx, astype=astype) if idx is not None: if len(idx) == self.n: self.link_to(model, idx, self.idx)
def function[copy_data_ext, parameter[self, model, field, dest, idx, astype]]: constant[ Retrieve the field of another model and store it as a field. :param model: name of the source model being a model name or a group name :param field: name of the field to retrieve :param dest: name of the destination field in ``self`` :param idx: idx of elements to access :param astype: type cast :type model: str :type field: str :type dest: str :type idx: list, matrix :type astype: None, list, matrix :return: None ] if <ast.UnaryOp object at 0x7da2044c2050> begin[:] variable[dest] assign[=] name[field] assert[compare[name[dest] <ast.NotIn object at 0x7da2590d7190> binary_operation[name[self]._states + name[self]._algebs]]] call[name[self].__dict__][name[dest]] assign[=] call[name[self].read_data_ext, parameter[name[model], name[field], name[idx]]] if compare[name[idx] is_not constant[None]] begin[:] if compare[call[name[len], parameter[name[idx]]] equal[==] name[self].n] begin[:] call[name[self].link_to, parameter[name[model], name[idx], name[self].idx]]
keyword[def] identifier[copy_data_ext] ( identifier[self] , identifier[model] , identifier[field] , identifier[dest] = keyword[None] , identifier[idx] = keyword[None] , identifier[astype] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[dest] : identifier[dest] = identifier[field] keyword[assert] identifier[dest] keyword[not] keyword[in] identifier[self] . identifier[_states] + identifier[self] . identifier[_algebs] identifier[self] . identifier[__dict__] [ identifier[dest] ]= identifier[self] . identifier[read_data_ext] ( identifier[model] , identifier[field] , identifier[idx] , identifier[astype] = identifier[astype] ) keyword[if] identifier[idx] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[len] ( identifier[idx] )== identifier[self] . identifier[n] : identifier[self] . identifier[link_to] ( identifier[model] , identifier[idx] , identifier[self] . identifier[idx] )
def copy_data_ext(self, model, field, dest=None, idx=None, astype=None): """ Retrieve the field of another model and store it as a field. :param model: name of the source model being a model name or a group name :param field: name of the field to retrieve :param dest: name of the destination field in ``self`` :param idx: idx of elements to access :param astype: type cast :type model: str :type field: str :type dest: str :type idx: list, matrix :type astype: None, list, matrix :return: None """ # use default destination if not dest: dest = field # depends on [control=['if'], data=[]] assert dest not in self._states + self._algebs self.__dict__[dest] = self.read_data_ext(model, field, idx, astype=astype) if idx is not None: if len(idx) == self.n: self.link_to(model, idx, self.idx) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['idx']]
def open(cls, path): """Load an image file into a PIX object. Leptonica can load TIFF, PNM (PBM, PGM, PPM), PNG, and JPEG. If loading fails then the object will wrap a C null pointer. """ filename = fspath(path) with _LeptonicaErrorTrap(): return cls(lept.pixRead(os.fsencode(filename)))
def function[open, parameter[cls, path]]: constant[Load an image file into a PIX object. Leptonica can load TIFF, PNM (PBM, PGM, PPM), PNG, and JPEG. If loading fails then the object will wrap a C null pointer. ] variable[filename] assign[=] call[name[fspath], parameter[name[path]]] with call[name[_LeptonicaErrorTrap], parameter[]] begin[:] return[call[name[cls], parameter[call[name[lept].pixRead, parameter[call[name[os].fsencode, parameter[name[filename]]]]]]]]
keyword[def] identifier[open] ( identifier[cls] , identifier[path] ): literal[string] identifier[filename] = identifier[fspath] ( identifier[path] ) keyword[with] identifier[_LeptonicaErrorTrap] (): keyword[return] identifier[cls] ( identifier[lept] . identifier[pixRead] ( identifier[os] . identifier[fsencode] ( identifier[filename] )))
def open(cls, path): """Load an image file into a PIX object. Leptonica can load TIFF, PNM (PBM, PGM, PPM), PNG, and JPEG. If loading fails then the object will wrap a C null pointer. """ filename = fspath(path) with _LeptonicaErrorTrap(): return cls(lept.pixRead(os.fsencode(filename))) # depends on [control=['with'], data=[]]
def get_video(self, node): """ Create a video object from a video embed """ video = Video() video.embed_code = self.get_embed_code(node) video.embed_type = self.get_embed_type(node) video.width = self.get_width(node) video.height = self.get_height(node) video.src = self.get_src(node) video.provider = self.get_provider(video.src) return video
def function[get_video, parameter[self, node]]: constant[ Create a video object from a video embed ] variable[video] assign[=] call[name[Video], parameter[]] name[video].embed_code assign[=] call[name[self].get_embed_code, parameter[name[node]]] name[video].embed_type assign[=] call[name[self].get_embed_type, parameter[name[node]]] name[video].width assign[=] call[name[self].get_width, parameter[name[node]]] name[video].height assign[=] call[name[self].get_height, parameter[name[node]]] name[video].src assign[=] call[name[self].get_src, parameter[name[node]]] name[video].provider assign[=] call[name[self].get_provider, parameter[name[video].src]] return[name[video]]
keyword[def] identifier[get_video] ( identifier[self] , identifier[node] ): literal[string] identifier[video] = identifier[Video] () identifier[video] . identifier[embed_code] = identifier[self] . identifier[get_embed_code] ( identifier[node] ) identifier[video] . identifier[embed_type] = identifier[self] . identifier[get_embed_type] ( identifier[node] ) identifier[video] . identifier[width] = identifier[self] . identifier[get_width] ( identifier[node] ) identifier[video] . identifier[height] = identifier[self] . identifier[get_height] ( identifier[node] ) identifier[video] . identifier[src] = identifier[self] . identifier[get_src] ( identifier[node] ) identifier[video] . identifier[provider] = identifier[self] . identifier[get_provider] ( identifier[video] . identifier[src] ) keyword[return] identifier[video]
def get_video(self, node): """ Create a video object from a video embed """ video = Video() video.embed_code = self.get_embed_code(node) video.embed_type = self.get_embed_type(node) video.width = self.get_width(node) video.height = self.get_height(node) video.src = self.get_src(node) video.provider = self.get_provider(video.src) return video
def calc_contriarea_v1(self): """Determine the relative size of the contributing area of the whole subbasin. Required control parameters: |NmbZones| |ZoneType| |RespArea| |FC| |Beta| Required derived parameter: |RelSoilArea| Required state sequence: |SM| Calculated fluxes sequences: |ContriArea| Basic equation: :math:`ContriArea = \\left( \\frac{SM}{FC} \\right)^{Beta}` Examples: Four zones are initialized, but only the first two zones of type field and forest are taken into account in the calculation of the relative contributing area of the catchment (even, if also glaciers contribute to the inflow of the upper zone layer): >>> from hydpy.models.hland import * >>> parameterstep('1d') >>> nmbzones(4) >>> zonetype(FIELD, FOREST, GLACIER, ILAKE) >>> beta(2.0) >>> fc(200.0) >>> resparea(True) >>> derived.relsoilarea(0.5) >>> derived.relsoilzonearea(1.0/3.0, 2.0/3.0, 0.0, 0.0) With a relative soil moisture of 100 % in the whole subbasin, the contributing area is also estimated as 100 %,... >>> states.sm = 200.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...and relative soil moistures of 0% result in an contributing area of 0 %: >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.0) With the given value 2 of the nonlinearity parameter Beta, soil moisture of 50 % results in a contributing area estimate of 25%: >>> states.sm = 100.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.25) Setting the response area option to False,... >>> resparea(False) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ... setting the soil area (total area of all field and forest zones in the subbasin) to zero..., >>> resparea(True) >>> derived.relsoilarea(0.0) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...or setting all field capacities to zero... 
>>> derived.relsoilarea(0.5) >>> fc(0.0) >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...leads to contributing area values of 100 %. """ con = self.parameters.control.fastaccess der = self.parameters.derived.fastaccess flu = self.sequences.fluxes.fastaccess sta = self.sequences.states.fastaccess if con.resparea and (der.relsoilarea > 0.): flu.contriarea = 0. for k in range(con.nmbzones): if con.zonetype[k] in (FIELD, FOREST): if con.fc[k] > 0.: flu.contriarea += (der.relsoilzonearea[k] * (sta.sm[k]/con.fc[k])**con.beta[k]) else: flu.contriarea += der.relsoilzonearea[k] else: flu.contriarea = 1.
def function[calc_contriarea_v1, parameter[self]]: constant[Determine the relative size of the contributing area of the whole subbasin. Required control parameters: |NmbZones| |ZoneType| |RespArea| |FC| |Beta| Required derived parameter: |RelSoilArea| Required state sequence: |SM| Calculated fluxes sequences: |ContriArea| Basic equation: :math:`ContriArea = \left( \frac{SM}{FC} \right)^{Beta}` Examples: Four zones are initialized, but only the first two zones of type field and forest are taken into account in the calculation of the relative contributing area of the catchment (even, if also glaciers contribute to the inflow of the upper zone layer): >>> from hydpy.models.hland import * >>> parameterstep('1d') >>> nmbzones(4) >>> zonetype(FIELD, FOREST, GLACIER, ILAKE) >>> beta(2.0) >>> fc(200.0) >>> resparea(True) >>> derived.relsoilarea(0.5) >>> derived.relsoilzonearea(1.0/3.0, 2.0/3.0, 0.0, 0.0) With a relative soil moisture of 100 % in the whole subbasin, the contributing area is also estimated as 100 %,... >>> states.sm = 200.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...and relative soil moistures of 0% result in an contributing area of 0 %: >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.0) With the given value 2 of the nonlinearity parameter Beta, soil moisture of 50 % results in a contributing area estimate of 25%: >>> states.sm = 100.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.25) Setting the response area option to False,... >>> resparea(False) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ... setting the soil area (total area of all field and forest zones in the subbasin) to zero..., >>> resparea(True) >>> derived.relsoilarea(0.0) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...or setting all field capacities to zero... 
>>> derived.relsoilarea(0.5) >>> fc(0.0) >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...leads to contributing area values of 100 %. ] variable[con] assign[=] name[self].parameters.control.fastaccess variable[der] assign[=] name[self].parameters.derived.fastaccess variable[flu] assign[=] name[self].sequences.fluxes.fastaccess variable[sta] assign[=] name[self].sequences.states.fastaccess if <ast.BoolOp object at 0x7da2044c2fe0> begin[:] name[flu].contriarea assign[=] constant[0.0] for taget[name[k]] in starred[call[name[range], parameter[name[con].nmbzones]]] begin[:] if compare[call[name[con].zonetype][name[k]] in tuple[[<ast.Name object at 0x7da2044c0100>, <ast.Name object at 0x7da2044c03a0>]]] begin[:] if compare[call[name[con].fc][name[k]] greater[>] constant[0.0]] begin[:] <ast.AugAssign object at 0x7da2044c1930>
keyword[def] identifier[calc_contriarea_v1] ( identifier[self] ): literal[string] identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess] identifier[der] = identifier[self] . identifier[parameters] . identifier[derived] . identifier[fastaccess] identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess] identifier[sta] = identifier[self] . identifier[sequences] . identifier[states] . identifier[fastaccess] keyword[if] identifier[con] . identifier[resparea] keyword[and] ( identifier[der] . identifier[relsoilarea] > literal[int] ): identifier[flu] . identifier[contriarea] = literal[int] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[con] . identifier[nmbzones] ): keyword[if] identifier[con] . identifier[zonetype] [ identifier[k] ] keyword[in] ( identifier[FIELD] , identifier[FOREST] ): keyword[if] identifier[con] . identifier[fc] [ identifier[k] ]> literal[int] : identifier[flu] . identifier[contriarea] +=( identifier[der] . identifier[relsoilzonearea] [ identifier[k] ]* ( identifier[sta] . identifier[sm] [ identifier[k] ]/ identifier[con] . identifier[fc] [ identifier[k] ])** identifier[con] . identifier[beta] [ identifier[k] ]) keyword[else] : identifier[flu] . identifier[contriarea] += identifier[der] . identifier[relsoilzonearea] [ identifier[k] ] keyword[else] : identifier[flu] . identifier[contriarea] = literal[int]
def calc_contriarea_v1(self): """Determine the relative size of the contributing area of the whole subbasin. Required control parameters: |NmbZones| |ZoneType| |RespArea| |FC| |Beta| Required derived parameter: |RelSoilArea| Required state sequence: |SM| Calculated fluxes sequences: |ContriArea| Basic equation: :math:`ContriArea = \\left( \\frac{SM}{FC} \\right)^{Beta}` Examples: Four zones are initialized, but only the first two zones of type field and forest are taken into account in the calculation of the relative contributing area of the catchment (even, if also glaciers contribute to the inflow of the upper zone layer): >>> from hydpy.models.hland import * >>> parameterstep('1d') >>> nmbzones(4) >>> zonetype(FIELD, FOREST, GLACIER, ILAKE) >>> beta(2.0) >>> fc(200.0) >>> resparea(True) >>> derived.relsoilarea(0.5) >>> derived.relsoilzonearea(1.0/3.0, 2.0/3.0, 0.0, 0.0) With a relative soil moisture of 100 % in the whole subbasin, the contributing area is also estimated as 100 %,... >>> states.sm = 200.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...and relative soil moistures of 0% result in an contributing area of 0 %: >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.0) With the given value 2 of the nonlinearity parameter Beta, soil moisture of 50 % results in a contributing area estimate of 25%: >>> states.sm = 100.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(0.25) Setting the response area option to False,... >>> resparea(False) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ... setting the soil area (total area of all field and forest zones in the subbasin) to zero..., >>> resparea(True) >>> derived.relsoilarea(0.0) >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...or setting all field capacities to zero... 
>>> derived.relsoilarea(0.5) >>> fc(0.0) >>> states.sm = 0.0 >>> model.calc_contriarea_v1() >>> fluxes.contriarea contriarea(1.0) ...leads to contributing area values of 100 %. """ con = self.parameters.control.fastaccess der = self.parameters.derived.fastaccess flu = self.sequences.fluxes.fastaccess sta = self.sequences.states.fastaccess if con.resparea and der.relsoilarea > 0.0: flu.contriarea = 0.0 for k in range(con.nmbzones): if con.zonetype[k] in (FIELD, FOREST): if con.fc[k] > 0.0: flu.contriarea += der.relsoilzonearea[k] * (sta.sm[k] / con.fc[k]) ** con.beta[k] # depends on [control=['if'], data=[]] else: flu.contriarea += der.relsoilzonearea[k] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['k']] # depends on [control=['if'], data=[]] else: flu.contriarea = 1.0
def service_data(self): """ Returns all introspected service data. If the data has been previously accessed, a memoized version of the data is returned. :returns: A dict of introspected service data :rtype: dict """ # Lean on the cache first. if self._loaded_service_data is not None: return self._loaded_service_data # We don't have a cache. Build it. self._loaded_service_data = self._introspect_service( # We care about the ``botocore.session`` here, not the # ``kotocore.session``. self.session.core_session, self.service_name ) # Clear out the API version, just in case. self._api_version = None return self._loaded_service_data
def function[service_data, parameter[self]]: constant[ Returns all introspected service data. If the data has been previously accessed, a memoized version of the data is returned. :returns: A dict of introspected service data :rtype: dict ] if compare[name[self]._loaded_service_data is_not constant[None]] begin[:] return[name[self]._loaded_service_data] name[self]._loaded_service_data assign[=] call[name[self]._introspect_service, parameter[name[self].session.core_session, name[self].service_name]] name[self]._api_version assign[=] constant[None] return[name[self]._loaded_service_data]
keyword[def] identifier[service_data] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_loaded_service_data] keyword[is] keyword[not] keyword[None] : keyword[return] identifier[self] . identifier[_loaded_service_data] identifier[self] . identifier[_loaded_service_data] = identifier[self] . identifier[_introspect_service] ( identifier[self] . identifier[session] . identifier[core_session] , identifier[self] . identifier[service_name] ) identifier[self] . identifier[_api_version] = keyword[None] keyword[return] identifier[self] . identifier[_loaded_service_data]
def service_data(self): """ Returns all introspected service data. If the data has been previously accessed, a memoized version of the data is returned. :returns: A dict of introspected service data :rtype: dict """ # Lean on the cache first. if self._loaded_service_data is not None: return self._loaded_service_data # depends on [control=['if'], data=[]] # We don't have a cache. Build it. # We care about the ``botocore.session`` here, not the # ``kotocore.session``. self._loaded_service_data = self._introspect_service(self.session.core_session, self.service_name) # Clear out the API version, just in case. self._api_version = None return self._loaded_service_data
def generate_ssh(self, server, args, configure): """ 异步同时执行SSH生成 generate ssh :param server: :param args: :param configure: :return: """ self.reset_server_env(server, configure) # chmod project root owner sudo('chown {user}:{user} -R {path}'.format( user=configure[server]['user'], path=bigdata_conf.project_root )) # generate ssh key if not exists('~/.ssh/id_rsa.pub'): run('ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa')
def function[generate_ssh, parameter[self, server, args, configure]]: constant[ 异步同时执行SSH生成 generate ssh :param server: :param args: :param configure: :return: ] call[name[self].reset_server_env, parameter[name[server], name[configure]]] call[name[sudo], parameter[call[constant[chown {user}:{user} -R {path}].format, parameter[]]]] if <ast.UnaryOp object at 0x7da2045663e0> begin[:] call[name[run], parameter[constant[ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa]]]
keyword[def] identifier[generate_ssh] ( identifier[self] , identifier[server] , identifier[args] , identifier[configure] ): literal[string] identifier[self] . identifier[reset_server_env] ( identifier[server] , identifier[configure] ) identifier[sudo] ( literal[string] . identifier[format] ( identifier[user] = identifier[configure] [ identifier[server] ][ literal[string] ], identifier[path] = identifier[bigdata_conf] . identifier[project_root] )) keyword[if] keyword[not] identifier[exists] ( literal[string] ): identifier[run] ( literal[string] )
def generate_ssh(self, server, args, configure): """ 异步同时执行SSH生成 generate ssh :param server: :param args: :param configure: :return: """ self.reset_server_env(server, configure) # chmod project root owner sudo('chown {user}:{user} -R {path}'.format(user=configure[server]['user'], path=bigdata_conf.project_root)) # generate ssh key if not exists('~/.ssh/id_rsa.pub'): run('ssh-keygen -t rsa -P "" -f ~/.ssh/id_rsa') # depends on [control=['if'], data=[]]
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto ''' Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ''' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. ' '\ntype is {}'.format(type(proto))) decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError( "Protobuf decoding consumed too few bytes: {} out of {}".format( decoded, len(s))) return proto
def function[_deserialize, parameter[s, proto]]: constant[ Parse bytes into a in-memory proto @params s is bytes containing serialized proto proto is a in-memory proto object @return The proto instance filled in by s ] if <ast.UnaryOp object at 0x7da18fe90460> begin[:] <ast.Raise object at 0x7da18fe93610> if <ast.UnaryOp object at 0x7da18fe93490> begin[:] <ast.Raise object at 0x7da18fe932b0> variable[decoded] assign[=] call[name[cast], parameter[call[name[Optional]][name[int]], call[name[proto].ParseFromString, parameter[name[s]]]]] if <ast.BoolOp object at 0x7da18fe91bd0> begin[:] <ast.Raise object at 0x7da18fe93bb0> return[name[proto]]
keyword[def] identifier[_deserialize] ( identifier[s] , identifier[proto] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[s] , identifier[bytes] ): keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[type] ( identifier[s] ))) keyword[if] keyword[not] ( identifier[hasattr] ( identifier[proto] , literal[string] ) keyword[and] identifier[callable] ( identifier[proto] . identifier[ParseFromString] )): keyword[raise] identifier[ValueError] ( literal[string] literal[string] . identifier[format] ( identifier[type] ( identifier[proto] ))) identifier[decoded] = identifier[cast] ( identifier[Optional] [ identifier[int] ], identifier[proto] . identifier[ParseFromString] ( identifier[s] )) keyword[if] identifier[decoded] keyword[is] keyword[not] keyword[None] keyword[and] identifier[decoded] != identifier[len] ( identifier[s] ): keyword[raise] identifier[google] . identifier[protobuf] . identifier[message] . identifier[DecodeError] ( literal[string] . identifier[format] ( identifier[decoded] , identifier[len] ( identifier[s] ))) keyword[return] identifier[proto]
def _deserialize(s, proto): # type: (bytes, _Proto) -> _Proto '\n Parse bytes into a in-memory proto\n\n @params\n s is bytes containing serialized proto\n proto is a in-memory proto object\n\n @return\n The proto instance filled in by s\n ' if not isinstance(s, bytes): raise ValueError('Parameter s must be bytes, but got type: {}'.format(type(s))) # depends on [control=['if'], data=[]] if not (hasattr(proto, 'ParseFromString') and callable(proto.ParseFromString)): raise ValueError('No ParseFromString method is detected. \ntype is {}'.format(type(proto))) # depends on [control=['if'], data=[]] decoded = cast(Optional[int], proto.ParseFromString(s)) if decoded is not None and decoded != len(s): raise google.protobuf.message.DecodeError('Protobuf decoding consumed too few bytes: {} out of {}'.format(decoded, len(s))) # depends on [control=['if'], data=[]] return proto
def recipe_velocity(adata, min_counts=3, min_counts_u=3, n_top_genes=None, n_pcs=30, n_neighbors=30, log=True, copy=False): """Runs pp.filter_and_normalize() and pp.moments() """ from .moments import moments filter_and_normalize(adata, min_counts=min_counts, min_counts_u=min_counts_u, n_top_genes=n_top_genes, log=log) moments(adata, n_neighbors=n_neighbors, n_pcs=n_pcs) return adata if copy else None
def function[recipe_velocity, parameter[adata, min_counts, min_counts_u, n_top_genes, n_pcs, n_neighbors, log, copy]]: constant[Runs pp.filter_and_normalize() and pp.moments() ] from relative_module[moments] import module[moments] call[name[filter_and_normalize], parameter[name[adata]]] call[name[moments], parameter[name[adata]]] return[<ast.IfExp object at 0x7da18dc06350>]
keyword[def] identifier[recipe_velocity] ( identifier[adata] , identifier[min_counts] = literal[int] , identifier[min_counts_u] = literal[int] , identifier[n_top_genes] = keyword[None] , identifier[n_pcs] = literal[int] , identifier[n_neighbors] = literal[int] , identifier[log] = keyword[True] , identifier[copy] = keyword[False] ): literal[string] keyword[from] . identifier[moments] keyword[import] identifier[moments] identifier[filter_and_normalize] ( identifier[adata] , identifier[min_counts] = identifier[min_counts] , identifier[min_counts_u] = identifier[min_counts_u] , identifier[n_top_genes] = identifier[n_top_genes] , identifier[log] = identifier[log] ) identifier[moments] ( identifier[adata] , identifier[n_neighbors] = identifier[n_neighbors] , identifier[n_pcs] = identifier[n_pcs] ) keyword[return] identifier[adata] keyword[if] identifier[copy] keyword[else] keyword[None]
def recipe_velocity(adata, min_counts=3, min_counts_u=3, n_top_genes=None, n_pcs=30, n_neighbors=30, log=True, copy=False): """Runs pp.filter_and_normalize() and pp.moments() """ from .moments import moments filter_and_normalize(adata, min_counts=min_counts, min_counts_u=min_counts_u, n_top_genes=n_top_genes, log=log) moments(adata, n_neighbors=n_neighbors, n_pcs=n_pcs) return adata if copy else None
def _stream_docker_logs(self): """Stream stdout and stderr from the task container to this process's stdout and stderr, respectively. """ thread = threading.Thread(target=self._stderr_stream_worker) thread.start() for line in self.docker_client.logs(self.container, stdout=True, stderr=False, stream=True): sys.stdout.write(line) thread.join()
def function[_stream_docker_logs, parameter[self]]: constant[Stream stdout and stderr from the task container to this process's stdout and stderr, respectively. ] variable[thread] assign[=] call[name[threading].Thread, parameter[]] call[name[thread].start, parameter[]] for taget[name[line]] in starred[call[name[self].docker_client.logs, parameter[name[self].container]]] begin[:] call[name[sys].stdout.write, parameter[name[line]]] call[name[thread].join, parameter[]]
keyword[def] identifier[_stream_docker_logs] ( identifier[self] ): literal[string] identifier[thread] = identifier[threading] . identifier[Thread] ( identifier[target] = identifier[self] . identifier[_stderr_stream_worker] ) identifier[thread] . identifier[start] () keyword[for] identifier[line] keyword[in] identifier[self] . identifier[docker_client] . identifier[logs] ( identifier[self] . identifier[container] , identifier[stdout] = keyword[True] , identifier[stderr] = keyword[False] , identifier[stream] = keyword[True] ): identifier[sys] . identifier[stdout] . identifier[write] ( identifier[line] ) identifier[thread] . identifier[join] ()
def _stream_docker_logs(self): """Stream stdout and stderr from the task container to this process's stdout and stderr, respectively. """ thread = threading.Thread(target=self._stderr_stream_worker) thread.start() for line in self.docker_client.logs(self.container, stdout=True, stderr=False, stream=True): sys.stdout.write(line) # depends on [control=['for'], data=['line']] thread.join()
def risearch(self): "instance of :class:`eulfedora.api.ResourceIndex`, with the same root url and credentials" if self._risearch is None: self._risearch = ResourceIndex(self.fedora_root, self.username, self.password) return self._risearch
def function[risearch, parameter[self]]: constant[instance of :class:`eulfedora.api.ResourceIndex`, with the same root url and credentials] if compare[name[self]._risearch is constant[None]] begin[:] name[self]._risearch assign[=] call[name[ResourceIndex], parameter[name[self].fedora_root, name[self].username, name[self].password]] return[name[self]._risearch]
keyword[def] identifier[risearch] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_risearch] keyword[is] keyword[None] : identifier[self] . identifier[_risearch] = identifier[ResourceIndex] ( identifier[self] . identifier[fedora_root] , identifier[self] . identifier[username] , identifier[self] . identifier[password] ) keyword[return] identifier[self] . identifier[_risearch]
def risearch(self): """instance of :class:`eulfedora.api.ResourceIndex`, with the same root url and credentials""" if self._risearch is None: self._risearch = ResourceIndex(self.fedora_root, self.username, self.password) # depends on [control=['if'], data=[]] return self._risearch
def precision_series(y_true, y_score, k=None): """ Returns series of length k whose i-th entry is the precision in the top i TODO: extrapolate here """ y_true, y_score = to_float(y_true, y_score) top = _argsort(y_score, k) n = np.nan_to_num(y_true[top]).cumsum() # fill missing labels with 0 d = (~np.isnan(y_true[top])).cumsum() # count number of labels return pd.Series(n/d, index=np.arange(1, len(n)+1))
def function[precision_series, parameter[y_true, y_score, k]]: constant[ Returns series of length k whose i-th entry is the precision in the top i TODO: extrapolate here ] <ast.Tuple object at 0x7da1b24c2170> assign[=] call[name[to_float], parameter[name[y_true], name[y_score]]] variable[top] assign[=] call[name[_argsort], parameter[name[y_score], name[k]]] variable[n] assign[=] call[call[name[np].nan_to_num, parameter[call[name[y_true]][name[top]]]].cumsum, parameter[]] variable[d] assign[=] call[<ast.UnaryOp object at 0x7da1b2447490>.cumsum, parameter[]] return[call[name[pd].Series, parameter[binary_operation[name[n] / name[d]]]]]
keyword[def] identifier[precision_series] ( identifier[y_true] , identifier[y_score] , identifier[k] = keyword[None] ): literal[string] identifier[y_true] , identifier[y_score] = identifier[to_float] ( identifier[y_true] , identifier[y_score] ) identifier[top] = identifier[_argsort] ( identifier[y_score] , identifier[k] ) identifier[n] = identifier[np] . identifier[nan_to_num] ( identifier[y_true] [ identifier[top] ]). identifier[cumsum] () identifier[d] =(~ identifier[np] . identifier[isnan] ( identifier[y_true] [ identifier[top] ])). identifier[cumsum] () keyword[return] identifier[pd] . identifier[Series] ( identifier[n] / identifier[d] , identifier[index] = identifier[np] . identifier[arange] ( literal[int] , identifier[len] ( identifier[n] )+ literal[int] ))
def precision_series(y_true, y_score, k=None): """ Returns series of length k whose i-th entry is the precision in the top i TODO: extrapolate here """ (y_true, y_score) = to_float(y_true, y_score) top = _argsort(y_score, k) n = np.nan_to_num(y_true[top]).cumsum() # fill missing labels with 0 d = (~np.isnan(y_true[top])).cumsum() # count number of labels return pd.Series(n / d, index=np.arange(1, len(n) + 1))
def datetimeobj_YmdHMS(value): """Convert timestamp string to a datetime object. Timestamps strings like '20130618120000' are able to be converted by this function. Args: value: A timestamp string in the format '%Y%m%d%H%M%S'. Returns: A datetime object. Raises: ValueError: If timestamp is invalid. Note: The timezone is assumed to be UTC/GMT. """ i = int(value) S = i M = S//100 H = M//100 d = H//100 m = d//100 Y = m//100 return datetime.datetime( Y % 10000, m % 100, d % 100, H % 100, M % 100, S % 100, tzinfo=TZ_GMT )
def function[datetimeobj_YmdHMS, parameter[value]]: constant[Convert timestamp string to a datetime object. Timestamps strings like '20130618120000' are able to be converted by this function. Args: value: A timestamp string in the format '%Y%m%d%H%M%S'. Returns: A datetime object. Raises: ValueError: If timestamp is invalid. Note: The timezone is assumed to be UTC/GMT. ] variable[i] assign[=] call[name[int], parameter[name[value]]] variable[S] assign[=] name[i] variable[M] assign[=] binary_operation[name[S] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] variable[H] assign[=] binary_operation[name[M] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] variable[d] assign[=] binary_operation[name[H] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] variable[m] assign[=] binary_operation[name[d] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] variable[Y] assign[=] binary_operation[name[m] <ast.FloorDiv object at 0x7da2590d6bc0> constant[100]] return[call[name[datetime].datetime, parameter[binary_operation[name[Y] <ast.Mod object at 0x7da2590d6920> constant[10000]], binary_operation[name[m] <ast.Mod object at 0x7da2590d6920> constant[100]], binary_operation[name[d] <ast.Mod object at 0x7da2590d6920> constant[100]], binary_operation[name[H] <ast.Mod object at 0x7da2590d6920> constant[100]], binary_operation[name[M] <ast.Mod object at 0x7da2590d6920> constant[100]], binary_operation[name[S] <ast.Mod object at 0x7da2590d6920> constant[100]]]]]
keyword[def] identifier[datetimeobj_YmdHMS] ( identifier[value] ): literal[string] identifier[i] = identifier[int] ( identifier[value] ) identifier[S] = identifier[i] identifier[M] = identifier[S] // literal[int] identifier[H] = identifier[M] // literal[int] identifier[d] = identifier[H] // literal[int] identifier[m] = identifier[d] // literal[int] identifier[Y] = identifier[m] // literal[int] keyword[return] identifier[datetime] . identifier[datetime] ( identifier[Y] % literal[int] , identifier[m] % literal[int] , identifier[d] % literal[int] , identifier[H] % literal[int] , identifier[M] % literal[int] , identifier[S] % literal[int] , identifier[tzinfo] = identifier[TZ_GMT] )
def datetimeobj_YmdHMS(value): """Convert timestamp string to a datetime object. Timestamps strings like '20130618120000' are able to be converted by this function. Args: value: A timestamp string in the format '%Y%m%d%H%M%S'. Returns: A datetime object. Raises: ValueError: If timestamp is invalid. Note: The timezone is assumed to be UTC/GMT. """ i = int(value) S = i M = S // 100 H = M // 100 d = H // 100 m = d // 100 Y = m // 100 return datetime.datetime(Y % 10000, m % 100, d % 100, H % 100, M % 100, S % 100, tzinfo=TZ_GMT)
def update(self, values, copy_instance=False): """ Updates the configuration with the contents of the given configuration object or dictionary. In case of a dictionary, only valid attributes for this class are considered. Existing attributes are replaced with the new values. The object is not cleaned before or after, i.e. may accept invalid input. In case of an update by object, that object is cleaned before the update, so that updated values should be validated. However, already-stored values are not cleaned before or after. :param values: Dictionary or ConfigurationObject to update this configuration with. :type values: dict | ConfigurationObject :param copy_instance: Copies lists and dictionaries. Only has an effect if ``values`` is a ConfigurationObject. :type copy_instance: bool """ if isinstance(values, self.__class__): self.update_from_obj(values, copy=copy_instance) elif isinstance(values, dict): self.update_from_dict(values) else: raise ValueError("{0} or dictionary expected; found '{1}'.".format(self.__class__.__name__, type(values).__name__))
def function[update, parameter[self, values, copy_instance]]: constant[ Updates the configuration with the contents of the given configuration object or dictionary. In case of a dictionary, only valid attributes for this class are considered. Existing attributes are replaced with the new values. The object is not cleaned before or after, i.e. may accept invalid input. In case of an update by object, that object is cleaned before the update, so that updated values should be validated. However, already-stored values are not cleaned before or after. :param values: Dictionary or ConfigurationObject to update this configuration with. :type values: dict | ConfigurationObject :param copy_instance: Copies lists and dictionaries. Only has an effect if ``values`` is a ConfigurationObject. :type copy_instance: bool ] if call[name[isinstance], parameter[name[values], name[self].__class__]] begin[:] call[name[self].update_from_obj, parameter[name[values]]]
keyword[def] identifier[update] ( identifier[self] , identifier[values] , identifier[copy_instance] = keyword[False] ): literal[string] keyword[if] identifier[isinstance] ( identifier[values] , identifier[self] . identifier[__class__] ): identifier[self] . identifier[update_from_obj] ( identifier[values] , identifier[copy] = identifier[copy_instance] ) keyword[elif] identifier[isinstance] ( identifier[values] , identifier[dict] ): identifier[self] . identifier[update_from_dict] ( identifier[values] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[self] . identifier[__class__] . identifier[__name__] , identifier[type] ( identifier[values] ). identifier[__name__] ))
def update(self, values, copy_instance=False): """ Updates the configuration with the contents of the given configuration object or dictionary. In case of a dictionary, only valid attributes for this class are considered. Existing attributes are replaced with the new values. The object is not cleaned before or after, i.e. may accept invalid input. In case of an update by object, that object is cleaned before the update, so that updated values should be validated. However, already-stored values are not cleaned before or after. :param values: Dictionary or ConfigurationObject to update this configuration with. :type values: dict | ConfigurationObject :param copy_instance: Copies lists and dictionaries. Only has an effect if ``values`` is a ConfigurationObject. :type copy_instance: bool """ if isinstance(values, self.__class__): self.update_from_obj(values, copy=copy_instance) # depends on [control=['if'], data=[]] elif isinstance(values, dict): self.update_from_dict(values) # depends on [control=['if'], data=[]] else: raise ValueError("{0} or dictionary expected; found '{1}'.".format(self.__class__.__name__, type(values).__name__))
def _crc8(self, buffer): """ Polynomial 0x31 (x8 + x5 +x4 +1) """ polynomial = 0x31; crc = 0xFF; index = 0 for index in range(0, len(buffer)): crc ^= buffer[index] for i in range(8, 0, -1): if crc & 0x80: crc = (crc << 1) ^ polynomial else: crc = (crc << 1) return crc & 0xFF
def function[_crc8, parameter[self, buffer]]: constant[ Polynomial 0x31 (x8 + x5 +x4 +1) ] variable[polynomial] assign[=] constant[49] variable[crc] assign[=] constant[255] variable[index] assign[=] constant[0] for taget[name[index]] in starred[call[name[range], parameter[constant[0], call[name[len], parameter[name[buffer]]]]]] begin[:] <ast.AugAssign object at 0x7da207f9a1a0> for taget[name[i]] in starred[call[name[range], parameter[constant[8], constant[0], <ast.UnaryOp object at 0x7da207f996f0>]]] begin[:] if binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[128]] begin[:] variable[crc] assign[=] binary_operation[binary_operation[name[crc] <ast.LShift object at 0x7da2590d69e0> constant[1]] <ast.BitXor object at 0x7da2590d6b00> name[polynomial]] return[binary_operation[name[crc] <ast.BitAnd object at 0x7da2590d6b60> constant[255]]]
keyword[def] identifier[_crc8] ( identifier[self] , identifier[buffer] ): literal[string] identifier[polynomial] = literal[int] ; identifier[crc] = literal[int] ; identifier[index] = literal[int] keyword[for] identifier[index] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[buffer] )): identifier[crc] ^= identifier[buffer] [ identifier[index] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ,- literal[int] ): keyword[if] identifier[crc] & literal[int] : identifier[crc] =( identifier[crc] << literal[int] )^ identifier[polynomial] keyword[else] : identifier[crc] =( identifier[crc] << literal[int] ) keyword[return] identifier[crc] & literal[int]
def _crc8(self, buffer): """ Polynomial 0x31 (x8 + x5 +x4 +1) """ polynomial = 49 crc = 255 index = 0 for index in range(0, len(buffer)): crc ^= buffer[index] for i in range(8, 0, -1): if crc & 128: crc = crc << 1 ^ polynomial # depends on [control=['if'], data=[]] else: crc = crc << 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['index']] return crc & 255
def get_xstatic_dirs(XSTATIC_MODULES, HORIZON_CONFIG): """Discover static file configuration of the xstatic modules. For each entry in the XSTATIC_MODULES list we determine the entry point files (which may come from the xstatic MAIN var) and then determine where in the Django static tree the xstatic package's contents should be placed. For jquery.bootstrap.wizard.js the module name is None the static file is actually a 3rd-party file but resides in the Horizon source tree and not an xstatic package. The xstatic.pkg.jquery_ui package had its contents moved by packagers so it must be handled as a special case. """ STATICFILES_DIRS = [] HORIZON_CONFIG.setdefault('xstatic_lib_files', []) for module_name, files in XSTATIC_MODULES: module = import_module(module_name) if module_name == 'xstatic.pkg.jquery_ui': # determine the correct path for jquery-ui which packagers moved if module.VERSION.startswith('1.10.'): # The 1.10.x versions already contain 'ui' directory. files = ['ui/' + files[0]] STATICFILES_DIRS.append( ('horizon/lib/' + module.NAME, module.BASE_DIR) ) # pull the file entry points from the xstatic package MAIN if possible if hasattr(module, 'MAIN'): files = module.MAIN if not isinstance(files, list): files = [files] # just the Javascript files, please (don't <script> css, etc # which is explicitly included in style/themes as appropriate) files = [file for file in files if file.endswith('.js')] # add to the list of files to link in the HTML try: for file in files: file = 'horizon/lib/' + module.NAME + '/' + file HORIZON_CONFIG['xstatic_lib_files'].append(file) except TypeError: raise Exception( '%s: Nothing to include because files to include are not ' 'defined (i.e., None) in BASE_XSTATIC_MODULES list and ' 'a corresponding XStatic module does not define MAIN list.' % module_name) return STATICFILES_DIRS
def function[get_xstatic_dirs, parameter[XSTATIC_MODULES, HORIZON_CONFIG]]: constant[Discover static file configuration of the xstatic modules. For each entry in the XSTATIC_MODULES list we determine the entry point files (which may come from the xstatic MAIN var) and then determine where in the Django static tree the xstatic package's contents should be placed. For jquery.bootstrap.wizard.js the module name is None the static file is actually a 3rd-party file but resides in the Horizon source tree and not an xstatic package. The xstatic.pkg.jquery_ui package had its contents moved by packagers so it must be handled as a special case. ] variable[STATICFILES_DIRS] assign[=] list[[]] call[name[HORIZON_CONFIG].setdefault, parameter[constant[xstatic_lib_files], list[[]]]] for taget[tuple[[<ast.Name object at 0x7da18ede42e0>, <ast.Name object at 0x7da18ede5ba0>]]] in starred[name[XSTATIC_MODULES]] begin[:] variable[module] assign[=] call[name[import_module], parameter[name[module_name]]] if compare[name[module_name] equal[==] constant[xstatic.pkg.jquery_ui]] begin[:] if call[name[module].VERSION.startswith, parameter[constant[1.10.]]] begin[:] variable[files] assign[=] list[[<ast.BinOp object at 0x7da18ede5ff0>]] call[name[STATICFILES_DIRS].append, parameter[tuple[[<ast.BinOp object at 0x7da18ede4fd0>, <ast.Attribute object at 0x7da18ede7400>]]]] if call[name[hasattr], parameter[name[module], constant[MAIN]]] begin[:] variable[files] assign[=] name[module].MAIN if <ast.UnaryOp object at 0x7da18ede6410> begin[:] variable[files] assign[=] list[[<ast.Name object at 0x7da18ede6d10>]] variable[files] assign[=] <ast.ListComp object at 0x7da18ede6f50> <ast.Try object at 0x7da1b1906a10> return[name[STATICFILES_DIRS]]
keyword[def] identifier[get_xstatic_dirs] ( identifier[XSTATIC_MODULES] , identifier[HORIZON_CONFIG] ): literal[string] identifier[STATICFILES_DIRS] =[] identifier[HORIZON_CONFIG] . identifier[setdefault] ( literal[string] ,[]) keyword[for] identifier[module_name] , identifier[files] keyword[in] identifier[XSTATIC_MODULES] : identifier[module] = identifier[import_module] ( identifier[module_name] ) keyword[if] identifier[module_name] == literal[string] : keyword[if] identifier[module] . identifier[VERSION] . identifier[startswith] ( literal[string] ): identifier[files] =[ literal[string] + identifier[files] [ literal[int] ]] identifier[STATICFILES_DIRS] . identifier[append] ( ( literal[string] + identifier[module] . identifier[NAME] , identifier[module] . identifier[BASE_DIR] ) ) keyword[if] identifier[hasattr] ( identifier[module] , literal[string] ): identifier[files] = identifier[module] . identifier[MAIN] keyword[if] keyword[not] identifier[isinstance] ( identifier[files] , identifier[list] ): identifier[files] =[ identifier[files] ] identifier[files] =[ identifier[file] keyword[for] identifier[file] keyword[in] identifier[files] keyword[if] identifier[file] . identifier[endswith] ( literal[string] )] keyword[try] : keyword[for] identifier[file] keyword[in] identifier[files] : identifier[file] = literal[string] + identifier[module] . identifier[NAME] + literal[string] + identifier[file] identifier[HORIZON_CONFIG] [ literal[string] ]. identifier[append] ( identifier[file] ) keyword[except] identifier[TypeError] : keyword[raise] identifier[Exception] ( literal[string] literal[string] literal[string] % identifier[module_name] ) keyword[return] identifier[STATICFILES_DIRS]
def get_xstatic_dirs(XSTATIC_MODULES, HORIZON_CONFIG): """Discover static file configuration of the xstatic modules. For each entry in the XSTATIC_MODULES list we determine the entry point files (which may come from the xstatic MAIN var) and then determine where in the Django static tree the xstatic package's contents should be placed. For jquery.bootstrap.wizard.js the module name is None the static file is actually a 3rd-party file but resides in the Horizon source tree and not an xstatic package. The xstatic.pkg.jquery_ui package had its contents moved by packagers so it must be handled as a special case. """ STATICFILES_DIRS = [] HORIZON_CONFIG.setdefault('xstatic_lib_files', []) for (module_name, files) in XSTATIC_MODULES: module = import_module(module_name) if module_name == 'xstatic.pkg.jquery_ui': # determine the correct path for jquery-ui which packagers moved if module.VERSION.startswith('1.10.'): # The 1.10.x versions already contain 'ui' directory. files = ['ui/' + files[0]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] STATICFILES_DIRS.append(('horizon/lib/' + module.NAME, module.BASE_DIR)) # pull the file entry points from the xstatic package MAIN if possible if hasattr(module, 'MAIN'): files = module.MAIN if not isinstance(files, list): files = [files] # depends on [control=['if'], data=[]] # just the Javascript files, please (don't <script> css, etc # which is explicitly included in style/themes as appropriate) files = [file for file in files if file.endswith('.js')] # depends on [control=['if'], data=[]] # add to the list of files to link in the HTML try: for file in files: file = 'horizon/lib/' + module.NAME + '/' + file HORIZON_CONFIG['xstatic_lib_files'].append(file) # depends on [control=['for'], data=['file']] # depends on [control=['try'], data=[]] except TypeError: raise Exception('%s: Nothing to include because files to include are not defined (i.e., None) in BASE_XSTATIC_MODULES list and a corresponding 
XStatic module does not define MAIN list.' % module_name) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] return STATICFILES_DIRS
def CSS_setRuleSelector(self, styleSheetId, range, selector): """ Function path: CSS.setRuleSelector Domain: CSS Method name: setRuleSelector Parameters: Required arguments: 'styleSheetId' (type: StyleSheetId) -> No description 'range' (type: SourceRange) -> No description 'selector' (type: string) -> No description Returns: 'selectorList' (type: SelectorList) -> The resulting selector list after modification. Description: Modifies the rule selector. """ assert isinstance(selector, (str,) ), "Argument 'selector' must be of type '['str']'. Received type: '%s'" % type( selector) subdom_funcs = self.synchronous_command('CSS.setRuleSelector', styleSheetId=styleSheetId, range=range, selector=selector) return subdom_funcs
def function[CSS_setRuleSelector, parameter[self, styleSheetId, range, selector]]: constant[ Function path: CSS.setRuleSelector Domain: CSS Method name: setRuleSelector Parameters: Required arguments: 'styleSheetId' (type: StyleSheetId) -> No description 'range' (type: SourceRange) -> No description 'selector' (type: string) -> No description Returns: 'selectorList' (type: SelectorList) -> The resulting selector list after modification. Description: Modifies the rule selector. ] assert[call[name[isinstance], parameter[name[selector], tuple[[<ast.Name object at 0x7da1b101f3a0>]]]]] variable[subdom_funcs] assign[=] call[name[self].synchronous_command, parameter[constant[CSS.setRuleSelector]]] return[name[subdom_funcs]]
keyword[def] identifier[CSS_setRuleSelector] ( identifier[self] , identifier[styleSheetId] , identifier[range] , identifier[selector] ): literal[string] keyword[assert] identifier[isinstance] ( identifier[selector] ,( identifier[str] ,) ), literal[string] % identifier[type] ( identifier[selector] ) identifier[subdom_funcs] = identifier[self] . identifier[synchronous_command] ( literal[string] , identifier[styleSheetId] = identifier[styleSheetId] , identifier[range] = identifier[range] , identifier[selector] = identifier[selector] ) keyword[return] identifier[subdom_funcs]
def CSS_setRuleSelector(self, styleSheetId, range, selector): """ Function path: CSS.setRuleSelector Domain: CSS Method name: setRuleSelector Parameters: Required arguments: 'styleSheetId' (type: StyleSheetId) -> No description 'range' (type: SourceRange) -> No description 'selector' (type: string) -> No description Returns: 'selectorList' (type: SelectorList) -> The resulting selector list after modification. Description: Modifies the rule selector. """ assert isinstance(selector, (str,)), "Argument 'selector' must be of type '['str']'. Received type: '%s'" % type(selector) subdom_funcs = self.synchronous_command('CSS.setRuleSelector', styleSheetId=styleSheetId, range=range, selector=selector) return subdom_funcs
def _sub(cmd, *sections): """Build Subcmd instance.""" cmd_func = cmd if isfunction(cmd) else cmd.cmd return Subcmd(baredoc(cmd), *sections, func=cmd_func)
def function[_sub, parameter[cmd]]: constant[Build Subcmd instance.] variable[cmd_func] assign[=] <ast.IfExp object at 0x7da18f58f2e0> return[call[name[Subcmd], parameter[call[name[baredoc], parameter[name[cmd]]], <ast.Starred object at 0x7da18f58ca90>]]]
keyword[def] identifier[_sub] ( identifier[cmd] ,* identifier[sections] ): literal[string] identifier[cmd_func] = identifier[cmd] keyword[if] identifier[isfunction] ( identifier[cmd] ) keyword[else] identifier[cmd] . identifier[cmd] keyword[return] identifier[Subcmd] ( identifier[baredoc] ( identifier[cmd] ),* identifier[sections] , identifier[func] = identifier[cmd_func] )
def _sub(cmd, *sections): """Build Subcmd instance.""" cmd_func = cmd if isfunction(cmd) else cmd.cmd return Subcmd(baredoc(cmd), *sections, func=cmd_func)
def config(self): """Read the configuration variables and returns them as a dictionary :rtype: dictionary :Example: >>> alpha.config() { 'BPD 13': 1.6499, 'BPD 12': 1.6499, 'BPD 11': 1.6499, 'BPD 10': 1.6499, 'BPD 15': 1.6499, 'BPD 14': 1.6499, 'BSVW 15': 1.0, ... } """ config = [] data = {} # Send the command byte and sleep for 10 ms self.cnxn.xfer([0x3C]) sleep(10e-3) # Read the config variables by sending 256 empty bytes for i in range(256): resp = self.cnxn.xfer([0x00])[0] config.append(resp) # Add the bin bounds to the dictionary of data [bytes 0-29] for i in range(0, 15): data["Bin Boundary {0}".format(i)] = self._16bit_unsigned(config[2*i], config[2*i + 1]) # Add the Bin Particle Volumes (BPV) [bytes 32-95] for i in range(0, 16): data["BPV {0}".format(i)] = self._calculate_float(config[4*i + 32:4*i + 36]) # Add the Bin Particle Densities (BPD) [bytes 96-159] for i in range(0, 16): data["BPD {0}".format(i)] = self._calculate_float(config[4*i + 96:4*i + 100]) # Add the Bin Sample Volume Weight (BSVW) [bytes 160-223] for i in range(0, 16): data["BSVW {0}".format(i)] = self._calculate_float(config[4*i + 160: 4*i + 164]) # Add the Gain Scaling Coefficient (GSC) and sample flow rate (SFR) data["GSC"] = self._calculate_float(config[224:228]) data["SFR"] = self._calculate_float(config[228:232]) # Add laser dac (LDAC) and Fan dac (FanDAC) data["LaserDAC"] = config[232] data["FanDAC"] = config[233] # If past firmware 15, add other things if self.firmware['major'] > 15.: data['TOF_SFR'] = config[234] sleep(0.1) return data
def function[config, parameter[self]]: constant[Read the configuration variables and returns them as a dictionary :rtype: dictionary :Example: >>> alpha.config() { 'BPD 13': 1.6499, 'BPD 12': 1.6499, 'BPD 11': 1.6499, 'BPD 10': 1.6499, 'BPD 15': 1.6499, 'BPD 14': 1.6499, 'BSVW 15': 1.0, ... } ] variable[config] assign[=] list[[]] variable[data] assign[=] dictionary[[], []] call[name[self].cnxn.xfer, parameter[list[[<ast.Constant object at 0x7da1aff1d720>]]]] call[name[sleep], parameter[constant[0.01]]] for taget[name[i]] in starred[call[name[range], parameter[constant[256]]]] begin[:] variable[resp] assign[=] call[call[name[self].cnxn.xfer, parameter[list[[<ast.Constant object at 0x7da1aff1f1c0>]]]]][constant[0]] call[name[config].append, parameter[name[resp]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[15]]]] begin[:] call[name[data]][call[constant[Bin Boundary {0}].format, parameter[name[i]]]] assign[=] call[name[self]._16bit_unsigned, parameter[call[name[config]][binary_operation[constant[2] * name[i]]], call[name[config]][binary_operation[binary_operation[constant[2] * name[i]] + constant[1]]]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[16]]]] begin[:] call[name[data]][call[constant[BPV {0}].format, parameter[name[i]]]] assign[=] call[name[self]._calculate_float, parameter[call[name[config]][<ast.Slice object at 0x7da1aff1db70>]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[16]]]] begin[:] call[name[data]][call[constant[BPD {0}].format, parameter[name[i]]]] assign[=] call[name[self]._calculate_float, parameter[call[name[config]][<ast.Slice object at 0x7da1aff1fcd0>]]] for taget[name[i]] in starred[call[name[range], parameter[constant[0], constant[16]]]] begin[:] call[name[data]][call[constant[BSVW {0}].format, parameter[name[i]]]] assign[=] call[name[self]._calculate_float, parameter[call[name[config]][<ast.Slice object at 0x7da1aff1f220>]]] 
call[name[data]][constant[GSC]] assign[=] call[name[self]._calculate_float, parameter[call[name[config]][<ast.Slice object at 0x7da1aff1f1f0>]]] call[name[data]][constant[SFR]] assign[=] call[name[self]._calculate_float, parameter[call[name[config]][<ast.Slice object at 0x7da1aff1f010>]]] call[name[data]][constant[LaserDAC]] assign[=] call[name[config]][constant[232]] call[name[data]][constant[FanDAC]] assign[=] call[name[config]][constant[233]] if compare[call[name[self].firmware][constant[major]] greater[>] constant[15.0]] begin[:] call[name[data]][constant[TOF_SFR]] assign[=] call[name[config]][constant[234]] call[name[sleep], parameter[constant[0.1]]] return[name[data]]
keyword[def] identifier[config] ( identifier[self] ): literal[string] identifier[config] =[] identifier[data] ={} identifier[self] . identifier[cnxn] . identifier[xfer] ([ literal[int] ]) identifier[sleep] ( literal[int] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[resp] = identifier[self] . identifier[cnxn] . identifier[xfer] ([ literal[int] ])[ literal[int] ] identifier[config] . identifier[append] ( identifier[resp] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[self] . identifier[_16bit_unsigned] ( identifier[config] [ literal[int] * identifier[i] ], identifier[config] [ literal[int] * identifier[i] + literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[self] . identifier[_calculate_float] ( identifier[config] [ literal[int] * identifier[i] + literal[int] : literal[int] * identifier[i] + literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[self] . identifier[_calculate_float] ( identifier[config] [ literal[int] * identifier[i] + literal[int] : literal[int] * identifier[i] + literal[int] ]) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[data] [ literal[string] . identifier[format] ( identifier[i] )]= identifier[self] . identifier[_calculate_float] ( identifier[config] [ literal[int] * identifier[i] + literal[int] : literal[int] * identifier[i] + literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . identifier[_calculate_float] ( identifier[config] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[self] . 
identifier[_calculate_float] ( identifier[config] [ literal[int] : literal[int] ]) identifier[data] [ literal[string] ]= identifier[config] [ literal[int] ] identifier[data] [ literal[string] ]= identifier[config] [ literal[int] ] keyword[if] identifier[self] . identifier[firmware] [ literal[string] ]> literal[int] : identifier[data] [ literal[string] ]= identifier[config] [ literal[int] ] identifier[sleep] ( literal[int] ) keyword[return] identifier[data]
def config(self): """Read the configuration variables and returns them as a dictionary :rtype: dictionary :Example: >>> alpha.config() { 'BPD 13': 1.6499, 'BPD 12': 1.6499, 'BPD 11': 1.6499, 'BPD 10': 1.6499, 'BPD 15': 1.6499, 'BPD 14': 1.6499, 'BSVW 15': 1.0, ... } """ config = [] data = {} # Send the command byte and sleep for 10 ms self.cnxn.xfer([60]) sleep(0.01) # Read the config variables by sending 256 empty bytes for i in range(256): resp = self.cnxn.xfer([0])[0] config.append(resp) # depends on [control=['for'], data=[]] # Add the bin bounds to the dictionary of data [bytes 0-29] for i in range(0, 15): data['Bin Boundary {0}'.format(i)] = self._16bit_unsigned(config[2 * i], config[2 * i + 1]) # depends on [control=['for'], data=['i']] # Add the Bin Particle Volumes (BPV) [bytes 32-95] for i in range(0, 16): data['BPV {0}'.format(i)] = self._calculate_float(config[4 * i + 32:4 * i + 36]) # depends on [control=['for'], data=['i']] # Add the Bin Particle Densities (BPD) [bytes 96-159] for i in range(0, 16): data['BPD {0}'.format(i)] = self._calculate_float(config[4 * i + 96:4 * i + 100]) # depends on [control=['for'], data=['i']] # Add the Bin Sample Volume Weight (BSVW) [bytes 160-223] for i in range(0, 16): data['BSVW {0}'.format(i)] = self._calculate_float(config[4 * i + 160:4 * i + 164]) # depends on [control=['for'], data=['i']] # Add the Gain Scaling Coefficient (GSC) and sample flow rate (SFR) data['GSC'] = self._calculate_float(config[224:228]) data['SFR'] = self._calculate_float(config[228:232]) # Add laser dac (LDAC) and Fan dac (FanDAC) data['LaserDAC'] = config[232] data['FanDAC'] = config[233] # If past firmware 15, add other things if self.firmware['major'] > 15.0: data['TOF_SFR'] = config[234] # depends on [control=['if'], data=[]] sleep(0.1) return data
def subs_consts(self, expr): """Substitute constants in expression unless it is already a number.""" if isinstance(expr, numbers.Number): return expr else: return expr.subs(self.constants)
def function[subs_consts, parameter[self, expr]]: constant[Substitute constants in expression unless it is already a number.] if call[name[isinstance], parameter[name[expr], name[numbers].Number]] begin[:] return[name[expr]]
keyword[def] identifier[subs_consts] ( identifier[self] , identifier[expr] ): literal[string] keyword[if] identifier[isinstance] ( identifier[expr] , identifier[numbers] . identifier[Number] ): keyword[return] identifier[expr] keyword[else] : keyword[return] identifier[expr] . identifier[subs] ( identifier[self] . identifier[constants] )
def subs_consts(self, expr): """Substitute constants in expression unless it is already a number.""" if isinstance(expr, numbers.Number): return expr # depends on [control=['if'], data=[]] else: return expr.subs(self.constants)
def leave(self, _id): """ Leave a room """ if SockJSRoomHandler._room.has_key(self._gcls() + _id): SockJSRoomHandler._room[self._gcls() + _id].remove(self) if len(SockJSRoomHandler._room[self._gcls() + _id]) == 0: del SockJSRoomHandler._room[self._gcls() + _id]
def function[leave, parameter[self, _id]]: constant[ Leave a room ] if call[name[SockJSRoomHandler]._room.has_key, parameter[binary_operation[call[name[self]._gcls, parameter[]] + name[_id]]]] begin[:] call[call[name[SockJSRoomHandler]._room][binary_operation[call[name[self]._gcls, parameter[]] + name[_id]]].remove, parameter[name[self]]] if compare[call[name[len], parameter[call[name[SockJSRoomHandler]._room][binary_operation[call[name[self]._gcls, parameter[]] + name[_id]]]]] equal[==] constant[0]] begin[:] <ast.Delete object at 0x7da20c76e080>
keyword[def] identifier[leave] ( identifier[self] , identifier[_id] ): literal[string] keyword[if] identifier[SockJSRoomHandler] . identifier[_room] . identifier[has_key] ( identifier[self] . identifier[_gcls] ()+ identifier[_id] ): identifier[SockJSRoomHandler] . identifier[_room] [ identifier[self] . identifier[_gcls] ()+ identifier[_id] ]. identifier[remove] ( identifier[self] ) keyword[if] identifier[len] ( identifier[SockJSRoomHandler] . identifier[_room] [ identifier[self] . identifier[_gcls] ()+ identifier[_id] ])== literal[int] : keyword[del] identifier[SockJSRoomHandler] . identifier[_room] [ identifier[self] . identifier[_gcls] ()+ identifier[_id] ]
def leave(self, _id): """ Leave a room """ if SockJSRoomHandler._room.has_key(self._gcls() + _id): SockJSRoomHandler._room[self._gcls() + _id].remove(self) if len(SockJSRoomHandler._room[self._gcls() + _id]) == 0: del SockJSRoomHandler._room[self._gcls() + _id] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]]
def parse_results_mol2(mol2_outpath): """Parse a DOCK6 mol2 output file, return a Pandas DataFrame of the results. Args: mol2_outpath (str): Path to mol2 output file Returns: DataFrame: Pandas DataFrame of the results """ docked_ligands = pd.DataFrame() lines = [line.strip() for line in open(mol2_outpath, 'r')] props = {} for i, line in enumerate(lines): if line.startswith('########## Name:'): ligand = line.strip().strip('##########').replace(' ', '').replace('\t', '').split(':')[1] line = lines[i + 1] props = {} props['Ligand'] = ligand if line.startswith('##########'): splitter = line.strip().strip('##########').replace(' ', '').replace('\t', '').split(':') props[splitter[0]] = float(splitter[1]) if line.startswith('@<TRIPOS>MOLECULE'): if props: docked_ligands = docked_ligands.append(props, ignore_index=True) return docked_ligands
def function[parse_results_mol2, parameter[mol2_outpath]]: constant[Parse a DOCK6 mol2 output file, return a Pandas DataFrame of the results. Args: mol2_outpath (str): Path to mol2 output file Returns: DataFrame: Pandas DataFrame of the results ] variable[docked_ligands] assign[=] call[name[pd].DataFrame, parameter[]] variable[lines] assign[=] <ast.ListComp object at 0x7da20e963430> variable[props] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da20e962710>, <ast.Name object at 0x7da20e961600>]]] in starred[call[name[enumerate], parameter[name[lines]]]] begin[:] if call[name[line].startswith, parameter[constant[########## Name:]]] begin[:] variable[ligand] assign[=] call[call[call[call[call[call[name[line].strip, parameter[]].strip, parameter[constant[##########]]].replace, parameter[constant[ ], constant[]]].replace, parameter[constant[ ], constant[]]].split, parameter[constant[:]]]][constant[1]] variable[line] assign[=] call[name[lines]][binary_operation[name[i] + constant[1]]] variable[props] assign[=] dictionary[[], []] call[name[props]][constant[Ligand]] assign[=] name[ligand] if call[name[line].startswith, parameter[constant[##########]]] begin[:] variable[splitter] assign[=] call[call[call[call[call[name[line].strip, parameter[]].strip, parameter[constant[##########]]].replace, parameter[constant[ ], constant[]]].replace, parameter[constant[ ], constant[]]].split, parameter[constant[:]]] call[name[props]][call[name[splitter]][constant[0]]] assign[=] call[name[float], parameter[call[name[splitter]][constant[1]]]] if call[name[line].startswith, parameter[constant[@<TRIPOS>MOLECULE]]] begin[:] if name[props] begin[:] variable[docked_ligands] assign[=] call[name[docked_ligands].append, parameter[name[props]]] return[name[docked_ligands]]
keyword[def] identifier[parse_results_mol2] ( identifier[mol2_outpath] ): literal[string] identifier[docked_ligands] = identifier[pd] . identifier[DataFrame] () identifier[lines] =[ identifier[line] . identifier[strip] () keyword[for] identifier[line] keyword[in] identifier[open] ( identifier[mol2_outpath] , literal[string] )] identifier[props] ={} keyword[for] identifier[i] , identifier[line] keyword[in] identifier[enumerate] ( identifier[lines] ): keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[ligand] = identifier[line] . identifier[strip] (). identifier[strip] ( literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )[ literal[int] ] identifier[line] = identifier[lines] [ identifier[i] + literal[int] ] identifier[props] ={} identifier[props] [ literal[string] ]= identifier[ligand] keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): identifier[splitter] = identifier[line] . identifier[strip] (). identifier[strip] ( literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[replace] ( literal[string] , literal[string] ). identifier[split] ( literal[string] ) identifier[props] [ identifier[splitter] [ literal[int] ]]= identifier[float] ( identifier[splitter] [ literal[int] ]) keyword[if] identifier[line] . identifier[startswith] ( literal[string] ): keyword[if] identifier[props] : identifier[docked_ligands] = identifier[docked_ligands] . identifier[append] ( identifier[props] , identifier[ignore_index] = keyword[True] ) keyword[return] identifier[docked_ligands]
def parse_results_mol2(mol2_outpath): """Parse a DOCK6 mol2 output file, return a Pandas DataFrame of the results. Args: mol2_outpath (str): Path to mol2 output file Returns: DataFrame: Pandas DataFrame of the results """ docked_ligands = pd.DataFrame() lines = [line.strip() for line in open(mol2_outpath, 'r')] props = {} for (i, line) in enumerate(lines): if line.startswith('########## Name:'): ligand = line.strip().strip('##########').replace(' ', '').replace('\t', '').split(':')[1] line = lines[i + 1] props = {} props['Ligand'] = ligand # depends on [control=['if'], data=[]] if line.startswith('##########'): splitter = line.strip().strip('##########').replace(' ', '').replace('\t', '').split(':') props[splitter[0]] = float(splitter[1]) # depends on [control=['if'], data=[]] if line.startswith('@<TRIPOS>MOLECULE'): if props: docked_ligands = docked_ligands.append(props, ignore_index=True) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return docked_ligands
def connect(cls, *args, **kwargs): """ connect(username=None, password=None, endpoint=None, admin=False) Configures the Panoptes client for use. Note that there is no need to call this unless you need to pass one or more of the below arguments. By default, the client will connect to the public Zooniverse.org API as an anonymous user. All arguments are optional: - **username** is your Zooniverse.org username. - **password** is your Zooniverse.org password. - **endpoint** is the HTTP API endpoint you'd like to connect to. Defaults to **https://www.zooniverse.org**. Should not include a trailing slash. - **admin** is a boolean, switching on admin mode if ``True``. Has no effect if the given username is not a Zooniverse.org administrator. Examples:: Panoptes.connect(username='example', password='example') Panoptes.connect(endpoint='https://panoptes.example.com') """ cls._local.panoptes_client = cls(*args, **kwargs) cls._local.panoptes_client.login() return cls._local.panoptes_client
def function[connect, parameter[cls]]: constant[ connect(username=None, password=None, endpoint=None, admin=False) Configures the Panoptes client for use. Note that there is no need to call this unless you need to pass one or more of the below arguments. By default, the client will connect to the public Zooniverse.org API as an anonymous user. All arguments are optional: - **username** is your Zooniverse.org username. - **password** is your Zooniverse.org password. - **endpoint** is the HTTP API endpoint you'd like to connect to. Defaults to **https://www.zooniverse.org**. Should not include a trailing slash. - **admin** is a boolean, switching on admin mode if ``True``. Has no effect if the given username is not a Zooniverse.org administrator. Examples:: Panoptes.connect(username='example', password='example') Panoptes.connect(endpoint='https://panoptes.example.com') ] name[cls]._local.panoptes_client assign[=] call[name[cls], parameter[<ast.Starred object at 0x7da1b0473d30>]] call[name[cls]._local.panoptes_client.login, parameter[]] return[name[cls]._local.panoptes_client]
keyword[def] identifier[connect] ( identifier[cls] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[cls] . identifier[_local] . identifier[panoptes_client] = identifier[cls] (* identifier[args] ,** identifier[kwargs] ) identifier[cls] . identifier[_local] . identifier[panoptes_client] . identifier[login] () keyword[return] identifier[cls] . identifier[_local] . identifier[panoptes_client]
def connect(cls, *args, **kwargs): """ connect(username=None, password=None, endpoint=None, admin=False) Configures the Panoptes client for use. Note that there is no need to call this unless you need to pass one or more of the below arguments. By default, the client will connect to the public Zooniverse.org API as an anonymous user. All arguments are optional: - **username** is your Zooniverse.org username. - **password** is your Zooniverse.org password. - **endpoint** is the HTTP API endpoint you'd like to connect to. Defaults to **https://www.zooniverse.org**. Should not include a trailing slash. - **admin** is a boolean, switching on admin mode if ``True``. Has no effect if the given username is not a Zooniverse.org administrator. Examples:: Panoptes.connect(username='example', password='example') Panoptes.connect(endpoint='https://panoptes.example.com') """ cls._local.panoptes_client = cls(*args, **kwargs) cls._local.panoptes_client.login() return cls._local.panoptes_client
def execute_file(self, filename): """ Execute a file and provide feedback to the user. :param filename: The pathname of the file to execute (a string). :returns: Whatever the executed file returns on stdout (a string). """ logger.info("Executing file: %s", format_path(filename)) contents = self.context.execute(filename, capture=True).stdout num_lines = len(contents.splitlines()) logger.debug("Execution of %s yielded % of output.", format_path(filename), pluralize(num_lines, 'line')) return contents.rstrip()
def function[execute_file, parameter[self, filename]]: constant[ Execute a file and provide feedback to the user. :param filename: The pathname of the file to execute (a string). :returns: Whatever the executed file returns on stdout (a string). ] call[name[logger].info, parameter[constant[Executing file: %s], call[name[format_path], parameter[name[filename]]]]] variable[contents] assign[=] call[name[self].context.execute, parameter[name[filename]]].stdout variable[num_lines] assign[=] call[name[len], parameter[call[name[contents].splitlines, parameter[]]]] call[name[logger].debug, parameter[constant[Execution of %s yielded % of output.], call[name[format_path], parameter[name[filename]]], call[name[pluralize], parameter[name[num_lines], constant[line]]]]] return[call[name[contents].rstrip, parameter[]]]
keyword[def] identifier[execute_file] ( identifier[self] , identifier[filename] ): literal[string] identifier[logger] . identifier[info] ( literal[string] , identifier[format_path] ( identifier[filename] )) identifier[contents] = identifier[self] . identifier[context] . identifier[execute] ( identifier[filename] , identifier[capture] = keyword[True] ). identifier[stdout] identifier[num_lines] = identifier[len] ( identifier[contents] . identifier[splitlines] ()) identifier[logger] . identifier[debug] ( literal[string] , identifier[format_path] ( identifier[filename] ), identifier[pluralize] ( identifier[num_lines] , literal[string] )) keyword[return] identifier[contents] . identifier[rstrip] ()
def execute_file(self, filename): """ Execute a file and provide feedback to the user. :param filename: The pathname of the file to execute (a string). :returns: Whatever the executed file returns on stdout (a string). """ logger.info('Executing file: %s', format_path(filename)) contents = self.context.execute(filename, capture=True).stdout num_lines = len(contents.splitlines()) logger.debug('Execution of %s yielded % of output.', format_path(filename), pluralize(num_lines, 'line')) return contents.rstrip()
def _remote_connection(server, opts, argparser_): """Initiate a remote connection, via PyWBEM. Arguments for the request are part of the command line arguments and include user name, password, namespace, etc. """ global CONN # pylint: disable=global-statement if opts.timeout is not None: if opts.timeout < 0 or opts.timeout > 300: argparser_.error('timeout option(%s) out of range' % opts.timeout) # mock only uses the namespace timeout and statistics options from the # original set of options. It ignores the url if opts.mock_server: CONN = FakedWBEMConnection( default_namespace=opts.namespace, timeout=opts.timeout, stats_enabled=opts.statistics) try: build_mock_repository(CONN, opts.mock_server, opts.verbose) except ValueError as ve: argparser_.error('Build Repository failed: %s' % ve) return CONN if server[0] == '/': url = server elif re.match(r"^https{0,1}://", server) is not None: url = server elif re.match(r"^[a-zA-Z0-9]+://", server) is not None: argparser_.error('Invalid scheme on server argument.' ' Use "http" or "https"') else: url = '%s://%s' % ('https', server) creds = None if opts.key_file is not None and opts.cert_file is None: argparser_.error('keyfile option requires certfile option') if opts.user is not None and opts.password is None: opts.password = _getpass.getpass('Enter password for %s: ' % opts.user) if opts.user is not None or opts.password is not None: creds = (opts.user, opts.password) # if client cert and key provided, create dictionary for # wbem connection x509_dict = None if opts.cert_file is not None: x509_dict = {"cert_file": opts.cert_file} if opts.key_file is not None: x509_dict.update({'key_file': opts.key_file}) CONN = WBEMConnection(url, creds, default_namespace=opts.namespace, no_verification=opts.no_verify_cert, x509=x509_dict, ca_certs=opts.ca_certs, timeout=opts.timeout, stats_enabled=opts.statistics) CONN.debug = True return CONN
def function[_remote_connection, parameter[server, opts, argparser_]]: constant[Initiate a remote connection, via PyWBEM. Arguments for the request are part of the command line arguments and include user name, password, namespace, etc. ] <ast.Global object at 0x7da204564550> if compare[name[opts].timeout is_not constant[None]] begin[:] if <ast.BoolOp object at 0x7da204564370> begin[:] call[name[argparser_].error, parameter[binary_operation[constant[timeout option(%s) out of range] <ast.Mod object at 0x7da2590d6920> name[opts].timeout]]] if name[opts].mock_server begin[:] variable[CONN] assign[=] call[name[FakedWBEMConnection], parameter[]] <ast.Try object at 0x7da204565870> return[name[CONN]] if compare[call[name[server]][constant[0]] equal[==] constant[/]] begin[:] variable[url] assign[=] name[server] variable[creds] assign[=] constant[None] if <ast.BoolOp object at 0x7da204566f20> begin[:] call[name[argparser_].error, parameter[constant[keyfile option requires certfile option]]] if <ast.BoolOp object at 0x7da204567c10> begin[:] name[opts].password assign[=] call[name[_getpass].getpass, parameter[binary_operation[constant[Enter password for %s: ] <ast.Mod object at 0x7da2590d6920> name[opts].user]]] if <ast.BoolOp object at 0x7da2054a5cc0> begin[:] variable[creds] assign[=] tuple[[<ast.Attribute object at 0x7da2054a7460>, <ast.Attribute object at 0x7da2054a56c0>]] variable[x509_dict] assign[=] constant[None] if compare[name[opts].cert_file is_not constant[None]] begin[:] variable[x509_dict] assign[=] dictionary[[<ast.Constant object at 0x7da2041da980>], [<ast.Attribute object at 0x7da2041db6d0>]] if compare[name[opts].key_file is_not constant[None]] begin[:] call[name[x509_dict].update, parameter[dictionary[[<ast.Constant object at 0x7da2041d8d00>], [<ast.Attribute object at 0x7da2041db9d0>]]]] variable[CONN] assign[=] call[name[WBEMConnection], parameter[name[url], name[creds]]] name[CONN].debug assign[=] constant[True] return[name[CONN]]
keyword[def] identifier[_remote_connection] ( identifier[server] , identifier[opts] , identifier[argparser_] ): literal[string] keyword[global] identifier[CONN] keyword[if] identifier[opts] . identifier[timeout] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[opts] . identifier[timeout] < literal[int] keyword[or] identifier[opts] . identifier[timeout] > literal[int] : identifier[argparser_] . identifier[error] ( literal[string] % identifier[opts] . identifier[timeout] ) keyword[if] identifier[opts] . identifier[mock_server] : identifier[CONN] = identifier[FakedWBEMConnection] ( identifier[default_namespace] = identifier[opts] . identifier[namespace] , identifier[timeout] = identifier[opts] . identifier[timeout] , identifier[stats_enabled] = identifier[opts] . identifier[statistics] ) keyword[try] : identifier[build_mock_repository] ( identifier[CONN] , identifier[opts] . identifier[mock_server] , identifier[opts] . identifier[verbose] ) keyword[except] identifier[ValueError] keyword[as] identifier[ve] : identifier[argparser_] . identifier[error] ( literal[string] % identifier[ve] ) keyword[return] identifier[CONN] keyword[if] identifier[server] [ literal[int] ]== literal[string] : identifier[url] = identifier[server] keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[server] ) keyword[is] keyword[not] keyword[None] : identifier[url] = identifier[server] keyword[elif] identifier[re] . identifier[match] ( literal[string] , identifier[server] ) keyword[is] keyword[not] keyword[None] : identifier[argparser_] . identifier[error] ( literal[string] literal[string] ) keyword[else] : identifier[url] = literal[string] %( literal[string] , identifier[server] ) identifier[creds] = keyword[None] keyword[if] identifier[opts] . identifier[key_file] keyword[is] keyword[not] keyword[None] keyword[and] identifier[opts] . identifier[cert_file] keyword[is] keyword[None] : identifier[argparser_] . 
identifier[error] ( literal[string] ) keyword[if] identifier[opts] . identifier[user] keyword[is] keyword[not] keyword[None] keyword[and] identifier[opts] . identifier[password] keyword[is] keyword[None] : identifier[opts] . identifier[password] = identifier[_getpass] . identifier[getpass] ( literal[string] % identifier[opts] . identifier[user] ) keyword[if] identifier[opts] . identifier[user] keyword[is] keyword[not] keyword[None] keyword[or] identifier[opts] . identifier[password] keyword[is] keyword[not] keyword[None] : identifier[creds] =( identifier[opts] . identifier[user] , identifier[opts] . identifier[password] ) identifier[x509_dict] = keyword[None] keyword[if] identifier[opts] . identifier[cert_file] keyword[is] keyword[not] keyword[None] : identifier[x509_dict] ={ literal[string] : identifier[opts] . identifier[cert_file] } keyword[if] identifier[opts] . identifier[key_file] keyword[is] keyword[not] keyword[None] : identifier[x509_dict] . identifier[update] ({ literal[string] : identifier[opts] . identifier[key_file] }) identifier[CONN] = identifier[WBEMConnection] ( identifier[url] , identifier[creds] , identifier[default_namespace] = identifier[opts] . identifier[namespace] , identifier[no_verification] = identifier[opts] . identifier[no_verify_cert] , identifier[x509] = identifier[x509_dict] , identifier[ca_certs] = identifier[opts] . identifier[ca_certs] , identifier[timeout] = identifier[opts] . identifier[timeout] , identifier[stats_enabled] = identifier[opts] . identifier[statistics] ) identifier[CONN] . identifier[debug] = keyword[True] keyword[return] identifier[CONN]
def _remote_connection(server, opts, argparser_): """Initiate a remote connection, via PyWBEM. Arguments for the request are part of the command line arguments and include user name, password, namespace, etc. """ global CONN # pylint: disable=global-statement if opts.timeout is not None: if opts.timeout < 0 or opts.timeout > 300: argparser_.error('timeout option(%s) out of range' % opts.timeout) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # mock only uses the namespace timeout and statistics options from the # original set of options. It ignores the url if opts.mock_server: CONN = FakedWBEMConnection(default_namespace=opts.namespace, timeout=opts.timeout, stats_enabled=opts.statistics) try: build_mock_repository(CONN, opts.mock_server, opts.verbose) # depends on [control=['try'], data=[]] except ValueError as ve: argparser_.error('Build Repository failed: %s' % ve) # depends on [control=['except'], data=['ve']] return CONN # depends on [control=['if'], data=[]] if server[0] == '/': url = server # depends on [control=['if'], data=[]] elif re.match('^https{0,1}://', server) is not None: url = server # depends on [control=['if'], data=[]] elif re.match('^[a-zA-Z0-9]+://', server) is not None: argparser_.error('Invalid scheme on server argument. 
Use "http" or "https"') # depends on [control=['if'], data=[]] else: url = '%s://%s' % ('https', server) creds = None if opts.key_file is not None and opts.cert_file is None: argparser_.error('keyfile option requires certfile option') # depends on [control=['if'], data=[]] if opts.user is not None and opts.password is None: opts.password = _getpass.getpass('Enter password for %s: ' % opts.user) # depends on [control=['if'], data=[]] if opts.user is not None or opts.password is not None: creds = (opts.user, opts.password) # depends on [control=['if'], data=[]] # if client cert and key provided, create dictionary for # wbem connection x509_dict = None if opts.cert_file is not None: x509_dict = {'cert_file': opts.cert_file} if opts.key_file is not None: x509_dict.update({'key_file': opts.key_file}) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] CONN = WBEMConnection(url, creds, default_namespace=opts.namespace, no_verification=opts.no_verify_cert, x509=x509_dict, ca_certs=opts.ca_certs, timeout=opts.timeout, stats_enabled=opts.statistics) CONN.debug = True return CONN
def acquireConnection(self): """ Get a connection from the pool. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release() method or use it in a context manager expression (with ... as:) to release resources. """ self._logger.debug("Acquiring connection") dbConn = self._pool.connection(shareable=False) connWrap = ConnectionWrapper(dbConn=dbConn, cursor=dbConn.cursor(), releaser=self._releaseConnection, logger=self._logger) return connWrap
def function[acquireConnection, parameter[self]]: constant[ Get a connection from the pool. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release() method or use it in a context manager expression (with ... as:) to release resources. ] call[name[self]._logger.debug, parameter[constant[Acquiring connection]]] variable[dbConn] assign[=] call[name[self]._pool.connection, parameter[]] variable[connWrap] assign[=] call[name[ConnectionWrapper], parameter[]] return[name[connWrap]]
keyword[def] identifier[acquireConnection] ( identifier[self] ): literal[string] identifier[self] . identifier[_logger] . identifier[debug] ( literal[string] ) identifier[dbConn] = identifier[self] . identifier[_pool] . identifier[connection] ( identifier[shareable] = keyword[False] ) identifier[connWrap] = identifier[ConnectionWrapper] ( identifier[dbConn] = identifier[dbConn] , identifier[cursor] = identifier[dbConn] . identifier[cursor] (), identifier[releaser] = identifier[self] . identifier[_releaseConnection] , identifier[logger] = identifier[self] . identifier[_logger] ) keyword[return] identifier[connWrap]
def acquireConnection(self): """ Get a connection from the pool. Parameters: ---------------------------------------------------------------- retval: A ConnectionWrapper instance. NOTE: Caller is responsible for calling the ConnectionWrapper instance's release() method or use it in a context manager expression (with ... as:) to release resources. """ self._logger.debug('Acquiring connection') dbConn = self._pool.connection(shareable=False) connWrap = ConnectionWrapper(dbConn=dbConn, cursor=dbConn.cursor(), releaser=self._releaseConnection, logger=self._logger) return connWrap
def get_float_time(): '''returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's ''' t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) return time.mktime(t2.timetuple()) + 1e-6 * t2.microsecond
def function[get_float_time, parameter[]]: constant[returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's ] variable[t1] assign[=] call[name[time].time, parameter[]] variable[t2] assign[=] call[name[datetime].datetime.fromtimestamp, parameter[name[t1]]] return[binary_operation[call[name[time].mktime, parameter[call[name[t2].timetuple, parameter[]]]] + binary_operation[constant[1e-06] * name[t2].microsecond]]]
keyword[def] identifier[get_float_time] (): literal[string] identifier[t1] = identifier[time] . identifier[time] () identifier[t2] = identifier[datetime] . identifier[datetime] . identifier[fromtimestamp] ( identifier[t1] ) keyword[return] identifier[time] . identifier[mktime] ( identifier[t2] . identifier[timetuple] ())+ literal[int] * identifier[t2] . identifier[microsecond]
def get_float_time(): """returns time as double precision floats - Time64 in pytables - mapping to and from python datetime's """ t1 = time.time() t2 = datetime.datetime.fromtimestamp(t1) return time.mktime(t2.timetuple()) + 1e-06 * t2.microsecond
def tracking_branch(self): """ :return: The remote_reference we are tracking, or None if we are not a tracking branch""" from .remote import RemoteReference reader = self.config_reader() if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref): ref = Head(self.repo, Head.to_full_path(strip_quotes(reader.get_value(self.k_config_remote_ref)))) remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name)) return RemoteReference(self.repo, remote_refpath) # END handle have tracking branch # we are not a tracking branch return None
def function[tracking_branch, parameter[self]]: constant[ :return: The remote_reference we are tracking, or None if we are not a tracking branch] from relative_module[remote] import module[RemoteReference] variable[reader] assign[=] call[name[self].config_reader, parameter[]] if <ast.BoolOp object at 0x7da1b1d47580> begin[:] variable[ref] assign[=] call[name[Head], parameter[name[self].repo, call[name[Head].to_full_path, parameter[call[name[strip_quotes], parameter[call[name[reader].get_value, parameter[name[self].k_config_remote_ref]]]]]]]] variable[remote_refpath] assign[=] call[name[RemoteReference].to_full_path, parameter[call[name[join_path], parameter[call[name[reader].get_value, parameter[name[self].k_config_remote]], name[ref].name]]]] return[call[name[RemoteReference], parameter[name[self].repo, name[remote_refpath]]]] return[constant[None]]
keyword[def] identifier[tracking_branch] ( identifier[self] ): literal[string] keyword[from] . identifier[remote] keyword[import] identifier[RemoteReference] identifier[reader] = identifier[self] . identifier[config_reader] () keyword[if] identifier[reader] . identifier[has_option] ( identifier[self] . identifier[k_config_remote] ) keyword[and] identifier[reader] . identifier[has_option] ( identifier[self] . identifier[k_config_remote_ref] ): identifier[ref] = identifier[Head] ( identifier[self] . identifier[repo] , identifier[Head] . identifier[to_full_path] ( identifier[strip_quotes] ( identifier[reader] . identifier[get_value] ( identifier[self] . identifier[k_config_remote_ref] )))) identifier[remote_refpath] = identifier[RemoteReference] . identifier[to_full_path] ( identifier[join_path] ( identifier[reader] . identifier[get_value] ( identifier[self] . identifier[k_config_remote] ), identifier[ref] . identifier[name] )) keyword[return] identifier[RemoteReference] ( identifier[self] . identifier[repo] , identifier[remote_refpath] ) keyword[return] keyword[None]
def tracking_branch(self): """ :return: The remote_reference we are tracking, or None if we are not a tracking branch""" from .remote import RemoteReference reader = self.config_reader() if reader.has_option(self.k_config_remote) and reader.has_option(self.k_config_remote_ref): ref = Head(self.repo, Head.to_full_path(strip_quotes(reader.get_value(self.k_config_remote_ref)))) remote_refpath = RemoteReference.to_full_path(join_path(reader.get_value(self.k_config_remote), ref.name)) return RemoteReference(self.repo, remote_refpath) # depends on [control=['if'], data=[]] # END handle have tracking branch # we are not a tracking branch return None
def register_onRsp(self, req): """ 添加回调处理函数的装饰器 :param req: 具体的topic,如 :return: """ def wrapper(_callback): callbackList = self._req_callbacks.setdefault(req, []) callbackList.append(_callback) return _callback return wrapper
def function[register_onRsp, parameter[self, req]]: constant[ 添加回调处理函数的装饰器 :param req: 具体的topic,如 :return: ] def function[wrapper, parameter[_callback]]: variable[callbackList] assign[=] call[name[self]._req_callbacks.setdefault, parameter[name[req], list[[]]]] call[name[callbackList].append, parameter[name[_callback]]] return[name[_callback]] return[name[wrapper]]
keyword[def] identifier[register_onRsp] ( identifier[self] , identifier[req] ): literal[string] keyword[def] identifier[wrapper] ( identifier[_callback] ): identifier[callbackList] = identifier[self] . identifier[_req_callbacks] . identifier[setdefault] ( identifier[req] ,[]) identifier[callbackList] . identifier[append] ( identifier[_callback] ) keyword[return] identifier[_callback] keyword[return] identifier[wrapper]
def register_onRsp(self, req): """ 添加回调处理函数的装饰器 :param req: 具体的topic,如 :return: """ def wrapper(_callback): callbackList = self._req_callbacks.setdefault(req, []) callbackList.append(_callback) return _callback return wrapper
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0): """ Command blink(1) to fade to RGB color, no color correction applied. """ action = ord('c') fade_time = int(fade_milliseconds / 10) th = (fade_time & 0xff00) >> 8 tl = fade_time & 0x00ff buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0] self.write( buf )
def function[fade_to_rgb_uncorrected, parameter[self, fade_milliseconds, red, green, blue, led_number]]: constant[ Command blink(1) to fade to RGB color, no color correction applied. ] variable[action] assign[=] call[name[ord], parameter[constant[c]]] variable[fade_time] assign[=] call[name[int], parameter[binary_operation[name[fade_milliseconds] / constant[10]]]] variable[th] assign[=] binary_operation[binary_operation[name[fade_time] <ast.BitAnd object at 0x7da2590d6b60> constant[65280]] <ast.RShift object at 0x7da2590d6a40> constant[8]] variable[tl] assign[=] binary_operation[name[fade_time] <ast.BitAnd object at 0x7da2590d6b60> constant[255]] variable[buf] assign[=] list[[<ast.Name object at 0x7da18f8105b0>, <ast.Name object at 0x7da18f811bd0>, <ast.Call object at 0x7da18f812200>, <ast.Call object at 0x7da18f811c00>, <ast.Call object at 0x7da18f812b60>, <ast.Name object at 0x7da18f810c10>, <ast.Name object at 0x7da18f813310>, <ast.Name object at 0x7da18f810ca0>, <ast.Constant object at 0x7da18f813700>]] call[name[self].write, parameter[name[buf]]]
keyword[def] identifier[fade_to_rgb_uncorrected] ( identifier[self] , identifier[fade_milliseconds] , identifier[red] , identifier[green] , identifier[blue] , identifier[led_number] = literal[int] ): literal[string] identifier[action] = identifier[ord] ( literal[string] ) identifier[fade_time] = identifier[int] ( identifier[fade_milliseconds] / literal[int] ) identifier[th] =( identifier[fade_time] & literal[int] )>> literal[int] identifier[tl] = identifier[fade_time] & literal[int] identifier[buf] =[ identifier[REPORT_ID] , identifier[action] , identifier[int] ( identifier[red] ), identifier[int] ( identifier[green] ), identifier[int] ( identifier[blue] ), identifier[th] , identifier[tl] , identifier[led_number] , literal[int] ] identifier[self] . identifier[write] ( identifier[buf] )
def fade_to_rgb_uncorrected(self, fade_milliseconds, red, green, blue, led_number=0): """ Command blink(1) to fade to RGB color, no color correction applied. """ action = ord('c') fade_time = int(fade_milliseconds / 10) th = (fade_time & 65280) >> 8 tl = fade_time & 255 buf = [REPORT_ID, action, int(red), int(green), int(blue), th, tl, led_number, 0] self.write(buf)
def getL2Representations(self): """ Returns the active representation in L2. """ return [set(L2.getSelf()._pooler.getActiveCells()) for L2 in self.L2Regions]
def function[getL2Representations, parameter[self]]: constant[ Returns the active representation in L2. ] return[<ast.ListComp object at 0x7da1b085fca0>]
keyword[def] identifier[getL2Representations] ( identifier[self] ): literal[string] keyword[return] [ identifier[set] ( identifier[L2] . identifier[getSelf] (). identifier[_pooler] . identifier[getActiveCells] ()) keyword[for] identifier[L2] keyword[in] identifier[self] . identifier[L2Regions] ]
def getL2Representations(self): """ Returns the active representation in L2. """ return [set(L2.getSelf()._pooler.getActiveCells()) for L2 in self.L2Regions]
def index_buffer(self, buffer, index_element_size=4): """ Set the index buffer for this VAO Args: buffer: ``moderngl.Buffer``, ``numpy.array`` or ``bytes`` Keyword Args: index_element_size (int): Byte size of each element. 1, 2 or 4 """ if not type(buffer) in [moderngl.Buffer, numpy.ndarray, bytes]: raise VAOError("buffer parameter must be a moderngl.Buffer, numpy.ndarray or bytes instance") if isinstance(buffer, numpy.ndarray): buffer = self.ctx.buffer(buffer.tobytes()) if isinstance(buffer, bytes): buffer = self.ctx.buffer(data=buffer) self._index_buffer = buffer self._index_element_size = index_element_size
def function[index_buffer, parameter[self, buffer, index_element_size]]: constant[ Set the index buffer for this VAO Args: buffer: ``moderngl.Buffer``, ``numpy.array`` or ``bytes`` Keyword Args: index_element_size (int): Byte size of each element. 1, 2 or 4 ] if <ast.UnaryOp object at 0x7da204620e20> begin[:] <ast.Raise object at 0x7da204621150> if call[name[isinstance], parameter[name[buffer], name[numpy].ndarray]] begin[:] variable[buffer] assign[=] call[name[self].ctx.buffer, parameter[call[name[buffer].tobytes, parameter[]]]] if call[name[isinstance], parameter[name[buffer], name[bytes]]] begin[:] variable[buffer] assign[=] call[name[self].ctx.buffer, parameter[]] name[self]._index_buffer assign[=] name[buffer] name[self]._index_element_size assign[=] name[index_element_size]
keyword[def] identifier[index_buffer] ( identifier[self] , identifier[buffer] , identifier[index_element_size] = literal[int] ): literal[string] keyword[if] keyword[not] identifier[type] ( identifier[buffer] ) keyword[in] [ identifier[moderngl] . identifier[Buffer] , identifier[numpy] . identifier[ndarray] , identifier[bytes] ]: keyword[raise] identifier[VAOError] ( literal[string] ) keyword[if] identifier[isinstance] ( identifier[buffer] , identifier[numpy] . identifier[ndarray] ): identifier[buffer] = identifier[self] . identifier[ctx] . identifier[buffer] ( identifier[buffer] . identifier[tobytes] ()) keyword[if] identifier[isinstance] ( identifier[buffer] , identifier[bytes] ): identifier[buffer] = identifier[self] . identifier[ctx] . identifier[buffer] ( identifier[data] = identifier[buffer] ) identifier[self] . identifier[_index_buffer] = identifier[buffer] identifier[self] . identifier[_index_element_size] = identifier[index_element_size]
def index_buffer(self, buffer, index_element_size=4): """ Set the index buffer for this VAO Args: buffer: ``moderngl.Buffer``, ``numpy.array`` or ``bytes`` Keyword Args: index_element_size (int): Byte size of each element. 1, 2 or 4 """ if not type(buffer) in [moderngl.Buffer, numpy.ndarray, bytes]: raise VAOError('buffer parameter must be a moderngl.Buffer, numpy.ndarray or bytes instance') # depends on [control=['if'], data=[]] if isinstance(buffer, numpy.ndarray): buffer = self.ctx.buffer(buffer.tobytes()) # depends on [control=['if'], data=[]] if isinstance(buffer, bytes): buffer = self.ctx.buffer(data=buffer) # depends on [control=['if'], data=[]] self._index_buffer = buffer self._index_element_size = index_element_size
def title(self, value): """ Setter for **self.__title** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format( "title", value) self.__title = value
def function[title, parameter[self, value]]: constant[ Setter for **self.__title** attribute. :param value: Attribute value. :type value: unicode ] if compare[name[value] is_not constant[None]] begin[:] assert[compare[call[name[type], parameter[name[value]]] is name[unicode]]] name[self].__title assign[=] name[value]
keyword[def] identifier[title] ( identifier[self] , identifier[value] ): literal[string] keyword[if] identifier[value] keyword[is] keyword[not] keyword[None] : keyword[assert] identifier[type] ( identifier[value] ) keyword[is] identifier[unicode] , literal[string] . identifier[format] ( literal[string] , identifier[value] ) identifier[self] . identifier[__title] = identifier[value]
def title(self, value): """ Setter for **self.__title** attribute. :param value: Attribute value. :type value: unicode """ if value is not None: assert type(value) is unicode, "'{0}' attribute: '{1}' type is not 'unicode'!".format('title', value) # depends on [control=['if'], data=['value']] self.__title = value
def reservations(self): """get nodes of every reservations""" command = [SINFO, '--reservation'] output = subprocess.check_output(command, env=SINFO_ENV) output = output.decode() it = iter(output.splitlines()) next(it) for line in it: rsv = Reservation.from_sinfo(line) yield rsv.name, rsv
def function[reservations, parameter[self]]: constant[get nodes of every reservations] variable[command] assign[=] list[[<ast.Name object at 0x7da2041d8d00>, <ast.Constant object at 0x7da2041daa10>]] variable[output] assign[=] call[name[subprocess].check_output, parameter[name[command]]] variable[output] assign[=] call[name[output].decode, parameter[]] variable[it] assign[=] call[name[iter], parameter[call[name[output].splitlines, parameter[]]]] call[name[next], parameter[name[it]]] for taget[name[line]] in starred[name[it]] begin[:] variable[rsv] assign[=] call[name[Reservation].from_sinfo, parameter[name[line]]] <ast.Yield object at 0x7da2041d81c0>
keyword[def] identifier[reservations] ( identifier[self] ): literal[string] identifier[command] =[ identifier[SINFO] , literal[string] ] identifier[output] = identifier[subprocess] . identifier[check_output] ( identifier[command] , identifier[env] = identifier[SINFO_ENV] ) identifier[output] = identifier[output] . identifier[decode] () identifier[it] = identifier[iter] ( identifier[output] . identifier[splitlines] ()) identifier[next] ( identifier[it] ) keyword[for] identifier[line] keyword[in] identifier[it] : identifier[rsv] = identifier[Reservation] . identifier[from_sinfo] ( identifier[line] ) keyword[yield] identifier[rsv] . identifier[name] , identifier[rsv]
def reservations(self): """get nodes of every reservations""" command = [SINFO, '--reservation'] output = subprocess.check_output(command, env=SINFO_ENV) output = output.decode() it = iter(output.splitlines()) next(it) for line in it: rsv = Reservation.from_sinfo(line) yield (rsv.name, rsv) # depends on [control=['for'], data=['line']]
def seekset_ng(func): """Read file from start then set back to original.""" @functools.wraps(func) def seekcur(file, *args, seekset=os.SEEK_SET, **kw): # seek_cur = file.tell() file.seek(seekset, os.SEEK_SET) return_ = func(file, *args, seekset=seekset, **kw) # file.seek(seek_cur, os.SEEK_SET) return return_ return seekcur
def function[seekset_ng, parameter[func]]: constant[Read file from start then set back to original.] def function[seekcur, parameter[file]]: call[name[file].seek, parameter[name[seekset], name[os].SEEK_SET]] variable[return_] assign[=] call[name[func], parameter[name[file], <ast.Starred object at 0x7da20c6c5570>]] return[name[return_]] return[name[seekcur]]
keyword[def] identifier[seekset_ng] ( identifier[func] ): literal[string] @ identifier[functools] . identifier[wraps] ( identifier[func] ) keyword[def] identifier[seekcur] ( identifier[file] ,* identifier[args] , identifier[seekset] = identifier[os] . identifier[SEEK_SET] ,** identifier[kw] ): identifier[file] . identifier[seek] ( identifier[seekset] , identifier[os] . identifier[SEEK_SET] ) identifier[return_] = identifier[func] ( identifier[file] ,* identifier[args] , identifier[seekset] = identifier[seekset] ,** identifier[kw] ) keyword[return] identifier[return_] keyword[return] identifier[seekcur]
def seekset_ng(func): """Read file from start then set back to original.""" @functools.wraps(func) def seekcur(file, *args, seekset=os.SEEK_SET, **kw): # seek_cur = file.tell() file.seek(seekset, os.SEEK_SET) return_ = func(file, *args, seekset=seekset, **kw) # file.seek(seek_cur, os.SEEK_SET) return return_ return seekcur
def get_foreign_key_declaration_sql(self, foreign_key): """ Obtain DBMS specific SQL code portion needed to set the FOREIGN KEY constraint of a field declaration to be used in statements like CREATE TABLE. :param foreign_key: The foreign key :type foreign_key: ForeignKeyConstraint :rtype: str """ sql = self.get_foreign_key_base_declaration_sql(foreign_key) sql += self.get_advanced_foreign_key_options_sql(foreign_key) return sql
def function[get_foreign_key_declaration_sql, parameter[self, foreign_key]]: constant[ Obtain DBMS specific SQL code portion needed to set the FOREIGN KEY constraint of a field declaration to be used in statements like CREATE TABLE. :param foreign_key: The foreign key :type foreign_key: ForeignKeyConstraint :rtype: str ] variable[sql] assign[=] call[name[self].get_foreign_key_base_declaration_sql, parameter[name[foreign_key]]] <ast.AugAssign object at 0x7da1b17d7730> return[name[sql]]
keyword[def] identifier[get_foreign_key_declaration_sql] ( identifier[self] , identifier[foreign_key] ): literal[string] identifier[sql] = identifier[self] . identifier[get_foreign_key_base_declaration_sql] ( identifier[foreign_key] ) identifier[sql] += identifier[self] . identifier[get_advanced_foreign_key_options_sql] ( identifier[foreign_key] ) keyword[return] identifier[sql]
def get_foreign_key_declaration_sql(self, foreign_key): """ Obtain DBMS specific SQL code portion needed to set the FOREIGN KEY constraint of a field declaration to be used in statements like CREATE TABLE. :param foreign_key: The foreign key :type foreign_key: ForeignKeyConstraint :rtype: str """ sql = self.get_foreign_key_base_declaration_sql(foreign_key) sql += self.get_advanced_foreign_key_options_sql(foreign_key) return sql
def exp(cls, x: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function. ''' return cls._unary_op(x, tf.exp, tf.float32)
def function[exp, parameter[cls, x]]: constant[Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function. ] return[call[name[cls]._unary_op, parameter[name[x], name[tf].exp, name[tf].float32]]]
keyword[def] identifier[exp] ( identifier[cls] , identifier[x] : literal[string] )-> literal[string] : literal[string] keyword[return] identifier[cls] . identifier[_unary_op] ( identifier[x] , identifier[tf] . identifier[exp] , identifier[tf] . identifier[float32] )
def exp(cls, x: 'TensorFluent') -> 'TensorFluent': """Returns a TensorFluent for the exp function. Args: x: The input fluent. Returns: A TensorFluent wrapping the exp function. """ return cls._unary_op(x, tf.exp, tf.float32)
def addIVMInputs(imageObjectList,ivmlist): """ Add IVM filenames provided by user to outputNames dictionary for each input imageObject. """ if ivmlist is None: return for img,ivmname in zip(imageObjectList,ivmlist): img.updateIVMName(ivmname)
def function[addIVMInputs, parameter[imageObjectList, ivmlist]]: constant[ Add IVM filenames provided by user to outputNames dictionary for each input imageObject. ] if compare[name[ivmlist] is constant[None]] begin[:] return[None] for taget[tuple[[<ast.Name object at 0x7da1b1a7df30>, <ast.Name object at 0x7da1b1a7c730>]]] in starred[call[name[zip], parameter[name[imageObjectList], name[ivmlist]]]] begin[:] call[name[img].updateIVMName, parameter[name[ivmname]]]
keyword[def] identifier[addIVMInputs] ( identifier[imageObjectList] , identifier[ivmlist] ): literal[string] keyword[if] identifier[ivmlist] keyword[is] keyword[None] : keyword[return] keyword[for] identifier[img] , identifier[ivmname] keyword[in] identifier[zip] ( identifier[imageObjectList] , identifier[ivmlist] ): identifier[img] . identifier[updateIVMName] ( identifier[ivmname] )
def addIVMInputs(imageObjectList, ivmlist): """ Add IVM filenames provided by user to outputNames dictionary for each input imageObject. """ if ivmlist is None: return # depends on [control=['if'], data=[]] for (img, ivmname) in zip(imageObjectList, ivmlist): img.updateIVMName(ivmname) # depends on [control=['for'], data=[]]
async def emit(self, record: LogRecord): # type: ignore """ Actually log the specified logging record to the stream. """ if self.writer is None: self.writer = await self._init_writer() try: msg = self.format(record) + self.terminator self.writer.write(msg.encode()) await self.writer.drain() except Exception: await self.handleError(record)
<ast.AsyncFunctionDef object at 0x7da204621120>
keyword[async] keyword[def] identifier[emit] ( identifier[self] , identifier[record] : identifier[LogRecord] ): literal[string] keyword[if] identifier[self] . identifier[writer] keyword[is] keyword[None] : identifier[self] . identifier[writer] = keyword[await] identifier[self] . identifier[_init_writer] () keyword[try] : identifier[msg] = identifier[self] . identifier[format] ( identifier[record] )+ identifier[self] . identifier[terminator] identifier[self] . identifier[writer] . identifier[write] ( identifier[msg] . identifier[encode] ()) keyword[await] identifier[self] . identifier[writer] . identifier[drain] () keyword[except] identifier[Exception] : keyword[await] identifier[self] . identifier[handleError] ( identifier[record] )
async def emit(self, record: LogRecord): # type: ignore '\n Actually log the specified logging record to the stream.\n ' if self.writer is None: self.writer = await self._init_writer() # depends on [control=['if'], data=[]] try: msg = self.format(record) + self.terminator self.writer.write(msg.encode()) await self.writer.drain() # depends on [control=['try'], data=[]] except Exception: await self.handleError(record) # depends on [control=['except'], data=[]]
def plot_plate_limits_field(axis, rcmb, ridges, trenches): """plot arrows designating ridges and trenches in 2D field plots""" for trench in trenches: xxd = (rcmb + 1.02) * np.cos(trench) # arrow begin yyd = (rcmb + 1.02) * np.sin(trench) # arrow begin xxt = (rcmb + 1.35) * np.cos(trench) # arrow end yyt = (rcmb + 1.35) * np.sin(trench) # arrow end axis.annotate('', xy=(xxd, yyd), xytext=(xxt, yyt), arrowprops=dict(facecolor='red', shrink=0.05)) for ridge in ridges: xxd = (rcmb + 1.02) * np.cos(ridge) yyd = (rcmb + 1.02) * np.sin(ridge) xxt = (rcmb + 1.35) * np.cos(ridge) yyt = (rcmb + 1.35) * np.sin(ridge) axis.annotate('', xy=(xxd, yyd), xytext=(xxt, yyt), arrowprops=dict(facecolor='green', shrink=0.05))
def function[plot_plate_limits_field, parameter[axis, rcmb, ridges, trenches]]: constant[plot arrows designating ridges and trenches in 2D field plots] for taget[name[trench]] in starred[name[trenches]] begin[:] variable[xxd] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.02]] * call[name[np].cos, parameter[name[trench]]]] variable[yyd] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.02]] * call[name[np].sin, parameter[name[trench]]]] variable[xxt] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.35]] * call[name[np].cos, parameter[name[trench]]]] variable[yyt] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.35]] * call[name[np].sin, parameter[name[trench]]]] call[name[axis].annotate, parameter[constant[]]] for taget[name[ridge]] in starred[name[ridges]] begin[:] variable[xxd] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.02]] * call[name[np].cos, parameter[name[ridge]]]] variable[yyd] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.02]] * call[name[np].sin, parameter[name[ridge]]]] variable[xxt] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.35]] * call[name[np].cos, parameter[name[ridge]]]] variable[yyt] assign[=] binary_operation[binary_operation[name[rcmb] + constant[1.35]] * call[name[np].sin, parameter[name[ridge]]]] call[name[axis].annotate, parameter[constant[]]]
keyword[def] identifier[plot_plate_limits_field] ( identifier[axis] , identifier[rcmb] , identifier[ridges] , identifier[trenches] ): literal[string] keyword[for] identifier[trench] keyword[in] identifier[trenches] : identifier[xxd] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[cos] ( identifier[trench] ) identifier[yyd] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[sin] ( identifier[trench] ) identifier[xxt] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[cos] ( identifier[trench] ) identifier[yyt] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[sin] ( identifier[trench] ) identifier[axis] . identifier[annotate] ( literal[string] , identifier[xy] =( identifier[xxd] , identifier[yyd] ), identifier[xytext] =( identifier[xxt] , identifier[yyt] ), identifier[arrowprops] = identifier[dict] ( identifier[facecolor] = literal[string] , identifier[shrink] = literal[int] )) keyword[for] identifier[ridge] keyword[in] identifier[ridges] : identifier[xxd] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[cos] ( identifier[ridge] ) identifier[yyd] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[sin] ( identifier[ridge] ) identifier[xxt] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[cos] ( identifier[ridge] ) identifier[yyt] =( identifier[rcmb] + literal[int] )* identifier[np] . identifier[sin] ( identifier[ridge] ) identifier[axis] . identifier[annotate] ( literal[string] , identifier[xy] =( identifier[xxd] , identifier[yyd] ), identifier[xytext] =( identifier[xxt] , identifier[yyt] ), identifier[arrowprops] = identifier[dict] ( identifier[facecolor] = literal[string] , identifier[shrink] = literal[int] ))
def plot_plate_limits_field(axis, rcmb, ridges, trenches): """plot arrows designating ridges and trenches in 2D field plots""" for trench in trenches: xxd = (rcmb + 1.02) * np.cos(trench) # arrow begin yyd = (rcmb + 1.02) * np.sin(trench) # arrow begin xxt = (rcmb + 1.35) * np.cos(trench) # arrow end yyt = (rcmb + 1.35) * np.sin(trench) # arrow end axis.annotate('', xy=(xxd, yyd), xytext=(xxt, yyt), arrowprops=dict(facecolor='red', shrink=0.05)) # depends on [control=['for'], data=['trench']] for ridge in ridges: xxd = (rcmb + 1.02) * np.cos(ridge) yyd = (rcmb + 1.02) * np.sin(ridge) xxt = (rcmb + 1.35) * np.cos(ridge) yyt = (rcmb + 1.35) * np.sin(ridge) axis.annotate('', xy=(xxd, yyd), xytext=(xxt, yyt), arrowprops=dict(facecolor='green', shrink=0.05)) # depends on [control=['for'], data=['ridge']]
def mark_failed_exc(self, e_val=None, e_ty=None): '''Marks the tracer as failed with the given exception :code:`e_val` of type :code:`e_ty` (defaults to the current exception). May only be called in the started state and only if the tracer is not already marked as failed. Note that this does not end the tracer! Once a tracer is marked as failed, attempts to do it again are forbidden. If possible, using the tracer as a context manager (i.e., with a :code:`with`-block) is more convenient than this method. If :code:`e_val` and :code:`e_ty` are both none, the current exception (as retured by :func:`sys.exc_info`) is used. :param BaseException e_val: The exception object that caused the failure. If :code:`None`, the current exception value (:code:`sys.exc_info()[1]`) is used. :param type e_ty: The type of the exception that caused the failure. If :code:`None` the type of :code:`e_val` is used. If that is also :code:`None`, the current exception type (:code:`sys.exc_info()[0]`) is used. ''' _error_from_exc(self.nsdk, self.handle, e_val, e_ty)
def function[mark_failed_exc, parameter[self, e_val, e_ty]]: constant[Marks the tracer as failed with the given exception :code:`e_val` of type :code:`e_ty` (defaults to the current exception). May only be called in the started state and only if the tracer is not already marked as failed. Note that this does not end the tracer! Once a tracer is marked as failed, attempts to do it again are forbidden. If possible, using the tracer as a context manager (i.e., with a :code:`with`-block) is more convenient than this method. If :code:`e_val` and :code:`e_ty` are both none, the current exception (as retured by :func:`sys.exc_info`) is used. :param BaseException e_val: The exception object that caused the failure. If :code:`None`, the current exception value (:code:`sys.exc_info()[1]`) is used. :param type e_ty: The type of the exception that caused the failure. If :code:`None` the type of :code:`e_val` is used. If that is also :code:`None`, the current exception type (:code:`sys.exc_info()[0]`) is used. ] call[name[_error_from_exc], parameter[name[self].nsdk, name[self].handle, name[e_val], name[e_ty]]]
keyword[def] identifier[mark_failed_exc] ( identifier[self] , identifier[e_val] = keyword[None] , identifier[e_ty] = keyword[None] ): literal[string] identifier[_error_from_exc] ( identifier[self] . identifier[nsdk] , identifier[self] . identifier[handle] , identifier[e_val] , identifier[e_ty] )
def mark_failed_exc(self, e_val=None, e_ty=None): """Marks the tracer as failed with the given exception :code:`e_val` of type :code:`e_ty` (defaults to the current exception). May only be called in the started state and only if the tracer is not already marked as failed. Note that this does not end the tracer! Once a tracer is marked as failed, attempts to do it again are forbidden. If possible, using the tracer as a context manager (i.e., with a :code:`with`-block) is more convenient than this method. If :code:`e_val` and :code:`e_ty` are both none, the current exception (as retured by :func:`sys.exc_info`) is used. :param BaseException e_val: The exception object that caused the failure. If :code:`None`, the current exception value (:code:`sys.exc_info()[1]`) is used. :param type e_ty: The type of the exception that caused the failure. If :code:`None` the type of :code:`e_val` is used. If that is also :code:`None`, the current exception type (:code:`sys.exc_info()[0]`) is used. """ _error_from_exc(self.nsdk, self.handle, e_val, e_ty)
def schema(self, schema): """Specifies the input schema. Some data sources (e.g. JSON) can infer the input schema automatically from data. By specifying the schema here, the underlying data source can skip the schema inference step, and thus speed up data loading. :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). >>> s = spark.read.schema("col0 INT, col1 DOUBLE") """ from pyspark.sql import SparkSession spark = SparkSession.builder.getOrCreate() if isinstance(schema, StructType): jschema = spark._jsparkSession.parseDataType(schema.json()) self._jreader = self._jreader.schema(jschema) elif isinstance(schema, basestring): self._jreader = self._jreader.schema(schema) else: raise TypeError("schema should be StructType or string") return self
def function[schema, parameter[self, schema]]: constant[Specifies the input schema. Some data sources (e.g. JSON) can infer the input schema automatically from data. By specifying the schema here, the underlying data source can skip the schema inference step, and thus speed up data loading. :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). >>> s = spark.read.schema("col0 INT, col1 DOUBLE") ] from relative_module[pyspark.sql] import module[SparkSession] variable[spark] assign[=] call[name[SparkSession].builder.getOrCreate, parameter[]] if call[name[isinstance], parameter[name[schema], name[StructType]]] begin[:] variable[jschema] assign[=] call[name[spark]._jsparkSession.parseDataType, parameter[call[name[schema].json, parameter[]]]] name[self]._jreader assign[=] call[name[self]._jreader.schema, parameter[name[jschema]]] return[name[self]]
keyword[def] identifier[schema] ( identifier[self] , identifier[schema] ): literal[string] keyword[from] identifier[pyspark] . identifier[sql] keyword[import] identifier[SparkSession] identifier[spark] = identifier[SparkSession] . identifier[builder] . identifier[getOrCreate] () keyword[if] identifier[isinstance] ( identifier[schema] , identifier[StructType] ): identifier[jschema] = identifier[spark] . identifier[_jsparkSession] . identifier[parseDataType] ( identifier[schema] . identifier[json] ()) identifier[self] . identifier[_jreader] = identifier[self] . identifier[_jreader] . identifier[schema] ( identifier[jschema] ) keyword[elif] identifier[isinstance] ( identifier[schema] , identifier[basestring] ): identifier[self] . identifier[_jreader] = identifier[self] . identifier[_jreader] . identifier[schema] ( identifier[schema] ) keyword[else] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[self]
def schema(self, schema): """Specifies the input schema. Some data sources (e.g. JSON) can infer the input schema automatically from data. By specifying the schema here, the underlying data source can skip the schema inference step, and thus speed up data loading. :param schema: a :class:`pyspark.sql.types.StructType` object or a DDL-formatted string (For example ``col0 INT, col1 DOUBLE``). >>> s = spark.read.schema("col0 INT, col1 DOUBLE") """ from pyspark.sql import SparkSession spark = SparkSession.builder.getOrCreate() if isinstance(schema, StructType): jschema = spark._jsparkSession.parseDataType(schema.json()) self._jreader = self._jreader.schema(jschema) # depends on [control=['if'], data=[]] elif isinstance(schema, basestring): self._jreader = self._jreader.schema(schema) # depends on [control=['if'], data=[]] else: raise TypeError('schema should be StructType or string') return self
def calc_tkor_v1(self): """Adjust the given air temperature values. Required control parameters: |NHRU| |KT| Required input sequence: |TemL| Calculated flux sequence: |TKor| Basic equation: :math:`TKor = KT + TemL` Example: >>> from hydpy.models.lland import * >>> parameterstep('1d') >>> nhru(3) >>> kt(-2.0, 0.0, 2.0) >>> inputs.teml(1.) >>> model.calc_tkor_v1() >>> fluxes.tkor tkor(-1.0, 1.0, 3.0) """ con = self.parameters.control.fastaccess inp = self.sequences.inputs.fastaccess flu = self.sequences.fluxes.fastaccess for k in range(con.nhru): flu.tkor[k] = con.kt[k] + inp.teml
def function[calc_tkor_v1, parameter[self]]: constant[Adjust the given air temperature values. Required control parameters: |NHRU| |KT| Required input sequence: |TemL| Calculated flux sequence: |TKor| Basic equation: :math:`TKor = KT + TemL` Example: >>> from hydpy.models.lland import * >>> parameterstep('1d') >>> nhru(3) >>> kt(-2.0, 0.0, 2.0) >>> inputs.teml(1.) >>> model.calc_tkor_v1() >>> fluxes.tkor tkor(-1.0, 1.0, 3.0) ] variable[con] assign[=] name[self].parameters.control.fastaccess variable[inp] assign[=] name[self].sequences.inputs.fastaccess variable[flu] assign[=] name[self].sequences.fluxes.fastaccess for taget[name[k]] in starred[call[name[range], parameter[name[con].nhru]]] begin[:] call[name[flu].tkor][name[k]] assign[=] binary_operation[call[name[con].kt][name[k]] + name[inp].teml]
keyword[def] identifier[calc_tkor_v1] ( identifier[self] ): literal[string] identifier[con] = identifier[self] . identifier[parameters] . identifier[control] . identifier[fastaccess] identifier[inp] = identifier[self] . identifier[sequences] . identifier[inputs] . identifier[fastaccess] identifier[flu] = identifier[self] . identifier[sequences] . identifier[fluxes] . identifier[fastaccess] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[con] . identifier[nhru] ): identifier[flu] . identifier[tkor] [ identifier[k] ]= identifier[con] . identifier[kt] [ identifier[k] ]+ identifier[inp] . identifier[teml]
def calc_tkor_v1(self): """Adjust the given air temperature values. Required control parameters: |NHRU| |KT| Required input sequence: |TemL| Calculated flux sequence: |TKor| Basic equation: :math:`TKor = KT + TemL` Example: >>> from hydpy.models.lland import * >>> parameterstep('1d') >>> nhru(3) >>> kt(-2.0, 0.0, 2.0) >>> inputs.teml(1.) >>> model.calc_tkor_v1() >>> fluxes.tkor tkor(-1.0, 1.0, 3.0) """ con = self.parameters.control.fastaccess inp = self.sequences.inputs.fastaccess flu = self.sequences.fluxes.fastaccess for k in range(con.nhru): flu.tkor[k] = con.kt[k] + inp.teml # depends on [control=['for'], data=['k']]
def hook(self, event_type='push'): """ Registers a function as a hook. Multiple hooks can be registered for a given type, but the order in which they are invoke is unspecified. :param event_type: The event type this hook will be invoked for. """ def decorator(func): self._hooks[event_type].append(func) return func return decorator
def function[hook, parameter[self, event_type]]: constant[ Registers a function as a hook. Multiple hooks can be registered for a given type, but the order in which they are invoke is unspecified. :param event_type: The event type this hook will be invoked for. ] def function[decorator, parameter[func]]: call[call[name[self]._hooks][name[event_type]].append, parameter[name[func]]] return[name[func]] return[name[decorator]]
keyword[def] identifier[hook] ( identifier[self] , identifier[event_type] = literal[string] ): literal[string] keyword[def] identifier[decorator] ( identifier[func] ): identifier[self] . identifier[_hooks] [ identifier[event_type] ]. identifier[append] ( identifier[func] ) keyword[return] identifier[func] keyword[return] identifier[decorator]
def hook(self, event_type='push'): """ Registers a function as a hook. Multiple hooks can be registered for a given type, but the order in which they are invoke is unspecified. :param event_type: The event type this hook will be invoked for. """ def decorator(func): self._hooks[event_type].append(func) return func return decorator
def check_accesspoints(sess): """ check the status of all connected access points """ ap_names = walk_data(sess, name_ap_oid, helper)[0] ap_operationals = walk_data(sess, operational_ap_oid, helper)[0] ap_availabilitys = walk_data(sess, availability_ap_oid, helper)[0] ap_alarms = walk_data(sess, alarm_ap_oid, helper)[0] #ap_ip = walk_data(sess, ip_ap_oid, helper) # no result helper.add_summary("Access Points Status") for x in range(len(ap_names)): ap_name = ap_names[x] ap_operational = ap_operationals[x] ap_availability = ap_availabilitys[x] ap_alarm = ap_alarms[x] # Add all states to the long output helper.add_long_output("%s - Operational: %s - Availabilty: %s - Alarm: %s" % (ap_name, operational_states[int(ap_operational)], availability_states[int(ap_availability)], alarm_states[int(ap_alarm)])) # Operational State if ap_operational != "1" and ap_operational != "4": helper.status(critical) helper.add_summary("%s Operational State: %s" % (ap_name, operational_states[int(ap_operational)])) # Avaiability State if ap_availability != "3": helper.status(critical) helper.add_summary("%s Availability State: %s" % (ap_name, availability_states[int(ap_availability)])) # Alarm State if ap_alarm == "2": helper.status(warning) helper.add_summary("%s Controller Alarm State: %s" % (ap_name, alarm_states[int(ap_alarm)])) if ap_alarm == "3" or ap_alarm == "4": helper.status(critical) helper.add_summary("%s Controller Alarm State: %s" % (ap_name, alarm_states[int(ap_alarm)]))
def function[check_accesspoints, parameter[sess]]: constant[ check the status of all connected access points ] variable[ap_names] assign[=] call[call[name[walk_data], parameter[name[sess], name[name_ap_oid], name[helper]]]][constant[0]] variable[ap_operationals] assign[=] call[call[name[walk_data], parameter[name[sess], name[operational_ap_oid], name[helper]]]][constant[0]] variable[ap_availabilitys] assign[=] call[call[name[walk_data], parameter[name[sess], name[availability_ap_oid], name[helper]]]][constant[0]] variable[ap_alarms] assign[=] call[call[name[walk_data], parameter[name[sess], name[alarm_ap_oid], name[helper]]]][constant[0]] call[name[helper].add_summary, parameter[constant[Access Points Status]]] for taget[name[x]] in starred[call[name[range], parameter[call[name[len], parameter[name[ap_names]]]]]] begin[:] variable[ap_name] assign[=] call[name[ap_names]][name[x]] variable[ap_operational] assign[=] call[name[ap_operationals]][name[x]] variable[ap_availability] assign[=] call[name[ap_availabilitys]][name[x]] variable[ap_alarm] assign[=] call[name[ap_alarms]][name[x]] call[name[helper].add_long_output, parameter[binary_operation[constant[%s - Operational: %s - Availabilty: %s - Alarm: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1be56c0>, <ast.Subscript object at 0x7da1b1be6d70>, <ast.Subscript object at 0x7da1b1be4100>, <ast.Subscript object at 0x7da1b1be6140>]]]]] if <ast.BoolOp object at 0x7da1b1be47c0> begin[:] call[name[helper].status, parameter[name[critical]]] call[name[helper].add_summary, parameter[binary_operation[constant[%s Operational State: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1be5420>, <ast.Subscript object at 0x7da1b1be4940>]]]]] if compare[name[ap_availability] not_equal[!=] constant[3]] begin[:] call[name[helper].status, parameter[name[critical]]] call[name[helper].add_summary, parameter[binary_operation[constant[%s Availability State: %s] <ast.Mod object at 
0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b04fd0>, <ast.Subscript object at 0x7da1b1b05030>]]]]] if compare[name[ap_alarm] equal[==] constant[2]] begin[:] call[name[helper].status, parameter[name[warning]]] call[name[helper].add_summary, parameter[binary_operation[constant[%s Controller Alarm State: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b040d0>, <ast.Subscript object at 0x7da1b1b04220>]]]]] if <ast.BoolOp object at 0x7da1b1b041c0> begin[:] call[name[helper].status, parameter[name[critical]]] call[name[helper].add_summary, parameter[binary_operation[constant[%s Controller Alarm State: %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b04ac0>, <ast.Subscript object at 0x7da1b1b04b80>]]]]]
keyword[def] identifier[check_accesspoints] ( identifier[sess] ): literal[string] identifier[ap_names] = identifier[walk_data] ( identifier[sess] , identifier[name_ap_oid] , identifier[helper] )[ literal[int] ] identifier[ap_operationals] = identifier[walk_data] ( identifier[sess] , identifier[operational_ap_oid] , identifier[helper] )[ literal[int] ] identifier[ap_availabilitys] = identifier[walk_data] ( identifier[sess] , identifier[availability_ap_oid] , identifier[helper] )[ literal[int] ] identifier[ap_alarms] = identifier[walk_data] ( identifier[sess] , identifier[alarm_ap_oid] , identifier[helper] )[ literal[int] ] identifier[helper] . identifier[add_summary] ( literal[string] ) keyword[for] identifier[x] keyword[in] identifier[range] ( identifier[len] ( identifier[ap_names] )): identifier[ap_name] = identifier[ap_names] [ identifier[x] ] identifier[ap_operational] = identifier[ap_operationals] [ identifier[x] ] identifier[ap_availability] = identifier[ap_availabilitys] [ identifier[x] ] identifier[ap_alarm] = identifier[ap_alarms] [ identifier[x] ] identifier[helper] . identifier[add_long_output] ( literal[string] %( identifier[ap_name] , identifier[operational_states] [ identifier[int] ( identifier[ap_operational] )], identifier[availability_states] [ identifier[int] ( identifier[ap_availability] )], identifier[alarm_states] [ identifier[int] ( identifier[ap_alarm] )])) keyword[if] identifier[ap_operational] != literal[string] keyword[and] identifier[ap_operational] != literal[string] : identifier[helper] . identifier[status] ( identifier[critical] ) identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[ap_name] , identifier[operational_states] [ identifier[int] ( identifier[ap_operational] )])) keyword[if] identifier[ap_availability] != literal[string] : identifier[helper] . identifier[status] ( identifier[critical] ) identifier[helper] . 
identifier[add_summary] ( literal[string] %( identifier[ap_name] , identifier[availability_states] [ identifier[int] ( identifier[ap_availability] )])) keyword[if] identifier[ap_alarm] == literal[string] : identifier[helper] . identifier[status] ( identifier[warning] ) identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[ap_name] , identifier[alarm_states] [ identifier[int] ( identifier[ap_alarm] )])) keyword[if] identifier[ap_alarm] == literal[string] keyword[or] identifier[ap_alarm] == literal[string] : identifier[helper] . identifier[status] ( identifier[critical] ) identifier[helper] . identifier[add_summary] ( literal[string] %( identifier[ap_name] , identifier[alarm_states] [ identifier[int] ( identifier[ap_alarm] )]))
def check_accesspoints(sess): """ check the status of all connected access points """ ap_names = walk_data(sess, name_ap_oid, helper)[0] ap_operationals = walk_data(sess, operational_ap_oid, helper)[0] ap_availabilitys = walk_data(sess, availability_ap_oid, helper)[0] ap_alarms = walk_data(sess, alarm_ap_oid, helper)[0] #ap_ip = walk_data(sess, ip_ap_oid, helper) # no result helper.add_summary('Access Points Status') for x in range(len(ap_names)): ap_name = ap_names[x] ap_operational = ap_operationals[x] ap_availability = ap_availabilitys[x] ap_alarm = ap_alarms[x] # Add all states to the long output helper.add_long_output('%s - Operational: %s - Availabilty: %s - Alarm: %s' % (ap_name, operational_states[int(ap_operational)], availability_states[int(ap_availability)], alarm_states[int(ap_alarm)])) # Operational State if ap_operational != '1' and ap_operational != '4': helper.status(critical) helper.add_summary('%s Operational State: %s' % (ap_name, operational_states[int(ap_operational)])) # depends on [control=['if'], data=[]] # Avaiability State if ap_availability != '3': helper.status(critical) helper.add_summary('%s Availability State: %s' % (ap_name, availability_states[int(ap_availability)])) # depends on [control=['if'], data=['ap_availability']] # Alarm State if ap_alarm == '2': helper.status(warning) helper.add_summary('%s Controller Alarm State: %s' % (ap_name, alarm_states[int(ap_alarm)])) # depends on [control=['if'], data=['ap_alarm']] if ap_alarm == '3' or ap_alarm == '4': helper.status(critical) helper.add_summary('%s Controller Alarm State: %s' % (ap_name, alarm_states[int(ap_alarm)])) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']]
def get_required_permissions(self, request):
    """Return the permission codenames required to access the object.

    ``permission_required`` may be a single ``'<app_label>.<codename>'``
    string or an iterable of such strings; an empty/unset value means no
    permissions are required. Any other type raises
    ``ImproperlyConfigured``. The ``request`` argument is accepted for
    interface compatibility and is not used here.
    """
    # Guard clauses: handle each accepted shape of permission_required
    # and return immediately.
    if not self.permission_required:
        return []
    if isinstance(self.permission_required, string_types):
        return [self.permission_required, ]
    if isinstance(self.permission_required, Iterable):
        return [perm for perm in self.permission_required]
    raise ImproperlyConfigured(
        '\'PermissionRequiredMixin\' requires \'permission_required\' '
        'attribute to be set to \'<app_label>.<permission codename>\' but is set to {} '
        'instead'.format(self.permission_required)
    )
def function[get_required_permissions, parameter[self, request]]: constant[ Returns the required permissions to access the considered object. ] variable[perms] assign[=] list[[]] if <ast.UnaryOp object at 0x7da20c7ca560> begin[:] return[name[perms]] if call[name[isinstance], parameter[name[self].permission_required, name[string_types]]] begin[:] variable[perms] assign[=] list[[<ast.Attribute object at 0x7da20c7caa40>]] return[name[perms]]
keyword[def] identifier[get_required_permissions] ( identifier[self] , identifier[request] ): literal[string] identifier[perms] =[] keyword[if] keyword[not] identifier[self] . identifier[permission_required] : keyword[return] identifier[perms] keyword[if] identifier[isinstance] ( identifier[self] . identifier[permission_required] , identifier[string_types] ): identifier[perms] =[ identifier[self] . identifier[permission_required] ,] keyword[elif] identifier[isinstance] ( identifier[self] . identifier[permission_required] , identifier[Iterable] ): identifier[perms] =[ identifier[perm] keyword[for] identifier[perm] keyword[in] identifier[self] . identifier[permission_required] ] keyword[else] : keyword[raise] identifier[ImproperlyConfigured] ( literal[string] literal[string] literal[string] . identifier[format] ( identifier[self] . identifier[permission_required] ) ) keyword[return] identifier[perms]
def get_required_permissions(self, request): """ Returns the required permissions to access the considered object. """ perms = [] if not self.permission_required: return perms # depends on [control=['if'], data=[]] if isinstance(self.permission_required, string_types): perms = [self.permission_required] # depends on [control=['if'], data=[]] elif isinstance(self.permission_required, Iterable): perms = [perm for perm in self.permission_required] # depends on [control=['if'], data=[]] else: raise ImproperlyConfigured("'PermissionRequiredMixin' requires 'permission_required' attribute to be set to '<app_label>.<permission codename>' but is set to {} instead".format(self.permission_required)) return perms
def resample(args):
    """
    %prog resample yellow-catfish-resample.txt medicago-resample.txt

    Plot ALLMAPS performance across resampled real data.
    """
    # NOTE: the docstring above doubles as the OptionParser usage text,
    # so its content is part of the runtime behavior.
    p = OptionParser(resample.__doc__)
    opts, args, iopts = p.set_image_options(args, figsize="8x4", dpi=300)

    if len(args) != 2:
        sys.exit(not p.print_help())

    # Load both resampling result tables up front.
    fileA, fileB = args
    dataA = import_data(fileA)
    dataB = import_data(fileB)

    # One full-figure root axis plus two twin-axis panels.
    fig = plt.figure(1, (iopts.w, iopts.h))
    root = fig.add_axes([0, 0, 1, 1])
    ax_left = fig.add_axes([.1, .18, .32, .64])
    ax_right = fig.add_axes([.6, .18, .32, .64])

    xlabel = "Fraction of markers"
    ylabels = ("Anchor rate", "Runtime (m)")
    legend = ("anchor rate", "runtime")
    subplot_twinx(ax_left, dataA, xlabel, ylabels,
                  title="Yellow catfish", legend=legend)
    subplot_twinx(ax_right, dataB, xlabel, ylabels,
                  title="Medicago", legend=legend)

    # Panel letters in the figure corners, then tidy up and save.
    panel_labels(root, ((.04, .92, "A"), (.54, .92, "B")))
    normalize_axes(root)

    image_name = "resample." + iopts.format
    savefig(image_name, dpi=iopts.dpi, iopts=iopts)
def function[resample, parameter[args]]: constant[ %prog resample yellow-catfish-resample.txt medicago-resample.txt Plot ALLMAPS performance across resampled real data. ] variable[p] assign[=] call[name[OptionParser], parameter[name[resample].__doc__]] <ast.Tuple object at 0x7da20e956bc0> assign[=] call[name[p].set_image_options, parameter[name[args]]] if compare[call[name[len], parameter[name[args]]] not_equal[!=] constant[2]] begin[:] call[name[sys].exit, parameter[<ast.UnaryOp object at 0x7da20e955060>]] <ast.Tuple object at 0x7da20e956ce0> assign[=] name[args] variable[fig] assign[=] call[name[plt].figure, parameter[constant[1], tuple[[<ast.Attribute object at 0x7da20e957ca0>, <ast.Attribute object at 0x7da1b08fc040>]]]] variable[root] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b084da80>, <ast.Constant object at 0x7da1b084d630>, <ast.Constant object at 0x7da1b084ead0>, <ast.Constant object at 0x7da1b084f040>]]]] variable[A] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b084d7e0>, <ast.Constant object at 0x7da1b084dcf0>, <ast.Constant object at 0x7da1b084d9f0>, <ast.Constant object at 0x7da1b084ed70>]]]] variable[B] assign[=] call[name[fig].add_axes, parameter[list[[<ast.Constant object at 0x7da1b084dc60>, <ast.Constant object at 0x7da1b084e980>, <ast.Constant object at 0x7da1b084d840>, <ast.Constant object at 0x7da1b084de10>]]]] variable[dataA] assign[=] call[name[import_data], parameter[name[dataA]]] variable[dataB] assign[=] call[name[import_data], parameter[name[dataB]]] variable[xlabel] assign[=] constant[Fraction of markers] variable[ylabels] assign[=] tuple[[<ast.Constant object at 0x7da1b084db70>, <ast.Constant object at 0x7da1b084e320>]] variable[legend] assign[=] tuple[[<ast.Constant object at 0x7da1b084eec0>, <ast.Constant object at 0x7da1b084dab0>]] call[name[subplot_twinx], parameter[name[A], name[dataA], name[xlabel], name[ylabels]]] call[name[subplot_twinx], 
parameter[name[B], name[dataB], name[xlabel], name[ylabels]]] variable[labels] assign[=] tuple[[<ast.Tuple object at 0x7da1b084d990>, <ast.Tuple object at 0x7da1b084ea10>]] call[name[panel_labels], parameter[name[root], name[labels]]] call[name[normalize_axes], parameter[name[root]]] variable[image_name] assign[=] binary_operation[constant[resample.] + name[iopts].format] call[name[savefig], parameter[name[image_name]]]
keyword[def] identifier[resample] ( identifier[args] ): literal[string] identifier[p] = identifier[OptionParser] ( identifier[resample] . identifier[__doc__] ) identifier[opts] , identifier[args] , identifier[iopts] = identifier[p] . identifier[set_image_options] ( identifier[args] , identifier[figsize] = literal[string] , identifier[dpi] = literal[int] ) keyword[if] identifier[len] ( identifier[args] )!= literal[int] : identifier[sys] . identifier[exit] ( keyword[not] identifier[p] . identifier[print_help] ()) identifier[dataA] , identifier[dataB] = identifier[args] identifier[fig] = identifier[plt] . identifier[figure] ( literal[int] ,( identifier[iopts] . identifier[w] , identifier[iopts] . identifier[h] )) identifier[root] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[A] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[B] = identifier[fig] . identifier[add_axes] ([ literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[dataA] = identifier[import_data] ( identifier[dataA] ) identifier[dataB] = identifier[import_data] ( identifier[dataB] ) identifier[xlabel] = literal[string] identifier[ylabels] =( literal[string] , literal[string] ) identifier[legend] =( literal[string] , literal[string] ) identifier[subplot_twinx] ( identifier[A] , identifier[dataA] , identifier[xlabel] , identifier[ylabels] , identifier[title] = literal[string] , identifier[legend] = identifier[legend] ) identifier[subplot_twinx] ( identifier[B] , identifier[dataB] , identifier[xlabel] , identifier[ylabels] , identifier[title] = literal[string] , identifier[legend] = identifier[legend] ) identifier[labels] =(( literal[int] , literal[int] , literal[string] ),( literal[int] , literal[int] , literal[string] )) identifier[panel_labels] ( identifier[root] , identifier[labels] ) identifier[normalize_axes] ( identifier[root] ) identifier[image_name] 
= literal[string] + identifier[iopts] . identifier[format] identifier[savefig] ( identifier[image_name] , identifier[dpi] = identifier[iopts] . identifier[dpi] , identifier[iopts] = identifier[iopts] )
def resample(args): """ %prog resample yellow-catfish-resample.txt medicago-resample.txt Plot ALLMAPS performance across resampled real data. """ p = OptionParser(resample.__doc__) (opts, args, iopts) = p.set_image_options(args, figsize='8x4', dpi=300) if len(args) != 2: sys.exit(not p.print_help()) # depends on [control=['if'], data=[]] (dataA, dataB) = args fig = plt.figure(1, (iopts.w, iopts.h)) root = fig.add_axes([0, 0, 1, 1]) A = fig.add_axes([0.1, 0.18, 0.32, 0.64]) B = fig.add_axes([0.6, 0.18, 0.32, 0.64]) dataA = import_data(dataA) dataB = import_data(dataB) xlabel = 'Fraction of markers' ylabels = ('Anchor rate', 'Runtime (m)') legend = ('anchor rate', 'runtime') subplot_twinx(A, dataA, xlabel, ylabels, title='Yellow catfish', legend=legend) subplot_twinx(B, dataB, xlabel, ylabels, title='Medicago', legend=legend) labels = ((0.04, 0.92, 'A'), (0.54, 0.92, 'B')) panel_labels(root, labels) normalize_axes(root) image_name = 'resample.' + iopts.format savefig(image_name, dpi=iopts.dpi, iopts=iopts)
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None):
    """Run sentiment, entity and syntax analysis in a single API call.

    Convenience wrapper combining what analyzeSentiment, analyzeEntities
    and analyzeSyntax each provide.

    :param document: Input document. If a dict is provided, it must be of
        the same form as the protobuf message Document.
    :type document: dict or google.cloud.language_v1.types.Document
    :param features: The enabled features. If a dict is provided, it must
        be of the same form as the protobuf message Features.
    :type features: dict or google.cloud.language_v1.enums.Features
    :param encoding_type: The encoding type used by the API to calculate
        offsets.
    :type encoding_type: google.cloud.language_v1.types.EncodingType
    :param retry: A retry object used to retry requests. If None is
        specified, requests will not be retried.
    :type retry: google.api_core.retry.Retry
    :param timeout: Time in seconds to wait for the request to complete;
        with a retry object it applies to each individual attempt.
    :type timeout: float
    :param metadata: Additional metadata that is provided to the method.
    :type metadata: sequence[tuple[str, str]]]
    :rtype: google.cloud.language_v1.types.AnnotateTextResponse
    """
    # Delegate directly to the underlying client; every argument is
    # forwarded unchanged.
    return self.get_conn().annotate_text(
        document=document,
        features=features,
        encoding_type=encoding_type,
        retry=retry,
        timeout=timeout,
        metadata=metadata,
    )
def function[annotate_text, parameter[self, document, features, encoding_type, retry, timeout, metadata]]: constant[ A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param features: The enabled features. If a dict is provided, it must be of the same form as the protobuf message Features :type features: dict or google.cloud.language_v1.enums.Features :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnnotateTextResponse ] variable[client] assign[=] call[name[self].get_conn, parameter[]] return[call[name[client].annotate_text, parameter[]]]
keyword[def] identifier[annotate_text] ( identifier[self] , identifier[document] , identifier[features] , identifier[encoding_type] = keyword[None] , identifier[retry] = keyword[None] , identifier[timeout] = keyword[None] , identifier[metadata] = keyword[None] ): literal[string] identifier[client] = identifier[self] . identifier[get_conn] () keyword[return] identifier[client] . identifier[annotate_text] ( identifier[document] = identifier[document] , identifier[features] = identifier[features] , identifier[encoding_type] = identifier[encoding_type] , identifier[retry] = identifier[retry] , identifier[timeout] = identifier[timeout] , identifier[metadata] = identifier[metadata] , )
def annotate_text(self, document, features, encoding_type=None, retry=None, timeout=None, metadata=None): """ A convenience method that provides all the features that analyzeSentiment, analyzeEntities, and analyzeSyntax provide in one call. :param document: Input document. If a dict is provided, it must be of the same form as the protobuf message Document :type document: dict or google.cloud.language_v1.types.Document :param features: The enabled features. If a dict is provided, it must be of the same form as the protobuf message Features :type features: dict or google.cloud.language_v1.enums.Features :param encoding_type: The encoding type used by the API to calculate offsets. :type encoding_type: google.cloud.language_v1.types.EncodingType :param retry: A retry object used to retry requests. If None is specified, requests will not be retried. :type retry: google.api_core.retry.Retry :param timeout: The amount of time, in seconds, to wait for the request to complete. Note that if retry is specified, the timeout applies to each individual attempt. :type timeout: float :param metadata: Additional metadata that is provided to the method. :type metadata: sequence[tuple[str, str]]] :rtype: google.cloud.language_v1.types.AnnotateTextResponse """ client = self.get_conn() return client.annotate_text(document=document, features=features, encoding_type=encoding_type, retry=retry, timeout=timeout, metadata=metadata)
def conv_block(inputs, filters, dilation_rates_and_kernel_sizes, **kwargs):
  """A block of standard 2d convolutions.

  Thin wrapper: forwards every argument unchanged to
  ``conv_block_internal`` with the plain 2-D ``conv`` function as the
  convolution op.

  Args:
    inputs: input tensor (layout/rank conventions are defined by
      ``conv_block_internal`` — presumably NHWC; confirm there).
    filters: number of output filters.
    dilation_rates_and_kernel_sizes: per-convolution settings — presumably
      an iterable of (dilation_rate, kernel_size) pairs, one per conv in
      the block; TODO confirm against ``conv_block_internal``.
    **kwargs: extra keyword arguments passed straight through.

  Returns:
    Whatever ``conv_block_internal`` returns (the block's output tensor).
  """
  return conv_block_internal(conv, inputs, filters,
                             dilation_rates_and_kernel_sizes, **kwargs)
def function[conv_block, parameter[inputs, filters, dilation_rates_and_kernel_sizes]]: constant[A block of standard 2d convolutions.] return[call[name[conv_block_internal], parameter[name[conv], name[inputs], name[filters], name[dilation_rates_and_kernel_sizes]]]]
keyword[def] identifier[conv_block] ( identifier[inputs] , identifier[filters] , identifier[dilation_rates_and_kernel_sizes] ,** identifier[kwargs] ): literal[string] keyword[return] identifier[conv_block_internal] ( identifier[conv] , identifier[inputs] , identifier[filters] , identifier[dilation_rates_and_kernel_sizes] ,** identifier[kwargs] )
def conv_block(inputs, filters, dilation_rates_and_kernel_sizes, **kwargs): """A block of standard 2d convolutions.""" return conv_block_internal(conv, inputs, filters, dilation_rates_and_kernel_sizes, **kwargs)
def makePacket(ID, instr, reg=None, params=None):
    """
    Build a generic Dynamixel Protocol 2.0 packet.

    TODO: look a struct ... does that add value using it?

    Layout:
        [0xFF, 0xFF, 0xFD, 0x00, ID, LEN_L, LEN_H, INST,
         PARAM 1, PARAM 2, ..., PARAM N, CRC_L, CRC_H]

    in:
        ID - servo id
        instr - instruction
        reg - register address (optional; address 0 is valid)
        params - instruction parameter values (optional)
    out:
        packet as a list of ints
    """
    pkt = []
    pkt += [0xFF, 0xFF, 0xFD]  # header
    pkt += [0x00]              # reserved byte
    pkt += [ID]
    pkt += [0x00, 0x00]        # length placeholder, patched below
    pkt += [instr]             # instruction
    # Bug fix: use explicit None checks. Register address 0 is falsy, so
    # the previous `if reg:` silently dropped a valid register from the
    # packet; `if params:` had the same hazard for falsy parameter values.
    if reg is not None:
        pkt += le(reg)         # not everything has a register
    if params is not None:
        pkt += params          # not everything has parameters
    # length = len(packet) - (header(3), reserve(1), id(1))
    length = le(len(pkt) - 5)
    pkt[5] = length[0]  # L
    pkt[6] = length[1]  # H
    crc = crc16(pkt)
    pkt += le(crc)
    return pkt
def function[makePacket, parameter[ID, instr, reg, params]]: constant[ This makes a generic packet. TODO: look a struct ... does that add value using it? 0xFF, 0xFF, 0xFD, 0x00, ID, LEN_L, LEN_H, INST, PARAM 1, PARAM 2, ..., PARAM N, CRC_L, CRC_H] in: ID - servo id instr - instruction reg - register params - instruction parameter values out: packet ] variable[pkt] assign[=] list[[]] <ast.AugAssign object at 0x7da1b1bb8d00> <ast.AugAssign object at 0x7da1b1bb8610> <ast.AugAssign object at 0x7da20c76fe20> <ast.AugAssign object at 0x7da2044c2380> <ast.AugAssign object at 0x7da2044c33d0> if name[reg] begin[:] <ast.AugAssign object at 0x7da1b1be71c0> if name[params] begin[:] <ast.AugAssign object at 0x7da1b1a3c910> variable[length] assign[=] call[name[le], parameter[binary_operation[call[name[len], parameter[name[pkt]]] - constant[5]]]] call[name[pkt]][constant[5]] assign[=] call[name[length]][constant[0]] call[name[pkt]][constant[6]] assign[=] call[name[length]][constant[1]] variable[crc] assign[=] call[name[crc16], parameter[name[pkt]]] <ast.AugAssign object at 0x7da1b1a3efb0> return[name[pkt]]
keyword[def] identifier[makePacket] ( identifier[ID] , identifier[instr] , identifier[reg] = keyword[None] , identifier[params] = keyword[None] ): literal[string] identifier[pkt] =[] identifier[pkt] +=[ literal[int] , literal[int] , literal[int] ] identifier[pkt] +=[ literal[int] ] identifier[pkt] +=[ identifier[ID] ] identifier[pkt] +=[ literal[int] , literal[int] ] identifier[pkt] +=[ identifier[instr] ] keyword[if] identifier[reg] : identifier[pkt] += identifier[le] ( identifier[reg] ) keyword[if] identifier[params] : identifier[pkt] += identifier[params] identifier[length] = identifier[le] ( identifier[len] ( identifier[pkt] )- literal[int] ) identifier[pkt] [ literal[int] ]= identifier[length] [ literal[int] ] identifier[pkt] [ literal[int] ]= identifier[length] [ literal[int] ] identifier[crc] = identifier[crc16] ( identifier[pkt] ) identifier[pkt] += identifier[le] ( identifier[crc] ) keyword[return] identifier[pkt]
def makePacket(ID, instr, reg=None, params=None): """ This makes a generic packet. TODO: look a struct ... does that add value using it? 0xFF, 0xFF, 0xFD, 0x00, ID, LEN_L, LEN_H, INST, PARAM 1, PARAM 2, ..., PARAM N, CRC_L, CRC_H] in: ID - servo id instr - instruction reg - register params - instruction parameter values out: packet """ pkt = [] pkt += [255, 255, 253] # header pkt += [0] # reserved byte pkt += [ID] pkt += [0, 0] # length placeholder pkt += [instr] # instruction if reg: pkt += le(reg) # not everything has a register # depends on [control=['if'], data=[]] if params: pkt += params # not everything has parameters # depends on [control=['if'], data=[]] length = le(len(pkt) - 5) # length = len(packet) - (header(3), reserve(1), id(1)) pkt[5] = length[0] # L pkt[6] = length[1] # H crc = crc16(pkt) pkt += le(crc) return pkt
def fallbacks(enable=True):
    """
    Temporarily switch all language fallbacks on or off.

    Example:
        with fallbacks(False):
            lang_has_slug = bool(self.slug)

    Useful to enable fallbacks only when needed (saving some processing),
    or to check whether a value exists for the current language without
    knowing which language that is.
    """
    # Remember the current setting, override it, and guarantee it is
    # restored even if the body of the `with` block raises.
    previous = settings.ENABLE_FALLBACKS
    settings.ENABLE_FALLBACKS = enable
    try:
        yield
    finally:
        settings.ENABLE_FALLBACKS = previous
def function[fallbacks, parameter[enable]]: constant[ Temporarily switch all language fallbacks on or off. Example: with fallbacks(False): lang_has_slug = bool(self.slug) May be used to enable fallbacks just when they're needed saving on some processing or check if there is a value for the current language (not knowing the language) ] variable[current_enable_fallbacks] assign[=] name[settings].ENABLE_FALLBACKS name[settings].ENABLE_FALLBACKS assign[=] name[enable] <ast.Try object at 0x7da1b1d55240>
keyword[def] identifier[fallbacks] ( identifier[enable] = keyword[True] ): literal[string] identifier[current_enable_fallbacks] = identifier[settings] . identifier[ENABLE_FALLBACKS] identifier[settings] . identifier[ENABLE_FALLBACKS] = identifier[enable] keyword[try] : keyword[yield] keyword[finally] : identifier[settings] . identifier[ENABLE_FALLBACKS] = identifier[current_enable_fallbacks]
def fallbacks(enable=True): """ Temporarily switch all language fallbacks on or off. Example: with fallbacks(False): lang_has_slug = bool(self.slug) May be used to enable fallbacks just when they're needed saving on some processing or check if there is a value for the current language (not knowing the language) """ current_enable_fallbacks = settings.ENABLE_FALLBACKS settings.ENABLE_FALLBACKS = enable try: yield # depends on [control=['try'], data=[]] finally: settings.ENABLE_FALLBACKS = current_enable_fallbacks
def query_api(self, q, s, g, **kwargs):  # noqa: E501
    """Perform a charting query against Wavefront servers.

    Returns the appropriate points in the specified time window and
    granularity; long time spans and small granularities can take a long
    time to calculate.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.query_api(q, s, g, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str q: the query expression to execute (required)
    :param str s: the start time of the query window in epoch milliseconds (required)
    :param str g: the granularity of the points returned (required)
    :param str n: name used to identify the query
    :param str e: the end time of the query window in epoch milliseconds (null to use now)
    :param str p: the approximate maximum number of points to return (may not limit number of points exactly)
    :param bool i: whether series with only points that are outside of the query window will be returned (defaults to true)
    :param bool auto_events: whether events for sources included in the query will be automatically returned by the query
    :param str summarization: summarization strategy to use when bucketing points together
    :param bool list_mode: retrieve events more optimally displayed for a list
    :param bool strict: do not return points outside the query window [s;e), defaults to false
    :param bool include_obsolete_metrics: include metrics that have not been reporting recently, defaults to false
    :param bool sorted: sorts the output so that returned series are in order, defaults to false
    :param bool cached: whether the query cache is used, defaults to true
    :return: QueryResult
        If the method is called asynchronously,
        returns the request thread.
    """
    # Callers of this convenience wrapper always want just the data, not
    # the full (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async_req'):
        # Asynchronous mode: hand back the request thread immediately.
        return self.query_api_with_http_info(q, s, g, **kwargs)  # noqa: E501
    # Synchronous mode: the call blocks and yields the data directly.
    return self.query_api_with_http_info(q, s, g, **kwargs)  # noqa: E501
def function[query_api, parameter[self, q, s, g]]: constant[Perform a charting query against Wavefront servers that returns the appropriate points in the specified time window and granularity # noqa: E501 Long time spans and small granularities can take a long time to calculate # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.query_api(q, s, g, async_req=True) >>> result = thread.get() :param async_req bool :param str q: the query expression to execute (required) :param str s: the start time of the query window in epoch milliseconds (required) :param str g: the granularity of the points returned (required) :param str n: name used to identify the query :param str e: the end time of the query window in epoch milliseconds (null to use now) :param str p: the approximate maximum number of points to return (may not limit number of points exactly) :param bool i: whether series with only points that are outside of the query window will be returned (defaults to true) :param bool auto_events: whether events for sources included in the query will be automatically returned by the query :param str summarization: summarization strategy to use when bucketing points together :param bool list_mode: retrieve events more optimally displayed for a list :param bool strict: do not return points outside the query window [s;e), defaults to false :param bool include_obsolete_metrics: include metrics that have not been reporting recently, defaults to false :param bool sorted: sorts the output so that returned series are in order, defaults to false :param bool cached: whether the query cache is used, defaults to true :return: QueryResult If the method is called asynchronously, returns the request thread. 
] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].query_api_with_http_info, parameter[name[q], name[s], name[g]]]]
keyword[def] identifier[query_api] ( identifier[self] , identifier[q] , identifier[s] , identifier[g] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[query_api_with_http_info] ( identifier[q] , identifier[s] , identifier[g] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[query_api_with_http_info] ( identifier[q] , identifier[s] , identifier[g] ,** identifier[kwargs] ) keyword[return] identifier[data]
def query_api(self, q, s, g, **kwargs): # noqa: E501 'Perform a charting query against Wavefront servers that returns the appropriate points in the specified time window and granularity # noqa: E501\n\n Long time spans and small granularities can take a long time to calculate # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.query_api(q, s, g, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str q: the query expression to execute (required)\n :param str s: the start time of the query window in epoch milliseconds (required)\n :param str g: the granularity of the points returned (required)\n :param str n: name used to identify the query\n :param str e: the end time of the query window in epoch milliseconds (null to use now)\n :param str p: the approximate maximum number of points to return (may not limit number of points exactly)\n :param bool i: whether series with only points that are outside of the query window will be returned (defaults to true)\n :param bool auto_events: whether events for sources included in the query will be automatically returned by the query\n :param str summarization: summarization strategy to use when bucketing points together\n :param bool list_mode: retrieve events more optimally displayed for a list\n :param bool strict: do not return points outside the query window [s;e), defaults to false\n :param bool include_obsolete_metrics: include metrics that have not been reporting recently, defaults to false\n :param bool sorted: sorts the output so that returned series are in order, defaults to false\n :param bool cached: whether the query cache is used, defaults to true\n :return: QueryResult\n If the method is called asynchronously,\n returns the request thread.\n ' kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.query_api_with_http_info(q, s, g, **kwargs) # noqa: E501 # 
depends on [control=['if'], data=[]] else: data = self.query_api_with_http_info(q, s, g, **kwargs) # noqa: E501 return data
def gen_outputs(self, riskinput, monitor, epspath=None, hazard=None):
    """
    Group the assets per taxonomy and compute the outputs by using the
    underlying riskmodels. Yield one output per realization.

    :param riskinput: a RiskInput instance
    :param monitor: a monitor object used to measure the performance
    :param epspath: path to precomputed epsilons, forwarded to
        get_assets_by_taxo (presumably an HDF5 file — TODO confirm)
    :param hazard: precomputed hazard dictionary; when None the hazard is
        read through riskinput's hazard getter
    """
    self.monitor = monitor
    hazard_getter = riskinput.hazard_getter
    if hazard is None:
        # Lazy path: only initialize the getter and read hazard when the
        # caller did not supply it; the read is timed separately from the
        # risk computation below.
        with monitor('getting hazard'):
            hazard_getter.init()
            hazard = hazard_getter.get_hazard()
    sids = hazard_getter.sids
    # Exactly one site per RiskInput is expected here (hazard[sids[0]]
    # below relies on it).
    assert len(sids) == 1
    with monitor('computing risk', measuremem=False):
        # this approach is slow for event_based_risk since a lot of
        # small arrays are passed (one per realization) instead of
        # a long array with all realizations; ebrisk does the right
        # thing since it calls get_output directly
        assets_by_taxo = get_assets_by_taxo(riskinput.assets, epspath)
        # sorted() makes the per-realization outputs come out in a
        # deterministic rlzi order.
        for rlzi, haz in sorted(hazard[sids[0]].items()):
            out = self.get_output(assets_by_taxo, haz, rlzi)
            yield out
def function[gen_outputs, parameter[self, riskinput, monitor, epspath, hazard]]: constant[ Group the assets per taxonomy and compute the outputs by using the underlying riskmodels. Yield one output per realization. :param riskinput: a RiskInput instance :param monitor: a monitor object used to measure the performance ] name[self].monitor assign[=] name[monitor] variable[hazard_getter] assign[=] name[riskinput].hazard_getter if compare[name[hazard] is constant[None]] begin[:] with call[name[monitor], parameter[constant[getting hazard]]] begin[:] call[name[hazard_getter].init, parameter[]] variable[hazard] assign[=] call[name[hazard_getter].get_hazard, parameter[]] variable[sids] assign[=] name[hazard_getter].sids assert[compare[call[name[len], parameter[name[sids]]] equal[==] constant[1]]] with call[name[monitor], parameter[constant[computing risk]]] begin[:] variable[assets_by_taxo] assign[=] call[name[get_assets_by_taxo], parameter[name[riskinput].assets, name[epspath]]] for taget[tuple[[<ast.Name object at 0x7da20c993940>, <ast.Name object at 0x7da20c991fc0>]]] in starred[call[name[sorted], parameter[call[call[name[hazard]][call[name[sids]][constant[0]]].items, parameter[]]]]] begin[:] variable[out] assign[=] call[name[self].get_output, parameter[name[assets_by_taxo], name[haz], name[rlzi]]] <ast.Yield object at 0x7da20c993130>
keyword[def] identifier[gen_outputs] ( identifier[self] , identifier[riskinput] , identifier[monitor] , identifier[epspath] = keyword[None] , identifier[hazard] = keyword[None] ): literal[string] identifier[self] . identifier[monitor] = identifier[monitor] identifier[hazard_getter] = identifier[riskinput] . identifier[hazard_getter] keyword[if] identifier[hazard] keyword[is] keyword[None] : keyword[with] identifier[monitor] ( literal[string] ): identifier[hazard_getter] . identifier[init] () identifier[hazard] = identifier[hazard_getter] . identifier[get_hazard] () identifier[sids] = identifier[hazard_getter] . identifier[sids] keyword[assert] identifier[len] ( identifier[sids] )== literal[int] keyword[with] identifier[monitor] ( literal[string] , identifier[measuremem] = keyword[False] ): identifier[assets_by_taxo] = identifier[get_assets_by_taxo] ( identifier[riskinput] . identifier[assets] , identifier[epspath] ) keyword[for] identifier[rlzi] , identifier[haz] keyword[in] identifier[sorted] ( identifier[hazard] [ identifier[sids] [ literal[int] ]]. identifier[items] ()): identifier[out] = identifier[self] . identifier[get_output] ( identifier[assets_by_taxo] , identifier[haz] , identifier[rlzi] ) keyword[yield] identifier[out]
def gen_outputs(self, riskinput, monitor, epspath=None, hazard=None): """ Group the assets per taxonomy and compute the outputs by using the underlying riskmodels. Yield one output per realization. :param riskinput: a RiskInput instance :param monitor: a monitor object used to measure the performance """ self.monitor = monitor hazard_getter = riskinput.hazard_getter if hazard is None: with monitor('getting hazard'): hazard_getter.init() hazard = hazard_getter.get_hazard() # depends on [control=['with'], data=[]] # depends on [control=['if'], data=['hazard']] sids = hazard_getter.sids assert len(sids) == 1 with monitor('computing risk', measuremem=False): # this approach is slow for event_based_risk since a lot of # small arrays are passed (one per realization) instead of # a long array with all realizations; ebrisk does the right # thing since it calls get_output directly assets_by_taxo = get_assets_by_taxo(riskinput.assets, epspath) for (rlzi, haz) in sorted(hazard[sids[0]].items()): out = self.get_output(assets_by_taxo, haz, rlzi) yield out # depends on [control=['for'], data=[]] # depends on [control=['with'], data=[]]
def get_key(self, key, target='in'):
    """Return the name used for *key* in the current docstring style.

    e.g.: in javadoc style, the returned key for 'param' is '@param'

    :param key: the key wanted (param, type, return, rtype,..)
    :param target: 'in' for the input docstring or 'out' for the output
        docstring to generate; any other value is treated as 'in'.
        (Default value = 'in')
    """
    # Normalise the direction flag: anything that is not 'out' means 'in'.
    if target != 'out':
        target = 'in'
    style_name = self.style[target]
    return self.opt[key][style_name]['name']
def function[get_key, parameter[self, key, target]]: constant[Get the name of a key in current style. e.g.: in javadoc style, the returned key for 'param' is '@param' :param key: the key wanted (param, type, return, rtype,..) :param target: the target docstring is 'in' for the input or 'out' for the output to generate. (Default value = 'in') ] variable[target] assign[=] <ast.IfExp object at 0x7da1b12aa1d0> return[call[call[call[name[self].opt][name[key]]][call[name[self].style][name[target]]]][constant[name]]]
keyword[def] identifier[get_key] ( identifier[self] , identifier[key] , identifier[target] = literal[string] ): literal[string] identifier[target] = literal[string] keyword[if] identifier[target] == literal[string] keyword[else] literal[string] keyword[return] identifier[self] . identifier[opt] [ identifier[key] ][ identifier[self] . identifier[style] [ identifier[target] ]][ literal[string] ]
def get_key(self, key, target='in'): """Get the name of a key in current style. e.g.: in javadoc style, the returned key for 'param' is '@param' :param key: the key wanted (param, type, return, rtype,..) :param target: the target docstring is 'in' for the input or 'out' for the output to generate. (Default value = 'in') """ target = 'out' if target == 'out' else 'in' return self.opt[key][self.style[target]]['name']
def parse_game_event(self, event):
    """
    So CSVCMsg_GameEventList is a list of all events that can happen.
    A game event has an eventid which maps to a type of event that
    happened; eventids with no registered type are silently ignored.
    """
    if event.eventid not in self.event_lookup:
        return
    descriptor = self.event_lookup[event.eventid]

    # Repackage the raw message into a friendlier GameEvent object,
    # resolving each key's value through its declared data type.
    game_event = GameEvent(descriptor.name)
    for index, raw_key in enumerate(event.keys):
        field = descriptor.keys[index]
        game_event.keys[field.name] = getattr(raw_key, KEY_DATA_TYPES[raw_key.type])

    self.debug("|==========> %s" % (game_event, ))
    self.run_hooks(game_event)
def function[parse_game_event, parameter[self, event]]: constant[ So CSVCMsg_GameEventList is a list of all events that can happen. A game event has an eventid which maps to a type of event that happened ] if compare[name[event].eventid in name[self].event_lookup] begin[:] variable[event_type] assign[=] call[name[self].event_lookup][name[event].eventid] variable[ge] assign[=] call[name[GameEvent], parameter[name[event_type].name]] for taget[tuple[[<ast.Name object at 0x7da18ede6260>, <ast.Name object at 0x7da18ede4220>]]] in starred[call[name[enumerate], parameter[name[event].keys]]] begin[:] variable[key_type] assign[=] call[name[event_type].keys][name[i]] call[name[ge].keys][name[key_type].name] assign[=] call[name[getattr], parameter[name[key], call[name[KEY_DATA_TYPES]][name[key].type]]] call[name[self].debug, parameter[binary_operation[constant[|==========> %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18ede7b80>]]]]] call[name[self].run_hooks, parameter[name[ge]]]
keyword[def] identifier[parse_game_event] ( identifier[self] , identifier[event] ): literal[string] keyword[if] identifier[event] . identifier[eventid] keyword[in] identifier[self] . identifier[event_lookup] : identifier[event_type] = identifier[self] . identifier[event_lookup] [ identifier[event] . identifier[eventid] ] identifier[ge] = identifier[GameEvent] ( identifier[event_type] . identifier[name] ) keyword[for] identifier[i] , identifier[key] keyword[in] identifier[enumerate] ( identifier[event] . identifier[keys] ): identifier[key_type] = identifier[event_type] . identifier[keys] [ identifier[i] ] identifier[ge] . identifier[keys] [ identifier[key_type] . identifier[name] ]= identifier[getattr] ( identifier[key] , identifier[KEY_DATA_TYPES] [ identifier[key] . identifier[type] ]) identifier[self] . identifier[debug] ( literal[string] %( identifier[ge] ,)) identifier[self] . identifier[run_hooks] ( identifier[ge] )
def parse_game_event(self, event): """ So CSVCMsg_GameEventList is a list of all events that can happen. A game event has an eventid which maps to a type of event that happened """ if event.eventid in self.event_lookup: #Bash this into a nicer data format to work with event_type = self.event_lookup[event.eventid] ge = GameEvent(event_type.name) for (i, key) in enumerate(event.keys): key_type = event_type.keys[i] ge.keys[key_type.name] = getattr(key, KEY_DATA_TYPES[key.type]) # depends on [control=['for'], data=[]] self.debug('|==========> %s' % (ge,)) self.run_hooks(ge) # depends on [control=['if'], data=[]]
def create_asset(self, asset_form): """Creates a new ``Asset``. arg: asset_form (osid.repository.AssetForm): the form for this ``Asset`` return: (osid.repository.Asset) - the new ``Asset`` raise: IllegalState - ``asset_form`` already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - ``asset_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``asset_form`` did not originate from ``get_asset_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceAdminSession.create_resource_template collection = JSONClientValidated('repository', collection='Asset', runtime=self._runtime) if not isinstance(asset_form, ABCAssetForm): raise errors.InvalidArgument('argument type is not an AssetForm') if asset_form.is_for_update(): raise errors.InvalidArgument('the AssetForm is for update only, not create') try: if self._forms[asset_form.get_id().get_identifier()] == CREATED: raise errors.IllegalState('asset_form already used in a create transaction') except KeyError: raise errors.Unsupported('asset_form did not originate from this session') if not asset_form.is_valid(): raise errors.InvalidArgument('one or more of the form elements is invalid') insert_result = collection.insert_one(asset_form._my_map) self._forms[asset_form.get_id().get_identifier()] = CREATED result = objects.Asset( osid_object_map=collection.find_one({'_id': insert_result.inserted_id}), runtime=self._runtime, proxy=self._proxy) return result
def function[create_asset, parameter[self, asset_form]]: constant[Creates a new ``Asset``. arg: asset_form (osid.repository.AssetForm): the form for this ``Asset`` return: (osid.repository.Asset) - the new ``Asset`` raise: IllegalState - ``asset_form`` already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - ``asset_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``asset_form`` did not originate from ``get_asset_form_for_create()`` *compliance: mandatory -- This method must be implemented.* ] variable[collection] assign[=] call[name[JSONClientValidated], parameter[constant[repository]]] if <ast.UnaryOp object at 0x7da20c7966e0> begin[:] <ast.Raise object at 0x7da20c794370> if call[name[asset_form].is_for_update, parameter[]] begin[:] <ast.Raise object at 0x7da20c794a00> <ast.Try object at 0x7da20c796a10> if <ast.UnaryOp object at 0x7da1b092d630> begin[:] <ast.Raise object at 0x7da1b092e5c0> variable[insert_result] assign[=] call[name[collection].insert_one, parameter[name[asset_form]._my_map]] call[name[self]._forms][call[call[name[asset_form].get_id, parameter[]].get_identifier, parameter[]]] assign[=] name[CREATED] variable[result] assign[=] call[name[objects].Asset, parameter[]] return[name[result]]
keyword[def] identifier[create_asset] ( identifier[self] , identifier[asset_form] ): literal[string] identifier[collection] = identifier[JSONClientValidated] ( literal[string] , identifier[collection] = literal[string] , identifier[runtime] = identifier[self] . identifier[_runtime] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[asset_form] , identifier[ABCAssetForm] ): keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] ) keyword[if] identifier[asset_form] . identifier[is_for_update] (): keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] ) keyword[try] : keyword[if] identifier[self] . identifier[_forms] [ identifier[asset_form] . identifier[get_id] (). identifier[get_identifier] ()]== identifier[CREATED] : keyword[raise] identifier[errors] . identifier[IllegalState] ( literal[string] ) keyword[except] identifier[KeyError] : keyword[raise] identifier[errors] . identifier[Unsupported] ( literal[string] ) keyword[if] keyword[not] identifier[asset_form] . identifier[is_valid] (): keyword[raise] identifier[errors] . identifier[InvalidArgument] ( literal[string] ) identifier[insert_result] = identifier[collection] . identifier[insert_one] ( identifier[asset_form] . identifier[_my_map] ) identifier[self] . identifier[_forms] [ identifier[asset_form] . identifier[get_id] (). identifier[get_identifier] ()]= identifier[CREATED] identifier[result] = identifier[objects] . identifier[Asset] ( identifier[osid_object_map] = identifier[collection] . identifier[find_one] ({ literal[string] : identifier[insert_result] . identifier[inserted_id] }), identifier[runtime] = identifier[self] . identifier[_runtime] , identifier[proxy] = identifier[self] . identifier[_proxy] ) keyword[return] identifier[result]
def create_asset(self, asset_form): """Creates a new ``Asset``. arg: asset_form (osid.repository.AssetForm): the form for this ``Asset`` return: (osid.repository.Asset) - the new ``Asset`` raise: IllegalState - ``asset_form`` already used in a create transaction raise: InvalidArgument - one or more of the form elements is invalid raise: NullArgument - ``asset_form`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure raise: Unsupported - ``asset_form`` did not originate from ``get_asset_form_for_create()`` *compliance: mandatory -- This method must be implemented.* """ # Implemented from template for # osid.resource.ResourceAdminSession.create_resource_template collection = JSONClientValidated('repository', collection='Asset', runtime=self._runtime) if not isinstance(asset_form, ABCAssetForm): raise errors.InvalidArgument('argument type is not an AssetForm') # depends on [control=['if'], data=[]] if asset_form.is_for_update(): raise errors.InvalidArgument('the AssetForm is for update only, not create') # depends on [control=['if'], data=[]] try: if self._forms[asset_form.get_id().get_identifier()] == CREATED: raise errors.IllegalState('asset_form already used in a create transaction') # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except KeyError: raise errors.Unsupported('asset_form did not originate from this session') # depends on [control=['except'], data=[]] if not asset_form.is_valid(): raise errors.InvalidArgument('one or more of the form elements is invalid') # depends on [control=['if'], data=[]] insert_result = collection.insert_one(asset_form._my_map) self._forms[asset_form.get_id().get_identifier()] = CREATED result = objects.Asset(osid_object_map=collection.find_one({'_id': insert_result.inserted_id}), runtime=self._runtime, proxy=self._proxy) return result
def check_subtype_integrity(m, super_kind, rel_id):
    '''
    Check the model for integrity violations across a subtype association.

    Returns the number of supertype instances that have no subtype
    instance across *rel_id*. A bare integer rel_id is normalised to
    the 'R<n>' form first.
    '''
    if isinstance(rel_id, int):
        rel_id = 'R%d' % rel_id

    violation_count = 0
    for instance in m.select_many(super_kind):
        if xtuml.navigate_subtype(instance, rel_id):
            continue
        violation_count += 1
        logger.warning('integrity violation across '
                       '%s[%s]' % (super_kind, rel_id))

    return violation_count
def function[check_subtype_integrity, parameter[m, super_kind, rel_id]]: constant[ Check the model for integrity violations across a subtype association. ] if call[name[isinstance], parameter[name[rel_id], name[int]]] begin[:] variable[rel_id] assign[=] binary_operation[constant[R%d] <ast.Mod object at 0x7da2590d6920> name[rel_id]] variable[res] assign[=] constant[0] for taget[name[inst]] in starred[call[name[m].select_many, parameter[name[super_kind]]]] begin[:] if <ast.UnaryOp object at 0x7da1b01a5db0> begin[:] <ast.AugAssign object at 0x7da20e957df0> call[name[logger].warning, parameter[binary_operation[constant[integrity violation across %s[%s]] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da20e957940>, <ast.Name object at 0x7da20e9548b0>]]]]] return[name[res]]
keyword[def] identifier[check_subtype_integrity] ( identifier[m] , identifier[super_kind] , identifier[rel_id] ): literal[string] keyword[if] identifier[isinstance] ( identifier[rel_id] , identifier[int] ): identifier[rel_id] = literal[string] % identifier[rel_id] identifier[res] = literal[int] keyword[for] identifier[inst] keyword[in] identifier[m] . identifier[select_many] ( identifier[super_kind] ): keyword[if] keyword[not] identifier[xtuml] . identifier[navigate_subtype] ( identifier[inst] , identifier[rel_id] ): identifier[res] += literal[int] identifier[logger] . identifier[warning] ( literal[string] literal[string] %( identifier[super_kind] , identifier[rel_id] )) keyword[return] identifier[res]
def check_subtype_integrity(m, super_kind, rel_id): """ Check the model for integrity violations across a subtype association. """ if isinstance(rel_id, int): rel_id = 'R%d' % rel_id # depends on [control=['if'], data=[]] res = 0 for inst in m.select_many(super_kind): if not xtuml.navigate_subtype(inst, rel_id): res += 1 logger.warning('integrity violation across %s[%s]' % (super_kind, rel_id)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['inst']] return res
def infer_x(self, y):
    """Infer probable x from input y

    @param y  the desired output for infered x.
    @return   a list of probable x
    """
    # Let the base class record/validate the goal before optimizing.
    OptimizedInverseModel.infer_x(self, y)
    # No data in the forward model yet: nothing to optimize against,
    # so fall back to a random sample.
    if self.fmodel.size() == 0:
        return self._random_x()

    # Seed the local optimizer with a single initial guess.
    # NOTE(review): the loop below is written for multiple guesses but
    # currently only ever receives one — presumably kept general on
    # purpose; confirm before simplifying.
    x_guesses = [self._guess_x_simple(y)[0]]

    result = []
    for i, xg in enumerate(x_guesses):
        # Refine the guess by minimizing the model error, subject to
        # the configured bounds, using the configured algorithm.
        res = scipy.optimize.minimize(self._error, xg,
                                      args        = (),
                                      method      = self.algo,
                                      bounds      = self.constraints,
                                      options     = self.conf
                                     )

        d = self._error(res.x)
        result.append((d, i, res.x))

    # Return candidates ordered by ascending error, each clipped back
    # into the legal bounds.
    return [self._enforce_bounds(xi) for fi, i, xi in sorted(result)]
def function[infer_x, parameter[self, y]]: constant[Infer probable x from input y @param y the desired output for infered x. @return a list of probable x ] call[name[OptimizedInverseModel].infer_x, parameter[name[self], name[y]]] if compare[call[name[self].fmodel.size, parameter[]] equal[==] constant[0]] begin[:] return[call[name[self]._random_x, parameter[]]] variable[x_guesses] assign[=] list[[<ast.Subscript object at 0x7da1b0c0b070>]] variable[result] assign[=] list[[]] for taget[tuple[[<ast.Name object at 0x7da1b0c096c0>, <ast.Name object at 0x7da1b0c08f10>]]] in starred[call[name[enumerate], parameter[name[x_guesses]]]] begin[:] variable[res] assign[=] call[name[scipy].optimize.minimize, parameter[name[self]._error, name[xg]]] variable[d] assign[=] call[name[self]._error, parameter[name[res].x]] call[name[result].append, parameter[tuple[[<ast.Name object at 0x7da1b0c0aef0>, <ast.Name object at 0x7da1b0c0bf70>, <ast.Attribute object at 0x7da1b0c085b0>]]]] return[<ast.ListComp object at 0x7da1b0c0b280>]
keyword[def] identifier[infer_x] ( identifier[self] , identifier[y] ): literal[string] identifier[OptimizedInverseModel] . identifier[infer_x] ( identifier[self] , identifier[y] ) keyword[if] identifier[self] . identifier[fmodel] . identifier[size] ()== literal[int] : keyword[return] identifier[self] . identifier[_random_x] () identifier[x_guesses] =[ identifier[self] . identifier[_guess_x_simple] ( identifier[y] )[ literal[int] ]] identifier[result] =[] keyword[for] identifier[i] , identifier[xg] keyword[in] identifier[enumerate] ( identifier[x_guesses] ): identifier[res] = identifier[scipy] . identifier[optimize] . identifier[minimize] ( identifier[self] . identifier[_error] , identifier[xg] , identifier[args] =(), identifier[method] = identifier[self] . identifier[algo] , identifier[bounds] = identifier[self] . identifier[constraints] , identifier[options] = identifier[self] . identifier[conf] ) identifier[d] = identifier[self] . identifier[_error] ( identifier[res] . identifier[x] ) identifier[result] . identifier[append] (( identifier[d] , identifier[i] , identifier[res] . identifier[x] )) keyword[return] [ identifier[self] . identifier[_enforce_bounds] ( identifier[xi] ) keyword[for] identifier[fi] , identifier[i] , identifier[xi] keyword[in] identifier[sorted] ( identifier[result] )]
def infer_x(self, y): """Infer probable x from input y @param y the desired output for infered x. @return a list of probable x """ OptimizedInverseModel.infer_x(self, y) if self.fmodel.size() == 0: return self._random_x() # depends on [control=['if'], data=[]] x_guesses = [self._guess_x_simple(y)[0]] result = [] for (i, xg) in enumerate(x_guesses): res = scipy.optimize.minimize(self._error, xg, args=(), method=self.algo, bounds=self.constraints, options=self.conf) d = self._error(res.x) result.append((d, i, res.x)) # depends on [control=['for'], data=[]] return [self._enforce_bounds(xi) for (fi, i, xi) in sorted(result)]
def meta_set(self, key, metafield, value):
    """ Set the meta field for a key to a new value. """
    # Lazily create the per-key mapping, then store the field.
    if key not in self._meta:
        self._meta[key] = {}
    self._meta[key][metafield] = value
def function[meta_set, parameter[self, key, metafield, value]]: constant[ Set the meta field for a key to a new value. ] call[call[name[self]._meta.setdefault, parameter[name[key], dictionary[[], []]]]][name[metafield]] assign[=] name[value]
keyword[def] identifier[meta_set] ( identifier[self] , identifier[key] , identifier[metafield] , identifier[value] ): literal[string] identifier[self] . identifier[_meta] . identifier[setdefault] ( identifier[key] ,{})[ identifier[metafield] ]= identifier[value]
def meta_set(self, key, metafield, value): """ Set the meta field for a key to a new value. """ self._meta.setdefault(key, {})[metafield] = value
def update_catalog_extent(self, current_extent):
    # type: (int) -> None
    '''
    A method to update the extent associated with this Boot Catalog.

    Parameters:
     current_extent - New extent to associate with this Boot Catalog
    Returns:
     Nothing.
    '''
    if not self._initialized:
        raise pycdlibexception.PyCdlibInternalError('El Torito Boot Catalog not yet initialized')

    # Encode the extent as an unsigned 32-bit integer and write it into
    # the Boot Record's boot system use area.
    packed_extent = struct.pack('=L', current_extent)
    self.br.update_boot_system_use(packed_extent)
def function[update_catalog_extent, parameter[self, current_extent]]: constant[ A method to update the extent associated with this Boot Catalog. Parameters: current_extent - New extent to associate with this Boot Catalog Returns: Nothing. ] if <ast.UnaryOp object at 0x7da1b0d0cdf0> begin[:] <ast.Raise object at 0x7da1b0d0c730> call[name[self].br.update_boot_system_use, parameter[call[name[struct].pack, parameter[constant[=L], name[current_extent]]]]]
keyword[def] identifier[update_catalog_extent] ( identifier[self] , identifier[current_extent] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_initialized] : keyword[raise] identifier[pycdlibexception] . identifier[PyCdlibInternalError] ( literal[string] ) identifier[self] . identifier[br] . identifier[update_boot_system_use] ( identifier[struct] . identifier[pack] ( literal[string] , identifier[current_extent] ))
def update_catalog_extent(self, current_extent): # type: (int) -> None '\n A method to update the extent associated with this Boot Catalog.\n\n Parameters:\n current_extent - New extent to associate with this Boot Catalog\n Returns:\n Nothing.\n ' if not self._initialized: raise pycdlibexception.PyCdlibInternalError('El Torito Boot Catalog not yet initialized') # depends on [control=['if'], data=[]] self.br.update_boot_system_use(struct.pack('=L', current_extent))
def make_vertical_bar(percentage, width=1): """ Draws a vertical bar made of unicode characters. :param value: A value between 0 and 100 :param width: How many characters wide the bar should be. :returns: Bar as a String """ bar = ' _▁▂▃▄▅▆▇█' percentage //= 10 percentage = int(percentage) if percentage < 0: output = bar[0] elif percentage >= len(bar): output = bar[-1] else: output = bar[percentage] return output * width
def function[make_vertical_bar, parameter[percentage, width]]: constant[ Draws a vertical bar made of unicode characters. :param value: A value between 0 and 100 :param width: How many characters wide the bar should be. :returns: Bar as a String ] variable[bar] assign[=] constant[ _▁▂▃▄▅▆▇█] <ast.AugAssign object at 0x7da20c7c8040> variable[percentage] assign[=] call[name[int], parameter[name[percentage]]] if compare[name[percentage] less[<] constant[0]] begin[:] variable[output] assign[=] call[name[bar]][constant[0]] return[binary_operation[name[output] * name[width]]]
keyword[def] identifier[make_vertical_bar] ( identifier[percentage] , identifier[width] = literal[int] ): literal[string] identifier[bar] = literal[string] identifier[percentage] //= literal[int] identifier[percentage] = identifier[int] ( identifier[percentage] ) keyword[if] identifier[percentage] < literal[int] : identifier[output] = identifier[bar] [ literal[int] ] keyword[elif] identifier[percentage] >= identifier[len] ( identifier[bar] ): identifier[output] = identifier[bar] [- literal[int] ] keyword[else] : identifier[output] = identifier[bar] [ identifier[percentage] ] keyword[return] identifier[output] * identifier[width]
def make_vertical_bar(percentage, width=1): """ Draws a vertical bar made of unicode characters. :param value: A value between 0 and 100 :param width: How many characters wide the bar should be. :returns: Bar as a String """ bar = ' _▁▂▃▄▅▆▇█' percentage //= 10 percentage = int(percentage) if percentage < 0: output = bar[0] # depends on [control=['if'], data=[]] elif percentage >= len(bar): output = bar[-1] # depends on [control=['if'], data=[]] else: output = bar[percentage] return output * width
def upgrade():
    """Upgrade database.

    Creates the three OAuth-client tables: remote accounts, user
    identities, and remote tokens.
    """
    # One row per (user, OAuth client) pair; provider-specific payload
    # lives in the JSON 'extra_data' column.
    op.create_table(
        'oauthclient_remoteaccount',
        sa.Column('id', sa.Integer(), nullable=False),
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('client_id', sa.String(length=255), nullable=False),
        sa.Column(
            'extra_data', sqlalchemy_utils.JSONType(), nullable=False),
        sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('user_id', 'client_id')
    )
    # External identity (id + method) mapped onto a local user; the
    # composite primary key is (id, method).
    op.create_table(
        'oauthclient_useridentity',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('method', sa.String(length=255), nullable=False),
        sa.Column('id_user', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id'], ),
        sa.PrimaryKeyConstraint('id', 'method')
    )
    # A user may hold at most one identity per method.
    op.create_index(
        'useridentity_id_user_method',
        'oauthclient_useridentity',
        ['id_user', 'method'],
        unique=True
    )
    # Access tokens per remote account; the token itself is stored
    # encrypted.
    op.create_table(
        'oauthclient_remotetoken',
        sa.Column('id_remote_account', sa.Integer(), nullable=False),
        sa.Column('token_type', sa.String(length=40), nullable=False),
        sa.Column(
            'access_token', sqlalchemy_utils.EncryptedType(), nullable=False),
        sa.Column('secret', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(
            ['id_remote_account'], [u'oauthclient_remoteaccount.id'],
            name='fk_oauthclient_remote_token_remote_account'
        ),
        sa.PrimaryKeyConstraint('id_remote_account', 'token_type')
    )
def function[upgrade, parameter[]]: constant[Upgrade database.] call[name[op].create_table, parameter[constant[oauthclient_remoteaccount], call[name[sa].Column, parameter[constant[id], call[name[sa].Integer, parameter[]]]], call[name[sa].Column, parameter[constant[user_id], call[name[sa].Integer, parameter[]]]], call[name[sa].Column, parameter[constant[client_id], call[name[sa].String, parameter[]]]], call[name[sa].Column, parameter[constant[extra_data], call[name[sqlalchemy_utils].JSONType, parameter[]]]], call[name[sa].ForeignKeyConstraint, parameter[list[[<ast.Constant object at 0x7da1b2519180>]], list[[<ast.Constant object at 0x7da1b2518280>]]]], call[name[sa].PrimaryKeyConstraint, parameter[constant[id]]], call[name[sa].UniqueConstraint, parameter[constant[user_id], constant[client_id]]]]] call[name[op].create_table, parameter[constant[oauthclient_useridentity], call[name[sa].Column, parameter[constant[id], call[name[sa].String, parameter[]]]], call[name[sa].Column, parameter[constant[method], call[name[sa].String, parameter[]]]], call[name[sa].Column, parameter[constant[id_user], call[name[sa].Integer, parameter[]]]], call[name[sa].ForeignKeyConstraint, parameter[list[[<ast.Constant object at 0x7da1b251aec0>]], list[[<ast.Constant object at 0x7da1b257f2b0>]]]], call[name[sa].PrimaryKeyConstraint, parameter[constant[id], constant[method]]]]] call[name[op].create_index, parameter[constant[useridentity_id_user_method], constant[oauthclient_useridentity], list[[<ast.Constant object at 0x7da1b257f910>, <ast.Constant object at 0x7da1b257e260>]]]] call[name[op].create_table, parameter[constant[oauthclient_remotetoken], call[name[sa].Column, parameter[constant[id_remote_account], call[name[sa].Integer, parameter[]]]], call[name[sa].Column, parameter[constant[token_type], call[name[sa].String, parameter[]]]], call[name[sa].Column, parameter[constant[access_token], call[name[sqlalchemy_utils].EncryptedType, parameter[]]]], call[name[sa].Column, 
parameter[constant[secret], call[name[sa].Text, parameter[]]]], call[name[sa].ForeignKeyConstraint, parameter[list[[<ast.Constant object at 0x7da1b257e5f0>]], list[[<ast.Constant object at 0x7da1b257e8c0>]]]], call[name[sa].PrimaryKeyConstraint, parameter[constant[id_remote_account], constant[token_type]]]]]
keyword[def] identifier[upgrade] (): literal[string] identifier[op] . identifier[create_table] ( literal[string] , identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[Integer] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[Integer] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[String] ( identifier[length] = literal[int] ), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sqlalchemy_utils] . identifier[JSONType] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[ForeignKeyConstraint] ([ literal[string] ],[ literal[string] ],), identifier[sa] . identifier[PrimaryKeyConstraint] ( literal[string] ), identifier[sa] . identifier[UniqueConstraint] ( literal[string] , literal[string] ) ) identifier[op] . identifier[create_table] ( literal[string] , identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[String] ( identifier[length] = literal[int] ), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[String] ( identifier[length] = literal[int] ), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[Integer] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[ForeignKeyConstraint] ([ literal[string] ],[ literal[string] ],), identifier[sa] . identifier[PrimaryKeyConstraint] ( literal[string] , literal[string] ) ) identifier[op] . identifier[create_index] ( literal[string] , literal[string] , [ literal[string] , literal[string] ], identifier[unique] = keyword[True] ) identifier[op] . identifier[create_table] ( literal[string] , identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . 
identifier[Integer] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[String] ( identifier[length] = literal[int] ), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sqlalchemy_utils] . identifier[EncryptedType] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[Column] ( literal[string] , identifier[sa] . identifier[Text] (), identifier[nullable] = keyword[False] ), identifier[sa] . identifier[ForeignKeyConstraint] ( [ literal[string] ],[ literal[string] ], identifier[name] = literal[string] ), identifier[sa] . identifier[PrimaryKeyConstraint] ( literal[string] , literal[string] ) )
def upgrade(): """Upgrade database.""" op.create_table('oauthclient_remoteaccount', sa.Column('id', sa.Integer(), nullable=False), sa.Column('user_id', sa.Integer(), nullable=False), sa.Column('client_id', sa.String(length=255), nullable=False), sa.Column('extra_data', sqlalchemy_utils.JSONType(), nullable=False), sa.ForeignKeyConstraint(['user_id'], [u'accounts_user.id']), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('user_id', 'client_id')) op.create_table('oauthclient_useridentity', sa.Column('id', sa.String(length=255), nullable=False), sa.Column('method', sa.String(length=255), nullable=False), sa.Column('id_user', sa.Integer(), nullable=False), sa.ForeignKeyConstraint(['id_user'], [u'accounts_user.id']), sa.PrimaryKeyConstraint('id', 'method')) op.create_index('useridentity_id_user_method', 'oauthclient_useridentity', ['id_user', 'method'], unique=True) op.create_table('oauthclient_remotetoken', sa.Column('id_remote_account', sa.Integer(), nullable=False), sa.Column('token_type', sa.String(length=40), nullable=False), sa.Column('access_token', sqlalchemy_utils.EncryptedType(), nullable=False), sa.Column('secret', sa.Text(), nullable=False), sa.ForeignKeyConstraint(['id_remote_account'], [u'oauthclient_remoteaccount.id'], name='fk_oauthclient_remote_token_remote_account'), sa.PrimaryKeyConstraint('id_remote_account', 'token_type'))
def scons_subst_once(strSubst, env, key):
    """Perform single (non-recursive) substitution of a single
    construction variable keyword.

    This is used when setting a variable when copying or overriding values
    in an Environment.  We want to capture (expand) the old value before
    we override it, so people can do things like:

        env2 = env.Clone(CCFLAGS = '$CCFLAGS -g')

    We do this with some straightforward, brute-force code here...
    """
    # Fast path: a plain string with no '$' cannot contain a reference.
    if isinstance(strSubst, str) and strSubst.find('$') < 0:
        return strSubst

    # Both spellings of a reference to this one variable.
    matchlist = ['$' + key, '${' + key + '}']
    val = env.get(key, '')
    def sub_match(match, val=val, matchlist=matchlist):
        # Regex callback: expand the matched $-expression only if it
        # refers to *key*; sequences are flattened to a space-joined
        # string, everything else is stringified.  val/matchlist are
        # bound as defaults to freeze the current values.
        a = match.group(1)
        if a in matchlist:
            a = val
        if is_Sequence(a):
            return ' '.join(map(str, a))
        else:
            return str(a)

    if is_Sequence(strSubst):
        # List form: substitute element by element.  An element that is
        # exactly a reference to *key* is replaced by the old value
        # (spliced in-place if that value is itself a sequence); other
        # strings get regex-based substitution; non-strings pass through.
        result = []
        for arg in strSubst:
            if is_String(arg):
                if arg in matchlist:
                    arg = val
                    if is_Sequence(arg):
                        result.extend(arg)
                    else:
                        result.append(arg)
                else:
                    result.append(_dollar_exps.sub(sub_match, arg))
            else:
                result.append(arg)
        return result
    elif is_String(strSubst):
        return _dollar_exps.sub(sub_match, strSubst)
    else:
        # Not a string or sequence: nothing to substitute.
        return strSubst
def function[scons_subst_once, parameter[strSubst, env, key]]: constant[Perform single (non-recursive) substitution of a single construction variable keyword. This is used when setting a variable when copying or overriding values in an Environment. We want to capture (expand) the old value before we override it, so people can do things like: env2 = env.Clone(CCFLAGS = '$CCFLAGS -g') We do this with some straightforward, brute-force code here... ] if <ast.BoolOp object at 0x7da20cabd6c0> begin[:] return[name[strSubst]] variable[matchlist] assign[=] list[[<ast.BinOp object at 0x7da18f58c070>, <ast.BinOp object at 0x7da18f58d090>]] variable[val] assign[=] call[name[env].get, parameter[name[key], constant[]]] def function[sub_match, parameter[match, val, matchlist]]: variable[a] assign[=] call[name[match].group, parameter[constant[1]]] if compare[name[a] in name[matchlist]] begin[:] variable[a] assign[=] name[val] if call[name[is_Sequence], parameter[name[a]]] begin[:] return[call[constant[ ].join, parameter[call[name[map], parameter[name[str], name[a]]]]]] if call[name[is_Sequence], parameter[name[strSubst]]] begin[:] variable[result] assign[=] list[[]] for taget[name[arg]] in starred[name[strSubst]] begin[:] if call[name[is_String], parameter[name[arg]]] begin[:] if compare[name[arg] in name[matchlist]] begin[:] variable[arg] assign[=] name[val] if call[name[is_Sequence], parameter[name[arg]]] begin[:] call[name[result].extend, parameter[name[arg]]] return[name[result]]
keyword[def] identifier[scons_subst_once] ( identifier[strSubst] , identifier[env] , identifier[key] ): literal[string] keyword[if] identifier[isinstance] ( identifier[strSubst] , identifier[str] ) keyword[and] identifier[strSubst] . identifier[find] ( literal[string] )< literal[int] : keyword[return] identifier[strSubst] identifier[matchlist] =[ literal[string] + identifier[key] , literal[string] + identifier[key] + literal[string] ] identifier[val] = identifier[env] . identifier[get] ( identifier[key] , literal[string] ) keyword[def] identifier[sub_match] ( identifier[match] , identifier[val] = identifier[val] , identifier[matchlist] = identifier[matchlist] ): identifier[a] = identifier[match] . identifier[group] ( literal[int] ) keyword[if] identifier[a] keyword[in] identifier[matchlist] : identifier[a] = identifier[val] keyword[if] identifier[is_Sequence] ( identifier[a] ): keyword[return] literal[string] . identifier[join] ( identifier[map] ( identifier[str] , identifier[a] )) keyword[else] : keyword[return] identifier[str] ( identifier[a] ) keyword[if] identifier[is_Sequence] ( identifier[strSubst] ): identifier[result] =[] keyword[for] identifier[arg] keyword[in] identifier[strSubst] : keyword[if] identifier[is_String] ( identifier[arg] ): keyword[if] identifier[arg] keyword[in] identifier[matchlist] : identifier[arg] = identifier[val] keyword[if] identifier[is_Sequence] ( identifier[arg] ): identifier[result] . identifier[extend] ( identifier[arg] ) keyword[else] : identifier[result] . identifier[append] ( identifier[arg] ) keyword[else] : identifier[result] . identifier[append] ( identifier[_dollar_exps] . identifier[sub] ( identifier[sub_match] , identifier[arg] )) keyword[else] : identifier[result] . identifier[append] ( identifier[arg] ) keyword[return] identifier[result] keyword[elif] identifier[is_String] ( identifier[strSubst] ): keyword[return] identifier[_dollar_exps] . 
identifier[sub] ( identifier[sub_match] , identifier[strSubst] ) keyword[else] : keyword[return] identifier[strSubst]
def scons_subst_once(strSubst, env, key): """Perform single (non-recursive) substitution of a single construction variable keyword. This is used when setting a variable when copying or overriding values in an Environment. We want to capture (expand) the old value before we override it, so people can do things like: env2 = env.Clone(CCFLAGS = '$CCFLAGS -g') We do this with some straightforward, brute-force code here... """ if isinstance(strSubst, str) and strSubst.find('$') < 0: return strSubst # depends on [control=['if'], data=[]] matchlist = ['$' + key, '${' + key + '}'] val = env.get(key, '') def sub_match(match, val=val, matchlist=matchlist): a = match.group(1) if a in matchlist: a = val # depends on [control=['if'], data=['a']] if is_Sequence(a): return ' '.join(map(str, a)) # depends on [control=['if'], data=[]] else: return str(a) if is_Sequence(strSubst): result = [] for arg in strSubst: if is_String(arg): if arg in matchlist: arg = val if is_Sequence(arg): result.extend(arg) # depends on [control=['if'], data=[]] else: result.append(arg) # depends on [control=['if'], data=['arg']] else: result.append(_dollar_exps.sub(sub_match, arg)) # depends on [control=['if'], data=[]] else: result.append(arg) # depends on [control=['for'], data=['arg']] return result # depends on [control=['if'], data=[]] elif is_String(strSubst): return _dollar_exps.sub(sub_match, strSubst) # depends on [control=['if'], data=[]] else: return strSubst
def _get_cached_response(self):
    """Returns a file object of the cached response, downloading and
    caching it first when it is not already cached."""
    if not self._is_cached():
        # Populate the cache so the read below succeeds.
        self.cache.set_xml(self._get_cache_key(), self._download_response())
    return self.cache.get_xml(self._get_cache_key())
def function[_get_cached_response, parameter[self]]: constant[Returns a file object of the cached response.] if <ast.UnaryOp object at 0x7da1b0b486d0> begin[:] variable[response] assign[=] call[name[self]._download_response, parameter[]] call[name[self].cache.set_xml, parameter[call[name[self]._get_cache_key, parameter[]], name[response]]] return[call[name[self].cache.get_xml, parameter[call[name[self]._get_cache_key, parameter[]]]]]
keyword[def] identifier[_get_cached_response] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_is_cached] (): identifier[response] = identifier[self] . identifier[_download_response] () identifier[self] . identifier[cache] . identifier[set_xml] ( identifier[self] . identifier[_get_cache_key] (), identifier[response] ) keyword[return] identifier[self] . identifier[cache] . identifier[get_xml] ( identifier[self] . identifier[_get_cache_key] ())
def _get_cached_response(self): """Returns a file object of the cached response.""" if not self._is_cached(): response = self._download_response() self.cache.set_xml(self._get_cache_key(), response) # depends on [control=['if'], data=[]] return self.cache.get_xml(self._get_cache_key())
def data_and_files(self, data=True, files=True, stream=None):
    """Retrieve body data.

    Returns a two-elements tuple of a :class:`~.MultiValueDict`
    containing data from the request body, and data from uploaded
    files.

    If the body data is not ready, return a :class:`~asyncio.Future`
    which results in the tuple.

    The result is cached.
    """
    if self.method in ENCODE_URL_METHODS:
        # Methods that encode parameters in the URL carry no body payload.
        value = {}, None
    else:
        value = self.cache.get('data_and_files')
    if not value:
        # Nothing cached yet: parse the body (possibly asynchronously).
        return self._data_and_files(data, files, stream)
    if data and files:
        return value
    if data:
        return value[0]
    if files:
        return value[1]
    return None
def function[data_and_files, parameter[self, data, files, stream]]: constant[Retrieve body data. Returns a two-elements tuple of a :class:`~.MultiValueDict` containing data from the request body, and data from uploaded files. If the body data is not ready, return a :class:`~asyncio.Future` which results in the tuple. The result is cached. ] if compare[name[self].method in name[ENCODE_URL_METHODS]] begin[:] variable[value] assign[=] tuple[[<ast.Dict object at 0x7da18c4ce740>, <ast.Constant object at 0x7da18c4cd720>]] if <ast.UnaryOp object at 0x7da18c4ccbe0> begin[:] return[call[name[self]._data_and_files, parameter[name[data], name[files], name[stream]]]]
keyword[def] identifier[data_and_files] ( identifier[self] , identifier[data] = keyword[True] , identifier[files] = keyword[True] , identifier[stream] = keyword[None] ): literal[string] keyword[if] identifier[self] . identifier[method] keyword[in] identifier[ENCODE_URL_METHODS] : identifier[value] ={}, keyword[None] keyword[else] : identifier[value] = identifier[self] . identifier[cache] . identifier[get] ( literal[string] ) keyword[if] keyword[not] identifier[value] : keyword[return] identifier[self] . identifier[_data_and_files] ( identifier[data] , identifier[files] , identifier[stream] ) keyword[elif] identifier[data] keyword[and] identifier[files] : keyword[return] identifier[value] keyword[elif] identifier[data] : keyword[return] identifier[value] [ literal[int] ] keyword[elif] identifier[files] : keyword[return] identifier[value] [ literal[int] ] keyword[else] : keyword[return] keyword[None]
def data_and_files(self, data=True, files=True, stream=None): """Retrieve body data. Returns a two-elements tuple of a :class:`~.MultiValueDict` containing data from the request body, and data from uploaded files. If the body data is not ready, return a :class:`~asyncio.Future` which results in the tuple. The result is cached. """ if self.method in ENCODE_URL_METHODS: value = ({}, None) # depends on [control=['if'], data=[]] else: value = self.cache.get('data_and_files') if not value: return self._data_and_files(data, files, stream) # depends on [control=['if'], data=[]] elif data and files: return value # depends on [control=['if'], data=[]] elif data: return value[0] # depends on [control=['if'], data=[]] elif files: return value[1] # depends on [control=['if'], data=[]] else: return None
def eval(self, x, y, z):
    """Evaluate the function in (x, y, z)."""
    # NOTE: the unpacked locals below (xc, yc, zc, sx, sy, sz) are looked
    # up by name from this frame by numexpr, so they must stay in scope.
    xc, yc, zc = self.rc
    sx, sy, sz = self.s

    # One squared/normalized distance term per axis, e.g. for "x":
    # "((x-xc)**2)/(2*sx**2)".
    terms = ["((%s-%sc)**2)/(2*s%s**2)" % (axis, axis, axis)
             for axis in ("x", "y", "z")]
    # Evaluate exp(-(tx + ty + tz)) with numexpr.
    return NE.evaluate("exp(-(%s))" % " + ".join(terms))
def function[eval, parameter[self, x, y, z]]: constant[Evaluate the function in (x, y, z).] <ast.Tuple object at 0x7da18f58dff0> assign[=] name[self].rc <ast.Tuple object at 0x7da18f58cf40> assign[=] name[self].s def function[arg, parameter[s]]: return[binary_operation[constant[((%s-%sc)**2)/(2*s%s**2)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da18f58db70>, <ast.Name object at 0x7da18f58ff70>, <ast.Name object at 0x7da18f58cd30>]]]] return[call[name[NE].evaluate, parameter[binary_operation[constant[exp(-(%s + %s + %s))] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Call object at 0x7da18f58d240>, <ast.Call object at 0x7da18f58f730>, <ast.Call object at 0x7da18f58fcd0>]]]]]]
keyword[def] identifier[eval] ( identifier[self] , identifier[x] , identifier[y] , identifier[z] ): literal[string] identifier[xc] , identifier[yc] , identifier[zc] = identifier[self] . identifier[rc] identifier[sx] , identifier[sy] , identifier[sz] = identifier[self] . identifier[s] keyword[def] identifier[arg] ( identifier[s] ): keyword[return] literal[string] %( identifier[s] , identifier[s] , identifier[s] ) keyword[return] identifier[NE] . identifier[evaluate] ( literal[string] % ( identifier[arg] ( literal[string] ), identifier[arg] ( literal[string] ), identifier[arg] ( literal[string] )))
def eval(self, x, y, z): """Evaluate the function in (x, y, z).""" (xc, yc, zc) = self.rc (sx, sy, sz) = self.s ## Method1: direct evaluation #return exp(-(((x-xc)**2)/(2*sx**2) + ((y-yc)**2)/(2*sy**2) +\ # ((z-zc)**2)/(2*sz**2))) ## Method2: evaluation using numexpr def arg(s): return '((%s-%sc)**2)/(2*s%s**2)' % (s, s, s) return NE.evaluate('exp(-(%s + %s + %s))' % (arg('x'), arg('y'), arg('z')))
def convert_model_to_onnx(frozen_graph_path, end_node_names, onnx_output_path):
    """Reimplementation of the TensorFlow-onnx official tutorial convert the proto buff to onnx file:

    Parameters
    -----------
    frozen_graph_path : string
        the path where your frozen graph file save.
    end_node_names : string
        the name of the end node in your graph you want to get in your proto buff
    onnx_output_path : string
        the path where you want to save the onnx file.

    References
    -----------
    - `onnx-tf exporting tutorial <https://github.com/onnx/tutorials/blob/master/tutorials/OnnxTensorflowExport.ipynb>`
    """
    with tf.gfile.GFile(frozen_graph_path, "rb") as f:
        # Parse the frozen TensorFlow graph from the protobuf file.
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
        onnx_model = tensorflow_graph_to_onnx_model(graph_def, end_node_names, opset=6)
        # Use a context manager so the output handle is closed even if
        # serialization fails (the original leaked the handle on error).
        with open(onnx_output_path, "wb") as out_file:
            out_file.write(onnx_model.SerializeToString())
def function[convert_model_to_onnx, parameter[frozen_graph_path, end_node_names, onnx_output_path]]: constant[Reimplementation of the TensorFlow-onnx official tutorial convert the proto buff to onnx file: Parameters ----------- frozen_graph_path : string the path where your frozen graph file save. end_node_names : string the name of the end node in your graph you want to get in your proto buff onnx_output_path : string the path where you want to save the onnx file. References ----------- - `onnx-tf exporting tutorial <https://github.com/onnx/tutorials/blob/master/tutorials/OnnxTensorflowExport.ipynb>` ] with call[name[tf].gfile.GFile, parameter[name[frozen_graph_path], constant[rb]]] begin[:] variable[graph_def] assign[=] call[name[tf].GraphDef, parameter[]] call[name[graph_def].ParseFromString, parameter[call[name[f].read, parameter[]]]] variable[onnx_model] assign[=] call[name[tensorflow_graph_to_onnx_model], parameter[name[graph_def], name[end_node_names]]] variable[file] assign[=] call[name[open], parameter[name[onnx_output_path], constant[wb]]] call[name[file].write, parameter[call[name[onnx_model].SerializeToString, parameter[]]]] call[name[file].close, parameter[]]
keyword[def] identifier[convert_model_to_onnx] ( identifier[frozen_graph_path] , identifier[end_node_names] , identifier[onnx_output_path] ): literal[string] keyword[with] identifier[tf] . identifier[gfile] . identifier[GFile] ( identifier[frozen_graph_path] , literal[string] ) keyword[as] identifier[f] : identifier[graph_def] = identifier[tf] . identifier[GraphDef] () identifier[graph_def] . identifier[ParseFromString] ( identifier[f] . identifier[read] ()) identifier[onnx_model] = identifier[tensorflow_graph_to_onnx_model] ( identifier[graph_def] , identifier[end_node_names] , identifier[opset] = literal[int] ) identifier[file] = identifier[open] ( identifier[onnx_output_path] , literal[string] ) identifier[file] . identifier[write] ( identifier[onnx_model] . identifier[SerializeToString] ()) identifier[file] . identifier[close] ()
def convert_model_to_onnx(frozen_graph_path, end_node_names, onnx_output_path): """Reimplementation of the TensorFlow-onnx official tutorial convert the proto buff to onnx file: Parameters ----------- frozen_graph_path : string the path where your frozen graph file save. end_node_names : string the name of the end node in your graph you want to get in your proto buff onnx_output_path : string the path where you want to save the onnx file. References ----------- - `onnx-tf exporting tutorial <https://github.com/onnx/tutorials/blob/master/tutorials/OnnxTensorflowExport.ipynb>` """ with tf.gfile.GFile(frozen_graph_path, 'rb') as f: graph_def = tf.GraphDef() graph_def.ParseFromString(f.read()) onnx_model = tensorflow_graph_to_onnx_model(graph_def, end_node_names, opset=6) file = open(onnx_output_path, 'wb') file.write(onnx_model.SerializeToString()) file.close() # depends on [control=['with'], data=['f']]
def flush(self, error=False, prompt=False):
    """Flush buffer, write text to console.

    Joins the buffered fragments, clears the buffer, inserts the text in
    the console widget and forces a repaint.
    """
    # Fix for Issue 2452
    if PY3:
        try:
            text = "".join(self.__buffer)
        except TypeError:
            # Buffer holds bytes: join as bytes and try to decode using
            # the system's default locale encoding.
            text = b"".join(self.__buffer)
            try:
                text = text.decode( locale.getdefaultlocale()[1] )
            except (TypeError, LookupError, UnicodeDecodeError):
                # TypeError: no default encoding (locale returned None);
                # LookupError: unknown codec name; UnicodeDecodeError:
                # undecodable bytes. Keep the raw bytes in those cases.
                # (Was a bare ``except:`` which also swallowed
                # KeyboardInterrupt/SystemExit.)
                pass
    else:
        text = "".join(self.__buffer)
    self.__buffer = []
    self.insert_text(text, at_end=True, error=error, prompt=prompt)
    QCoreApplication.processEvents()
    self.repaint()
    # Clear input buffer:
    self.new_input_line = True
def function[flush, parameter[self, error, prompt]]: constant[Flush buffer, write text to console] if name[PY3] begin[:] <ast.Try object at 0x7da18eb54be0> name[self].__buffer assign[=] list[[]] call[name[self].insert_text, parameter[name[text]]] call[name[QCoreApplication].processEvents, parameter[]] call[name[self].repaint, parameter[]] name[self].new_input_line assign[=] constant[True]
keyword[def] identifier[flush] ( identifier[self] , identifier[error] = keyword[False] , identifier[prompt] = keyword[False] ): literal[string] keyword[if] identifier[PY3] : keyword[try] : identifier[text] = literal[string] . identifier[join] ( identifier[self] . identifier[__buffer] ) keyword[except] identifier[TypeError] : identifier[text] = literal[string] . identifier[join] ( identifier[self] . identifier[__buffer] ) keyword[try] : identifier[text] = identifier[text] . identifier[decode] ( identifier[locale] . identifier[getdefaultlocale] ()[ literal[int] ]) keyword[except] : keyword[pass] keyword[else] : identifier[text] = literal[string] . identifier[join] ( identifier[self] . identifier[__buffer] ) identifier[self] . identifier[__buffer] =[] identifier[self] . identifier[insert_text] ( identifier[text] , identifier[at_end] = keyword[True] , identifier[error] = identifier[error] , identifier[prompt] = identifier[prompt] ) identifier[QCoreApplication] . identifier[processEvents] () identifier[self] . identifier[repaint] () identifier[self] . identifier[new_input_line] = keyword[True]
def flush(self, error=False, prompt=False): """Flush buffer, write text to console""" # Fix for Issue 2452 if PY3: try: text = ''.join(self.__buffer) # depends on [control=['try'], data=[]] except TypeError: text = b''.join(self.__buffer) try: text = text.decode(locale.getdefaultlocale()[1]) # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: text = ''.join(self.__buffer) self.__buffer = [] self.insert_text(text, at_end=True, error=error, prompt=prompt) QCoreApplication.processEvents() self.repaint() # Clear input buffer: self.new_input_line = True
def relocate_image(self, new_ImageBase):
    """Apply the relocation information to the image using the provided new image base.

    This method will apply the relocation information to the image. Given the new base,
    all the relocations will be processed and both the raw data and the section's data
    will be fixed accordingly.
    The resulting image can be retrieved as well through the method:

        get_memory_mapped_image()

    In order to get something that would more closely match what could be found in memory
    once the Windows loader finished its work.
    """

    # Delta between the requested base and the base the file was linked at;
    # every relocation adds (some part of) this value in place.
    relocation_difference = new_ImageBase - self.OPTIONAL_HEADER.ImageBase


    for reloc in self.DIRECTORY_ENTRY_BASERELOC:

        # NOTE(review): these two are read but never used below.
        virtual_address = reloc.struct.VirtualAddress
        size_of_block = reloc.struct.SizeOfBlock

        # We iterate with an index because if the relocation is of type
        # IMAGE_REL_BASED_HIGHADJ we need to also process the next entry
        # at once and skip it for the next iteration
        #
        entry_idx = 0
        while entry_idx<len(reloc.entries):

            entry = reloc.entries[entry_idx]
            entry_idx += 1

            if entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_ABSOLUTE']:
                # Nothing to do for this type of relocation
                pass

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGH']:
                # Fix the high 16bits of a relocation
                #
                # Add high 16bits of relocation_difference to the
                # 16bit value at RVA=entry.rva
                # NOTE(review): '+' binds tighter than '>>', so this shifts
                # (word + relocation_difference) as a whole — confirm intended.

                self.set_word_at_rva(
                    entry.rva,
                    ( self.get_word_at_rva(entry.rva) + relocation_difference>>16)&0xffff )

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_LOW']:
                # Fix the low 16bits of a relocation
                #
                # Add low 16 bits of relocation_difference to the 16bit value
                # at RVA=entry.rva

                self.set_word_at_rva(
                    entry.rva,
                    ( self.get_word_at_rva(entry.rva) + relocation_difference)&0xffff)

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHLOW']:
                # Handle all high and low parts of a 32bit relocation
                #
                # Add relocation_difference to the value at RVA=entry.rva

                self.set_dword_at_rva(
                    entry.rva,
                    self.get_dword_at_rva(entry.rva)+relocation_difference)

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHADJ']:
                # Fix the high 16bits of a relocation and adjust
                #
                # Add high 16bits of relocation_difference to the 32bit value
                # composed from the (16bit value at RVA=entry.rva)<<16 plus
                # the 16bit value at the next relocation entry.
                #

                # If the next entry is beyond the array's limits,
                # abort... the table is corrupt
                #
                if entry_idx == len(reloc.entries):
                    break

                # Consume the companion entry now and skip it next iteration.
                next_entry = reloc.entries[entry_idx]
                entry_idx += 1
                self.set_word_at_rva( entry.rva,
                    ((self.get_word_at_rva(entry.rva)<<16) + next_entry.rva +
                    relocation_difference & 0xffff0000) >> 16 )

            elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_DIR64']:
                # Apply the difference to the 64bit value at the offset
                # RVA=entry.rva

                self.set_qword_at_rva(
                    entry.rva,
                    self.get_qword_at_rva(entry.rva) + relocation_difference)
def function[relocate_image, parameter[self, new_ImageBase]]: constant[Apply the relocation information to the image using the provided new image base. This method will apply the relocation information to the image. Given the new base, all the relocations will be processed and both the raw data and the section's data will be fixed accordingly. The resulting image can be retrieved as well through the method: get_memory_mapped_image() In order to get something that would more closely match what could be found in memory once the Windows loader finished its work. ] variable[relocation_difference] assign[=] binary_operation[name[new_ImageBase] - name[self].OPTIONAL_HEADER.ImageBase] for taget[name[reloc]] in starred[name[self].DIRECTORY_ENTRY_BASERELOC] begin[:] variable[virtual_address] assign[=] name[reloc].struct.VirtualAddress variable[size_of_block] assign[=] name[reloc].struct.SizeOfBlock variable[entry_idx] assign[=] constant[0] while compare[name[entry_idx] less[<] call[name[len], parameter[name[reloc].entries]]] begin[:] variable[entry] assign[=] call[name[reloc].entries][name[entry_idx]] <ast.AugAssign object at 0x7da1b0c3b760> if compare[name[entry].type equal[==] call[name[RELOCATION_TYPE]][constant[IMAGE_REL_BASED_ABSOLUTE]]] begin[:] pass
keyword[def] identifier[relocate_image] ( identifier[self] , identifier[new_ImageBase] ): literal[string] identifier[relocation_difference] = identifier[new_ImageBase] - identifier[self] . identifier[OPTIONAL_HEADER] . identifier[ImageBase] keyword[for] identifier[reloc] keyword[in] identifier[self] . identifier[DIRECTORY_ENTRY_BASERELOC] : identifier[virtual_address] = identifier[reloc] . identifier[struct] . identifier[VirtualAddress] identifier[size_of_block] = identifier[reloc] . identifier[struct] . identifier[SizeOfBlock] identifier[entry_idx] = literal[int] keyword[while] identifier[entry_idx] < identifier[len] ( identifier[reloc] . identifier[entries] ): identifier[entry] = identifier[reloc] . identifier[entries] [ identifier[entry_idx] ] identifier[entry_idx] += literal[int] keyword[if] identifier[entry] . identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: keyword[pass] keyword[elif] identifier[entry] . identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: identifier[self] . identifier[set_word_at_rva] ( identifier[entry] . identifier[rva] , ( identifier[self] . identifier[get_word_at_rva] ( identifier[entry] . identifier[rva] )+ identifier[relocation_difference] >> literal[int] )& literal[int] ) keyword[elif] identifier[entry] . identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: identifier[self] . identifier[set_word_at_rva] ( identifier[entry] . identifier[rva] , ( identifier[self] . identifier[get_word_at_rva] ( identifier[entry] . identifier[rva] )+ identifier[relocation_difference] )& literal[int] ) keyword[elif] identifier[entry] . identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: identifier[self] . identifier[set_dword_at_rva] ( identifier[entry] . identifier[rva] , identifier[self] . identifier[get_dword_at_rva] ( identifier[entry] . identifier[rva] )+ identifier[relocation_difference] ) keyword[elif] identifier[entry] . 
identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: keyword[if] identifier[entry_idx] == identifier[len] ( identifier[reloc] . identifier[entries] ): keyword[break] identifier[next_entry] = identifier[reloc] . identifier[entries] [ identifier[entry_idx] ] identifier[entry_idx] += literal[int] identifier[self] . identifier[set_word_at_rva] ( identifier[entry] . identifier[rva] , (( identifier[self] . identifier[get_word_at_rva] ( identifier[entry] . identifier[rva] )<< literal[int] )+ identifier[next_entry] . identifier[rva] + identifier[relocation_difference] & literal[int] )>> literal[int] ) keyword[elif] identifier[entry] . identifier[type] == identifier[RELOCATION_TYPE] [ literal[string] ]: identifier[self] . identifier[set_qword_at_rva] ( identifier[entry] . identifier[rva] , identifier[self] . identifier[get_qword_at_rva] ( identifier[entry] . identifier[rva] )+ identifier[relocation_difference] )
def relocate_image(self, new_ImageBase): """Apply the relocation information to the image using the provided new image base. This method will apply the relocation information to the image. Given the new base, all the relocations will be processed and both the raw data and the section's data will be fixed accordingly. The resulting image can be retrieved as well through the method: get_memory_mapped_image() In order to get something that would more closely match what could be found in memory once the Windows loader finished its work. """ relocation_difference = new_ImageBase - self.OPTIONAL_HEADER.ImageBase for reloc in self.DIRECTORY_ENTRY_BASERELOC: virtual_address = reloc.struct.VirtualAddress size_of_block = reloc.struct.SizeOfBlock # We iterate with an index because if the relocation is of type # IMAGE_REL_BASED_HIGHADJ we need to also process the next entry # at once and skip it for the next iteration # entry_idx = 0 while entry_idx < len(reloc.entries): entry = reloc.entries[entry_idx] entry_idx += 1 if entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_ABSOLUTE']: # Nothing to do for this type of relocation pass # depends on [control=['if'], data=[]] elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGH']: # Fix the high 16bits of a relocation # # Add high 16bits of relocation_difference to the # 16bit value at RVA=entry.rva self.set_word_at_rva(entry.rva, self.get_word_at_rva(entry.rva) + relocation_difference >> 16 & 65535) # depends on [control=['if'], data=[]] elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_LOW']: # Fix the low 16bits of a relocation # # Add low 16 bits of relocation_difference to the 16bit value # at RVA=entry.rva self.set_word_at_rva(entry.rva, self.get_word_at_rva(entry.rva) + relocation_difference & 65535) # depends on [control=['if'], data=[]] elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHLOW']: # Handle all high and low parts of a 32bit relocation # # Add relocation_difference to the value at RVA=entry.rva 
self.set_dword_at_rva(entry.rva, self.get_dword_at_rva(entry.rva) + relocation_difference) # depends on [control=['if'], data=[]] elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_HIGHADJ']: # Fix the high 16bits of a relocation and adjust # # Add high 16bits of relocation_difference to the 32bit value # composed from the (16bit value at RVA=entry.rva)<<16 plus # the 16bit value at the next relocation entry. # # If the next entry is beyond the array's limits, # abort... the table is corrupt # if entry_idx == len(reloc.entries): break # depends on [control=['if'], data=[]] next_entry = reloc.entries[entry_idx] entry_idx += 1 self.set_word_at_rva(entry.rva, ((self.get_word_at_rva(entry.rva) << 16) + next_entry.rva + relocation_difference & 4294901760) >> 16) # depends on [control=['if'], data=[]] elif entry.type == RELOCATION_TYPE['IMAGE_REL_BASED_DIR64']: # Apply the difference to the 64bit value at the offset # RVA=entry.rva self.set_qword_at_rva(entry.rva, self.get_qword_at_rva(entry.rva) + relocation_difference) # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['entry_idx']] # depends on [control=['for'], data=['reloc']]
def enable_napp(cls, mgr):
    """Install one NApp using NAppManager object."""
    try:
        if mgr.is_enabled():
            # Already enabled: nothing to do.
            return
        LOG.info('  Enabling...')
        mgr.enable()
        LOG.info('  Enabled.')
    except (FileNotFoundError, PermissionError) as exception:
        LOG.error('  %s', exception)
def function[enable_napp, parameter[cls, mgr]]: constant[Install one NApp using NAppManager object.] <ast.Try object at 0x7da18dc98910>
keyword[def] identifier[enable_napp] ( identifier[cls] , identifier[mgr] ): literal[string] keyword[try] : keyword[if] keyword[not] identifier[mgr] . identifier[is_enabled] (): identifier[LOG] . identifier[info] ( literal[string] ) identifier[mgr] . identifier[enable] () identifier[LOG] . identifier[info] ( literal[string] ) keyword[except] ( identifier[FileNotFoundError] , identifier[PermissionError] ) keyword[as] identifier[exception] : identifier[LOG] . identifier[error] ( literal[string] , identifier[exception] )
def enable_napp(cls, mgr): """Install one NApp using NAppManager object.""" try: if not mgr.is_enabled(): LOG.info(' Enabling...') mgr.enable() # depends on [control=['if'], data=[]] LOG.info(' Enabled.') # depends on [control=['try'], data=[]] except (FileNotFoundError, PermissionError) as exception: LOG.error(' %s', exception) # depends on [control=['except'], data=['exception']]
def is_unsigned(*p):
    """ Returns false unless all types in p are unsigned.

    Vacuously true for an empty argument list.
    """
    from symbols.type_ import Type

    try:
        # Every entry must be a basic, unsigned type.
        return all(i.type_.is_basic and Type.is_unsigned(i.type_) for i in p)
    except Exception:
        # Entries lacking the expected type interface count as "not
        # unsigned" rather than propagating.  (Was a bare ``except:``,
        # which also swallowed SystemExit/KeyboardInterrupt.)
        return False
def function[is_unsigned, parameter[]]: constant[ Returns false unless all types in p are unsigned ] from relative_module[symbols.type_] import module[Type] <ast.Try object at 0x7da204567610> return[constant[False]]
keyword[def] identifier[is_unsigned] (* identifier[p] ): literal[string] keyword[from] identifier[symbols] . identifier[type_] keyword[import] identifier[Type] keyword[try] : keyword[for] identifier[i] keyword[in] identifier[p] : keyword[if] keyword[not] identifier[i] . identifier[type_] . identifier[is_basic] keyword[or] keyword[not] identifier[Type] . identifier[is_unsigned] ( identifier[i] . identifier[type_] ): keyword[return] keyword[False] keyword[return] keyword[True] keyword[except] : keyword[pass] keyword[return] keyword[False]
def is_unsigned(*p): """ Returns false unless all types in p are unsigned """ from symbols.type_ import Type try: for i in p: if not i.type_.is_basic or not Type.is_unsigned(i.type_): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']] return True # depends on [control=['try'], data=[]] except: pass # depends on [control=['except'], data=[]] return False
def initial_state(self, batch_size, dtype=tf.float32, trainable=False,
                  trainable_initializers=None, trainable_regularizers=None,
                  name=None):
    """Builds the default start state tensor of zeros."""
    # Pure delegation: forward every option to the wrapped core unchanged.
    options = dict(
        dtype=dtype,
        trainable=trainable,
        trainable_initializers=trainable_initializers,
        trainable_regularizers=trainable_regularizers,
        name=name,
    )
    return self._core.initial_state(batch_size, **options)
def function[initial_state, parameter[self, batch_size, dtype, trainable, trainable_initializers, trainable_regularizers, name]]: constant[Builds the default start state tensor of zeros.] return[call[name[self]._core.initial_state, parameter[name[batch_size]]]]
keyword[def] identifier[initial_state] ( identifier[self] , identifier[batch_size] , identifier[dtype] = identifier[tf] . identifier[float32] , identifier[trainable] = keyword[False] , identifier[trainable_initializers] = keyword[None] , identifier[trainable_regularizers] = keyword[None] , identifier[name] = keyword[None] ): literal[string] keyword[return] identifier[self] . identifier[_core] . identifier[initial_state] ( identifier[batch_size] , identifier[dtype] = identifier[dtype] , identifier[trainable] = identifier[trainable] , identifier[trainable_initializers] = identifier[trainable_initializers] , identifier[trainable_regularizers] = identifier[trainable_regularizers] , identifier[name] = identifier[name] )
def initial_state(self, batch_size, dtype=tf.float32, trainable=False, trainable_initializers=None, trainable_regularizers=None, name=None): """Builds the default start state tensor of zeros.""" return self._core.initial_state(batch_size, dtype=dtype, trainable=trainable, trainable_initializers=trainable_initializers, trainable_regularizers=trainable_regularizers, name=name)
def prepare(self, cache):
    """Prepare to run next shot.

    Args:
        cache: optional array of amplitudes to restore into ``self.qubits``;
            when None the state is reset to the |0...0> basis state.
    """
    if cache is None:
        # No snapshot available: reset amplitudes to the ground state.
        self.qubits.fill(0.0)
        self.qubits[0] = 1.0
    else:
        # Restore the cached amplitudes in place (no new allocation).
        np.copyto(self.qubits, cache)
    # Fresh classical registers, one per qubit, all cleared.
    self.cregs = [0 for _ in range(self.n_qubits)]
def function[prepare, parameter[self, cache]]: constant[Prepare to run next shot.] if compare[name[cache] is_not constant[None]] begin[:] call[name[np].copyto, parameter[name[self].qubits, name[cache]]] name[self].cregs assign[=] binary_operation[list[[<ast.Constant object at 0x7da2044c3220>]] * name[self].n_qubits]
keyword[def] identifier[prepare] ( identifier[self] , identifier[cache] ): literal[string] keyword[if] identifier[cache] keyword[is] keyword[not] keyword[None] : identifier[np] . identifier[copyto] ( identifier[self] . identifier[qubits] , identifier[cache] ) keyword[else] : identifier[self] . identifier[qubits] . identifier[fill] ( literal[int] ) identifier[self] . identifier[qubits] [ literal[int] ]= literal[int] identifier[self] . identifier[cregs] =[ literal[int] ]* identifier[self] . identifier[n_qubits]
def prepare(self, cache): """Prepare to run next shot.""" if cache is not None: np.copyto(self.qubits, cache) # depends on [control=['if'], data=['cache']] else: self.qubits.fill(0.0) self.qubits[0] = 1.0 self.cregs = [0] * self.n_qubits
def application(environ, start_response):
    """WSGI interface.

    Accepts POSTed report bodies, hands them to ``store`` and answers
    with a plain-text status message.  Non-POST requests, malformed
    lengths and oversized payloads are rejected up front.
    """
    def send_response(status, body):
        # Normalise to bytes so Content-Length matches what is sent.
        payload = body if isinstance(body, bytes) else body.encode('utf-8')
        headers = [('Content-Type', 'text/plain'),
                   ('Content-Length', '%d' % len(payload))]
        start_response(status, headers)
        return [payload]

    if environ['REQUEST_METHOD'] != 'POST':
        return send_response('403 Forbidden', "invalid request")

    # Gets the posted input
    try:
        content_length = int(environ['CONTENT_LENGTH'])
    except (KeyError, ValueError):
        return send_response('400 Bad Request', "invalid content length")

    if content_length > MAX_SIZE:
        return send_response('403 Forbidden', "report too big")

    posted = environ['wsgi.input'].read(content_length)

    # Tries to store; ``store`` returns a falsy value on success and an
    # error message on failure.
    failure = store(posted, environ.get('REMOTE_ADDR'))
    if failure:
        return send_response('501 Server Error', failure)
    return send_response('200 OK', "stored")
def function[application, parameter[environ, start_response]]: constant[WSGI interface. ] def function[send_response, parameter[status, body]]: if <ast.UnaryOp object at 0x7da18c4cf9a0> begin[:] variable[body] assign[=] call[name[body].encode, parameter[constant[utf-8]]] call[name[start_response], parameter[name[status], list[[<ast.Tuple object at 0x7da18c4cee30>, <ast.Tuple object at 0x7da18c4ce050>]]]] return[list[[<ast.Name object at 0x7da18c4cf2e0>]]] if compare[call[name[environ]][constant[REQUEST_METHOD]] not_equal[!=] constant[POST]] begin[:] return[call[name[send_response], parameter[constant[403 Forbidden], constant[invalid request]]]] <ast.Try object at 0x7da18c4ccdc0> if compare[name[request_body_size] greater[>] name[MAX_SIZE]] begin[:] return[call[name[send_response], parameter[constant[403 Forbidden], constant[report too big]]]] variable[request_body] assign[=] call[call[name[environ]][constant[wsgi.input]].read, parameter[name[request_body_size]]] variable[response_body] assign[=] call[name[store], parameter[name[request_body], call[name[environ].get, parameter[constant[REMOTE_ADDR]]]]] if <ast.UnaryOp object at 0x7da18c4ccc10> begin[:] variable[status] assign[=] constant[200 OK] variable[response_body] assign[=] constant[stored] return[call[name[send_response], parameter[name[status], name[response_body]]]]
keyword[def] identifier[application] ( identifier[environ] , identifier[start_response] ): literal[string] keyword[def] identifier[send_response] ( identifier[status] , identifier[body] ): keyword[if] keyword[not] identifier[isinstance] ( identifier[body] , identifier[bytes] ): identifier[body] = identifier[body] . identifier[encode] ( literal[string] ) identifier[start_response] ( identifier[status] ,[( literal[string] , literal[string] ), ( literal[string] , literal[string] % identifier[len] ( identifier[body] ))]) keyword[return] [ identifier[body] ] keyword[if] identifier[environ] [ literal[string] ]!= literal[string] : keyword[return] identifier[send_response] ( literal[string] , literal[string] ) keyword[try] : identifier[request_body_size] = identifier[int] ( identifier[environ] [ literal[string] ]) keyword[except] ( identifier[KeyError] , identifier[ValueError] ): keyword[return] identifier[send_response] ( literal[string] , literal[string] ) keyword[if] identifier[request_body_size] > identifier[MAX_SIZE] : keyword[return] identifier[send_response] ( literal[string] , literal[string] ) identifier[request_body] = identifier[environ] [ literal[string] ]. identifier[read] ( identifier[request_body_size] ) identifier[response_body] = identifier[store] ( identifier[request_body] , identifier[environ] . identifier[get] ( literal[string] )) keyword[if] keyword[not] identifier[response_body] : identifier[status] = literal[string] identifier[response_body] = literal[string] keyword[else] : identifier[status] = literal[string] keyword[return] identifier[send_response] ( identifier[status] , identifier[response_body] )
def application(environ, start_response): """WSGI interface. """ def send_response(status, body): if not isinstance(body, bytes): body = body.encode('utf-8') # depends on [control=['if'], data=[]] start_response(status, [('Content-Type', 'text/plain'), ('Content-Length', '%d' % len(body))]) return [body] if environ['REQUEST_METHOD'] != 'POST': return send_response('403 Forbidden', 'invalid request') # depends on [control=['if'], data=[]] # Gets the posted input try: request_body_size = int(environ['CONTENT_LENGTH']) # depends on [control=['try'], data=[]] except (KeyError, ValueError): return send_response('400 Bad Request', 'invalid content length') # depends on [control=['except'], data=[]] if request_body_size > MAX_SIZE: return send_response('403 Forbidden', 'report too big') # depends on [control=['if'], data=[]] request_body = environ['wsgi.input'].read(request_body_size) # Tries to store response_body = store(request_body, environ.get('REMOTE_ADDR')) if not response_body: status = '200 OK' response_body = 'stored' # depends on [control=['if'], data=[]] else: status = '501 Server Error' # Sends the response return send_response(status, response_body)