code
stringlengths
75
104k
code_sememe
stringlengths
47
309k
token_type
stringlengths
215
214k
code_dependency
stringlengths
75
155k
def run_script(self, filename, start_opts=None, globals_=None, locals_=None): """ Run debugger on Python script `filename'. The script may inspect sys.argv for command arguments. `globals_' and `locals_' are the dictionaries to use for local and global variables. If `globals' is not given, globals() (the current global variables) is used. If `locals_' is not given, it becomes a copy of `globals_'. True is returned if the program terminated normally and False if the debugger initiated a quit or the program did not normally terminate. See also `run_call' if what you to debug a function call, `run_eval' if you want to debug an expression, and `run' if you want to debug general Python statements not inside a file. """ self.mainpyfile = self.core.canonic(filename) # Start with fresh empty copy of globals and locals and tell the script # that it's being run as __main__ to avoid scripts being able to access # the debugger namespace. if globals_ is None: import __main__ # NOQA globals_ = {"__name__" : "__main__", "__file__" : self.mainpyfile, "__builtins__" : __builtins__ } # NOQA if locals_ is None: locals_ = globals_ retval = False self.core.execution_status = 'Running' try: compiled = compile(open(self.mainpyfile).read(), self.mainpyfile, 'exec') self.core.start(start_opts) exec(compiled, globals_, locals_) retval = True except SyntaxError: print(sys.exc_info()[1]) retval = False pass except IOError: print(sys.exc_info()[1]) except DebuggerQuit: retval = False pass except DebuggerRestart: self.core.execution_status = 'Restart requested' raise DebuggerRestart finally: self.core.stop(options={'remove': True}) return retval
def function[run_script, parameter[self, filename, start_opts, globals_, locals_]]: constant[ Run debugger on Python script `filename'. The script may inspect sys.argv for command arguments. `globals_' and `locals_' are the dictionaries to use for local and global variables. If `globals' is not given, globals() (the current global variables) is used. If `locals_' is not given, it becomes a copy of `globals_'. True is returned if the program terminated normally and False if the debugger initiated a quit or the program did not normally terminate. See also `run_call' if what you to debug a function call, `run_eval' if you want to debug an expression, and `run' if you want to debug general Python statements not inside a file. ] name[self].mainpyfile assign[=] call[name[self].core.canonic, parameter[name[filename]]] if compare[name[globals_] is constant[None]] begin[:] import module[__main__] variable[globals_] assign[=] dictionary[[<ast.Constant object at 0x7da1b05c5a50>, <ast.Constant object at 0x7da1b05c67a0>, <ast.Constant object at 0x7da1b05c5180>], [<ast.Constant object at 0x7da1b05c6b60>, <ast.Attribute object at 0x7da1b05c7d30>, <ast.Name object at 0x7da1b05c6ef0>]] if compare[name[locals_] is constant[None]] begin[:] variable[locals_] assign[=] name[globals_] variable[retval] assign[=] constant[False] name[self].core.execution_status assign[=] constant[Running] <ast.Try object at 0x7da1b05c6e30> return[name[retval]]
keyword[def] identifier[run_script] ( identifier[self] , identifier[filename] , identifier[start_opts] = keyword[None] , identifier[globals_] = keyword[None] , identifier[locals_] = keyword[None] ): literal[string] identifier[self] . identifier[mainpyfile] = identifier[self] . identifier[core] . identifier[canonic] ( identifier[filename] ) keyword[if] identifier[globals_] keyword[is] keyword[None] : keyword[import] identifier[__main__] identifier[globals_] ={ literal[string] : literal[string] , literal[string] : identifier[self] . identifier[mainpyfile] , literal[string] : identifier[__builtins__] } keyword[if] identifier[locals_] keyword[is] keyword[None] : identifier[locals_] = identifier[globals_] identifier[retval] = keyword[False] identifier[self] . identifier[core] . identifier[execution_status] = literal[string] keyword[try] : identifier[compiled] = identifier[compile] ( identifier[open] ( identifier[self] . identifier[mainpyfile] ). identifier[read] (), identifier[self] . identifier[mainpyfile] , literal[string] ) identifier[self] . identifier[core] . identifier[start] ( identifier[start_opts] ) identifier[exec] ( identifier[compiled] , identifier[globals_] , identifier[locals_] ) identifier[retval] = keyword[True] keyword[except] identifier[SyntaxError] : identifier[print] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ]) identifier[retval] = keyword[False] keyword[pass] keyword[except] identifier[IOError] : identifier[print] ( identifier[sys] . identifier[exc_info] ()[ literal[int] ]) keyword[except] identifier[DebuggerQuit] : identifier[retval] = keyword[False] keyword[pass] keyword[except] identifier[DebuggerRestart] : identifier[self] . identifier[core] . identifier[execution_status] = literal[string] keyword[raise] identifier[DebuggerRestart] keyword[finally] : identifier[self] . identifier[core] . identifier[stop] ( identifier[options] ={ literal[string] : keyword[True] }) keyword[return] identifier[retval]
def run_script(self, filename, start_opts=None, globals_=None, locals_=None): """ Run debugger on Python script `filename'. The script may inspect sys.argv for command arguments. `globals_' and `locals_' are the dictionaries to use for local and global variables. If `globals' is not given, globals() (the current global variables) is used. If `locals_' is not given, it becomes a copy of `globals_'. True is returned if the program terminated normally and False if the debugger initiated a quit or the program did not normally terminate. See also `run_call' if what you to debug a function call, `run_eval' if you want to debug an expression, and `run' if you want to debug general Python statements not inside a file. """ self.mainpyfile = self.core.canonic(filename) # Start with fresh empty copy of globals and locals and tell the script # that it's being run as __main__ to avoid scripts being able to access # the debugger namespace. if globals_ is None: import __main__ # NOQA globals_ = {'__name__': '__main__', '__file__': self.mainpyfile, '__builtins__': __builtins__} # NOQA # depends on [control=['if'], data=['globals_']] if locals_ is None: locals_ = globals_ # depends on [control=['if'], data=['locals_']] retval = False self.core.execution_status = 'Running' try: compiled = compile(open(self.mainpyfile).read(), self.mainpyfile, 'exec') self.core.start(start_opts) exec(compiled, globals_, locals_) retval = True # depends on [control=['try'], data=[]] except SyntaxError: print(sys.exc_info()[1]) retval = False pass # depends on [control=['except'], data=[]] except IOError: print(sys.exc_info()[1]) # depends on [control=['except'], data=[]] except DebuggerQuit: retval = False pass # depends on [control=['except'], data=[]] except DebuggerRestart: self.core.execution_status = 'Restart requested' raise DebuggerRestart # depends on [control=['except'], data=[]] finally: self.core.stop(options={'remove': True}) return retval
def cli(ctx, transcript_id, organism="", sequence=""): """Duplicate a transcripte Output: A standard apollo feature dictionary ({"features": [{...}]}) """ return ctx.gi.annotations.duplicate_transcript(transcript_id, organism=organism, sequence=sequence)
def function[cli, parameter[ctx, transcript_id, organism, sequence]]: constant[Duplicate a transcripte Output: A standard apollo feature dictionary ({"features": [{...}]}) ] return[call[name[ctx].gi.annotations.duplicate_transcript, parameter[name[transcript_id]]]]
keyword[def] identifier[cli] ( identifier[ctx] , identifier[transcript_id] , identifier[organism] = literal[string] , identifier[sequence] = literal[string] ): literal[string] keyword[return] identifier[ctx] . identifier[gi] . identifier[annotations] . identifier[duplicate_transcript] ( identifier[transcript_id] , identifier[organism] = identifier[organism] , identifier[sequence] = identifier[sequence] )
def cli(ctx, transcript_id, organism='', sequence=''): """Duplicate a transcripte Output: A standard apollo feature dictionary ({"features": [{...}]}) """ return ctx.gi.annotations.duplicate_transcript(transcript_id, organism=organism, sequence=sequence)
def process_corpus(self, corpus, output_path, frame_size=400, hop_size=160, sr=None): """ Process all utterances of the given corpus and save the processed features in a feature-container. The utterances are processed in **offline** mode so the full utterance in one go. Args: corpus (Corpus): The corpus to process the utterances from. output_path (str): A path to save the feature-container to. frame_size (int): The number of samples per frame. hop_size (int): The number of samples between two frames. sr (int): Use the given sampling rate. If None uses the native sampling rate from the underlying data. Returns: FeatureContainer: The feature-container containing the processed features. """ def processing_func(utterance, feat_container, frame_size, hop_size, sr, corpus): data = self.process_utterance(utterance, frame_size=frame_size, hop_size=hop_size, sr=sr, corpus=corpus) feat_container.set(utterance.idx, data) return self._process_corpus(corpus, output_path, processing_func, frame_size=frame_size, hop_size=hop_size, sr=sr)
def function[process_corpus, parameter[self, corpus, output_path, frame_size, hop_size, sr]]: constant[ Process all utterances of the given corpus and save the processed features in a feature-container. The utterances are processed in **offline** mode so the full utterance in one go. Args: corpus (Corpus): The corpus to process the utterances from. output_path (str): A path to save the feature-container to. frame_size (int): The number of samples per frame. hop_size (int): The number of samples between two frames. sr (int): Use the given sampling rate. If None uses the native sampling rate from the underlying data. Returns: FeatureContainer: The feature-container containing the processed features. ] def function[processing_func, parameter[utterance, feat_container, frame_size, hop_size, sr, corpus]]: variable[data] assign[=] call[name[self].process_utterance, parameter[name[utterance]]] call[name[feat_container].set, parameter[name[utterance].idx, name[data]]] return[call[name[self]._process_corpus, parameter[name[corpus], name[output_path], name[processing_func]]]]
keyword[def] identifier[process_corpus] ( identifier[self] , identifier[corpus] , identifier[output_path] , identifier[frame_size] = literal[int] , identifier[hop_size] = literal[int] , identifier[sr] = keyword[None] ): literal[string] keyword[def] identifier[processing_func] ( identifier[utterance] , identifier[feat_container] , identifier[frame_size] , identifier[hop_size] , identifier[sr] , identifier[corpus] ): identifier[data] = identifier[self] . identifier[process_utterance] ( identifier[utterance] , identifier[frame_size] = identifier[frame_size] , identifier[hop_size] = identifier[hop_size] , identifier[sr] = identifier[sr] , identifier[corpus] = identifier[corpus] ) identifier[feat_container] . identifier[set] ( identifier[utterance] . identifier[idx] , identifier[data] ) keyword[return] identifier[self] . identifier[_process_corpus] ( identifier[corpus] , identifier[output_path] , identifier[processing_func] , identifier[frame_size] = identifier[frame_size] , identifier[hop_size] = identifier[hop_size] , identifier[sr] = identifier[sr] )
def process_corpus(self, corpus, output_path, frame_size=400, hop_size=160, sr=None): """ Process all utterances of the given corpus and save the processed features in a feature-container. The utterances are processed in **offline** mode so the full utterance in one go. Args: corpus (Corpus): The corpus to process the utterances from. output_path (str): A path to save the feature-container to. frame_size (int): The number of samples per frame. hop_size (int): The number of samples between two frames. sr (int): Use the given sampling rate. If None uses the native sampling rate from the underlying data. Returns: FeatureContainer: The feature-container containing the processed features. """ def processing_func(utterance, feat_container, frame_size, hop_size, sr, corpus): data = self.process_utterance(utterance, frame_size=frame_size, hop_size=hop_size, sr=sr, corpus=corpus) feat_container.set(utterance.idx, data) return self._process_corpus(corpus, output_path, processing_func, frame_size=frame_size, hop_size=hop_size, sr=sr)
def chebyshev_distance_numpy(object1, object2): """! @brief Calculate Chebyshev distance between two objects using numpy. @param[in] object1 (array_like): The first array_like object. @param[in] object2 (array_like): The second array_like object. @return (double) Chebyshev distance between two objects. """ return numpy.max(numpy.absolute(object1 - object2), axis=1).T
def function[chebyshev_distance_numpy, parameter[object1, object2]]: constant[! @brief Calculate Chebyshev distance between two objects using numpy. @param[in] object1 (array_like): The first array_like object. @param[in] object2 (array_like): The second array_like object. @return (double) Chebyshev distance between two objects. ] return[call[name[numpy].max, parameter[call[name[numpy].absolute, parameter[binary_operation[name[object1] - name[object2]]]]]].T]
keyword[def] identifier[chebyshev_distance_numpy] ( identifier[object1] , identifier[object2] ): literal[string] keyword[return] identifier[numpy] . identifier[max] ( identifier[numpy] . identifier[absolute] ( identifier[object1] - identifier[object2] ), identifier[axis] = literal[int] ). identifier[T]
def chebyshev_distance_numpy(object1, object2): """! @brief Calculate Chebyshev distance between two objects using numpy. @param[in] object1 (array_like): The first array_like object. @param[in] object2 (array_like): The second array_like object. @return (double) Chebyshev distance between two objects. """ return numpy.max(numpy.absolute(object1 - object2), axis=1).T
def getTaskTypes(self): """ Return the current list of task types """ types = [ ('Calibration', safe_unicode(_('Calibration')).encode('utf-8')), ('Enhancement', safe_unicode(_('Enhancement')).encode('utf-8')), ('Preventive', safe_unicode(_('Preventive')).encode('utf-8')), ('Repair', safe_unicode(_('Repair')).encode('utf-8')), ('Validation', safe_unicode(_('Validation')).encode('utf-8')), ] return DisplayList(types)
def function[getTaskTypes, parameter[self]]: constant[ Return the current list of task types ] variable[types] assign[=] list[[<ast.Tuple object at 0x7da18eb568f0>, <ast.Tuple object at 0x7da18eb55e40>, <ast.Tuple object at 0x7da2047e85e0>, <ast.Tuple object at 0x7da2047eaad0>, <ast.Tuple object at 0x7da2047e9b40>]] return[call[name[DisplayList], parameter[name[types]]]]
keyword[def] identifier[getTaskTypes] ( identifier[self] ): literal[string] identifier[types] =[ ( literal[string] , identifier[safe_unicode] ( identifier[_] ( literal[string] )). identifier[encode] ( literal[string] )), ( literal[string] , identifier[safe_unicode] ( identifier[_] ( literal[string] )). identifier[encode] ( literal[string] )), ( literal[string] , identifier[safe_unicode] ( identifier[_] ( literal[string] )). identifier[encode] ( literal[string] )), ( literal[string] , identifier[safe_unicode] ( identifier[_] ( literal[string] )). identifier[encode] ( literal[string] )), ( literal[string] , identifier[safe_unicode] ( identifier[_] ( literal[string] )). identifier[encode] ( literal[string] )), ] keyword[return] identifier[DisplayList] ( identifier[types] )
def getTaskTypes(self): """ Return the current list of task types """ types = [('Calibration', safe_unicode(_('Calibration')).encode('utf-8')), ('Enhancement', safe_unicode(_('Enhancement')).encode('utf-8')), ('Preventive', safe_unicode(_('Preventive')).encode('utf-8')), ('Repair', safe_unicode(_('Repair')).encode('utf-8')), ('Validation', safe_unicode(_('Validation')).encode('utf-8'))] return DisplayList(types)
def edit(self, config, state, name, value=None): """ Change all occurrences of `name` in a kernel boot line in the given `config` text. It is expected that `config` is the contents of a file following the syntax of ``/etc/default/grub``:file:. .. warning:: This module only does a very crude textual search and replace: it is assumed that input lines in have the form ``KEY="value"`` (quote characters can be double ``"`` or single ``'``), and that the ``value`` string spans a single line and contains all relevant kernel boot parameters. However, the GRUB docs state that :file:``/etc/default/grub`` "is sourced by a shell script, and so must be valid POSIX shell input; normally, it will just be a sequence of ``KEY=value`` lines". In particular, the following cases are valid POSIX shell input but will be mishandled by this module: - It is assumed that all ``KEY=value`` assignments are on a single line. Multi-line strings will make the module error out. - Variable substitutions in the ``value`` part will not be detected. - Escaped quotes will be treated as regular quotes, i.e., there is no way to embed a ``"`` or a ``'`` character in a ``KEY=value`` line with this module. - String concatenation is not supported: whereas the POSIX shell interprets a line ``KEY="foo"'bar'`` as assigning the string ``foobar`` to ``KEY``, this module will only operate on the ``"foo"`` part. """ config = str(config) # make a copy so we can alter it pos = config.find(self._GRUB_CMDLINE_VAR) while pos > -1: # quote char can be `'` or `"` quote_pos = pos + len(self._GRUB_CMDLINE_VAR) quote_char = config[quote_pos] start = quote_pos + 1 # string ends with matching quote end = config.index(quote_char, start) cmdline = config[start:end] new_cmdline = _edit_linux_cmdline(cmdline, state, name, value) config = config[:start] + new_cmdline + config[end:] delta = len(new_cmdline) - len(cmdline) pos = config.find(self._GRUB_CMDLINE_VAR, end + delta) return config
def function[edit, parameter[self, config, state, name, value]]: constant[ Change all occurrences of `name` in a kernel boot line in the given `config` text. It is expected that `config` is the contents of a file following the syntax of ``/etc/default/grub``:file:. .. warning:: This module only does a very crude textual search and replace: it is assumed that input lines in have the form ``KEY="value"`` (quote characters can be double ``"`` or single ``'``), and that the ``value`` string spans a single line and contains all relevant kernel boot parameters. However, the GRUB docs state that :file:``/etc/default/grub`` "is sourced by a shell script, and so must be valid POSIX shell input; normally, it will just be a sequence of ``KEY=value`` lines". In particular, the following cases are valid POSIX shell input but will be mishandled by this module: - It is assumed that all ``KEY=value`` assignments are on a single line. Multi-line strings will make the module error out. - Variable substitutions in the ``value`` part will not be detected. - Escaped quotes will be treated as regular quotes, i.e., there is no way to embed a ``"`` or a ``'`` character in a ``KEY=value`` line with this module. - String concatenation is not supported: whereas the POSIX shell interprets a line ``KEY="foo"'bar'`` as assigning the string ``foobar`` to ``KEY``, this module will only operate on the ``"foo"`` part. ] variable[config] assign[=] call[name[str], parameter[name[config]]] variable[pos] assign[=] call[name[config].find, parameter[name[self]._GRUB_CMDLINE_VAR]] while compare[name[pos] greater[>] <ast.UnaryOp object at 0x7da1b08693f0>] begin[:] variable[quote_pos] assign[=] binary_operation[name[pos] + call[name[len], parameter[name[self]._GRUB_CMDLINE_VAR]]] variable[quote_char] assign[=] call[name[config]][name[quote_pos]] variable[start] assign[=] binary_operation[name[quote_pos] + constant[1]] variable[end] assign[=] call[name[config].index, parameter[name[quote_char], name[start]]] variable[cmdline] assign[=] call[name[config]][<ast.Slice object at 0x7da1b08c9630>] variable[new_cmdline] assign[=] call[name[_edit_linux_cmdline], parameter[name[cmdline], name[state], name[name], name[value]]] variable[config] assign[=] binary_operation[binary_operation[call[name[config]][<ast.Slice object at 0x7da1b060f910>] + name[new_cmdline]] + call[name[config]][<ast.Slice object at 0x7da1b060cc40>]] variable[delta] assign[=] binary_operation[call[name[len], parameter[name[new_cmdline]]] - call[name[len], parameter[name[cmdline]]]] variable[pos] assign[=] call[name[config].find, parameter[name[self]._GRUB_CMDLINE_VAR, binary_operation[name[end] + name[delta]]]] return[name[config]]
keyword[def] identifier[edit] ( identifier[self] , identifier[config] , identifier[state] , identifier[name] , identifier[value] = keyword[None] ): literal[string] identifier[config] = identifier[str] ( identifier[config] ) identifier[pos] = identifier[config] . identifier[find] ( identifier[self] . identifier[_GRUB_CMDLINE_VAR] ) keyword[while] identifier[pos] >- literal[int] : identifier[quote_pos] = identifier[pos] + identifier[len] ( identifier[self] . identifier[_GRUB_CMDLINE_VAR] ) identifier[quote_char] = identifier[config] [ identifier[quote_pos] ] identifier[start] = identifier[quote_pos] + literal[int] identifier[end] = identifier[config] . identifier[index] ( identifier[quote_char] , identifier[start] ) identifier[cmdline] = identifier[config] [ identifier[start] : identifier[end] ] identifier[new_cmdline] = identifier[_edit_linux_cmdline] ( identifier[cmdline] , identifier[state] , identifier[name] , identifier[value] ) identifier[config] = identifier[config] [: identifier[start] ]+ identifier[new_cmdline] + identifier[config] [ identifier[end] :] identifier[delta] = identifier[len] ( identifier[new_cmdline] )- identifier[len] ( identifier[cmdline] ) identifier[pos] = identifier[config] . identifier[find] ( identifier[self] . identifier[_GRUB_CMDLINE_VAR] , identifier[end] + identifier[delta] ) keyword[return] identifier[config]
def edit(self, config, state, name, value=None): """ Change all occurrences of `name` in a kernel boot line in the given `config` text. It is expected that `config` is the contents of a file following the syntax of ``/etc/default/grub``:file:. .. warning:: This module only does a very crude textual search and replace: it is assumed that input lines in have the form ``KEY="value"`` (quote characters can be double ``"`` or single ``'``), and that the ``value`` string spans a single line and contains all relevant kernel boot parameters. However, the GRUB docs state that :file:``/etc/default/grub`` "is sourced by a shell script, and so must be valid POSIX shell input; normally, it will just be a sequence of ``KEY=value`` lines". In particular, the following cases are valid POSIX shell input but will be mishandled by this module: - It is assumed that all ``KEY=value`` assignments are on a single line. Multi-line strings will make the module error out. - Variable substitutions in the ``value`` part will not be detected. - Escaped quotes will be treated as regular quotes, i.e., there is no way to embed a ``"`` or a ``'`` character in a ``KEY=value`` line with this module. - String concatenation is not supported: whereas the POSIX shell interprets a line ``KEY="foo"'bar'`` as assigning the string ``foobar`` to ``KEY``, this module will only operate on the ``"foo"`` part. """ config = str(config) # make a copy so we can alter it pos = config.find(self._GRUB_CMDLINE_VAR) while pos > -1: # quote char can be `'` or `"` quote_pos = pos + len(self._GRUB_CMDLINE_VAR) quote_char = config[quote_pos] start = quote_pos + 1 # string ends with matching quote end = config.index(quote_char, start) cmdline = config[start:end] new_cmdline = _edit_linux_cmdline(cmdline, state, name, value) config = config[:start] + new_cmdline + config[end:] delta = len(new_cmdline) - len(cmdline) pos = config.find(self._GRUB_CMDLINE_VAR, end + delta) # depends on [control=['while'], data=['pos']] return config
def _handle_response(response, server_config, synchronous=False, timeout=None): """Handle a server's response in a typical fashion. Do the following: 1. Check the server's response for an HTTP status code indicating an error. 2. Poll the server for a foreman task to complete if an HTTP 202 (accepted) status code is returned and ``synchronous is True``. 3. Immediately return if an HTTP "NO CONTENT" response is received. 4. Determine what type of the content returned from server. Depending on the type method should return server's response, with all JSON decoded or just response content itself. :param response: A response object as returned by one of the functions in :mod:`nailgun.client` or the requests library. :param server_config: A `nailgun.config.ServerConfig` object. :param synchronous: Should this function poll the server? :param timeout: Maximum number of seconds to wait until timing out. Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``. """ response.raise_for_status() if synchronous is True and response.status_code == ACCEPTED: return ForemanTask( server_config, id=response.json()['id']).poll(timeout=timeout) if response.status_code == NO_CONTENT: return if 'application/json' in response.headers.get('content-type', '').lower(): return response.json() elif isinstance(response.content, bytes): return response.content.decode('utf-8') else: return response.content
def function[_handle_response, parameter[response, server_config, synchronous, timeout]]: constant[Handle a server's response in a typical fashion. Do the following: 1. Check the server's response for an HTTP status code indicating an error. 2. Poll the server for a foreman task to complete if an HTTP 202 (accepted) status code is returned and ``synchronous is True``. 3. Immediately return if an HTTP "NO CONTENT" response is received. 4. Determine what type of the content returned from server. Depending on the type method should return server's response, with all JSON decoded or just response content itself. :param response: A response object as returned by one of the functions in :mod:`nailgun.client` or the requests library. :param server_config: A `nailgun.config.ServerConfig` object. :param synchronous: Should this function poll the server? :param timeout: Maximum number of seconds to wait until timing out. Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``. ] call[name[response].raise_for_status, parameter[]] if <ast.BoolOp object at 0x7da18f00d8d0> begin[:] return[call[call[name[ForemanTask], parameter[name[server_config]]].poll, parameter[]]] if compare[name[response].status_code equal[==] name[NO_CONTENT]] begin[:] return[None] if compare[constant[application/json] in call[call[name[response].headers.get, parameter[constant[content-type], constant[]]].lower, parameter[]]] begin[:] return[call[name[response].json, parameter[]]]
keyword[def] identifier[_handle_response] ( identifier[response] , identifier[server_config] , identifier[synchronous] = keyword[False] , identifier[timeout] = keyword[None] ): literal[string] identifier[response] . identifier[raise_for_status] () keyword[if] identifier[synchronous] keyword[is] keyword[True] keyword[and] identifier[response] . identifier[status_code] == identifier[ACCEPTED] : keyword[return] identifier[ForemanTask] ( identifier[server_config] , identifier[id] = identifier[response] . identifier[json] ()[ literal[string] ]). identifier[poll] ( identifier[timeout] = identifier[timeout] ) keyword[if] identifier[response] . identifier[status_code] == identifier[NO_CONTENT] : keyword[return] keyword[if] literal[string] keyword[in] identifier[response] . identifier[headers] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] (): keyword[return] identifier[response] . identifier[json] () keyword[elif] identifier[isinstance] ( identifier[response] . identifier[content] , identifier[bytes] ): keyword[return] identifier[response] . identifier[content] . identifier[decode] ( literal[string] ) keyword[else] : keyword[return] identifier[response] . identifier[content]
def _handle_response(response, server_config, synchronous=False, timeout=None): """Handle a server's response in a typical fashion. Do the following: 1. Check the server's response for an HTTP status code indicating an error. 2. Poll the server for a foreman task to complete if an HTTP 202 (accepted) status code is returned and ``synchronous is True``. 3. Immediately return if an HTTP "NO CONTENT" response is received. 4. Determine what type of the content returned from server. Depending on the type method should return server's response, with all JSON decoded or just response content itself. :param response: A response object as returned by one of the functions in :mod:`nailgun.client` or the requests library. :param server_config: A `nailgun.config.ServerConfig` object. :param synchronous: Should this function poll the server? :param timeout: Maximum number of seconds to wait until timing out. Defaults to ``nailgun.entity_mixins.TASK_TIMEOUT``. """ response.raise_for_status() if synchronous is True and response.status_code == ACCEPTED: return ForemanTask(server_config, id=response.json()['id']).poll(timeout=timeout) # depends on [control=['if'], data=[]] if response.status_code == NO_CONTENT: return # depends on [control=['if'], data=[]] if 'application/json' in response.headers.get('content-type', '').lower(): return response.json() # depends on [control=['if'], data=[]] elif isinstance(response.content, bytes): return response.content.decode('utf-8') # depends on [control=['if'], data=[]] else: return response.content
def get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_vnetwork_portgroups = ET.Element("get_vnetwork_portgroups") config = get_vnetwork_portgroups output = ET.SubElement(get_vnetwork_portgroups, "output") vnetwork_pgs = ET.SubElement(output, "vnetwork-pgs") vs_nn = ET.SubElement(vnetwork_pgs, "vs-nn") vs_nn.text = kwargs.pop('vs_nn') callback = kwargs.pop('callback', self._callback) return callback(config)
def function[get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn, parameter[self]]: constant[Auto Generated Code ] variable[config] assign[=] call[name[ET].Element, parameter[constant[config]]] variable[get_vnetwork_portgroups] assign[=] call[name[ET].Element, parameter[constant[get_vnetwork_portgroups]]] variable[config] assign[=] name[get_vnetwork_portgroups] variable[output] assign[=] call[name[ET].SubElement, parameter[name[get_vnetwork_portgroups], constant[output]]] variable[vnetwork_pgs] assign[=] call[name[ET].SubElement, parameter[name[output], constant[vnetwork-pgs]]] variable[vs_nn] assign[=] call[name[ET].SubElement, parameter[name[vnetwork_pgs], constant[vs-nn]]] name[vs_nn].text assign[=] call[name[kwargs].pop, parameter[constant[vs_nn]]] variable[callback] assign[=] call[name[kwargs].pop, parameter[constant[callback], name[self]._callback]] return[call[name[callback], parameter[name[config]]]]
keyword[def] identifier[get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn] ( identifier[self] ,** identifier[kwargs] ): literal[string] identifier[config] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[get_vnetwork_portgroups] = identifier[ET] . identifier[Element] ( literal[string] ) identifier[config] = identifier[get_vnetwork_portgroups] identifier[output] = identifier[ET] . identifier[SubElement] ( identifier[get_vnetwork_portgroups] , literal[string] ) identifier[vnetwork_pgs] = identifier[ET] . identifier[SubElement] ( identifier[output] , literal[string] ) identifier[vs_nn] = identifier[ET] . identifier[SubElement] ( identifier[vnetwork_pgs] , literal[string] ) identifier[vs_nn] . identifier[text] = identifier[kwargs] . identifier[pop] ( literal[string] ) identifier[callback] = identifier[kwargs] . identifier[pop] ( literal[string] , identifier[self] . identifier[_callback] ) keyword[return] identifier[callback] ( identifier[config] )
def get_vnetwork_portgroups_output_vnetwork_pgs_vs_nn(self, **kwargs): """Auto Generated Code """ config = ET.Element('config') get_vnetwork_portgroups = ET.Element('get_vnetwork_portgroups') config = get_vnetwork_portgroups output = ET.SubElement(get_vnetwork_portgroups, 'output') vnetwork_pgs = ET.SubElement(output, 'vnetwork-pgs') vs_nn = ET.SubElement(vnetwork_pgs, 'vs-nn') vs_nn.text = kwargs.pop('vs_nn') callback = kwargs.pop('callback', self._callback) return callback(config)
def parse_midi_file(self, file): """Parse a MIDI file. Return the header -as a tuple containing respectively the MIDI format, the number of tracks and the time division-, the parsed track data and the number of bytes read. """ try: f = open(file, 'r') except: raise IOError('File not found') self.bytes_read = 0 header = self.parse_midi_file_header(f) tracks = header[1] result = [] while tracks > 0: events = self.parse_track(f) result.append(events) tracks -= 1 f.close() return (header, result)
def function[parse_midi_file, parameter[self, file]]: constant[Parse a MIDI file. Return the header -as a tuple containing respectively the MIDI format, the number of tracks and the time division-, the parsed track data and the number of bytes read. ] <ast.Try object at 0x7da1b26ac940> name[self].bytes_read assign[=] constant[0] variable[header] assign[=] call[name[self].parse_midi_file_header, parameter[name[f]]] variable[tracks] assign[=] call[name[header]][constant[1]] variable[result] assign[=] list[[]] while compare[name[tracks] greater[>] constant[0]] begin[:] variable[events] assign[=] call[name[self].parse_track, parameter[name[f]]] call[name[result].append, parameter[name[events]]] <ast.AugAssign object at 0x7da1b26af5e0> call[name[f].close, parameter[]] return[tuple[[<ast.Name object at 0x7da1b26aca60>, <ast.Name object at 0x7da1b26ac6a0>]]]
keyword[def] identifier[parse_midi_file] ( identifier[self] , identifier[file] ): literal[string] keyword[try] : identifier[f] = identifier[open] ( identifier[file] , literal[string] ) keyword[except] : keyword[raise] identifier[IOError] ( literal[string] ) identifier[self] . identifier[bytes_read] = literal[int] identifier[header] = identifier[self] . identifier[parse_midi_file_header] ( identifier[f] ) identifier[tracks] = identifier[header] [ literal[int] ] identifier[result] =[] keyword[while] identifier[tracks] > literal[int] : identifier[events] = identifier[self] . identifier[parse_track] ( identifier[f] ) identifier[result] . identifier[append] ( identifier[events] ) identifier[tracks] -= literal[int] identifier[f] . identifier[close] () keyword[return] ( identifier[header] , identifier[result] )
def parse_midi_file(self, file): """Parse a MIDI file. Return the header -as a tuple containing respectively the MIDI format, the number of tracks and the time division-, the parsed track data and the number of bytes read. """ try: f = open(file, 'r') # depends on [control=['try'], data=[]] except: raise IOError('File not found') # depends on [control=['except'], data=[]] self.bytes_read = 0 header = self.parse_midi_file_header(f) tracks = header[1] result = [] while tracks > 0: events = self.parse_track(f) result.append(events) tracks -= 1 # depends on [control=['while'], data=['tracks']] f.close() return (header, result)
def _apply(self, plan): """Required function of manager.py to actually apply a record change. :param plan: Contains the zones and changes to be made :type plan: octodns.provider.base.Plan :type return: void """ desired = plan.desired changes = plan.changes self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, len(changes)) # Get gcloud zone, or create one if none existed before. if desired.name not in self.gcloud_zones: gcloud_zone = self._create_gcloud_zone(desired.name) else: gcloud_zone = self.gcloud_zones.get(desired.name) gcloud_changes = gcloud_zone.changes() for change in changes: class_name = change.__class__.__name__ _rrset_func = getattr( self, '_rrset_for_{}'.format(change.record._type)) if class_name == 'Create': gcloud_changes.add_record_set( _rrset_func(gcloud_zone, change.record)) elif class_name == 'Delete': gcloud_changes.delete_record_set( _rrset_func(gcloud_zone, change.record)) elif class_name == 'Update': gcloud_changes.delete_record_set( _rrset_func(gcloud_zone, change.existing)) gcloud_changes.add_record_set( _rrset_func(gcloud_zone, change.new)) else: raise RuntimeError('Change type "{}" for change "{!s}" ' 'is none of "Create", "Delete" or "Update' .format(class_name, change)) gcloud_changes.create() for i in range(120): gcloud_changes.reload() # https://cloud.google.com/dns/api/v1/changes#resource # status can be one of either "pending" or "done" if gcloud_changes.status != 'pending': break self.log.debug("Waiting for changes to complete") time.sleep(self.CHANGE_LOOP_WAIT) if gcloud_changes.status != 'done': raise RuntimeError("Timeout reached after {} seconds".format( i * self.CHANGE_LOOP_WAIT))
def function[_apply, parameter[self, plan]]: constant[Required function of manager.py to actually apply a record change. :param plan: Contains the zones and changes to be made :type plan: octodns.provider.base.Plan :type return: void ] variable[desired] assign[=] name[plan].desired variable[changes] assign[=] name[plan].changes call[name[self].log.debug, parameter[constant[_apply: zone=%s, len(changes)=%d], name[desired].name, call[name[len], parameter[name[changes]]]]] if compare[name[desired].name <ast.NotIn object at 0x7da2590d7190> name[self].gcloud_zones] begin[:] variable[gcloud_zone] assign[=] call[name[self]._create_gcloud_zone, parameter[name[desired].name]] variable[gcloud_changes] assign[=] call[name[gcloud_zone].changes, parameter[]] for taget[name[change]] in starred[name[changes]] begin[:] variable[class_name] assign[=] name[change].__class__.__name__ variable[_rrset_func] assign[=] call[name[getattr], parameter[name[self], call[constant[_rrset_for_{}].format, parameter[name[change].record._type]]]] if compare[name[class_name] equal[==] constant[Create]] begin[:] call[name[gcloud_changes].add_record_set, parameter[call[name[_rrset_func], parameter[name[gcloud_zone], name[change].record]]]] call[name[gcloud_changes].create, parameter[]] for taget[name[i]] in starred[call[name[range], parameter[constant[120]]]] begin[:] call[name[gcloud_changes].reload, parameter[]] if compare[name[gcloud_changes].status not_equal[!=] constant[pending]] begin[:] break call[name[self].log.debug, parameter[constant[Waiting for changes to complete]]] call[name[time].sleep, parameter[name[self].CHANGE_LOOP_WAIT]] if compare[name[gcloud_changes].status not_equal[!=] constant[done]] begin[:] <ast.Raise object at 0x7da1b18a1000>
keyword[def] identifier[_apply] ( identifier[self] , identifier[plan] ): literal[string] identifier[desired] = identifier[plan] . identifier[desired] identifier[changes] = identifier[plan] . identifier[changes] identifier[self] . identifier[log] . identifier[debug] ( literal[string] , identifier[desired] . identifier[name] , identifier[len] ( identifier[changes] )) keyword[if] identifier[desired] . identifier[name] keyword[not] keyword[in] identifier[self] . identifier[gcloud_zones] : identifier[gcloud_zone] = identifier[self] . identifier[_create_gcloud_zone] ( identifier[desired] . identifier[name] ) keyword[else] : identifier[gcloud_zone] = identifier[self] . identifier[gcloud_zones] . identifier[get] ( identifier[desired] . identifier[name] ) identifier[gcloud_changes] = identifier[gcloud_zone] . identifier[changes] () keyword[for] identifier[change] keyword[in] identifier[changes] : identifier[class_name] = identifier[change] . identifier[__class__] . identifier[__name__] identifier[_rrset_func] = identifier[getattr] ( identifier[self] , literal[string] . identifier[format] ( identifier[change] . identifier[record] . identifier[_type] )) keyword[if] identifier[class_name] == literal[string] : identifier[gcloud_changes] . identifier[add_record_set] ( identifier[_rrset_func] ( identifier[gcloud_zone] , identifier[change] . identifier[record] )) keyword[elif] identifier[class_name] == literal[string] : identifier[gcloud_changes] . identifier[delete_record_set] ( identifier[_rrset_func] ( identifier[gcloud_zone] , identifier[change] . identifier[record] )) keyword[elif] identifier[class_name] == literal[string] : identifier[gcloud_changes] . identifier[delete_record_set] ( identifier[_rrset_func] ( identifier[gcloud_zone] , identifier[change] . identifier[existing] )) identifier[gcloud_changes] . identifier[add_record_set] ( identifier[_rrset_func] ( identifier[gcloud_zone] , identifier[change] . identifier[new] )) keyword[else] : keyword[raise] identifier[RuntimeError] ( literal[string] literal[string] . identifier[format] ( identifier[class_name] , identifier[change] )) identifier[gcloud_changes] . identifier[create] () keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[gcloud_changes] . identifier[reload] () keyword[if] identifier[gcloud_changes] . identifier[status] != literal[string] : keyword[break] identifier[self] . identifier[log] . identifier[debug] ( literal[string] ) identifier[time] . identifier[sleep] ( identifier[self] . identifier[CHANGE_LOOP_WAIT] ) keyword[if] identifier[gcloud_changes] . identifier[status] != literal[string] : keyword[raise] identifier[RuntimeError] ( literal[string] . identifier[format] ( identifier[i] * identifier[self] . identifier[CHANGE_LOOP_WAIT] ))
def _apply(self, plan): """Required function of manager.py to actually apply a record change. :param plan: Contains the zones and changes to be made :type plan: octodns.provider.base.Plan :type return: void """ desired = plan.desired changes = plan.changes self.log.debug('_apply: zone=%s, len(changes)=%d', desired.name, len(changes)) # Get gcloud zone, or create one if none existed before. if desired.name not in self.gcloud_zones: gcloud_zone = self._create_gcloud_zone(desired.name) # depends on [control=['if'], data=[]] else: gcloud_zone = self.gcloud_zones.get(desired.name) gcloud_changes = gcloud_zone.changes() for change in changes: class_name = change.__class__.__name__ _rrset_func = getattr(self, '_rrset_for_{}'.format(change.record._type)) if class_name == 'Create': gcloud_changes.add_record_set(_rrset_func(gcloud_zone, change.record)) # depends on [control=['if'], data=[]] elif class_name == 'Delete': gcloud_changes.delete_record_set(_rrset_func(gcloud_zone, change.record)) # depends on [control=['if'], data=[]] elif class_name == 'Update': gcloud_changes.delete_record_set(_rrset_func(gcloud_zone, change.existing)) gcloud_changes.add_record_set(_rrset_func(gcloud_zone, change.new)) # depends on [control=['if'], data=[]] else: raise RuntimeError('Change type "{}" for change "{!s}" is none of "Create", "Delete" or "Update'.format(class_name, change)) # depends on [control=['for'], data=['change']] gcloud_changes.create() for i in range(120): gcloud_changes.reload() # https://cloud.google.com/dns/api/v1/changes#resource # status can be one of either "pending" or "done" if gcloud_changes.status != 'pending': break # depends on [control=['if'], data=[]] self.log.debug('Waiting for changes to complete') time.sleep(self.CHANGE_LOOP_WAIT) # depends on [control=['for'], data=[]] if gcloud_changes.status != 'done': raise RuntimeError('Timeout reached after {} seconds'.format(i * self.CHANGE_LOOP_WAIT)) # depends on [control=['if'], data=[]]
def bind(self, data): """ Bind a VertexBuffer that has structured data Parameters ---------- data : VertexBuffer The vertex buffer to bind. The field names of the array are mapped to attribute names in GLSL. """ # Check if not isinstance(data, VertexBuffer): raise ValueError('Program.bind() requires a VertexBuffer.') # Apply for name in data.dtype.names: self[name] = data[name]
def function[bind, parameter[self, data]]: constant[ Bind a VertexBuffer that has structured data Parameters ---------- data : VertexBuffer The vertex buffer to bind. The field names of the array are mapped to attribute names in GLSL. ] if <ast.UnaryOp object at 0x7da1b0e5b0a0> begin[:] <ast.Raise object at 0x7da1b0e5b8b0> for taget[name[name]] in starred[name[data].dtype.names] begin[:] call[name[self]][name[name]] assign[=] call[name[data]][name[name]]
keyword[def] identifier[bind] ( identifier[self] , identifier[data] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[data] , identifier[VertexBuffer] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[for] identifier[name] keyword[in] identifier[data] . identifier[dtype] . identifier[names] : identifier[self] [ identifier[name] ]= identifier[data] [ identifier[name] ]
def bind(self, data): """ Bind a VertexBuffer that has structured data Parameters ---------- data : VertexBuffer The vertex buffer to bind. The field names of the array are mapped to attribute names in GLSL. """ # Check if not isinstance(data, VertexBuffer): raise ValueError('Program.bind() requires a VertexBuffer.') # depends on [control=['if'], data=[]] # Apply for name in data.dtype.names: self[name] = data[name] # depends on [control=['for'], data=['name']]
def _install_container_bcbio_system(datadir): """Install limited bcbio_system.yaml file for setting core and memory usage. Adds any non-specific programs to the exposed bcbio_system.yaml file, only when upgrade happening inside a docker container. """ base_file = os.path.join(datadir, "config", "bcbio_system.yaml") if not os.path.exists(base_file): return expose_file = os.path.join(datadir, "galaxy", "bcbio_system.yaml") expose = set(["memory", "cores", "jvm_opts"]) with open(base_file) as in_handle: config = yaml.safe_load(in_handle) if os.path.exists(expose_file): with open(expose_file) as in_handle: expose_config = yaml.safe_load(in_handle) else: expose_config = {"resources": {}} for pname, vals in config["resources"].items(): expose_vals = {} for k, v in vals.items(): if k in expose: expose_vals[k] = v if len(expose_vals) > 0 and pname not in expose_config["resources"]: expose_config["resources"][pname] = expose_vals if expose_file and os.path.exists(os.path.dirname(expose_file)): with open(expose_file, "w") as out_handle: yaml.safe_dump(expose_config, out_handle, default_flow_style=False, allow_unicode=False) return expose_file
def function[_install_container_bcbio_system, parameter[datadir]]: constant[Install limited bcbio_system.yaml file for setting core and memory usage. Adds any non-specific programs to the exposed bcbio_system.yaml file, only when upgrade happening inside a docker container. ] variable[base_file] assign[=] call[name[os].path.join, parameter[name[datadir], constant[config], constant[bcbio_system.yaml]]] if <ast.UnaryOp object at 0x7da20c76e860> begin[:] return[None] variable[expose_file] assign[=] call[name[os].path.join, parameter[name[datadir], constant[galaxy], constant[bcbio_system.yaml]]] variable[expose] assign[=] call[name[set], parameter[list[[<ast.Constant object at 0x7da18bcc97e0>, <ast.Constant object at 0x7da18bcc9450>, <ast.Constant object at 0x7da18bcca110>]]]] with call[name[open], parameter[name[base_file]]] begin[:] variable[config] assign[=] call[name[yaml].safe_load, parameter[name[in_handle]]] if call[name[os].path.exists, parameter[name[expose_file]]] begin[:] with call[name[open], parameter[name[expose_file]]] begin[:] variable[expose_config] assign[=] call[name[yaml].safe_load, parameter[name[in_handle]]] for taget[tuple[[<ast.Name object at 0x7da1b26aca60>, <ast.Name object at 0x7da1b26aeb30>]]] in starred[call[call[name[config]][constant[resources]].items, parameter[]]] begin[:] variable[expose_vals] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da1b26afca0>, <ast.Name object at 0x7da1b26aded0>]]] in starred[call[name[vals].items, parameter[]]] begin[:] if compare[name[k] in name[expose]] begin[:] call[name[expose_vals]][name[k]] assign[=] name[v] if <ast.BoolOp object at 0x7da1b26ac7f0> begin[:] call[call[name[expose_config]][constant[resources]]][name[pname]] assign[=] name[expose_vals] if <ast.BoolOp object at 0x7da1b26aefe0> begin[:] with call[name[open], parameter[name[expose_file], constant[w]]] begin[:] call[name[yaml].safe_dump, parameter[name[expose_config], name[out_handle]]] return[name[expose_file]]
keyword[def] identifier[_install_container_bcbio_system] ( identifier[datadir] ): literal[string] identifier[base_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[datadir] , literal[string] , literal[string] ) keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[base_file] ): keyword[return] identifier[expose_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[datadir] , literal[string] , literal[string] ) identifier[expose] = identifier[set] ([ literal[string] , literal[string] , literal[string] ]) keyword[with] identifier[open] ( identifier[base_file] ) keyword[as] identifier[in_handle] : identifier[config] = identifier[yaml] . identifier[safe_load] ( identifier[in_handle] ) keyword[if] identifier[os] . identifier[path] . identifier[exists] ( identifier[expose_file] ): keyword[with] identifier[open] ( identifier[expose_file] ) keyword[as] identifier[in_handle] : identifier[expose_config] = identifier[yaml] . identifier[safe_load] ( identifier[in_handle] ) keyword[else] : identifier[expose_config] ={ literal[string] :{}} keyword[for] identifier[pname] , identifier[vals] keyword[in] identifier[config] [ literal[string] ]. identifier[items] (): identifier[expose_vals] ={} keyword[for] identifier[k] , identifier[v] keyword[in] identifier[vals] . identifier[items] (): keyword[if] identifier[k] keyword[in] identifier[expose] : identifier[expose_vals] [ identifier[k] ]= identifier[v] keyword[if] identifier[len] ( identifier[expose_vals] )> literal[int] keyword[and] identifier[pname] keyword[not] keyword[in] identifier[expose_config] [ literal[string] ]: identifier[expose_config] [ literal[string] ][ identifier[pname] ]= identifier[expose_vals] keyword[if] identifier[expose_file] keyword[and] identifier[os] . identifier[path] . identifier[exists] ( identifier[os] . identifier[path] . identifier[dirname] ( identifier[expose_file] )): keyword[with] identifier[open] ( identifier[expose_file] , literal[string] ) keyword[as] identifier[out_handle] : identifier[yaml] . identifier[safe_dump] ( identifier[expose_config] , identifier[out_handle] , identifier[default_flow_style] = keyword[False] , identifier[allow_unicode] = keyword[False] ) keyword[return] identifier[expose_file]
def _install_container_bcbio_system(datadir): """Install limited bcbio_system.yaml file for setting core and memory usage. Adds any non-specific programs to the exposed bcbio_system.yaml file, only when upgrade happening inside a docker container. """ base_file = os.path.join(datadir, 'config', 'bcbio_system.yaml') if not os.path.exists(base_file): return # depends on [control=['if'], data=[]] expose_file = os.path.join(datadir, 'galaxy', 'bcbio_system.yaml') expose = set(['memory', 'cores', 'jvm_opts']) with open(base_file) as in_handle: config = yaml.safe_load(in_handle) # depends on [control=['with'], data=['in_handle']] if os.path.exists(expose_file): with open(expose_file) as in_handle: expose_config = yaml.safe_load(in_handle) # depends on [control=['with'], data=['in_handle']] # depends on [control=['if'], data=[]] else: expose_config = {'resources': {}} for (pname, vals) in config['resources'].items(): expose_vals = {} for (k, v) in vals.items(): if k in expose: expose_vals[k] = v # depends on [control=['if'], data=['k']] # depends on [control=['for'], data=[]] if len(expose_vals) > 0 and pname not in expose_config['resources']: expose_config['resources'][pname] = expose_vals # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] if expose_file and os.path.exists(os.path.dirname(expose_file)): with open(expose_file, 'w') as out_handle: yaml.safe_dump(expose_config, out_handle, default_flow_style=False, allow_unicode=False) # depends on [control=['with'], data=['out_handle']] # depends on [control=['if'], data=[]] return expose_file
def stats(path, hash_type='sha256', follow_symlinks=True): ''' Return a dict containing the stats about a given file Under Windows, `gid` will equal `uid` and `group` will equal `user`. While a file in Windows does have a 'primary group', this rarely used attribute generally has no bearing on permissions unless intentionally configured and is only used to support Unix compatibility features (e.g. Services For Unix, NFS services). Salt, therefore, remaps these properties to keep some kind of compatibility with Unix behavior. If the 'primary group' is required, it can be accessed in the `pgroup` and `pgid` properties. Args: path (str): The path to the file or directory hash_type (str): The type of hash to return follow_symlinks (bool): If the object specified by ``path`` is a symlink, get attributes of the linked file instead of the symlink itself. Default is True Returns: dict: A dictionary of file/directory stats CLI Example: .. code-block:: bash salt '*' file.stats /etc/passwd ''' # This is to mirror the behavior of file.py. `check_file_meta` expects an # empty dictionary when the file does not exist if not os.path.exists(path): raise CommandExecutionError('Path not found: {0}'.format(path)) if follow_symlinks and sys.getwindowsversion().major >= 6: path = _resolve_symlink(path) pstat = os.stat(path) ret = {} ret['inode'] = pstat.st_ino # don't need to resolve symlinks again because we've already done that ret['uid'] = get_uid(path, follow_symlinks=False) # maintain the illusion that group is the same as user as states need this ret['gid'] = ret['uid'] ret['user'] = uid_to_user(ret['uid']) ret['group'] = ret['user'] ret['pgid'] = get_pgid(path, follow_symlinks) ret['pgroup'] = gid_to_group(ret['pgid']) ret['atime'] = pstat.st_atime ret['mtime'] = pstat.st_mtime ret['ctime'] = pstat.st_ctime ret['size'] = pstat.st_size ret['mode'] = six.text_type(oct(stat.S_IMODE(pstat.st_mode))) if hash_type: ret['sum'] = get_sum(path, hash_type) ret['type'] = 'file' if stat.S_ISDIR(pstat.st_mode): ret['type'] = 'dir' if stat.S_ISCHR(pstat.st_mode): ret['type'] = 'char' if stat.S_ISBLK(pstat.st_mode): ret['type'] = 'block' if stat.S_ISREG(pstat.st_mode): ret['type'] = 'file' if stat.S_ISLNK(pstat.st_mode): ret['type'] = 'link' if stat.S_ISFIFO(pstat.st_mode): ret['type'] = 'pipe' if stat.S_ISSOCK(pstat.st_mode): ret['type'] = 'socket' ret['target'] = os.path.realpath(path) return ret
def function[stats, parameter[path, hash_type, follow_symlinks]]: constant[ Return a dict containing the stats about a given file Under Windows, `gid` will equal `uid` and `group` will equal `user`. While a file in Windows does have a 'primary group', this rarely used attribute generally has no bearing on permissions unless intentionally configured and is only used to support Unix compatibility features (e.g. Services For Unix, NFS services). Salt, therefore, remaps these properties to keep some kind of compatibility with Unix behavior. If the 'primary group' is required, it can be accessed in the `pgroup` and `pgid` properties. Args: path (str): The path to the file or directory hash_type (str): The type of hash to return follow_symlinks (bool): If the object specified by ``path`` is a symlink, get attributes of the linked file instead of the symlink itself. Default is True Returns: dict: A dictionary of file/directory stats CLI Example: .. code-block:: bash salt '*' file.stats /etc/passwd ] if <ast.UnaryOp object at 0x7da1b2196b60> begin[:] <ast.Raise object at 0x7da1b2197010> if <ast.BoolOp object at 0x7da1b21954e0> begin[:] variable[path] assign[=] call[name[_resolve_symlink], parameter[name[path]]] variable[pstat] assign[=] call[name[os].stat, parameter[name[path]]] variable[ret] assign[=] dictionary[[], []] call[name[ret]][constant[inode]] assign[=] name[pstat].st_ino call[name[ret]][constant[uid]] assign[=] call[name[get_uid], parameter[name[path]]] call[name[ret]][constant[gid]] assign[=] call[name[ret]][constant[uid]] call[name[ret]][constant[user]] assign[=] call[name[uid_to_user], parameter[call[name[ret]][constant[uid]]]] call[name[ret]][constant[group]] assign[=] call[name[ret]][constant[user]] call[name[ret]][constant[pgid]] assign[=] call[name[get_pgid], parameter[name[path], name[follow_symlinks]]] call[name[ret]][constant[pgroup]] assign[=] call[name[gid_to_group], parameter[call[name[ret]][constant[pgid]]]] call[name[ret]][constant[atime]] assign[=] name[pstat].st_atime call[name[ret]][constant[mtime]] assign[=] name[pstat].st_mtime call[name[ret]][constant[ctime]] assign[=] name[pstat].st_ctime call[name[ret]][constant[size]] assign[=] name[pstat].st_size call[name[ret]][constant[mode]] assign[=] call[name[six].text_type, parameter[call[name[oct], parameter[call[name[stat].S_IMODE, parameter[name[pstat].st_mode]]]]]] if name[hash_type] begin[:] call[name[ret]][constant[sum]] assign[=] call[name[get_sum], parameter[name[path], name[hash_type]]] call[name[ret]][constant[type]] assign[=] constant[file] if call[name[stat].S_ISDIR, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[dir] if call[name[stat].S_ISCHR, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[char] if call[name[stat].S_ISBLK, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[block] if call[name[stat].S_ISREG, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[file] if call[name[stat].S_ISLNK, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[link] if call[name[stat].S_ISFIFO, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[pipe] if call[name[stat].S_ISSOCK, parameter[name[pstat].st_mode]] begin[:] call[name[ret]][constant[type]] assign[=] constant[socket] call[name[ret]][constant[target]] assign[=] call[name[os].path.realpath, parameter[name[path]]] return[name[ret]]
keyword[def] identifier[stats] ( identifier[path] , identifier[hash_type] = literal[string] , identifier[follow_symlinks] = keyword[True] ): literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[path] ): keyword[raise] identifier[CommandExecutionError] ( literal[string] . identifier[format] ( identifier[path] )) keyword[if] identifier[follow_symlinks] keyword[and] identifier[sys] . identifier[getwindowsversion] (). identifier[major] >= literal[int] : identifier[path] = identifier[_resolve_symlink] ( identifier[path] ) identifier[pstat] = identifier[os] . identifier[stat] ( identifier[path] ) identifier[ret] ={} identifier[ret] [ literal[string] ]= identifier[pstat] . identifier[st_ino] identifier[ret] [ literal[string] ]= identifier[get_uid] ( identifier[path] , identifier[follow_symlinks] = keyword[False] ) identifier[ret] [ literal[string] ]= identifier[ret] [ literal[string] ] identifier[ret] [ literal[string] ]= identifier[uid_to_user] ( identifier[ret] [ literal[string] ]) identifier[ret] [ literal[string] ]= identifier[ret] [ literal[string] ] identifier[ret] [ literal[string] ]= identifier[get_pgid] ( identifier[path] , identifier[follow_symlinks] ) identifier[ret] [ literal[string] ]= identifier[gid_to_group] ( identifier[ret] [ literal[string] ]) identifier[ret] [ literal[string] ]= identifier[pstat] . identifier[st_atime] identifier[ret] [ literal[string] ]= identifier[pstat] . identifier[st_mtime] identifier[ret] [ literal[string] ]= identifier[pstat] . identifier[st_ctime] identifier[ret] [ literal[string] ]= identifier[pstat] . identifier[st_size] identifier[ret] [ literal[string] ]= identifier[six] . identifier[text_type] ( identifier[oct] ( identifier[stat] . identifier[S_IMODE] ( identifier[pstat] . identifier[st_mode] ))) keyword[if] identifier[hash_type] : identifier[ret] [ literal[string] ]= identifier[get_sum] ( identifier[path] , identifier[hash_type] ) identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISDIR] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISCHR] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISBLK] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISREG] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISLNK] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISFIFO] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] keyword[if] identifier[stat] . identifier[S_ISSOCK] ( identifier[pstat] . identifier[st_mode] ): identifier[ret] [ literal[string] ]= literal[string] identifier[ret] [ literal[string] ]= identifier[os] . identifier[path] . identifier[realpath] ( identifier[path] ) keyword[return] identifier[ret]
def stats(path, hash_type='sha256', follow_symlinks=True): """ Return a dict containing the stats about a given file Under Windows, `gid` will equal `uid` and `group` will equal `user`. While a file in Windows does have a 'primary group', this rarely used attribute generally has no bearing on permissions unless intentionally configured and is only used to support Unix compatibility features (e.g. Services For Unix, NFS services). Salt, therefore, remaps these properties to keep some kind of compatibility with Unix behavior. If the 'primary group' is required, it can be accessed in the `pgroup` and `pgid` properties. Args: path (str): The path to the file or directory hash_type (str): The type of hash to return follow_symlinks (bool): If the object specified by ``path`` is a symlink, get attributes of the linked file instead of the symlink itself. Default is True Returns: dict: A dictionary of file/directory stats CLI Example: .. code-block:: bash salt '*' file.stats /etc/passwd """ # This is to mirror the behavior of file.py. `check_file_meta` expects an # empty dictionary when the file does not exist if not os.path.exists(path): raise CommandExecutionError('Path not found: {0}'.format(path)) # depends on [control=['if'], data=[]] if follow_symlinks and sys.getwindowsversion().major >= 6: path = _resolve_symlink(path) # depends on [control=['if'], data=[]] pstat = os.stat(path) ret = {} ret['inode'] = pstat.st_ino # don't need to resolve symlinks again because we've already done that ret['uid'] = get_uid(path, follow_symlinks=False) # maintain the illusion that group is the same as user as states need this ret['gid'] = ret['uid'] ret['user'] = uid_to_user(ret['uid']) ret['group'] = ret['user'] ret['pgid'] = get_pgid(path, follow_symlinks) ret['pgroup'] = gid_to_group(ret['pgid']) ret['atime'] = pstat.st_atime ret['mtime'] = pstat.st_mtime ret['ctime'] = pstat.st_ctime ret['size'] = pstat.st_size ret['mode'] = six.text_type(oct(stat.S_IMODE(pstat.st_mode))) if hash_type: ret['sum'] = get_sum(path, hash_type) # depends on [control=['if'], data=[]] ret['type'] = 'file' if stat.S_ISDIR(pstat.st_mode): ret['type'] = 'dir' # depends on [control=['if'], data=[]] if stat.S_ISCHR(pstat.st_mode): ret['type'] = 'char' # depends on [control=['if'], data=[]] if stat.S_ISBLK(pstat.st_mode): ret['type'] = 'block' # depends on [control=['if'], data=[]] if stat.S_ISREG(pstat.st_mode): ret['type'] = 'file' # depends on [control=['if'], data=[]] if stat.S_ISLNK(pstat.st_mode): ret['type'] = 'link' # depends on [control=['if'], data=[]] if stat.S_ISFIFO(pstat.st_mode): ret['type'] = 'pipe' # depends on [control=['if'], data=[]] if stat.S_ISSOCK(pstat.st_mode): ret['type'] = 'socket' # depends on [control=['if'], data=[]] ret['target'] = os.path.realpath(path) return ret
def info(host=None, port=None, db=None, password=None): ''' Get information and statistics about the server CLI Example: .. code-block:: bash salt '*' redis.info ''' server = _connect(host, port, db, password) return server.info()
def function[info, parameter[host, port, db, password]]: constant[ Get information and statistics about the server CLI Example: .. code-block:: bash salt '*' redis.info ] variable[server] assign[=] call[name[_connect], parameter[name[host], name[port], name[db], name[password]]] return[call[name[server].info, parameter[]]]
keyword[def] identifier[info] ( identifier[host] = keyword[None] , identifier[port] = keyword[None] , identifier[db] = keyword[None] , identifier[password] = keyword[None] ): literal[string] identifier[server] = identifier[_connect] ( identifier[host] , identifier[port] , identifier[db] , identifier[password] ) keyword[return] identifier[server] . identifier[info] ()
def info(host=None, port=None, db=None, password=None): """ Get information and statistics about the server CLI Example: .. code-block:: bash salt '*' redis.info """ server = _connect(host, port, db, password) return server.info()
def launchQueryForMode(self, query=None, mode=None): """ Method that launches an i3Browser to collect data. Args: ----- query: The query to be performed mode: The mode to be used to build the query. Return: ------- A string containing the recovered data or None. """ # Creating the query URL for that mode qURL = self.createURL(word=query, mode=mode) i3Browser = browser.Browser() try: # Check if it needs creds if self.needsCredentials[mode]: self._getAuthenticated(i3Browser, qURL) data = i3Browser.recoverURL(qURL) else: # Accessing the resources data = i3Browser.recoverURL(qURL) return data except KeyError: print(general.error("[*] '{}' is not a valid mode for this wrapper ({}).".format(mode, self.__class__.__name__))) return None
def function[launchQueryForMode, parameter[self, query, mode]]: constant[ Method that launches an i3Browser to collect data. Args: ----- query: The query to be performed mode: The mode to be used to build the query. Return: ------- A string containing the recovered data or None. ] variable[qURL] assign[=] call[name[self].createURL, parameter[]] variable[i3Browser] assign[=] call[name[browser].Browser, parameter[]] <ast.Try object at 0x7da1b2347c40> return[constant[None]]
keyword[def] identifier[launchQueryForMode] ( identifier[self] , identifier[query] = keyword[None] , identifier[mode] = keyword[None] ): literal[string] identifier[qURL] = identifier[self] . identifier[createURL] ( identifier[word] = identifier[query] , identifier[mode] = identifier[mode] ) identifier[i3Browser] = identifier[browser] . identifier[Browser] () keyword[try] : keyword[if] identifier[self] . identifier[needsCredentials] [ identifier[mode] ]: identifier[self] . identifier[_getAuthenticated] ( identifier[i3Browser] , identifier[qURL] ) identifier[data] = identifier[i3Browser] . identifier[recoverURL] ( identifier[qURL] ) keyword[else] : identifier[data] = identifier[i3Browser] . identifier[recoverURL] ( identifier[qURL] ) keyword[return] identifier[data] keyword[except] identifier[KeyError] : identifier[print] ( identifier[general] . identifier[error] ( literal[string] . identifier[format] ( identifier[mode] , identifier[self] . identifier[__class__] . identifier[__name__] ))) keyword[return] keyword[None]
def launchQueryForMode(self, query=None, mode=None): """ Method that launches an i3Browser to collect data. Args: ----- query: The query to be performed mode: The mode to be used to build the query. Return: ------- A string containing the recovered data or None. """ # Creating the query URL for that mode qURL = self.createURL(word=query, mode=mode) i3Browser = browser.Browser() try: # Check if it needs creds if self.needsCredentials[mode]: self._getAuthenticated(i3Browser, qURL) data = i3Browser.recoverURL(qURL) # depends on [control=['if'], data=[]] else: # Accessing the resources data = i3Browser.recoverURL(qURL) return data # depends on [control=['try'], data=[]] except KeyError: print(general.error("[*] '{}' is not a valid mode for this wrapper ({}).".format(mode, self.__class__.__name__))) # depends on [control=['except'], data=[]] return None
def xml_marshal_bucket_constraint(region): """ Marshal's bucket constraint based on *region*. :param region: Region name of a given bucket. :return: Marshalled XML data. """ root = s3_xml.Element('CreateBucketConfiguration', {'xmlns': _S3_NAMESPACE}) location_constraint = s3_xml.SubElement(root, 'LocationConstraint') location_constraint.text = region data = io.BytesIO() s3_xml.ElementTree(root).write(data, encoding=None, xml_declaration=False) return data.getvalue()
def function[xml_marshal_bucket_constraint, parameter[region]]: constant[ Marshal's bucket constraint based on *region*. :param region: Region name of a given bucket. :return: Marshalled XML data. ] variable[root] assign[=] call[name[s3_xml].Element, parameter[constant[CreateBucketConfiguration], dictionary[[<ast.Constant object at 0x7da1b1e6a500>], [<ast.Name object at 0x7da1b1e6a260>]]]] variable[location_constraint] assign[=] call[name[s3_xml].SubElement, parameter[name[root], constant[LocationConstraint]]] name[location_constraint].text assign[=] name[region] variable[data] assign[=] call[name[io].BytesIO, parameter[]] call[call[name[s3_xml].ElementTree, parameter[name[root]]].write, parameter[name[data]]] return[call[name[data].getvalue, parameter[]]]
keyword[def] identifier[xml_marshal_bucket_constraint] ( identifier[region] ): literal[string] identifier[root] = identifier[s3_xml] . identifier[Element] ( literal[string] ,{ literal[string] : identifier[_S3_NAMESPACE] }) identifier[location_constraint] = identifier[s3_xml] . identifier[SubElement] ( identifier[root] , literal[string] ) identifier[location_constraint] . identifier[text] = identifier[region] identifier[data] = identifier[io] . identifier[BytesIO] () identifier[s3_xml] . identifier[ElementTree] ( identifier[root] ). identifier[write] ( identifier[data] , identifier[encoding] = keyword[None] , identifier[xml_declaration] = keyword[False] ) keyword[return] identifier[data] . identifier[getvalue] ()
def xml_marshal_bucket_constraint(region): """ Marshal's bucket constraint based on *region*. :param region: Region name of a given bucket. :return: Marshalled XML data. """ root = s3_xml.Element('CreateBucketConfiguration', {'xmlns': _S3_NAMESPACE}) location_constraint = s3_xml.SubElement(root, 'LocationConstraint') location_constraint.text = region data = io.BytesIO() s3_xml.ElementTree(root).write(data, encoding=None, xml_declaration=False) return data.getvalue()
def get_command(self, ctx, name): """ Get a bound command method @type ctx: Context @param name: Command name @type name: str @rtype: object """ try: mod = importlib.import_module('ips_vagrant.commands.{name}'.format(name=name)) return mod.cli except (ImportError, AttributeError): return
def function[get_command, parameter[self, ctx, name]]: constant[ Get a bound command method @type ctx: Context @param name: Command name @type name: str @rtype: object ] <ast.Try object at 0x7da18f00ed70>
keyword[def] identifier[get_command] ( identifier[self] , identifier[ctx] , identifier[name] ): literal[string] keyword[try] : identifier[mod] = identifier[importlib] . identifier[import_module] ( literal[string] . identifier[format] ( identifier[name] = identifier[name] )) keyword[return] identifier[mod] . identifier[cli] keyword[except] ( identifier[ImportError] , identifier[AttributeError] ): keyword[return]
def get_command(self, ctx, name): """ Get a bound command method @type ctx: Context @param name: Command name @type name: str @rtype: object """ try: mod = importlib.import_module('ips_vagrant.commands.{name}'.format(name=name)) return mod.cli # depends on [control=['try'], data=[]] except (ImportError, AttributeError): return # depends on [control=['except'], data=[]]
def _readResponse(self): """ Yield each row of response untill !done is received. :throws TrapError: If one !trap is received. :throws MultiTrapError: If > 1 !trap is received. """ traps = [] reply_word = None while reply_word != '!done': reply_word, words = self._readSentence() if reply_word == '!trap': traps.append(TrapError(**words)) elif reply_word in ('!re', '!done') and words: yield words if len(traps) > 1: raise MultiTrapError(*traps) elif len(traps) == 1: raise traps[0]
def function[_readResponse, parameter[self]]: constant[ Yield each row of response untill !done is received. :throws TrapError: If one !trap is received. :throws MultiTrapError: If > 1 !trap is received. ] variable[traps] assign[=] list[[]] variable[reply_word] assign[=] constant[None] while compare[name[reply_word] not_equal[!=] constant[!done]] begin[:] <ast.Tuple object at 0x7da1b1081060> assign[=] call[name[self]._readSentence, parameter[]] if compare[name[reply_word] equal[==] constant[!trap]] begin[:] call[name[traps].append, parameter[call[name[TrapError], parameter[]]]] if compare[call[name[len], parameter[name[traps]]] greater[>] constant[1]] begin[:] <ast.Raise object at 0x7da1b10838e0>
keyword[def] identifier[_readResponse] ( identifier[self] ): literal[string] identifier[traps] =[] identifier[reply_word] = keyword[None] keyword[while] identifier[reply_word] != literal[string] : identifier[reply_word] , identifier[words] = identifier[self] . identifier[_readSentence] () keyword[if] identifier[reply_word] == literal[string] : identifier[traps] . identifier[append] ( identifier[TrapError] (** identifier[words] )) keyword[elif] identifier[reply_word] keyword[in] ( literal[string] , literal[string] ) keyword[and] identifier[words] : keyword[yield] identifier[words] keyword[if] identifier[len] ( identifier[traps] )> literal[int] : keyword[raise] identifier[MultiTrapError] (* identifier[traps] ) keyword[elif] identifier[len] ( identifier[traps] )== literal[int] : keyword[raise] identifier[traps] [ literal[int] ]
def _readResponse(self): """ Yield each row of response untill !done is received. :throws TrapError: If one !trap is received. :throws MultiTrapError: If > 1 !trap is received. """ traps = [] reply_word = None while reply_word != '!done': (reply_word, words) = self._readSentence() if reply_word == '!trap': traps.append(TrapError(**words)) # depends on [control=['if'], data=[]] elif reply_word in ('!re', '!done') and words: yield words # depends on [control=['if'], data=[]] # depends on [control=['while'], data=['reply_word']] if len(traps) > 1: raise MultiTrapError(*traps) # depends on [control=['if'], data=[]] elif len(traps) == 1: raise traps[0] # depends on [control=['if'], data=[]]
def adjust_privileges(state, privileges): """ Requests or drops privileges. @type state: bool @param state: C{True} to request, C{False} to drop. @type privileges: list(int) @param privileges: Privileges to request or drop. @raise WindowsError: Raises an exception on error. """ with win32.OpenProcessToken(win32.GetCurrentProcess(), win32.TOKEN_ADJUST_PRIVILEGES) as hToken: NewState = ( (priv, state) for priv in privileges ) win32.AdjustTokenPrivileges(hToken, NewState)
def function[adjust_privileges, parameter[state, privileges]]: constant[ Requests or drops privileges. @type state: bool @param state: C{True} to request, C{False} to drop. @type privileges: list(int) @param privileges: Privileges to request or drop. @raise WindowsError: Raises an exception on error. ] with call[name[win32].OpenProcessToken, parameter[call[name[win32].GetCurrentProcess, parameter[]], name[win32].TOKEN_ADJUST_PRIVILEGES]] begin[:] variable[NewState] assign[=] <ast.GeneratorExp object at 0x7da2046203a0> call[name[win32].AdjustTokenPrivileges, parameter[name[hToken], name[NewState]]]
keyword[def] identifier[adjust_privileges] ( identifier[state] , identifier[privileges] ): literal[string] keyword[with] identifier[win32] . identifier[OpenProcessToken] ( identifier[win32] . identifier[GetCurrentProcess] (), identifier[win32] . identifier[TOKEN_ADJUST_PRIVILEGES] ) keyword[as] identifier[hToken] : identifier[NewState] =(( identifier[priv] , identifier[state] ) keyword[for] identifier[priv] keyword[in] identifier[privileges] ) identifier[win32] . identifier[AdjustTokenPrivileges] ( identifier[hToken] , identifier[NewState] )
def adjust_privileges(state, privileges): """ Requests or drops privileges. @type state: bool @param state: C{True} to request, C{False} to drop. @type privileges: list(int) @param privileges: Privileges to request or drop. @raise WindowsError: Raises an exception on error. """ with win32.OpenProcessToken(win32.GetCurrentProcess(), win32.TOKEN_ADJUST_PRIVILEGES) as hToken: NewState = ((priv, state) for priv in privileges) win32.AdjustTokenPrivileges(hToken, NewState) # depends on [control=['with'], data=['hToken']]
def download(ctx): """Download data.""" log.debug('chemdataextractor.data.download') count = 0 for package in PACKAGES: success = package.download() if success: count += 1 click.echo('Successfully downloaded %s new data packages (%s existing)' % (count, len(PACKAGES) - count))
def function[download, parameter[ctx]]: constant[Download data.] call[name[log].debug, parameter[constant[chemdataextractor.data.download]]] variable[count] assign[=] constant[0] for taget[name[package]] in starred[name[PACKAGES]] begin[:] variable[success] assign[=] call[name[package].download, parameter[]] if name[success] begin[:] <ast.AugAssign object at 0x7da1b1389f00> call[name[click].echo, parameter[binary_operation[constant[Successfully downloaded %s new data packages (%s existing)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b13895a0>, <ast.BinOp object at 0x7da1b13898d0>]]]]]
keyword[def] identifier[download] ( identifier[ctx] ): literal[string] identifier[log] . identifier[debug] ( literal[string] ) identifier[count] = literal[int] keyword[for] identifier[package] keyword[in] identifier[PACKAGES] : identifier[success] = identifier[package] . identifier[download] () keyword[if] identifier[success] : identifier[count] += literal[int] identifier[click] . identifier[echo] ( literal[string] %( identifier[count] , identifier[len] ( identifier[PACKAGES] )- identifier[count] ))
def download(ctx): """Download data.""" log.debug('chemdataextractor.data.download') count = 0 for package in PACKAGES: success = package.download() if success: count += 1 # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['package']] click.echo('Successfully downloaded %s new data packages (%s existing)' % (count, len(PACKAGES) - count))
def better_exchook(etype, value, tb, debugshell=False, autodebugshell=True, file=None, with_color=None): """ Replacement for sys.excepthook. :param etype: exception type :param value: exception value :param tb: traceback :param bool debugshell: spawn a debug shell at the context of the exception :param bool autodebugshell: if env DEBUG is an integer != 0, it will spawn a debug shell :param io.TextIOBase|io.StringIO file: output stream where we will print the traceback and exception information. stderr by default. :param bool|None with_color: whether to use ANSI escape codes for colored output """ if file is None: file = sys.stderr def output(ln): """ :param str ln: :return: nothing, prints to ``file`` """ file.write(ln + "\n") color = Color(enable=with_color) output(color("EXCEPTION", color.fg_colors[1], bold=True)) all_locals, all_globals = {}, {} if tb is not None: print_tb(tb, allLocals=all_locals, allGlobals=all_globals, file=file, withTitle=True, with_color=color.enable) else: output(color("better_exchook: traceback unknown", color.fg_colors[1])) import types # noinspection PyShadowingNames def _some_str(value): """ :param object value: :rtype: str """ # noinspection PyBroadException try: return str(value) except Exception: return '<unprintable %s object>' % type(value).__name__ # noinspection PyShadowingNames def _format_final_exc_line(etype, value): value_str = _some_str(value) if value is None or not value_str: line = color("%s" % etype, color.fg_colors[1]) else: line = color("%s" % etype, color.fg_colors[1]) + ": %s" % (value_str,) return line # noinspection PyUnresolvedReferences if (isinstance(etype, BaseException) or (hasattr(types, "InstanceType") and isinstance(etype, types.InstanceType)) or etype is None or type(etype) is str): output(_format_final_exc_line(etype, value)) else: output(_format_final_exc_line(etype.__name__, value)) if autodebugshell: # noinspection PyBroadException try: debugshell = int(os.environ["DEBUG"]) != 0 except Exception: pass if debugshell: output("---------- DEBUG SHELL -----------") debug_shell(user_ns=all_locals, user_global_ns=all_globals, traceback=tb) file.flush()
def function[better_exchook, parameter[etype, value, tb, debugshell, autodebugshell, file, with_color]]: constant[ Replacement for sys.excepthook. :param etype: exception type :param value: exception value :param tb: traceback :param bool debugshell: spawn a debug shell at the context of the exception :param bool autodebugshell: if env DEBUG is an integer != 0, it will spawn a debug shell :param io.TextIOBase|io.StringIO file: output stream where we will print the traceback and exception information. stderr by default. :param bool|None with_color: whether to use ANSI escape codes for colored output ] if compare[name[file] is constant[None]] begin[:] variable[file] assign[=] name[sys].stderr def function[output, parameter[ln]]: constant[ :param str ln: :return: nothing, prints to ``file`` ] call[name[file].write, parameter[binary_operation[name[ln] + constant[ ]]]] variable[color] assign[=] call[name[Color], parameter[]] call[name[output], parameter[call[name[color], parameter[constant[EXCEPTION], call[name[color].fg_colors][constant[1]]]]]] <ast.Tuple object at 0x7da1b242af20> assign[=] tuple[[<ast.Dict object at 0x7da1b2429ba0>, <ast.Dict object at 0x7da1b24295a0>]] if compare[name[tb] is_not constant[None]] begin[:] call[name[print_tb], parameter[name[tb]]] import module[types] def function[_some_str, parameter[value]]: constant[ :param object value: :rtype: str ] <ast.Try object at 0x7da1b2429c30> def function[_format_final_exc_line, parameter[etype, value]]: variable[value_str] assign[=] call[name[_some_str], parameter[name[value]]] if <ast.BoolOp object at 0x7da1b242a890> begin[:] variable[line] assign[=] call[name[color], parameter[binary_operation[constant[%s] <ast.Mod object at 0x7da2590d6920> name[etype]], call[name[color].fg_colors][constant[1]]]] return[name[line]] if <ast.BoolOp object at 0x7da1b24c0220> begin[:] call[name[output], parameter[call[name[_format_final_exc_line], parameter[name[etype], name[value]]]]] if name[autodebugshell] begin[:] <ast.Try object at 0x7da1b24c0490> if name[debugshell] begin[:] call[name[output], parameter[constant[---------- DEBUG SHELL -----------]]] call[name[debug_shell], parameter[]] call[name[file].flush, parameter[]]
keyword[def] identifier[better_exchook] ( identifier[etype] , identifier[value] , identifier[tb] , identifier[debugshell] = keyword[False] , identifier[autodebugshell] = keyword[True] , identifier[file] = keyword[None] , identifier[with_color] = keyword[None] ): literal[string] keyword[if] identifier[file] keyword[is] keyword[None] : identifier[file] = identifier[sys] . identifier[stderr] keyword[def] identifier[output] ( identifier[ln] ): literal[string] identifier[file] . identifier[write] ( identifier[ln] + literal[string] ) identifier[color] = identifier[Color] ( identifier[enable] = identifier[with_color] ) identifier[output] ( identifier[color] ( literal[string] , identifier[color] . identifier[fg_colors] [ literal[int] ], identifier[bold] = keyword[True] )) identifier[all_locals] , identifier[all_globals] ={},{} keyword[if] identifier[tb] keyword[is] keyword[not] keyword[None] : identifier[print_tb] ( identifier[tb] , identifier[allLocals] = identifier[all_locals] , identifier[allGlobals] = identifier[all_globals] , identifier[file] = identifier[file] , identifier[withTitle] = keyword[True] , identifier[with_color] = identifier[color] . identifier[enable] ) keyword[else] : identifier[output] ( identifier[color] ( literal[string] , identifier[color] . identifier[fg_colors] [ literal[int] ])) keyword[import] identifier[types] keyword[def] identifier[_some_str] ( identifier[value] ): literal[string] keyword[try] : keyword[return] identifier[str] ( identifier[value] ) keyword[except] identifier[Exception] : keyword[return] literal[string] % identifier[type] ( identifier[value] ). identifier[__name__] keyword[def] identifier[_format_final_exc_line] ( identifier[etype] , identifier[value] ): identifier[value_str] = identifier[_some_str] ( identifier[value] ) keyword[if] identifier[value] keyword[is] keyword[None] keyword[or] keyword[not] identifier[value_str] : identifier[line] = identifier[color] ( literal[string] % identifier[etype] , identifier[color] . identifier[fg_colors] [ literal[int] ]) keyword[else] : identifier[line] = identifier[color] ( literal[string] % identifier[etype] , identifier[color] . identifier[fg_colors] [ literal[int] ])+ literal[string] %( identifier[value_str] ,) keyword[return] identifier[line] keyword[if] ( identifier[isinstance] ( identifier[etype] , identifier[BaseException] ) keyword[or] ( identifier[hasattr] ( identifier[types] , literal[string] ) keyword[and] identifier[isinstance] ( identifier[etype] , identifier[types] . identifier[InstanceType] )) keyword[or] identifier[etype] keyword[is] keyword[None] keyword[or] identifier[type] ( identifier[etype] ) keyword[is] identifier[str] ): identifier[output] ( identifier[_format_final_exc_line] ( identifier[etype] , identifier[value] )) keyword[else] : identifier[output] ( identifier[_format_final_exc_line] ( identifier[etype] . identifier[__name__] , identifier[value] )) keyword[if] identifier[autodebugshell] : keyword[try] : identifier[debugshell] = identifier[int] ( identifier[os] . identifier[environ] [ literal[string] ])!= literal[int] keyword[except] identifier[Exception] : keyword[pass] keyword[if] identifier[debugshell] : identifier[output] ( literal[string] ) identifier[debug_shell] ( identifier[user_ns] = identifier[all_locals] , identifier[user_global_ns] = identifier[all_globals] , identifier[traceback] = identifier[tb] ) identifier[file] . identifier[flush] ()
def better_exchook(etype, value, tb, debugshell=False, autodebugshell=True, file=None, with_color=None): """ Replacement for sys.excepthook. :param etype: exception type :param value: exception value :param tb: traceback :param bool debugshell: spawn a debug shell at the context of the exception :param bool autodebugshell: if env DEBUG is an integer != 0, it will spawn a debug shell :param io.TextIOBase|io.StringIO file: output stream where we will print the traceback and exception information. stderr by default. :param bool|None with_color: whether to use ANSI escape codes for colored output """ if file is None: file = sys.stderr # depends on [control=['if'], data=['file']] def output(ln): """ :param str ln: :return: nothing, prints to ``file`` """ file.write(ln + '\n') color = Color(enable=with_color) output(color('EXCEPTION', color.fg_colors[1], bold=True)) (all_locals, all_globals) = ({}, {}) if tb is not None: print_tb(tb, allLocals=all_locals, allGlobals=all_globals, file=file, withTitle=True, with_color=color.enable) # depends on [control=['if'], data=['tb']] else: output(color('better_exchook: traceback unknown', color.fg_colors[1])) import types # noinspection PyShadowingNames def _some_str(value): """ :param object value: :rtype: str """ # noinspection PyBroadException try: return str(value) # depends on [control=['try'], data=[]] except Exception: return '<unprintable %s object>' % type(value).__name__ # depends on [control=['except'], data=[]] # noinspection PyShadowingNames def _format_final_exc_line(etype, value): value_str = _some_str(value) if value is None or not value_str: line = color('%s' % etype, color.fg_colors[1]) # depends on [control=['if'], data=[]] else: line = color('%s' % etype, color.fg_colors[1]) + ': %s' % (value_str,) return line # noinspection PyUnresolvedReferences if isinstance(etype, BaseException) or (hasattr(types, 'InstanceType') and isinstance(etype, types.InstanceType)) or etype is None or (type(etype) is str): output(_format_final_exc_line(etype, value)) # depends on [control=['if'], data=[]] else: output(_format_final_exc_line(etype.__name__, value)) if autodebugshell: # noinspection PyBroadException try: debugshell = int(os.environ['DEBUG']) != 0 # depends on [control=['try'], data=[]] except Exception: pass # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if debugshell: output('---------- DEBUG SHELL -----------') debug_shell(user_ns=all_locals, user_global_ns=all_globals, traceback=tb) # depends on [control=['if'], data=[]] file.flush()
def parse_java_version(cls, version): """Parses the java version (given a string or Revision object). Handles java version-isms, converting things like '7' -> '1.7' appropriately. Truncates input versions down to just the major and minor numbers (eg, 1.6), ignoring extra versioning information after the second number. :param version: the input version, given as a string or Revision object. :return: the parsed and cleaned version, suitable as a javac -source or -target argument. :rtype: Revision """ conversion = {str(i): '1.{}'.format(i) for i in cls.SUPPORTED_CONVERSION_VERSIONS} if str(version) in conversion: return Revision.lenient(conversion[str(version)]) if not hasattr(version, 'components'): version = Revision.lenient(version) if len(version.components) <= 2: return version return Revision(*version.components[:2])
def function[parse_java_version, parameter[cls, version]]: constant[Parses the java version (given a string or Revision object). Handles java version-isms, converting things like '7' -> '1.7' appropriately. Truncates input versions down to just the major and minor numbers (eg, 1.6), ignoring extra versioning information after the second number. :param version: the input version, given as a string or Revision object. :return: the parsed and cleaned version, suitable as a javac -source or -target argument. :rtype: Revision ] variable[conversion] assign[=] <ast.DictComp object at 0x7da1b22a4160> if compare[call[name[str], parameter[name[version]]] in name[conversion]] begin[:] return[call[name[Revision].lenient, parameter[call[name[conversion]][call[name[str], parameter[name[version]]]]]]] if <ast.UnaryOp object at 0x7da1b22a7b20> begin[:] variable[version] assign[=] call[name[Revision].lenient, parameter[name[version]]] if compare[call[name[len], parameter[name[version].components]] less_or_equal[<=] constant[2]] begin[:] return[name[version]] return[call[name[Revision], parameter[<ast.Starred object at 0x7da1b22a4cd0>]]]
keyword[def] identifier[parse_java_version] ( identifier[cls] , identifier[version] ): literal[string] identifier[conversion] ={ identifier[str] ( identifier[i] ): literal[string] . identifier[format] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[cls] . identifier[SUPPORTED_CONVERSION_VERSIONS] } keyword[if] identifier[str] ( identifier[version] ) keyword[in] identifier[conversion] : keyword[return] identifier[Revision] . identifier[lenient] ( identifier[conversion] [ identifier[str] ( identifier[version] )]) keyword[if] keyword[not] identifier[hasattr] ( identifier[version] , literal[string] ): identifier[version] = identifier[Revision] . identifier[lenient] ( identifier[version] ) keyword[if] identifier[len] ( identifier[version] . identifier[components] )<= literal[int] : keyword[return] identifier[version] keyword[return] identifier[Revision] (* identifier[version] . identifier[components] [: literal[int] ])
def parse_java_version(cls, version): """Parses the java version (given a string or Revision object). Handles java version-isms, converting things like '7' -> '1.7' appropriately. Truncates input versions down to just the major and minor numbers (eg, 1.6), ignoring extra versioning information after the second number. :param version: the input version, given as a string or Revision object. :return: the parsed and cleaned version, suitable as a javac -source or -target argument. :rtype: Revision """ conversion = {str(i): '1.{}'.format(i) for i in cls.SUPPORTED_CONVERSION_VERSIONS} if str(version) in conversion: return Revision.lenient(conversion[str(version)]) # depends on [control=['if'], data=['conversion']] if not hasattr(version, 'components'): version = Revision.lenient(version) # depends on [control=['if'], data=[]] if len(version.components) <= 2: return version # depends on [control=['if'], data=[]] return Revision(*version.components[:2])
def get_value(self) -> Decimal: """ Returns the current value of stocks """ quantity = self.get_quantity() price = self.get_last_available_price() if not price: # raise ValueError("no price found for", self.full_symbol) return Decimal(0) value = quantity * price.value return value
def function[get_value, parameter[self]]: constant[ Returns the current value of stocks ] variable[quantity] assign[=] call[name[self].get_quantity, parameter[]] variable[price] assign[=] call[name[self].get_last_available_price, parameter[]] if <ast.UnaryOp object at 0x7da1b1289870> begin[:] return[call[name[Decimal], parameter[constant[0]]]] variable[value] assign[=] binary_operation[name[quantity] * name[price].value] return[name[value]]
keyword[def] identifier[get_value] ( identifier[self] )-> identifier[Decimal] : literal[string] identifier[quantity] = identifier[self] . identifier[get_quantity] () identifier[price] = identifier[self] . identifier[get_last_available_price] () keyword[if] keyword[not] identifier[price] : keyword[return] identifier[Decimal] ( literal[int] ) identifier[value] = identifier[quantity] * identifier[price] . identifier[value] keyword[return] identifier[value]
def get_value(self) -> Decimal: """ Returns the current value of stocks """ quantity = self.get_quantity() price = self.get_last_available_price() if not price: # raise ValueError("no price found for", self.full_symbol) return Decimal(0) # depends on [control=['if'], data=[]] value = quantity * price.value return value
def version(ruby=None, runas=None, gem_bin=None): ''' Print out the version of gem :param gem_bin: string : None Full path to ``gem`` binary to use. :param ruby: string : None If RVM or rbenv are installed, the ruby version and gemset to use. Ignored if ``gem_bin`` is specified. :param runas: string : None The user to run gem as. CLI Example: .. code-block:: bash salt '*' gem.version ''' cmd = ['--version'] stdout = _gem(cmd, ruby, gem_bin=gem_bin, runas=runas) ret = {} for line in salt.utils.itertools.split(stdout, '\n'): match = re.match(r'[.0-9]+', line) if match: ret = line break return ret
def function[version, parameter[ruby, runas, gem_bin]]: constant[ Print out the version of gem :param gem_bin: string : None Full path to ``gem`` binary to use. :param ruby: string : None If RVM or rbenv are installed, the ruby version and gemset to use. Ignored if ``gem_bin`` is specified. :param runas: string : None The user to run gem as. CLI Example: .. code-block:: bash salt '*' gem.version ] variable[cmd] assign[=] list[[<ast.Constant object at 0x7da204567f40>]] variable[stdout] assign[=] call[name[_gem], parameter[name[cmd], name[ruby]]] variable[ret] assign[=] dictionary[[], []] for taget[name[line]] in starred[call[name[salt].utils.itertools.split, parameter[name[stdout], constant[ ]]]] begin[:] variable[match] assign[=] call[name[re].match, parameter[constant[[.0-9]+], name[line]]] if name[match] begin[:] variable[ret] assign[=] name[line] break return[name[ret]]
keyword[def] identifier[version] ( identifier[ruby] = keyword[None] , identifier[runas] = keyword[None] , identifier[gem_bin] = keyword[None] ): literal[string] identifier[cmd] =[ literal[string] ] identifier[stdout] = identifier[_gem] ( identifier[cmd] , identifier[ruby] , identifier[gem_bin] = identifier[gem_bin] , identifier[runas] = identifier[runas] ) identifier[ret] ={} keyword[for] identifier[line] keyword[in] identifier[salt] . identifier[utils] . identifier[itertools] . identifier[split] ( identifier[stdout] , literal[string] ): identifier[match] = identifier[re] . identifier[match] ( literal[string] , identifier[line] ) keyword[if] identifier[match] : identifier[ret] = identifier[line] keyword[break] keyword[return] identifier[ret]
def version(ruby=None, runas=None, gem_bin=None): """ Print out the version of gem :param gem_bin: string : None Full path to ``gem`` binary to use. :param ruby: string : None If RVM or rbenv are installed, the ruby version and gemset to use. Ignored if ``gem_bin`` is specified. :param runas: string : None The user to run gem as. CLI Example: .. code-block:: bash salt '*' gem.version """ cmd = ['--version'] stdout = _gem(cmd, ruby, gem_bin=gem_bin, runas=runas) ret = {} for line in salt.utils.itertools.split(stdout, '\n'): match = re.match('[.0-9]+', line) if match: ret = line break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['line']] return ret
def invoke(self, function_config, event, debug_context=None, stdout=None, stderr=None): """ Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard """ timer = None # Update with event input environ = function_config.env_vars environ.add_lambda_event_body(event) # Generate a dictionary of environment variable key:values env_vars = environ.resolve() with self._get_code_dir(function_config.code_abs_path) as code_dir: container = LambdaContainer(function_config.runtime, function_config.handler, code_dir, function_config.layers, self._image_builder, memory_mb=function_config.memory, env_vars=env_vars, debug_options=debug_context) try: # Start the container. This call returns immediately after the container starts self._container_manager.run(container) # Setup appropriate interrupt - timeout or Ctrl+C - before function starts executing. # # Start the timer **after** container starts. Container startup takes several seconds, only after which, # our Lambda function code will run. Starting the timer is a reasonable approximation that function has # started running. timer = self._configure_interrupt(function_config.name, function_config.timeout, container, bool(debug_context)) # NOTE: BLOCKING METHOD # Block the thread waiting to fetch logs from the container. This method will return after container # terminates, either successfully or killed by one of the interrupt handlers above. container.wait_for_logs(stdout=stdout, stderr=stderr) except KeyboardInterrupt: # When user presses Ctrl+C, we receive a Keyboard Interrupt. This is especially very common when # container is in debugging mode. We have special handling of Ctrl+C. So handle KeyboardInterrupt # and swallow the exception. The ``finally`` block will also take care of cleaning it up. LOG.debug("Ctrl+C was pressed. Aborting Lambda execution") finally: # We will be done with execution, if either the execution completed or an interrupt was fired # Any case, cleanup the timer and container. # # If we are in debugging mode, timer would not be created. So skip cleanup of the timer if timer: timer.cancel() self._container_manager.stop(container)
def function[invoke, parameter[self, function_config, event, debug_context, stdout, stderr]]: constant[ Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard ] variable[timer] assign[=] constant[None] variable[environ] assign[=] name[function_config].env_vars call[name[environ].add_lambda_event_body, parameter[name[event]]] variable[env_vars] assign[=] call[name[environ].resolve, parameter[]] with call[name[self]._get_code_dir, parameter[name[function_config].code_abs_path]] begin[:] variable[container] assign[=] call[name[LambdaContainer], parameter[name[function_config].runtime, name[function_config].handler, name[code_dir], name[function_config].layers, name[self]._image_builder]] <ast.Try object at 0x7da1b1f19210>
keyword[def] identifier[invoke] ( identifier[self] , identifier[function_config] , identifier[event] , identifier[debug_context] = keyword[None] , identifier[stdout] = keyword[None] , identifier[stderr] = keyword[None] ): literal[string] identifier[timer] = keyword[None] identifier[environ] = identifier[function_config] . identifier[env_vars] identifier[environ] . identifier[add_lambda_event_body] ( identifier[event] ) identifier[env_vars] = identifier[environ] . identifier[resolve] () keyword[with] identifier[self] . identifier[_get_code_dir] ( identifier[function_config] . identifier[code_abs_path] ) keyword[as] identifier[code_dir] : identifier[container] = identifier[LambdaContainer] ( identifier[function_config] . identifier[runtime] , identifier[function_config] . identifier[handler] , identifier[code_dir] , identifier[function_config] . identifier[layers] , identifier[self] . identifier[_image_builder] , identifier[memory_mb] = identifier[function_config] . identifier[memory] , identifier[env_vars] = identifier[env_vars] , identifier[debug_options] = identifier[debug_context] ) keyword[try] : identifier[self] . identifier[_container_manager] . identifier[run] ( identifier[container] ) identifier[timer] = identifier[self] . identifier[_configure_interrupt] ( identifier[function_config] . identifier[name] , identifier[function_config] . identifier[timeout] , identifier[container] , identifier[bool] ( identifier[debug_context] )) identifier[container] . identifier[wait_for_logs] ( identifier[stdout] = identifier[stdout] , identifier[stderr] = identifier[stderr] ) keyword[except] identifier[KeyboardInterrupt] : identifier[LOG] . identifier[debug] ( literal[string] ) keyword[finally] : keyword[if] identifier[timer] : identifier[timer] . identifier[cancel] () identifier[self] . identifier[_container_manager] . identifier[stop] ( identifier[container] )
def invoke(self, function_config, event, debug_context=None, stdout=None, stderr=None): """ Invoke the given Lambda function locally. ##### NOTE: THIS IS A LONG BLOCKING CALL ##### This method will block until either the Lambda function completes or timed out, which could be seconds. A blocking call will block the thread preventing any other operations from happening. If you are using this method in a web-server or in contexts where your application needs to be responsive when function is running, take care to invoke the function in a separate thread. Co-Routines or micro-threads might not perform well because the underlying implementation essentially blocks on a socket, which is synchronous. :param FunctionConfig function_config: Configuration of the function to invoke :param event: String input event passed to Lambda function :param DebugContext debug_context: Debugging context for the function (includes port, args, and path) :param io.IOBase stdout: Optional. IO Stream to that receives stdout text from container. :param io.IOBase stderr: Optional. IO Stream that receives stderr text from container :raises Keyboard """ timer = None # Update with event input environ = function_config.env_vars environ.add_lambda_event_body(event) # Generate a dictionary of environment variable key:values env_vars = environ.resolve() with self._get_code_dir(function_config.code_abs_path) as code_dir: container = LambdaContainer(function_config.runtime, function_config.handler, code_dir, function_config.layers, self._image_builder, memory_mb=function_config.memory, env_vars=env_vars, debug_options=debug_context) try: # Start the container. This call returns immediately after the container starts self._container_manager.run(container) # Setup appropriate interrupt - timeout or Ctrl+C - before function starts executing. # # Start the timer **after** container starts. Container startup takes several seconds, only after which, # our Lambda function code will run. Starting the timer is a reasonable approximation that function has # started running. timer = self._configure_interrupt(function_config.name, function_config.timeout, container, bool(debug_context)) # NOTE: BLOCKING METHOD # Block the thread waiting to fetch logs from the container. This method will return after container # terminates, either successfully or killed by one of the interrupt handlers above. container.wait_for_logs(stdout=stdout, stderr=stderr) # depends on [control=['try'], data=[]] except KeyboardInterrupt: # When user presses Ctrl+C, we receive a Keyboard Interrupt. This is especially very common when # container is in debugging mode. We have special handling of Ctrl+C. So handle KeyboardInterrupt # and swallow the exception. The ``finally`` block will also take care of cleaning it up. LOG.debug('Ctrl+C was pressed. Aborting Lambda execution') # depends on [control=['except'], data=[]] finally: # We will be done with execution, if either the execution completed or an interrupt was fired # Any case, cleanup the timer and container. # # If we are in debugging mode, timer would not be created. So skip cleanup of the timer if timer: timer.cancel() # depends on [control=['if'], data=[]] self._container_manager.stop(container) # depends on [control=['with'], data=['code_dir']]
def send_signal(self, signal): """ Send signal from this node to all connected receivers unless the node is in spectator mode. signal -- (hashable) signal value, see `dispatcher` connect for details Return a list of tuple pairs [(receiver, response), ... ] or None if the node is in spectator mode. if any receiver raises an error, the error propagates back through send, terminating the dispatch loop, so it is quite possible to not have all receivers called if a raises an error. """ if self.in_spectator_mode: return None logger.debug("Node %s broadcasts signal %s" % (self, signal)) dispatcher.send(signal=signal, sender=self)
def function[send_signal, parameter[self, signal]]: constant[ Send signal from this node to all connected receivers unless the node is in spectator mode. signal -- (hashable) signal value, see `dispatcher` connect for details Return a list of tuple pairs [(receiver, response), ... ] or None if the node is in spectator mode. if any receiver raises an error, the error propagates back through send, terminating the dispatch loop, so it is quite possible to not have all receivers called if a raises an error. ] if name[self].in_spectator_mode begin[:] return[constant[None]] call[name[logger].debug, parameter[binary_operation[constant[Node %s broadcasts signal %s] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da204344dc0>, <ast.Name object at 0x7da204344af0>]]]]] call[name[dispatcher].send, parameter[]]
keyword[def] identifier[send_signal] ( identifier[self] , identifier[signal] ): literal[string] keyword[if] identifier[self] . identifier[in_spectator_mode] : keyword[return] keyword[None] identifier[logger] . identifier[debug] ( literal[string] %( identifier[self] , identifier[signal] )) identifier[dispatcher] . identifier[send] ( identifier[signal] = identifier[signal] , identifier[sender] = identifier[self] )
def send_signal(self, signal): """ Send signal from this node to all connected receivers unless the node is in spectator mode. signal -- (hashable) signal value, see `dispatcher` connect for details Return a list of tuple pairs [(receiver, response), ... ] or None if the node is in spectator mode. if any receiver raises an error, the error propagates back through send, terminating the dispatch loop, so it is quite possible to not have all receivers called if a raises an error. """ if self.in_spectator_mode: return None # depends on [control=['if'], data=[]] logger.debug('Node %s broadcasts signal %s' % (self, signal)) dispatcher.send(signal=signal, sender=self)
def color(self, color): """ Updates the security labels color. Args: color: """ self._data['color'] = color request = self._base_request request['color'] = color return self._tc_requests.update(request, owner=self.owner)
def function[color, parameter[self, color]]: constant[ Updates the security labels color. Args: color: ] call[name[self]._data][constant[color]] assign[=] name[color] variable[request] assign[=] name[self]._base_request call[name[request]][constant[color]] assign[=] name[color] return[call[name[self]._tc_requests.update, parameter[name[request]]]]
keyword[def] identifier[color] ( identifier[self] , identifier[color] ): literal[string] identifier[self] . identifier[_data] [ literal[string] ]= identifier[color] identifier[request] = identifier[self] . identifier[_base_request] identifier[request] [ literal[string] ]= identifier[color] keyword[return] identifier[self] . identifier[_tc_requests] . identifier[update] ( identifier[request] , identifier[owner] = identifier[self] . identifier[owner] )
def color(self, color): """ Updates the security labels color. Args: color: """ self._data['color'] = color request = self._base_request request['color'] = color return self._tc_requests.update(request, owner=self.owner)
def plot_meshes(self, ax=None, marker='+', color='blue', outlines=False, **kwargs): """ Plot the low-resolution mesh boxes on a matplotlib Axes instance. Parameters ---------- ax : `matplotlib.axes.Axes` instance, optional If `None`, then the current ``Axes`` instance is used. marker : str, optional The marker to use to mark the center of the boxes. Default is '+'. color : str, optional The color for the markers and the box outlines. Default is 'blue'. outlines : bool, optional Whether or not to plot the box outlines in addition to the box centers. kwargs Any keyword arguments accepted by `matplotlib.patches.Patch`. Used only if ``outlines`` is True. """ import matplotlib.pyplot as plt kwargs['color'] = color if ax is None: ax = plt.gca() ax.scatter(self.x, self.y, marker=marker, color=color) if outlines: from ..aperture import RectangularAperture xy = np.column_stack([self.x, self.y]) apers = RectangularAperture(xy, self.box_size[1], self.box_size[0], 0.) apers.plot(ax=ax, **kwargs) return
def function[plot_meshes, parameter[self, ax, marker, color, outlines]]: constant[ Plot the low-resolution mesh boxes on a matplotlib Axes instance. Parameters ---------- ax : `matplotlib.axes.Axes` instance, optional If `None`, then the current ``Axes`` instance is used. marker : str, optional The marker to use to mark the center of the boxes. Default is '+'. color : str, optional The color for the markers and the box outlines. Default is 'blue'. outlines : bool, optional Whether or not to plot the box outlines in addition to the box centers. kwargs Any keyword arguments accepted by `matplotlib.patches.Patch`. Used only if ``outlines`` is True. ] import module[matplotlib.pyplot] as alias[plt] call[name[kwargs]][constant[color]] assign[=] name[color] if compare[name[ax] is constant[None]] begin[:] variable[ax] assign[=] call[name[plt].gca, parameter[]] call[name[ax].scatter, parameter[name[self].x, name[self].y]] if name[outlines] begin[:] from relative_module[aperture] import module[RectangularAperture] variable[xy] assign[=] call[name[np].column_stack, parameter[list[[<ast.Attribute object at 0x7da1b11a8880>, <ast.Attribute object at 0x7da1b11a86d0>]]]] variable[apers] assign[=] call[name[RectangularAperture], parameter[name[xy], call[name[self].box_size][constant[1]], call[name[self].box_size][constant[0]], constant[0.0]]] call[name[apers].plot, parameter[]] return[None]
keyword[def] identifier[plot_meshes] ( identifier[self] , identifier[ax] = keyword[None] , identifier[marker] = literal[string] , identifier[color] = literal[string] , identifier[outlines] = keyword[False] , ** identifier[kwargs] ): literal[string] keyword[import] identifier[matplotlib] . identifier[pyplot] keyword[as] identifier[plt] identifier[kwargs] [ literal[string] ]= identifier[color] keyword[if] identifier[ax] keyword[is] keyword[None] : identifier[ax] = identifier[plt] . identifier[gca] () identifier[ax] . identifier[scatter] ( identifier[self] . identifier[x] , identifier[self] . identifier[y] , identifier[marker] = identifier[marker] , identifier[color] = identifier[color] ) keyword[if] identifier[outlines] : keyword[from] .. identifier[aperture] keyword[import] identifier[RectangularAperture] identifier[xy] = identifier[np] . identifier[column_stack] ([ identifier[self] . identifier[x] , identifier[self] . identifier[y] ]) identifier[apers] = identifier[RectangularAperture] ( identifier[xy] , identifier[self] . identifier[box_size] [ literal[int] ], identifier[self] . identifier[box_size] [ literal[int] ], literal[int] ) identifier[apers] . identifier[plot] ( identifier[ax] = identifier[ax] ,** identifier[kwargs] ) keyword[return]
def plot_meshes(self, ax=None, marker='+', color='blue', outlines=False, **kwargs): """ Plot the low-resolution mesh boxes on a matplotlib Axes instance. Parameters ---------- ax : `matplotlib.axes.Axes` instance, optional If `None`, then the current ``Axes`` instance is used. marker : str, optional The marker to use to mark the center of the boxes. Default is '+'. color : str, optional The color for the markers and the box outlines. Default is 'blue'. outlines : bool, optional Whether or not to plot the box outlines in addition to the box centers. kwargs Any keyword arguments accepted by `matplotlib.patches.Patch`. Used only if ``outlines`` is True. """ import matplotlib.pyplot as plt kwargs['color'] = color if ax is None: ax = plt.gca() # depends on [control=['if'], data=['ax']] ax.scatter(self.x, self.y, marker=marker, color=color) if outlines: from ..aperture import RectangularAperture xy = np.column_stack([self.x, self.y]) apers = RectangularAperture(xy, self.box_size[1], self.box_size[0], 0.0) apers.plot(ax=ax, **kwargs) # depends on [control=['if'], data=[]] return
def processlist(**connection_args): ''' Retrieves the processlist from the MySQL server via "SHOW FULL PROCESSLIST". Returns: a list of dicts, with each dict representing a process: .. code-block:: python {'Command': 'Query', 'Host': 'localhost', 'Id': 39, 'Info': 'SHOW FULL PROCESSLIST', 'Rows_examined': 0, 'Rows_read': 1, 'Rows_sent': 0, 'State': None, 'Time': 0, 'User': 'root', 'db': 'mysql'} CLI Example: .. code-block:: bash salt '*' mysql.processlist ''' ret = [] dbc = _connect(**connection_args) if dbc is None: return [] cur = dbc.cursor() _execute(cur, 'SHOW FULL PROCESSLIST') hdr = [c[0] for c in cur.description] for _ in range(cur.rowcount): row = cur.fetchone() idx_r = {} for idx_j in range(len(hdr)): idx_r[hdr[idx_j]] = row[idx_j] ret.append(idx_r) cur.close() return ret
def function[processlist, parameter[]]: constant[ Retrieves the processlist from the MySQL server via "SHOW FULL PROCESSLIST". Returns: a list of dicts, with each dict representing a process: .. code-block:: python {'Command': 'Query', 'Host': 'localhost', 'Id': 39, 'Info': 'SHOW FULL PROCESSLIST', 'Rows_examined': 0, 'Rows_read': 1, 'Rows_sent': 0, 'State': None, 'Time': 0, 'User': 'root', 'db': 'mysql'} CLI Example: .. code-block:: bash salt '*' mysql.processlist ] variable[ret] assign[=] list[[]] variable[dbc] assign[=] call[name[_connect], parameter[]] if compare[name[dbc] is constant[None]] begin[:] return[list[[]]] variable[cur] assign[=] call[name[dbc].cursor, parameter[]] call[name[_execute], parameter[name[cur], constant[SHOW FULL PROCESSLIST]]] variable[hdr] assign[=] <ast.ListComp object at 0x7da20c76c490> for taget[name[_]] in starred[call[name[range], parameter[name[cur].rowcount]]] begin[:] variable[row] assign[=] call[name[cur].fetchone, parameter[]] variable[idx_r] assign[=] dictionary[[], []] for taget[name[idx_j]] in starred[call[name[range], parameter[call[name[len], parameter[name[hdr]]]]]] begin[:] call[name[idx_r]][call[name[hdr]][name[idx_j]]] assign[=] call[name[row]][name[idx_j]] call[name[ret].append, parameter[name[idx_r]]] call[name[cur].close, parameter[]] return[name[ret]]
keyword[def] identifier[processlist] (** identifier[connection_args] ): literal[string] identifier[ret] =[] identifier[dbc] = identifier[_connect] (** identifier[connection_args] ) keyword[if] identifier[dbc] keyword[is] keyword[None] : keyword[return] [] identifier[cur] = identifier[dbc] . identifier[cursor] () identifier[_execute] ( identifier[cur] , literal[string] ) identifier[hdr] =[ identifier[c] [ literal[int] ] keyword[for] identifier[c] keyword[in] identifier[cur] . identifier[description] ] keyword[for] identifier[_] keyword[in] identifier[range] ( identifier[cur] . identifier[rowcount] ): identifier[row] = identifier[cur] . identifier[fetchone] () identifier[idx_r] ={} keyword[for] identifier[idx_j] keyword[in] identifier[range] ( identifier[len] ( identifier[hdr] )): identifier[idx_r] [ identifier[hdr] [ identifier[idx_j] ]]= identifier[row] [ identifier[idx_j] ] identifier[ret] . identifier[append] ( identifier[idx_r] ) identifier[cur] . identifier[close] () keyword[return] identifier[ret]
def processlist(**connection_args): """ Retrieves the processlist from the MySQL server via "SHOW FULL PROCESSLIST". Returns: a list of dicts, with each dict representing a process: .. code-block:: python {'Command': 'Query', 'Host': 'localhost', 'Id': 39, 'Info': 'SHOW FULL PROCESSLIST', 'Rows_examined': 0, 'Rows_read': 1, 'Rows_sent': 0, 'State': None, 'Time': 0, 'User': 'root', 'db': 'mysql'} CLI Example: .. code-block:: bash salt '*' mysql.processlist """ ret = [] dbc = _connect(**connection_args) if dbc is None: return [] # depends on [control=['if'], data=[]] cur = dbc.cursor() _execute(cur, 'SHOW FULL PROCESSLIST') hdr = [c[0] for c in cur.description] for _ in range(cur.rowcount): row = cur.fetchone() idx_r = {} for idx_j in range(len(hdr)): idx_r[hdr[idx_j]] = row[idx_j] # depends on [control=['for'], data=['idx_j']] ret.append(idx_r) # depends on [control=['for'], data=[]] cur.close() return ret
def sync(remote='origin', branch='master'): """git pull and push commit""" pull(branch, remote) push(branch, remote) print(cyan("Git Synced!"))
def function[sync, parameter[remote, branch]]: constant[git pull and push commit] call[name[pull], parameter[name[branch], name[remote]]] call[name[push], parameter[name[branch], name[remote]]] call[name[print], parameter[call[name[cyan], parameter[constant[Git Synced!]]]]]
keyword[def] identifier[sync] ( identifier[remote] = literal[string] , identifier[branch] = literal[string] ): literal[string] identifier[pull] ( identifier[branch] , identifier[remote] ) identifier[push] ( identifier[branch] , identifier[remote] ) identifier[print] ( identifier[cyan] ( literal[string] ))
def sync(remote='origin', branch='master'): """git pull and push commit""" pull(branch, remote) push(branch, remote) print(cyan('Git Synced!'))
def list(self): """Returns a list of the users gists as GistInfo objects Returns: a list of GistInfo objects """ # Define the basic request. The per_page parameter is set to 100, which # is the maximum github allows. If the user has more than one page of # gists, this request object will be modified to retrieve each # successive page of gists. request = requests.Request( 'GET', 'https://api.github.com/gists', headers={ 'Accept-Encoding': 'identity, deflate, compress, gzip', 'User-Agent': 'python-requests/1.2.0', 'Accept': 'application/vnd.github.v3.base64', }, params={ 'access_token': self.token, 'per_page': 100, }, ) # Github provides a 'link' header that contains information to # navigate through a users page of gists. This regex is used to # extract the URLs contained in this header, and to find the next page # of gists. pattern = re.compile(r'<([^>]*)>; rel="([^"]*)"') gists = [] while True: # Retrieve the next page of gists try: response = self.send(request).json() except Exception: break # Extract the list of gists for gist in response: try: gists.append( GistInfo( gist['id'], gist['public'], gist['description'], ) ) except KeyError: continue try: link = response.headers['link'] # Search for the next page of gist. If a 'next' page is found, # the URL is set to this new page and the iteration continues. # If there is no next page, return the list of gists. for result in pattern.finditer(link): url = result.group(1) rel = result.group(2) if rel == 'next': request.url = url break else: return gists except Exception: break return gists
def function[list, parameter[self]]: constant[Returns a list of the users gists as GistInfo objects Returns: a list of GistInfo objects ] variable[request] assign[=] call[name[requests].Request, parameter[constant[GET], constant[https://api.github.com/gists]]] variable[pattern] assign[=] call[name[re].compile, parameter[constant[<([^>]*)>; rel="([^"]*)"]]] variable[gists] assign[=] list[[]] while constant[True] begin[:] <ast.Try object at 0x7da20c7c8ca0> for taget[name[gist]] in starred[name[response]] begin[:] <ast.Try object at 0x7da20c7cbca0> <ast.Try object at 0x7da20c7c8e80> return[name[gists]]
keyword[def] identifier[list] ( identifier[self] ): literal[string] identifier[request] = identifier[requests] . identifier[Request] ( literal[string] , literal[string] , identifier[headers] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }, identifier[params] ={ literal[string] : identifier[self] . identifier[token] , literal[string] : literal[int] , }, ) identifier[pattern] = identifier[re] . identifier[compile] ( literal[string] ) identifier[gists] =[] keyword[while] keyword[True] : keyword[try] : identifier[response] = identifier[self] . identifier[send] ( identifier[request] ). identifier[json] () keyword[except] identifier[Exception] : keyword[break] keyword[for] identifier[gist] keyword[in] identifier[response] : keyword[try] : identifier[gists] . identifier[append] ( identifier[GistInfo] ( identifier[gist] [ literal[string] ], identifier[gist] [ literal[string] ], identifier[gist] [ literal[string] ], ) ) keyword[except] identifier[KeyError] : keyword[continue] keyword[try] : identifier[link] = identifier[response] . identifier[headers] [ literal[string] ] keyword[for] identifier[result] keyword[in] identifier[pattern] . identifier[finditer] ( identifier[link] ): identifier[url] = identifier[result] . identifier[group] ( literal[int] ) identifier[rel] = identifier[result] . identifier[group] ( literal[int] ) keyword[if] identifier[rel] == literal[string] : identifier[request] . identifier[url] = identifier[url] keyword[break] keyword[else] : keyword[return] identifier[gists] keyword[except] identifier[Exception] : keyword[break] keyword[return] identifier[gists]
def list(self): """Returns a list of the users gists as GistInfo objects Returns: a list of GistInfo objects """ # Define the basic request. The per_page parameter is set to 100, which # is the maximum github allows. If the user has more than one page of # gists, this request object will be modified to retrieve each # successive page of gists. request = requests.Request('GET', 'https://api.github.com/gists', headers={'Accept-Encoding': 'identity, deflate, compress, gzip', 'User-Agent': 'python-requests/1.2.0', 'Accept': 'application/vnd.github.v3.base64'}, params={'access_token': self.token, 'per_page': 100}) # Github provides a 'link' header that contains information to # navigate through a users page of gists. This regex is used to # extract the URLs contained in this header, and to find the next page # of gists. pattern = re.compile('<([^>]*)>; rel="([^"]*)"') gists = [] while True: # Retrieve the next page of gists try: response = self.send(request).json() # depends on [control=['try'], data=[]] except Exception: break # depends on [control=['except'], data=[]] # Extract the list of gists for gist in response: try: gists.append(GistInfo(gist['id'], gist['public'], gist['description'])) # depends on [control=['try'], data=[]] except KeyError: continue # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['gist']] try: link = response.headers['link'] # Search for the next page of gist. If a 'next' page is found, # the URL is set to this new page and the iteration continues. # If there is no next page, return the list of gists. for result in pattern.finditer(link): url = result.group(1) rel = result.group(2) if rel == 'next': request.url = url break # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['result']] else: return gists # depends on [control=['try'], data=[]] except Exception: break # depends on [control=['except'], data=[]] # depends on [control=['while'], data=[]] return gists
def connect_producer(self, bootstrap_servers='127.0.0.1:9092', client_id='Robot', **kwargs): """A Kafka client that publishes records to the Kafka cluster. Keyword Arguments: - ``bootstrap_servers``: 'host[:port]' string (or list of 'host[:port]' strings) that the producer should contact to bootstrap initial cluster metadata. This does not have to be the full node list. It just needs to have at least one broker that will respond to a Metadata API Request. Default to `localhost:9092`. - ``client_id`` (str): a name for this client. This string is passed in each request to servers and can be used to identify specific server-side log entries that correspond to this client. Default: `Robot`. Note: Configuration parameters are described in more detail at http://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html """ self.producer = KafkaProducer(bootstrap_servers=bootstrap_servers, client_id=client_id, **kwargs)
def function[connect_producer, parameter[self, bootstrap_servers, client_id]]: constant[A Kafka client that publishes records to the Kafka cluster. Keyword Arguments: - ``bootstrap_servers``: 'host[:port]' string (or list of 'host[:port]' strings) that the producer should contact to bootstrap initial cluster metadata. This does not have to be the full node list. It just needs to have at least one broker that will respond to a Metadata API Request. Default to `localhost:9092`. - ``client_id`` (str): a name for this client. This string is passed in each request to servers and can be used to identify specific server-side log entries that correspond to this client. Default: `Robot`. Note: Configuration parameters are described in more detail at http://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html ] name[self].producer assign[=] call[name[KafkaProducer], parameter[]]
keyword[def] identifier[connect_producer] ( identifier[self] , identifier[bootstrap_servers] = literal[string] , identifier[client_id] = literal[string] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[producer] = identifier[KafkaProducer] ( identifier[bootstrap_servers] = identifier[bootstrap_servers] , identifier[client_id] = identifier[client_id] ,** identifier[kwargs] )
def connect_producer(self, bootstrap_servers='127.0.0.1:9092', client_id='Robot', **kwargs): """A Kafka client that publishes records to the Kafka cluster. Keyword Arguments: - ``bootstrap_servers``: 'host[:port]' string (or list of 'host[:port]' strings) that the producer should contact to bootstrap initial cluster metadata. This does not have to be the full node list. It just needs to have at least one broker that will respond to a Metadata API Request. Default to `localhost:9092`. - ``client_id`` (str): a name for this client. This string is passed in each request to servers and can be used to identify specific server-side log entries that correspond to this client. Default: `Robot`. Note: Configuration parameters are described in more detail at http://kafka-python.readthedocs.io/en/master/apidoc/KafkaProducer.html """ self.producer = KafkaProducer(bootstrap_servers=bootstrap_servers, client_id=client_id, **kwargs)
def stop_and_reset_thread(self, ignore_results=False): """Stop current search thread and clean-up""" if self.search_thread is not None: if self.search_thread.isRunning(): if ignore_results: self.search_thread.sig_finished.disconnect( self.search_complete) self.search_thread.stop() self.search_thread.wait() self.search_thread.setParent(None) self.search_thread = None
def function[stop_and_reset_thread, parameter[self, ignore_results]]: constant[Stop current search thread and clean-up] if compare[name[self].search_thread is_not constant[None]] begin[:] if call[name[self].search_thread.isRunning, parameter[]] begin[:] if name[ignore_results] begin[:] call[name[self].search_thread.sig_finished.disconnect, parameter[name[self].search_complete]] call[name[self].search_thread.stop, parameter[]] call[name[self].search_thread.wait, parameter[]] call[name[self].search_thread.setParent, parameter[constant[None]]] name[self].search_thread assign[=] constant[None]
keyword[def] identifier[stop_and_reset_thread] ( identifier[self] , identifier[ignore_results] = keyword[False] ): literal[string] keyword[if] identifier[self] . identifier[search_thread] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[self] . identifier[search_thread] . identifier[isRunning] (): keyword[if] identifier[ignore_results] : identifier[self] . identifier[search_thread] . identifier[sig_finished] . identifier[disconnect] ( identifier[self] . identifier[search_complete] ) identifier[self] . identifier[search_thread] . identifier[stop] () identifier[self] . identifier[search_thread] . identifier[wait] () identifier[self] . identifier[search_thread] . identifier[setParent] ( keyword[None] ) identifier[self] . identifier[search_thread] = keyword[None]
def stop_and_reset_thread(self, ignore_results=False): """Stop current search thread and clean-up""" if self.search_thread is not None: if self.search_thread.isRunning(): if ignore_results: self.search_thread.sig_finished.disconnect(self.search_complete) # depends on [control=['if'], data=[]] self.search_thread.stop() self.search_thread.wait() # depends on [control=['if'], data=[]] self.search_thread.setParent(None) self.search_thread = None # depends on [control=['if'], data=[]]
def get_port(self, adapter_number, port_number): """ Return the port for this adapter_number and port_number or returns None if the port is not found """ for port in self.ports: if port.adapter_number == adapter_number and port.port_number == port_number: return port return None
def function[get_port, parameter[self, adapter_number, port_number]]: constant[ Return the port for this adapter_number and port_number or returns None if the port is not found ] for taget[name[port]] in starred[name[self].ports] begin[:] if <ast.BoolOp object at 0x7da18fe92170> begin[:] return[name[port]] return[constant[None]]
keyword[def] identifier[get_port] ( identifier[self] , identifier[adapter_number] , identifier[port_number] ): literal[string] keyword[for] identifier[port] keyword[in] identifier[self] . identifier[ports] : keyword[if] identifier[port] . identifier[adapter_number] == identifier[adapter_number] keyword[and] identifier[port] . identifier[port_number] == identifier[port_number] : keyword[return] identifier[port] keyword[return] keyword[None]
def get_port(self, adapter_number, port_number): """ Return the port for this adapter_number and port_number or returns None if the port is not found """ for port in self.ports: if port.adapter_number == adapter_number and port.port_number == port_number: return port # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['port']] return None
def all_enclosing_scopes(scope, allow_global=True): """Utility function to return all scopes up to the global scope enclosing a given scope.""" _validate_full_scope(scope) # TODO: validate scopes here and/or in `enclosing_scope()` instead of assuming correctness. def scope_within_range(tentative_scope): if tentative_scope is None: return False if not allow_global and tentative_scope == GLOBAL_SCOPE: return False return True while scope_within_range(scope): yield scope scope = (None if scope == GLOBAL_SCOPE else enclosing_scope(scope))
def function[all_enclosing_scopes, parameter[scope, allow_global]]: constant[Utility function to return all scopes up to the global scope enclosing a given scope.] call[name[_validate_full_scope], parameter[name[scope]]] def function[scope_within_range, parameter[tentative_scope]]: if compare[name[tentative_scope] is constant[None]] begin[:] return[constant[False]] if <ast.BoolOp object at 0x7da1b2248fd0> begin[:] return[constant[False]] return[constant[True]] while call[name[scope_within_range], parameter[name[scope]]] begin[:] <ast.Yield object at 0x7da1b1eedcf0> variable[scope] assign[=] <ast.IfExp object at 0x7da1b1eee560>
keyword[def] identifier[all_enclosing_scopes] ( identifier[scope] , identifier[allow_global] = keyword[True] ): literal[string] identifier[_validate_full_scope] ( identifier[scope] ) keyword[def] identifier[scope_within_range] ( identifier[tentative_scope] ): keyword[if] identifier[tentative_scope] keyword[is] keyword[None] : keyword[return] keyword[False] keyword[if] keyword[not] identifier[allow_global] keyword[and] identifier[tentative_scope] == identifier[GLOBAL_SCOPE] : keyword[return] keyword[False] keyword[return] keyword[True] keyword[while] identifier[scope_within_range] ( identifier[scope] ): keyword[yield] identifier[scope] identifier[scope] =( keyword[None] keyword[if] identifier[scope] == identifier[GLOBAL_SCOPE] keyword[else] identifier[enclosing_scope] ( identifier[scope] ))
def all_enclosing_scopes(scope, allow_global=True): """Utility function to return all scopes up to the global scope enclosing a given scope.""" _validate_full_scope(scope) # TODO: validate scopes here and/or in `enclosing_scope()` instead of assuming correctness. def scope_within_range(tentative_scope): if tentative_scope is None: return False # depends on [control=['if'], data=[]] if not allow_global and tentative_scope == GLOBAL_SCOPE: return False # depends on [control=['if'], data=[]] return True while scope_within_range(scope): yield scope scope = None if scope == GLOBAL_SCOPE else enclosing_scope(scope) # depends on [control=['while'], data=[]]
def remove_edge(self, u, v): """Version of remove_edge that's much like normal networkx but only deletes once, since the database doesn't keep separate adj and succ mappings """ try: del self.succ[u][v] except KeyError: raise NetworkXError( "The edge {}-{} is not in the graph.".format(u, v) )
def function[remove_edge, parameter[self, u, v]]: constant[Version of remove_edge that's much like normal networkx but only deletes once, since the database doesn't keep separate adj and succ mappings ] <ast.Try object at 0x7da1b0e27400>
keyword[def] identifier[remove_edge] ( identifier[self] , identifier[u] , identifier[v] ): literal[string] keyword[try] : keyword[del] identifier[self] . identifier[succ] [ identifier[u] ][ identifier[v] ] keyword[except] identifier[KeyError] : keyword[raise] identifier[NetworkXError] ( literal[string] . identifier[format] ( identifier[u] , identifier[v] ) )
def remove_edge(self, u, v): """Version of remove_edge that's much like normal networkx but only deletes once, since the database doesn't keep separate adj and succ mappings """ try: del self.succ[u][v] # depends on [control=['try'], data=[]] except KeyError: raise NetworkXError('The edge {}-{} is not in the graph.'.format(u, v)) # depends on [control=['except'], data=[]]
def get_pastml_parameter_file(method, model, column): """ Get the filename where the PastML parameters are saved (for non-ML methods and input parameters will be None, as they have no parameters). This file is inside the work_dir that can be specified for the pastml_pipeline method. :param method: str, the ancestral state prediction method used by PASTML. :param model: str, the state evolution model used by PASTML. :param column: str, the column for which ancestral states are reconstructed with PASTML. :return: str, filename or None for non-ML methods """ ml = is_ml(method) template = PASTML_ML_PARAMS_TAB if ml else PASTML_MP_PARAMS_TAB column, method = get_column_method(column, method) return template.format(state=column, method=method, model=model)
def function[get_pastml_parameter_file, parameter[method, model, column]]: constant[ Get the filename where the PastML parameters are saved (for non-ML methods and input parameters will be None, as they have no parameters). This file is inside the work_dir that can be specified for the pastml_pipeline method. :param method: str, the ancestral state prediction method used by PASTML. :param model: str, the state evolution model used by PASTML. :param column: str, the column for which ancestral states are reconstructed with PASTML. :return: str, filename or None for non-ML methods ] variable[ml] assign[=] call[name[is_ml], parameter[name[method]]] variable[template] assign[=] <ast.IfExp object at 0x7da18dc05f90> <ast.Tuple object at 0x7da18dc05930> assign[=] call[name[get_column_method], parameter[name[column], name[method]]] return[call[name[template].format, parameter[]]]
keyword[def] identifier[get_pastml_parameter_file] ( identifier[method] , identifier[model] , identifier[column] ): literal[string] identifier[ml] = identifier[is_ml] ( identifier[method] ) identifier[template] = identifier[PASTML_ML_PARAMS_TAB] keyword[if] identifier[ml] keyword[else] identifier[PASTML_MP_PARAMS_TAB] identifier[column] , identifier[method] = identifier[get_column_method] ( identifier[column] , identifier[method] ) keyword[return] identifier[template] . identifier[format] ( identifier[state] = identifier[column] , identifier[method] = identifier[method] , identifier[model] = identifier[model] )
def get_pastml_parameter_file(method, model, column): """ Get the filename where the PastML parameters are saved (for non-ML methods and input parameters will be None, as they have no parameters). This file is inside the work_dir that can be specified for the pastml_pipeline method. :param method: str, the ancestral state prediction method used by PASTML. :param model: str, the state evolution model used by PASTML. :param column: str, the column for which ancestral states are reconstructed with PASTML. :return: str, filename or None for non-ML methods """ ml = is_ml(method) template = PASTML_ML_PARAMS_TAB if ml else PASTML_MP_PARAMS_TAB (column, method) = get_column_method(column, method) return template.format(state=column, method=method, model=model)
def create_bodies(self, translate=(0, 1, 0), size=0.1): '''Traverse the bone hierarchy and create physics bodies.''' stack = [('root', 0, self.root['position'] + translate)] while stack: name, depth, end = stack.pop() for child in self.hierarchy.get(name, ()): stack.append((child, depth + 1, end + self.bones[child].end)) if name not in self.bones: continue bone = self.bones[name] body = self.world.create_body( 'box', name=bone.name, density=self.density, lengths=(size, size, bone.length)) body.color = self.color # move the center of the body to the halfway point between # the parent (joint) and child (joint). x, y, z = end - bone.direction * bone.length / 2 # swizzle y and z -- asf uses y as up, but we use z as up. body.position = x, z, y # compute an orthonormal (rotation) matrix using the ground and # the body. this is mind-bending but seems to work. u = bone.direction v = np.cross(u, [0, 1, 0]) l = np.linalg.norm(v) if l > 0: v /= l rot = np.vstack([np.cross(u, v), v, u]).T swizzle = [[1, 0, 0], [0, 0, 1], [0, -1, 0]] body.rotation = np.dot(swizzle, rot) self.bodies.append(body)
def function[create_bodies, parameter[self, translate, size]]: constant[Traverse the bone hierarchy and create physics bodies.] variable[stack] assign[=] list[[<ast.Tuple object at 0x7da1b008e650>]] while name[stack] begin[:] <ast.Tuple object at 0x7da1b008d7e0> assign[=] call[name[stack].pop, parameter[]] for taget[name[child]] in starred[call[name[self].hierarchy.get, parameter[name[name], tuple[[]]]]] begin[:] call[name[stack].append, parameter[tuple[[<ast.Name object at 0x7da1b008dc90>, <ast.BinOp object at 0x7da1b008d2a0>, <ast.BinOp object at 0x7da1b008f640>]]]] if compare[name[name] <ast.NotIn object at 0x7da2590d7190> name[self].bones] begin[:] continue variable[bone] assign[=] call[name[self].bones][name[name]] variable[body] assign[=] call[name[self].world.create_body, parameter[constant[box]]] name[body].color assign[=] name[self].color <ast.Tuple object at 0x7da1b008c370> assign[=] binary_operation[name[end] - binary_operation[binary_operation[name[bone].direction * name[bone].length] / constant[2]]] name[body].position assign[=] tuple[[<ast.Name object at 0x7da1afe0d180>, <ast.Name object at 0x7da1afe0d030>, <ast.Name object at 0x7da1afe0db10>]] variable[u] assign[=] name[bone].direction variable[v] assign[=] call[name[np].cross, parameter[name[u], list[[<ast.Constant object at 0x7da1afe0ed10>, <ast.Constant object at 0x7da1afe0da20>, <ast.Constant object at 0x7da1afe0e350>]]]] variable[l] assign[=] call[name[np].linalg.norm, parameter[name[v]]] if compare[name[l] greater[>] constant[0]] begin[:] <ast.AugAssign object at 0x7da1afe0e2c0> variable[rot] assign[=] call[name[np].vstack, parameter[list[[<ast.Call object at 0x7da1afe0f6a0>, <ast.Name object at 0x7da1afe0fa00>, <ast.Name object at 0x7da1afe0c130>]]]].T variable[swizzle] assign[=] list[[<ast.List object at 0x7da1afe0f310>, <ast.List object at 0x7da1afe0ffd0>, <ast.List object at 0x7da1afe0dd50>]] name[body].rotation assign[=] call[name[np].dot, parameter[name[swizzle], name[rot]]] call[name[self].bodies.append, parameter[name[body]]]
keyword[def] identifier[create_bodies] ( identifier[self] , identifier[translate] =( literal[int] , literal[int] , literal[int] ), identifier[size] = literal[int] ): literal[string] identifier[stack] =[( literal[string] , literal[int] , identifier[self] . identifier[root] [ literal[string] ]+ identifier[translate] )] keyword[while] identifier[stack] : identifier[name] , identifier[depth] , identifier[end] = identifier[stack] . identifier[pop] () keyword[for] identifier[child] keyword[in] identifier[self] . identifier[hierarchy] . identifier[get] ( identifier[name] ,()): identifier[stack] . identifier[append] (( identifier[child] , identifier[depth] + literal[int] , identifier[end] + identifier[self] . identifier[bones] [ identifier[child] ]. identifier[end] )) keyword[if] identifier[name] keyword[not] keyword[in] identifier[self] . identifier[bones] : keyword[continue] identifier[bone] = identifier[self] . identifier[bones] [ identifier[name] ] identifier[body] = identifier[self] . identifier[world] . identifier[create_body] ( literal[string] , identifier[name] = identifier[bone] . identifier[name] , identifier[density] = identifier[self] . identifier[density] , identifier[lengths] =( identifier[size] , identifier[size] , identifier[bone] . identifier[length] )) identifier[body] . identifier[color] = identifier[self] . identifier[color] identifier[x] , identifier[y] , identifier[z] = identifier[end] - identifier[bone] . identifier[direction] * identifier[bone] . identifier[length] / literal[int] identifier[body] . identifier[position] = identifier[x] , identifier[z] , identifier[y] identifier[u] = identifier[bone] . identifier[direction] identifier[v] = identifier[np] . identifier[cross] ( identifier[u] ,[ literal[int] , literal[int] , literal[int] ]) identifier[l] = identifier[np] . identifier[linalg] . identifier[norm] ( identifier[v] ) keyword[if] identifier[l] > literal[int] : identifier[v] /= identifier[l] identifier[rot] = identifier[np] . identifier[vstack] ([ identifier[np] . identifier[cross] ( identifier[u] , identifier[v] ), identifier[v] , identifier[u] ]). identifier[T] identifier[swizzle] =[[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ]] identifier[body] . identifier[rotation] = identifier[np] . identifier[dot] ( identifier[swizzle] , identifier[rot] ) identifier[self] . identifier[bodies] . identifier[append] ( identifier[body] )
def create_bodies(self, translate=(0, 1, 0), size=0.1): """Traverse the bone hierarchy and create physics bodies.""" stack = [('root', 0, self.root['position'] + translate)] while stack: (name, depth, end) = stack.pop() for child in self.hierarchy.get(name, ()): stack.append((child, depth + 1, end + self.bones[child].end)) # depends on [control=['for'], data=['child']] if name not in self.bones: continue # depends on [control=['if'], data=[]] bone = self.bones[name] body = self.world.create_body('box', name=bone.name, density=self.density, lengths=(size, size, bone.length)) body.color = self.color # move the center of the body to the halfway point between # the parent (joint) and child (joint). (x, y, z) = end - bone.direction * bone.length / 2 # swizzle y and z -- asf uses y as up, but we use z as up. body.position = (x, z, y) # compute an orthonormal (rotation) matrix using the ground and # the body. this is mind-bending but seems to work. u = bone.direction v = np.cross(u, [0, 1, 0]) l = np.linalg.norm(v) if l > 0: v /= l rot = np.vstack([np.cross(u, v), v, u]).T swizzle = [[1, 0, 0], [0, 0, 1], [0, -1, 0]] body.rotation = np.dot(swizzle, rot) # depends on [control=['if'], data=['l']] self.bodies.append(body) # depends on [control=['while'], data=[]]
def split_in_tiles(self, hint): """ Split a SiteCollection into a set of tiles (SiteCollection instances). :param hint: hint for how many tiles to generate """ tiles = [] for seq in split_in_blocks(range(len(self)), hint or 1): sc = SiteCollection.__new__(SiteCollection) sc.array = self.array[numpy.array(seq, int)] tiles.append(sc) return tiles
def function[split_in_tiles, parameter[self, hint]]: constant[ Split a SiteCollection into a set of tiles (SiteCollection instances). :param hint: hint for how many tiles to generate ] variable[tiles] assign[=] list[[]] for taget[name[seq]] in starred[call[name[split_in_blocks], parameter[call[name[range], parameter[call[name[len], parameter[name[self]]]]], <ast.BoolOp object at 0x7da204622380>]]] begin[:] variable[sc] assign[=] call[name[SiteCollection].__new__, parameter[name[SiteCollection]]] name[sc].array assign[=] call[name[self].array][call[name[numpy].array, parameter[name[seq], name[int]]]] call[name[tiles].append, parameter[name[sc]]] return[name[tiles]]
keyword[def] identifier[split_in_tiles] ( identifier[self] , identifier[hint] ): literal[string] identifier[tiles] =[] keyword[for] identifier[seq] keyword[in] identifier[split_in_blocks] ( identifier[range] ( identifier[len] ( identifier[self] )), identifier[hint] keyword[or] literal[int] ): identifier[sc] = identifier[SiteCollection] . identifier[__new__] ( identifier[SiteCollection] ) identifier[sc] . identifier[array] = identifier[self] . identifier[array] [ identifier[numpy] . identifier[array] ( identifier[seq] , identifier[int] )] identifier[tiles] . identifier[append] ( identifier[sc] ) keyword[return] identifier[tiles]
def split_in_tiles(self, hint): """ Split a SiteCollection into a set of tiles (SiteCollection instances). :param hint: hint for how many tiles to generate """ tiles = [] for seq in split_in_blocks(range(len(self)), hint or 1): sc = SiteCollection.__new__(SiteCollection) sc.array = self.array[numpy.array(seq, int)] tiles.append(sc) # depends on [control=['for'], data=['seq']] return tiles
def dump_dict_of_nested_lists_to_h5(fname, data): """ Take nested list structure and dump it in hdf5 file. Parameters ---------- fname : str Filename data : dict(list(numpy.ndarray)) Dict of nested lists with variable len arrays. Returns ------- None """ # Open file print('writing to file: %s' % fname) f = h5py.File(fname) # Iterate over values for i, ivalue in list(data.items()): igrp = f.create_group(str(i)) for j, jvalue in enumerate(ivalue): jgrp = igrp.create_group(str(j)) for k, kvalue in enumerate(jvalue): if kvalue.size > 0: dset = jgrp.create_dataset(str(k), data=kvalue, compression='gzip') else: dset = jgrp.create_dataset(str(k), data=kvalue, maxshape=(None, ), compression='gzip') # Close file f.close()
def function[dump_dict_of_nested_lists_to_h5, parameter[fname, data]]: constant[ Take nested list structure and dump it in hdf5 file. Parameters ---------- fname : str Filename data : dict(list(numpy.ndarray)) Dict of nested lists with variable len arrays. Returns ------- None ] call[name[print], parameter[binary_operation[constant[writing to file: %s] <ast.Mod object at 0x7da2590d6920> name[fname]]]] variable[f] assign[=] call[name[h5py].File, parameter[name[fname]]] for taget[tuple[[<ast.Name object at 0x7da1b0bcf880>, <ast.Name object at 0x7da1b0bcf130>]]] in starred[call[name[list], parameter[call[name[data].items, parameter[]]]]] begin[:] variable[igrp] assign[=] call[name[f].create_group, parameter[call[name[str], parameter[name[i]]]]] for taget[tuple[[<ast.Name object at 0x7da1b0bcf460>, <ast.Name object at 0x7da1b0bcefe0>]]] in starred[call[name[enumerate], parameter[name[ivalue]]]] begin[:] variable[jgrp] assign[=] call[name[igrp].create_group, parameter[call[name[str], parameter[name[j]]]]] for taget[tuple[[<ast.Name object at 0x7da1b0bced40>, <ast.Name object at 0x7da1b0bcf520>]]] in starred[call[name[enumerate], parameter[name[jvalue]]]] begin[:] if compare[name[kvalue].size greater[>] constant[0]] begin[:] variable[dset] assign[=] call[name[jgrp].create_dataset, parameter[call[name[str], parameter[name[k]]]]] call[name[f].close, parameter[]]
keyword[def] identifier[dump_dict_of_nested_lists_to_h5] ( identifier[fname] , identifier[data] ): literal[string] identifier[print] ( literal[string] % identifier[fname] ) identifier[f] = identifier[h5py] . identifier[File] ( identifier[fname] ) keyword[for] identifier[i] , identifier[ivalue] keyword[in] identifier[list] ( identifier[data] . identifier[items] ()): identifier[igrp] = identifier[f] . identifier[create_group] ( identifier[str] ( identifier[i] )) keyword[for] identifier[j] , identifier[jvalue] keyword[in] identifier[enumerate] ( identifier[ivalue] ): identifier[jgrp] = identifier[igrp] . identifier[create_group] ( identifier[str] ( identifier[j] )) keyword[for] identifier[k] , identifier[kvalue] keyword[in] identifier[enumerate] ( identifier[jvalue] ): keyword[if] identifier[kvalue] . identifier[size] > literal[int] : identifier[dset] = identifier[jgrp] . identifier[create_dataset] ( identifier[str] ( identifier[k] ), identifier[data] = identifier[kvalue] , identifier[compression] = literal[string] ) keyword[else] : identifier[dset] = identifier[jgrp] . identifier[create_dataset] ( identifier[str] ( identifier[k] ), identifier[data] = identifier[kvalue] , identifier[maxshape] =( keyword[None] ,), identifier[compression] = literal[string] ) identifier[f] . identifier[close] ()
def dump_dict_of_nested_lists_to_h5(fname, data): """ Take nested list structure and dump it in hdf5 file. Parameters ---------- fname : str Filename data : dict(list(numpy.ndarray)) Dict of nested lists with variable len arrays. Returns ------- None """ # Open file print('writing to file: %s' % fname) f = h5py.File(fname) # Iterate over values for (i, ivalue) in list(data.items()): igrp = f.create_group(str(i)) for (j, jvalue) in enumerate(ivalue): jgrp = igrp.create_group(str(j)) for (k, kvalue) in enumerate(jvalue): if kvalue.size > 0: dset = jgrp.create_dataset(str(k), data=kvalue, compression='gzip') # depends on [control=['if'], data=[]] else: dset = jgrp.create_dataset(str(k), data=kvalue, maxshape=(None,), compression='gzip') # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=[]] # Close file f.close()
def pop_changeset(self, changeset_id: uuid.UUID) -> Dict[bytes, Union[bytes, DeletedEntry]]: """ Returns all changes from the given changeset. This includes all of the changes from any subsequent changeset, giving precidence to later changesets. """ if changeset_id not in self.journal_data: raise KeyError(changeset_id, "Unknown changeset in JournalDB") all_ids = tuple(self.journal_data.keys()) changeset_idx = all_ids.index(changeset_id) changesets_to_pop = all_ids[changeset_idx:] popped_clears = tuple(idx for idx in changesets_to_pop if idx in self._clears_at) if popped_clears: last_clear_idx = changesets_to_pop.index(popped_clears[-1]) changesets_to_drop = changesets_to_pop[:last_clear_idx] changesets_to_merge = changesets_to_pop[last_clear_idx:] else: changesets_to_drop = () changesets_to_merge = changesets_to_pop # we pull all of the changesets *after* the changeset we are # reverting to and collapse them to a single set of keys (giving # precedence to later changesets) changeset_data = merge(*( self.journal_data.pop(c_id) for c_id in changesets_to_merge )) # drop the changes on the floor if they came before a clear that is being committed for changeset_id in changesets_to_drop: self.journal_data.pop(changeset_id) self._clears_at.difference_update(popped_clears) return changeset_data
def function[pop_changeset, parameter[self, changeset_id]]: constant[ Returns all changes from the given changeset. This includes all of the changes from any subsequent changeset, giving precidence to later changesets. ] if compare[name[changeset_id] <ast.NotIn object at 0x7da2590d7190> name[self].journal_data] begin[:] <ast.Raise object at 0x7da1b175faf0> variable[all_ids] assign[=] call[name[tuple], parameter[call[name[self].journal_data.keys, parameter[]]]] variable[changeset_idx] assign[=] call[name[all_ids].index, parameter[name[changeset_id]]] variable[changesets_to_pop] assign[=] call[name[all_ids]][<ast.Slice object at 0x7da1b175ff40>] variable[popped_clears] assign[=] call[name[tuple], parameter[<ast.GeneratorExp object at 0x7da1b175e830>]] if name[popped_clears] begin[:] variable[last_clear_idx] assign[=] call[name[changesets_to_pop].index, parameter[call[name[popped_clears]][<ast.UnaryOp object at 0x7da1b175c160>]]] variable[changesets_to_drop] assign[=] call[name[changesets_to_pop]][<ast.Slice object at 0x7da1b175e380>] variable[changesets_to_merge] assign[=] call[name[changesets_to_pop]][<ast.Slice object at 0x7da1b175e470>] variable[changeset_data] assign[=] call[name[merge], parameter[<ast.Starred object at 0x7da1b1603f10>]] for taget[name[changeset_id]] in starred[name[changesets_to_drop]] begin[:] call[name[self].journal_data.pop, parameter[name[changeset_id]]] call[name[self]._clears_at.difference_update, parameter[name[popped_clears]]] return[name[changeset_data]]
keyword[def] identifier[pop_changeset] ( identifier[self] , identifier[changeset_id] : identifier[uuid] . identifier[UUID] )-> identifier[Dict] [ identifier[bytes] , identifier[Union] [ identifier[bytes] , identifier[DeletedEntry] ]]: literal[string] keyword[if] identifier[changeset_id] keyword[not] keyword[in] identifier[self] . identifier[journal_data] : keyword[raise] identifier[KeyError] ( identifier[changeset_id] , literal[string] ) identifier[all_ids] = identifier[tuple] ( identifier[self] . identifier[journal_data] . identifier[keys] ()) identifier[changeset_idx] = identifier[all_ids] . identifier[index] ( identifier[changeset_id] ) identifier[changesets_to_pop] = identifier[all_ids] [ identifier[changeset_idx] :] identifier[popped_clears] = identifier[tuple] ( identifier[idx] keyword[for] identifier[idx] keyword[in] identifier[changesets_to_pop] keyword[if] identifier[idx] keyword[in] identifier[self] . identifier[_clears_at] ) keyword[if] identifier[popped_clears] : identifier[last_clear_idx] = identifier[changesets_to_pop] . identifier[index] ( identifier[popped_clears] [- literal[int] ]) identifier[changesets_to_drop] = identifier[changesets_to_pop] [: identifier[last_clear_idx] ] identifier[changesets_to_merge] = identifier[changesets_to_pop] [ identifier[last_clear_idx] :] keyword[else] : identifier[changesets_to_drop] =() identifier[changesets_to_merge] = identifier[changesets_to_pop] identifier[changeset_data] = identifier[merge] (*( identifier[self] . identifier[journal_data] . identifier[pop] ( identifier[c_id] ) keyword[for] identifier[c_id] keyword[in] identifier[changesets_to_merge] )) keyword[for] identifier[changeset_id] keyword[in] identifier[changesets_to_drop] : identifier[self] . identifier[journal_data] . identifier[pop] ( identifier[changeset_id] ) identifier[self] . identifier[_clears_at] . identifier[difference_update] ( identifier[popped_clears] ) keyword[return] identifier[changeset_data]
def pop_changeset(self, changeset_id: uuid.UUID) -> Dict[bytes, Union[bytes, DeletedEntry]]: """ Returns all changes from the given changeset. This includes all of the changes from any subsequent changeset, giving precidence to later changesets. """ if changeset_id not in self.journal_data: raise KeyError(changeset_id, 'Unknown changeset in JournalDB') # depends on [control=['if'], data=['changeset_id']] all_ids = tuple(self.journal_data.keys()) changeset_idx = all_ids.index(changeset_id) changesets_to_pop = all_ids[changeset_idx:] popped_clears = tuple((idx for idx in changesets_to_pop if idx in self._clears_at)) if popped_clears: last_clear_idx = changesets_to_pop.index(popped_clears[-1]) changesets_to_drop = changesets_to_pop[:last_clear_idx] changesets_to_merge = changesets_to_pop[last_clear_idx:] # depends on [control=['if'], data=[]] else: changesets_to_drop = () changesets_to_merge = changesets_to_pop # we pull all of the changesets *after* the changeset we are # reverting to and collapse them to a single set of keys (giving # precedence to later changesets) changeset_data = merge(*(self.journal_data.pop(c_id) for c_id in changesets_to_merge)) # drop the changes on the floor if they came before a clear that is being committed for changeset_id in changesets_to_drop: self.journal_data.pop(changeset_id) # depends on [control=['for'], data=['changeset_id']] self._clears_at.difference_update(popped_clears) return changeset_data
def count(self, index="_all", type='', source='', parameters=None, callback=None): """ The query can either be provided using a simple query string as a parameter 'q', or using the Query DSL defined within the request body (source). Notice there are additional query string parameters that could be added only with the first option. """ parameters = parameters or {} path = self.create_path('count', index=index, type=type, **parameters) if source: source = json_encode(source) self.post_by_path(path=path, callback=callback, source=source)
def function[count, parameter[self, index, type, source, parameters, callback]]: constant[ The query can either be provided using a simple query string as a parameter 'q', or using the Query DSL defined within the request body (source). Notice there are additional query string parameters that could be added only with the first option. ] variable[parameters] assign[=] <ast.BoolOp object at 0x7da20c76d9c0> variable[path] assign[=] call[name[self].create_path, parameter[constant[count]]] if name[source] begin[:] variable[source] assign[=] call[name[json_encode], parameter[name[source]]] call[name[self].post_by_path, parameter[]]
keyword[def] identifier[count] ( identifier[self] , identifier[index] = literal[string] , identifier[type] = literal[string] , identifier[source] = literal[string] , identifier[parameters] = keyword[None] , identifier[callback] = keyword[None] ): literal[string] identifier[parameters] = identifier[parameters] keyword[or] {} identifier[path] = identifier[self] . identifier[create_path] ( literal[string] , identifier[index] = identifier[index] , identifier[type] = identifier[type] ,** identifier[parameters] ) keyword[if] identifier[source] : identifier[source] = identifier[json_encode] ( identifier[source] ) identifier[self] . identifier[post_by_path] ( identifier[path] = identifier[path] , identifier[callback] = identifier[callback] , identifier[source] = identifier[source] )
def count(self, index='_all', type='', source='', parameters=None, callback=None): """ The query can either be provided using a simple query string as a parameter 'q', or using the Query DSL defined within the request body (source). Notice there are additional query string parameters that could be added only with the first option. """ parameters = parameters or {} path = self.create_path('count', index=index, type=type, **parameters) if source: source = json_encode(source) # depends on [control=['if'], data=[]] self.post_by_path(path=path, callback=callback, source=source)
def analysis(analysis_id): """Display a single analysis.""" analysis_obj = store.analysis(analysis_id) if analysis_obj is None: return abort(404) if request.method == 'PUT': analysis_obj.update(request.json) store.commit() data = analysis_obj.to_dict() data['failed_jobs'] = [job_obj.to_dict() for job_obj in analysis_obj.failed_jobs] data['user'] = analysis_obj.user.to_dict() if analysis_obj.user else None return jsonify(**data)
def function[analysis, parameter[analysis_id]]: constant[Display a single analysis.] variable[analysis_obj] assign[=] call[name[store].analysis, parameter[name[analysis_id]]] if compare[name[analysis_obj] is constant[None]] begin[:] return[call[name[abort], parameter[constant[404]]]] if compare[name[request].method equal[==] constant[PUT]] begin[:] call[name[analysis_obj].update, parameter[name[request].json]] call[name[store].commit, parameter[]] variable[data] assign[=] call[name[analysis_obj].to_dict, parameter[]] call[name[data]][constant[failed_jobs]] assign[=] <ast.ListComp object at 0x7da2044c2e00> call[name[data]][constant[user]] assign[=] <ast.IfExp object at 0x7da2044c13f0> return[call[name[jsonify], parameter[]]]
keyword[def] identifier[analysis] ( identifier[analysis_id] ): literal[string] identifier[analysis_obj] = identifier[store] . identifier[analysis] ( identifier[analysis_id] ) keyword[if] identifier[analysis_obj] keyword[is] keyword[None] : keyword[return] identifier[abort] ( literal[int] ) keyword[if] identifier[request] . identifier[method] == literal[string] : identifier[analysis_obj] . identifier[update] ( identifier[request] . identifier[json] ) identifier[store] . identifier[commit] () identifier[data] = identifier[analysis_obj] . identifier[to_dict] () identifier[data] [ literal[string] ]=[ identifier[job_obj] . identifier[to_dict] () keyword[for] identifier[job_obj] keyword[in] identifier[analysis_obj] . identifier[failed_jobs] ] identifier[data] [ literal[string] ]= identifier[analysis_obj] . identifier[user] . identifier[to_dict] () keyword[if] identifier[analysis_obj] . identifier[user] keyword[else] keyword[None] keyword[return] identifier[jsonify] (** identifier[data] )
def analysis(analysis_id): """Display a single analysis.""" analysis_obj = store.analysis(analysis_id) if analysis_obj is None: return abort(404) # depends on [control=['if'], data=[]] if request.method == 'PUT': analysis_obj.update(request.json) store.commit() # depends on [control=['if'], data=[]] data = analysis_obj.to_dict() data['failed_jobs'] = [job_obj.to_dict() for job_obj in analysis_obj.failed_jobs] data['user'] = analysis_obj.user.to_dict() if analysis_obj.user else None return jsonify(**data)
def init_UI(self): """ Builds User Interface for the interpretation Editor """ #set fonts FONT_WEIGHT=1 if sys.platform.startswith('win'): FONT_WEIGHT=-1 font1 = wx.Font(9+FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type) font2 = wx.Font(12+FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type) #if you're on mac do some funny stuff to make it look okay is_mac = False if sys.platform.startswith("darwin"): is_mac = True self.search_bar = wx.SearchCtrl(self.panel, size=(350*self.GUI_RESOLUTION,25) ,style=wx.TE_PROCESS_ENTER | wx.TE_PROCESS_TAB | wx.TE_NOHIDESEL) self.Bind(wx.EVT_TEXT_ENTER, self.on_enter_search_bar,self.search_bar) self.Bind(wx.EVT_SEARCHCTRL_SEARCH_BTN, self.on_enter_search_bar,self.search_bar) self.search_bar.SetHelpText(dieh.search_help) # self.Bind(wx.EVT_TEXT, self.on_complete_search_bar,self.search_bar) #build logger self.logger = wx.ListCtrl(self.panel, -1, size=(100*self.GUI_RESOLUTION,475*self.GUI_RESOLUTION),style=wx.LC_REPORT) self.logger.SetFont(font1) self.logger.InsertColumn(0, 'specimen',width=75*self.GUI_RESOLUTION) self.logger.InsertColumn(1, 'fit name',width=65*self.GUI_RESOLUTION) self.logger.InsertColumn(2, 'max',width=55*self.GUI_RESOLUTION) self.logger.InsertColumn(3, 'min',width=55*self.GUI_RESOLUTION) self.logger.InsertColumn(4, 'n',width=25*self.GUI_RESOLUTION) self.logger.InsertColumn(5, 'fit type',width=60*self.GUI_RESOLUTION) self.logger.InsertColumn(6, 'dec',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(7, 'inc',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(8, 'mad',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(9, 'dang',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(10, 'a95',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(11, 'K',width=45*self.GUI_RESOLUTION) self.logger.InsertColumn(12, 'R',width=45*self.GUI_RESOLUTION) self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnClick_listctrl, self.logger) self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK,self.OnRightClickListctrl,self.logger) self.logger.SetHelpText(dieh.logger_help) #set fit attributes boxsizers self.display_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "display options"), wx.HORIZONTAL) self.name_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "fit name/color"), wx.VERTICAL) self.bounds_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, "fit bounds"), wx.VERTICAL) self.buttons_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY), wx.VERTICAL) #logger display selection box UPPER_LEVEL = self.parent.level_box.GetValue() if UPPER_LEVEL=='sample': name_choices = self.parent.samples if UPPER_LEVEL=='site': name_choices = self.parent.sites if UPPER_LEVEL=='location': name_choices = self.parent.locations if UPPER_LEVEL=='study': name_choices = ['this study'] self.level_box = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), value=UPPER_LEVEL, choices=['sample','site','location','study'], style=wx.CB_DROPDOWN|wx.TE_READONLY) self.Bind(wx.EVT_COMBOBOX, self.on_select_high_level,self.level_box) self.level_box.SetHelpText(dieh.level_box_help) self.level_names = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), value=self.parent.level_names.GetValue(), choices=name_choices, style=wx.CB_DROPDOWN|wx.TE_READONLY) self.Bind(wx.EVT_COMBOBOX, self.on_select_level_name,self.level_names) self.level_names.SetHelpText(dieh.level_names_help) #mean type and plot display boxes self.mean_type_box = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), value=self.parent.mean_type_box.GetValue(), choices=['Fisher','Fisher by polarity','None'], style=wx.CB_DROPDOWN|wx.TE_READONLY, name="high_type") self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_type_box,self.mean_type_box) self.mean_type_box.SetHelpText(dieh.mean_type_help) self.mean_fit_box = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), value=self.parent.mean_fit, choices=(['None','All'] + self.parent.fit_list), style=wx.CB_DROPDOWN|wx.TE_READONLY, name="high_type") self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_fit_box,self.mean_fit_box) self.mean_fit_box.SetHelpText(dieh.mean_fit_help) #show box if UPPER_LEVEL == "study" or UPPER_LEVEL == "location": show_box_choices = ['specimens','samples','sites'] if UPPER_LEVEL == "site": show_box_choices = ['specimens','samples'] if UPPER_LEVEL == "sample": show_box_choices = ['specimens'] self.show_box = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), value='specimens', choices=show_box_choices, style=wx.CB_DROPDOWN|wx.TE_READONLY,name="high_elements") self.Bind(wx.EVT_COMBOBOX, self.on_select_show_box,self.show_box) self.show_box.SetHelpText(dieh.show_help) #coordinates box self.coordinates_box = wx.ComboBox(self.panel, -1, size=(110*self.GUI_RESOLUTION, 25), choices=self.parent.coordinate_list, value=self.parent.coordinates_box.GetValue(), style=wx.CB_DROPDOWN|wx.TE_READONLY, name="coordinates") self.Bind(wx.EVT_COMBOBOX, self.on_select_coordinates,self.coordinates_box) self.coordinates_box.SetHelpText(dieh.coordinates_box_help) #bounds select boxes self.tmin_box = wx.ComboBox(self.panel, -1, size=(80*self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN|wx.TE_READONLY, name="lower bound") self.tmin_box.SetHelpText(dieh.tmin_box_help) self.tmax_box = wx.ComboBox(self.panel, -1, size=(80*self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN|wx.TE_READONLY, name="upper bound") self.tmax_box.SetHelpText(dieh.tmax_box_help) #color box self.color_dict = self.parent.color_dict self.color_box = wx.ComboBox(self.panel, -1, size=(80*self.GUI_RESOLUTION, 25), choices=[''] + sorted(self.color_dict.keys()), style=wx.CB_DROPDOWN|wx.TE_PROCESS_ENTER, name="color") self.Bind(wx.EVT_TEXT_ENTER, self.add_new_color, self.color_box) self.color_box.SetHelpText(dieh.color_box_help) #name box self.name_box = wx.TextCtrl(self.panel, -1, size=(80*self.GUI_RESOLUTION, 25), name="name") self.name_box.SetHelpText(dieh.name_box_help) #more mac stuff h_size_buttons,button_spacing = 25,5.5 if is_mac: h_size_buttons,button_spacing = 18,0. #buttons self.add_all_button = wx.Button(self.panel, id=-1, label='add new fit to all specimens',size=(160*self.GUI_RESOLUTION,h_size_buttons)) self.add_all_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.add_fit_to_all, self.add_all_button) self.add_all_button.SetHelpText(dieh.add_all_help) self.add_fit_button = wx.Button(self.panel, id=-1, label='add fit to highlighted specimens',size=(160*self.GUI_RESOLUTION,h_size_buttons)) self.add_fit_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.add_highlighted_fits, self.add_fit_button) self.add_fit_button.SetHelpText(dieh.add_fit_btn_help) self.delete_fit_button = wx.Button(self.panel, id=-1, label='delete highlighted fits',size=(160*self.GUI_RESOLUTION,h_size_buttons)) self.delete_fit_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.delete_highlighted_fits, self.delete_fit_button) self.delete_fit_button.SetHelpText(dieh.delete_fit_btn_help) self.apply_changes_button = wx.Button(self.panel, id=-1, label='apply changes to highlighted fits',size=(160*self.GUI_RESOLUTION,h_size_buttons)) self.apply_changes_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.apply_changes, self.apply_changes_button) self.apply_changes_button.SetHelpText(dieh.apply_changes_help) #windows display_window_0 = wx.GridSizer(2, 1, 10*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) display_window_1 = wx.GridSizer(2, 1, 10*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) display_window_2 = wx.GridSizer(2, 1, 10*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) name_window = wx.GridSizer(2, 1, 10*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) bounds_window = wx.GridSizer(2, 1, 10*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) buttons1_window = wx.GridSizer(4, 1, 5*self.GUI_RESOLUTION, 19*self.GUI_RESOLUTION) display_window_0.AddMany( [(self.coordinates_box, wx.ALIGN_LEFT), (self.show_box, wx.ALIGN_LEFT)] ) display_window_1.AddMany( [(self.level_box, wx.ALIGN_LEFT), (self.level_names, wx.ALIGN_LEFT)] ) display_window_2.AddMany( [(self.mean_type_box, wx.ALIGN_LEFT), (self.mean_fit_box, wx.ALIGN_LEFT)] ) name_window.AddMany( [(self.name_box, wx.ALIGN_LEFT), (self.color_box, wx.ALIGN_LEFT)] ) bounds_window.AddMany( [(self.tmin_box, wx.ALIGN_LEFT), (self.tmax_box, wx.ALIGN_LEFT)] ) buttons1_window.AddMany( [(self.add_fit_button, wx.ALL|wx.ALIGN_CENTER|wx.SHAPED, 0), (self.add_all_button, wx.ALL|wx.ALIGN_CENTER|wx.SHAPED, 0), (self.delete_fit_button, wx.ALL|wx.ALIGN_CENTER|wx.SHAPED, 0), (self.apply_changes_button, wx.ALL|wx.ALIGN_CENTER|wx.SHAPED, 0)]) self.display_sizer.Add(display_window_0, 1, wx.TOP|wx.EXPAND, 8) self.display_sizer.Add(display_window_1, 1, wx.TOP | wx.LEFT|wx.EXPAND, 8) self.display_sizer.Add(display_window_2, 1, wx.TOP | wx.LEFT|wx.EXPAND, 8) self.name_sizer.Add(name_window, 1, wx.TOP, 5.5) self.bounds_sizer.Add(bounds_window, 1, wx.TOP, 5.5) self.buttons_sizer.Add(buttons1_window, 1, wx.TOP, 0) #duplicate high levels plot self.fig = Figure((2.5*self.GUI_RESOLUTION, 2.5*self.GUI_RESOLUTION), dpi=100) self.canvas = FigCanvas(self.panel, -1, self.fig, ) self.toolbar = NavigationToolbar(self.canvas) self.toolbar.Hide() self.toolbar.zoom() self.high_EA_setting = "Zoom" self.canvas.Bind(wx.EVT_LEFT_DCLICK,self.on_equalarea_high_select) self.canvas.Bind(wx.EVT_MOTION,self.on_change_high_mouse_cursor) self.canvas.Bind(wx.EVT_MIDDLE_DOWN,self.home_high_equalarea) self.canvas.Bind(wx.EVT_RIGHT_DOWN,self.pan_zoom_high_equalarea) self.canvas.SetHelpText(dieh.eqarea_help) self.eqarea = self.fig.add_subplot(111) draw_net(self.eqarea) #Higher Level Statistics Box self.stats_sizer = wx.StaticBoxSizer( wx.StaticBox( self.panel, wx.ID_ANY,"mean statistics" ), wx.VERTICAL) for parameter in ['mean_type','dec','inc','alpha95','K','R','n_lines','n_planes']: COMMAND="self.%s_window=wx.TextCtrl(self.panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(100*self.GUI_RESOLUTION,25))"%parameter exec(COMMAND) COMMAND="self.%s_window.SetBackgroundColour(wx.WHITE)"%parameter exec(COMMAND) COMMAND="self.%s_window.SetFont(font2)"%parameter exec(COMMAND) COMMAND="self.%s_outer_window = wx.GridSizer(1,2,5*self.GUI_RESOLUTION,15*self.GUI_RESOLUTION)"%parameter exec(COMMAND) COMMAND="""self.%s_outer_window.AddMany([ (wx.StaticText(self.panel,label='%s',style=wx.TE_CENTER),wx.EXPAND), (self.%s_window, wx.EXPAND)])"""%(parameter,parameter,parameter) exec(COMMAND) COMMAND="self.stats_sizer.Add(self.%s_outer_window, 1, wx.ALIGN_LEFT|wx.EXPAND, 0)"%parameter exec(COMMAND) self.switch_stats_button = wx.SpinButton(self.panel, id=wx.ID_ANY, style=wx.SP_HORIZONTAL|wx.SP_ARROW_KEYS|wx.SP_WRAP, name="change stats") self.Bind(wx.EVT_SPIN, self.on_select_stats_button,self.switch_stats_button) self.switch_stats_button.SetHelpText(dieh.switch_stats_btn_help) #construct panel hbox0 = wx.BoxSizer(wx.HORIZONTAL) hbox0.Add(self.name_sizer,flag=wx.ALIGN_TOP|wx.EXPAND,border=8) hbox0.Add(self.bounds_sizer,flag=wx.ALIGN_TOP|wx.EXPAND,border=8) vbox0 = wx.BoxSizer(wx.VERTICAL) vbox0.Add(hbox0,flag=wx.ALIGN_TOP,border=8) vbox0.Add(self.buttons_sizer,flag=wx.ALIGN_TOP,border=8) hbox1 = wx.BoxSizer(wx.HORIZONTAL) hbox1.Add(vbox0,flag=wx.ALIGN_TOP,border=8) hbox1.Add(self.stats_sizer,flag=wx.ALIGN_TOP,border=8) hbox1.Add(self.switch_stats_button,flag=wx.ALIGN_TOP|wx.EXPAND,border=8) vbox1 = wx.BoxSizer(wx.VERTICAL) vbox1.Add(self.display_sizer,flag=wx.ALIGN_TOP,border=8) vbox1.Add(hbox1,flag=wx.ALIGN_TOP,border=8) vbox1.Add(self.canvas,proportion=1,flag=wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL | wx.EXPAND,border=8) vbox2 = wx.BoxSizer(wx.VERTICAL) vbox2.Add(self.search_bar,proportion=.5,flag=wx.ALIGN_LEFT | wx.ALIGN_BOTTOM | wx.EXPAND, border=8) vbox2.Add(self.logger,proportion=1,flag=wx.ALIGN_LEFT|wx.EXPAND,border=8) hbox2 = wx.BoxSizer(wx.HORIZONTAL) hbox2.Add(vbox2,proportion=1,flag=wx.ALIGN_LEFT|wx.EXPAND) hbox2.Add(vbox1,flag=wx.ALIGN_TOP|wx.EXPAND) self.panel.SetSizerAndFit(hbox2) hbox2.Fit(self)
def function[init_UI, parameter[self]]: constant[ Builds User Interface for the interpretation Editor ] variable[FONT_WEIGHT] assign[=] constant[1] if call[name[sys].platform.startswith, parameter[constant[win]]] begin[:] variable[FONT_WEIGHT] assign[=] <ast.UnaryOp object at 0x7da204961150> variable[font1] assign[=] call[name[wx].Font, parameter[binary_operation[constant[9] + name[FONT_WEIGHT]], name[wx].SWISS, name[wx].NORMAL, name[wx].NORMAL, constant[False], name[self].font_type]] variable[font2] assign[=] call[name[wx].Font, parameter[binary_operation[constant[12] + name[FONT_WEIGHT]], name[wx].SWISS, name[wx].NORMAL, name[wx].NORMAL, constant[False], name[self].font_type]] variable[is_mac] assign[=] constant[False] if call[name[sys].platform.startswith, parameter[constant[darwin]]] begin[:] variable[is_mac] assign[=] constant[True] name[self].search_bar assign[=] call[name[wx].SearchCtrl, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_TEXT_ENTER, name[self].on_enter_search_bar, name[self].search_bar]] call[name[self].Bind, parameter[name[wx].EVT_SEARCHCTRL_SEARCH_BTN, name[self].on_enter_search_bar, name[self].search_bar]] call[name[self].search_bar.SetHelpText, parameter[name[dieh].search_help]] name[self].logger assign[=] call[name[wx].ListCtrl, parameter[name[self].panel, <ast.UnaryOp object at 0x7da207f03040>]] call[name[self].logger.SetFont, parameter[name[font1]]] call[name[self].logger.InsertColumn, parameter[constant[0], constant[specimen]]] call[name[self].logger.InsertColumn, parameter[constant[1], constant[fit name]]] call[name[self].logger.InsertColumn, parameter[constant[2], constant[max]]] call[name[self].logger.InsertColumn, parameter[constant[3], constant[min]]] call[name[self].logger.InsertColumn, parameter[constant[4], constant[n]]] call[name[self].logger.InsertColumn, parameter[constant[5], constant[fit type]]] call[name[self].logger.InsertColumn, parameter[constant[6], constant[dec]]] call[name[self].logger.InsertColumn, parameter[constant[7], constant[inc]]] call[name[self].logger.InsertColumn, parameter[constant[8], constant[mad]]] call[name[self].logger.InsertColumn, parameter[constant[9], constant[dang]]] call[name[self].logger.InsertColumn, parameter[constant[10], constant[a95]]] call[name[self].logger.InsertColumn, parameter[constant[11], constant[K]]] call[name[self].logger.InsertColumn, parameter[constant[12], constant[R]]] call[name[self].Bind, parameter[name[wx].EVT_LIST_ITEM_ACTIVATED, name[self].OnClick_listctrl, name[self].logger]] call[name[self].Bind, parameter[name[wx].EVT_LIST_ITEM_RIGHT_CLICK, name[self].OnRightClickListctrl, name[self].logger]] call[name[self].logger.SetHelpText, parameter[name[dieh].logger_help]] name[self].display_sizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, name[wx].ID_ANY, constant[display options]]], name[wx].HORIZONTAL]] name[self].name_sizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, name[wx].ID_ANY, constant[fit name/color]]], name[wx].VERTICAL]] name[self].bounds_sizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, name[wx].ID_ANY, constant[fit bounds]]], name[wx].VERTICAL]] name[self].buttons_sizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, name[wx].ID_ANY]], name[wx].VERTICAL]] variable[UPPER_LEVEL] assign[=] call[name[self].parent.level_box.GetValue, parameter[]] if compare[name[UPPER_LEVEL] equal[==] constant[sample]] begin[:] variable[name_choices] assign[=] name[self].parent.samples if compare[name[UPPER_LEVEL] equal[==] constant[site]] begin[:] variable[name_choices] assign[=] name[self].parent.sites if compare[name[UPPER_LEVEL] equal[==] constant[location]] begin[:] variable[name_choices] assign[=] name[self].parent.locations if compare[name[UPPER_LEVEL] equal[==] constant[study]] begin[:] variable[name_choices] assign[=] list[[<ast.Constant object at 0x7da207f03400>]] name[self].level_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09f1c0>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_high_level, name[self].level_box]] call[name[self].level_box.SetHelpText, parameter[name[dieh].level_box_help]] name[self].level_names assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09e8f0>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_level_name, name[self].level_names]] call[name[self].level_names.SetHelpText, parameter[name[dieh].level_names_help]] name[self].mean_type_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09cbe0>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_mean_type_box, name[self].mean_type_box]] call[name[self].mean_type_box.SetHelpText, parameter[name[dieh].mean_type_help]] name[self].mean_fit_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09ed40>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_mean_fit_box, name[self].mean_fit_box]] call[name[self].mean_fit_box.SetHelpText, parameter[name[dieh].mean_fit_help]] if <ast.BoolOp object at 0x7da18f09f0a0> begin[:] variable[show_box_choices] assign[=] list[[<ast.Constant object at 0x7da18f09f880>, <ast.Constant object at 0x7da18f09db10>, <ast.Constant object at 0x7da18f09efe0>]] if compare[name[UPPER_LEVEL] equal[==] constant[site]] begin[:] variable[show_box_choices] assign[=] list[[<ast.Constant object at 0x7da18f09d540>, <ast.Constant object at 0x7da18f09d060>]] if compare[name[UPPER_LEVEL] equal[==] constant[sample]] begin[:] variable[show_box_choices] assign[=] list[[<ast.Constant object at 0x7da18f09d780>]] name[self].show_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09db70>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_show_box, name[self].show_box]] call[name[self].show_box.SetHelpText, parameter[name[dieh].show_help]] name[self].coordinates_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18f09c550>]] call[name[self].Bind, parameter[name[wx].EVT_COMBOBOX, name[self].on_select_coordinates, name[self].coordinates_box]] call[name[self].coordinates_box.SetHelpText, parameter[name[dieh].coordinates_box_help]] name[self].tmin_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18bc71450>]] call[name[self].tmin_box.SetHelpText, parameter[name[dieh].tmin_box_help]] name[self].tmax_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18bc70460>]] call[name[self].tmax_box.SetHelpText, parameter[name[dieh].tmax_box_help]] name[self].color_dict assign[=] name[self].parent.color_dict name[self].color_box assign[=] call[name[wx].ComboBox, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18bc71990>]] call[name[self].Bind, parameter[name[wx].EVT_TEXT_ENTER, name[self].add_new_color, name[self].color_box]] call[name[self].color_box.SetHelpText, parameter[name[dieh].color_box_help]] name[self].name_box assign[=] call[name[wx].TextCtrl, parameter[name[self].panel, <ast.UnaryOp object at 0x7da18bc71ae0>]] call[name[self].name_box.SetHelpText, parameter[name[dieh].name_box_help]] <ast.Tuple object at 0x7da18bc70fd0> assign[=] tuple[[<ast.Constant object at 0x7da18bc70dc0>, <ast.Constant object at 0x7da18bc72b60>]] if name[is_mac] begin[:] <ast.Tuple object at 0x7da18bc72da0> assign[=] tuple[[<ast.Constant object at 0x7da18bc73220>, <ast.Constant object at 0x7da18bc73ac0>]] name[self].add_all_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].add_all_button.SetFont, parameter[name[font1]]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].add_fit_to_all, name[self].add_all_button]] call[name[self].add_all_button.SetHelpText, parameter[name[dieh].add_all_help]] name[self].add_fit_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].add_fit_button.SetFont, parameter[name[font1]]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].add_highlighted_fits, name[self].add_fit_button]] call[name[self].add_fit_button.SetHelpText, parameter[name[dieh].add_fit_btn_help]] name[self].delete_fit_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].delete_fit_button.SetFont, parameter[name[font1]]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].delete_highlighted_fits, name[self].delete_fit_button]] call[name[self].delete_fit_button.SetHelpText, parameter[name[dieh].delete_fit_btn_help]] name[self].apply_changes_button assign[=] call[name[wx].Button, parameter[name[self].panel]] call[name[self].apply_changes_button.SetFont, parameter[name[font1]]] call[name[self].Bind, parameter[name[wx].EVT_BUTTON, name[self].apply_changes, name[self].apply_changes_button]] call[name[self].apply_changes_button.SetHelpText, parameter[name[dieh].apply_changes_help]] variable[display_window_0] assign[=] call[name[wx].GridSizer, parameter[constant[2], constant[1], binary_operation[constant[10] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] variable[display_window_1] assign[=] call[name[wx].GridSizer, parameter[constant[2], constant[1], binary_operation[constant[10] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] variable[display_window_2] assign[=] call[name[wx].GridSizer, parameter[constant[2], constant[1], binary_operation[constant[10] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] variable[name_window] assign[=] call[name[wx].GridSizer, parameter[constant[2], constant[1], binary_operation[constant[10] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] variable[bounds_window] assign[=] call[name[wx].GridSizer, parameter[constant[2], constant[1], binary_operation[constant[10] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] variable[buttons1_window] assign[=] call[name[wx].GridSizer, parameter[constant[4], constant[1], binary_operation[constant[5] * name[self].GUI_RESOLUTION], binary_operation[constant[19] * name[self].GUI_RESOLUTION]]] call[name[display_window_0].AddMany, parameter[list[[<ast.Tuple object at 0x7da2046236a0>, <ast.Tuple object at 0x7da204622560>]]]] call[name[display_window_1].AddMany, parameter[list[[<ast.Tuple object at 0x7da204620160>, <ast.Tuple object at 0x7da204621f30>]]]] call[name[display_window_2].AddMany, parameter[list[[<ast.Tuple object at 0x7da2046223b0>, <ast.Tuple object at 0x7da2046212d0>]]]] call[name[name_window].AddMany, parameter[list[[<ast.Tuple object at 0x7da204623070>, <ast.Tuple object at 0x7da204622b60>]]]] call[name[bounds_window].AddMany, parameter[list[[<ast.Tuple object at 0x7da204623ee0>, <ast.Tuple object at 0x7da204623580>]]]] call[name[buttons1_window].AddMany, parameter[list[[<ast.Tuple object at 0x7da2046206a0>, <ast.Tuple object at 0x7da204620c70>, <ast.Tuple object at 0x7da1b04fe4d0>, <ast.Tuple object at 0x7da1b04fce50>]]]] call[name[self].display_sizer.Add, parameter[name[display_window_0], constant[1], binary_operation[name[wx].TOP <ast.BitOr object at 0x7da2590d6aa0> name[wx].EXPAND], constant[8]]] call[name[self].display_sizer.Add, parameter[name[display_window_1], constant[1], binary_operation[binary_operation[name[wx].TOP <ast.BitOr object at 0x7da2590d6aa0> name[wx].LEFT] <ast.BitOr object at 0x7da2590d6aa0> name[wx].EXPAND], constant[8]]] call[name[self].display_sizer.Add, parameter[name[display_window_2], constant[1], binary_operation[binary_operation[name[wx].TOP <ast.BitOr object at 0x7da2590d6aa0> name[wx].LEFT] <ast.BitOr object at 0x7da2590d6aa0> name[wx].EXPAND], constant[8]]] call[name[self].name_sizer.Add, parameter[name[name_window], constant[1], name[wx].TOP, constant[5.5]]] call[name[self].bounds_sizer.Add, parameter[name[bounds_window], constant[1], name[wx].TOP, constant[5.5]]] call[name[self].buttons_sizer.Add, parameter[name[buttons1_window], constant[1], name[wx].TOP, constant[0]]] name[self].fig assign[=] call[name[Figure], parameter[tuple[[<ast.BinOp object at 0x7da1b04fdf90>, <ast.BinOp object at 0x7da1b04ff580>]]]] name[self].canvas assign[=] call[name[FigCanvas], parameter[name[self].panel, <ast.UnaryOp object at 0x7da1b04fdff0>, name[self].fig]] name[self].toolbar assign[=] call[name[NavigationToolbar], parameter[name[self].canvas]] call[name[self].toolbar.Hide, parameter[]] call[name[self].toolbar.zoom, parameter[]] name[self].high_EA_setting assign[=] constant[Zoom] call[name[self].canvas.Bind, parameter[name[wx].EVT_LEFT_DCLICK, name[self].on_equalarea_high_select]] call[name[self].canvas.Bind, parameter[name[wx].EVT_MOTION, name[self].on_change_high_mouse_cursor]] call[name[self].canvas.Bind, parameter[name[wx].EVT_MIDDLE_DOWN, name[self].home_high_equalarea]] call[name[self].canvas.Bind, parameter[name[wx].EVT_RIGHT_DOWN, name[self].pan_zoom_high_equalarea]] call[name[self].canvas.SetHelpText, parameter[name[dieh].eqarea_help]] name[self].eqarea assign[=] call[name[self].fig.add_subplot, parameter[constant[111]]] call[name[draw_net], parameter[name[self].eqarea]] name[self].stats_sizer assign[=] call[name[wx].StaticBoxSizer, parameter[call[name[wx].StaticBox, parameter[name[self].panel, name[wx].ID_ANY, constant[mean statistics]]], name[wx].VERTICAL]] for taget[name[parameter]] in starred[list[[<ast.Constant object at 0x7da1b04fdae0>, <ast.Constant object at 0x7da1b04ff2b0>, <ast.Constant object at 0x7da1b04ff1f0>, <ast.Constant object at 0x7da1b04ff0d0>, <ast.Constant object at 0x7da1b04fc640>, <ast.Constant object at 0x7da1b04fc5e0>, <ast.Constant object at 0x7da1b04fea10>, <ast.Constant object at 0x7da1b04fd0c0>]]] begin[:] variable[COMMAND] assign[=] binary_operation[constant[self.%s_window=wx.TextCtrl(self.panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(100*self.GUI_RESOLUTION,25))] <ast.Mod object at 0x7da2590d6920> name[parameter]] call[name[exec], parameter[name[COMMAND]]] variable[COMMAND] assign[=] binary_operation[constant[self.%s_window.SetBackgroundColour(wx.WHITE)] <ast.Mod object at 0x7da2590d6920> name[parameter]] call[name[exec], parameter[name[COMMAND]]] variable[COMMAND] assign[=] binary_operation[constant[self.%s_window.SetFont(font2)] <ast.Mod object at 0x7da2590d6920> name[parameter]] call[name[exec], parameter[name[COMMAND]]] variable[COMMAND] assign[=] binary_operation[constant[self.%s_outer_window = wx.GridSizer(1,2,5*self.GUI_RESOLUTION,15*self.GUI_RESOLUTION)] <ast.Mod object at 0x7da2590d6920> name[parameter]] call[name[exec], parameter[name[COMMAND]]] variable[COMMAND] assign[=] binary_operation[constant[self.%s_outer_window.AddMany([ (wx.StaticText(self.panel,label='%s',style=wx.TE_CENTER),wx.EXPAND), (self.%s_window, wx.EXPAND)])] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b04ff880>, <ast.Name object at 0x7da1b04ff6d0>, <ast.Name object at 0x7da1b05c9690>]]] call[name[exec], parameter[name[COMMAND]]] variable[COMMAND] assign[=] binary_operation[constant[self.stats_sizer.Add(self.%s_outer_window, 1, wx.ALIGN_LEFT|wx.EXPAND, 0)] <ast.Mod object at 0x7da2590d6920> name[parameter]] call[name[exec], parameter[name[COMMAND]]] name[self].switch_stats_button assign[=] call[name[wx].SpinButton, parameter[name[self].panel]] call[name[self].Bind, parameter[name[wx].EVT_SPIN, name[self].on_select_stats_button, name[self].switch_stats_button]] call[name[self].switch_stats_button.SetHelpText, parameter[name[dieh].switch_stats_btn_help]] variable[hbox0] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] call[name[hbox0].Add, parameter[name[self].name_sizer]] call[name[hbox0].Add, parameter[name[self].bounds_sizer]] variable[vbox0] assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] call[name[vbox0].Add, parameter[name[hbox0]]] call[name[vbox0].Add, parameter[name[self].buttons_sizer]] variable[hbox1] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] call[name[hbox1].Add, parameter[name[vbox0]]] call[name[hbox1].Add, parameter[name[self].stats_sizer]] call[name[hbox1].Add, parameter[name[self].switch_stats_button]] variable[vbox1] assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] call[name[vbox1].Add, parameter[name[self].display_sizer]] call[name[vbox1].Add, parameter[name[hbox1]]] call[name[vbox1].Add, parameter[name[self].canvas]] variable[vbox2] assign[=] call[name[wx].BoxSizer, parameter[name[wx].VERTICAL]] call[name[vbox2].Add, parameter[name[self].search_bar]] call[name[vbox2].Add, parameter[name[self].logger]] variable[hbox2] assign[=] call[name[wx].BoxSizer, parameter[name[wx].HORIZONTAL]] call[name[hbox2].Add, parameter[name[vbox2]]] call[name[hbox2].Add, parameter[name[vbox1]]] call[name[self].panel.SetSizerAndFit, parameter[name[hbox2]]] call[name[hbox2].Fit, parameter[name[self]]]
keyword[def] identifier[init_UI] ( identifier[self] ): literal[string] identifier[FONT_WEIGHT] = literal[int] keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ): identifier[FONT_WEIGHT] =- literal[int] identifier[font1] = identifier[wx] . identifier[Font] ( literal[int] + identifier[FONT_WEIGHT] , identifier[wx] . identifier[SWISS] , identifier[wx] . identifier[NORMAL] , identifier[wx] . identifier[NORMAL] , keyword[False] , identifier[self] . identifier[font_type] ) identifier[font2] = identifier[wx] . identifier[Font] ( literal[int] + identifier[FONT_WEIGHT] , identifier[wx] . identifier[SWISS] , identifier[wx] . identifier[NORMAL] , identifier[wx] . identifier[NORMAL] , keyword[False] , identifier[self] . identifier[font_type] ) identifier[is_mac] = keyword[False] keyword[if] identifier[sys] . identifier[platform] . identifier[startswith] ( literal[string] ): identifier[is_mac] = keyword[True] identifier[self] . identifier[search_bar] = identifier[wx] . identifier[SearchCtrl] ( identifier[self] . identifier[panel] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[style] = identifier[wx] . identifier[TE_PROCESS_ENTER] | identifier[wx] . identifier[TE_PROCESS_TAB] | identifier[wx] . identifier[TE_NOHIDESEL] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_TEXT_ENTER] , identifier[self] . identifier[on_enter_search_bar] , identifier[self] . identifier[search_bar] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_SEARCHCTRL_SEARCH_BTN] , identifier[self] . identifier[on_enter_search_bar] , identifier[self] . identifier[search_bar] ) identifier[self] . identifier[search_bar] . identifier[SetHelpText] ( identifier[dieh] . identifier[search_help] ) identifier[self] . identifier[logger] = identifier[wx] . identifier[ListCtrl] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ), identifier[style] = identifier[wx] . identifier[LC_REPORT] ) identifier[self] . identifier[logger] . identifier[SetFont] ( identifier[font1] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[logger] . identifier[InsertColumn] ( literal[int] , literal[string] , identifier[width] = literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_LIST_ITEM_ACTIVATED] , identifier[self] . identifier[OnClick_listctrl] , identifier[self] . identifier[logger] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_LIST_ITEM_RIGHT_CLICK] , identifier[self] . identifier[OnRightClickListctrl] , identifier[self] . identifier[logger] ) identifier[self] . identifier[logger] . identifier[SetHelpText] ( identifier[dieh] . identifier[logger_help] ) identifier[self] . identifier[display_sizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] , literal[string] ), identifier[wx] . identifier[HORIZONTAL] ) identifier[self] . identifier[name_sizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] , literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[self] . identifier[bounds_sizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] , literal[string] ), identifier[wx] . identifier[VERTICAL] ) identifier[self] . identifier[buttons_sizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] ), identifier[wx] . identifier[VERTICAL] ) identifier[UPPER_LEVEL] = identifier[self] . identifier[parent] . identifier[level_box] . identifier[GetValue] () keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[name_choices] = identifier[self] . identifier[parent] . identifier[samples] keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[name_choices] = identifier[self] . identifier[parent] . identifier[sites] keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[name_choices] = identifier[self] . identifier[parent] . identifier[locations] keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[name_choices] =[ literal[string] ] identifier[self] . identifier[level_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[value] = identifier[UPPER_LEVEL] , identifier[choices] =[ literal[string] , literal[string] , literal[string] , literal[string] ], identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_high_level] , identifier[self] . identifier[level_box] ) identifier[self] . identifier[level_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[level_box_help] ) identifier[self] . identifier[level_names] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[value] = identifier[self] . identifier[parent] . identifier[level_names] . identifier[GetValue] (), identifier[choices] = identifier[name_choices] , identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_level_name] , identifier[self] . identifier[level_names] ) identifier[self] . identifier[level_names] . identifier[SetHelpText] ( identifier[dieh] . identifier[level_names_help] ) identifier[self] . identifier[mean_type_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[value] = identifier[self] . identifier[parent] . identifier[mean_type_box] . identifier[GetValue] (), identifier[choices] =[ literal[string] , literal[string] , literal[string] ], identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_mean_type_box] , identifier[self] . identifier[mean_type_box] ) identifier[self] . identifier[mean_type_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[mean_type_help] ) identifier[self] . identifier[mean_fit_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[value] = identifier[self] . identifier[parent] . identifier[mean_fit] , identifier[choices] =([ literal[string] , literal[string] ]+ identifier[self] . identifier[parent] . identifier[fit_list] ), identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_mean_fit_box] , identifier[self] . identifier[mean_fit_box] ) identifier[self] . identifier[mean_fit_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[mean_fit_help] ) keyword[if] identifier[UPPER_LEVEL] == literal[string] keyword[or] identifier[UPPER_LEVEL] == literal[string] : identifier[show_box_choices] =[ literal[string] , literal[string] , literal[string] ] keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[show_box_choices] =[ literal[string] , literal[string] ] keyword[if] identifier[UPPER_LEVEL] == literal[string] : identifier[show_box_choices] =[ literal[string] ] identifier[self] . identifier[show_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[value] = literal[string] , identifier[choices] = identifier[show_box_choices] , identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_show_box] , identifier[self] . identifier[show_box] ) identifier[self] . identifier[show_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[show_help] ) identifier[self] . identifier[coordinates_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[choices] = identifier[self] . identifier[parent] . identifier[coordinate_list] , identifier[value] = identifier[self] . identifier[parent] . identifier[coordinates_box] . identifier[GetValue] (), identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_COMBOBOX] , identifier[self] . identifier[on_select_coordinates] , identifier[self] . identifier[coordinates_box] ) identifier[self] . identifier[coordinates_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[coordinates_box_help] ) identifier[self] . identifier[tmin_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[choices] =[ literal[string] ]+ identifier[self] . identifier[parent] . identifier[T_list] , identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[tmin_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[tmin_box_help] ) identifier[self] . identifier[tmax_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[choices] =[ literal[string] ]+ identifier[self] . identifier[parent] . identifier[T_list] , identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_READONLY] , identifier[name] = literal[string] ) identifier[self] . identifier[tmax_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[tmax_box_help] ) identifier[self] . identifier[color_dict] = identifier[self] . identifier[parent] . identifier[color_dict] identifier[self] . identifier[color_box] = identifier[wx] . identifier[ComboBox] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[choices] =[ literal[string] ]+ identifier[sorted] ( identifier[self] . identifier[color_dict] . identifier[keys] ()), identifier[style] = identifier[wx] . identifier[CB_DROPDOWN] | identifier[wx] . identifier[TE_PROCESS_ENTER] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_TEXT_ENTER] , identifier[self] . identifier[add_new_color] , identifier[self] . identifier[color_box] ) identifier[self] . identifier[color_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[color_box_help] ) identifier[self] . identifier[name_box] = identifier[wx] . identifier[TextCtrl] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] ), identifier[name] = literal[string] ) identifier[self] . identifier[name_box] . identifier[SetHelpText] ( identifier[dieh] . identifier[name_box_help] ) identifier[h_size_buttons] , identifier[button_spacing] = literal[int] , literal[int] keyword[if] identifier[is_mac] : identifier[h_size_buttons] , identifier[button_spacing] = literal[int] , literal[int] identifier[self] . identifier[add_all_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , identifier[h_size_buttons] )) identifier[self] . identifier[add_all_button] . identifier[SetFont] ( identifier[font1] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[add_fit_to_all] , identifier[self] . identifier[add_all_button] ) identifier[self] . identifier[add_all_button] . identifier[SetHelpText] ( identifier[dieh] . identifier[add_all_help] ) identifier[self] . identifier[add_fit_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , identifier[h_size_buttons] )) identifier[self] . identifier[add_fit_button] . identifier[SetFont] ( identifier[font1] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[add_highlighted_fits] , identifier[self] . identifier[add_fit_button] ) identifier[self] . identifier[add_fit_button] . identifier[SetHelpText] ( identifier[dieh] . identifier[add_fit_btn_help] ) identifier[self] . identifier[delete_fit_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , identifier[h_size_buttons] )) identifier[self] . identifier[delete_fit_button] . identifier[SetFont] ( identifier[font1] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[delete_highlighted_fits] , identifier[self] . identifier[delete_fit_button] ) identifier[self] . identifier[delete_fit_button] . identifier[SetHelpText] ( identifier[dieh] . identifier[delete_fit_btn_help] ) identifier[self] . identifier[apply_changes_button] = identifier[wx] . identifier[Button] ( identifier[self] . identifier[panel] , identifier[id] =- literal[int] , identifier[label] = literal[string] , identifier[size] =( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , identifier[h_size_buttons] )) identifier[self] . identifier[apply_changes_button] . identifier[SetFont] ( identifier[font1] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_BUTTON] , identifier[self] . identifier[apply_changes] , identifier[self] . identifier[apply_changes_button] ) identifier[self] . identifier[apply_changes_button] . identifier[SetHelpText] ( identifier[dieh] . identifier[apply_changes_help] ) identifier[display_window_0] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[display_window_1] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[display_window_2] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[name_window] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[bounds_window] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[buttons1_window] = identifier[wx] . identifier[GridSizer] ( literal[int] , literal[int] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ) identifier[display_window_0] . identifier[AddMany] ([( identifier[self] . identifier[coordinates_box] , identifier[wx] . identifier[ALIGN_LEFT] ), ( identifier[self] . identifier[show_box] , identifier[wx] . identifier[ALIGN_LEFT] )]) identifier[display_window_1] . identifier[AddMany] ([( identifier[self] . identifier[level_box] , identifier[wx] . identifier[ALIGN_LEFT] ), ( identifier[self] . identifier[level_names] , identifier[wx] . identifier[ALIGN_LEFT] )]) identifier[display_window_2] . identifier[AddMany] ([( identifier[self] . identifier[mean_type_box] , identifier[wx] . identifier[ALIGN_LEFT] ), ( identifier[self] . identifier[mean_fit_box] , identifier[wx] . identifier[ALIGN_LEFT] )]) identifier[name_window] . identifier[AddMany] ([( identifier[self] . identifier[name_box] , identifier[wx] . identifier[ALIGN_LEFT] ), ( identifier[self] . identifier[color_box] , identifier[wx] . identifier[ALIGN_LEFT] )]) identifier[bounds_window] . identifier[AddMany] ([( identifier[self] . identifier[tmin_box] , identifier[wx] . identifier[ALIGN_LEFT] ), ( identifier[self] . identifier[tmax_box] , identifier[wx] . identifier[ALIGN_LEFT] )]) identifier[buttons1_window] . identifier[AddMany] ([( identifier[self] . identifier[add_fit_button] , identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[SHAPED] , literal[int] ), ( identifier[self] . identifier[add_all_button] , identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[SHAPED] , literal[int] ), ( identifier[self] . identifier[delete_fit_button] , identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[SHAPED] , literal[int] ), ( identifier[self] . identifier[apply_changes_button] , identifier[wx] . identifier[ALL] | identifier[wx] . identifier[ALIGN_CENTER] | identifier[wx] . identifier[SHAPED] , literal[int] )]) identifier[self] . identifier[display_sizer] . identifier[Add] ( identifier[display_window_0] , literal[int] , identifier[wx] . identifier[TOP] | identifier[wx] . identifier[EXPAND] , literal[int] ) identifier[self] . identifier[display_sizer] . identifier[Add] ( identifier[display_window_1] , literal[int] , identifier[wx] . identifier[TOP] | identifier[wx] . identifier[LEFT] | identifier[wx] . identifier[EXPAND] , literal[int] ) identifier[self] . identifier[display_sizer] . identifier[Add] ( identifier[display_window_2] , literal[int] , identifier[wx] . identifier[TOP] | identifier[wx] . identifier[LEFT] | identifier[wx] . identifier[EXPAND] , literal[int] ) identifier[self] . identifier[name_sizer] . identifier[Add] ( identifier[name_window] , literal[int] , identifier[wx] . identifier[TOP] , literal[int] ) identifier[self] . identifier[bounds_sizer] . identifier[Add] ( identifier[bounds_window] , literal[int] , identifier[wx] . identifier[TOP] , literal[int] ) identifier[self] . identifier[buttons_sizer] . identifier[Add] ( identifier[buttons1_window] , literal[int] , identifier[wx] . identifier[TOP] , literal[int] ) identifier[self] . identifier[fig] = identifier[Figure] (( literal[int] * identifier[self] . identifier[GUI_RESOLUTION] , literal[int] * identifier[self] . identifier[GUI_RESOLUTION] ), identifier[dpi] = literal[int] ) identifier[self] . identifier[canvas] = identifier[FigCanvas] ( identifier[self] . identifier[panel] ,- literal[int] , identifier[self] . identifier[fig] ,) identifier[self] . identifier[toolbar] = identifier[NavigationToolbar] ( identifier[self] . identifier[canvas] ) identifier[self] . identifier[toolbar] . identifier[Hide] () identifier[self] . identifier[toolbar] . identifier[zoom] () identifier[self] . identifier[high_EA_setting] = literal[string] identifier[self] . identifier[canvas] . identifier[Bind] ( identifier[wx] . identifier[EVT_LEFT_DCLICK] , identifier[self] . identifier[on_equalarea_high_select] ) identifier[self] . identifier[canvas] . identifier[Bind] ( identifier[wx] . identifier[EVT_MOTION] , identifier[self] . identifier[on_change_high_mouse_cursor] ) identifier[self] . identifier[canvas] . identifier[Bind] ( identifier[wx] . identifier[EVT_MIDDLE_DOWN] , identifier[self] . identifier[home_high_equalarea] ) identifier[self] . identifier[canvas] . identifier[Bind] ( identifier[wx] . identifier[EVT_RIGHT_DOWN] , identifier[self] . identifier[pan_zoom_high_equalarea] ) identifier[self] . identifier[canvas] . identifier[SetHelpText] ( identifier[dieh] . identifier[eqarea_help] ) identifier[self] . identifier[eqarea] = identifier[self] . identifier[fig] . identifier[add_subplot] ( literal[int] ) identifier[draw_net] ( identifier[self] . identifier[eqarea] ) identifier[self] . identifier[stats_sizer] = identifier[wx] . identifier[StaticBoxSizer] ( identifier[wx] . identifier[StaticBox] ( identifier[self] . identifier[panel] , identifier[wx] . identifier[ID_ANY] , literal[string] ), identifier[wx] . identifier[VERTICAL] ) keyword[for] identifier[parameter] keyword[in] [ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ]: identifier[COMMAND] = literal[string] % identifier[parameter] identifier[exec] ( identifier[COMMAND] ) identifier[COMMAND] = literal[string] % identifier[parameter] identifier[exec] ( identifier[COMMAND] ) identifier[COMMAND] = literal[string] % identifier[parameter] identifier[exec] ( identifier[COMMAND] ) identifier[COMMAND] = literal[string] % identifier[parameter] identifier[exec] ( identifier[COMMAND] ) identifier[COMMAND] = literal[string] %( identifier[parameter] , identifier[parameter] , identifier[parameter] ) identifier[exec] ( identifier[COMMAND] ) identifier[COMMAND] = literal[string] % identifier[parameter] identifier[exec] ( identifier[COMMAND] ) identifier[self] . identifier[switch_stats_button] = identifier[wx] . identifier[SpinButton] ( identifier[self] . identifier[panel] , identifier[id] = identifier[wx] . identifier[ID_ANY] , identifier[style] = identifier[wx] . identifier[SP_HORIZONTAL] | identifier[wx] . identifier[SP_ARROW_KEYS] | identifier[wx] . identifier[SP_WRAP] , identifier[name] = literal[string] ) identifier[self] . identifier[Bind] ( identifier[wx] . identifier[EVT_SPIN] , identifier[self] . identifier[on_select_stats_button] , identifier[self] . identifier[switch_stats_button] ) identifier[self] . identifier[switch_stats_button] . identifier[SetHelpText] ( identifier[dieh] . identifier[switch_stats_btn_help] ) identifier[hbox0] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[hbox0] . identifier[Add] ( identifier[self] . identifier[name_sizer] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[hbox0] . identifier[Add] ( identifier[self] . identifier[bounds_sizer] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[vbox0] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[vbox0] . identifier[Add] ( identifier[hbox0] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[vbox0] . identifier[Add] ( identifier[self] . identifier[buttons_sizer] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[hbox1] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[hbox1] . identifier[Add] ( identifier[vbox0] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[hbox1] . identifier[Add] ( identifier[self] . identifier[stats_sizer] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[hbox1] . identifier[Add] ( identifier[self] . identifier[switch_stats_button] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[vbox1] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[vbox1] . identifier[Add] ( identifier[self] . identifier[display_sizer] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[vbox1] . identifier[Add] ( identifier[hbox1] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] , identifier[border] = literal[int] ) identifier[vbox1] . identifier[Add] ( identifier[self] . identifier[canvas] , identifier[proportion] = literal[int] , identifier[flag] = identifier[wx] . identifier[ALIGN_CENTER_HORIZONTAL] | identifier[wx] . identifier[ALIGN_CENTER_VERTICAL] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[vbox2] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[VERTICAL] ) identifier[vbox2] . identifier[Add] ( identifier[self] . identifier[search_bar] , identifier[proportion] = literal[int] , identifier[flag] = identifier[wx] . identifier[ALIGN_LEFT] | identifier[wx] . identifier[ALIGN_BOTTOM] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[vbox2] . identifier[Add] ( identifier[self] . identifier[logger] , identifier[proportion] = literal[int] , identifier[flag] = identifier[wx] . identifier[ALIGN_LEFT] | identifier[wx] . identifier[EXPAND] , identifier[border] = literal[int] ) identifier[hbox2] = identifier[wx] . identifier[BoxSizer] ( identifier[wx] . identifier[HORIZONTAL] ) identifier[hbox2] . identifier[Add] ( identifier[vbox2] , identifier[proportion] = literal[int] , identifier[flag] = identifier[wx] . identifier[ALIGN_LEFT] | identifier[wx] . identifier[EXPAND] ) identifier[hbox2] . identifier[Add] ( identifier[vbox1] , identifier[flag] = identifier[wx] . identifier[ALIGN_TOP] | identifier[wx] . identifier[EXPAND] ) identifier[self] . identifier[panel] . identifier[SetSizerAndFit] ( identifier[hbox2] ) identifier[hbox2] . identifier[Fit] ( identifier[self] )
def init_UI(self): """ Builds User Interface for the interpretation Editor """ #set fonts FONT_WEIGHT = 1 if sys.platform.startswith('win'): FONT_WEIGHT = -1 # depends on [control=['if'], data=[]] font1 = wx.Font(9 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type) font2 = wx.Font(12 + FONT_WEIGHT, wx.SWISS, wx.NORMAL, wx.NORMAL, False, self.font_type) #if you're on mac do some funny stuff to make it look okay is_mac = False if sys.platform.startswith('darwin'): is_mac = True # depends on [control=['if'], data=[]] self.search_bar = wx.SearchCtrl(self.panel, size=(350 * self.GUI_RESOLUTION, 25), style=wx.TE_PROCESS_ENTER | wx.TE_PROCESS_TAB | wx.TE_NOHIDESEL) self.Bind(wx.EVT_TEXT_ENTER, self.on_enter_search_bar, self.search_bar) self.Bind(wx.EVT_SEARCHCTRL_SEARCH_BTN, self.on_enter_search_bar, self.search_bar) self.search_bar.SetHelpText(dieh.search_help) # self.Bind(wx.EVT_TEXT, self.on_complete_search_bar,self.search_bar) #build logger self.logger = wx.ListCtrl(self.panel, -1, size=(100 * self.GUI_RESOLUTION, 475 * self.GUI_RESOLUTION), style=wx.LC_REPORT) self.logger.SetFont(font1) self.logger.InsertColumn(0, 'specimen', width=75 * self.GUI_RESOLUTION) self.logger.InsertColumn(1, 'fit name', width=65 * self.GUI_RESOLUTION) self.logger.InsertColumn(2, 'max', width=55 * self.GUI_RESOLUTION) self.logger.InsertColumn(3, 'min', width=55 * self.GUI_RESOLUTION) self.logger.InsertColumn(4, 'n', width=25 * self.GUI_RESOLUTION) self.logger.InsertColumn(5, 'fit type', width=60 * self.GUI_RESOLUTION) self.logger.InsertColumn(6, 'dec', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(7, 'inc', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(8, 'mad', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(9, 'dang', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(10, 'a95', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(11, 'K', width=45 * self.GUI_RESOLUTION) self.logger.InsertColumn(12, 'R', width=45 * self.GUI_RESOLUTION) self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnClick_listctrl, self.logger) self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK, self.OnRightClickListctrl, self.logger) self.logger.SetHelpText(dieh.logger_help) #set fit attributes boxsizers self.display_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, 'display options'), wx.HORIZONTAL) self.name_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, 'fit name/color'), wx.VERTICAL) self.bounds_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, 'fit bounds'), wx.VERTICAL) self.buttons_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY), wx.VERTICAL) #logger display selection box UPPER_LEVEL = self.parent.level_box.GetValue() if UPPER_LEVEL == 'sample': name_choices = self.parent.samples # depends on [control=['if'], data=[]] if UPPER_LEVEL == 'site': name_choices = self.parent.sites # depends on [control=['if'], data=[]] if UPPER_LEVEL == 'location': name_choices = self.parent.locations # depends on [control=['if'], data=[]] if UPPER_LEVEL == 'study': name_choices = ['this study'] # depends on [control=['if'], data=[]] self.level_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=UPPER_LEVEL, choices=['sample', 'site', 'location', 'study'], style=wx.CB_DROPDOWN | wx.TE_READONLY) self.Bind(wx.EVT_COMBOBOX, self.on_select_high_level, self.level_box) self.level_box.SetHelpText(dieh.level_box_help) self.level_names = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.level_names.GetValue(), choices=name_choices, style=wx.CB_DROPDOWN | wx.TE_READONLY) self.Bind(wx.EVT_COMBOBOX, self.on_select_level_name, self.level_names) self.level_names.SetHelpText(dieh.level_names_help) #mean type and plot display boxes self.mean_type_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.mean_type_box.GetValue(), choices=['Fisher', 'Fisher by polarity', 'None'], style=wx.CB_DROPDOWN | wx.TE_READONLY, name='high_type') self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_type_box, self.mean_type_box) self.mean_type_box.SetHelpText(dieh.mean_type_help) self.mean_fit_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value=self.parent.mean_fit, choices=['None', 'All'] + self.parent.fit_list, style=wx.CB_DROPDOWN | wx.TE_READONLY, name='high_type') self.Bind(wx.EVT_COMBOBOX, self.on_select_mean_fit_box, self.mean_fit_box) self.mean_fit_box.SetHelpText(dieh.mean_fit_help) #show box if UPPER_LEVEL == 'study' or UPPER_LEVEL == 'location': show_box_choices = ['specimens', 'samples', 'sites'] # depends on [control=['if'], data=[]] if UPPER_LEVEL == 'site': show_box_choices = ['specimens', 'samples'] # depends on [control=['if'], data=[]] if UPPER_LEVEL == 'sample': show_box_choices = ['specimens'] # depends on [control=['if'], data=[]] self.show_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), value='specimens', choices=show_box_choices, style=wx.CB_DROPDOWN | wx.TE_READONLY, name='high_elements') self.Bind(wx.EVT_COMBOBOX, self.on_select_show_box, self.show_box) self.show_box.SetHelpText(dieh.show_help) #coordinates box self.coordinates_box = wx.ComboBox(self.panel, -1, size=(110 * self.GUI_RESOLUTION, 25), choices=self.parent.coordinate_list, value=self.parent.coordinates_box.GetValue(), style=wx.CB_DROPDOWN | wx.TE_READONLY, name='coordinates') self.Bind(wx.EVT_COMBOBOX, self.on_select_coordinates, self.coordinates_box) self.coordinates_box.SetHelpText(dieh.coordinates_box_help) #bounds select boxes self.tmin_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY, name='lower bound') self.tmin_box.SetHelpText(dieh.tmin_box_help) self.tmax_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + self.parent.T_list, style=wx.CB_DROPDOWN | wx.TE_READONLY, name='upper bound') self.tmax_box.SetHelpText(dieh.tmax_box_help) #color box self.color_dict = self.parent.color_dict self.color_box = wx.ComboBox(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), choices=[''] + sorted(self.color_dict.keys()), style=wx.CB_DROPDOWN | wx.TE_PROCESS_ENTER, name='color') self.Bind(wx.EVT_TEXT_ENTER, self.add_new_color, self.color_box) self.color_box.SetHelpText(dieh.color_box_help) #name box self.name_box = wx.TextCtrl(self.panel, -1, size=(80 * self.GUI_RESOLUTION, 25), name='name') self.name_box.SetHelpText(dieh.name_box_help) #more mac stuff (h_size_buttons, button_spacing) = (25, 5.5) if is_mac: (h_size_buttons, button_spacing) = (18, 0.0) # depends on [control=['if'], data=[]] #buttons self.add_all_button = wx.Button(self.panel, id=-1, label='add new fit to all specimens', size=(160 * self.GUI_RESOLUTION, h_size_buttons)) self.add_all_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.add_fit_to_all, self.add_all_button) self.add_all_button.SetHelpText(dieh.add_all_help) self.add_fit_button = wx.Button(self.panel, id=-1, label='add fit to highlighted specimens', size=(160 * self.GUI_RESOLUTION, h_size_buttons)) self.add_fit_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.add_highlighted_fits, self.add_fit_button) self.add_fit_button.SetHelpText(dieh.add_fit_btn_help) self.delete_fit_button = wx.Button(self.panel, id=-1, label='delete highlighted fits', size=(160 * self.GUI_RESOLUTION, h_size_buttons)) self.delete_fit_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.delete_highlighted_fits, self.delete_fit_button) self.delete_fit_button.SetHelpText(dieh.delete_fit_btn_help) self.apply_changes_button = wx.Button(self.panel, id=-1, label='apply changes to highlighted fits', size=(160 * self.GUI_RESOLUTION, h_size_buttons)) self.apply_changes_button.SetFont(font1) self.Bind(wx.EVT_BUTTON, self.apply_changes, self.apply_changes_button) self.apply_changes_button.SetHelpText(dieh.apply_changes_help) #windows display_window_0 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) display_window_1 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) display_window_2 = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) name_window = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) bounds_window = wx.GridSizer(2, 1, 10 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) buttons1_window = wx.GridSizer(4, 1, 5 * self.GUI_RESOLUTION, 19 * self.GUI_RESOLUTION) display_window_0.AddMany([(self.coordinates_box, wx.ALIGN_LEFT), (self.show_box, wx.ALIGN_LEFT)]) display_window_1.AddMany([(self.level_box, wx.ALIGN_LEFT), (self.level_names, wx.ALIGN_LEFT)]) display_window_2.AddMany([(self.mean_type_box, wx.ALIGN_LEFT), (self.mean_fit_box, wx.ALIGN_LEFT)]) name_window.AddMany([(self.name_box, wx.ALIGN_LEFT), (self.color_box, wx.ALIGN_LEFT)]) bounds_window.AddMany([(self.tmin_box, wx.ALIGN_LEFT), (self.tmax_box, wx.ALIGN_LEFT)]) buttons1_window.AddMany([(self.add_fit_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.add_all_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.delete_fit_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0), (self.apply_changes_button, wx.ALL | wx.ALIGN_CENTER | wx.SHAPED, 0)]) self.display_sizer.Add(display_window_0, 1, wx.TOP | wx.EXPAND, 8) self.display_sizer.Add(display_window_1, 1, wx.TOP | wx.LEFT | wx.EXPAND, 8) self.display_sizer.Add(display_window_2, 1, wx.TOP | wx.LEFT | wx.EXPAND, 8) self.name_sizer.Add(name_window, 1, wx.TOP, 5.5) self.bounds_sizer.Add(bounds_window, 1, wx.TOP, 5.5) self.buttons_sizer.Add(buttons1_window, 1, wx.TOP, 0) #duplicate high levels plot self.fig = Figure((2.5 * self.GUI_RESOLUTION, 2.5 * self.GUI_RESOLUTION), dpi=100) self.canvas = FigCanvas(self.panel, -1, self.fig) self.toolbar = NavigationToolbar(self.canvas) self.toolbar.Hide() self.toolbar.zoom() self.high_EA_setting = 'Zoom' self.canvas.Bind(wx.EVT_LEFT_DCLICK, self.on_equalarea_high_select) self.canvas.Bind(wx.EVT_MOTION, self.on_change_high_mouse_cursor) self.canvas.Bind(wx.EVT_MIDDLE_DOWN, self.home_high_equalarea) self.canvas.Bind(wx.EVT_RIGHT_DOWN, self.pan_zoom_high_equalarea) self.canvas.SetHelpText(dieh.eqarea_help) self.eqarea = self.fig.add_subplot(111) draw_net(self.eqarea) #Higher Level Statistics Box self.stats_sizer = wx.StaticBoxSizer(wx.StaticBox(self.panel, wx.ID_ANY, 'mean statistics'), wx.VERTICAL) for parameter in ['mean_type', 'dec', 'inc', 'alpha95', 'K', 'R', 'n_lines', 'n_planes']: COMMAND = 'self.%s_window=wx.TextCtrl(self.panel,style=wx.TE_CENTER|wx.TE_READONLY,size=(100*self.GUI_RESOLUTION,25))' % parameter exec(COMMAND) COMMAND = 'self.%s_window.SetBackgroundColour(wx.WHITE)' % parameter exec(COMMAND) COMMAND = 'self.%s_window.SetFont(font2)' % parameter exec(COMMAND) COMMAND = 'self.%s_outer_window = wx.GridSizer(1,2,5*self.GUI_RESOLUTION,15*self.GUI_RESOLUTION)' % parameter exec(COMMAND) COMMAND = "self.%s_outer_window.AddMany([\n (wx.StaticText(self.panel,label='%s',style=wx.TE_CENTER),wx.EXPAND),\n (self.%s_window, wx.EXPAND)])" % (parameter, parameter, parameter) exec(COMMAND) COMMAND = 'self.stats_sizer.Add(self.%s_outer_window, 1, wx.ALIGN_LEFT|wx.EXPAND, 0)' % parameter exec(COMMAND) # depends on [control=['for'], data=['parameter']] self.switch_stats_button = wx.SpinButton(self.panel, id=wx.ID_ANY, style=wx.SP_HORIZONTAL | wx.SP_ARROW_KEYS | wx.SP_WRAP, name='change stats') self.Bind(wx.EVT_SPIN, self.on_select_stats_button, self.switch_stats_button) self.switch_stats_button.SetHelpText(dieh.switch_stats_btn_help) #construct panel hbox0 = wx.BoxSizer(wx.HORIZONTAL) hbox0.Add(self.name_sizer, flag=wx.ALIGN_TOP | wx.EXPAND, border=8) hbox0.Add(self.bounds_sizer, flag=wx.ALIGN_TOP | wx.EXPAND, border=8) vbox0 = wx.BoxSizer(wx.VERTICAL) vbox0.Add(hbox0, flag=wx.ALIGN_TOP, border=8) vbox0.Add(self.buttons_sizer, flag=wx.ALIGN_TOP, border=8) hbox1 = wx.BoxSizer(wx.HORIZONTAL) hbox1.Add(vbox0, flag=wx.ALIGN_TOP, border=8) hbox1.Add(self.stats_sizer, flag=wx.ALIGN_TOP, border=8) hbox1.Add(self.switch_stats_button, flag=wx.ALIGN_TOP | wx.EXPAND, border=8) vbox1 = wx.BoxSizer(wx.VERTICAL) vbox1.Add(self.display_sizer, flag=wx.ALIGN_TOP, border=8) vbox1.Add(hbox1, flag=wx.ALIGN_TOP, border=8) vbox1.Add(self.canvas, proportion=1, flag=wx.ALIGN_CENTER_HORIZONTAL | wx.ALIGN_CENTER_VERTICAL | wx.EXPAND, border=8) vbox2 = wx.BoxSizer(wx.VERTICAL) vbox2.Add(self.search_bar, proportion=0.5, flag=wx.ALIGN_LEFT | wx.ALIGN_BOTTOM | wx.EXPAND, border=8) vbox2.Add(self.logger, proportion=1, flag=wx.ALIGN_LEFT | wx.EXPAND, border=8) hbox2 = wx.BoxSizer(wx.HORIZONTAL) hbox2.Add(vbox2, proportion=1, flag=wx.ALIGN_LEFT | wx.EXPAND) hbox2.Add(vbox1, flag=wx.ALIGN_TOP | wx.EXPAND) self.panel.SetSizerAndFit(hbox2) hbox2.Fit(self)
def _check_methods(cls, subclass): # pylint: disable=too-many-branches """ Args: cls(:py:class:`Plugin`): Parent class subclass(:py:class:`Plugin`): Subclass to evaluate Returns: Result: Named tuple Validate abstract methods are defined in subclass For error codes see _inspect_class """ for meth, methobj in cls.__abstractmethods__.items(): # Need to get attribute from dictionary for instance tests to work for base in subclass.__mro__: # pragma: no branch if meth in base.__dict__: submethobj = base.__dict__[meth] break # If we found our abstract method, we didn't find anything if submethobj is methobj: submethobj = UNDEFINED # Determine if we have the right method type result = None bad_arg_spec = 'Argument spec does not match parent for method %s' # pylint: disable=deprecated-method if isinstance(methobj, property): if submethobj is UNDEFINED or not isinstance(submethobj, property): result = Result(False, 'Does not contain required property (%s)' % meth, 210) elif isinstance(methobj, staticmethod): if submethobj is UNDEFINED or not isinstance(submethobj, staticmethod): result = Result(False, 'Does not contain required static method (%s)' % meth, 211) elif PY26: # pragma: no cover if getfullargspec(methobj.__get__(True)) != \ getfullargspec(submethobj.__get__(True)): result = Result(False, bad_arg_spec % meth, 220) elif getfullargspec(methobj.__func__) != getfullargspec(submethobj.__func__): result = Result(False, bad_arg_spec % meth, 220) elif isinstance(methobj, classmethod): if submethobj is UNDEFINED or not isinstance(submethobj, classmethod): result = Result(False, 'Does not contain required class method (%s)' % meth, 212) elif PY26: # pragma: no cover if getfullargspec(methobj.__get__(True).__func__) != \ getfullargspec(submethobj.__get__(True).__func__): result = Result(False, bad_arg_spec % meth, 220) elif getfullargspec(methobj.__func__) != getfullargspec(submethobj.__func__): result = Result(False, bad_arg_spec % meth, 220) elif isfunction(methobj): if submethobj is UNDEFINED or not isfunction(submethobj): result = Result(False, 'Does not contain required method (%s)' % meth, 213) elif getfullargspec(methobj) != getfullargspec(submethobj): result = Result(False, bad_arg_spec % meth, 220) # If it's not a type we're specifically checking, just check for existence elif submethobj is UNDEFINED: result = Result(False, 'Does not contain required attribute (%s)' % meth, 214) if result: return result return Result(True, None, 0)
def function[_check_methods, parameter[cls, subclass]]: constant[ Args: cls(:py:class:`Plugin`): Parent class subclass(:py:class:`Plugin`): Subclass to evaluate Returns: Result: Named tuple Validate abstract methods are defined in subclass For error codes see _inspect_class ] for taget[tuple[[<ast.Name object at 0x7da1b0b3a110>, <ast.Name object at 0x7da1b0b392d0>]]] in starred[call[name[cls].__abstractmethods__.items, parameter[]]] begin[:] for taget[name[base]] in starred[name[subclass].__mro__] begin[:] if compare[name[meth] in name[base].__dict__] begin[:] variable[submethobj] assign[=] call[name[base].__dict__][name[meth]] break if compare[name[submethobj] is name[methobj]] begin[:] variable[submethobj] assign[=] name[UNDEFINED] variable[result] assign[=] constant[None] variable[bad_arg_spec] assign[=] constant[Argument spec does not match parent for method %s] if call[name[isinstance], parameter[name[methobj], name[property]]] begin[:] if <ast.BoolOp object at 0x7da1b0ac7490> begin[:] variable[result] assign[=] call[name[Result], parameter[constant[False], binary_operation[constant[Does not contain required property (%s)] <ast.Mod object at 0x7da2590d6920> name[meth]], constant[210]]] if name[result] begin[:] return[name[result]] return[call[name[Result], parameter[constant[True], constant[None], constant[0]]]]
keyword[def] identifier[_check_methods] ( identifier[cls] , identifier[subclass] ): literal[string] keyword[for] identifier[meth] , identifier[methobj] keyword[in] identifier[cls] . identifier[__abstractmethods__] . identifier[items] (): keyword[for] identifier[base] keyword[in] identifier[subclass] . identifier[__mro__] : keyword[if] identifier[meth] keyword[in] identifier[base] . identifier[__dict__] : identifier[submethobj] = identifier[base] . identifier[__dict__] [ identifier[meth] ] keyword[break] keyword[if] identifier[submethobj] keyword[is] identifier[methobj] : identifier[submethobj] = identifier[UNDEFINED] identifier[result] = keyword[None] identifier[bad_arg_spec] = literal[string] keyword[if] identifier[isinstance] ( identifier[methobj] , identifier[property] ): keyword[if] identifier[submethobj] keyword[is] identifier[UNDEFINED] keyword[or] keyword[not] identifier[isinstance] ( identifier[submethobj] , identifier[property] ): identifier[result] = identifier[Result] ( keyword[False] , literal[string] % identifier[meth] , literal[int] ) keyword[elif] identifier[isinstance] ( identifier[methobj] , identifier[staticmethod] ): keyword[if] identifier[submethobj] keyword[is] identifier[UNDEFINED] keyword[or] keyword[not] identifier[isinstance] ( identifier[submethobj] , identifier[staticmethod] ): identifier[result] = identifier[Result] ( keyword[False] , literal[string] % identifier[meth] , literal[int] ) keyword[elif] identifier[PY26] : keyword[if] identifier[getfullargspec] ( identifier[methobj] . identifier[__get__] ( keyword[True] ))!= identifier[getfullargspec] ( identifier[submethobj] . identifier[__get__] ( keyword[True] )): identifier[result] = identifier[Result] ( keyword[False] , identifier[bad_arg_spec] % identifier[meth] , literal[int] ) keyword[elif] identifier[getfullargspec] ( identifier[methobj] . identifier[__func__] )!= identifier[getfullargspec] ( identifier[submethobj] . identifier[__func__] ): identifier[result] = identifier[Result] ( keyword[False] , identifier[bad_arg_spec] % identifier[meth] , literal[int] ) keyword[elif] identifier[isinstance] ( identifier[methobj] , identifier[classmethod] ): keyword[if] identifier[submethobj] keyword[is] identifier[UNDEFINED] keyword[or] keyword[not] identifier[isinstance] ( identifier[submethobj] , identifier[classmethod] ): identifier[result] = identifier[Result] ( keyword[False] , literal[string] % identifier[meth] , literal[int] ) keyword[elif] identifier[PY26] : keyword[if] identifier[getfullargspec] ( identifier[methobj] . identifier[__get__] ( keyword[True] ). identifier[__func__] )!= identifier[getfullargspec] ( identifier[submethobj] . identifier[__get__] ( keyword[True] ). identifier[__func__] ): identifier[result] = identifier[Result] ( keyword[False] , identifier[bad_arg_spec] % identifier[meth] , literal[int] ) keyword[elif] identifier[getfullargspec] ( identifier[methobj] . identifier[__func__] )!= identifier[getfullargspec] ( identifier[submethobj] . identifier[__func__] ): identifier[result] = identifier[Result] ( keyword[False] , identifier[bad_arg_spec] % identifier[meth] , literal[int] ) keyword[elif] identifier[isfunction] ( identifier[methobj] ): keyword[if] identifier[submethobj] keyword[is] identifier[UNDEFINED] keyword[or] keyword[not] identifier[isfunction] ( identifier[submethobj] ): identifier[result] = identifier[Result] ( keyword[False] , literal[string] % identifier[meth] , literal[int] ) keyword[elif] identifier[getfullargspec] ( identifier[methobj] )!= identifier[getfullargspec] ( identifier[submethobj] ): identifier[result] = identifier[Result] ( keyword[False] , identifier[bad_arg_spec] % identifier[meth] , literal[int] ) keyword[elif] identifier[submethobj] keyword[is] identifier[UNDEFINED] : identifier[result] = identifier[Result] ( keyword[False] , literal[string] % identifier[meth] , literal[int] ) keyword[if] identifier[result] : keyword[return] identifier[result] keyword[return] identifier[Result] ( keyword[True] , keyword[None] , literal[int] )
def _check_methods(cls, subclass): # pylint: disable=too-many-branches '\n Args:\n cls(:py:class:`Plugin`): Parent class\n subclass(:py:class:`Plugin`): Subclass to evaluate\n\n Returns:\n Result: Named tuple\n\n Validate abstract methods are defined in subclass\n For error codes see _inspect_class\n ' for (meth, methobj) in cls.__abstractmethods__.items(): # Need to get attribute from dictionary for instance tests to work for base in subclass.__mro__: # pragma: no branch if meth in base.__dict__: submethobj = base.__dict__[meth] break # depends on [control=['if'], data=['meth']] # depends on [control=['for'], data=['base']] # If we found our abstract method, we didn't find anything if submethobj is methobj: submethobj = UNDEFINED # depends on [control=['if'], data=['submethobj']] # Determine if we have the right method type result = None bad_arg_spec = 'Argument spec does not match parent for method %s' # pylint: disable=deprecated-method if isinstance(methobj, property): if submethobj is UNDEFINED or not isinstance(submethobj, property): result = Result(False, 'Does not contain required property (%s)' % meth, 210) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(methobj, staticmethod): if submethobj is UNDEFINED or not isinstance(submethobj, staticmethod): result = Result(False, 'Does not contain required static method (%s)' % meth, 211) # depends on [control=['if'], data=[]] elif PY26: # pragma: no cover if getfullargspec(methobj.__get__(True)) != getfullargspec(submethobj.__get__(True)): result = Result(False, bad_arg_spec % meth, 220) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif getfullargspec(methobj.__func__) != getfullargspec(submethobj.__func__): result = Result(False, bad_arg_spec % meth, 220) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(methobj, classmethod): if submethobj is UNDEFINED or not isinstance(submethobj, classmethod): result = Result(False, 'Does not contain required class method (%s)' % meth, 212) # depends on [control=['if'], data=[]] elif PY26: # pragma: no cover if getfullargspec(methobj.__get__(True).__func__) != getfullargspec(submethobj.__get__(True).__func__): result = Result(False, bad_arg_spec % meth, 220) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif getfullargspec(methobj.__func__) != getfullargspec(submethobj.__func__): result = Result(False, bad_arg_spec % meth, 220) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isfunction(methobj): if submethobj is UNDEFINED or not isfunction(submethobj): result = Result(False, 'Does not contain required method (%s)' % meth, 213) # depends on [control=['if'], data=[]] elif getfullargspec(methobj) != getfullargspec(submethobj): result = Result(False, bad_arg_spec % meth, 220) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # If it's not a type we're specifically checking, just check for existence elif submethobj is UNDEFINED: result = Result(False, 'Does not contain required attribute (%s)' % meth, 214) # depends on [control=['if'], data=[]] if result: return result # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return Result(True, None, 0)
def warning(request, message, extra_tags='', fail_silently=False): """Adds a message with the ``WARNING`` level.""" add_message(request, constants.WARNING, message, extra_tags=extra_tags, fail_silently=fail_silently)
def function[warning, parameter[request, message, extra_tags, fail_silently]]: constant[Adds a message with the ``WARNING`` level.] call[name[add_message], parameter[name[request], name[constants].WARNING, name[message]]]
keyword[def] identifier[warning] ( identifier[request] , identifier[message] , identifier[extra_tags] = literal[string] , identifier[fail_silently] = keyword[False] ): literal[string] identifier[add_message] ( identifier[request] , identifier[constants] . identifier[WARNING] , identifier[message] , identifier[extra_tags] = identifier[extra_tags] , identifier[fail_silently] = identifier[fail_silently] )
def warning(request, message, extra_tags='', fail_silently=False): """Adds a message with the ``WARNING`` level.""" add_message(request, constants.WARNING, message, extra_tags=extra_tags, fail_silently=fail_silently)
def listDatasets(self, dataset="", parent_dataset="", is_dataset_valid=1, release_version="", pset_hash="", app_name="", output_module_label="", global_tag="", processing_version=0, acquisition_era_name="", run_num=-1, physics_group_name="", logical_file_name="", primary_ds_name="", primary_ds_type="", processed_ds_name='', data_tier_name="", dataset_access_type="VALID", prep_id='', create_by="", last_modified_by="", min_cdate='0', max_cdate='0', min_ldate='0', max_ldate='0', cdate='0', ldate='0', detail=False, dataset_id=-1): """ API to list dataset(s) in DBS * You can use ANY combination of these parameters in this API * In absence of parameters, all valid datasets known to the DBS instance will be returned :param dataset: Full dataset (path) of the dataset. :type dataset: str :param parent_dataset: Full dataset (path) of the dataset :type parent_dataset: str :param release_version: cmssw version :type release_version: str :param pset_hash: pset hash :type pset_hash: str :param app_name: Application name (generally it is cmsRun) :type app_name: str :param output_module_label: output_module_label :type output_module_label: str :param global_tag: global_tag :type global_tag: str :param processing_version: Processing Version :type processing_version: str :param acquisition_era_name: Acquisition Era :type acquisition_era_name: str :param run_num: Specify a specific run number or range. Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...]. run_num=1 is not allowed. :type run_num: int,list,str :param physics_group_name: List only dataset having physics_group_name attribute :type physics_group_name: str :param logical_file_name: List dataset containing the logical_file_name :type logical_file_name: str :param primary_ds_name: Primary Dataset Name :type primary_ds_name: str :param primary_ds_type: Primary Dataset Type (Type of data, MC/DATA) :type primary_ds_type: str :param processed_ds_name: List datasets having this processed dataset name :type processed_ds_name: str :param data_tier_name: Data Tier :type data_tier_name: str :param dataset_access_type: Dataset Access Type ( PRODUCTION, DEPRECATED etc.) :type dataset_access_type: str :param prep_id: prep_id :type prep_id: str :param create_by: Creator of the dataset :type create_by: str :param last_modified_by: Last modifier of the dataset :type last_modified_by: str :param min_cdate: Lower limit for the creation date (unixtime) (Optional) :type min_cdate: int, str :param max_cdate: Upper limit for the creation date (unixtime) (Optional) :type max_cdate: int, str :param min_ldate: Lower limit for the last modification date (unixtime) (Optional) :type min_ldate: int, str :param max_ldate: Upper limit for the last modification date (unixtime) (Optional) :type max_ldate: int, str :param cdate: creation date (unixtime) (Optional) :type cdate: int, str :param ldate: last modification date (unixtime) (Optional) :type ldate: int, str :param detail: List all details of a dataset :type detail: bool :param dataset_id: dataset table primary key used by CMS Computing Analytics. :type dataset_id: int, long, str :returns: List of dictionaries containing the following keys (dataset). If the detail option is used. The dictionary contain the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type) :rtype: list of dicts """ dataset = dataset.replace("*", "%") parent_dataset = parent_dataset.replace("*", "%") release_version = release_version.replace("*", "%") pset_hash = pset_hash.replace("*", "%") app_name = app_name.replace("*", "%") output_module_label = output_module_label.replace("*", "%") global_tag = global_tag.replace("*", "%") logical_file_name = logical_file_name.replace("*", "%") physics_group_name = physics_group_name.replace("*", "%") primary_ds_name = primary_ds_name.replace("*", "%") primary_ds_type = primary_ds_type.replace("*", "%") data_tier_name = data_tier_name.replace("*", "%") dataset_access_type = dataset_access_type.replace("*", "%") processed_ds_name = processed_ds_name.replace("*", "%") acquisition_era_name = acquisition_era_name.replace("*", "%") #processing_version = processing_version.replace("*", "%") #create_by and last_modified_by have be full spelled, no wildcard will allowed. #We got them from request head so they can be either HN account name or DN. #This is depended on how an user's account is set up. # # In the next release we will require dataset has no wildcard in it. # DBS will reject wildcard search with dataset name with listDatasets call. # One should seperate the dataset into primary , process and datatier if any wildcard. # YG Oct 26, 2016 # Some of users were overwhiled by the API change. So we split the wildcarded dataset in the server instead of by the client. # YG Dec. 9 2016 # # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK when logical_file_name is given. # YG Jan. 15 2019 # if (run_num != -1 and logical_file_name ==''): for r in parseRunRange(run_num): if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): if r == 1 or r == '1': dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.", self.logger.exception) elif isinstance(r, run_tuple): if r[0] == r[1]: dbsExceptionHandler('dbsException-invalid-input', "DBS run range must be apart at least by 1.", self.logger.exception) elif r[0] <= 1 <= r[1]: dbsExceptionHandler("dbsException-invalid-input", "Run_num=1 is not a valid input.", self.logger.exception) if( dataset and ( dataset == "/%/%/%" or dataset== "/%" or dataset == "/%/%" ) ): dataset='' elif( dataset and ( dataset.find('%') != -1 ) ) : junk, primary_ds_name, processed_ds_name, data_tier_name = dataset.split('/') dataset = '' if ( primary_ds_name == '%' ): primary_ds_name = '' if( processed_ds_name == '%' ): processed_ds_name = '' if ( data_tier_name == '%' ): data_tier_name = '' try: dataset_id = int(dataset_id) except: dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input for dataset_id that has to be an int.", self.logger.exception, 'dataset_id has to be an int.') if create_by.find('*')!=-1 or create_by.find('%')!=-1 or last_modified_by.find('*')!=-1\ or last_modified_by.find('%')!=-1: dbsExceptionHandler("dbsException-invalid-input2", "Invalid Input for create_by or last_modified_by.\ No wildcard allowed.", self.logger.exception, 'No wildcards allowed for create_by or last_modified_by') try: if isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate): min_cdate = 0 else: try: min_cdate = int(min_cdate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_cdate") if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate): max_cdate = 0 else: try: max_cdate = int(max_cdate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_cdate") if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate): min_ldate = 0 else: try: min_ldate = int(min_ldate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for min_ldate") if isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate): max_ldate = 0 else: try: max_ldate = int(max_ldate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for max_ldate") if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate): cdate = 0 else: try: cdate = int(cdate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for cdate") if isinstance(ldate, basestring) and ('*' in ldate or '%' in ldate): ldate = 0 else: try: ldate = int(ldate) except: dbsExceptionHandler("dbsException-invalid-input", "invalid input for ldate") except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listDatasets. %s \n. Exception trace: \n %s" \ % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError) detail = detail in (True, 1, "True", "1", 'true') try: return self.dbsDataset.listDatasets(dataset, parent_dataset, is_dataset_valid, release_version, pset_hash, app_name, output_module_label, global_tag, processing_version, acquisition_era_name, run_num, physics_group_name, logical_file_name, primary_ds_name, primary_ds_type, processed_ds_name, data_tier_name, dataset_access_type, prep_id, create_by, last_modified_by, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate, detail, dataset_id) except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) except Exception as ex: sError = "DBSReaderModel/listdatasets. %s.\n Exception trace: \n %s" % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
def function[listDatasets, parameter[self, dataset, parent_dataset, is_dataset_valid, release_version, pset_hash, app_name, output_module_label, global_tag, processing_version, acquisition_era_name, run_num, physics_group_name, logical_file_name, primary_ds_name, primary_ds_type, processed_ds_name, data_tier_name, dataset_access_type, prep_id, create_by, last_modified_by, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate, detail, dataset_id]]: constant[ API to list dataset(s) in DBS * You can use ANY combination of these parameters in this API * In absence of parameters, all valid datasets known to the DBS instance will be returned :param dataset: Full dataset (path) of the dataset. :type dataset: str :param parent_dataset: Full dataset (path) of the dataset :type parent_dataset: str :param release_version: cmssw version :type release_version: str :param pset_hash: pset hash :type pset_hash: str :param app_name: Application name (generally it is cmsRun) :type app_name: str :param output_module_label: output_module_label :type output_module_label: str :param global_tag: global_tag :type global_tag: str :param processing_version: Processing Version :type processing_version: str :param acquisition_era_name: Acquisition Era :type acquisition_era_name: str :param run_num: Specify a specific run number or range. Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...]. run_num=1 is not allowed. :type run_num: int,list,str :param physics_group_name: List only dataset having physics_group_name attribute :type physics_group_name: str :param logical_file_name: List dataset containing the logical_file_name :type logical_file_name: str :param primary_ds_name: Primary Dataset Name :type primary_ds_name: str :param primary_ds_type: Primary Dataset Type (Type of data, MC/DATA) :type primary_ds_type: str :param processed_ds_name: List datasets having this processed dataset name :type processed_ds_name: str :param data_tier_name: Data Tier :type data_tier_name: str :param dataset_access_type: Dataset Access Type ( PRODUCTION, DEPRECATED etc.) :type dataset_access_type: str :param prep_id: prep_id :type prep_id: str :param create_by: Creator of the dataset :type create_by: str :param last_modified_by: Last modifier of the dataset :type last_modified_by: str :param min_cdate: Lower limit for the creation date (unixtime) (Optional) :type min_cdate: int, str :param max_cdate: Upper limit for the creation date (unixtime) (Optional) :type max_cdate: int, str :param min_ldate: Lower limit for the last modification date (unixtime) (Optional) :type min_ldate: int, str :param max_ldate: Upper limit for the last modification date (unixtime) (Optional) :type max_ldate: int, str :param cdate: creation date (unixtime) (Optional) :type cdate: int, str :param ldate: last modification date (unixtime) (Optional) :type ldate: int, str :param detail: List all details of a dataset :type detail: bool :param dataset_id: dataset table primary key used by CMS Computing Analytics. :type dataset_id: int, long, str :returns: List of dictionaries containing the following keys (dataset). If the detail option is used. The dictionary contain the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type) :rtype: list of dicts ] variable[dataset] assign[=] call[name[dataset].replace, parameter[constant[*], constant[%]]] variable[parent_dataset] assign[=] call[name[parent_dataset].replace, parameter[constant[*], constant[%]]] variable[release_version] assign[=] call[name[release_version].replace, parameter[constant[*], constant[%]]] variable[pset_hash] assign[=] call[name[pset_hash].replace, parameter[constant[*], constant[%]]] variable[app_name] assign[=] call[name[app_name].replace, parameter[constant[*], constant[%]]] variable[output_module_label] assign[=] call[name[output_module_label].replace, parameter[constant[*], constant[%]]] variable[global_tag] assign[=] call[name[global_tag].replace, parameter[constant[*], constant[%]]] variable[logical_file_name] assign[=] call[name[logical_file_name].replace, parameter[constant[*], constant[%]]] variable[physics_group_name] assign[=] call[name[physics_group_name].replace, parameter[constant[*], constant[%]]] variable[primary_ds_name] assign[=] call[name[primary_ds_name].replace, parameter[constant[*], constant[%]]] variable[primary_ds_type] assign[=] call[name[primary_ds_type].replace, parameter[constant[*], constant[%]]] variable[data_tier_name] assign[=] call[name[data_tier_name].replace, parameter[constant[*], constant[%]]] variable[dataset_access_type] assign[=] call[name[dataset_access_type].replace, parameter[constant[*], constant[%]]] variable[processed_ds_name] assign[=] call[name[processed_ds_name].replace, parameter[constant[*], constant[%]]] variable[acquisition_era_name] assign[=] call[name[acquisition_era_name].replace, parameter[constant[*], constant[%]]] if <ast.BoolOp object at 0x7da20c6a9450> begin[:] for taget[name[r]] in starred[call[name[parseRunRange], parameter[name[run_num]]]] begin[:] if <ast.BoolOp object at 0x7da20c6aa530> begin[:] if <ast.BoolOp object at 0x7da20c6abdf0> begin[:] call[name[dbsExceptionHandler], parameter[constant[dbsException-invalid-input], constant[Run_num=1 is not a valid input.], name[self].logger.exception]] if <ast.BoolOp object at 0x7da20c6aa800> begin[:] variable[dataset] assign[=] constant[] if compare[name[primary_ds_name] equal[==] constant[%]] begin[:] variable[primary_ds_name] assign[=] constant[] if compare[name[processed_ds_name] equal[==] constant[%]] begin[:] variable[processed_ds_name] assign[=] constant[] if compare[name[data_tier_name] equal[==] constant[%]] begin[:] variable[data_tier_name] assign[=] constant[] <ast.Try object at 0x7da20c6aa140> if <ast.BoolOp object at 0x7da20c6a94b0> begin[:] call[name[dbsExceptionHandler], parameter[constant[dbsException-invalid-input2], constant[Invalid Input for create_by or last_modified_by. No wildcard allowed.], name[self].logger.exception, constant[No wildcards allowed for create_by or last_modified_by]]] <ast.Try object at 0x7da20c992710> variable[detail] assign[=] compare[name[detail] in tuple[[<ast.Constant object at 0x7da20c993340>, <ast.Constant object at 0x7da20c990730>, <ast.Constant object at 0x7da20c993730>, <ast.Constant object at 0x7da20c991270>, <ast.Constant object at 0x7da20c990a00>]]] <ast.Try object at 0x7da20c991cf0>
keyword[def] identifier[listDatasets] ( identifier[self] , identifier[dataset] = literal[string] , identifier[parent_dataset] = literal[string] , identifier[is_dataset_valid] = literal[int] , identifier[release_version] = literal[string] , identifier[pset_hash] = literal[string] , identifier[app_name] = literal[string] , identifier[output_module_label] = literal[string] , identifier[global_tag] = literal[string] , identifier[processing_version] = literal[int] , identifier[acquisition_era_name] = literal[string] , identifier[run_num] =- literal[int] , identifier[physics_group_name] = literal[string] , identifier[logical_file_name] = literal[string] , identifier[primary_ds_name] = literal[string] , identifier[primary_ds_type] = literal[string] , identifier[processed_ds_name] = literal[string] , identifier[data_tier_name] = literal[string] , identifier[dataset_access_type] = literal[string] , identifier[prep_id] = literal[string] , identifier[create_by] = literal[string] , identifier[last_modified_by] = literal[string] , identifier[min_cdate] = literal[string] , identifier[max_cdate] = literal[string] , identifier[min_ldate] = literal[string] , identifier[max_ldate] = literal[string] , identifier[cdate] = literal[string] , identifier[ldate] = literal[string] , identifier[detail] = keyword[False] , identifier[dataset_id] =- literal[int] ): literal[string] identifier[dataset] = identifier[dataset] . identifier[replace] ( literal[string] , literal[string] ) identifier[parent_dataset] = identifier[parent_dataset] . identifier[replace] ( literal[string] , literal[string] ) identifier[release_version] = identifier[release_version] . identifier[replace] ( literal[string] , literal[string] ) identifier[pset_hash] = identifier[pset_hash] . identifier[replace] ( literal[string] , literal[string] ) identifier[app_name] = identifier[app_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[output_module_label] = identifier[output_module_label] . identifier[replace] ( literal[string] , literal[string] ) identifier[global_tag] = identifier[global_tag] . identifier[replace] ( literal[string] , literal[string] ) identifier[logical_file_name] = identifier[logical_file_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[physics_group_name] = identifier[physics_group_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[primary_ds_name] = identifier[primary_ds_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[primary_ds_type] = identifier[primary_ds_type] . identifier[replace] ( literal[string] , literal[string] ) identifier[data_tier_name] = identifier[data_tier_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[dataset_access_type] = identifier[dataset_access_type] . identifier[replace] ( literal[string] , literal[string] ) identifier[processed_ds_name] = identifier[processed_ds_name] . identifier[replace] ( literal[string] , literal[string] ) identifier[acquisition_era_name] = identifier[acquisition_era_name] . identifier[replace] ( literal[string] , literal[string] ) keyword[if] ( identifier[run_num] !=- literal[int] keyword[and] identifier[logical_file_name] == literal[string] ): keyword[for] identifier[r] keyword[in] identifier[parseRunRange] ( identifier[run_num] ): keyword[if] identifier[isinstance] ( identifier[r] , identifier[basestring] ) keyword[or] identifier[isinstance] ( identifier[r] , identifier[int] ) keyword[or] identifier[isinstance] ( identifier[r] , identifier[long] ): keyword[if] identifier[r] == literal[int] keyword[or] identifier[r] == literal[string] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[logger] . identifier[exception] ) keyword[elif] identifier[isinstance] ( identifier[r] , identifier[run_tuple] ): keyword[if] identifier[r] [ literal[int] ]== identifier[r] [ literal[int] ]: identifier[dbsExceptionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[logger] . identifier[exception] ) keyword[elif] identifier[r] [ literal[int] ]<= literal[int] <= identifier[r] [ literal[int] ]: identifier[dbsExceptionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[logger] . identifier[exception] ) keyword[if] ( identifier[dataset] keyword[and] ( identifier[dataset] == literal[string] keyword[or] identifier[dataset] == literal[string] keyword[or] identifier[dataset] == literal[string] )): identifier[dataset] = literal[string] keyword[elif] ( identifier[dataset] keyword[and] ( identifier[dataset] . identifier[find] ( literal[string] )!=- literal[int] )): identifier[junk] , identifier[primary_ds_name] , identifier[processed_ds_name] , identifier[data_tier_name] = identifier[dataset] . identifier[split] ( literal[string] ) identifier[dataset] = literal[string] keyword[if] ( identifier[primary_ds_name] == literal[string] ): identifier[primary_ds_name] = literal[string] keyword[if] ( identifier[processed_ds_name] == literal[string] ): identifier[processed_ds_name] = literal[string] keyword[if] ( identifier[data_tier_name] == literal[string] ): identifier[data_tier_name] = literal[string] keyword[try] : identifier[dataset_id] = identifier[int] ( identifier[dataset_id] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[logger] . identifier[exception] , literal[string] ) keyword[if] identifier[create_by] . identifier[find] ( literal[string] )!=- literal[int] keyword[or] identifier[create_by] . identifier[find] ( literal[string] )!=- literal[int] keyword[or] identifier[last_modified_by] . identifier[find] ( literal[string] )!=- literal[int] keyword[or] identifier[last_modified_by] . identifier[find] ( literal[string] )!=- literal[int] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] , identifier[self] . identifier[logger] . identifier[exception] , literal[string] ) keyword[try] : keyword[if] identifier[isinstance] ( identifier[min_cdate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[min_cdate] keyword[or] literal[string] keyword[in] identifier[min_cdate] ): identifier[min_cdate] = literal[int] keyword[else] : keyword[try] : identifier[min_cdate] = identifier[int] ( identifier[min_cdate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[max_cdate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[max_cdate] keyword[or] literal[string] keyword[in] identifier[max_cdate] ): identifier[max_cdate] = literal[int] keyword[else] : keyword[try] : identifier[max_cdate] = identifier[int] ( identifier[max_cdate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[min_ldate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[min_ldate] keyword[or] literal[string] keyword[in] identifier[min_ldate] ): identifier[min_ldate] = literal[int] keyword[else] : keyword[try] : identifier[min_ldate] = identifier[int] ( identifier[min_ldate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[max_ldate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[max_ldate] keyword[or] literal[string] keyword[in] identifier[max_ldate] ): identifier[max_ldate] = literal[int] keyword[else] : keyword[try] : identifier[max_ldate] = identifier[int] ( identifier[max_ldate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[cdate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[cdate] keyword[or] literal[string] keyword[in] identifier[cdate] ): identifier[cdate] = literal[int] keyword[else] : keyword[try] : identifier[cdate] = identifier[int] ( identifier[cdate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[if] identifier[isinstance] ( identifier[ldate] , identifier[basestring] ) keyword[and] ( literal[string] keyword[in] identifier[ldate] keyword[or] literal[string] keyword[in] identifier[ldate] ): identifier[ldate] = literal[int] keyword[else] : keyword[try] : identifier[ldate] = identifier[int] ( identifier[ldate] ) keyword[except] : identifier[dbsExceptionHandler] ( literal[string] , literal[string] ) keyword[except] identifier[dbsException] keyword[as] identifier[de] : identifier[dbsExceptionHandler] ( identifier[de] . identifier[eCode] , identifier[de] . identifier[message] , identifier[self] . identifier[logger] . identifier[exception] , identifier[de] . identifier[serverError] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[sError] = literal[string] %( identifier[ex] , identifier[traceback] . identifier[format_exc] ()) identifier[dbsExceptionHandler] ( literal[string] , identifier[dbsExceptionCode] [ literal[string] ], identifier[self] . identifier[logger] . identifier[exception] , identifier[sError] ) identifier[detail] = identifier[detail] keyword[in] ( keyword[True] , literal[int] , literal[string] , literal[string] , literal[string] ) keyword[try] : keyword[return] identifier[self] . identifier[dbsDataset] . identifier[listDatasets] ( identifier[dataset] , identifier[parent_dataset] , identifier[is_dataset_valid] , identifier[release_version] , identifier[pset_hash] , identifier[app_name] , identifier[output_module_label] , identifier[global_tag] , identifier[processing_version] , identifier[acquisition_era_name] , identifier[run_num] , identifier[physics_group_name] , identifier[logical_file_name] , identifier[primary_ds_name] , identifier[primary_ds_type] , identifier[processed_ds_name] , identifier[data_tier_name] , identifier[dataset_access_type] , identifier[prep_id] , identifier[create_by] , identifier[last_modified_by] , identifier[min_cdate] , identifier[max_cdate] , identifier[min_ldate] , identifier[max_ldate] , identifier[cdate] , identifier[ldate] , identifier[detail] , identifier[dataset_id] ) keyword[except] identifier[dbsException] keyword[as] identifier[de] : identifier[dbsExceptionHandler] ( identifier[de] . identifier[eCode] , identifier[de] . identifier[message] , identifier[self] . identifier[logger] . identifier[exception] , identifier[de] . identifier[serverError] ) keyword[except] identifier[Exception] keyword[as] identifier[ex] : identifier[sError] = literal[string] %( identifier[ex] , identifier[traceback] . identifier[format_exc] ()) identifier[dbsExceptionHandler] ( literal[string] , identifier[dbsExceptionCode] [ literal[string] ], identifier[self] . identifier[logger] . identifier[exception] , identifier[sError] )
def listDatasets(self, dataset='', parent_dataset='', is_dataset_valid=1, release_version='', pset_hash='', app_name='', output_module_label='', global_tag='', processing_version=0, acquisition_era_name='', run_num=-1, physics_group_name='', logical_file_name='', primary_ds_name='', primary_ds_type='', processed_ds_name='', data_tier_name='', dataset_access_type='VALID', prep_id='', create_by='', last_modified_by='', min_cdate='0', max_cdate='0', min_ldate='0', max_ldate='0', cdate='0', ldate='0', detail=False, dataset_id=-1): """ API to list dataset(s) in DBS * You can use ANY combination of these parameters in this API * In absence of parameters, all valid datasets known to the DBS instance will be returned :param dataset: Full dataset (path) of the dataset. :type dataset: str :param parent_dataset: Full dataset (path) of the dataset :type parent_dataset: str :param release_version: cmssw version :type release_version: str :param pset_hash: pset hash :type pset_hash: str :param app_name: Application name (generally it is cmsRun) :type app_name: str :param output_module_label: output_module_label :type output_module_label: str :param global_tag: global_tag :type global_tag: str :param processing_version: Processing Version :type processing_version: str :param acquisition_era_name: Acquisition Era :type acquisition_era_name: str :param run_num: Specify a specific run number or range. Possible format are: run_num, 'run_min-run_max' or ['run_min-run_max', run1, run2, ...]. run_num=1 is not allowed. :type run_num: int,list,str :param physics_group_name: List only dataset having physics_group_name attribute :type physics_group_name: str :param logical_file_name: List dataset containing the logical_file_name :type logical_file_name: str :param primary_ds_name: Primary Dataset Name :type primary_ds_name: str :param primary_ds_type: Primary Dataset Type (Type of data, MC/DATA) :type primary_ds_type: str :param processed_ds_name: List datasets having this processed dataset name :type processed_ds_name: str :param data_tier_name: Data Tier :type data_tier_name: str :param dataset_access_type: Dataset Access Type ( PRODUCTION, DEPRECATED etc.) :type dataset_access_type: str :param prep_id: prep_id :type prep_id: str :param create_by: Creator of the dataset :type create_by: str :param last_modified_by: Last modifier of the dataset :type last_modified_by: str :param min_cdate: Lower limit for the creation date (unixtime) (Optional) :type min_cdate: int, str :param max_cdate: Upper limit for the creation date (unixtime) (Optional) :type max_cdate: int, str :param min_ldate: Lower limit for the last modification date (unixtime) (Optional) :type min_ldate: int, str :param max_ldate: Upper limit for the last modification date (unixtime) (Optional) :type max_ldate: int, str :param cdate: creation date (unixtime) (Optional) :type cdate: int, str :param ldate: last modification date (unixtime) (Optional) :type ldate: int, str :param detail: List all details of a dataset :type detail: bool :param dataset_id: dataset table primary key used by CMS Computing Analytics. :type dataset_id: int, long, str :returns: List of dictionaries containing the following keys (dataset). If the detail option is used. The dictionary contain the following keys (primary_ds_name, physics_group_name, acquisition_era_name, create_by, dataset_access_type, data_tier_name, last_modified_by, creation_date, processing_version, processed_ds_name, xtcrosssection, last_modification_date, dataset_id, dataset, prep_id, primary_ds_type) :rtype: list of dicts """ dataset = dataset.replace('*', '%') parent_dataset = parent_dataset.replace('*', '%') release_version = release_version.replace('*', '%') pset_hash = pset_hash.replace('*', '%') app_name = app_name.replace('*', '%') output_module_label = output_module_label.replace('*', '%') global_tag = global_tag.replace('*', '%') logical_file_name = logical_file_name.replace('*', '%') physics_group_name = physics_group_name.replace('*', '%') primary_ds_name = primary_ds_name.replace('*', '%') primary_ds_type = primary_ds_type.replace('*', '%') data_tier_name = data_tier_name.replace('*', '%') dataset_access_type = dataset_access_type.replace('*', '%') processed_ds_name = processed_ds_name.replace('*', '%') acquisition_era_name = acquisition_era_name.replace('*', '%') #processing_version = processing_version.replace("*", "%") #create_by and last_modified_by have be full spelled, no wildcard will allowed. #We got them from request head so they can be either HN account name or DN. #This is depended on how an user's account is set up. # # In the next release we will require dataset has no wildcard in it. # DBS will reject wildcard search with dataset name with listDatasets call. # One should seperate the dataset into primary , process and datatier if any wildcard. # YG Oct 26, 2016 # Some of users were overwhiled by the API change. So we split the wildcarded dataset in the server instead of by the client. # YG Dec. 9 2016 # # run_num=1 caused full table scan and CERN DBS reported some of the queries ran more than 50 hours # We will disbale all the run_num=1 calls in DBS. Run_num=1 will be OK when logical_file_name is given. # YG Jan. 15 2019 # if run_num != -1 and logical_file_name == '': for r in parseRunRange(run_num): if isinstance(r, basestring) or isinstance(r, int) or isinstance(r, long): if r == 1 or r == '1': dbsExceptionHandler('dbsException-invalid-input', 'Run_num=1 is not a valid input.', self.logger.exception) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif isinstance(r, run_tuple): if r[0] == r[1]: dbsExceptionHandler('dbsException-invalid-input', 'DBS run range must be apart at least by 1.', self.logger.exception) # depends on [control=['if'], data=[]] elif r[0] <= 1 <= r[1]: dbsExceptionHandler('dbsException-invalid-input', 'Run_num=1 is not a valid input.', self.logger.exception) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['r']] # depends on [control=['if'], data=[]] if dataset and (dataset == '/%/%/%' or dataset == '/%' or dataset == '/%/%'): dataset = '' # depends on [control=['if'], data=[]] elif dataset and dataset.find('%') != -1: (junk, primary_ds_name, processed_ds_name, data_tier_name) = dataset.split('/') dataset = '' # depends on [control=['if'], data=[]] if primary_ds_name == '%': primary_ds_name = '' # depends on [control=['if'], data=['primary_ds_name']] if processed_ds_name == '%': processed_ds_name = '' # depends on [control=['if'], data=['processed_ds_name']] if data_tier_name == '%': data_tier_name = '' # depends on [control=['if'], data=['data_tier_name']] try: dataset_id = int(dataset_id) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input2', 'Invalid Input for dataset_id that has to be an int.', self.logger.exception, 'dataset_id has to be an int.') # depends on [control=['except'], data=[]] if create_by.find('*') != -1 or create_by.find('%') != -1 or last_modified_by.find('*') != -1 or (last_modified_by.find('%') != -1): dbsExceptionHandler('dbsException-invalid-input2', 'Invalid Input for create_by or last_modified_by. No wildcard allowed.', self.logger.exception, 'No wildcards allowed for create_by or last_modified_by') # depends on [control=['if'], data=[]] try: if isinstance(min_cdate, basestring) and ('*' in min_cdate or '%' in min_cdate): min_cdate = 0 # depends on [control=['if'], data=[]] else: try: min_cdate = int(min_cdate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for min_cdate') # depends on [control=['except'], data=[]] if isinstance(max_cdate, basestring) and ('*' in max_cdate or '%' in max_cdate): max_cdate = 0 # depends on [control=['if'], data=[]] else: try: max_cdate = int(max_cdate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for max_cdate') # depends on [control=['except'], data=[]] if isinstance(min_ldate, basestring) and ('*' in min_ldate or '%' in min_ldate): min_ldate = 0 # depends on [control=['if'], data=[]] else: try: min_ldate = int(min_ldate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for min_ldate') # depends on [control=['except'], data=[]] if isinstance(max_ldate, basestring) and ('*' in max_ldate or '%' in max_ldate): max_ldate = 0 # depends on [control=['if'], data=[]] else: try: max_ldate = int(max_ldate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for max_ldate') # depends on [control=['except'], data=[]] if isinstance(cdate, basestring) and ('*' in cdate or '%' in cdate): cdate = 0 # depends on [control=['if'], data=[]] else: try: cdate = int(cdate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for cdate') # depends on [control=['except'], data=[]] if isinstance(ldate, basestring) and ('*' in ldate or '%' in ldate): ldate = 0 # depends on [control=['if'], data=[]] else: try: ldate = int(ldate) # depends on [control=['try'], data=[]] except: dbsExceptionHandler('dbsException-invalid-input', 'invalid input for ldate') # depends on [control=['except'], data=[]] # depends on [control=['try'], data=[]] except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) # depends on [control=['except'], data=['de']] except Exception as ex: sError = 'DBSReaderModel/listDatasets. %s \n. Exception trace: \n %s' % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError) # depends on [control=['except'], data=['ex']] detail = detail in (True, 1, 'True', '1', 'true') try: return self.dbsDataset.listDatasets(dataset, parent_dataset, is_dataset_valid, release_version, pset_hash, app_name, output_module_label, global_tag, processing_version, acquisition_era_name, run_num, physics_group_name, logical_file_name, primary_ds_name, primary_ds_type, processed_ds_name, data_tier_name, dataset_access_type, prep_id, create_by, last_modified_by, min_cdate, max_cdate, min_ldate, max_ldate, cdate, ldate, detail, dataset_id) # depends on [control=['try'], data=[]] except dbsException as de: dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.serverError) # depends on [control=['except'], data=['de']] except Exception as ex: sError = 'DBSReaderModel/listdatasets. %s.\n Exception trace: \n %s' % (ex, traceback.format_exc()) dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError) # depends on [control=['except'], data=['ex']]
def update_one(self, update=None, validate=True, **kw): """Update this document in the database. Local representations will not be affected. A single positional parameter, `update`, may be provided as a mapping. Keyword arguments (other than those identified in UPDATE_MAPPING) are interpreted as parametric updates, added to any `update` passed in. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.update_one """ D = self.__class__ collection = self.get_collection(kw.pop('source', None)) update = Update(update or {}) if kw: update &= U(D, **kw) if not update: raise TypeError("Must provide an update operation.") return collection.update_one(D.id == self, update, bypass_document_validation=not validate)
def function[update_one, parameter[self, update, validate]]: constant[Update this document in the database. Local representations will not be affected. A single positional parameter, `update`, may be provided as a mapping. Keyword arguments (other than those identified in UPDATE_MAPPING) are interpreted as parametric updates, added to any `update` passed in. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.update_one ] variable[D] assign[=] name[self].__class__ variable[collection] assign[=] call[name[self].get_collection, parameter[call[name[kw].pop, parameter[constant[source], constant[None]]]]] variable[update] assign[=] call[name[Update], parameter[<ast.BoolOp object at 0x7da2054a7b80>]] if name[kw] begin[:] <ast.AugAssign object at 0x7da2054a5180> if <ast.UnaryOp object at 0x7da2054a52a0> begin[:] <ast.Raise object at 0x7da2054a5ab0> return[call[name[collection].update_one, parameter[compare[name[D].id equal[==] name[self]], name[update]]]]
keyword[def] identifier[update_one] ( identifier[self] , identifier[update] = keyword[None] , identifier[validate] = keyword[True] ,** identifier[kw] ): literal[string] identifier[D] = identifier[self] . identifier[__class__] identifier[collection] = identifier[self] . identifier[get_collection] ( identifier[kw] . identifier[pop] ( literal[string] , keyword[None] )) identifier[update] = identifier[Update] ( identifier[update] keyword[or] {}) keyword[if] identifier[kw] : identifier[update] &= identifier[U] ( identifier[D] ,** identifier[kw] ) keyword[if] keyword[not] identifier[update] : keyword[raise] identifier[TypeError] ( literal[string] ) keyword[return] identifier[collection] . identifier[update_one] ( identifier[D] . identifier[id] == identifier[self] , identifier[update] , identifier[bypass_document_validation] = keyword[not] identifier[validate] )
def update_one(self, update=None, validate=True, **kw): """Update this document in the database. Local representations will not be affected. A single positional parameter, `update`, may be provided as a mapping. Keyword arguments (other than those identified in UPDATE_MAPPING) are interpreted as parametric updates, added to any `update` passed in. https://api.mongodb.com/python/current/api/pymongo/collection.html#pymongo.collection.Collection.update_one """ D = self.__class__ collection = self.get_collection(kw.pop('source', None)) update = Update(update or {}) if kw: update &= U(D, **kw) # depends on [control=['if'], data=[]] if not update: raise TypeError('Must provide an update operation.') # depends on [control=['if'], data=[]] return collection.update_one(D.id == self, update, bypass_document_validation=not validate)
def baremetal_models(call=None): ''' Return a dict of all available baremetal models with relevant data. ''' if call == 'action': raise SaltCloudSystemExit( 'The baremetal_models function must be called with ' '-f or --function' ) conn = get_conn() bmodels = conn.list_baremetal_models() return bmodels
def function[baremetal_models, parameter[call]]: constant[ Return a dict of all available baremetal models with relevant data. ] if compare[name[call] equal[==] constant[action]] begin[:] <ast.Raise object at 0x7da18bccaec0> variable[conn] assign[=] call[name[get_conn], parameter[]] variable[bmodels] assign[=] call[name[conn].list_baremetal_models, parameter[]] return[name[bmodels]]
keyword[def] identifier[baremetal_models] ( identifier[call] = keyword[None] ): literal[string] keyword[if] identifier[call] == literal[string] : keyword[raise] identifier[SaltCloudSystemExit] ( literal[string] literal[string] ) identifier[conn] = identifier[get_conn] () identifier[bmodels] = identifier[conn] . identifier[list_baremetal_models] () keyword[return] identifier[bmodels]
def baremetal_models(call=None): """ Return a dict of all available baremetal models with relevant data. """ if call == 'action': raise SaltCloudSystemExit('The baremetal_models function must be called with -f or --function') # depends on [control=['if'], data=[]] conn = get_conn() bmodels = conn.list_baremetal_models() return bmodels
def random_partition(n, n_data): """return n random rows of data (and also the other len(data)-n rows)""" all_idxs = np.arange(n_data) np.random.shuffle(all_idxs) idxs1 = all_idxs[:n] idxs2 = all_idxs[n:] return idxs1, idxs2
def function[random_partition, parameter[n, n_data]]: constant[return n random rows of data (and also the other len(data)-n rows)] variable[all_idxs] assign[=] call[name[np].arange, parameter[name[n_data]]] call[name[np].random.shuffle, parameter[name[all_idxs]]] variable[idxs1] assign[=] call[name[all_idxs]][<ast.Slice object at 0x7da18f00ddb0>] variable[idxs2] assign[=] call[name[all_idxs]][<ast.Slice object at 0x7da18f00fc10>] return[tuple[[<ast.Name object at 0x7da18f00d480>, <ast.Name object at 0x7da18f00d720>]]]
keyword[def] identifier[random_partition] ( identifier[n] , identifier[n_data] ): literal[string] identifier[all_idxs] = identifier[np] . identifier[arange] ( identifier[n_data] ) identifier[np] . identifier[random] . identifier[shuffle] ( identifier[all_idxs] ) identifier[idxs1] = identifier[all_idxs] [: identifier[n] ] identifier[idxs2] = identifier[all_idxs] [ identifier[n] :] keyword[return] identifier[idxs1] , identifier[idxs2]
def random_partition(n, n_data): """return n random rows of data (and also the other len(data)-n rows)""" all_idxs = np.arange(n_data) np.random.shuffle(all_idxs) idxs1 = all_idxs[:n] idxs2 = all_idxs[n:] return (idxs1, idxs2)
def volatility(tnet, distance_func_name='default', calc='global', communities=None, event_displacement=None): r""" Volatility of temporal networks. Volatility is the average distance between consecutive time points of graphlets (difference is caclualted either globally or per edge). Parameters ---------- tnet : array or dict temporal network input (graphlet or contact). Nettype: 'bu','bd','wu','wd' D : str Distance function. Following options available: 'default', 'hamming', 'euclidean'. (Default implies hamming for binary networks, euclidean for weighted). calc : str Version of volaitility to caclulate. Possibilities include: 'global' - (default): the average distance of all nodes for each consecutive time point). 'edge' - average distance between consecutive time points for each edge). Takes considerably longer 'node' - (i.e. returns the average per node output when calculating volatility per 'edge'). 'time' - returns volatility per time point 'communities' - returns volatility per communitieswork id (see communities). Also is returned per time-point and this may be changed in the future (with additional options) 'event_displacement' - calculates the volatility from a specified point. Returns time-series. communities : array Array of indicies for community (eiter (node) or (node,time) dimensions). event_displacement : int if calc = event_displacement specify the temporal index where all other time-points are calculated in relation too. Notes ----- Volatility calculates the difference between network snapshots. .. math:: V_t = D(G_t,G_{t+1}) Where D is some distance function (e.g. Hamming distance for binary matrices). V can be calculated for the entire network (global), but can also be calculated for individual edges, nodes or given a community vector. Index of communities are returned "as is" with a shape of [max(communities)+1,max(communities)+1]. So if the indexes used are [1,2,3,5], V.shape==(6,6). The returning V[1,2] will correspond indexes 1 and 2. And missing index (e.g. here 0 and 4 will be NANs in rows and columns). If this behaviour is unwanted, call clean_communitiesdexes first. This will probably change. Examples -------- Import everything needed. >>> import teneto >>> import numpy >>> np.random.seed(1) >>> tnet = teneto.TemporalNetwork(nettype='bu') Here we generate a binary network where edges have a 0.5 change of going "on", and once on a 0.2 change to go "off" >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,0.2)) Calculate the volatility >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.5555555555555556 If we change the probabilities to instead be certain edges disapeared the time-point after the appeared: >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,1)) This will make a more volatile network >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.1111111111111111 We can calculate the volatility per time instead >>> vol_time = tnet.calc_networkmeasure('volatility', calc='time', distance_func_name='hamming') >>> len(vol_time) 9 >>> vol_time[0] 0.3333333333333333 Or per node: >>> vol_node = tnet.calc_networkmeasure('volatility', calc='node', distance_func_name='hamming') >>> vol_node array([0.07407407, 0.07407407, 0.07407407]) Here we see the volatility for each node was the same. It is also possible to pass a community vector and the function will return volatility both within and between each community. So the following has two communities: >>> vol_com = tnet.calc_networkmeasure('volatility', calc='communities', communities=[0,1,1], distance_func_name='hamming') >>> vol_com.shape (2, 2, 9) >>> vol_com[:,:,0] array([[nan, 0.5], [0.5, 0. ]]) And we see that, at time-point 0, there is some volatility between community 0 and 1 but no volatility within community 1. The reason for nan appearing is due to there only being 1 node in community 0. Output ------ vol : array """ # Get input (C or G) tnet, netinfo = process_input(tnet, ['C', 'G', 'TN']) distance_func_name = check_distance_funciton_input( distance_func_name, netinfo) if not isinstance(distance_func_name, str): raise ValueError('Distance metric must be a string') # If not directional, only calc on the uppertriangle if netinfo['nettype'][1] == 'd': ind = np.triu_indices(tnet.shape[0], k=-tnet.shape[0]) elif netinfo['nettype'][1] == 'u': ind = np.triu_indices(tnet.shape[0], k=1) if calc == 'communities': # Make sure communities is np array for indexing later on. communities = np.array(communities) if len(communities) != netinfo['netshape'][0]: raise ValueError( 'When processing per network, communities vector must equal the number of nodes') if communities.min() < 0: raise ValueError( 'Communitiy assignments must be positive integers') # Get chosen distance metric fucntion distance_func = getDistanceFunction(distance_func_name) if calc == 'global': vol = np.mean([distance_func(tnet[ind[0], ind[1], t], tnet[ind[0], ind[1], t + 1]) for t in range(0, tnet.shape[-1] - 1)]) elif calc == 'time': vol = [distance_func(tnet[ind[0], ind[1], t], tnet[ind[0], ind[1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] elif calc == 'event_displacement': vol = [distance_func(tnet[ind[0], ind[1], event_displacement], tnet[ind[0], ind[1], t]) for t in range(0, tnet.shape[-1])] # This takes quite a bit of time to loop through. When calculating per edge/node. elif calc == 'edge' or calc == 'node': vol = np.zeros([tnet.shape[0], tnet.shape[1]]) for i in ind[0]: for j in ind[1]: vol[i, j] = np.mean([distance_func( tnet[i, j, t], tnet[i, j, t + 1]) for t in range(0, tnet.shape[-1] - 1)]) if netinfo['nettype'][1] == 'u': vol = vol + np.transpose(vol) if calc == 'node': vol = np.mean(vol, axis=1) elif calc == 'communities': net_id = set(communities) vol = np.zeros([max(net_id) + 1, max(net_id) + 1, netinfo['netshape'][-1] - 1]) for net1 in net_id: for net2 in net_id: if net1 != net2: vol[net1, net2, :] = [distance_func(tnet[communities == net1][:, communities == net2, t].flatten(), tnet[communities == net1][:, communities == net2, t + 1].flatten()) for t in range(0, tnet.shape[-1] - 1)] else: nettmp = tnet[communities == net1][:, communities == net2, :] triu = np.triu_indices(nettmp.shape[0], k=1) nettmp = nettmp[triu[0], triu[1], :] vol[net1, net2, :] = [distance_func(nettmp[:, t].flatten( ), nettmp[:, t + 1].flatten()) for t in range(0, tnet.shape[-1] - 1)] elif calc == 'withincommunities': withi = np.array([[ind[0][n], ind[1][n]] for n in range( 0, len(ind[0])) if communities[ind[0][n]] == communities[ind[1][n]]]) vol = [distance_func(tnet[withi[:, 0], withi[:, 1], t], tnet[withi[:, 0], withi[:, 1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] elif calc == 'betweencommunities': beti = np.array([[ind[0][n], ind[1][n]] for n in range( 0, len(ind[0])) if communities[ind[0][n]] != communities[ind[1][n]]]) vol = [distance_func(tnet[beti[:, 0], beti[:, 1], t], tnet[beti[:, 0], beti[:, 1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] return vol
def function[volatility, parameter[tnet, distance_func_name, calc, communities, event_displacement]]: constant[ Volatility of temporal networks. Volatility is the average distance between consecutive time points of graphlets (difference is caclualted either globally or per edge). Parameters ---------- tnet : array or dict temporal network input (graphlet or contact). Nettype: 'bu','bd','wu','wd' D : str Distance function. Following options available: 'default', 'hamming', 'euclidean'. (Default implies hamming for binary networks, euclidean for weighted). calc : str Version of volaitility to caclulate. Possibilities include: 'global' - (default): the average distance of all nodes for each consecutive time point). 'edge' - average distance between consecutive time points for each edge). Takes considerably longer 'node' - (i.e. returns the average per node output when calculating volatility per 'edge'). 'time' - returns volatility per time point 'communities' - returns volatility per communitieswork id (see communities). Also is returned per time-point and this may be changed in the future (with additional options) 'event_displacement' - calculates the volatility from a specified point. Returns time-series. communities : array Array of indicies for community (eiter (node) or (node,time) dimensions). event_displacement : int if calc = event_displacement specify the temporal index where all other time-points are calculated in relation too. Notes ----- Volatility calculates the difference between network snapshots. .. math:: V_t = D(G_t,G_{t+1}) Where D is some distance function (e.g. Hamming distance for binary matrices). V can be calculated for the entire network (global), but can also be calculated for individual edges, nodes or given a community vector. Index of communities are returned "as is" with a shape of [max(communities)+1,max(communities)+1]. So if the indexes used are [1,2,3,5], V.shape==(6,6). The returning V[1,2] will correspond indexes 1 and 2. And missing index (e.g. here 0 and 4 will be NANs in rows and columns). If this behaviour is unwanted, call clean_communitiesdexes first. This will probably change. Examples -------- Import everything needed. >>> import teneto >>> import numpy >>> np.random.seed(1) >>> tnet = teneto.TemporalNetwork(nettype='bu') Here we generate a binary network where edges have a 0.5 change of going "on", and once on a 0.2 change to go "off" >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,0.2)) Calculate the volatility >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.5555555555555556 If we change the probabilities to instead be certain edges disapeared the time-point after the appeared: >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,1)) This will make a more volatile network >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.1111111111111111 We can calculate the volatility per time instead >>> vol_time = tnet.calc_networkmeasure('volatility', calc='time', distance_func_name='hamming') >>> len(vol_time) 9 >>> vol_time[0] 0.3333333333333333 Or per node: >>> vol_node = tnet.calc_networkmeasure('volatility', calc='node', distance_func_name='hamming') >>> vol_node array([0.07407407, 0.07407407, 0.07407407]) Here we see the volatility for each node was the same. It is also possible to pass a community vector and the function will return volatility both within and between each community. So the following has two communities: >>> vol_com = tnet.calc_networkmeasure('volatility', calc='communities', communities=[0,1,1], distance_func_name='hamming') >>> vol_com.shape (2, 2, 9) >>> vol_com[:,:,0] array([[nan, 0.5], [0.5, 0. ]]) And we see that, at time-point 0, there is some volatility between community 0 and 1 but no volatility within community 1. The reason for nan appearing is due to there only being 1 node in community 0. Output ------ vol : array ] <ast.Tuple object at 0x7da207f00520> assign[=] call[name[process_input], parameter[name[tnet], list[[<ast.Constant object at 0x7da207f03970>, <ast.Constant object at 0x7da207f00d00>, <ast.Constant object at 0x7da207f01810>]]]] variable[distance_func_name] assign[=] call[name[check_distance_funciton_input], parameter[name[distance_func_name], name[netinfo]]] if <ast.UnaryOp object at 0x7da207f00280> begin[:] <ast.Raise object at 0x7da207f028c0> if compare[call[call[name[netinfo]][constant[nettype]]][constant[1]] equal[==] constant[d]] begin[:] variable[ind] assign[=] call[name[np].triu_indices, parameter[call[name[tnet].shape][constant[0]]]] if compare[name[calc] equal[==] constant[communities]] begin[:] variable[communities] assign[=] call[name[np].array, parameter[name[communities]]] if compare[call[name[len], parameter[name[communities]]] not_equal[!=] call[call[name[netinfo]][constant[netshape]]][constant[0]]] begin[:] <ast.Raise object at 0x7da207f00250> if compare[call[name[communities].min, parameter[]] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da207f01660> variable[distance_func] assign[=] call[name[getDistanceFunction], parameter[name[distance_func_name]]] if compare[name[calc] equal[==] constant[global]] begin[:] variable[vol] assign[=] call[name[np].mean, parameter[<ast.ListComp object at 0x7da207f02680>]] return[name[vol]]
keyword[def] identifier[volatility] ( identifier[tnet] , identifier[distance_func_name] = literal[string] , identifier[calc] = literal[string] , identifier[communities] = keyword[None] , identifier[event_displacement] = keyword[None] ): literal[string] identifier[tnet] , identifier[netinfo] = identifier[process_input] ( identifier[tnet] ,[ literal[string] , literal[string] , literal[string] ]) identifier[distance_func_name] = identifier[check_distance_funciton_input] ( identifier[distance_func_name] , identifier[netinfo] ) keyword[if] keyword[not] identifier[isinstance] ( identifier[distance_func_name] , identifier[str] ): keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[netinfo] [ literal[string] ][ literal[int] ]== literal[string] : identifier[ind] = identifier[np] . identifier[triu_indices] ( identifier[tnet] . identifier[shape] [ literal[int] ], identifier[k] =- identifier[tnet] . identifier[shape] [ literal[int] ]) keyword[elif] identifier[netinfo] [ literal[string] ][ literal[int] ]== literal[string] : identifier[ind] = identifier[np] . identifier[triu_indices] ( identifier[tnet] . identifier[shape] [ literal[int] ], identifier[k] = literal[int] ) keyword[if] identifier[calc] == literal[string] : identifier[communities] = identifier[np] . identifier[array] ( identifier[communities] ) keyword[if] identifier[len] ( identifier[communities] )!= identifier[netinfo] [ literal[string] ][ literal[int] ]: keyword[raise] identifier[ValueError] ( literal[string] ) keyword[if] identifier[communities] . identifier[min] ()< literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[distance_func] = identifier[getDistanceFunction] ( identifier[distance_func_name] ) keyword[if] identifier[calc] == literal[string] : identifier[vol] = identifier[np] . identifier[mean] ([ identifier[distance_func] ( identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[t] ], identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[t] + literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )]) keyword[elif] identifier[calc] == literal[string] : identifier[vol] =[ identifier[distance_func] ( identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[t] ], identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[t] + literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )] keyword[elif] identifier[calc] == literal[string] : identifier[vol] =[ identifier[distance_func] ( identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[event_displacement] ], identifier[tnet] [ identifier[ind] [ literal[int] ], identifier[ind] [ literal[int] ], identifier[t] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ])] keyword[elif] identifier[calc] == literal[string] keyword[or] identifier[calc] == literal[string] : identifier[vol] = identifier[np] . identifier[zeros] ([ identifier[tnet] . identifier[shape] [ literal[int] ], identifier[tnet] . identifier[shape] [ literal[int] ]]) keyword[for] identifier[i] keyword[in] identifier[ind] [ literal[int] ]: keyword[for] identifier[j] keyword[in] identifier[ind] [ literal[int] ]: identifier[vol] [ identifier[i] , identifier[j] ]= identifier[np] . identifier[mean] ([ identifier[distance_func] ( identifier[tnet] [ identifier[i] , identifier[j] , identifier[t] ], identifier[tnet] [ identifier[i] , identifier[j] , identifier[t] + literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )]) keyword[if] identifier[netinfo] [ literal[string] ][ literal[int] ]== literal[string] : identifier[vol] = identifier[vol] + identifier[np] . identifier[transpose] ( identifier[vol] ) keyword[if] identifier[calc] == literal[string] : identifier[vol] = identifier[np] . identifier[mean] ( identifier[vol] , identifier[axis] = literal[int] ) keyword[elif] identifier[calc] == literal[string] : identifier[net_id] = identifier[set] ( identifier[communities] ) identifier[vol] = identifier[np] . identifier[zeros] ([ identifier[max] ( identifier[net_id] )+ literal[int] , identifier[max] ( identifier[net_id] )+ literal[int] , identifier[netinfo] [ literal[string] ][- literal[int] ]- literal[int] ]) keyword[for] identifier[net1] keyword[in] identifier[net_id] : keyword[for] identifier[net2] keyword[in] identifier[net_id] : keyword[if] identifier[net1] != identifier[net2] : identifier[vol] [ identifier[net1] , identifier[net2] ,:]=[ identifier[distance_func] ( identifier[tnet] [ identifier[communities] == identifier[net1] ][:, identifier[communities] == identifier[net2] , identifier[t] ]. identifier[flatten] (), identifier[tnet] [ identifier[communities] == identifier[net1] ][:, identifier[communities] == identifier[net2] , identifier[t] + literal[int] ]. identifier[flatten] ()) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )] keyword[else] : identifier[nettmp] = identifier[tnet] [ identifier[communities] == identifier[net1] ][:, identifier[communities] == identifier[net2] ,:] identifier[triu] = identifier[np] . identifier[triu_indices] ( identifier[nettmp] . identifier[shape] [ literal[int] ], identifier[k] = literal[int] ) identifier[nettmp] = identifier[nettmp] [ identifier[triu] [ literal[int] ], identifier[triu] [ literal[int] ],:] identifier[vol] [ identifier[net1] , identifier[net2] ,:]=[ identifier[distance_func] ( identifier[nettmp] [:, identifier[t] ]. identifier[flatten] ( ), identifier[nettmp] [:, identifier[t] + literal[int] ]. identifier[flatten] ()) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )] keyword[elif] identifier[calc] == literal[string] : identifier[withi] = identifier[np] . identifier[array] ([[ identifier[ind] [ literal[int] ][ identifier[n] ], identifier[ind] [ literal[int] ][ identifier[n] ]] keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[ind] [ literal[int] ])) keyword[if] identifier[communities] [ identifier[ind] [ literal[int] ][ identifier[n] ]]== identifier[communities] [ identifier[ind] [ literal[int] ][ identifier[n] ]]]) identifier[vol] =[ identifier[distance_func] ( identifier[tnet] [ identifier[withi] [:, literal[int] ], identifier[withi] [:, literal[int] ], identifier[t] ], identifier[tnet] [ identifier[withi] [:, literal[int] ], identifier[withi] [:, literal[int] ], identifier[t] + literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )] keyword[elif] identifier[calc] == literal[string] : identifier[beti] = identifier[np] . identifier[array] ([[ identifier[ind] [ literal[int] ][ identifier[n] ], identifier[ind] [ literal[int] ][ identifier[n] ]] keyword[for] identifier[n] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[ind] [ literal[int] ])) keyword[if] identifier[communities] [ identifier[ind] [ literal[int] ][ identifier[n] ]]!= identifier[communities] [ identifier[ind] [ literal[int] ][ identifier[n] ]]]) identifier[vol] =[ identifier[distance_func] ( identifier[tnet] [ identifier[beti] [:, literal[int] ], identifier[beti] [:, literal[int] ], identifier[t] ], identifier[tnet] [ identifier[beti] [:, literal[int] ], identifier[beti] [:, literal[int] ], identifier[t] + literal[int] ]) keyword[for] identifier[t] keyword[in] identifier[range] ( literal[int] , identifier[tnet] . identifier[shape] [- literal[int] ]- literal[int] )] keyword[return] identifier[vol]
def volatility(tnet, distance_func_name='default', calc='global', communities=None, event_displacement=None): """ Volatility of temporal networks. Volatility is the average distance between consecutive time points of graphlets (difference is caclualted either globally or per edge). Parameters ---------- tnet : array or dict temporal network input (graphlet or contact). Nettype: 'bu','bd','wu','wd' D : str Distance function. Following options available: 'default', 'hamming', 'euclidean'. (Default implies hamming for binary networks, euclidean for weighted). calc : str Version of volaitility to caclulate. Possibilities include: 'global' - (default): the average distance of all nodes for each consecutive time point). 'edge' - average distance between consecutive time points for each edge). Takes considerably longer 'node' - (i.e. returns the average per node output when calculating volatility per 'edge'). 'time' - returns volatility per time point 'communities' - returns volatility per communitieswork id (see communities). Also is returned per time-point and this may be changed in the future (with additional options) 'event_displacement' - calculates the volatility from a specified point. Returns time-series. communities : array Array of indicies for community (eiter (node) or (node,time) dimensions). event_displacement : int if calc = event_displacement specify the temporal index where all other time-points are calculated in relation too. Notes ----- Volatility calculates the difference between network snapshots. .. math:: V_t = D(G_t,G_{t+1}) Where D is some distance function (e.g. Hamming distance for binary matrices). V can be calculated for the entire network (global), but can also be calculated for individual edges, nodes or given a community vector. Index of communities are returned "as is" with a shape of [max(communities)+1,max(communities)+1]. So if the indexes used are [1,2,3,5], V.shape==(6,6). The returning V[1,2] will correspond indexes 1 and 2. And missing index (e.g. here 0 and 4 will be NANs in rows and columns). If this behaviour is unwanted, call clean_communitiesdexes first. This will probably change. Examples -------- Import everything needed. >>> import teneto >>> import numpy >>> np.random.seed(1) >>> tnet = teneto.TemporalNetwork(nettype='bu') Here we generate a binary network where edges have a 0.5 change of going "on", and once on a 0.2 change to go "off" >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,0.2)) Calculate the volatility >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.5555555555555556 If we change the probabilities to instead be certain edges disapeared the time-point after the appeared: >>> tnet.generatenetwork('rand_binomial', size=(3,10), prob=(0.5,1)) This will make a more volatile network >>> tnet.calc_networkmeasure('volatility', distance_func_name='hamming') 0.1111111111111111 We can calculate the volatility per time instead >>> vol_time = tnet.calc_networkmeasure('volatility', calc='time', distance_func_name='hamming') >>> len(vol_time) 9 >>> vol_time[0] 0.3333333333333333 Or per node: >>> vol_node = tnet.calc_networkmeasure('volatility', calc='node', distance_func_name='hamming') >>> vol_node array([0.07407407, 0.07407407, 0.07407407]) Here we see the volatility for each node was the same. It is also possible to pass a community vector and the function will return volatility both within and between each community. So the following has two communities: >>> vol_com = tnet.calc_networkmeasure('volatility', calc='communities', communities=[0,1,1], distance_func_name='hamming') >>> vol_com.shape (2, 2, 9) >>> vol_com[:,:,0] array([[nan, 0.5], [0.5, 0. ]]) And we see that, at time-point 0, there is some volatility between community 0 and 1 but no volatility within community 1. The reason for nan appearing is due to there only being 1 node in community 0. Output ------ vol : array """ # Get input (C or G) (tnet, netinfo) = process_input(tnet, ['C', 'G', 'TN']) distance_func_name = check_distance_funciton_input(distance_func_name, netinfo) if not isinstance(distance_func_name, str): raise ValueError('Distance metric must be a string') # depends on [control=['if'], data=[]] # If not directional, only calc on the uppertriangle if netinfo['nettype'][1] == 'd': ind = np.triu_indices(tnet.shape[0], k=-tnet.shape[0]) # depends on [control=['if'], data=[]] elif netinfo['nettype'][1] == 'u': ind = np.triu_indices(tnet.shape[0], k=1) # depends on [control=['if'], data=[]] if calc == 'communities': # Make sure communities is np array for indexing later on. communities = np.array(communities) if len(communities) != netinfo['netshape'][0]: raise ValueError('When processing per network, communities vector must equal the number of nodes') # depends on [control=['if'], data=[]] if communities.min() < 0: raise ValueError('Communitiy assignments must be positive integers') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # Get chosen distance metric fucntion distance_func = getDistanceFunction(distance_func_name) if calc == 'global': vol = np.mean([distance_func(tnet[ind[0], ind[1], t], tnet[ind[0], ind[1], t + 1]) for t in range(0, tnet.shape[-1] - 1)]) # depends on [control=['if'], data=[]] elif calc == 'time': vol = [distance_func(tnet[ind[0], ind[1], t], tnet[ind[0], ind[1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] # depends on [control=['if'], data=[]] elif calc == 'event_displacement': vol = [distance_func(tnet[ind[0], ind[1], event_displacement], tnet[ind[0], ind[1], t]) for t in range(0, tnet.shape[-1])] # depends on [control=['if'], data=[]] # This takes quite a bit of time to loop through. When calculating per edge/node. elif calc == 'edge' or calc == 'node': vol = np.zeros([tnet.shape[0], tnet.shape[1]]) for i in ind[0]: for j in ind[1]: vol[i, j] = np.mean([distance_func(tnet[i, j, t], tnet[i, j, t + 1]) for t in range(0, tnet.shape[-1] - 1)]) # depends on [control=['for'], data=['j']] # depends on [control=['for'], data=['i']] if netinfo['nettype'][1] == 'u': vol = vol + np.transpose(vol) # depends on [control=['if'], data=[]] if calc == 'node': vol = np.mean(vol, axis=1) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif calc == 'communities': net_id = set(communities) vol = np.zeros([max(net_id) + 1, max(net_id) + 1, netinfo['netshape'][-1] - 1]) for net1 in net_id: for net2 in net_id: if net1 != net2: vol[net1, net2, :] = [distance_func(tnet[communities == net1][:, communities == net2, t].flatten(), tnet[communities == net1][:, communities == net2, t + 1].flatten()) for t in range(0, tnet.shape[-1] - 1)] # depends on [control=['if'], data=['net1', 'net2']] else: nettmp = tnet[communities == net1][:, communities == net2, :] triu = np.triu_indices(nettmp.shape[0], k=1) nettmp = nettmp[triu[0], triu[1], :] vol[net1, net2, :] = [distance_func(nettmp[:, t].flatten(), nettmp[:, t + 1].flatten()) for t in range(0, tnet.shape[-1] - 1)] # depends on [control=['for'], data=['net2']] # depends on [control=['for'], data=['net1']] # depends on [control=['if'], data=[]] elif calc == 'withincommunities': withi = np.array([[ind[0][n], ind[1][n]] for n in range(0, len(ind[0])) if communities[ind[0][n]] == communities[ind[1][n]]]) vol = [distance_func(tnet[withi[:, 0], withi[:, 1], t], tnet[withi[:, 0], withi[:, 1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] # depends on [control=['if'], data=[]] elif calc == 'betweencommunities': beti = np.array([[ind[0][n], ind[1][n]] for n in range(0, len(ind[0])) if communities[ind[0][n]] != communities[ind[1][n]]]) vol = [distance_func(tnet[beti[:, 0], beti[:, 1], t], tnet[beti[:, 0], beti[:, 1], t + 1]) for t in range(0, tnet.shape[-1] - 1)] # depends on [control=['if'], data=[]] return vol
def get_all_user_objects(self): """ Fetches all user objects from the AD, and returns MSADUser object """ logger.debug('Polling AD for all user objects') ldap_filter = r'(objectClass=user)' attributes = MSADUser.ATTRS for entry in self.pagedsearch(ldap_filter, attributes): # TODO: return ldapuser object yield MSADUser.from_ldap(entry, self._ldapinfo) logger.debug('Finished polling for entries!')
def function[get_all_user_objects, parameter[self]]: constant[ Fetches all user objects from the AD, and returns MSADUser object ] call[name[logger].debug, parameter[constant[Polling AD for all user objects]]] variable[ldap_filter] assign[=] constant[(objectClass=user)] variable[attributes] assign[=] name[MSADUser].ATTRS for taget[name[entry]] in starred[call[name[self].pagedsearch, parameter[name[ldap_filter], name[attributes]]]] begin[:] <ast.Yield object at 0x7da1b05d8e50> call[name[logger].debug, parameter[constant[Finished polling for entries!]]]
keyword[def] identifier[get_all_user_objects] ( identifier[self] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] ) identifier[ldap_filter] = literal[string] identifier[attributes] = identifier[MSADUser] . identifier[ATTRS] keyword[for] identifier[entry] keyword[in] identifier[self] . identifier[pagedsearch] ( identifier[ldap_filter] , identifier[attributes] ): keyword[yield] identifier[MSADUser] . identifier[from_ldap] ( identifier[entry] , identifier[self] . identifier[_ldapinfo] ) identifier[logger] . identifier[debug] ( literal[string] )
def get_all_user_objects(self): """ Fetches all user objects from the AD, and returns MSADUser object """ logger.debug('Polling AD for all user objects') ldap_filter = '(objectClass=user)' attributes = MSADUser.ATTRS for entry in self.pagedsearch(ldap_filter, attributes): # TODO: return ldapuser object yield MSADUser.from_ldap(entry, self._ldapinfo) # depends on [control=['for'], data=['entry']] logger.debug('Finished polling for entries!')
def hide_element(self, selector, by=By.CSS_SELECTOR): """ Hide the first element on the page that matches the selector. """ selector, by = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) hide_script = """jQuery('%s').hide()""" % selector self.safe_execute_script(hide_script)
def function[hide_element, parameter[self, selector, by]]: constant[ Hide the first element on the page that matches the selector. ] <ast.Tuple object at 0x7da1b1b15cc0> assign[=] call[name[self].__recalculate_selector, parameter[name[selector], name[by]]] variable[selector] assign[=] call[name[self].convert_to_css_selector, parameter[name[selector]]] variable[selector] assign[=] call[name[self].__make_css_match_first_element_only, parameter[name[selector]]] variable[hide_script] assign[=] binary_operation[constant[jQuery('%s').hide()] <ast.Mod object at 0x7da2590d6920> name[selector]] call[name[self].safe_execute_script, parameter[name[hide_script]]]
keyword[def] identifier[hide_element] ( identifier[self] , identifier[selector] , identifier[by] = identifier[By] . identifier[CSS_SELECTOR] ): literal[string] identifier[selector] , identifier[by] = identifier[self] . identifier[__recalculate_selector] ( identifier[selector] , identifier[by] ) identifier[selector] = identifier[self] . identifier[convert_to_css_selector] ( identifier[selector] , identifier[by] = identifier[by] ) identifier[selector] = identifier[self] . identifier[__make_css_match_first_element_only] ( identifier[selector] ) identifier[hide_script] = literal[string] % identifier[selector] identifier[self] . identifier[safe_execute_script] ( identifier[hide_script] )
def hide_element(self, selector, by=By.CSS_SELECTOR): """ Hide the first element on the page that matches the selector. """ (selector, by) = self.__recalculate_selector(selector, by) selector = self.convert_to_css_selector(selector, by=by) selector = self.__make_css_match_first_element_only(selector) hide_script = "jQuery('%s').hide()" % selector self.safe_execute_script(hide_script)
def arc_center(points): """ Given three points on an arc find: center, radius, normal, and angle. This uses the fact that the intersection of the perp bisectors of the segments between the control points is the center of the arc. Parameters --------- points : (3, dimension) float Points in space, where dimension is either 2 or 3 Returns --------- result : dict Has keys: 'center': (d,) float, cartesian center of the arc 'radius': float, radius of the arc 'normal': (3,) float, the plane normal. 'angle': (2,) float, angle of start and end, in radians 'span' : float, angle swept by the arc, in radians """ # it's a lot easier to treat 2D as 3D with a zero Z value points, is_2D = util.stack_3D(points, return_2D=True) # find the two edge vectors of the triangle edge_direction = np.diff(points, axis=0) edge_midpoints = (edge_direction * 0.5) + points[:2] # three points define a plane, so we find its normal vector plane_normal = np.cross(*edge_direction[::-1]) plane_normal /= np.linalg.norm(plane_normal) # unit vector along edges vector_edge = (edge_direction / np.linalg.norm(edge_direction, axis=1).reshape((-1, 1))) # perpendicular cector to each segment vector_perp = np.cross(vector_edge, plane_normal) vector_perp /= np.linalg.norm(vector_perp, axis=1).reshape((-1, 1)) # run the line- line intersection to find the point intersects, center = line_line(origins=edge_midpoints, directions=vector_perp, plane_normal=plane_normal) if not intersects: raise ValueError('Segments do not intersect!') # radius is euclidean distance radius = ((points[0] - center) ** 2).sum() ** .5 # vectors from points on arc to center point vector = points - center vector /= np.linalg.norm(vector, axis=1).reshape((-1, 1)) angle = np.arccos(np.clip(np.dot(*vector[[0, 2]]), -1.0, 1.0)) large_arc = (abs(angle) > tol.zero and np.dot(*edge_direction) < 0.0) if large_arc: angle = (np.pi * 2) - angle angles = np.arctan2(*vector[:, :2].T[::-1]) + np.pi * 2 angles_sorted = np.sort(angles[[0, 2]]) reverse = angles_sorted[0] < angles[1] < angles_sorted[1] angles_sorted = angles_sorted[::(1 - int(not reverse) * 2)] result = {'center': center[:(3 - is_2D)], 'radius': radius, 'normal': plane_normal, 'span': angle, 'angles': angles_sorted} return result
def function[arc_center, parameter[points]]: constant[ Given three points on an arc find: center, radius, normal, and angle. This uses the fact that the intersection of the perp bisectors of the segments between the control points is the center of the arc. Parameters --------- points : (3, dimension) float Points in space, where dimension is either 2 or 3 Returns --------- result : dict Has keys: 'center': (d,) float, cartesian center of the arc 'radius': float, radius of the arc 'normal': (3,) float, the plane normal. 'angle': (2,) float, angle of start and end, in radians 'span' : float, angle swept by the arc, in radians ] <ast.Tuple object at 0x7da2044c1c30> assign[=] call[name[util].stack_3D, parameter[name[points]]] variable[edge_direction] assign[=] call[name[np].diff, parameter[name[points]]] variable[edge_midpoints] assign[=] binary_operation[binary_operation[name[edge_direction] * constant[0.5]] + call[name[points]][<ast.Slice object at 0x7da2044c1540>]] variable[plane_normal] assign[=] call[name[np].cross, parameter[<ast.Starred object at 0x7da2044c1c00>]] <ast.AugAssign object at 0x7da2044c2620> variable[vector_edge] assign[=] binary_operation[name[edge_direction] / call[call[name[np].linalg.norm, parameter[name[edge_direction]]].reshape, parameter[tuple[[<ast.UnaryOp object at 0x7da2044c2860>, <ast.Constant object at 0x7da2044c1f00>]]]]] variable[vector_perp] assign[=] call[name[np].cross, parameter[name[vector_edge], name[plane_normal]]] <ast.AugAssign object at 0x7da2044c38e0> <ast.Tuple object at 0x7da2044c25c0> assign[=] call[name[line_line], parameter[]] if <ast.UnaryOp object at 0x7da2044c0790> begin[:] <ast.Raise object at 0x7da2044c2c80> variable[radius] assign[=] binary_operation[call[binary_operation[binary_operation[call[name[points]][constant[0]] - name[center]] ** constant[2]].sum, parameter[]] ** constant[0.5]] variable[vector] assign[=] binary_operation[name[points] - name[center]] <ast.AugAssign object at 0x7da2044c0070> variable[angle] assign[=] call[name[np].arccos, parameter[call[name[np].clip, parameter[call[name[np].dot, parameter[<ast.Starred object at 0x7da20c7cb310>]], <ast.UnaryOp object at 0x7da20c7c9330>, constant[1.0]]]]] variable[large_arc] assign[=] <ast.BoolOp object at 0x7da20c7cab00> if name[large_arc] begin[:] variable[angle] assign[=] binary_operation[binary_operation[name[np].pi * constant[2]] - name[angle]] variable[angles] assign[=] binary_operation[call[name[np].arctan2, parameter[<ast.Starred object at 0x7da20c7cbbb0>]] + binary_operation[name[np].pi * constant[2]]] variable[angles_sorted] assign[=] call[name[np].sort, parameter[call[name[angles]][list[[<ast.Constant object at 0x7da20c7cb9d0>, <ast.Constant object at 0x7da20c7c86a0>]]]]] variable[reverse] assign[=] compare[call[name[angles_sorted]][constant[0]] less[<] call[name[angles]][constant[1]]] variable[angles_sorted] assign[=] call[name[angles_sorted]][<ast.Slice object at 0x7da20c7cbc70>] variable[result] assign[=] dictionary[[<ast.Constant object at 0x7da20c7c8730>, <ast.Constant object at 0x7da20c7c83d0>, <ast.Constant object at 0x7da20c7c8190>, <ast.Constant object at 0x7da20c7ca2f0>, <ast.Constant object at 0x7da20c7cabf0>], [<ast.Subscript object at 0x7da20c7c9d20>, <ast.Name object at 0x7da20c990ac0>, <ast.Name object at 0x7da20c992620>, <ast.Name object at 0x7da20c9912a0>, <ast.Name object at 0x7da20c9924a0>]] return[name[result]]
keyword[def] identifier[arc_center] ( identifier[points] ): literal[string] identifier[points] , identifier[is_2D] = identifier[util] . identifier[stack_3D] ( identifier[points] , identifier[return_2D] = keyword[True] ) identifier[edge_direction] = identifier[np] . identifier[diff] ( identifier[points] , identifier[axis] = literal[int] ) identifier[edge_midpoints] =( identifier[edge_direction] * literal[int] )+ identifier[points] [: literal[int] ] identifier[plane_normal] = identifier[np] . identifier[cross] (* identifier[edge_direction] [::- literal[int] ]) identifier[plane_normal] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[plane_normal] ) identifier[vector_edge] =( identifier[edge_direction] / identifier[np] . identifier[linalg] . identifier[norm] ( identifier[edge_direction] , identifier[axis] = literal[int] ). identifier[reshape] ((- literal[int] , literal[int] ))) identifier[vector_perp] = identifier[np] . identifier[cross] ( identifier[vector_edge] , identifier[plane_normal] ) identifier[vector_perp] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vector_perp] , identifier[axis] = literal[int] ). identifier[reshape] ((- literal[int] , literal[int] )) identifier[intersects] , identifier[center] = identifier[line_line] ( identifier[origins] = identifier[edge_midpoints] , identifier[directions] = identifier[vector_perp] , identifier[plane_normal] = identifier[plane_normal] ) keyword[if] keyword[not] identifier[intersects] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[radius] =(( identifier[points] [ literal[int] ]- identifier[center] )** literal[int] ). identifier[sum] ()** literal[int] identifier[vector] = identifier[points] - identifier[center] identifier[vector] /= identifier[np] . identifier[linalg] . identifier[norm] ( identifier[vector] , identifier[axis] = literal[int] ). identifier[reshape] ((- literal[int] , literal[int] )) identifier[angle] = identifier[np] . identifier[arccos] ( identifier[np] . identifier[clip] ( identifier[np] . identifier[dot] (* identifier[vector] [[ literal[int] , literal[int] ]]),- literal[int] , literal[int] )) identifier[large_arc] =( identifier[abs] ( identifier[angle] )> identifier[tol] . identifier[zero] keyword[and] identifier[np] . identifier[dot] (* identifier[edge_direction] )< literal[int] ) keyword[if] identifier[large_arc] : identifier[angle] =( identifier[np] . identifier[pi] * literal[int] )- identifier[angle] identifier[angles] = identifier[np] . identifier[arctan2] (* identifier[vector] [:,: literal[int] ]. identifier[T] [::- literal[int] ])+ identifier[np] . identifier[pi] * literal[int] identifier[angles_sorted] = identifier[np] . identifier[sort] ( identifier[angles] [[ literal[int] , literal[int] ]]) identifier[reverse] = identifier[angles_sorted] [ literal[int] ]< identifier[angles] [ literal[int] ]< identifier[angles_sorted] [ literal[int] ] identifier[angles_sorted] = identifier[angles_sorted] [::( literal[int] - identifier[int] ( keyword[not] identifier[reverse] )* literal[int] )] identifier[result] ={ literal[string] : identifier[center] [:( literal[int] - identifier[is_2D] )], literal[string] : identifier[radius] , literal[string] : identifier[plane_normal] , literal[string] : identifier[angle] , literal[string] : identifier[angles_sorted] } keyword[return] identifier[result]
def arc_center(points): """ Given three points on an arc find: center, radius, normal, and angle. This uses the fact that the intersection of the perp bisectors of the segments between the control points is the center of the arc. Parameters --------- points : (3, dimension) float Points in space, where dimension is either 2 or 3 Returns --------- result : dict Has keys: 'center': (d,) float, cartesian center of the arc 'radius': float, radius of the arc 'normal': (3,) float, the plane normal. 'angle': (2,) float, angle of start and end, in radians 'span' : float, angle swept by the arc, in radians """ # it's a lot easier to treat 2D as 3D with a zero Z value (points, is_2D) = util.stack_3D(points, return_2D=True) # find the two edge vectors of the triangle edge_direction = np.diff(points, axis=0) edge_midpoints = edge_direction * 0.5 + points[:2] # three points define a plane, so we find its normal vector plane_normal = np.cross(*edge_direction[::-1]) plane_normal /= np.linalg.norm(plane_normal) # unit vector along edges vector_edge = edge_direction / np.linalg.norm(edge_direction, axis=1).reshape((-1, 1)) # perpendicular cector to each segment vector_perp = np.cross(vector_edge, plane_normal) vector_perp /= np.linalg.norm(vector_perp, axis=1).reshape((-1, 1)) # run the line- line intersection to find the point (intersects, center) = line_line(origins=edge_midpoints, directions=vector_perp, plane_normal=plane_normal) if not intersects: raise ValueError('Segments do not intersect!') # depends on [control=['if'], data=[]] # radius is euclidean distance radius = ((points[0] - center) ** 2).sum() ** 0.5 # vectors from points on arc to center point vector = points - center vector /= np.linalg.norm(vector, axis=1).reshape((-1, 1)) angle = np.arccos(np.clip(np.dot(*vector[[0, 2]]), -1.0, 1.0)) large_arc = abs(angle) > tol.zero and np.dot(*edge_direction) < 0.0 if large_arc: angle = np.pi * 2 - angle # depends on [control=['if'], data=[]] angles = np.arctan2(*vector[:, :2].T[::-1]) + np.pi * 2 angles_sorted = np.sort(angles[[0, 2]]) reverse = angles_sorted[0] < angles[1] < angles_sorted[1] angles_sorted = angles_sorted[::1 - int(not reverse) * 2] result = {'center': center[:3 - is_2D], 'radius': radius, 'normal': plane_normal, 'span': angle, 'angles': angles_sorted} return result
def do_rewind(self, line): """ rewind """ self.print_response("Rewinding from frame %s to 0" % self.bot._frame) self.bot._frame = 0
def function[do_rewind, parameter[self, line]]: constant[ rewind ] call[name[self].print_response, parameter[binary_operation[constant[Rewinding from frame %s to 0] <ast.Mod object at 0x7da2590d6920> name[self].bot._frame]]] name[self].bot._frame assign[=] constant[0]
keyword[def] identifier[do_rewind] ( identifier[self] , identifier[line] ): literal[string] identifier[self] . identifier[print_response] ( literal[string] % identifier[self] . identifier[bot] . identifier[_frame] ) identifier[self] . identifier[bot] . identifier[_frame] = literal[int]
def do_rewind(self, line): """ rewind """ self.print_response('Rewinding from frame %s to 0' % self.bot._frame) self.bot._frame = 0
def prime_generator(p_min=2, p_max=None): """ Generator of prime numbers using the sieve of Eratosthenes. Args: p_min (int): prime numbers lower than p_min will not be in the resulting primes p_max (int): the generator will stop when this value is reached, it means that there will be no prime bigger than this number in the resulting primes. If p_max is None, there will not be any upper limit Returns: A generator of all the consecutive primes between p_min and p_max Raises: TypeError: if p_min or p_max is not an integer """ if not isinstance(p_min, int): raise TypeError("Expecting an integer") if p_max is not None and not isinstance(p_max, int): raise TypeError("Expecting an integer") q = max(p_min, 3) if q % 2 == 0: q += 1 if p_min <= 2 and (p_max is None or p_max >= 2): yield 2 # outside the while block to make the double increment optimization work while p_max is None or q <= p_max: if is_prime(q): yield q q += 2
def function[prime_generator, parameter[p_min, p_max]]: constant[ Generator of prime numbers using the sieve of Eratosthenes. Args: p_min (int): prime numbers lower than p_min will not be in the resulting primes p_max (int): the generator will stop when this value is reached, it means that there will be no prime bigger than this number in the resulting primes. If p_max is None, there will not be any upper limit Returns: A generator of all the consecutive primes between p_min and p_max Raises: TypeError: if p_min or p_max is not an integer ] if <ast.UnaryOp object at 0x7da18fe92a10> begin[:] <ast.Raise object at 0x7da18fe91ed0> if <ast.BoolOp object at 0x7da18fe91e10> begin[:] <ast.Raise object at 0x7da18fe904f0> variable[q] assign[=] call[name[max], parameter[name[p_min], constant[3]]] if compare[binary_operation[name[q] <ast.Mod object at 0x7da2590d6920> constant[2]] equal[==] constant[0]] begin[:] <ast.AugAssign object at 0x7da18fe91510> if <ast.BoolOp object at 0x7da18fe938b0> begin[:] <ast.Yield object at 0x7da18fe90ee0> while <ast.BoolOp object at 0x7da18fe925c0> begin[:] if call[name[is_prime], parameter[name[q]]] begin[:] <ast.Yield object at 0x7da18fe911e0> <ast.AugAssign object at 0x7da18fe93430>
keyword[def] identifier[prime_generator] ( identifier[p_min] = literal[int] , identifier[p_max] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[isinstance] ( identifier[p_min] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) keyword[if] identifier[p_max] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[isinstance] ( identifier[p_max] , identifier[int] ): keyword[raise] identifier[TypeError] ( literal[string] ) identifier[q] = identifier[max] ( identifier[p_min] , literal[int] ) keyword[if] identifier[q] % literal[int] == literal[int] : identifier[q] += literal[int] keyword[if] identifier[p_min] <= literal[int] keyword[and] ( identifier[p_max] keyword[is] keyword[None] keyword[or] identifier[p_max] >= literal[int] ): keyword[yield] literal[int] keyword[while] identifier[p_max] keyword[is] keyword[None] keyword[or] identifier[q] <= identifier[p_max] : keyword[if] identifier[is_prime] ( identifier[q] ): keyword[yield] identifier[q] identifier[q] += literal[int]
def prime_generator(p_min=2, p_max=None): """ Generator of prime numbers using the sieve of Eratosthenes. Args: p_min (int): prime numbers lower than p_min will not be in the resulting primes p_max (int): the generator will stop when this value is reached, it means that there will be no prime bigger than this number in the resulting primes. If p_max is None, there will not be any upper limit Returns: A generator of all the consecutive primes between p_min and p_max Raises: TypeError: if p_min or p_max is not an integer """ if not isinstance(p_min, int): raise TypeError('Expecting an integer') # depends on [control=['if'], data=[]] if p_max is not None and (not isinstance(p_max, int)): raise TypeError('Expecting an integer') # depends on [control=['if'], data=[]] q = max(p_min, 3) if q % 2 == 0: q += 1 # depends on [control=['if'], data=[]] if p_min <= 2 and (p_max is None or p_max >= 2): yield 2 # outside the while block to make the double increment optimization work # depends on [control=['if'], data=[]] while p_max is None or q <= p_max: if is_prime(q): yield q # depends on [control=['if'], data=[]] q += 2 # depends on [control=['while'], data=[]]
def write_inj_snrs(page, ifos, injList, grbtag): """ Write injection chisq plots to markup.page object page """ if injList: th = ['']+injList + ['OFFSOURCE'] else: th= ['','OFFSOURCE'] injList = ['OFFSOURCE'] td = [] ifos = [ifos[i:i+2] for i in range(0, len(ifos), 2)] plots = ['null_stat2']+['%s_snr' % ifo for ifo in ifos] for row in plots: pTag = row.replace('_',' ').title() d = [pTag] for inj in injList + ['OFFSOURCE']: plot = markup.page() p = "%s/plots_clustered/GRB%s_%s_vs_snr_zoom.png" % (inj, grbtag, row) plot.a(href=p, title="%s %s versus SNR" % (inj, pTag)) plot.img(src=p) plot.a.close() d.append(plot()) td.append(d) page = write_table(page, th, td) return page
def function[write_inj_snrs, parameter[page, ifos, injList, grbtag]]: constant[ Write injection chisq plots to markup.page object page ] if name[injList] begin[:] variable[th] assign[=] binary_operation[binary_operation[list[[<ast.Constant object at 0x7da20c76f820>]] + name[injList]] + list[[<ast.Constant object at 0x7da20c76c700>]]] variable[td] assign[=] list[[]] variable[ifos] assign[=] <ast.ListComp object at 0x7da20c76e050> variable[plots] assign[=] binary_operation[list[[<ast.Constant object at 0x7da20c6ab4c0>]] + <ast.ListComp object at 0x7da20c6a9a50>] for taget[name[row]] in starred[name[plots]] begin[:] variable[pTag] assign[=] call[call[name[row].replace, parameter[constant[_], constant[ ]]].title, parameter[]] variable[d] assign[=] list[[<ast.Name object at 0x7da20c6aa350>]] for taget[name[inj]] in starred[binary_operation[name[injList] + list[[<ast.Constant object at 0x7da2054a56c0>]]]] begin[:] variable[plot] assign[=] call[name[markup].page, parameter[]] variable[p] assign[=] binary_operation[constant[%s/plots_clustered/GRB%s_%s_vs_snr_zoom.png] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da2044c14b0>, <ast.Name object at 0x7da2044c25f0>, <ast.Name object at 0x7da2044c1e40>]]] call[name[plot].a, parameter[]] call[name[plot].img, parameter[]] call[name[plot].a.close, parameter[]] call[name[d].append, parameter[call[name[plot], parameter[]]]] call[name[td].append, parameter[name[d]]] variable[page] assign[=] call[name[write_table], parameter[name[page], name[th], name[td]]] return[name[page]]
keyword[def] identifier[write_inj_snrs] ( identifier[page] , identifier[ifos] , identifier[injList] , identifier[grbtag] ): literal[string] keyword[if] identifier[injList] : identifier[th] =[ literal[string] ]+ identifier[injList] +[ literal[string] ] keyword[else] : identifier[th] =[ literal[string] , literal[string] ] identifier[injList] =[ literal[string] ] identifier[td] =[] identifier[ifos] =[ identifier[ifos] [ identifier[i] : identifier[i] + literal[int] ] keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[len] ( identifier[ifos] ), literal[int] )] identifier[plots] =[ literal[string] ]+[ literal[string] % identifier[ifo] keyword[for] identifier[ifo] keyword[in] identifier[ifos] ] keyword[for] identifier[row] keyword[in] identifier[plots] : identifier[pTag] = identifier[row] . identifier[replace] ( literal[string] , literal[string] ). identifier[title] () identifier[d] =[ identifier[pTag] ] keyword[for] identifier[inj] keyword[in] identifier[injList] +[ literal[string] ]: identifier[plot] = identifier[markup] . identifier[page] () identifier[p] = literal[string] %( identifier[inj] , identifier[grbtag] , identifier[row] ) identifier[plot] . identifier[a] ( identifier[href] = identifier[p] , identifier[title] = literal[string] %( identifier[inj] , identifier[pTag] )) identifier[plot] . identifier[img] ( identifier[src] = identifier[p] ) identifier[plot] . identifier[a] . identifier[close] () identifier[d] . identifier[append] ( identifier[plot] ()) identifier[td] . identifier[append] ( identifier[d] ) identifier[page] = identifier[write_table] ( identifier[page] , identifier[th] , identifier[td] ) keyword[return] identifier[page]
def write_inj_snrs(page, ifos, injList, grbtag): """ Write injection chisq plots to markup.page object page """ if injList: th = [''] + injList + ['OFFSOURCE'] # depends on [control=['if'], data=[]] else: th = ['', 'OFFSOURCE'] injList = ['OFFSOURCE'] td = [] ifos = [ifos[i:i + 2] for i in range(0, len(ifos), 2)] plots = ['null_stat2'] + ['%s_snr' % ifo for ifo in ifos] for row in plots: pTag = row.replace('_', ' ').title() d = [pTag] for inj in injList + ['OFFSOURCE']: plot = markup.page() p = '%s/plots_clustered/GRB%s_%s_vs_snr_zoom.png' % (inj, grbtag, row) plot.a(href=p, title='%s %s versus SNR' % (inj, pTag)) plot.img(src=p) plot.a.close() d.append(plot()) # depends on [control=['for'], data=['inj']] td.append(d) # depends on [control=['for'], data=['row']] page = write_table(page, th, td) return page
def fitNullTraitByTrait(self, verbose=False, cache=False, out_dir='./cache', fname=None, rewrite=False): """ Fit null model trait by trait """ read_from_file = False if cache: assert fname is not None, 'MultiTraitSetTest:: specify fname' if not os.path.exists(out_dir): os.makedirs(out_dir) out_file = os.path.join(out_dir,fname) read_from_file = os.path.exists(out_file) and not rewrite RV = {} if read_from_file: f = h5py.File(out_file,'r') for p in range(self.P): trait_id = self.traitID[p] g = f[trait_id] RV[trait_id] = {} for key in list(g.keys()): RV[trait_id][key] = g[key][:] f.close() self.nullST=RV else: """ create stSet and fit null column by column returns all info """ if self.stSet is None: y = sp.zeros((self.N,1)) self.stSet = MTSet(Y=y, S_R=self.S_R, U_R=self.U_R, F=self.F) RV = {} for p in range(self.P): trait_id = self.traitID[p] self.stSet.Y = self.Y[:,p:p+1] RV[trait_id] = self.stSet.fitNull() self.nullST = RV if cache: f = h5py.File(out_file,'w') smartDumpDictHdf5(RV,f) f.close() return RV
def function[fitNullTraitByTrait, parameter[self, verbose, cache, out_dir, fname, rewrite]]: constant[ Fit null model trait by trait ] variable[read_from_file] assign[=] constant[False] if name[cache] begin[:] assert[compare[name[fname] is_not constant[None]]] if <ast.UnaryOp object at 0x7da1b26ad510> begin[:] call[name[os].makedirs, parameter[name[out_dir]]] variable[out_file] assign[=] call[name[os].path.join, parameter[name[out_dir], name[fname]]] variable[read_from_file] assign[=] <ast.BoolOp object at 0x7da1b26ae3e0> variable[RV] assign[=] dictionary[[], []] if name[read_from_file] begin[:] variable[f] assign[=] call[name[h5py].File, parameter[name[out_file], constant[r]]] for taget[name[p]] in starred[call[name[range], parameter[name[self].P]]] begin[:] variable[trait_id] assign[=] call[name[self].traitID][name[p]] variable[g] assign[=] call[name[f]][name[trait_id]] call[name[RV]][name[trait_id]] assign[=] dictionary[[], []] for taget[name[key]] in starred[call[name[list], parameter[call[name[g].keys, parameter[]]]]] begin[:] call[call[name[RV]][name[trait_id]]][name[key]] assign[=] call[call[name[g]][name[key]]][<ast.Slice object at 0x7da1b26ac940>] call[name[f].close, parameter[]] name[self].nullST assign[=] name[RV] return[name[RV]]
keyword[def] identifier[fitNullTraitByTrait] ( identifier[self] , identifier[verbose] = keyword[False] , identifier[cache] = keyword[False] , identifier[out_dir] = literal[string] , identifier[fname] = keyword[None] , identifier[rewrite] = keyword[False] ): literal[string] identifier[read_from_file] = keyword[False] keyword[if] identifier[cache] : keyword[assert] identifier[fname] keyword[is] keyword[not] keyword[None] , literal[string] keyword[if] keyword[not] identifier[os] . identifier[path] . identifier[exists] ( identifier[out_dir] ): identifier[os] . identifier[makedirs] ( identifier[out_dir] ) identifier[out_file] = identifier[os] . identifier[path] . identifier[join] ( identifier[out_dir] , identifier[fname] ) identifier[read_from_file] = identifier[os] . identifier[path] . identifier[exists] ( identifier[out_file] ) keyword[and] keyword[not] identifier[rewrite] identifier[RV] ={} keyword[if] identifier[read_from_file] : identifier[f] = identifier[h5py] . identifier[File] ( identifier[out_file] , literal[string] ) keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[self] . identifier[P] ): identifier[trait_id] = identifier[self] . identifier[traitID] [ identifier[p] ] identifier[g] = identifier[f] [ identifier[trait_id] ] identifier[RV] [ identifier[trait_id] ]={} keyword[for] identifier[key] keyword[in] identifier[list] ( identifier[g] . identifier[keys] ()): identifier[RV] [ identifier[trait_id] ][ identifier[key] ]= identifier[g] [ identifier[key] ][:] identifier[f] . identifier[close] () identifier[self] . identifier[nullST] = identifier[RV] keyword[else] : literal[string] keyword[if] identifier[self] . identifier[stSet] keyword[is] keyword[None] : identifier[y] = identifier[sp] . identifier[zeros] (( identifier[self] . identifier[N] , literal[int] )) identifier[self] . identifier[stSet] = identifier[MTSet] ( identifier[Y] = identifier[y] , identifier[S_R] = identifier[self] . identifier[S_R] , identifier[U_R] = identifier[self] . identifier[U_R] , identifier[F] = identifier[self] . identifier[F] ) identifier[RV] ={} keyword[for] identifier[p] keyword[in] identifier[range] ( identifier[self] . identifier[P] ): identifier[trait_id] = identifier[self] . identifier[traitID] [ identifier[p] ] identifier[self] . identifier[stSet] . identifier[Y] = identifier[self] . identifier[Y] [:, identifier[p] : identifier[p] + literal[int] ] identifier[RV] [ identifier[trait_id] ]= identifier[self] . identifier[stSet] . identifier[fitNull] () identifier[self] . identifier[nullST] = identifier[RV] keyword[if] identifier[cache] : identifier[f] = identifier[h5py] . identifier[File] ( identifier[out_file] , literal[string] ) identifier[smartDumpDictHdf5] ( identifier[RV] , identifier[f] ) identifier[f] . identifier[close] () keyword[return] identifier[RV]
def fitNullTraitByTrait(self, verbose=False, cache=False, out_dir='./cache', fname=None, rewrite=False): """ Fit null model trait by trait """ read_from_file = False if cache: assert fname is not None, 'MultiTraitSetTest:: specify fname' if not os.path.exists(out_dir): os.makedirs(out_dir) # depends on [control=['if'], data=[]] out_file = os.path.join(out_dir, fname) read_from_file = os.path.exists(out_file) and (not rewrite) # depends on [control=['if'], data=[]] RV = {} if read_from_file: f = h5py.File(out_file, 'r') for p in range(self.P): trait_id = self.traitID[p] g = f[trait_id] RV[trait_id] = {} for key in list(g.keys()): RV[trait_id][key] = g[key][:] # depends on [control=['for'], data=['key']] # depends on [control=['for'], data=['p']] f.close() self.nullST = RV # depends on [control=['if'], data=[]] else: ' create stSet and fit null column by column returns all info ' if self.stSet is None: y = sp.zeros((self.N, 1)) self.stSet = MTSet(Y=y, S_R=self.S_R, U_R=self.U_R, F=self.F) # depends on [control=['if'], data=[]] RV = {} for p in range(self.P): trait_id = self.traitID[p] self.stSet.Y = self.Y[:, p:p + 1] RV[trait_id] = self.stSet.fitNull() # depends on [control=['for'], data=['p']] self.nullST = RV if cache: f = h5py.File(out_file, 'w') smartDumpDictHdf5(RV, f) f.close() # depends on [control=['if'], data=[]] return RV
def lists(self, uid=0, **kwargs): """ Returns a list of :class:`List` objects (lists which Contact belongs to) and a pager dict. :Example: lists, pager = client.contacts.lists(uid=1901010) :param int uid: The unique id of the Contact to update. Required. :param int page: Fetch specified results page. Default=1 :param int limit: How many results on page. Default=10 """ lists = Lists(self.base_uri, self.auth) return self.get_subresource_instances(uid, instance=lists, resource="lists", params=kwargs)
def function[lists, parameter[self, uid]]: constant[ Returns a list of :class:`List` objects (lists which Contact belongs to) and a pager dict. :Example: lists, pager = client.contacts.lists(uid=1901010) :param int uid: The unique id of the Contact to update. Required. :param int page: Fetch specified results page. Default=1 :param int limit: How many results on page. Default=10 ] variable[lists] assign[=] call[name[Lists], parameter[name[self].base_uri, name[self].auth]] return[call[name[self].get_subresource_instances, parameter[name[uid]]]]
keyword[def] identifier[lists] ( identifier[self] , identifier[uid] = literal[int] ,** identifier[kwargs] ): literal[string] identifier[lists] = identifier[Lists] ( identifier[self] . identifier[base_uri] , identifier[self] . identifier[auth] ) keyword[return] identifier[self] . identifier[get_subresource_instances] ( identifier[uid] , identifier[instance] = identifier[lists] , identifier[resource] = literal[string] , identifier[params] = identifier[kwargs] )
def lists(self, uid=0, **kwargs): """ Returns a list of :class:`List` objects (lists which Contact belongs to) and a pager dict. :Example: lists, pager = client.contacts.lists(uid=1901010) :param int uid: The unique id of the Contact to update. Required. :param int page: Fetch specified results page. Default=1 :param int limit: How many results on page. Default=10 """ lists = Lists(self.base_uri, self.auth) return self.get_subresource_instances(uid, instance=lists, resource='lists', params=kwargs)
def SetWindowLong(handle: int, index: int, value: int) -> int: """ SetWindowLong from Win32. handle: int, the handle of a native window. index: int. value: int. Return int, the previous value before set. """ return ctypes.windll.user32.SetWindowLongW(ctypes.c_void_p(handle), index, value)
def function[SetWindowLong, parameter[handle, index, value]]: constant[ SetWindowLong from Win32. handle: int, the handle of a native window. index: int. value: int. Return int, the previous value before set. ] return[call[name[ctypes].windll.user32.SetWindowLongW, parameter[call[name[ctypes].c_void_p, parameter[name[handle]]], name[index], name[value]]]]
keyword[def] identifier[SetWindowLong] ( identifier[handle] : identifier[int] , identifier[index] : identifier[int] , identifier[value] : identifier[int] )-> identifier[int] : literal[string] keyword[return] identifier[ctypes] . identifier[windll] . identifier[user32] . identifier[SetWindowLongW] ( identifier[ctypes] . identifier[c_void_p] ( identifier[handle] ), identifier[index] , identifier[value] )
def SetWindowLong(handle: int, index: int, value: int) -> int: """ SetWindowLong from Win32. handle: int, the handle of a native window. index: int. value: int. Return int, the previous value before set. """ return ctypes.windll.user32.SetWindowLongW(ctypes.c_void_p(handle), index, value)
def get_uninvoiced_hours(entries, billable=None): """Given an iterable of entries, return the total hours that have not been invoiced. If billable is passed as 'billable' or 'nonbillable', limit to the corresponding entries. """ statuses = ('invoiced', 'not-invoiced') if billable is not None: billable = (billable.lower() == u'billable') entries = [e for e in entries if e.activity.billable == billable] hours = sum([e.hours for e in entries if e.status not in statuses]) return '{0:.2f}'.format(hours)
def function[get_uninvoiced_hours, parameter[entries, billable]]: constant[Given an iterable of entries, return the total hours that have not been invoiced. If billable is passed as 'billable' or 'nonbillable', limit to the corresponding entries. ] variable[statuses] assign[=] tuple[[<ast.Constant object at 0x7da1b10434f0>, <ast.Constant object at 0x7da1b1042fe0>]] if compare[name[billable] is_not constant[None]] begin[:] variable[billable] assign[=] compare[call[name[billable].lower, parameter[]] equal[==] constant[billable]] variable[entries] assign[=] <ast.ListComp object at 0x7da1b103aef0> variable[hours] assign[=] call[name[sum], parameter[<ast.ListComp object at 0x7da1b10391e0>]] return[call[constant[{0:.2f}].format, parameter[name[hours]]]]
keyword[def] identifier[get_uninvoiced_hours] ( identifier[entries] , identifier[billable] = keyword[None] ): literal[string] identifier[statuses] =( literal[string] , literal[string] ) keyword[if] identifier[billable] keyword[is] keyword[not] keyword[None] : identifier[billable] =( identifier[billable] . identifier[lower] ()== literal[string] ) identifier[entries] =[ identifier[e] keyword[for] identifier[e] keyword[in] identifier[entries] keyword[if] identifier[e] . identifier[activity] . identifier[billable] == identifier[billable] ] identifier[hours] = identifier[sum] ([ identifier[e] . identifier[hours] keyword[for] identifier[e] keyword[in] identifier[entries] keyword[if] identifier[e] . identifier[status] keyword[not] keyword[in] identifier[statuses] ]) keyword[return] literal[string] . identifier[format] ( identifier[hours] )
def get_uninvoiced_hours(entries, billable=None): """Given an iterable of entries, return the total hours that have not been invoiced. If billable is passed as 'billable' or 'nonbillable', limit to the corresponding entries. """ statuses = ('invoiced', 'not-invoiced') if billable is not None: billable = billable.lower() == u'billable' entries = [e for e in entries if e.activity.billable == billable] # depends on [control=['if'], data=['billable']] hours = sum([e.hours for e in entries if e.status not in statuses]) return '{0:.2f}'.format(hours)
def load_domain_config(self, loaded_config): ''' Loads the domain_config and sets up queue_dict @param loaded_config: the yaml loaded config dict from zookeeper ''' self.domain_config = {} # vetting process to ensure correct configs if loaded_config: if 'domains' in loaded_config: for domain in loaded_config['domains']: item = loaded_config['domains'][domain] # check valid if 'window' in item and 'hits' in item: self.logger.debug("Added domain {dom} to loaded config" .format(dom=domain)) self.domain_config[domain] = item if 'blacklist' in loaded_config: self.black_domains = loaded_config['blacklist'] self.config_flag = True
def function[load_domain_config, parameter[self, loaded_config]]: constant[ Loads the domain_config and sets up queue_dict @param loaded_config: the yaml loaded config dict from zookeeper ] name[self].domain_config assign[=] dictionary[[], []] if name[loaded_config] begin[:] if compare[constant[domains] in name[loaded_config]] begin[:] for taget[name[domain]] in starred[call[name[loaded_config]][constant[domains]]] begin[:] variable[item] assign[=] call[call[name[loaded_config]][constant[domains]]][name[domain]] if <ast.BoolOp object at 0x7da1b192ee60> begin[:] call[name[self].logger.debug, parameter[call[constant[Added domain {dom} to loaded config].format, parameter[]]]] call[name[self].domain_config][name[domain]] assign[=] name[item] if compare[constant[blacklist] in name[loaded_config]] begin[:] name[self].black_domains assign[=] call[name[loaded_config]][constant[blacklist]] name[self].config_flag assign[=] constant[True]
keyword[def] identifier[load_domain_config] ( identifier[self] , identifier[loaded_config] ): literal[string] identifier[self] . identifier[domain_config] ={} keyword[if] identifier[loaded_config] : keyword[if] literal[string] keyword[in] identifier[loaded_config] : keyword[for] identifier[domain] keyword[in] identifier[loaded_config] [ literal[string] ]: identifier[item] = identifier[loaded_config] [ literal[string] ][ identifier[domain] ] keyword[if] literal[string] keyword[in] identifier[item] keyword[and] literal[string] keyword[in] identifier[item] : identifier[self] . identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[dom] = identifier[domain] )) identifier[self] . identifier[domain_config] [ identifier[domain] ]= identifier[item] keyword[if] literal[string] keyword[in] identifier[loaded_config] : identifier[self] . identifier[black_domains] = identifier[loaded_config] [ literal[string] ] identifier[self] . identifier[config_flag] = keyword[True]
def load_domain_config(self, loaded_config): """ Loads the domain_config and sets up queue_dict @param loaded_config: the yaml loaded config dict from zookeeper """ self.domain_config = {} # vetting process to ensure correct configs if loaded_config: if 'domains' in loaded_config: for domain in loaded_config['domains']: item = loaded_config['domains'][domain] # check valid if 'window' in item and 'hits' in item: self.logger.debug('Added domain {dom} to loaded config'.format(dom=domain)) self.domain_config[domain] = item # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['domain']] # depends on [control=['if'], data=['loaded_config']] if 'blacklist' in loaded_config: self.black_domains = loaded_config['blacklist'] # depends on [control=['if'], data=['loaded_config']] # depends on [control=['if'], data=[]] self.config_flag = True
def sphere_analytical_gaussian(dr, a, alpha=0.2765): """ Analytically calculate the sphere's functional form by convolving the Heavyside function with first order approximation to the sinc, a Gaussian. The alpha parameters controls the width of the approximation -- should be 1, but is fit to be roughly 0.2765 """ term1 = 0.5*(erf((dr+2*a)/(alpha*np.sqrt(2))) + erf(-dr/(alpha*np.sqrt(2)))) term2 = np.sqrt(0.5/np.pi)*(alpha/(dr+a+1e-10)) * ( np.exp(-0.5*dr**2/alpha**2) - np.exp(-0.5*(dr+2*a)**2/alpha**2) ) return term1 - term2
def function[sphere_analytical_gaussian, parameter[dr, a, alpha]]: constant[ Analytically calculate the sphere's functional form by convolving the Heavyside function with first order approximation to the sinc, a Gaussian. The alpha parameters controls the width of the approximation -- should be 1, but is fit to be roughly 0.2765 ] variable[term1] assign[=] binary_operation[constant[0.5] * binary_operation[call[name[erf], parameter[binary_operation[binary_operation[name[dr] + binary_operation[constant[2] * name[a]]] / binary_operation[name[alpha] * call[name[np].sqrt, parameter[constant[2]]]]]]] + call[name[erf], parameter[binary_operation[<ast.UnaryOp object at 0x7da20c6e76a0> / binary_operation[name[alpha] * call[name[np].sqrt, parameter[constant[2]]]]]]]]] variable[term2] assign[=] binary_operation[binary_operation[call[name[np].sqrt, parameter[binary_operation[constant[0.5] / name[np].pi]]] * binary_operation[name[alpha] / binary_operation[binary_operation[name[dr] + name[a]] + constant[1e-10]]]] * binary_operation[call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da20c6e4940> * binary_operation[name[dr] ** constant[2]]] / binary_operation[name[alpha] ** constant[2]]]]] - call[name[np].exp, parameter[binary_operation[binary_operation[<ast.UnaryOp object at 0x7da18f722530> * binary_operation[binary_operation[name[dr] + binary_operation[constant[2] * name[a]]] ** constant[2]]] / binary_operation[name[alpha] ** constant[2]]]]]]] return[binary_operation[name[term1] - name[term2]]]
keyword[def] identifier[sphere_analytical_gaussian] ( identifier[dr] , identifier[a] , identifier[alpha] = literal[int] ): literal[string] identifier[term1] = literal[int] *( identifier[erf] (( identifier[dr] + literal[int] * identifier[a] )/( identifier[alpha] * identifier[np] . identifier[sqrt] ( literal[int] )))+ identifier[erf] (- identifier[dr] /( identifier[alpha] * identifier[np] . identifier[sqrt] ( literal[int] )))) identifier[term2] = identifier[np] . identifier[sqrt] ( literal[int] / identifier[np] . identifier[pi] )*( identifier[alpha] /( identifier[dr] + identifier[a] + literal[int] ))*( identifier[np] . identifier[exp] (- literal[int] * identifier[dr] ** literal[int] / identifier[alpha] ** literal[int] )- identifier[np] . identifier[exp] (- literal[int] *( identifier[dr] + literal[int] * identifier[a] )** literal[int] / identifier[alpha] ** literal[int] ) ) keyword[return] identifier[term1] - identifier[term2]
def sphere_analytical_gaussian(dr, a, alpha=0.2765): """ Analytically calculate the sphere's functional form by convolving the Heavyside function with first order approximation to the sinc, a Gaussian. The alpha parameters controls the width of the approximation -- should be 1, but is fit to be roughly 0.2765 """ term1 = 0.5 * (erf((dr + 2 * a) / (alpha * np.sqrt(2))) + erf(-dr / (alpha * np.sqrt(2)))) term2 = np.sqrt(0.5 / np.pi) * (alpha / (dr + a + 1e-10)) * (np.exp(-0.5 * dr ** 2 / alpha ** 2) - np.exp(-0.5 * (dr + 2 * a) ** 2 / alpha ** 2)) return term1 - term2
def get_assign_annotation(node): """Get the type annotation of the assignment of the given node. :param node: The node to get the annotation for. :type node: astroid.nodes.Assign or astroid.nodes.AnnAssign :returns: The type annotation as a string, or None if one does not exist. :type: str or None """ annotation = None annotation_node = None try: annotation_node = node.annotation except AttributeError: # Python 2 has no support for type annotations, so use getattr annotation_node = getattr(node, "type_annotation", None) if annotation_node: if isinstance(annotation_node, astroid.nodes.Const): annotation = node.value else: annotation = annotation_node.as_string() return annotation
def function[get_assign_annotation, parameter[node]]: constant[Get the type annotation of the assignment of the given node. :param node: The node to get the annotation for. :type node: astroid.nodes.Assign or astroid.nodes.AnnAssign :returns: The type annotation as a string, or None if one does not exist. :type: str or None ] variable[annotation] assign[=] constant[None] variable[annotation_node] assign[=] constant[None] <ast.Try object at 0x7da1b07232e0> if name[annotation_node] begin[:] if call[name[isinstance], parameter[name[annotation_node], name[astroid].nodes.Const]] begin[:] variable[annotation] assign[=] name[node].value return[name[annotation]]
keyword[def] identifier[get_assign_annotation] ( identifier[node] ): literal[string] identifier[annotation] = keyword[None] identifier[annotation_node] = keyword[None] keyword[try] : identifier[annotation_node] = identifier[node] . identifier[annotation] keyword[except] identifier[AttributeError] : identifier[annotation_node] = identifier[getattr] ( identifier[node] , literal[string] , keyword[None] ) keyword[if] identifier[annotation_node] : keyword[if] identifier[isinstance] ( identifier[annotation_node] , identifier[astroid] . identifier[nodes] . identifier[Const] ): identifier[annotation] = identifier[node] . identifier[value] keyword[else] : identifier[annotation] = identifier[annotation_node] . identifier[as_string] () keyword[return] identifier[annotation]
def get_assign_annotation(node): """Get the type annotation of the assignment of the given node. :param node: The node to get the annotation for. :type node: astroid.nodes.Assign or astroid.nodes.AnnAssign :returns: The type annotation as a string, or None if one does not exist. :type: str or None """ annotation = None annotation_node = None try: annotation_node = node.annotation # depends on [control=['try'], data=[]] except AttributeError: # Python 2 has no support for type annotations, so use getattr annotation_node = getattr(node, 'type_annotation', None) # depends on [control=['except'], data=[]] if annotation_node: if isinstance(annotation_node, astroid.nodes.Const): annotation = node.value # depends on [control=['if'], data=[]] else: annotation = annotation_node.as_string() # depends on [control=['if'], data=[]] return annotation
def local_path(path): """ Return the absolute path relative to the root of this project """ current = os.path.dirname(__file__) root = current return os.path.abspath(os.path.join(root, path))
def function[local_path, parameter[path]]: constant[ Return the absolute path relative to the root of this project ] variable[current] assign[=] call[name[os].path.dirname, parameter[name[__file__]]] variable[root] assign[=] name[current] return[call[name[os].path.abspath, parameter[call[name[os].path.join, parameter[name[root], name[path]]]]]]
keyword[def] identifier[local_path] ( identifier[path] ): literal[string] identifier[current] = identifier[os] . identifier[path] . identifier[dirname] ( identifier[__file__] ) identifier[root] = identifier[current] keyword[return] identifier[os] . identifier[path] . identifier[abspath] ( identifier[os] . identifier[path] . identifier[join] ( identifier[root] , identifier[path] ))
def local_path(path): """ Return the absolute path relative to the root of this project """ current = os.path.dirname(__file__) root = current return os.path.abspath(os.path.join(root, path))
def save(self, filename, metadata={}, **data): """ The implementation in the base class simply checks there is no clash between the metadata and data keys. """ intersection = set(metadata.keys()) & set(data.keys()) if intersection: msg = 'Key(s) overlap between data and metadata: %s' raise Exception(msg % ','.join(intersection))
def function[save, parameter[self, filename, metadata]]: constant[ The implementation in the base class simply checks there is no clash between the metadata and data keys. ] variable[intersection] assign[=] binary_operation[call[name[set], parameter[call[name[metadata].keys, parameter[]]]] <ast.BitAnd object at 0x7da2590d6b60> call[name[set], parameter[call[name[data].keys, parameter[]]]]] if name[intersection] begin[:] variable[msg] assign[=] constant[Key(s) overlap between data and metadata: %s] <ast.Raise object at 0x7da1afe07670>
keyword[def] identifier[save] ( identifier[self] , identifier[filename] , identifier[metadata] ={},** identifier[data] ): literal[string] identifier[intersection] = identifier[set] ( identifier[metadata] . identifier[keys] ())& identifier[set] ( identifier[data] . identifier[keys] ()) keyword[if] identifier[intersection] : identifier[msg] = literal[string] keyword[raise] identifier[Exception] ( identifier[msg] % literal[string] . identifier[join] ( identifier[intersection] ))
def save(self, filename, metadata={}, **data): """ The implementation in the base class simply checks there is no clash between the metadata and data keys. """ intersection = set(metadata.keys()) & set(data.keys()) if intersection: msg = 'Key(s) overlap between data and metadata: %s' raise Exception(msg % ','.join(intersection)) # depends on [control=['if'], data=[]]
def setDisabledBorderColor( self ): """ Returns the base color for this node. :return <QColor> """ color = QColor(color) if self._palette is None: self._palette = XNodePalette(self._scenePalette) self._palette.setColor(self._palette.Disabled, self._palette.NodeBorder, color) self.setDirty()
def function[setDisabledBorderColor, parameter[self]]: constant[ Returns the base color for this node. :return <QColor> ] variable[color] assign[=] call[name[QColor], parameter[name[color]]] if compare[name[self]._palette is constant[None]] begin[:] name[self]._palette assign[=] call[name[XNodePalette], parameter[name[self]._scenePalette]] call[name[self]._palette.setColor, parameter[name[self]._palette.Disabled, name[self]._palette.NodeBorder, name[color]]] call[name[self].setDirty, parameter[]]
keyword[def] identifier[setDisabledBorderColor] ( identifier[self] ): literal[string] identifier[color] = identifier[QColor] ( identifier[color] ) keyword[if] identifier[self] . identifier[_palette] keyword[is] keyword[None] : identifier[self] . identifier[_palette] = identifier[XNodePalette] ( identifier[self] . identifier[_scenePalette] ) identifier[self] . identifier[_palette] . identifier[setColor] ( identifier[self] . identifier[_palette] . identifier[Disabled] , identifier[self] . identifier[_palette] . identifier[NodeBorder] , identifier[color] ) identifier[self] . identifier[setDirty] ()
def setDisabledBorderColor(self): """ Returns the base color for this node. :return <QColor> """ color = QColor(color) if self._palette is None: self._palette = XNodePalette(self._scenePalette) # depends on [control=['if'], data=[]] self._palette.setColor(self._palette.Disabled, self._palette.NodeBorder, color) self.setDirty()
def add_reshape(self, name, input_name, output_name, target_shape, mode): """ Add a reshape layer. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. target_shape: tuple Shape of the output blob. The product of target_shape must be equal to the shape of the input blob. Can be either length 3 (C,H,W) or length 4 (Seq,C,H,W). mode: int - If mode == 0, the reshape layer is in CHANNEL_FIRST mode. - If mode == 1, the reshape layer is in CHANNEL_LAST mode. input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. See Also -------- add_flatten, add_permute """ spec = self.spec nn_spec = self.nn_spec # Add a new layer spec_layer = nn_spec.layers.add() spec_layer.name = name spec_layer.input.append(input_name) spec_layer.output.append(output_name) spec_layer_params = spec_layer.reshape spec_layer_params.targetShape.extend(target_shape) if mode == 0: spec_layer_params.mode = \ _NeuralNetwork_pb2.ReshapeLayerParams.ReshapeOrder.Value('CHANNEL_FIRST') else: spec_layer_params.mode = \ _NeuralNetwork_pb2.ReshapeLayerParams.ReshapeOrder.Value('CHANNEL_LAST') if len(target_shape) != 4 and len(target_shape) != 3: raise ValueError("Length of the 'target-shape' parameter must be equal to 3 or 4")
def function[add_reshape, parameter[self, name, input_name, output_name, target_shape, mode]]: constant[ Add a reshape layer. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. target_shape: tuple Shape of the output blob. The product of target_shape must be equal to the shape of the input blob. Can be either length 3 (C,H,W) or length 4 (Seq,C,H,W). mode: int - If mode == 0, the reshape layer is in CHANNEL_FIRST mode. - If mode == 1, the reshape layer is in CHANNEL_LAST mode. input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. See Also -------- add_flatten, add_permute ] variable[spec] assign[=] name[self].spec variable[nn_spec] assign[=] name[self].nn_spec variable[spec_layer] assign[=] call[name[nn_spec].layers.add, parameter[]] name[spec_layer].name assign[=] name[name] call[name[spec_layer].input.append, parameter[name[input_name]]] call[name[spec_layer].output.append, parameter[name[output_name]]] variable[spec_layer_params] assign[=] name[spec_layer].reshape call[name[spec_layer_params].targetShape.extend, parameter[name[target_shape]]] if compare[name[mode] equal[==] constant[0]] begin[:] name[spec_layer_params].mode assign[=] call[name[_NeuralNetwork_pb2].ReshapeLayerParams.ReshapeOrder.Value, parameter[constant[CHANNEL_FIRST]]] if <ast.BoolOp object at 0x7da18bc73850> begin[:] <ast.Raise object at 0x7da18bc71ff0>
keyword[def] identifier[add_reshape] ( identifier[self] , identifier[name] , identifier[input_name] , identifier[output_name] , identifier[target_shape] , identifier[mode] ): literal[string] identifier[spec] = identifier[self] . identifier[spec] identifier[nn_spec] = identifier[self] . identifier[nn_spec] identifier[spec_layer] = identifier[nn_spec] . identifier[layers] . identifier[add] () identifier[spec_layer] . identifier[name] = identifier[name] identifier[spec_layer] . identifier[input] . identifier[append] ( identifier[input_name] ) identifier[spec_layer] . identifier[output] . identifier[append] ( identifier[output_name] ) identifier[spec_layer_params] = identifier[spec_layer] . identifier[reshape] identifier[spec_layer_params] . identifier[targetShape] . identifier[extend] ( identifier[target_shape] ) keyword[if] identifier[mode] == literal[int] : identifier[spec_layer_params] . identifier[mode] = identifier[_NeuralNetwork_pb2] . identifier[ReshapeLayerParams] . identifier[ReshapeOrder] . identifier[Value] ( literal[string] ) keyword[else] : identifier[spec_layer_params] . identifier[mode] = identifier[_NeuralNetwork_pb2] . identifier[ReshapeLayerParams] . identifier[ReshapeOrder] . identifier[Value] ( literal[string] ) keyword[if] identifier[len] ( identifier[target_shape] )!= literal[int] keyword[and] identifier[len] ( identifier[target_shape] )!= literal[int] : keyword[raise] identifier[ValueError] ( literal[string] )
def add_reshape(self, name, input_name, output_name, target_shape, mode): """ Add a reshape layer. Kindly refer to NeuralNetwork.proto for details. Parameters ---------- name: str The name of this layer. target_shape: tuple Shape of the output blob. The product of target_shape must be equal to the shape of the input blob. Can be either length 3 (C,H,W) or length 4 (Seq,C,H,W). mode: int - If mode == 0, the reshape layer is in CHANNEL_FIRST mode. - If mode == 1, the reshape layer is in CHANNEL_LAST mode. input_name: str The input blob name of this layer. output_name: str The output blob name of this layer. See Also -------- add_flatten, add_permute """ spec = self.spec nn_spec = self.nn_spec # Add a new layer spec_layer = nn_spec.layers.add() spec_layer.name = name spec_layer.input.append(input_name) spec_layer.output.append(output_name) spec_layer_params = spec_layer.reshape spec_layer_params.targetShape.extend(target_shape) if mode == 0: spec_layer_params.mode = _NeuralNetwork_pb2.ReshapeLayerParams.ReshapeOrder.Value('CHANNEL_FIRST') # depends on [control=['if'], data=[]] else: spec_layer_params.mode = _NeuralNetwork_pb2.ReshapeLayerParams.ReshapeOrder.Value('CHANNEL_LAST') if len(target_shape) != 4 and len(target_shape) != 3: raise ValueError("Length of the 'target-shape' parameter must be equal to 3 or 4") # depends on [control=['if'], data=[]]
def run( self, program: Union[circuits.Circuit, schedules.Schedule], param_resolver: 'study.ParamResolverOrSimilarType' = None, repetitions: int = 1, ) -> study.TrialResult: """Samples from the given Circuit or Schedule. Args: program: The circuit or schedule to simulate. param_resolver: Parameters to run with the program. repetitions: The number of repetitions to simulate. Returns: TrialResult for a run. """ return self.run_sweep(program, study.ParamResolver(param_resolver), repetitions)[0]
def function[run, parameter[self, program, param_resolver, repetitions]]: constant[Samples from the given Circuit or Schedule. Args: program: The circuit or schedule to simulate. param_resolver: Parameters to run with the program. repetitions: The number of repetitions to simulate. Returns: TrialResult for a run. ] return[call[call[name[self].run_sweep, parameter[name[program], call[name[study].ParamResolver, parameter[name[param_resolver]]], name[repetitions]]]][constant[0]]]
keyword[def] identifier[run] ( identifier[self] , identifier[program] : identifier[Union] [ identifier[circuits] . identifier[Circuit] , identifier[schedules] . identifier[Schedule] ], identifier[param_resolver] : literal[string] = keyword[None] , identifier[repetitions] : identifier[int] = literal[int] , )-> identifier[study] . identifier[TrialResult] : literal[string] keyword[return] identifier[self] . identifier[run_sweep] ( identifier[program] , identifier[study] . identifier[ParamResolver] ( identifier[param_resolver] ), identifier[repetitions] )[ literal[int] ]
def run(self, program: Union[circuits.Circuit, schedules.Schedule], param_resolver: 'study.ParamResolverOrSimilarType'=None, repetitions: int=1) -> study.TrialResult: """Samples from the given Circuit or Schedule. Args: program: The circuit or schedule to simulate. param_resolver: Parameters to run with the program. repetitions: The number of repetitions to simulate. Returns: TrialResult for a run. """ return self.run_sweep(program, study.ParamResolver(param_resolver), repetitions)[0]
def get_xpath_frequencydistribution(paths): """ Build and return a frequency distribution over xpath occurrences.""" # "html/body/div/div/text" -> [ "html", "body", "div", "div", "text" ] splitpaths = [p.split('/') for p in paths] # get list of "parentpaths" by right-stripping off the last xpath-node, # effectively getting the parent path parentpaths = ['/'.join(p[:-1]) for p in splitpaths] # build frequency distribution parentpaths_counter = Counter(parentpaths) return parentpaths_counter.most_common()
def function[get_xpath_frequencydistribution, parameter[paths]]: constant[ Build and return a frequency distribution over xpath occurrences.] variable[splitpaths] assign[=] <ast.ListComp object at 0x7da18fe93550> variable[parentpaths] assign[=] <ast.ListComp object at 0x7da18fe93e80> variable[parentpaths_counter] assign[=] call[name[Counter], parameter[name[parentpaths]]] return[call[name[parentpaths_counter].most_common, parameter[]]]
keyword[def] identifier[get_xpath_frequencydistribution] ( identifier[paths] ): literal[string] identifier[splitpaths] =[ identifier[p] . identifier[split] ( literal[string] ) keyword[for] identifier[p] keyword[in] identifier[paths] ] identifier[parentpaths] =[ literal[string] . identifier[join] ( identifier[p] [:- literal[int] ]) keyword[for] identifier[p] keyword[in] identifier[splitpaths] ] identifier[parentpaths_counter] = identifier[Counter] ( identifier[parentpaths] ) keyword[return] identifier[parentpaths_counter] . identifier[most_common] ()
def get_xpath_frequencydistribution(paths): """ Build and return a frequency distribution over xpath occurrences.""" # "html/body/div/div/text" -> [ "html", "body", "div", "div", "text" ] splitpaths = [p.split('/') for p in paths] # get list of "parentpaths" by right-stripping off the last xpath-node, # effectively getting the parent path parentpaths = ['/'.join(p[:-1]) for p in splitpaths] # build frequency distribution parentpaths_counter = Counter(parentpaths) return parentpaths_counter.most_common()
def _sort(self): """sort sprites by z_order""" self.__dict__['_z_ordered_sprites'] = sorted(self.sprites, key=lambda sprite:sprite.z_order)
def function[_sort, parameter[self]]: constant[sort sprites by z_order] call[name[self].__dict__][constant[_z_ordered_sprites]] assign[=] call[name[sorted], parameter[name[self].sprites]]
keyword[def] identifier[_sort] ( identifier[self] ): literal[string] identifier[self] . identifier[__dict__] [ literal[string] ]= identifier[sorted] ( identifier[self] . identifier[sprites] , identifier[key] = keyword[lambda] identifier[sprite] : identifier[sprite] . identifier[z_order] )
def _sort(self): """sort sprites by z_order""" self.__dict__['_z_ordered_sprites'] = sorted(self.sprites, key=lambda sprite: sprite.z_order)
def index(self, row, column=0, parent=QModelIndex()): """ Reimplements the :meth:`QAbstractItemModel.index` method. :param row: Row. :type row: int :param column: Column. :type column: int :param parent: Parent. :type parent: QModelIndex :return: Index. :rtype: QModelIndex """ parent_node = self.get_node(parent) child = parent_node.child(row) if child: return self.createIndex(row, column, child) else: return QModelIndex()
def function[index, parameter[self, row, column, parent]]: constant[ Reimplements the :meth:`QAbstractItemModel.index` method. :param row: Row. :type row: int :param column: Column. :type column: int :param parent: Parent. :type parent: QModelIndex :return: Index. :rtype: QModelIndex ] variable[parent_node] assign[=] call[name[self].get_node, parameter[name[parent]]] variable[child] assign[=] call[name[parent_node].child, parameter[name[row]]] if name[child] begin[:] return[call[name[self].createIndex, parameter[name[row], name[column], name[child]]]]
keyword[def] identifier[index] ( identifier[self] , identifier[row] , identifier[column] = literal[int] , identifier[parent] = identifier[QModelIndex] ()): literal[string] identifier[parent_node] = identifier[self] . identifier[get_node] ( identifier[parent] ) identifier[child] = identifier[parent_node] . identifier[child] ( identifier[row] ) keyword[if] identifier[child] : keyword[return] identifier[self] . identifier[createIndex] ( identifier[row] , identifier[column] , identifier[child] ) keyword[else] : keyword[return] identifier[QModelIndex] ()
def index(self, row, column=0, parent=QModelIndex()): """ Reimplements the :meth:`QAbstractItemModel.index` method. :param row: Row. :type row: int :param column: Column. :type column: int :param parent: Parent. :type parent: QModelIndex :return: Index. :rtype: QModelIndex """ parent_node = self.get_node(parent) child = parent_node.child(row) if child: return self.createIndex(row, column, child) # depends on [control=['if'], data=[]] else: return QModelIndex()
def _handle_universal(self, X): """ Scan through the corpus to compute counts of each Universal Dependencies part-of-speech. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. """ jump = { # combine proper and regular nouns "NOUN": "noun", "PROPN": "noun", "ADJ": "adjective", "VERB": "verb", # include particles with adverbs "ADV": "adverb", "PART": "adverb", "ADP": "adposition", "PRON": "pronoun", "CCONJ": "conjunction", "PUNCT": "punctuation", "DET": "determiner", "NUM": "number", "INTJ": "interjection", "SYM": "symbol", } for tagged_doc in X: for tagged_sent in tagged_doc: for _, tag in tagged_sent: if tag == "SPACE": continue self.pos_tag_counts_[jump.get(tag, "other")] += 1
def function[_handle_universal, parameter[self, X]]: constant[ Scan through the corpus to compute counts of each Universal Dependencies part-of-speech. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. ] variable[jump] assign[=] dictionary[[<ast.Constant object at 0x7da18bcc99f0>, <ast.Constant object at 0x7da18bcc96f0>, <ast.Constant object at 0x7da18bcca0e0>, <ast.Constant object at 0x7da18bcc8580>, <ast.Constant object at 0x7da18bcc94b0>, <ast.Constant object at 0x7da18bcc9960>, <ast.Constant object at 0x7da18bccb9d0>, <ast.Constant object at 0x7da18bccac50>, <ast.Constant object at 0x7da18bccb700>, <ast.Constant object at 0x7da18bcc86d0>, <ast.Constant object at 0x7da18bcc8d60>, <ast.Constant object at 0x7da18bccac20>, <ast.Constant object at 0x7da18bcc9d20>, <ast.Constant object at 0x7da18bccb070>], [<ast.Constant object at 0x7da18bcc8640>, <ast.Constant object at 0x7da18bccb820>, <ast.Constant object at 0x7da18bcca4a0>, <ast.Constant object at 0x7da18bccbb50>, <ast.Constant object at 0x7da18bcc9300>, <ast.Constant object at 0x7da18bcc9c60>, <ast.Constant object at 0x7da18bcca5f0>, <ast.Constant object at 0x7da18bcc8850>, <ast.Constant object at 0x7da18bcc9840>, <ast.Constant object at 0x7da18bcc80d0>, <ast.Constant object at 0x7da18bcc88e0>, <ast.Constant object at 0x7da18bcc89a0>, <ast.Constant object at 0x7da18bcc9ed0>, <ast.Constant object at 0x7da18bcc97e0>]] for taget[name[tagged_doc]] in starred[name[X]] begin[:] for taget[name[tagged_sent]] in starred[name[tagged_doc]] begin[:] for taget[tuple[[<ast.Name object at 0x7da18bcc8820>, <ast.Name object at 0x7da18bcc98d0>]]] in starred[name[tagged_sent]] begin[:] if compare[name[tag] equal[==] constant[SPACE]] begin[:] continue <ast.AugAssign object at 0x7da18bcca3b0>
keyword[def] identifier[_handle_universal] ( identifier[self] , identifier[X] ): literal[string] identifier[jump] ={ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , } keyword[for] identifier[tagged_doc] keyword[in] identifier[X] : keyword[for] identifier[tagged_sent] keyword[in] identifier[tagged_doc] : keyword[for] identifier[_] , identifier[tag] keyword[in] identifier[tagged_sent] : keyword[if] identifier[tag] == literal[string] : keyword[continue] identifier[self] . identifier[pos_tag_counts_] [ identifier[jump] . identifier[get] ( identifier[tag] , literal[string] )]+= literal[int]
def _handle_universal(self, X): """ Scan through the corpus to compute counts of each Universal Dependencies part-of-speech. Parameters ---------- X : list or generator Should be provided as a list of documents or a generator that yields a list of documents that contain a list of sentences that contain (token, tag) tuples. """ # combine proper and regular nouns # include particles with adverbs jump = {'NOUN': 'noun', 'PROPN': 'noun', 'ADJ': 'adjective', 'VERB': 'verb', 'ADV': 'adverb', 'PART': 'adverb', 'ADP': 'adposition', 'PRON': 'pronoun', 'CCONJ': 'conjunction', 'PUNCT': 'punctuation', 'DET': 'determiner', 'NUM': 'number', 'INTJ': 'interjection', 'SYM': 'symbol'} for tagged_doc in X: for tagged_sent in tagged_doc: for (_, tag) in tagged_sent: if tag == 'SPACE': continue # depends on [control=['if'], data=[]] self.pos_tag_counts_[jump.get(tag, 'other')] += 1 # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['tagged_sent']] # depends on [control=['for'], data=['tagged_doc']]
def start(self): """ function to initialize thread for downloading """ global parallel for self.i in range(0, self.length): if parallel: self.thread.append(myThread(self.url[ self.i ], self.directory, self.i, self.min_file_size, self.max_file_size, self.no_redirects)) else: # if not parallel whole url list is passed self.thread.append(myThread(self.url, self.directory, self.i , self.min_file_size, self.max_file_size, self.no_redirects)) self.progress[self.i]["value"] = 0 self.bytes[self.i] = 0 self.thread[self.i].start() self.read_bytes()
def function[start, parameter[self]]: constant[ function to initialize thread for downloading ] <ast.Global object at 0x7da1b0f0d7b0> for taget[name[self].i] in starred[call[name[range], parameter[constant[0], name[self].length]]] begin[:] if name[parallel] begin[:] call[name[self].thread.append, parameter[call[name[myThread], parameter[call[name[self].url][name[self].i], name[self].directory, name[self].i, name[self].min_file_size, name[self].max_file_size, name[self].no_redirects]]]] call[call[name[self].progress][name[self].i]][constant[value]] assign[=] constant[0] call[name[self].bytes][name[self].i] assign[=] constant[0] call[call[name[self].thread][name[self].i].start, parameter[]] call[name[self].read_bytes, parameter[]]
keyword[def] identifier[start] ( identifier[self] ): literal[string] keyword[global] identifier[parallel] keyword[for] identifier[self] . identifier[i] keyword[in] identifier[range] ( literal[int] , identifier[self] . identifier[length] ): keyword[if] identifier[parallel] : identifier[self] . identifier[thread] . identifier[append] ( identifier[myThread] ( identifier[self] . identifier[url] [ identifier[self] . identifier[i] ], identifier[self] . identifier[directory] , identifier[self] . identifier[i] , identifier[self] . identifier[min_file_size] , identifier[self] . identifier[max_file_size] , identifier[self] . identifier[no_redirects] )) keyword[else] : identifier[self] . identifier[thread] . identifier[append] ( identifier[myThread] ( identifier[self] . identifier[url] , identifier[self] . identifier[directory] , identifier[self] . identifier[i] , identifier[self] . identifier[min_file_size] , identifier[self] . identifier[max_file_size] , identifier[self] . identifier[no_redirects] )) identifier[self] . identifier[progress] [ identifier[self] . identifier[i] ][ literal[string] ]= literal[int] identifier[self] . identifier[bytes] [ identifier[self] . identifier[i] ]= literal[int] identifier[self] . identifier[thread] [ identifier[self] . identifier[i] ]. identifier[start] () identifier[self] . identifier[read_bytes] ()
def start(self): """ function to initialize thread for downloading """ global parallel for self.i in range(0, self.length): if parallel: self.thread.append(myThread(self.url[self.i], self.directory, self.i, self.min_file_size, self.max_file_size, self.no_redirects)) # depends on [control=['if'], data=[]] else: # if not parallel whole url list is passed self.thread.append(myThread(self.url, self.directory, self.i, self.min_file_size, self.max_file_size, self.no_redirects)) self.progress[self.i]['value'] = 0 self.bytes[self.i] = 0 self.thread[self.i].start() # depends on [control=['for'], data=[]] self.read_bytes()
def fqdn(self): """ Returns the string used to identify the client when initiating a SMTP session. RFC 5321 `§ 4.1.1.1`_ and `§ 4.1.3`_ tell us what to do: - Use the client FQDN ; - If it isn't available, we SHOULD fall back to an address literal. Returns: str: The value that should be used as the client FQDN. .. _`§ 4.1.1.1`: https://tools.ietf.org/html/rfc5321#section-4.1.1.1 .. _`§ 4.1.3`: https//tools.ietf.org/html/rfc5321#section-4.1.3 """ if self._fqdn is None: # Let's try to retrieve it: self._fqdn = socket.getfqdn() if "." not in self._fqdn: try: info = socket.getaddrinfo( host="localhost", port=None, proto=socket.IPPROTO_TCP ) except socket.gaierror: addr = "127.0.0.1" else: # We only consider the first returned result and we're # only interested in getting the IP(v4 or v6) address: addr = info[0][4][0] self._fqdn = "[{}]".format(addr) return self._fqdn
def function[fqdn, parameter[self]]: constant[ Returns the string used to identify the client when initiating a SMTP session. RFC 5321 `§ 4.1.1.1`_ and `§ 4.1.3`_ tell us what to do: - Use the client FQDN ; - If it isn't available, we SHOULD fall back to an address literal. Returns: str: The value that should be used as the client FQDN. .. _`§ 4.1.1.1`: https://tools.ietf.org/html/rfc5321#section-4.1.1.1 .. _`§ 4.1.3`: https//tools.ietf.org/html/rfc5321#section-4.1.3 ] if compare[name[self]._fqdn is constant[None]] begin[:] name[self]._fqdn assign[=] call[name[socket].getfqdn, parameter[]] if compare[constant[.] <ast.NotIn object at 0x7da2590d7190> name[self]._fqdn] begin[:] <ast.Try object at 0x7da1b2535ab0> name[self]._fqdn assign[=] call[constant[[{}]].format, parameter[name[addr]]] return[name[self]._fqdn]
keyword[def] identifier[fqdn] ( identifier[self] ): literal[string] keyword[if] identifier[self] . identifier[_fqdn] keyword[is] keyword[None] : identifier[self] . identifier[_fqdn] = identifier[socket] . identifier[getfqdn] () keyword[if] literal[string] keyword[not] keyword[in] identifier[self] . identifier[_fqdn] : keyword[try] : identifier[info] = identifier[socket] . identifier[getaddrinfo] ( identifier[host] = literal[string] , identifier[port] = keyword[None] , identifier[proto] = identifier[socket] . identifier[IPPROTO_TCP] ) keyword[except] identifier[socket] . identifier[gaierror] : identifier[addr] = literal[string] keyword[else] : identifier[addr] = identifier[info] [ literal[int] ][ literal[int] ][ literal[int] ] identifier[self] . identifier[_fqdn] = literal[string] . identifier[format] ( identifier[addr] ) keyword[return] identifier[self] . identifier[_fqdn]
def fqdn(self): """ Returns the string used to identify the client when initiating a SMTP session. RFC 5321 `§ 4.1.1.1`_ and `§ 4.1.3`_ tell us what to do: - Use the client FQDN ; - If it isn't available, we SHOULD fall back to an address literal. Returns: str: The value that should be used as the client FQDN. .. _`§ 4.1.1.1`: https://tools.ietf.org/html/rfc5321#section-4.1.1.1 .. _`§ 4.1.3`: https//tools.ietf.org/html/rfc5321#section-4.1.3 """ if self._fqdn is None: # Let's try to retrieve it: self._fqdn = socket.getfqdn() if '.' not in self._fqdn: try: info = socket.getaddrinfo(host='localhost', port=None, proto=socket.IPPROTO_TCP) # depends on [control=['try'], data=[]] except socket.gaierror: addr = '127.0.0.1' # depends on [control=['except'], data=[]] else: # We only consider the first returned result and we're # only interested in getting the IP(v4 or v6) address: addr = info[0][4][0] self._fqdn = '[{}]'.format(addr) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] return self._fqdn
def pisaPreLoop(node, context, collect=False): """ Collect all CSS definitions """ data = u"" if node.nodeType == Node.TEXT_NODE and collect: data = node.data elif node.nodeType == Node.ELEMENT_NODE: name = node.tagName.lower() if name in ("style", "link"): attr = pisaGetAttributes(context, name, node.attributes) media = [x.strip() for x in attr.media.lower().split(",") if x.strip()] if attr.get("type", "").lower() in ("", "text/css") and \ (not media or "all" in media or "print" in media or "pdf" in media): if name == "style": for node in node.childNodes: data += pisaPreLoop(node, context, collect=True) context.addCSS(data) return u"" if name == "link" and attr.href and attr.rel.lower() == "stylesheet": # print "CSS LINK", attr context.addCSS('\n@import "%s" %s;' % (attr.href, ",".join(media))) for node in node.childNodes: result = pisaPreLoop(node, context, collect=collect) if collect: data += result return data
def function[pisaPreLoop, parameter[node, context, collect]]: constant[ Collect all CSS definitions ] variable[data] assign[=] constant[] if <ast.BoolOp object at 0x7da1b12cbf10> begin[:] variable[data] assign[=] name[node].data for taget[name[node]] in starred[name[node].childNodes] begin[:] variable[result] assign[=] call[name[pisaPreLoop], parameter[name[node], name[context]]] if name[collect] begin[:] <ast.AugAssign object at 0x7da1b12c8520> return[name[data]]
keyword[def] identifier[pisaPreLoop] ( identifier[node] , identifier[context] , identifier[collect] = keyword[False] ): literal[string] identifier[data] = literal[string] keyword[if] identifier[node] . identifier[nodeType] == identifier[Node] . identifier[TEXT_NODE] keyword[and] identifier[collect] : identifier[data] = identifier[node] . identifier[data] keyword[elif] identifier[node] . identifier[nodeType] == identifier[Node] . identifier[ELEMENT_NODE] : identifier[name] = identifier[node] . identifier[tagName] . identifier[lower] () keyword[if] identifier[name] keyword[in] ( literal[string] , literal[string] ): identifier[attr] = identifier[pisaGetAttributes] ( identifier[context] , identifier[name] , identifier[node] . identifier[attributes] ) identifier[media] =[ identifier[x] . identifier[strip] () keyword[for] identifier[x] keyword[in] identifier[attr] . identifier[media] . identifier[lower] (). identifier[split] ( literal[string] ) keyword[if] identifier[x] . identifier[strip] ()] keyword[if] identifier[attr] . identifier[get] ( literal[string] , literal[string] ). identifier[lower] () keyword[in] ( literal[string] , literal[string] ) keyword[and] ( keyword[not] identifier[media] keyword[or] literal[string] keyword[in] identifier[media] keyword[or] literal[string] keyword[in] identifier[media] keyword[or] literal[string] keyword[in] identifier[media] ): keyword[if] identifier[name] == literal[string] : keyword[for] identifier[node] keyword[in] identifier[node] . identifier[childNodes] : identifier[data] += identifier[pisaPreLoop] ( identifier[node] , identifier[context] , identifier[collect] = keyword[True] ) identifier[context] . identifier[addCSS] ( identifier[data] ) keyword[return] literal[string] keyword[if] identifier[name] == literal[string] keyword[and] identifier[attr] . identifier[href] keyword[and] identifier[attr] . identifier[rel] . identifier[lower] ()== literal[string] : identifier[context] . identifier[addCSS] ( literal[string] % ( identifier[attr] . identifier[href] , literal[string] . identifier[join] ( identifier[media] ))) keyword[for] identifier[node] keyword[in] identifier[node] . identifier[childNodes] : identifier[result] = identifier[pisaPreLoop] ( identifier[node] , identifier[context] , identifier[collect] = identifier[collect] ) keyword[if] identifier[collect] : identifier[data] += identifier[result] keyword[return] identifier[data]
def pisaPreLoop(node, context, collect=False): """ Collect all CSS definitions """ data = u'' if node.nodeType == Node.TEXT_NODE and collect: data = node.data # depends on [control=['if'], data=[]] elif node.nodeType == Node.ELEMENT_NODE: name = node.tagName.lower() if name in ('style', 'link'): attr = pisaGetAttributes(context, name, node.attributes) media = [x.strip() for x in attr.media.lower().split(',') if x.strip()] if attr.get('type', '').lower() in ('', 'text/css') and (not media or 'all' in media or 'print' in media or ('pdf' in media)): if name == 'style': for node in node.childNodes: data += pisaPreLoop(node, context, collect=True) # depends on [control=['for'], data=['node']] context.addCSS(data) return u'' # depends on [control=['if'], data=[]] if name == 'link' and attr.href and (attr.rel.lower() == 'stylesheet'): # print "CSS LINK", attr context.addCSS('\n@import "%s" %s;' % (attr.href, ','.join(media))) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['name']] # depends on [control=['if'], data=[]] for node in node.childNodes: result = pisaPreLoop(node, context, collect=collect) if collect: data += result # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['node']] return data
def resolve(self): 'Resolve pathname shell variables and ~userdir' return os.path.expandvars(os.path.expanduser(self.fqpn))
def function[resolve, parameter[self]]: constant[Resolve pathname shell variables and ~userdir] return[call[name[os].path.expandvars, parameter[call[name[os].path.expanduser, parameter[name[self].fqpn]]]]]
keyword[def] identifier[resolve] ( identifier[self] ): literal[string] keyword[return] identifier[os] . identifier[path] . identifier[expandvars] ( identifier[os] . identifier[path] . identifier[expanduser] ( identifier[self] . identifier[fqpn] ))
def resolve(self): """Resolve pathname shell variables and ~userdir""" return os.path.expandvars(os.path.expanduser(self.fqpn))
def build_user_requested_parameters(request, meta): """Build the list of parameters requested by the plugit server""" postParameters = {} getParameters = {} files = {} # Add parameters requested by the server if 'user_info' in meta: for prop in meta['user_info']: # Test if the value exist, otherwise return None value = None if hasattr(request.user, prop) and prop in settings.PIAPI_USERDATA: value = getattr(request.user, prop) else: raise Exception('requested user attribute "%s", ' 'does not exist or requesting is not allowed' % prop) # Add informations to get or post parameters, depending on the current method if request.method == 'POST': postParameters['ebuio_u_' + prop] = value else: getParameters['ebuio_u_' + prop] = value return (getParameters, postParameters, files)
def function[build_user_requested_parameters, parameter[request, meta]]: constant[Build the list of parameters requested by the plugit server] variable[postParameters] assign[=] dictionary[[], []] variable[getParameters] assign[=] dictionary[[], []] variable[files] assign[=] dictionary[[], []] if compare[constant[user_info] in name[meta]] begin[:] for taget[name[prop]] in starred[call[name[meta]][constant[user_info]]] begin[:] variable[value] assign[=] constant[None] if <ast.BoolOp object at 0x7da18f810580> begin[:] variable[value] assign[=] call[name[getattr], parameter[name[request].user, name[prop]]] if compare[name[request].method equal[==] constant[POST]] begin[:] call[name[postParameters]][binary_operation[constant[ebuio_u_] + name[prop]]] assign[=] name[value] return[tuple[[<ast.Name object at 0x7da18f8101c0>, <ast.Name object at 0x7da18f813130>, <ast.Name object at 0x7da18f8117e0>]]]
keyword[def] identifier[build_user_requested_parameters] ( identifier[request] , identifier[meta] ): literal[string] identifier[postParameters] ={} identifier[getParameters] ={} identifier[files] ={} keyword[if] literal[string] keyword[in] identifier[meta] : keyword[for] identifier[prop] keyword[in] identifier[meta] [ literal[string] ]: identifier[value] = keyword[None] keyword[if] identifier[hasattr] ( identifier[request] . identifier[user] , identifier[prop] ) keyword[and] identifier[prop] keyword[in] identifier[settings] . identifier[PIAPI_USERDATA] : identifier[value] = identifier[getattr] ( identifier[request] . identifier[user] , identifier[prop] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] literal[string] % identifier[prop] ) keyword[if] identifier[request] . identifier[method] == literal[string] : identifier[postParameters] [ literal[string] + identifier[prop] ]= identifier[value] keyword[else] : identifier[getParameters] [ literal[string] + identifier[prop] ]= identifier[value] keyword[return] ( identifier[getParameters] , identifier[postParameters] , identifier[files] )
def build_user_requested_parameters(request, meta): """Build the list of parameters requested by the plugit server""" postParameters = {} getParameters = {} files = {} # Add parameters requested by the server if 'user_info' in meta: for prop in meta['user_info']: # Test if the value exist, otherwise return None value = None if hasattr(request.user, prop) and prop in settings.PIAPI_USERDATA: value = getattr(request.user, prop) # depends on [control=['if'], data=[]] else: raise Exception('requested user attribute "%s", does not exist or requesting is not allowed' % prop) # Add informations to get or post parameters, depending on the current method if request.method == 'POST': postParameters['ebuio_u_' + prop] = value # depends on [control=['if'], data=[]] else: getParameters['ebuio_u_' + prop] = value # depends on [control=['for'], data=['prop']] # depends on [control=['if'], data=['meta']] return (getParameters, postParameters, files)
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Get a bucket/key from the server """ # We could detect quorum_controls here but HTTP ignores # unknown flags/params. params = {'r': r, 'pr': pr, 'timeout': timeout, 'basic_quorum': basic_quorum, 'notfound_ok': notfound_ok} bucket_type = self._get_bucket_type(robj.bucket.bucket_type) url = self.object_path(robj.bucket.name, robj.key, bucket_type=bucket_type, **params) response = self._request('GET', url) return self._parse_body(robj, response, [200, 300, 404])
def function[get, parameter[self, robj, r, pr, timeout, basic_quorum, notfound_ok, head_only]]: constant[ Get a bucket/key from the server ] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da18f722b30>, <ast.Constant object at 0x7da18f723df0>, <ast.Constant object at 0x7da18f723ee0>, <ast.Constant object at 0x7da18f720400>, <ast.Constant object at 0x7da18f721f60>], [<ast.Name object at 0x7da18f723460>, <ast.Name object at 0x7da18f721990>, <ast.Name object at 0x7da18f723eb0>, <ast.Name object at 0x7da18f722bf0>, <ast.Name object at 0x7da18f723c70>]] variable[bucket_type] assign[=] call[name[self]._get_bucket_type, parameter[name[robj].bucket.bucket_type]] variable[url] assign[=] call[name[self].object_path, parameter[name[robj].bucket.name, name[robj].key]] variable[response] assign[=] call[name[self]._request, parameter[constant[GET], name[url]]] return[call[name[self]._parse_body, parameter[name[robj], name[response], list[[<ast.Constant object at 0x7da18eb55de0>, <ast.Constant object at 0x7da18eb55fc0>, <ast.Constant object at 0x7da18eb57370>]]]]]
keyword[def] identifier[get] ( identifier[self] , identifier[robj] , identifier[r] = keyword[None] , identifier[pr] = keyword[None] , identifier[timeout] = keyword[None] , identifier[basic_quorum] = keyword[None] , identifier[notfound_ok] = keyword[None] , identifier[head_only] = keyword[False] ): literal[string] identifier[params] ={ literal[string] : identifier[r] , literal[string] : identifier[pr] , literal[string] : identifier[timeout] , literal[string] : identifier[basic_quorum] , literal[string] : identifier[notfound_ok] } identifier[bucket_type] = identifier[self] . identifier[_get_bucket_type] ( identifier[robj] . identifier[bucket] . identifier[bucket_type] ) identifier[url] = identifier[self] . identifier[object_path] ( identifier[robj] . identifier[bucket] . identifier[name] , identifier[robj] . identifier[key] , identifier[bucket_type] = identifier[bucket_type] ,** identifier[params] ) identifier[response] = identifier[self] . identifier[_request] ( literal[string] , identifier[url] ) keyword[return] identifier[self] . identifier[_parse_body] ( identifier[robj] , identifier[response] ,[ literal[int] , literal[int] , literal[int] ])
def get(self, robj, r=None, pr=None, timeout=None, basic_quorum=None, notfound_ok=None, head_only=False): """ Get a bucket/key from the server """ # We could detect quorum_controls here but HTTP ignores # unknown flags/params. params = {'r': r, 'pr': pr, 'timeout': timeout, 'basic_quorum': basic_quorum, 'notfound_ok': notfound_ok} bucket_type = self._get_bucket_type(robj.bucket.bucket_type) url = self.object_path(robj.bucket.name, robj.key, bucket_type=bucket_type, **params) response = self._request('GET', url) return self._parse_body(robj, response, [200, 300, 404])
def changed_get(self, start_time, nick=None, page_size=200, page_no=1): '''taobao.simba.creativeids.changed.get =================================== 获取修改的创意ID''' request = TOPRequest('taobao.simba.creativeids.changed.get') request['start_time'] = start_time request['page_size'] = page_size request['page_no'] = page_no if nick!=None: request['nick'] = nick self.create(self.execute(request), models={'result':INCategory}) return self.result
def function[changed_get, parameter[self, start_time, nick, page_size, page_no]]: constant[taobao.simba.creativeids.changed.get =================================== 获取修改的创意ID] variable[request] assign[=] call[name[TOPRequest], parameter[constant[taobao.simba.creativeids.changed.get]]] call[name[request]][constant[start_time]] assign[=] name[start_time] call[name[request]][constant[page_size]] assign[=] name[page_size] call[name[request]][constant[page_no]] assign[=] name[page_no] if compare[name[nick] not_equal[!=] constant[None]] begin[:] call[name[request]][constant[nick]] assign[=] name[nick] call[name[self].create, parameter[call[name[self].execute, parameter[name[request]]]]] return[name[self].result]
keyword[def] identifier[changed_get] ( identifier[self] , identifier[start_time] , identifier[nick] = keyword[None] , identifier[page_size] = literal[int] , identifier[page_no] = literal[int] ): literal[string] identifier[request] = identifier[TOPRequest] ( literal[string] ) identifier[request] [ literal[string] ]= identifier[start_time] identifier[request] [ literal[string] ]= identifier[page_size] identifier[request] [ literal[string] ]= identifier[page_no] keyword[if] identifier[nick] != keyword[None] : identifier[request] [ literal[string] ]= identifier[nick] identifier[self] . identifier[create] ( identifier[self] . identifier[execute] ( identifier[request] ), identifier[models] ={ literal[string] : identifier[INCategory] }) keyword[return] identifier[self] . identifier[result]
def changed_get(self, start_time, nick=None, page_size=200, page_no=1): """taobao.simba.creativeids.changed.get =================================== 获取修改的创意ID""" request = TOPRequest('taobao.simba.creativeids.changed.get') request['start_time'] = start_time request['page_size'] = page_size request['page_no'] = page_no if nick != None: request['nick'] = nick # depends on [control=['if'], data=['nick']] self.create(self.execute(request), models={'result': INCategory}) return self.result
async def nextset(self): """Get the next query set""" conn = self._get_db() current_result = self._result if current_result is None or current_result is not conn._result: return if not current_result.has_next: return self._result = None self._clear_result() await conn.next_result() await self._do_get_result() return True
<ast.AsyncFunctionDef object at 0x7da20c6a93f0>
keyword[async] keyword[def] identifier[nextset] ( identifier[self] ): literal[string] identifier[conn] = identifier[self] . identifier[_get_db] () identifier[current_result] = identifier[self] . identifier[_result] keyword[if] identifier[current_result] keyword[is] keyword[None] keyword[or] identifier[current_result] keyword[is] keyword[not] identifier[conn] . identifier[_result] : keyword[return] keyword[if] keyword[not] identifier[current_result] . identifier[has_next] : keyword[return] identifier[self] . identifier[_result] = keyword[None] identifier[self] . identifier[_clear_result] () keyword[await] identifier[conn] . identifier[next_result] () keyword[await] identifier[self] . identifier[_do_get_result] () keyword[return] keyword[True]
async def nextset(self): """Get the next query set""" conn = self._get_db() current_result = self._result if current_result is None or current_result is not conn._result: return # depends on [control=['if'], data=[]] if not current_result.has_next: return # depends on [control=['if'], data=[]] self._result = None self._clear_result() await conn.next_result() await self._do_get_result() return True
def _set_show_mpls_ldp_statistics(self, v, load=False): """ Setter method for show_mpls_ldp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_statistics (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_mpls_ldp_statistics is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_mpls_ldp_statistics() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=show_mpls_ldp_statistics.show_mpls_ldp_statistics, is_leaf=True, yang_name="show-mpls-ldp-statistics", rest_name="show-mpls-ldp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsLdpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """show_mpls_ldp_statistics must be of a type compatible with rpc""", 'defined-type': "rpc", 'generated-type': """YANGDynClass(base=show_mpls_ldp_statistics.show_mpls_ldp_statistics, is_leaf=True, yang_name="show-mpls-ldp-statistics", rest_name="show-mpls-ldp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsLdpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True)""", }) self.__show_mpls_ldp_statistics = t if hasattr(self, '_set'): self._set()
def function[_set_show_mpls_ldp_statistics, parameter[self, v, load]]: constant[ Setter method for show_mpls_ldp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_statistics (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_mpls_ldp_statistics is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_mpls_ldp_statistics() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da18bc72e90> name[self].__show_mpls_ldp_statistics assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_show_mpls_ldp_statistics] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[show_mpls_ldp_statistics] . identifier[show_mpls_ldp_statistics] , identifier[is_leaf] = keyword[True] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[False] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : literal[string] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__show_mpls_ldp_statistics] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_show_mpls_ldp_statistics(self, v, load=False): """ Setter method for show_mpls_ldp_statistics, mapped from YANG variable /brocade_mpls_rpc/show_mpls_ldp_statistics (rpc) If this variable is read-only (config: false) in the source YANG file, then _set_show_mpls_ldp_statistics is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_show_mpls_ldp_statistics() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=show_mpls_ldp_statistics.show_mpls_ldp_statistics, is_leaf=True, yang_name='show-mpls-ldp-statistics', rest_name='show-mpls-ldp-statistics', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u'tailf-common': {u'hidden': u'rpccmd', u'actionpoint': u'showMplsLdpStatistics'}}, namespace='urn:brocade.com:mgmt:brocade-mpls', defining_module='brocade-mpls', yang_type='rpc', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'show_mpls_ldp_statistics must be of a type compatible with rpc', 'defined-type': 'rpc', 'generated-type': 'YANGDynClass(base=show_mpls_ldp_statistics.show_mpls_ldp_statistics, is_leaf=True, yang_name="show-mpls-ldp-statistics", rest_name="show-mpls-ldp-statistics", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=False, extensions={u\'tailf-common\': {u\'hidden\': u\'rpccmd\', u\'actionpoint\': u\'showMplsLdpStatistics\'}}, namespace=\'urn:brocade.com:mgmt:brocade-mpls\', defining_module=\'brocade-mpls\', yang_type=\'rpc\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__show_mpls_ldp_statistics = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def _update_representative(self, index_cluster, point): """! @brief Update cluster representative in line with new cluster size and added point to it. @param[in] index_cluster (uint): Index of cluster whose representative should be updated. @param[in] point (list): Point that was added to cluster. """ length = len(self._clusters[index_cluster]); rep = self._representatives[index_cluster]; for dimension in range(len(rep)): rep[dimension] = ( (length - 1) * rep[dimension] + point[dimension] ) / length;
def function[_update_representative, parameter[self, index_cluster, point]]: constant[! @brief Update cluster representative in line with new cluster size and added point to it. @param[in] index_cluster (uint): Index of cluster whose representative should be updated. @param[in] point (list): Point that was added to cluster. ] variable[length] assign[=] call[name[len], parameter[call[name[self]._clusters][name[index_cluster]]]] variable[rep] assign[=] call[name[self]._representatives][name[index_cluster]] for taget[name[dimension]] in starred[call[name[range], parameter[call[name[len], parameter[name[rep]]]]]] begin[:] call[name[rep]][name[dimension]] assign[=] binary_operation[binary_operation[binary_operation[binary_operation[name[length] - constant[1]] * call[name[rep]][name[dimension]]] + call[name[point]][name[dimension]]] / name[length]]
keyword[def] identifier[_update_representative] ( identifier[self] , identifier[index_cluster] , identifier[point] ): literal[string] identifier[length] = identifier[len] ( identifier[self] . identifier[_clusters] [ identifier[index_cluster] ]); identifier[rep] = identifier[self] . identifier[_representatives] [ identifier[index_cluster] ]; keyword[for] identifier[dimension] keyword[in] identifier[range] ( identifier[len] ( identifier[rep] )): identifier[rep] [ identifier[dimension] ]=(( identifier[length] - literal[int] )* identifier[rep] [ identifier[dimension] ]+ identifier[point] [ identifier[dimension] ])/ identifier[length] ;
def _update_representative(self, index_cluster, point): """! @brief Update cluster representative in line with new cluster size and added point to it. @param[in] index_cluster (uint): Index of cluster whose representative should be updated. @param[in] point (list): Point that was added to cluster. """ length = len(self._clusters[index_cluster]) rep = self._representatives[index_cluster] for dimension in range(len(rep)): rep[dimension] = ((length - 1) * rep[dimension] + point[dimension]) / length # depends on [control=['for'], data=['dimension']]
def get_cn_dict(self, structure, n, use_weights=False): """ Get coordination number, CN, of each element bonded to site with index n in structure Args: structure (Structure): input structure n (integer): index of site for which to determine CN. use_weights (boolean): flag indicating whether (True) to use weights for computing the coordination number or not (False, default: each coordinated site has equal weight). Returns: cn (dict): dictionary of CN of each element bonded to site """ siw = self.get_nn_info(structure, n) cn_dict = {} for i in siw: site_element = i['site'].species_string if site_element not in cn_dict: if use_weights: cn_dict[site_element] = i['weight'] else: cn_dict[site_element] = 1 else: if use_weights: cn_dict[site_element] += i['weight'] else: cn_dict[site_element] += 1 return cn_dict
def function[get_cn_dict, parameter[self, structure, n, use_weights]]: constant[ Get coordination number, CN, of each element bonded to site with index n in structure Args: structure (Structure): input structure n (integer): index of site for which to determine CN. use_weights (boolean): flag indicating whether (True) to use weights for computing the coordination number or not (False, default: each coordinated site has equal weight). Returns: cn (dict): dictionary of CN of each element bonded to site ] variable[siw] assign[=] call[name[self].get_nn_info, parameter[name[structure], name[n]]] variable[cn_dict] assign[=] dictionary[[], []] for taget[name[i]] in starred[name[siw]] begin[:] variable[site_element] assign[=] call[name[i]][constant[site]].species_string if compare[name[site_element] <ast.NotIn object at 0x7da2590d7190> name[cn_dict]] begin[:] if name[use_weights] begin[:] call[name[cn_dict]][name[site_element]] assign[=] call[name[i]][constant[weight]] return[name[cn_dict]]
keyword[def] identifier[get_cn_dict] ( identifier[self] , identifier[structure] , identifier[n] , identifier[use_weights] = keyword[False] ): literal[string] identifier[siw] = identifier[self] . identifier[get_nn_info] ( identifier[structure] , identifier[n] ) identifier[cn_dict] ={} keyword[for] identifier[i] keyword[in] identifier[siw] : identifier[site_element] = identifier[i] [ literal[string] ]. identifier[species_string] keyword[if] identifier[site_element] keyword[not] keyword[in] identifier[cn_dict] : keyword[if] identifier[use_weights] : identifier[cn_dict] [ identifier[site_element] ]= identifier[i] [ literal[string] ] keyword[else] : identifier[cn_dict] [ identifier[site_element] ]= literal[int] keyword[else] : keyword[if] identifier[use_weights] : identifier[cn_dict] [ identifier[site_element] ]+= identifier[i] [ literal[string] ] keyword[else] : identifier[cn_dict] [ identifier[site_element] ]+= literal[int] keyword[return] identifier[cn_dict]
def get_cn_dict(self, structure, n, use_weights=False): """ Get coordination number, CN, of each element bonded to site with index n in structure Args: structure (Structure): input structure n (integer): index of site for which to determine CN. use_weights (boolean): flag indicating whether (True) to use weights for computing the coordination number or not (False, default: each coordinated site has equal weight). Returns: cn (dict): dictionary of CN of each element bonded to site """ siw = self.get_nn_info(structure, n) cn_dict = {} for i in siw: site_element = i['site'].species_string if site_element not in cn_dict: if use_weights: cn_dict[site_element] = i['weight'] # depends on [control=['if'], data=[]] else: cn_dict[site_element] = 1 # depends on [control=['if'], data=['site_element', 'cn_dict']] elif use_weights: cn_dict[site_element] += i['weight'] # depends on [control=['if'], data=[]] else: cn_dict[site_element] += 1 # depends on [control=['for'], data=['i']] return cn_dict
def parse_args(self, args=None, namespace=None): """ Reparses new arguments when _DemoAction (triggering parser.demo_args()) or _WizardAction (triggering input_args()) was called. """ if not namespace: # use the new Namespace class for handling _config namespace = Namespace(self) namespace = super(ArgumentParser, self).parse_args(args, namespace) if len(self._reparse_args['pos']) > 0 or \ len(self._reparse_args['opt']) > 0 or \ len(self._reparse_args['sub']) > 0: args = self._reset_args() namespace = super(ArgumentParser, self).parse_args(args, namespace) # process "-hh..." here, after having parsed the arguments help_level = getattr(namespace, "help", 0) if help_level > 0: self.print_help() self.print_extended_help(help_level) self.exit() return namespace
def function[parse_args, parameter[self, args, namespace]]: constant[ Reparses new arguments when _DemoAction (triggering parser.demo_args()) or _WizardAction (triggering input_args()) was called. ] if <ast.UnaryOp object at 0x7da1b19b54e0> begin[:] variable[namespace] assign[=] call[name[Namespace], parameter[name[self]]] variable[namespace] assign[=] call[call[name[super], parameter[name[ArgumentParser], name[self]]].parse_args, parameter[name[args], name[namespace]]] if <ast.BoolOp object at 0x7da1b19b78b0> begin[:] variable[args] assign[=] call[name[self]._reset_args, parameter[]] variable[namespace] assign[=] call[call[name[super], parameter[name[ArgumentParser], name[self]]].parse_args, parameter[name[args], name[namespace]]] variable[help_level] assign[=] call[name[getattr], parameter[name[namespace], constant[help], constant[0]]] if compare[name[help_level] greater[>] constant[0]] begin[:] call[name[self].print_help, parameter[]] call[name[self].print_extended_help, parameter[name[help_level]]] call[name[self].exit, parameter[]] return[name[namespace]]
keyword[def] identifier[parse_args] ( identifier[self] , identifier[args] = keyword[None] , identifier[namespace] = keyword[None] ): literal[string] keyword[if] keyword[not] identifier[namespace] : identifier[namespace] = identifier[Namespace] ( identifier[self] ) identifier[namespace] = identifier[super] ( identifier[ArgumentParser] , identifier[self] ). identifier[parse_args] ( identifier[args] , identifier[namespace] ) keyword[if] identifier[len] ( identifier[self] . identifier[_reparse_args] [ literal[string] ])> literal[int] keyword[or] identifier[len] ( identifier[self] . identifier[_reparse_args] [ literal[string] ])> literal[int] keyword[or] identifier[len] ( identifier[self] . identifier[_reparse_args] [ literal[string] ])> literal[int] : identifier[args] = identifier[self] . identifier[_reset_args] () identifier[namespace] = identifier[super] ( identifier[ArgumentParser] , identifier[self] ). identifier[parse_args] ( identifier[args] , identifier[namespace] ) identifier[help_level] = identifier[getattr] ( identifier[namespace] , literal[string] , literal[int] ) keyword[if] identifier[help_level] > literal[int] : identifier[self] . identifier[print_help] () identifier[self] . identifier[print_extended_help] ( identifier[help_level] ) identifier[self] . identifier[exit] () keyword[return] identifier[namespace]
def parse_args(self, args=None, namespace=None): """ Reparses new arguments when _DemoAction (triggering parser.demo_args()) or _WizardAction (triggering input_args()) was called. """ if not namespace: # use the new Namespace class for handling _config namespace = Namespace(self) # depends on [control=['if'], data=[]] namespace = super(ArgumentParser, self).parse_args(args, namespace) if len(self._reparse_args['pos']) > 0 or len(self._reparse_args['opt']) > 0 or len(self._reparse_args['sub']) > 0: args = self._reset_args() namespace = super(ArgumentParser, self).parse_args(args, namespace) # depends on [control=['if'], data=[]] # process "-hh..." here, after having parsed the arguments help_level = getattr(namespace, 'help', 0) if help_level > 0: self.print_help() self.print_extended_help(help_level) self.exit() # depends on [control=['if'], data=['help_level']] return namespace
def step(self, step, total, label='STEP', speed_label='STEPS/S', size=1): """ Increase the step indicator, which is a sub progress circle of the actual main progress circle (epoch, progress() method). """ self.lock.acquire() try: time_diff = time.time() - self.last_step_time if self.last_step > step: # it restarted self.last_step = 0 made_steps_since_last_call = step - self.last_step self.last_step = step self.made_steps_since_last_sync += made_steps_since_last_call self.made_steps_size_since_last_sync += made_steps_since_last_call * size if time_diff >= 1 or step == total: # only each second or last batch self.set_system_info('step', step, True) self.set_system_info('steps', total, True) steps_per_second = self.made_steps_since_last_sync / time_diff samples_per_second = self.made_steps_size_since_last_sync / time_diff self.last_step_time = time.time() if size: self.report_speed(samples_per_second) epochs_per_second = steps_per_second / total # all batches self.set_system_info('epochsPerSecond', epochs_per_second, True) current_epochs = self.current_epoch if self.current_epoch else 1 total_epochs = self.total_epochs if self.total_epochs else 1 self.made_steps_since_last_sync = 0 self.made_steps_size_since_last_sync = 0 eta = 0 if step < total: # time to end this epoch if steps_per_second != 0: eta = (total - step) / steps_per_second # time until all epochs are done if total_epochs - current_epochs > 0: if epochs_per_second != 0: eta += (total_epochs - (current_epochs)) / epochs_per_second self.git.store_file('aetros/job/times/eta.json', simplejson.dumps(eta)) if label and self.step_label != label: self.set_system_info('stepLabel', label, True) self.step_label = label if speed_label and self.step_speed_label != speed_label: self.set_system_info('stepSpeedLabel', speed_label, True) self.step_speed_label = speed_label finally: self.lock.release()
def function[step, parameter[self, step, total, label, speed_label, size]]: constant[ Increase the step indicator, which is a sub progress circle of the actual main progress circle (epoch, progress() method). ] call[name[self].lock.acquire, parameter[]] <ast.Try object at 0x7da20c6aa530>
keyword[def] identifier[step] ( identifier[self] , identifier[step] , identifier[total] , identifier[label] = literal[string] , identifier[speed_label] = literal[string] , identifier[size] = literal[int] ): literal[string] identifier[self] . identifier[lock] . identifier[acquire] () keyword[try] : identifier[time_diff] = identifier[time] . identifier[time] ()- identifier[self] . identifier[last_step_time] keyword[if] identifier[self] . identifier[last_step] > identifier[step] : identifier[self] . identifier[last_step] = literal[int] identifier[made_steps_since_last_call] = identifier[step] - identifier[self] . identifier[last_step] identifier[self] . identifier[last_step] = identifier[step] identifier[self] . identifier[made_steps_since_last_sync] += identifier[made_steps_since_last_call] identifier[self] . identifier[made_steps_size_since_last_sync] += identifier[made_steps_since_last_call] * identifier[size] keyword[if] identifier[time_diff] >= literal[int] keyword[or] identifier[step] == identifier[total] : identifier[self] . identifier[set_system_info] ( literal[string] , identifier[step] , keyword[True] ) identifier[self] . identifier[set_system_info] ( literal[string] , identifier[total] , keyword[True] ) identifier[steps_per_second] = identifier[self] . identifier[made_steps_since_last_sync] / identifier[time_diff] identifier[samples_per_second] = identifier[self] . identifier[made_steps_size_since_last_sync] / identifier[time_diff] identifier[self] . identifier[last_step_time] = identifier[time] . identifier[time] () keyword[if] identifier[size] : identifier[self] . identifier[report_speed] ( identifier[samples_per_second] ) identifier[epochs_per_second] = identifier[steps_per_second] / identifier[total] identifier[self] . identifier[set_system_info] ( literal[string] , identifier[epochs_per_second] , keyword[True] ) identifier[current_epochs] = identifier[self] . identifier[current_epoch] keyword[if] identifier[self] . identifier[current_epoch] keyword[else] literal[int] identifier[total_epochs] = identifier[self] . identifier[total_epochs] keyword[if] identifier[self] . identifier[total_epochs] keyword[else] literal[int] identifier[self] . identifier[made_steps_since_last_sync] = literal[int] identifier[self] . identifier[made_steps_size_since_last_sync] = literal[int] identifier[eta] = literal[int] keyword[if] identifier[step] < identifier[total] : keyword[if] identifier[steps_per_second] != literal[int] : identifier[eta] =( identifier[total] - identifier[step] )/ identifier[steps_per_second] keyword[if] identifier[total_epochs] - identifier[current_epochs] > literal[int] : keyword[if] identifier[epochs_per_second] != literal[int] : identifier[eta] +=( identifier[total_epochs] -( identifier[current_epochs] ))/ identifier[epochs_per_second] identifier[self] . identifier[git] . identifier[store_file] ( literal[string] , identifier[simplejson] . identifier[dumps] ( identifier[eta] )) keyword[if] identifier[label] keyword[and] identifier[self] . identifier[step_label] != identifier[label] : identifier[self] . identifier[set_system_info] ( literal[string] , identifier[label] , keyword[True] ) identifier[self] . identifier[step_label] = identifier[label] keyword[if] identifier[speed_label] keyword[and] identifier[self] . identifier[step_speed_label] != identifier[speed_label] : identifier[self] . identifier[set_system_info] ( literal[string] , identifier[speed_label] , keyword[True] ) identifier[self] . identifier[step_speed_label] = identifier[speed_label] keyword[finally] : identifier[self] . identifier[lock] . identifier[release] ()
def step(self, step, total, label='STEP', speed_label='STEPS/S', size=1): """ Increase the step indicator, which is a sub progress circle of the actual main progress circle (epoch, progress() method). """ self.lock.acquire() try: time_diff = time.time() - self.last_step_time if self.last_step > step: # it restarted self.last_step = 0 # depends on [control=['if'], data=[]] made_steps_since_last_call = step - self.last_step self.last_step = step self.made_steps_since_last_sync += made_steps_since_last_call self.made_steps_size_since_last_sync += made_steps_since_last_call * size if time_diff >= 1 or step == total: # only each second or last batch self.set_system_info('step', step, True) self.set_system_info('steps', total, True) steps_per_second = self.made_steps_since_last_sync / time_diff samples_per_second = self.made_steps_size_since_last_sync / time_diff self.last_step_time = time.time() if size: self.report_speed(samples_per_second) # depends on [control=['if'], data=[]] epochs_per_second = steps_per_second / total # all batches self.set_system_info('epochsPerSecond', epochs_per_second, True) current_epochs = self.current_epoch if self.current_epoch else 1 total_epochs = self.total_epochs if self.total_epochs else 1 self.made_steps_since_last_sync = 0 self.made_steps_size_since_last_sync = 0 eta = 0 if step < total: # time to end this epoch if steps_per_second != 0: eta = (total - step) / steps_per_second # depends on [control=['if'], data=['steps_per_second']] # depends on [control=['if'], data=['step', 'total']] # time until all epochs are done if total_epochs - current_epochs > 0: if epochs_per_second != 0: eta += (total_epochs - current_epochs) / epochs_per_second # depends on [control=['if'], data=['epochs_per_second']] # depends on [control=['if'], data=[]] self.git.store_file('aetros/job/times/eta.json', simplejson.dumps(eta)) # depends on [control=['if'], data=[]] if label and self.step_label != label: self.set_system_info('stepLabel', label, True) self.step_label = label # depends on [control=['if'], data=[]] if speed_label and self.step_speed_label != speed_label: self.set_system_info('stepSpeedLabel', speed_label, True) self.step_speed_label = speed_label # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] finally: self.lock.release()
def default_grid(n_items, max_cols=4, min_cols=3): # noqa: D202 """Make a grid for subplots. Tries to get as close to sqrt(n_items) x sqrt(n_items) as it can, but allows for custom logic Parameters ---------- n_items : int Number of panels required max_cols : int Maximum number of columns, inclusive min_cols : int Minimum number of columns, inclusive Returns ------- (int, int) Rows and columns, so that rows * columns >= n_items """ def in_bounds(val): return np.clip(val, min_cols, max_cols) if n_items <= max_cols: return 1, n_items ideal = in_bounds(round(n_items ** 0.5)) for offset in (0, 1, -1, 2, -2): cols = in_bounds(ideal + offset) rows, extra = divmod(n_items, cols) if extra == 0: return rows, cols return n_items // ideal + 1, ideal
def function[default_grid, parameter[n_items, max_cols, min_cols]]: constant[Make a grid for subplots. Tries to get as close to sqrt(n_items) x sqrt(n_items) as it can, but allows for custom logic Parameters ---------- n_items : int Number of panels required max_cols : int Maximum number of columns, inclusive min_cols : int Minimum number of columns, inclusive Returns ------- (int, int) Rows and columns, so that rows * columns >= n_items ] def function[in_bounds, parameter[val]]: return[call[name[np].clip, parameter[name[val], name[min_cols], name[max_cols]]]] if compare[name[n_items] less_or_equal[<=] name[max_cols]] begin[:] return[tuple[[<ast.Constant object at 0x7da1b1b2ad10>, <ast.Name object at 0x7da1b1b2a170>]]] variable[ideal] assign[=] call[name[in_bounds], parameter[call[name[round], parameter[binary_operation[name[n_items] ** constant[0.5]]]]]] for taget[name[offset]] in starred[tuple[[<ast.Constant object at 0x7da1b1b3d2d0>, <ast.Constant object at 0x7da1b1b3d210>, <ast.UnaryOp object at 0x7da1b1b3d900>, <ast.Constant object at 0x7da1b1b3d060>, <ast.UnaryOp object at 0x7da1b1b3d0c0>]]] begin[:] variable[cols] assign[=] call[name[in_bounds], parameter[binary_operation[name[ideal] + name[offset]]]] <ast.Tuple object at 0x7da1b1b3ecb0> assign[=] call[name[divmod], parameter[name[n_items], name[cols]]] if compare[name[extra] equal[==] constant[0]] begin[:] return[tuple[[<ast.Name object at 0x7da1b1b3e530>, <ast.Name object at 0x7da1b1b3e440>]]] return[tuple[[<ast.BinOp object at 0x7da1b1b3e4d0>, <ast.Name object at 0x7da1b1b3e560>]]]
keyword[def] identifier[default_grid] ( identifier[n_items] , identifier[max_cols] = literal[int] , identifier[min_cols] = literal[int] ): literal[string] keyword[def] identifier[in_bounds] ( identifier[val] ): keyword[return] identifier[np] . identifier[clip] ( identifier[val] , identifier[min_cols] , identifier[max_cols] ) keyword[if] identifier[n_items] <= identifier[max_cols] : keyword[return] literal[int] , identifier[n_items] identifier[ideal] = identifier[in_bounds] ( identifier[round] ( identifier[n_items] ** literal[int] )) keyword[for] identifier[offset] keyword[in] ( literal[int] , literal[int] ,- literal[int] , literal[int] ,- literal[int] ): identifier[cols] = identifier[in_bounds] ( identifier[ideal] + identifier[offset] ) identifier[rows] , identifier[extra] = identifier[divmod] ( identifier[n_items] , identifier[cols] ) keyword[if] identifier[extra] == literal[int] : keyword[return] identifier[rows] , identifier[cols] keyword[return] identifier[n_items] // identifier[ideal] + literal[int] , identifier[ideal]
def default_grid(n_items, max_cols=4, min_cols=3): # noqa: D202 'Make a grid for subplots.\n\n Tries to get as close to sqrt(n_items) x sqrt(n_items) as it can,\n but allows for custom logic\n\n Parameters\n ----------\n n_items : int\n Number of panels required\n max_cols : int\n Maximum number of columns, inclusive\n min_cols : int\n Minimum number of columns, inclusive\n\n Returns\n -------\n (int, int)\n Rows and columns, so that rows * columns >= n_items\n ' def in_bounds(val): return np.clip(val, min_cols, max_cols) if n_items <= max_cols: return (1, n_items) # depends on [control=['if'], data=['n_items']] ideal = in_bounds(round(n_items ** 0.5)) for offset in (0, 1, -1, 2, -2): cols = in_bounds(ideal + offset) (rows, extra) = divmod(n_items, cols) if extra == 0: return (rows, cols) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['offset']] return (n_items // ideal + 1, ideal)
def sampleLocationFromFeature(self, feature): """ Samples a location from one specific feature. This is only supported with three dimensions. """ if feature == "face": return self._sampleFromFaces() elif feature == "edge": return self._sampleFromEdges() elif feature == "vertex": return self._sampleFromVertices() elif feature == "random": return self.sampleLocation() else: raise NameError("No such feature in {}: {}".format(self, feature))
def function[sampleLocationFromFeature, parameter[self, feature]]: constant[ Samples a location from one specific feature. This is only supported with three dimensions. ] if compare[name[feature] equal[==] constant[face]] begin[:] return[call[name[self]._sampleFromFaces, parameter[]]]
keyword[def] identifier[sampleLocationFromFeature] ( identifier[self] , identifier[feature] ): literal[string] keyword[if] identifier[feature] == literal[string] : keyword[return] identifier[self] . identifier[_sampleFromFaces] () keyword[elif] identifier[feature] == literal[string] : keyword[return] identifier[self] . identifier[_sampleFromEdges] () keyword[elif] identifier[feature] == literal[string] : keyword[return] identifier[self] . identifier[_sampleFromVertices] () keyword[elif] identifier[feature] == literal[string] : keyword[return] identifier[self] . identifier[sampleLocation] () keyword[else] : keyword[raise] identifier[NameError] ( literal[string] . identifier[format] ( identifier[self] , identifier[feature] ))
def sampleLocationFromFeature(self, feature): """ Samples a location from one specific feature. This is only supported with three dimensions. """ if feature == 'face': return self._sampleFromFaces() # depends on [control=['if'], data=[]] elif feature == 'edge': return self._sampleFromEdges() # depends on [control=['if'], data=[]] elif feature == 'vertex': return self._sampleFromVertices() # depends on [control=['if'], data=[]] elif feature == 'random': return self.sampleLocation() # depends on [control=['if'], data=[]] else: raise NameError('No such feature in {}: {}'.format(self, feature))
def import_patches(self, patches): """ Import several patches into the patch queue """ dest_dir = self.quilt_patches patch_names = [] for patch in patches: patch_name = os.path.basename(patch) patch_file = File(patch) dest_file = dest_dir + File(patch_name) patch_file.copy(dest_file) patch_names.append(patch_name) self._import_patches(patch_names)
def function[import_patches, parameter[self, patches]]: constant[ Import several patches into the patch queue ] variable[dest_dir] assign[=] name[self].quilt_patches variable[patch_names] assign[=] list[[]] for taget[name[patch]] in starred[name[patches]] begin[:] variable[patch_name] assign[=] call[name[os].path.basename, parameter[name[patch]]] variable[patch_file] assign[=] call[name[File], parameter[name[patch]]] variable[dest_file] assign[=] binary_operation[name[dest_dir] + call[name[File], parameter[name[patch_name]]]] call[name[patch_file].copy, parameter[name[dest_file]]] call[name[patch_names].append, parameter[name[patch_name]]] call[name[self]._import_patches, parameter[name[patch_names]]]
keyword[def] identifier[import_patches] ( identifier[self] , identifier[patches] ): literal[string] identifier[dest_dir] = identifier[self] . identifier[quilt_patches] identifier[patch_names] =[] keyword[for] identifier[patch] keyword[in] identifier[patches] : identifier[patch_name] = identifier[os] . identifier[path] . identifier[basename] ( identifier[patch] ) identifier[patch_file] = identifier[File] ( identifier[patch] ) identifier[dest_file] = identifier[dest_dir] + identifier[File] ( identifier[patch_name] ) identifier[patch_file] . identifier[copy] ( identifier[dest_file] ) identifier[patch_names] . identifier[append] ( identifier[patch_name] ) identifier[self] . identifier[_import_patches] ( identifier[patch_names] )
def import_patches(self, patches): """ Import several patches into the patch queue """ dest_dir = self.quilt_patches patch_names = [] for patch in patches: patch_name = os.path.basename(patch) patch_file = File(patch) dest_file = dest_dir + File(patch_name) patch_file.copy(dest_file) patch_names.append(patch_name) # depends on [control=['for'], data=['patch']] self._import_patches(patch_names)
def _run(self, line): ''' everything from RUN goes into the install list Parameters ========== line: the line from the recipe file to parse for FROM ''' line = self._setup('RUN', line) self.install += line
def function[_run, parameter[self, line]]: constant[ everything from RUN goes into the install list Parameters ========== line: the line from the recipe file to parse for FROM ] variable[line] assign[=] call[name[self]._setup, parameter[constant[RUN], name[line]]] <ast.AugAssign object at 0x7da1b040efe0>
keyword[def] identifier[_run] ( identifier[self] , identifier[line] ): literal[string] identifier[line] = identifier[self] . identifier[_setup] ( literal[string] , identifier[line] ) identifier[self] . identifier[install] += identifier[line]
def _run(self, line): """ everything from RUN goes into the install list Parameters ========== line: the line from the recipe file to parse for FROM """ line = self._setup('RUN', line) self.install += line
def from_yaml(cls, yaml_str=None, str_or_buffer=None): """ Create a DiscreteChoiceModel instance from a saved YAML configuration. Arguments are mutally exclusive. Parameters ---------- yaml_str : str, optional A YAML string from which to load model. str_or_buffer : str or file like, optional File name or buffer from which to load YAML. Returns ------- MNLDiscreteChoiceModel """ cfg = yamlio.yaml_to_dict(yaml_str, str_or_buffer) model = cls( cfg['model_expression'], cfg['sample_size'], probability_mode=cfg.get('probability_mode', 'full_product'), choice_mode=cfg.get('choice_mode', 'individual'), choosers_fit_filters=cfg.get('choosers_fit_filters', None), choosers_predict_filters=cfg.get('choosers_predict_filters', None), alts_fit_filters=cfg.get('alts_fit_filters', None), alts_predict_filters=cfg.get('alts_predict_filters', None), interaction_predict_filters=cfg.get( 'interaction_predict_filters', None), estimation_sample_size=cfg.get('estimation_sample_size', None), prediction_sample_size=cfg.get('prediction_sample_size', None), choice_column=cfg.get('choice_column', None), name=cfg.get('name', None) ) if cfg.get('log_likelihoods', None): model.log_likelihoods = cfg['log_likelihoods'] if cfg.get('fit_parameters', None): model.fit_parameters = pd.DataFrame(cfg['fit_parameters']) logger.debug('loaded LCM model {} from YAML'.format(model.name)) return model
def function[from_yaml, parameter[cls, yaml_str, str_or_buffer]]: constant[ Create a DiscreteChoiceModel instance from a saved YAML configuration. Arguments are mutally exclusive. Parameters ---------- yaml_str : str, optional A YAML string from which to load model. str_or_buffer : str or file like, optional File name or buffer from which to load YAML. Returns ------- MNLDiscreteChoiceModel ] variable[cfg] assign[=] call[name[yamlio].yaml_to_dict, parameter[name[yaml_str], name[str_or_buffer]]] variable[model] assign[=] call[name[cls], parameter[call[name[cfg]][constant[model_expression]], call[name[cfg]][constant[sample_size]]]] if call[name[cfg].get, parameter[constant[log_likelihoods], constant[None]]] begin[:] name[model].log_likelihoods assign[=] call[name[cfg]][constant[log_likelihoods]] if call[name[cfg].get, parameter[constant[fit_parameters], constant[None]]] begin[:] name[model].fit_parameters assign[=] call[name[pd].DataFrame, parameter[call[name[cfg]][constant[fit_parameters]]]] call[name[logger].debug, parameter[call[constant[loaded LCM model {} from YAML].format, parameter[name[model].name]]]] return[name[model]]
keyword[def] identifier[from_yaml] ( identifier[cls] , identifier[yaml_str] = keyword[None] , identifier[str_or_buffer] = keyword[None] ): literal[string] identifier[cfg] = identifier[yamlio] . identifier[yaml_to_dict] ( identifier[yaml_str] , identifier[str_or_buffer] ) identifier[model] = identifier[cls] ( identifier[cfg] [ literal[string] ], identifier[cfg] [ literal[string] ], identifier[probability_mode] = identifier[cfg] . identifier[get] ( literal[string] , literal[string] ), identifier[choice_mode] = identifier[cfg] . identifier[get] ( literal[string] , literal[string] ), identifier[choosers_fit_filters] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[choosers_predict_filters] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[alts_fit_filters] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[alts_predict_filters] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[interaction_predict_filters] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[estimation_sample_size] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[prediction_sample_size] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[choice_column] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ), identifier[name] = identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ) ) keyword[if] identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ): identifier[model] . identifier[log_likelihoods] = identifier[cfg] [ literal[string] ] keyword[if] identifier[cfg] . identifier[get] ( literal[string] , keyword[None] ): identifier[model] . identifier[fit_parameters] = identifier[pd] . identifier[DataFrame] ( identifier[cfg] [ literal[string] ]) identifier[logger] . identifier[debug] ( literal[string] . identifier[format] ( identifier[model] . identifier[name] )) keyword[return] identifier[model]
def from_yaml(cls, yaml_str=None, str_or_buffer=None): """ Create a DiscreteChoiceModel instance from a saved YAML configuration. Arguments are mutally exclusive. Parameters ---------- yaml_str : str, optional A YAML string from which to load model. str_or_buffer : str or file like, optional File name or buffer from which to load YAML. Returns ------- MNLDiscreteChoiceModel """ cfg = yamlio.yaml_to_dict(yaml_str, str_or_buffer) model = cls(cfg['model_expression'], cfg['sample_size'], probability_mode=cfg.get('probability_mode', 'full_product'), choice_mode=cfg.get('choice_mode', 'individual'), choosers_fit_filters=cfg.get('choosers_fit_filters', None), choosers_predict_filters=cfg.get('choosers_predict_filters', None), alts_fit_filters=cfg.get('alts_fit_filters', None), alts_predict_filters=cfg.get('alts_predict_filters', None), interaction_predict_filters=cfg.get('interaction_predict_filters', None), estimation_sample_size=cfg.get('estimation_sample_size', None), prediction_sample_size=cfg.get('prediction_sample_size', None), choice_column=cfg.get('choice_column', None), name=cfg.get('name', None)) if cfg.get('log_likelihoods', None): model.log_likelihoods = cfg['log_likelihoods'] # depends on [control=['if'], data=[]] if cfg.get('fit_parameters', None): model.fit_parameters = pd.DataFrame(cfg['fit_parameters']) # depends on [control=['if'], data=[]] logger.debug('loaded LCM model {} from YAML'.format(model.name)) return model
def check_rev_options(self, rev, dest, rev_options): """Check the revision options before checkout to compensate that tags and branches may need origin/ as a prefix. Returns the SHA1 of the branch or tag if found. """ revisions = self.get_refs(dest) origin_rev = 'origin/%s' % rev if origin_rev in revisions: # remote branch return [revisions[origin_rev]] elif rev in revisions: # a local tag or branch name return [revisions[rev]] else: logger.warning( "Could not find a tag or branch '%s', assuming commit.", rev, ) return rev_options
def function[check_rev_options, parameter[self, rev, dest, rev_options]]: constant[Check the revision options before checkout to compensate that tags and branches may need origin/ as a prefix. Returns the SHA1 of the branch or tag if found. ] variable[revisions] assign[=] call[name[self].get_refs, parameter[name[dest]]] variable[origin_rev] assign[=] binary_operation[constant[origin/%s] <ast.Mod object at 0x7da2590d6920> name[rev]] if compare[name[origin_rev] in name[revisions]] begin[:] return[list[[<ast.Subscript object at 0x7da18f720310>]]]
keyword[def] identifier[check_rev_options] ( identifier[self] , identifier[rev] , identifier[dest] , identifier[rev_options] ): literal[string] identifier[revisions] = identifier[self] . identifier[get_refs] ( identifier[dest] ) identifier[origin_rev] = literal[string] % identifier[rev] keyword[if] identifier[origin_rev] keyword[in] identifier[revisions] : keyword[return] [ identifier[revisions] [ identifier[origin_rev] ]] keyword[elif] identifier[rev] keyword[in] identifier[revisions] : keyword[return] [ identifier[revisions] [ identifier[rev] ]] keyword[else] : identifier[logger] . identifier[warning] ( literal[string] , identifier[rev] , ) keyword[return] identifier[rev_options]
def check_rev_options(self, rev, dest, rev_options): """Check the revision options before checkout to compensate that tags and branches may need origin/ as a prefix. Returns the SHA1 of the branch or tag if found. """ revisions = self.get_refs(dest) origin_rev = 'origin/%s' % rev if origin_rev in revisions: # remote branch return [revisions[origin_rev]] # depends on [control=['if'], data=['origin_rev', 'revisions']] elif rev in revisions: # a local tag or branch name return [revisions[rev]] # depends on [control=['if'], data=['rev', 'revisions']] else: logger.warning("Could not find a tag or branch '%s', assuming commit.", rev) return rev_options
def add_keyword(self, keyword, schema=None, source=None): """Add a keyword. Args: keyword(str): keyword to add. schema(str): schema to which the keyword belongs. source(str): source for the keyword. """ keyword_dict = self._sourced_dict(source, value=keyword) if schema is not None: keyword_dict['schema'] = schema self._append_to('keywords', keyword_dict)
def function[add_keyword, parameter[self, keyword, schema, source]]: constant[Add a keyword. Args: keyword(str): keyword to add. schema(str): schema to which the keyword belongs. source(str): source for the keyword. ] variable[keyword_dict] assign[=] call[name[self]._sourced_dict, parameter[name[source]]] if compare[name[schema] is_not constant[None]] begin[:] call[name[keyword_dict]][constant[schema]] assign[=] name[schema] call[name[self]._append_to, parameter[constant[keywords], name[keyword_dict]]]
keyword[def] identifier[add_keyword] ( identifier[self] , identifier[keyword] , identifier[schema] = keyword[None] , identifier[source] = keyword[None] ): literal[string] identifier[keyword_dict] = identifier[self] . identifier[_sourced_dict] ( identifier[source] , identifier[value] = identifier[keyword] ) keyword[if] identifier[schema] keyword[is] keyword[not] keyword[None] : identifier[keyword_dict] [ literal[string] ]= identifier[schema] identifier[self] . identifier[_append_to] ( literal[string] , identifier[keyword_dict] )
def add_keyword(self, keyword, schema=None, source=None): """Add a keyword. Args: keyword(str): keyword to add. schema(str): schema to which the keyword belongs. source(str): source for the keyword. """ keyword_dict = self._sourced_dict(source, value=keyword) if schema is not None: keyword_dict['schema'] = schema # depends on [control=['if'], data=['schema']] self._append_to('keywords', keyword_dict)
def _remove_exact(self, needle, keep_field): """Remove a specific parameter, *needle*, from the template.""" for i, param in enumerate(self.params): if param is needle: if keep_field: self._blank_param_value(param.value) else: self._fix_dependendent_params(i) self.params.pop(i) return raise ValueError(needle)
def function[_remove_exact, parameter[self, needle, keep_field]]: constant[Remove a specific parameter, *needle*, from the template.] for taget[tuple[[<ast.Name object at 0x7da20c990430>, <ast.Name object at 0x7da20c990670>]]] in starred[call[name[enumerate], parameter[name[self].params]]] begin[:] if compare[name[param] is name[needle]] begin[:] if name[keep_field] begin[:] call[name[self]._blank_param_value, parameter[name[param].value]] return[None] <ast.Raise object at 0x7da20c991720>
keyword[def] identifier[_remove_exact] ( identifier[self] , identifier[needle] , identifier[keep_field] ): literal[string] keyword[for] identifier[i] , identifier[param] keyword[in] identifier[enumerate] ( identifier[self] . identifier[params] ): keyword[if] identifier[param] keyword[is] identifier[needle] : keyword[if] identifier[keep_field] : identifier[self] . identifier[_blank_param_value] ( identifier[param] . identifier[value] ) keyword[else] : identifier[self] . identifier[_fix_dependendent_params] ( identifier[i] ) identifier[self] . identifier[params] . identifier[pop] ( identifier[i] ) keyword[return] keyword[raise] identifier[ValueError] ( identifier[needle] )
def _remove_exact(self, needle, keep_field): """Remove a specific parameter, *needle*, from the template.""" for (i, param) in enumerate(self.params): if param is needle: if keep_field: self._blank_param_value(param.value) # depends on [control=['if'], data=[]] else: self._fix_dependendent_params(i) self.params.pop(i) return # depends on [control=['if'], data=['param']] # depends on [control=['for'], data=[]] raise ValueError(needle)
def _republish_displaypub(self, content, eid): """republish individual displaypub content dicts""" try: ip = get_ipython() except NameError: # displaypub is meaningless outside IPython return md = content['metadata'] or {} md['engine'] = eid ip.display_pub.publish(content['source'], content['data'], md)
def function[_republish_displaypub, parameter[self, content, eid]]: constant[republish individual displaypub content dicts] <ast.Try object at 0x7da1b2345de0> variable[md] assign[=] <ast.BoolOp object at 0x7da1b2345360> call[name[md]][constant[engine]] assign[=] name[eid] call[name[ip].display_pub.publish, parameter[call[name[content]][constant[source]], call[name[content]][constant[data]], name[md]]]
keyword[def] identifier[_republish_displaypub] ( identifier[self] , identifier[content] , identifier[eid] ): literal[string] keyword[try] : identifier[ip] = identifier[get_ipython] () keyword[except] identifier[NameError] : keyword[return] identifier[md] = identifier[content] [ literal[string] ] keyword[or] {} identifier[md] [ literal[string] ]= identifier[eid] identifier[ip] . identifier[display_pub] . identifier[publish] ( identifier[content] [ literal[string] ], identifier[content] [ literal[string] ], identifier[md] )
def _republish_displaypub(self, content, eid): """republish individual displaypub content dicts""" try: ip = get_ipython() # depends on [control=['try'], data=[]] except NameError: # displaypub is meaningless outside IPython return # depends on [control=['except'], data=[]] md = content['metadata'] or {} md['engine'] = eid ip.display_pub.publish(content['source'], content['data'], md)
def eoq(I,F,h,d,w,W,a0,aK,K): """eoq -- multi-item capacitated economic ordering quantity model Parameters: - I: set of items - F[i]: ordering cost for item i - h[i]: holding cost for item i - d[i]: demand for item i - w[i]: unit weight for item i - W: capacity (limit on order quantity) - a0: lower bound on the cycle time (x axis) - aK: upper bound on the cycle time (x axis) - K: number of linear pieces to use in the approximation Returns a model, ready to be solved. """ # construct points for piecewise-linear relation, store in a,b a,b = {},{} delta = float(aK-a0)/K for i in I: for k in range(K): T = a0 + delta*k a[i,k] = T # abscissa: cycle time b[i,k] = F[i]/T + h[i]*d[i]*T/2. # ordinate: (convex) cost for this cycle time model = Model("multi-item, capacitated EOQ") x,c,w_ = {},{},{} for i in I: x[i] = model.addVar(vtype="C", name="x(%s)"%i) # cycle time for item i c[i] = model.addVar(vtype="C", name="c(%s)"%i) # total cost for item i for k in range(K): w_[i,k] = model.addVar(ub=1, vtype="C", name="w(%s,%s)"%(i,k)) #todo ?? for i in I: model.addCons(quicksum(w_[i,k] for k in range(K)) == 1) model.addCons(quicksum(a[i,k]*w_[i,k] for k in range(K)) == x[i]) model.addCons(quicksum(b[i,k]*w_[i,k] for k in range(K)) == c[i]) model.addCons(quicksum(w[i]*d[i]*x[i] for i in I) <= W) model.setObjective(quicksum(c[i] for i in I), "minimize") model.data = x,w return model
def function[eoq, parameter[I, F, h, d, w, W, a0, aK, K]]: constant[eoq -- multi-item capacitated economic ordering quantity model Parameters: - I: set of items - F[i]: ordering cost for item i - h[i]: holding cost for item i - d[i]: demand for item i - w[i]: unit weight for item i - W: capacity (limit on order quantity) - a0: lower bound on the cycle time (x axis) - aK: upper bound on the cycle time (x axis) - K: number of linear pieces to use in the approximation Returns a model, ready to be solved. ] <ast.Tuple object at 0x7da1b1700ac0> assign[=] tuple[[<ast.Dict object at 0x7da1b1700250>, <ast.Dict object at 0x7da1b1701e10>]] variable[delta] assign[=] binary_operation[call[name[float], parameter[binary_operation[name[aK] - name[a0]]]] / name[K]] for taget[name[i]] in starred[name[I]] begin[:] for taget[name[k]] in starred[call[name[range], parameter[name[K]]]] begin[:] variable[T] assign[=] binary_operation[name[a0] + binary_operation[name[delta] * name[k]]] call[name[a]][tuple[[<ast.Name object at 0x7da1b1700340>, <ast.Name object at 0x7da1b17014e0>]]] assign[=] name[T] call[name[b]][tuple[[<ast.Name object at 0x7da1b1703670>, <ast.Name object at 0x7da1b1703160>]]] assign[=] binary_operation[binary_operation[call[name[F]][name[i]] / name[T]] + binary_operation[binary_operation[binary_operation[call[name[h]][name[i]] * call[name[d]][name[i]]] * name[T]] / constant[2.0]]] variable[model] assign[=] call[name[Model], parameter[constant[multi-item, capacitated EOQ]]] <ast.Tuple object at 0x7da1b1702d70> assign[=] tuple[[<ast.Dict object at 0x7da1b17033a0>, <ast.Dict object at 0x7da1b1703970>, <ast.Dict object at 0x7da1b17034c0>]] for taget[name[i]] in starred[name[I]] begin[:] call[name[x]][name[i]] assign[=] call[name[model].addVar, parameter[]] call[name[c]][name[i]] assign[=] call[name[model].addVar, parameter[]] for taget[name[k]] in starred[call[name[range], parameter[name[K]]]] begin[:] call[name[w_]][tuple[[<ast.Name object at 0x7da1b1700ee0>, <ast.Name object at 0x7da1b1700e80>]]] assign[=] call[name[model].addVar, parameter[]] for taget[name[i]] in starred[name[I]] begin[:] call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b1701cf0>]] equal[==] constant[1]]]] call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b1703a90>]] equal[==] call[name[x]][name[i]]]]] call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b1701ed0>]] equal[==] call[name[c]][name[i]]]]] call[name[model].addCons, parameter[compare[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da1b1701b10>]] less_or_equal[<=] name[W]]]] call[name[model].setObjective, parameter[call[name[quicksum], parameter[<ast.GeneratorExp object at 0x7da18f00f460>]], constant[minimize]]] name[model].data assign[=] tuple[[<ast.Name object at 0x7da18f00d870>, <ast.Name object at 0x7da18f00ec50>]] return[name[model]]
keyword[def] identifier[eoq] ( identifier[I] , identifier[F] , identifier[h] , identifier[d] , identifier[w] , identifier[W] , identifier[a0] , identifier[aK] , identifier[K] ): literal[string] identifier[a] , identifier[b] ={},{} identifier[delta] = identifier[float] ( identifier[aK] - identifier[a0] )/ identifier[K] keyword[for] identifier[i] keyword[in] identifier[I] : keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ): identifier[T] = identifier[a0] + identifier[delta] * identifier[k] identifier[a] [ identifier[i] , identifier[k] ]= identifier[T] identifier[b] [ identifier[i] , identifier[k] ]= identifier[F] [ identifier[i] ]/ identifier[T] + identifier[h] [ identifier[i] ]* identifier[d] [ identifier[i] ]* identifier[T] / literal[int] identifier[model] = identifier[Model] ( literal[string] ) identifier[x] , identifier[c] , identifier[w_] ={},{},{} keyword[for] identifier[i] keyword[in] identifier[I] : identifier[x] [ identifier[i] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] % identifier[i] ) identifier[c] [ identifier[i] ]= identifier[model] . identifier[addVar] ( identifier[vtype] = literal[string] , identifier[name] = literal[string] % identifier[i] ) keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ): identifier[w_] [ identifier[i] , identifier[k] ]= identifier[model] . identifier[addVar] ( identifier[ub] = literal[int] , identifier[vtype] = literal[string] , identifier[name] = literal[string] %( identifier[i] , identifier[k] )) keyword[for] identifier[i] keyword[in] identifier[I] : identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[w_] [ identifier[i] , identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ))== literal[int] ) identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[a] [ identifier[i] , identifier[k] ]* identifier[w_] [ identifier[i] , identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ))== identifier[x] [ identifier[i] ]) identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[b] [ identifier[i] , identifier[k] ]* identifier[w_] [ identifier[i] , identifier[k] ] keyword[for] identifier[k] keyword[in] identifier[range] ( identifier[K] ))== identifier[c] [ identifier[i] ]) identifier[model] . identifier[addCons] ( identifier[quicksum] ( identifier[w] [ identifier[i] ]* identifier[d] [ identifier[i] ]* identifier[x] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[I] )<= identifier[W] ) identifier[model] . identifier[setObjective] ( identifier[quicksum] ( identifier[c] [ identifier[i] ] keyword[for] identifier[i] keyword[in] identifier[I] ), literal[string] ) identifier[model] . identifier[data] = identifier[x] , identifier[w] keyword[return] identifier[model]
def eoq(I, F, h, d, w, W, a0, aK, K): """eoq -- multi-item capacitated economic ordering quantity model Parameters: - I: set of items - F[i]: ordering cost for item i - h[i]: holding cost for item i - d[i]: demand for item i - w[i]: unit weight for item i - W: capacity (limit on order quantity) - a0: lower bound on the cycle time (x axis) - aK: upper bound on the cycle time (x axis) - K: number of linear pieces to use in the approximation Returns a model, ready to be solved. """ # construct points for piecewise-linear relation, store in a,b (a, b) = ({}, {}) delta = float(aK - a0) / K for i in I: for k in range(K): T = a0 + delta * k a[i, k] = T # abscissa: cycle time b[i, k] = F[i] / T + h[i] * d[i] * T / 2.0 # ordinate: (convex) cost for this cycle time # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['i']] model = Model('multi-item, capacitated EOQ') (x, c, w_) = ({}, {}, {}) for i in I: x[i] = model.addVar(vtype='C', name='x(%s)' % i) # cycle time for item i c[i] = model.addVar(vtype='C', name='c(%s)' % i) # total cost for item i for k in range(K): w_[i, k] = model.addVar(ub=1, vtype='C', name='w(%s,%s)' % (i, k)) #todo ?? # depends on [control=['for'], data=['k']] # depends on [control=['for'], data=['i']] for i in I: model.addCons(quicksum((w_[i, k] for k in range(K))) == 1) model.addCons(quicksum((a[i, k] * w_[i, k] for k in range(K))) == x[i]) model.addCons(quicksum((b[i, k] * w_[i, k] for k in range(K))) == c[i]) # depends on [control=['for'], data=['i']] model.addCons(quicksum((w[i] * d[i] * x[i] for i in I)) <= W) model.setObjective(quicksum((c[i] for i in I)), 'minimize') model.data = (x, w) return model
def serialize(func): """ Falcon response serialization """ def wrapped(instance, req, resp, **kwargs): assert not req.get_param("unicode") or req.get_param("unicode") == u"✓", "Unicode sanity check failed" resp.set_header("Cache-Control", "no-cache, no-store, must-revalidate"); resp.set_header("Pragma", "no-cache"); resp.set_header("Expires", "0"); r = func(instance, req, resp, **kwargs) if not resp.body: if not req.client_accepts_json: raise falcon.HTTPUnsupportedMediaType( 'This API only supports the JSON media type.', href='http://docs.examples.com/api/json') resp.set_header('Content-Type', 'application/json') resp.body = json.dumps(r, cls=MyEncoder) return r return wrapped
def function[serialize, parameter[func]]: constant[ Falcon response serialization ] def function[wrapped, parameter[instance, req, resp]]: assert[<ast.BoolOp object at 0x7da1b0a6d720>] call[name[resp].set_header, parameter[constant[Cache-Control], constant[no-cache, no-store, must-revalidate]]] call[name[resp].set_header, parameter[constant[Pragma], constant[no-cache]]] call[name[resp].set_header, parameter[constant[Expires], constant[0]]] variable[r] assign[=] call[name[func], parameter[name[instance], name[req], name[resp]]] if <ast.UnaryOp object at 0x7da1b0a6c760> begin[:] if <ast.UnaryOp object at 0x7da1b0a6d390> begin[:] <ast.Raise object at 0x7da1b0a6c340> call[name[resp].set_header, parameter[constant[Content-Type], constant[application/json]]] name[resp].body assign[=] call[name[json].dumps, parameter[name[r]]] return[name[r]] return[name[wrapped]]
keyword[def] identifier[serialize] ( identifier[func] ): literal[string] keyword[def] identifier[wrapped] ( identifier[instance] , identifier[req] , identifier[resp] ,** identifier[kwargs] ): keyword[assert] keyword[not] identifier[req] . identifier[get_param] ( literal[string] ) keyword[or] identifier[req] . identifier[get_param] ( literal[string] )== literal[string] , literal[string] identifier[resp] . identifier[set_header] ( literal[string] , literal[string] ); identifier[resp] . identifier[set_header] ( literal[string] , literal[string] ); identifier[resp] . identifier[set_header] ( literal[string] , literal[string] ); identifier[r] = identifier[func] ( identifier[instance] , identifier[req] , identifier[resp] ,** identifier[kwargs] ) keyword[if] keyword[not] identifier[resp] . identifier[body] : keyword[if] keyword[not] identifier[req] . identifier[client_accepts_json] : keyword[raise] identifier[falcon] . identifier[HTTPUnsupportedMediaType] ( literal[string] , identifier[href] = literal[string] ) identifier[resp] . identifier[set_header] ( literal[string] , literal[string] ) identifier[resp] . identifier[body] = identifier[json] . identifier[dumps] ( identifier[r] , identifier[cls] = identifier[MyEncoder] ) keyword[return] identifier[r] keyword[return] identifier[wrapped]
def serialize(func): """ Falcon response serialization """ def wrapped(instance, req, resp, **kwargs): assert not req.get_param('unicode') or req.get_param('unicode') == u'✓', 'Unicode sanity check failed' resp.set_header('Cache-Control', 'no-cache, no-store, must-revalidate') resp.set_header('Pragma', 'no-cache') resp.set_header('Expires', '0') r = func(instance, req, resp, **kwargs) if not resp.body: if not req.client_accepts_json: raise falcon.HTTPUnsupportedMediaType('This API only supports the JSON media type.', href='http://docs.examples.com/api/json') # depends on [control=['if'], data=[]] resp.set_header('Content-Type', 'application/json') resp.body = json.dumps(r, cls=MyEncoder) # depends on [control=['if'], data=[]] return r return wrapped
def list(self, id=None): """ List all running jobs :param id: optional ID for the job to list """ args = {'id': id} self._job_chk.check(args) return self._client.json('job.list', args)
def function[list, parameter[self, id]]: constant[ List all running jobs :param id: optional ID for the job to list ] variable[args] assign[=] dictionary[[<ast.Constant object at 0x7da1b04dae90>], [<ast.Name object at 0x7da1b04d9420>]] call[name[self]._job_chk.check, parameter[name[args]]] return[call[name[self]._client.json, parameter[constant[job.list], name[args]]]]
keyword[def] identifier[list] ( identifier[self] , identifier[id] = keyword[None] ): literal[string] identifier[args] ={ literal[string] : identifier[id] } identifier[self] . identifier[_job_chk] . identifier[check] ( identifier[args] ) keyword[return] identifier[self] . identifier[_client] . identifier[json] ( literal[string] , identifier[args] )
def list(self, id=None): """ List all running jobs :param id: optional ID for the job to list """ args = {'id': id} self._job_chk.check(args) return self._client.json('job.list', args)
def valid_remainder(cntxt: Context, n: Node, matchables: RDFGraph, S: ShExJ.Shape) -> bool: """ Let **outs** be the arcsOut in remainder: `outs = remainder ∩ arcsOut(G, n)`. Let **matchables** be the triples in outs whose predicate appears in a TripleConstraint in `expression`. If `expression` is absent, matchables = Ø (the empty set). * There is no triple in **matchables** which matches a TripleConstraint in expression * There is no triple in **matchables** whose predicate does not appear in extra. * closed is false or unmatchables is empty :param cntxt: evaluation context :param n: focus node :param matchables: non-matched triples :param S: Shape being evaluated :return: True if remainder is valid """ # TODO: Update this and satisfies to address the new algorithm # Let **outs** be the arcsOut in remainder: `outs = remainder ∩ arcsOut(G, n)`. outs = arcsOut(cntxt.graph, n).intersection(matchables) # predicates that in a TripleConstraint in `expression` predicates = predicates_in_expression(S, cntxt) # Let **matchables** be the triples in outs whose predicate appears in predicates. If # `expression` is absent, matchables = Ø (the empty set). matchables = RDFGraph(t for t in outs if str(t.p) in predicates) # There is no triple in **matchables** which matches a TripleConstraint in expression if matchables and S.expression is not None: tes = triple_constraints_in_expression(S.expression, cntxt) for m in matchables: if any(matchesTripleConstraint(cntxt, m, te) for te in tes): return False # There is no triple in **matchables** whose predicate does not appear in extra. extras = {iriref_to_uriref(e) for e in S.extra} if S.extra is not None else {} if any(t.p not in extras for t in matchables): return False # closed is false or unmatchables is empty. return not S.closed.val or not bool(outs - matchables)
def function[valid_remainder, parameter[cntxt, n, matchables, S]]: constant[ Let **outs** be the arcsOut in remainder: `outs = remainder ∩ arcsOut(G, n)`. Let **matchables** be the triples in outs whose predicate appears in a TripleConstraint in `expression`. If `expression` is absent, matchables = Ø (the empty set). * There is no triple in **matchables** which matches a TripleConstraint in expression * There is no triple in **matchables** whose predicate does not appear in extra. * closed is false or unmatchables is empty :param cntxt: evaluation context :param n: focus node :param matchables: non-matched triples :param S: Shape being evaluated :return: True if remainder is valid ] variable[outs] assign[=] call[call[name[arcsOut], parameter[name[cntxt].graph, name[n]]].intersection, parameter[name[matchables]]] variable[predicates] assign[=] call[name[predicates_in_expression], parameter[name[S], name[cntxt]]] variable[matchables] assign[=] call[name[RDFGraph], parameter[<ast.GeneratorExp object at 0x7da1b0fadbd0>]] if <ast.BoolOp object at 0x7da1b0faf910> begin[:] variable[tes] assign[=] call[name[triple_constraints_in_expression], parameter[name[S].expression, name[cntxt]]] for taget[name[m]] in starred[name[matchables]] begin[:] if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0fafee0>]] begin[:] return[constant[False]] variable[extras] assign[=] <ast.IfExp object at 0x7da1b0fae2c0> if call[name[any], parameter[<ast.GeneratorExp object at 0x7da1b0fac6d0>]] begin[:] return[constant[False]] return[<ast.BoolOp object at 0x7da1b0fac100>]
keyword[def] identifier[valid_remainder] ( identifier[cntxt] : identifier[Context] , identifier[n] : identifier[Node] , identifier[matchables] : identifier[RDFGraph] , identifier[S] : identifier[ShExJ] . identifier[Shape] )-> identifier[bool] : literal[string] identifier[outs] = identifier[arcsOut] ( identifier[cntxt] . identifier[graph] , identifier[n] ). identifier[intersection] ( identifier[matchables] ) identifier[predicates] = identifier[predicates_in_expression] ( identifier[S] , identifier[cntxt] ) identifier[matchables] = identifier[RDFGraph] ( identifier[t] keyword[for] identifier[t] keyword[in] identifier[outs] keyword[if] identifier[str] ( identifier[t] . identifier[p] ) keyword[in] identifier[predicates] ) keyword[if] identifier[matchables] keyword[and] identifier[S] . identifier[expression] keyword[is] keyword[not] keyword[None] : identifier[tes] = identifier[triple_constraints_in_expression] ( identifier[S] . identifier[expression] , identifier[cntxt] ) keyword[for] identifier[m] keyword[in] identifier[matchables] : keyword[if] identifier[any] ( identifier[matchesTripleConstraint] ( identifier[cntxt] , identifier[m] , identifier[te] ) keyword[for] identifier[te] keyword[in] identifier[tes] ): keyword[return] keyword[False] identifier[extras] ={ identifier[iriref_to_uriref] ( identifier[e] ) keyword[for] identifier[e] keyword[in] identifier[S] . identifier[extra] } keyword[if] identifier[S] . identifier[extra] keyword[is] keyword[not] keyword[None] keyword[else] {} keyword[if] identifier[any] ( identifier[t] . identifier[p] keyword[not] keyword[in] identifier[extras] keyword[for] identifier[t] keyword[in] identifier[matchables] ): keyword[return] keyword[False] keyword[return] keyword[not] identifier[S] . identifier[closed] . identifier[val] keyword[or] keyword[not] identifier[bool] ( identifier[outs] - identifier[matchables] )
def valid_remainder(cntxt: Context, n: Node, matchables: RDFGraph, S: ShExJ.Shape) -> bool: """ Let **outs** be the arcsOut in remainder: `outs = remainder ∩ arcsOut(G, n)`. Let **matchables** be the triples in outs whose predicate appears in a TripleConstraint in `expression`. If `expression` is absent, matchables = Ø (the empty set). * There is no triple in **matchables** which matches a TripleConstraint in expression * There is no triple in **matchables** whose predicate does not appear in extra. * closed is false or unmatchables is empty :param cntxt: evaluation context :param n: focus node :param matchables: non-matched triples :param S: Shape being evaluated :return: True if remainder is valid """ # TODO: Update this and satisfies to address the new algorithm # Let **outs** be the arcsOut in remainder: `outs = remainder ∩ arcsOut(G, n)`. outs = arcsOut(cntxt.graph, n).intersection(matchables) # predicates that in a TripleConstraint in `expression` predicates = predicates_in_expression(S, cntxt) # Let **matchables** be the triples in outs whose predicate appears in predicates. If # `expression` is absent, matchables = Ø (the empty set). matchables = RDFGraph((t for t in outs if str(t.p) in predicates)) # There is no triple in **matchables** which matches a TripleConstraint in expression if matchables and S.expression is not None: tes = triple_constraints_in_expression(S.expression, cntxt) for m in matchables: if any((matchesTripleConstraint(cntxt, m, te) for te in tes)): return False # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['m']] # depends on [control=['if'], data=[]] # There is no triple in **matchables** whose predicate does not appear in extra. extras = {iriref_to_uriref(e) for e in S.extra} if S.extra is not None else {} if any((t.p not in extras for t in matchables)): return False # depends on [control=['if'], data=[]] # closed is false or unmatchables is empty. return not S.closed.val or not bool(outs - matchables)
def iptag_clear(self, iptag, x, y): """Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. """ self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.clear) << 16 | iptag)
def function[iptag_clear, parameter[self, iptag, x, y]]: constant[Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. ] call[name[self]._send_scp, parameter[name[x], name[y], constant[0], name[SCPCommands].iptag, binary_operation[binary_operation[call[name[int], parameter[name[consts].IPTagCommands.clear]] <ast.LShift object at 0x7da2590d69e0> constant[16]] <ast.BitOr object at 0x7da2590d6aa0> name[iptag]]]]
keyword[def] identifier[iptag_clear] ( identifier[self] , identifier[iptag] , identifier[x] , identifier[y] ): literal[string] identifier[self] . identifier[_send_scp] ( identifier[x] , identifier[y] , literal[int] , identifier[SCPCommands] . identifier[iptag] , identifier[int] ( identifier[consts] . identifier[IPTagCommands] . identifier[clear] )<< literal[int] | identifier[iptag] )
def iptag_clear(self, iptag, x, y): """Clear an IPTag. Parameters ---------- iptag : int Index of the IPTag to clear. """ self._send_scp(x, y, 0, SCPCommands.iptag, int(consts.IPTagCommands.clear) << 16 | iptag)