text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def runScript(self, scriptname, additional_environment=None):
    ''' Run the specified script from the scripts section of the
        module.json file in the directory of this module.

        :param scriptname: Key of the script to look up via ``getScript``.
        :param additional_environment: Optional mapping of extra environment
            variables, merged over a copy of ``os.environ`` for the child.
        :return: 0 when the script succeeds or is undefined, 1 when the
            script entry exists but is empty, otherwise the child's
            non-zero exit status.
    '''
    import subprocess  # removed unused `import shlex`
    command = self.getScript(scriptname)
    if command is None:
        logger.debug('%s has no script %s', self, scriptname)
        return 0
    if not len(command):
        logger.error("script %s of %s is empty", scriptname, self.getName())
        return 1
    # define additional environment variables for scripts:
    env = os.environ.copy()
    if additional_environment is not None:
        env.update(additional_environment)
    errcode = 0
    child = None
    try:
        logger.debug('running script: %s', command)
        child = subprocess.Popen(
            command, cwd = self.path, env = env
        )
        child.wait()
        if child.returncode:
            logger.error(
                "script %s (from %s) exited with non-zero status %s",
                scriptname,
                self.getName(),
                child.returncode
            )
            errcode = child.returncode
        # Mark the child as finished so the finally-block does not kill it.
        child = None
    finally:
        # If we are unwinding due to an exception the child may still be
        # running: make a best-effort attempt to stop it.
        if child is not None:
            tryTerminate(child)
    return errcode
[ "def", "runScript", "(", "self", ",", "scriptname", ",", "additional_environment", "=", "None", ")", ":", "import", "subprocess", "import", "shlex", "command", "=", "self", ".", "getScript", "(", "scriptname", ")", "if", "command", "is", "None", ":", "logger", ".", "debug", "(", "'%s has no script %s'", ",", "self", ",", "scriptname", ")", "return", "0", "if", "not", "len", "(", "command", ")", ":", "logger", ".", "error", "(", "\"script %s of %s is empty\"", ",", "scriptname", ",", "self", ".", "getName", "(", ")", ")", "return", "1", "# define additional environment variables for scripts:", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "additional_environment", "is", "not", "None", ":", "env", ".", "update", "(", "additional_environment", ")", "errcode", "=", "0", "child", "=", "None", "try", ":", "logger", ".", "debug", "(", "'running script: %s'", ",", "command", ")", "child", "=", "subprocess", ".", "Popen", "(", "command", ",", "cwd", "=", "self", ".", "path", ",", "env", "=", "env", ")", "child", ".", "wait", "(", ")", "if", "child", ".", "returncode", ":", "logger", ".", "error", "(", "\"script %s (from %s) exited with non-zero status %s\"", ",", "scriptname", ",", "self", ".", "getName", "(", ")", ",", "child", ".", "returncode", ")", "errcode", "=", "child", ".", "returncode", "child", "=", "None", "finally", ":", "if", "child", "is", "not", "None", ":", "tryTerminate", "(", "child", ")", "return", "errcode" ]
32.642857
18.5
def batch_update(self, values, w=1):
    """Feed every value from *values* into the t-digest, then compress.

    All points are added with the same weight *w*.
    """
    for value in values:
        self.update(value, w)
    self.compress()
[ "def", "batch_update", "(", "self", ",", "values", ",", "w", "=", "1", ")", ":", "for", "x", "in", "values", ":", "self", ".", "update", "(", "x", ",", "w", ")", "self", ".", "compress", "(", ")", "return" ]
28.444444
15.777778
def do_tagg(self, arglist: List[str]):
    """Build an HTML tag from a plain argument list (no argparser).

    The first argument is the tag name; the rest become the tag content.
    """
    if len(arglist) < 2:
        self.perror("tagg requires at least 2 arguments")
        return
    tag, *words = arglist
    self.poutput('<{0}>{1}</{0}>'.format(tag, ' '.join(words)))
[ "def", "do_tagg", "(", "self", ",", "arglist", ":", "List", "[", "str", "]", ")", ":", "if", "len", "(", "arglist", ")", ">=", "2", ":", "tag", "=", "arglist", "[", "0", "]", "content", "=", "arglist", "[", "1", ":", "]", "self", ".", "poutput", "(", "'<{0}>{1}</{0}>'", ".", "format", "(", "tag", ",", "' '", ".", "join", "(", "content", ")", ")", ")", "else", ":", "self", ".", "perror", "(", "\"tagg requires at least 2 arguments\"", ")" ]
44.375
14.125
def prepare_new_layer(self, input_layer):
    """Prepare new layer for the output layer.

    :param input_layer: Vector layer.
    :type input_layer: QgsVectorLayer

    :return: New memory layer duplicated from input_layer.
    :rtype: QgsVectorLayer
    """
    # Derive a unique name for the in-memory copy of the input layer.
    base_name = os.path.splitext(input_layer.name())[0]
    layer_name = unique_filename(
        prefix=('%s_minimum_needs_' % base_name),
        dir='minimum_needs_calculator')
    # create memory layer
    result_layer = create_memory_layer(
        layer_name, input_layer.geometryType(), input_layer.crs(),
        input_layer.fields())

    # monkey patching input layer to make it work with
    # prepare vector layer function
    patched_source = input_layer
    patched_source.keywords = {
        'layer_purpose': layer_purpose_aggregation['key']}

    # copy features to output layer
    copy_layer(patched_source, result_layer)

    # Monkey patching output layer to make it work with
    # minimum needs calculator
    result_layer.keywords['layer_purpose'] = (
        layer_purpose_aggregation['key'])
    result_layer.keywords['inasafe_fields'] = {
        displaced_field['key']: self.displaced.currentField()
    }
    if self.aggregation_name.currentField():
        result_layer.keywords['inasafe_fields'][
            aggregation_name_field['key']] = (
                self.aggregation_name.currentField())

    # remove unnecessary fields & rename inasafe fields
    clean_inasafe_fields(result_layer)
    return result_layer
[ "def", "prepare_new_layer", "(", "self", ",", "input_layer", ")", ":", "# create memory layer", "output_layer_name", "=", "os", ".", "path", ".", "splitext", "(", "input_layer", ".", "name", "(", ")", ")", "[", "0", "]", "output_layer_name", "=", "unique_filename", "(", "prefix", "=", "(", "'%s_minimum_needs_'", "%", "output_layer_name", ")", ",", "dir", "=", "'minimum_needs_calculator'", ")", "output_layer", "=", "create_memory_layer", "(", "output_layer_name", ",", "input_layer", ".", "geometryType", "(", ")", ",", "input_layer", ".", "crs", "(", ")", ",", "input_layer", ".", "fields", "(", ")", ")", "# monkey patching input layer to make it work with", "# prepare vector layer function", "temp_layer", "=", "input_layer", "temp_layer", ".", "keywords", "=", "{", "'layer_purpose'", ":", "layer_purpose_aggregation", "[", "'key'", "]", "}", "# copy features to output layer", "copy_layer", "(", "temp_layer", ",", "output_layer", ")", "# Monkey patching output layer to make it work with", "# minimum needs calculator", "output_layer", ".", "keywords", "[", "'layer_purpose'", "]", "=", "(", "layer_purpose_aggregation", "[", "'key'", "]", ")", "output_layer", ".", "keywords", "[", "'inasafe_fields'", "]", "=", "{", "displaced_field", "[", "'key'", "]", ":", "self", ".", "displaced", ".", "currentField", "(", ")", "}", "if", "self", ".", "aggregation_name", ".", "currentField", "(", ")", ":", "output_layer", ".", "keywords", "[", "'inasafe_fields'", "]", "[", "aggregation_name_field", "[", "'key'", "]", "]", "=", "(", "self", ".", "aggregation_name", ".", "currentField", "(", ")", ")", "# remove unnecessary fields & rename inasafe fields", "clean_inasafe_fields", "(", "output_layer", ")", "return", "output_layer" ]
36.622222
14.733333
def cylindrical_histogram(data=None, rho_bins="numpy", phi_bins=16, z_bins="numpy",
                          transformed=False, *args, **kwargs):
    """Facade construction function for the CylindricalHistogram.

    :param data: Input points, forwarded to ``_prepare_data``.
    :param rho_bins: Binning specification for the radial axis.
    :param phi_bins: Binning for the azimuthal axis; an int means that many
        equal-width bins over [0, 2*pi) or over an explicit ``phi_range``.
    :param z_bins: Binning specification for the z axis.
    :param transformed: Whether *data* is already in cylindrical coordinates.
    :return: CylindricalHistogram
    """
    dropna = kwargs.pop("dropna", True)
    data = _prepare_data(data, transformed=transformed,
                         klass=CylindricalHistogram, dropna=dropna)

    if isinstance(phi_bins, int):
        phi_range = (0, 2 * np.pi)
        # BUG FIX: membership must be tested against the kwargs dict itself,
        # not the literal string "kwargs" (which made both branches dead
        # code, so phi_range/range overrides were silently ignored).
        if "phi_range" in kwargs:
            phi_range = kwargs["phi_range"]
        elif "range" in kwargs:
            phi_range = kwargs["range"][1]
        # linspace needs bin edges, i.e. one more point than bins.
        phi_range = list(phi_range) + [phi_bins + 1]
        phi_bins = np.linspace(*phi_range)

    bin_schemas = binnings.calculate_bins_nd(data, [rho_bins, phi_bins, z_bins],
                                             *args, check_nan=not dropna, **kwargs)
    weights = kwargs.pop("weights", None)
    frequencies, errors2, missed = histogram_nd.calculate_frequencies(
        data, ndim=3, binnings=bin_schemas, weights=weights)
    return CylindricalHistogram(binnings=bin_schemas, frequencies=frequencies,
                                errors2=errors2, missed=missed)
[ "def", "cylindrical_histogram", "(", "data", "=", "None", ",", "rho_bins", "=", "\"numpy\"", ",", "phi_bins", "=", "16", ",", "z_bins", "=", "\"numpy\"", ",", "transformed", "=", "False", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "dropna", "=", "kwargs", ".", "pop", "(", "\"dropna\"", ",", "True", ")", "data", "=", "_prepare_data", "(", "data", ",", "transformed", "=", "transformed", ",", "klass", "=", "CylindricalHistogram", ",", "dropna", "=", "dropna", ")", "if", "isinstance", "(", "phi_bins", ",", "int", ")", ":", "phi_range", "=", "(", "0", ",", "2", "*", "np", ".", "pi", ")", "if", "\"phi_range\"", "in", "\"kwargs\"", ":", "phi_range", "=", "kwargs", "[", "\"phi_range\"", "]", "elif", "\"range\"", "in", "\"kwargs\"", ":", "phi_range", "=", "kwargs", "[", "\"range\"", "]", "[", "1", "]", "phi_range", "=", "list", "(", "phi_range", ")", "+", "[", "phi_bins", "+", "1", "]", "phi_bins", "=", "np", ".", "linspace", "(", "*", "phi_range", ")", "bin_schemas", "=", "binnings", ".", "calculate_bins_nd", "(", "data", ",", "[", "rho_bins", ",", "phi_bins", ",", "z_bins", "]", ",", "*", "args", ",", "check_nan", "=", "not", "dropna", ",", "*", "*", "kwargs", ")", "weights", "=", "kwargs", ".", "pop", "(", "\"weights\"", ",", "None", ")", "frequencies", ",", "errors2", ",", "missed", "=", "histogram_nd", ".", "calculate_frequencies", "(", "data", ",", "ndim", "=", "3", ",", "binnings", "=", "bin_schemas", ",", "weights", "=", "weights", ")", "return", "CylindricalHistogram", "(", "binnings", "=", "bin_schemas", ",", "frequencies", "=", "frequencies", ",", "errors2", "=", "errors2", ",", "missed", "=", "missed", ")" ]
49.6
24.8
def rapl_read():
    """Read RAPL power stats.

    Scans ``/sys/class/powercap/intel-rapl:*/`` and returns a list of
    ``RaplStats(label, current_uj, max_uj)`` entries, one per readable
    domain.  Unreadable entries are logged and skipped.
    """
    # glob can return duplicates only in pathological cases, but keep the
    # original de-dup + sort for deterministic ordering.
    basenames = sorted(set(glob.glob('/sys/class/powercap/intel-rapl:*/')))
    pjoin = os.path.join
    ret = list()
    for path in basenames:
        name = None
        try:
            name = cat(pjoin(path, 'name'), fallback=None, binary=False)
        except (IOError, OSError, ValueError) as err:
            # BUG FIX: pass the format arguments individually; the old call
            # handed logging a tuple plus a stray RuntimeWarning class
            # (a leftover from a warnings.warn() call), so the message was
            # formatted incorrectly.
            logging.warning("ignoring %r for file %r", err, path)
            continue
        if name:
            try:
                current = cat(pjoin(path, 'energy_uj'))
                max_reading = 0.0
                ret.append(RaplStats(name, float(current), max_reading))
            except (IOError, OSError, ValueError) as err:
                logging.warning("ignoring %r for file %r", err, path)
    return ret
[ "def", "rapl_read", "(", ")", ":", "basenames", "=", "glob", ".", "glob", "(", "'/sys/class/powercap/intel-rapl:*/'", ")", "basenames", "=", "sorted", "(", "set", "(", "{", "x", "for", "x", "in", "basenames", "}", ")", ")", "pjoin", "=", "os", ".", "path", ".", "join", "ret", "=", "list", "(", ")", "for", "path", "in", "basenames", ":", "name", "=", "None", "try", ":", "name", "=", "cat", "(", "pjoin", "(", "path", ",", "'name'", ")", ",", "fallback", "=", "None", ",", "binary", "=", "False", ")", "except", "(", "IOError", ",", "OSError", ",", "ValueError", ")", "as", "err", ":", "logging", ".", "warning", "(", "\"ignoring %r for file %r\"", ",", "(", "err", ",", "path", ")", ",", "RuntimeWarning", ")", "continue", "if", "name", ":", "try", ":", "current", "=", "cat", "(", "pjoin", "(", "path", ",", "'energy_uj'", ")", ")", "max_reading", "=", "0.0", "ret", ".", "append", "(", "RaplStats", "(", "name", ",", "float", "(", "current", ")", ",", "max_reading", ")", ")", "except", "(", "IOError", ",", "OSError", ",", "ValueError", ")", "as", "err", ":", "logging", ".", "warning", "(", "\"ignoring %r for file %r\"", ",", "(", "err", ",", "path", ")", ",", "RuntimeWarning", ")", "return", "ret" ]
37.958333
19.916667
def mark_all_read(request):
    """
    Mark every stored message as read (i.e. purge the inbox) for the
    user authenticated on *request*.
    """
    from .settings import stored_messages_settings

    storage = stored_messages_settings.STORAGE_BACKEND()
    storage.inbox_purge(request.user)
    return Response({"message": "All messages read"})
[ "def", "mark_all_read", "(", "request", ")", ":", "from", ".", "settings", "import", "stored_messages_settings", "backend", "=", "stored_messages_settings", ".", "STORAGE_BACKEND", "(", ")", "backend", ".", "inbox_purge", "(", "request", ".", "user", ")", "return", "Response", "(", "{", "\"message\"", ":", "\"All messages read\"", "}", ")" ]
39.75
12
def get_importable_modules(folder):
    """Yield importable module names found in *folder*.

    A file qualifies when it ends with ``.py`` and does not start with
    an underscore; the ``.py`` suffix is stripped from the yielded name.

    @return module names
    @rtype: iterator of string
    """
    for entry in os.listdir(folder):
        is_module = entry.endswith('.py') and not entry.startswith('_')
        if is_module:
            yield entry[:-3]
[ "def", "get_importable_modules", "(", "folder", ")", ":", "for", "fname", "in", "os", ".", "listdir", "(", "folder", ")", ":", "if", "fname", ".", "endswith", "(", "'.py'", ")", "and", "not", "fname", ".", "startswith", "(", "'_'", ")", ":", "yield", "fname", "[", ":", "-", "3", "]" ]
36.666667
8.333333
def publishToRoom(self, roomId, name, data, userList=None):
    """ Publish to given room data submitted

    :param roomId: Identifier of the room to publish to.
    :param name: Event name sent alongside the payload.
    :param data: Payload; encoded with the class parser before broadcast.
    :param userList: Optional explicit recipient list; defaults to the
        room's current members (``getRoom``).
    """
    if userList is None:
        userList = self.getRoom(roomId)
    # Publish data to all room users.
    # Use lazy %-style arguments so the message is only formatted when
    # DEBUG logging is actually enabled (the old code eagerly built the
    # string with `%` on every call).
    logging.debug("%s: broadcasting (name: %s, data: %s, number of users: %s)",
                  self._gcls(), name, data, len(userList))
    self.broadcast(userList, {
        "name": name,
        "data": SockJSRoomHandler._parser.encode(data)
    })
[ "def", "publishToRoom", "(", "self", ",", "roomId", ",", "name", ",", "data", ",", "userList", "=", "None", ")", ":", "if", "userList", "is", "None", ":", "userList", "=", "self", ".", "getRoom", "(", "roomId", ")", "# Publish data to all room users", "logging", ".", "debug", "(", "\"%s: broadcasting (name: %s, data: %s, number of users: %s)\"", "%", "(", "self", ".", "_gcls", "(", ")", ",", "name", ",", "data", ",", "len", "(", "userList", ")", ")", ")", "self", ".", "broadcast", "(", "userList", ",", "{", "\"name\"", ":", "name", ",", "\"data\"", ":", "SockJSRoomHandler", ".", "_parser", ".", "encode", "(", "data", ")", "}", ")" ]
43.272727
20.909091
def ComputeRoot(hashes):
    """
    Compute the Merkle root hash.

    Args:
        hashes (list): hashes to build the root from.

    Returns:
        bytes: the root hash.

    Raises:
        Exception: if *hashes* is empty.
    """
    count = len(hashes)
    if not count:
        raise Exception('Hashes must have length')
    # A single hash is its own root; no tree needed.
    if count == 1:
        return hashes[0]
    return MerkleTree(hashes).Root.Hash
[ "def", "ComputeRoot", "(", "hashes", ")", ":", "if", "not", "len", "(", "hashes", ")", ":", "raise", "Exception", "(", "'Hashes must have length'", ")", "if", "len", "(", "hashes", ")", "==", "1", ":", "return", "hashes", "[", "0", "]", "tree", "=", "MerkleTree", "(", "hashes", ")", "return", "tree", ".", "Root", ".", "Hash" ]
23.882353
17.764706
def _get_by(key, val, l): """ Out of list *l* return all elements that have *key=val* This comes in handy when you are working with aggregated/bucketed queries """ return [x for x in l if _check_value_recursively(key, val, x)]
[ "def", "_get_by", "(", "key", ",", "val", ",", "l", ")", ":", "return", "[", "x", "for", "x", "in", "l", "if", "_check_value_recursively", "(", "key", ",", "val", ",", "x", ")", "]" ]
40.166667
16.166667
def __request(self, method, url, request_args, headers=None, stream=False):
    """__request.

    Perform the actual HTTP call.  Invoked by the ``request`` method for
    'regular' API calls, and indirectly by ``__stream_request`` for
    'streaming' calls.
    """
    func = getattr(self.client, method)
    if not headers:
        headers = {}
    response = None
    try:
        logger.info("performing request %s", url)
        response = func(url, stream=stream, headers=headers, **request_args)
    except requests.RequestException as err:
        logger.error("request %s failed [%s]", url, err)
        raise err

    # Handle error responses
    if response.status_code >= 400:
        body = response.content.decode('utf-8')
        logger.error("request %s failed [%d,%s]",
                     url, response.status_code, body)
        raise V20Error(response.status_code, body)
    return response
[ "def", "__request", "(", "self", ",", "method", ",", "url", ",", "request_args", ",", "headers", "=", "None", ",", "stream", "=", "False", ")", ":", "func", "=", "getattr", "(", "self", ".", "client", ",", "method", ")", "headers", "=", "headers", "if", "headers", "else", "{", "}", "response", "=", "None", "try", ":", "logger", ".", "info", "(", "\"performing request %s\"", ",", "url", ")", "response", "=", "func", "(", "url", ",", "stream", "=", "stream", ",", "headers", "=", "headers", ",", "*", "*", "request_args", ")", "except", "requests", ".", "RequestException", "as", "err", ":", "logger", ".", "error", "(", "\"request %s failed [%s]\"", ",", "url", ",", "err", ")", "raise", "err", "# Handle error responses", "if", "response", ".", "status_code", ">=", "400", ":", "logger", ".", "error", "(", "\"request %s failed [%d,%s]\"", ",", "url", ",", "response", ".", "status_code", ",", "response", ".", "content", ".", "decode", "(", "'utf-8'", ")", ")", "raise", "V20Error", "(", "response", ".", "status_code", ",", "response", ".", "content", ".", "decode", "(", "'utf-8'", ")", ")", "return", "response" ]
41
16.185185
def build_summary_table(summary, idx, is_fragment_root, indent_level, output):
    """Direct translation of Coordinator::PrintExecSummary() to recursively build a list
    of rows of summary statistics, one per exec node

    summary: the TExecSummary object that contains all the summary data

    idx: the index of the node to print

    is_fragment_root: true if the node to print is the root of a fragment (and
    therefore feeds into an exchange)

    indent_level: the number of spaces to print before writing the node's label,
    to give the appearance of a tree. The 0th child of a node has the same
    indent_level as its parent. All other children have an indent_level of one
    greater than their parent.

    output: the list of rows into which to append the rows produced for this node
    and its children.

    Returns the index of the next exec node in summary.exec_nodes that should be
    processed, used internally to this method only.
    """
    # pylint: disable=too-many-locals
    attrs = ["latency_ns", "cpu_time_ns", "cardinality", "memory_used"]

    # Initialise aggregate and maximum stats
    agg_stats, max_stats = TExecStats(), TExecStats()
    for attr in attrs:
        setattr(agg_stats, attr, 0)
        setattr(max_stats, attr, 0)

    node = summary.nodes[idx]
    # Fold each per-instance stat into the running sum (agg) and max;
    # None values (stats not reported by an instance) are skipped.
    for stats in node.exec_stats:
        for attr in attrs:
            val = getattr(stats, attr)
            if val is not None:
                setattr(agg_stats, attr, getattr(agg_stats, attr) + val)
                setattr(max_stats, attr, max(getattr(max_stats, attr), val))

    if len(node.exec_stats) > 0:
        avg_time = agg_stats.latency_ns / len(node.exec_stats)
    else:
        avg_time = 0

    # If the node is a broadcast-receiving exchange node, the cardinality of
    # rows produced is the max over all instances (which should all have
    # received the same number of rows). Otherwise, the cardinality is the sum
    # over all instances which process disjoint partitions.
    if node.is_broadcast and is_fragment_root:
        cardinality = max_stats.cardinality
    else:
        cardinality = agg_stats.cardinality

    est_stats = node.estimated_stats
    # Build the tree-drawing prefix: fragment roots get spaces, other
    # children get dashes, scaled by the indent level.
    label_prefix = ""
    if indent_level > 0:
        label_prefix = "|"
        if is_fragment_root:
            label_prefix += " " * indent_level
        else:
            label_prefix += "--" * indent_level

    def prettyprint(val, units, divisor):
        # Scale `val` down by `divisor` until it fits the current unit;
        # the first (smallest) unit is printed without decimals.
        for unit in units:
            if val < divisor:
                if unit == units[0]:
                    return "%d%s" % (val, unit)
                else:
                    return "%3.2f%s" % (val, unit)
            val /= divisor

    def prettyprint_bytes(byte_val):
        return prettyprint(
            byte_val, [' B', ' KB', ' MB', ' GB', ' TB'], 1024.0)

    def prettyprint_units(unit_val):
        return prettyprint(unit_val, ["", "K", "M", "B"], 1000.0)

    def prettyprint_time(time_val):
        return prettyprint(time_val, ["ns", "us", "ms", "s"], 1000.0)

    row = [label_prefix + node.label,
           len(node.exec_stats),
           prettyprint_time(avg_time),
           prettyprint_time(max_stats.latency_ns),
           prettyprint_units(cardinality),
           prettyprint_units(est_stats.cardinality),
           prettyprint_bytes(max_stats.memory_used),
           prettyprint_bytes(est_stats.memory_used),
           node.label_detail]

    output.append(row)
    try:
        sender_idx = summary.exch_to_sender_map[idx]
        # This is an exchange node, so the sender is a fragment root, and
        # should be printed next.
        build_summary_table(summary, sender_idx, True, indent_level, output)
    except (KeyError, TypeError):
        # Fall through if idx not in map, or if exch_to_sender_map itself is
        # not set
        pass

    idx += 1
    if node.num_children > 0:
        # The 0th child keeps the parent's indent level; its rows are
        # buffered and appended after all other (indented) children.
        first_child_output = []
        idx = build_summary_table(summary, idx, False, indent_level,
                                  first_child_output)
        # pylint: disable=unused-variable
        # TODO: is child_idx supposed to be unused? See #120
        for child_idx in range(1, node.num_children):
            # All other children are indented (we only have 0, 1 or 2 children
            # for every exec node at the moment)
            idx = build_summary_table(summary, idx, False, indent_level + 1,
                                      output)
        output += first_child_output
    return idx
[ "def", "build_summary_table", "(", "summary", ",", "idx", ",", "is_fragment_root", ",", "indent_level", ",", "output", ")", ":", "# pylint: disable=too-many-locals", "attrs", "=", "[", "\"latency_ns\"", ",", "\"cpu_time_ns\"", ",", "\"cardinality\"", ",", "\"memory_used\"", "]", "# Initialise aggregate and maximum stats", "agg_stats", ",", "max_stats", "=", "TExecStats", "(", ")", ",", "TExecStats", "(", ")", "for", "attr", "in", "attrs", ":", "setattr", "(", "agg_stats", ",", "attr", ",", "0", ")", "setattr", "(", "max_stats", ",", "attr", ",", "0", ")", "node", "=", "summary", ".", "nodes", "[", "idx", "]", "for", "stats", "in", "node", ".", "exec_stats", ":", "for", "attr", "in", "attrs", ":", "val", "=", "getattr", "(", "stats", ",", "attr", ")", "if", "val", "is", "not", "None", ":", "setattr", "(", "agg_stats", ",", "attr", ",", "getattr", "(", "agg_stats", ",", "attr", ")", "+", "val", ")", "setattr", "(", "max_stats", ",", "attr", ",", "max", "(", "getattr", "(", "max_stats", ",", "attr", ")", ",", "val", ")", ")", "if", "len", "(", "node", ".", "exec_stats", ")", ">", "0", ":", "avg_time", "=", "agg_stats", ".", "latency_ns", "/", "len", "(", "node", ".", "exec_stats", ")", "else", ":", "avg_time", "=", "0", "# If the node is a broadcast-receiving exchange node, the cardinality of", "# rows produced is the max over all instances (which should all have", "# received the same number of rows). 
Otherwise, the cardinality is the sum", "# over all instances which process disjoint partitions.", "if", "node", ".", "is_broadcast", "and", "is_fragment_root", ":", "cardinality", "=", "max_stats", ".", "cardinality", "else", ":", "cardinality", "=", "agg_stats", ".", "cardinality", "est_stats", "=", "node", ".", "estimated_stats", "label_prefix", "=", "\"\"", "if", "indent_level", ">", "0", ":", "label_prefix", "=", "\"|\"", "if", "is_fragment_root", ":", "label_prefix", "+=", "\" \"", "*", "indent_level", "else", ":", "label_prefix", "+=", "\"--\"", "*", "indent_level", "def", "prettyprint", "(", "val", ",", "units", ",", "divisor", ")", ":", "for", "unit", "in", "units", ":", "if", "val", "<", "divisor", ":", "if", "unit", "==", "units", "[", "0", "]", ":", "return", "\"%d%s\"", "%", "(", "val", ",", "unit", ")", "else", ":", "return", "\"%3.2f%s\"", "%", "(", "val", ",", "unit", ")", "val", "/=", "divisor", "def", "prettyprint_bytes", "(", "byte_val", ")", ":", "return", "prettyprint", "(", "byte_val", ",", "[", "' B'", ",", "' KB'", ",", "' MB'", ",", "' GB'", ",", "' TB'", "]", ",", "1024.0", ")", "def", "prettyprint_units", "(", "unit_val", ")", ":", "return", "prettyprint", "(", "unit_val", ",", "[", "\"\"", ",", "\"K\"", ",", "\"M\"", ",", "\"B\"", "]", ",", "1000.0", ")", "def", "prettyprint_time", "(", "time_val", ")", ":", "return", "prettyprint", "(", "time_val", ",", "[", "\"ns\"", ",", "\"us\"", ",", "\"ms\"", ",", "\"s\"", "]", ",", "1000.0", ")", "row", "=", "[", "label_prefix", "+", "node", ".", "label", ",", "len", "(", "node", ".", "exec_stats", ")", ",", "prettyprint_time", "(", "avg_time", ")", ",", "prettyprint_time", "(", "max_stats", ".", "latency_ns", ")", ",", "prettyprint_units", "(", "cardinality", ")", ",", "prettyprint_units", "(", "est_stats", ".", "cardinality", ")", ",", "prettyprint_bytes", "(", "max_stats", ".", "memory_used", ")", ",", "prettyprint_bytes", "(", "est_stats", ".", "memory_used", ")", ",", "node", ".", 
"label_detail", "]", "output", ".", "append", "(", "row", ")", "try", ":", "sender_idx", "=", "summary", ".", "exch_to_sender_map", "[", "idx", "]", "# This is an exchange node, so the sender is a fragment root, and", "# should be printed next.", "build_summary_table", "(", "summary", ",", "sender_idx", ",", "True", ",", "indent_level", ",", "output", ")", "except", "(", "KeyError", ",", "TypeError", ")", ":", "# Fall through if idx not in map, or if exch_to_sender_map itself is", "# not set", "pass", "idx", "+=", "1", "if", "node", ".", "num_children", ">", "0", ":", "first_child_output", "=", "[", "]", "idx", "=", "build_summary_table", "(", "summary", ",", "idx", ",", "False", ",", "indent_level", ",", "first_child_output", ")", "# pylint: disable=unused-variable", "# TODO: is child_idx supposed to be unused? See #120", "for", "child_idx", "in", "range", "(", "1", ",", "node", ".", "num_children", ")", ":", "# All other children are indented (we only have 0, 1 or 2 children", "# for every exec node at the moment)", "idx", "=", "build_summary_table", "(", "summary", ",", "idx", ",", "False", ",", "indent_level", "+", "1", ",", "output", ")", "output", "+=", "first_child_output", "return", "idx" ]
37.435897
20.358974
def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
    """Return a tar header as a string of 512 byte blocks.
    """
    info = self.get_info()

    # Dispatch on the requested archive format; the PAX builder does not
    # take an `errors` argument.
    if format == USTAR_FORMAT:
        header = self.create_ustar_header(info, encoding, errors)
    elif format == GNU_FORMAT:
        header = self.create_gnu_header(info, encoding, errors)
    elif format == PAX_FORMAT:
        header = self.create_pax_header(info, encoding)
    else:
        raise ValueError("invalid format")
    return header
[ "def", "tobuf", "(", "self", ",", "format", "=", "DEFAULT_FORMAT", ",", "encoding", "=", "ENCODING", ",", "errors", "=", "\"surrogateescape\"", ")", ":", "info", "=", "self", ".", "get_info", "(", ")", "if", "format", "==", "USTAR_FORMAT", ":", "return", "self", ".", "create_ustar_header", "(", "info", ",", "encoding", ",", "errors", ")", "elif", "format", "==", "GNU_FORMAT", ":", "return", "self", ".", "create_gnu_header", "(", "info", ",", "encoding", ",", "errors", ")", "elif", "format", "==", "PAX_FORMAT", ":", "return", "self", ".", "create_pax_header", "(", "info", ",", "encoding", ")", "else", ":", "raise", "ValueError", "(", "\"invalid format\"", ")" ]
41.307692
16.461538
def get_contributors(gh, repo_id):
    """Get list of contributors to a repository.

    :param gh: authenticated github3 client.
    :param repo_id: numeric repository id.
    :return: list of ``dict(name=..., affiliation=...)`` ordered by
        descending contribution count, or ``None`` on any failure.
    """
    try:
        # FIXME: Use `github3.Repository.contributors` to get this information
        contrib_url = gh.repository_with_id(repo_id).contributors_url
        r = requests.get(contrib_url)
        if r.status_code == 200:
            contributors = r.json()

            def get_author(contributor):
                r = requests.get(contributor['url'])
                if r.status_code == 200:
                    data = r.json()
                    return dict(
                        name=(data['name'] if 'name' in data and data['name']
                              else data['login']),
                        affiliation=data.get('company') or '',
                    )
                # Detail lookup failed: drop this contributor (returns None).
                return None

            # Sort according to number of contributions (ascending), then
            # walk in reverse so the busiest contributors come first.
            contributors.sort(key=itemgetter('contributions'))
            # BUG FIX: materialise the result as a list instead of returning
            # a lazy `filter` object, and skip authors whose detail request
            # failed (get_author returned None).
            return [
                author
                for author in (get_author(x)
                               for x in reversed(contributors)
                               if x['type'] == 'User')
                if author is not None
            ]
    except Exception:
        # Best-effort helper: any API/network failure yields None.
        return None
[ "def", "get_contributors", "(", "gh", ",", "repo_id", ")", ":", "try", ":", "# FIXME: Use `github3.Repository.contributors` to get this information", "contrib_url", "=", "gh", ".", "repository_with_id", "(", "repo_id", ")", ".", "contributors_url", "r", "=", "requests", ".", "get", "(", "contrib_url", ")", "if", "r", ".", "status_code", "==", "200", ":", "contributors", "=", "r", ".", "json", "(", ")", "def", "get_author", "(", "contributor", ")", ":", "r", "=", "requests", ".", "get", "(", "contributor", "[", "'url'", "]", ")", "if", "r", ".", "status_code", "==", "200", ":", "data", "=", "r", ".", "json", "(", ")", "return", "dict", "(", "name", "=", "(", "data", "[", "'name'", "]", "if", "'name'", "in", "data", "and", "data", "[", "'name'", "]", "else", "data", "[", "'login'", "]", ")", ",", "affiliation", "=", "data", ".", "get", "(", "'company'", ")", "or", "''", ",", ")", "# Sort according to number of contributions", "contributors", ".", "sort", "(", "key", "=", "itemgetter", "(", "'contributions'", ")", ")", "contributors", "=", "[", "get_author", "(", "x", ")", "for", "x", "in", "reversed", "(", "contributors", ")", "if", "x", "[", "'type'", "]", "==", "'User'", "]", "contributors", "=", "filter", "(", "lambda", "x", ":", "x", "is", "not", "None", ",", "contributors", ")", "return", "contributors", "except", "Exception", ":", "return", "None" ]
39.206897
19.172414
def put(self, urls=None, **overrides):
    """Restrict this route to accepting only the HTTP ``PUT`` method."""
    has_urls = urls is not None
    if has_urls:
        overrides['urls'] = urls
    return self.where(accept='PUT', **overrides)
[ "def", "put", "(", "self", ",", "urls", "=", "None", ",", "*", "*", "overrides", ")", ":", "if", "urls", "is", "not", "None", ":", "overrides", "[", "'urls'", "]", "=", "urls", "return", "self", ".", "where", "(", "accept", "=", "'PUT'", ",", "*", "*", "overrides", ")" ]
41.2
6
def get_me(self) -> "pyrogram.User":
    """A simple method for testing your authorization. Requires no parameters.

    Returns:
        Basic information about the user or bot in form of a :obj:`User` object

    Raises:
        :class:`RPCError <pyrogram.RPCError>` in case of a Telegram RPC error.
    """
    # Ask Telegram about ourselves, then parse the `user` part of the reply.
    request = functions.users.GetFullUser(id=types.InputPeerSelf())
    full_user = self.send(request)
    return pyrogram.User._parse(self, full_user.user)
[ "def", "get_me", "(", "self", ")", "->", "\"pyrogram.User\"", ":", "return", "pyrogram", ".", "User", ".", "_parse", "(", "self", ",", "self", ".", "send", "(", "functions", ".", "users", ".", "GetFullUser", "(", "id", "=", "types", ".", "InputPeerSelf", "(", ")", ")", ")", ".", "user", ")" ]
31.294118
20.411765
def timezone(name, extended=True):  # type: (Union[str, int]) -> _Timezone
    """
    Return a Timezone instance given its name.
    """
    # An integer name means a fixed UTC offset, not a named zone.
    if isinstance(name, int):
        return fixed_timezone(name)

    if name.lower() == "utc":
        return UTC

    # EAFP cache lookup: build and memoise the zone on a miss.
    try:
        return _tz_cache[name]
    except KeyError:
        tz = _Timezone(name, extended=extended)
        _tz_cache[name] = tz
        return tz
[ "def", "timezone", "(", "name", ",", "extended", "=", "True", ")", ":", "# type: (Union[str, int]) -> _Timezone", "if", "isinstance", "(", "name", ",", "int", ")", ":", "return", "fixed_timezone", "(", "name", ")", "if", "name", ".", "lower", "(", ")", "==", "\"utc\"", ":", "return", "UTC", "if", "name", "in", "_tz_cache", ":", "return", "_tz_cache", "[", "name", "]", "tz", "=", "_Timezone", "(", "name", ",", "extended", "=", "extended", ")", "_tz_cache", "[", "name", "]", "=", "tz", "return", "tz" ]
22.352941
18.823529
def gettrace(self, burn=0, thin=1, chain=-1, slicing=None):
    """Return the trace (last by default).

    :Parameters:
    burn : integer
      The number of transient steps to skip.
    thin : integer
      Keep one in thin.
    chain : integer
      The index of the chain to fetch. If None, return all chains. The
      default is to return the last chain.
    slicing : slice object
      A slice overriding burn and thin assignement.
    """
    # XXX: handle chain == None case properly
    if chain is None:
        chain = -1
    chain = self.db.chains[chain]
    arr = self.db._arrays[chain, self.name]

    # A slice object overrides burn/thin; a missing stop means "to the end".
    stop = None
    if slicing is not None:
        burn, stop, thin = slicing.start, slicing.stop, slicing.step
    if stop is None:
        stop = arr.nrows
    return np.asarray(arr.read(start=burn, stop=stop, step=thin))
[ "def", "gettrace", "(", "self", ",", "burn", "=", "0", ",", "thin", "=", "1", ",", "chain", "=", "-", "1", ",", "slicing", "=", "None", ")", ":", "# XXX: handle chain == None case properly", "if", "chain", "is", "None", ":", "chain", "=", "-", "1", "chain", "=", "self", ".", "db", ".", "chains", "[", "chain", "]", "arr", "=", "self", ".", "db", ".", "_arrays", "[", "chain", ",", "self", ".", "name", "]", "if", "slicing", "is", "not", "None", ":", "burn", ",", "stop", ",", "thin", "=", "slicing", ".", "start", ",", "slicing", ".", "stop", ",", "slicing", ".", "step", "if", "slicing", "is", "None", "or", "stop", "is", "None", ":", "stop", "=", "arr", ".", "nrows", "return", "np", ".", "asarray", "(", "arr", ".", "read", "(", "start", "=", "burn", ",", "stop", "=", "stop", ",", "step", "=", "thin", ")", ")" ]
31.37931
19.275862
def create(self): """ Creates a new record for a domain. Args: type (str): The type of the DNS record (e.g. A, CNAME, TXT). name (str): The host name, alias, or service being defined by the record. data (int): Variable data depending on record type. priority (int): The priority for SRV and MX records. port (int): The port for SRV records. ttl (int): The time to live for the record, in seconds. weight (int): The weight for SRV records. flags (int): An unsigned integer between 0-255 used for CAA records. tags (string): The parameter tag for CAA records. Valid values are "issue", "wildissue", or "iodef" """ input_params = { "type": self.type, "data": self.data, "name": self.name, "priority": self.priority, "port": self.port, "ttl": self.ttl, "weight": self.weight, "flags": self.flags, "tags": self.tags } data = self.get_data( "domains/%s/records" % (self.domain), type=POST, params=input_params, ) if data: self.id = data['domain_record']['id']
[ "def", "create", "(", "self", ")", ":", "input_params", "=", "{", "\"type\"", ":", "self", ".", "type", ",", "\"data\"", ":", "self", ".", "data", ",", "\"name\"", ":", "self", ".", "name", ",", "\"priority\"", ":", "self", ".", "priority", ",", "\"port\"", ":", "self", ".", "port", ",", "\"ttl\"", ":", "self", ".", "ttl", ",", "\"weight\"", ":", "self", ".", "weight", ",", "\"flags\"", ":", "self", ".", "flags", ",", "\"tags\"", ":", "self", ".", "tags", "}", "data", "=", "self", ".", "get_data", "(", "\"domains/%s/records\"", "%", "(", "self", ".", "domain", ")", ",", "type", "=", "POST", ",", "params", "=", "input_params", ",", ")", "if", "data", ":", "self", ".", "id", "=", "data", "[", "'domain_record'", "]", "[", "'id'", "]" ]
34.810811
18.27027
def reset(self, source): """ Reset scanner's state. :param source: Source for parsing """ self.tokens = [] self.source = source self.pos = 0
[ "def", "reset", "(", "self", ",", "source", ")", ":", "self", ".", "tokens", "=", "[", "]", "self", ".", "source", "=", "source", "self", ".", "pos", "=", "0" ]
20.222222
16.111111
def make(directory): """Makes a RAS Machine directory""" if os.path.exists(directory): if os.path.isdir(directory): click.echo('Directory already exists') else: click.echo('Path exists and is not a directory') sys.exit() os.makedirs(directory) os.mkdir(os.path.join(directory, 'jsons')) copy_default_config(os.path.join(directory, 'config.yaml'))
[ "def", "make", "(", "directory", ")", ":", "if", "os", ".", "path", ".", "exists", "(", "directory", ")", ":", "if", "os", ".", "path", ".", "isdir", "(", "directory", ")", ":", "click", ".", "echo", "(", "'Directory already exists'", ")", "else", ":", "click", ".", "echo", "(", "'Path exists and is not a directory'", ")", "sys", ".", "exit", "(", ")", "os", ".", "makedirs", "(", "directory", ")", "os", ".", "mkdir", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "'jsons'", ")", ")", "copy_default_config", "(", "os", ".", "path", ".", "join", "(", "directory", ",", "'config.yaml'", ")", ")" ]
31.076923
17.923077
def maxind_numba(block): """ filter for indels """ ## remove terminal edges inds = 0 for row in xrange(block.shape[0]): where = np.where(block[row] != 45)[0] if len(where) == 0: obs = 100 else: left = np.min(where) right = np.max(where) obs = np.sum(block[row, left:right] == 45) if obs > inds: inds = obs return inds
[ "def", "maxind_numba", "(", "block", ")", ":", "## remove terminal edges", "inds", "=", "0", "for", "row", "in", "xrange", "(", "block", ".", "shape", "[", "0", "]", ")", ":", "where", "=", "np", ".", "where", "(", "block", "[", "row", "]", "!=", "45", ")", "[", "0", "]", "if", "len", "(", "where", ")", "==", "0", ":", "obs", "=", "100", "else", ":", "left", "=", "np", ".", "min", "(", "where", ")", "right", "=", "np", ".", "max", "(", "where", ")", "obs", "=", "np", ".", "sum", "(", "block", "[", "row", ",", "left", ":", "right", "]", "==", "45", ")", "if", "obs", ">", "inds", ":", "inds", "=", "obs", "return", "inds" ]
27.666667
14.133333
def disable(self, msgid, scope="package", line=None, ignore_unknown=False): """don't output message of the given id""" self._set_msg_status( msgid, enable=False, scope=scope, line=line, ignore_unknown=ignore_unknown ) self._register_by_id_managed_msg(msgid, line)
[ "def", "disable", "(", "self", ",", "msgid", ",", "scope", "=", "\"package\"", ",", "line", "=", "None", ",", "ignore_unknown", "=", "False", ")", ":", "self", ".", "_set_msg_status", "(", "msgid", ",", "enable", "=", "False", ",", "scope", "=", "scope", ",", "line", "=", "line", ",", "ignore_unknown", "=", "ignore_unknown", ")", "self", ".", "_register_by_id_managed_msg", "(", "msgid", ",", "line", ")" ]
50.333333
22.666667
def get(self, *args, **kwargs): """ This renders the form or, if needed, does the http redirects. """ step_url = kwargs.get('step', None) if step_url is None: if 'reset' in self.request.GET: self.storage.reset() self.storage.current_step = self.steps.first if self.request.GET: query_string = "?%s" % self.request.GET.urlencode() else: query_string = "" next_step_url = reverse(self.url_name, kwargs={ 'step': self.steps.current, }) + query_string return redirect(next_step_url) # is the current step the "done" name/view? elif step_url == self.done_step_name: last_step = self.steps.last return self.render_done(self.get_form(step=last_step, data=self.storage.get_step_data(last_step), files=self.storage.get_step_files(last_step) ), **kwargs) # is the url step name not equal to the step in the storage? # if yes, change the step in the storage (if name exists) elif step_url == self.steps.current: # URL step name and storage step name are equal, render! return self.render(self.get_form( data=self.storage.current_step_data, files=self.storage.current_step_data, ), **kwargs) elif step_url in self.get_form_list(): self.storage.current_step = step_url return self.render(self.get_form( data=self.storage.current_step_data, files=self.storage.current_step_data, ), **kwargs) # invalid step name, reset to first and redirect. else: self.storage.current_step = self.steps.first return redirect(self.url_name, step=self.steps.first)
[ "def", "get", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "step_url", "=", "kwargs", ".", "get", "(", "'step'", ",", "None", ")", "if", "step_url", "is", "None", ":", "if", "'reset'", "in", "self", ".", "request", ".", "GET", ":", "self", ".", "storage", ".", "reset", "(", ")", "self", ".", "storage", ".", "current_step", "=", "self", ".", "steps", ".", "first", "if", "self", ".", "request", ".", "GET", ":", "query_string", "=", "\"?%s\"", "%", "self", ".", "request", ".", "GET", ".", "urlencode", "(", ")", "else", ":", "query_string", "=", "\"\"", "next_step_url", "=", "reverse", "(", "self", ".", "url_name", ",", "kwargs", "=", "{", "'step'", ":", "self", ".", "steps", ".", "current", ",", "}", ")", "+", "query_string", "return", "redirect", "(", "next_step_url", ")", "# is the current step the \"done\" name/view?", "elif", "step_url", "==", "self", ".", "done_step_name", ":", "last_step", "=", "self", ".", "steps", ".", "last", "return", "self", ".", "render_done", "(", "self", ".", "get_form", "(", "step", "=", "last_step", ",", "data", "=", "self", ".", "storage", ".", "get_step_data", "(", "last_step", ")", ",", "files", "=", "self", ".", "storage", ".", "get_step_files", "(", "last_step", ")", ")", ",", "*", "*", "kwargs", ")", "# is the url step name not equal to the step in the storage?", "# if yes, change the step in the storage (if name exists)", "elif", "step_url", "==", "self", ".", "steps", ".", "current", ":", "# URL step name and storage step name are equal, render!", "return", "self", ".", "render", "(", "self", ".", "get_form", "(", "data", "=", "self", ".", "storage", ".", "current_step_data", ",", "files", "=", "self", ".", "storage", ".", "current_step_data", ",", ")", ",", "*", "*", "kwargs", ")", "elif", "step_url", "in", "self", ".", "get_form_list", "(", ")", ":", "self", ".", "storage", ".", "current_step", "=", "step_url", "return", "self", ".", "render", "(", "self", ".", "get_form", "(", "data", "=", "self", ".", 
"storage", ".", "current_step_data", ",", "files", "=", "self", ".", "storage", ".", "current_step_data", ",", ")", ",", "*", "*", "kwargs", ")", "# invalid step name, reset to first and redirect.", "else", ":", "self", ".", "storage", ".", "current_step", "=", "self", ".", "steps", ".", "first", "return", "redirect", "(", "self", ".", "url_name", ",", "step", "=", "self", ".", "steps", ".", "first", ")" ]
40.76087
15.5
def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types): """ See :meth:`superclass method <.base.GroundShakingIntensityModel.get_mean_and_stddevs>` for spec of input and result values. Implement equation 1, page 20. """ # compute median PGA on rock, needed to compute non-linear site # amplification C_pga = self.COEFFS[PGA()] median_pga = np.exp( self._compute_mean(C_pga, rup.mag, dists, rup.rake) ) # compute full mean value by adding nonlinear site amplification terms C = self.COEFFS[imt] mean = (self._compute_mean(C, rup.mag, dists, rup.rake) + self._compute_non_linear_term(C, median_pga, sites)) stddevs = self._get_stddevs(C, stddev_types, num_sites=sites.vs30.size) return mean + self.adjustment_factor, stddevs
[ "def", "get_mean_and_stddevs", "(", "self", ",", "sites", ",", "rup", ",", "dists", ",", "imt", ",", "stddev_types", ")", ":", "# compute median PGA on rock, needed to compute non-linear site", "# amplification", "C_pga", "=", "self", ".", "COEFFS", "[", "PGA", "(", ")", "]", "median_pga", "=", "np", ".", "exp", "(", "self", ".", "_compute_mean", "(", "C_pga", ",", "rup", ".", "mag", ",", "dists", ",", "rup", ".", "rake", ")", ")", "# compute full mean value by adding nonlinear site amplification terms", "C", "=", "self", ".", "COEFFS", "[", "imt", "]", "mean", "=", "(", "self", ".", "_compute_mean", "(", "C", ",", "rup", ".", "mag", ",", "dists", ",", "rup", ".", "rake", ")", "+", "self", ".", "_compute_non_linear_term", "(", "C", ",", "median_pga", ",", "sites", ")", ")", "stddevs", "=", "self", ".", "_get_stddevs", "(", "C", ",", "stddev_types", ",", "num_sites", "=", "sites", ".", "vs30", ".", "size", ")", "return", "mean", "+", "self", ".", "adjustment_factor", ",", "stddevs" ]
37.956522
21.695652
def get_object(self, queryset=None): """ get privacy settings of current user """ try: obj = self.get_queryset() except self.model.DoesNotExist: raise Http404() self.check_object_permissions(self.request, obj) return obj
[ "def", "get_object", "(", "self", ",", "queryset", "=", "None", ")", ":", "try", ":", "obj", "=", "self", ".", "get_queryset", "(", ")", "except", "self", ".", "model", ".", "DoesNotExist", ":", "raise", "Http404", "(", ")", "self", ".", "check_object_permissions", "(", "self", ".", "request", ",", "obj", ")", "return", "obj" ]
34.625
10.875
def add_to_queue(self, series): """Add a series to the queue @param crunchyroll.models.Series series @return bool """ result = self._android_api.add_to_queue(series_id=series.series_id) return result
[ "def", "add_to_queue", "(", "self", ",", "series", ")", ":", "result", "=", "self", ".", "_android_api", ".", "add_to_queue", "(", "series_id", "=", "series", ".", "series_id", ")", "return", "result" ]
30.125
16.25
def _warcprox_opts(self, args): ''' Takes args as produced by the argument parser built by _build_arg_parser and builds warcprox arguments object suitable to pass to warcprox.main.init_controller. Copies some arguments, renames some, populates some with defaults appropriate for brozzler-easy, etc. ''' warcprox_opts = warcprox.Options() warcprox_opts.address = 'localhost' # let the OS choose an available port; discover it later using # sock.getsockname()[1] warcprox_opts.port = 0 warcprox_opts.cacert = args.cacert warcprox_opts.certs_dir = args.certs_dir warcprox_opts.directory = args.warcs_dir warcprox_opts.gzip = True warcprox_opts.prefix = 'brozzler' warcprox_opts.size = 1000 * 1000* 1000 warcprox_opts.rollover_idle_time = 3 * 60 warcprox_opts.digest_algorithm = 'sha1' warcprox_opts.base32 = True warcprox_opts.stats_db_file = None warcprox_opts.playback_port = None warcprox_opts.playback_index_db_file = None warcprox_opts.rethinkdb_big_table_url = ( 'rethinkdb://%s/%s/captures' % ( args.rethinkdb_servers, args.rethinkdb_db)) warcprox_opts.queue_size = 500 warcprox_opts.max_threads = None warcprox_opts.profile = False warcprox_opts.onion_tor_socks_proxy = args.onion_tor_socks_proxy return warcprox_opts
[ "def", "_warcprox_opts", "(", "self", ",", "args", ")", ":", "warcprox_opts", "=", "warcprox", ".", "Options", "(", ")", "warcprox_opts", ".", "address", "=", "'localhost'", "# let the OS choose an available port; discover it later using", "# sock.getsockname()[1]", "warcprox_opts", ".", "port", "=", "0", "warcprox_opts", ".", "cacert", "=", "args", ".", "cacert", "warcprox_opts", ".", "certs_dir", "=", "args", ".", "certs_dir", "warcprox_opts", ".", "directory", "=", "args", ".", "warcs_dir", "warcprox_opts", ".", "gzip", "=", "True", "warcprox_opts", ".", "prefix", "=", "'brozzler'", "warcprox_opts", ".", "size", "=", "1000", "*", "1000", "*", "1000", "warcprox_opts", ".", "rollover_idle_time", "=", "3", "*", "60", "warcprox_opts", ".", "digest_algorithm", "=", "'sha1'", "warcprox_opts", ".", "base32", "=", "True", "warcprox_opts", ".", "stats_db_file", "=", "None", "warcprox_opts", ".", "playback_port", "=", "None", "warcprox_opts", ".", "playback_index_db_file", "=", "None", "warcprox_opts", ".", "rethinkdb_big_table_url", "=", "(", "'rethinkdb://%s/%s/captures'", "%", "(", "args", ".", "rethinkdb_servers", ",", "args", ".", "rethinkdb_db", ")", ")", "warcprox_opts", ".", "queue_size", "=", "500", "warcprox_opts", ".", "max_threads", "=", "None", "warcprox_opts", ".", "profile", "=", "False", "warcprox_opts", ".", "onion_tor_socks_proxy", "=", "args", ".", "onion_tor_socks_proxy", "return", "warcprox_opts" ]
45.59375
12.78125
def parse_relation(obj: dict) -> BioCRelation: """Deserialize a dict obj to a BioCRelation object""" rel = BioCRelation() rel.id = obj['id'] rel.infons = obj['infons'] for node in obj['nodes']: rel.add_node(BioCNode(node['refid'], node['role'])) return rel
[ "def", "parse_relation", "(", "obj", ":", "dict", ")", "->", "BioCRelation", ":", "rel", "=", "BioCRelation", "(", ")", "rel", ".", "id", "=", "obj", "[", "'id'", "]", "rel", ".", "infons", "=", "obj", "[", "'infons'", "]", "for", "node", "in", "obj", "[", "'nodes'", "]", ":", "rel", ".", "add_node", "(", "BioCNode", "(", "node", "[", "'refid'", "]", ",", "node", "[", "'role'", "]", ")", ")", "return", "rel" ]
36
13
def create_stash(self, payload, path=None): """ Create a stash. (JSON document) """ if path: self._request('POST', '/stashes/{}'.format(path), json=payload) else: self._request('POST', '/stashes', json=payload) return True
[ "def", "create_stash", "(", "self", ",", "payload", ",", "path", "=", "None", ")", ":", "if", "path", ":", "self", ".", "_request", "(", "'POST'", ",", "'/stashes/{}'", ".", "format", "(", "path", ")", ",", "json", "=", "payload", ")", "else", ":", "self", ".", "_request", "(", "'POST'", ",", "'/stashes'", ",", "json", "=", "payload", ")", "return", "True" ]
31.1
11.7
def qsnorm(p): """ rational approximation for x where q(x)=d, q being the cumulative normal distribution function. taken from Abramowitz & Stegun p. 933 |error(x)| < 4.5*10**-4 """ d = p if d < 0. or d > 1.: print('d not in (1,1) ') sys.exit() x = 0. if (d - 0.5) > 0: d = 1. - d if (d - 0.5) < 0: t2 = -2. * np.log(d) t = np.sqrt(t2) x = t - old_div((2.515517 + .802853 * t + .010328 * t2), (1. + 1.432788 * t + .189269 * t2 + .001308 * t * t2)) if p < 0.5: x = -x return x
[ "def", "qsnorm", "(", "p", ")", ":", "d", "=", "p", "if", "d", "<", "0.", "or", "d", ">", "1.", ":", "print", "(", "'d not in (1,1) '", ")", "sys", ".", "exit", "(", ")", "x", "=", "0.", "if", "(", "d", "-", "0.5", ")", ">", "0", ":", "d", "=", "1.", "-", "d", "if", "(", "d", "-", "0.5", ")", "<", "0", ":", "t2", "=", "-", "2.", "*", "np", ".", "log", "(", "d", ")", "t", "=", "np", ".", "sqrt", "(", "t2", ")", "x", "=", "t", "-", "old_div", "(", "(", "2.515517", "+", ".802853", "*", "t", "+", ".010328", "*", "t2", ")", ",", "(", "1.", "+", "1.432788", "*", "t", "+", ".189269", "*", "t2", "+", ".001308", "*", "t", "*", "t2", ")", ")", "if", "p", "<", "0.5", ":", "x", "=", "-", "x", "return", "x" ]
28.095238
20.380952
def list_image(root, recursive, exts): """Traverses the root of directory that contains images and generates image list iterator. Parameters ---------- root: string recursive: bool exts: string Returns ------- image iterator that contains all the image under the specified path """ i = 0 if recursive: cat = {} for path, dirs, files in os.walk(root, followlinks=True): dirs.sort() files.sort() for fname in files: fpath = os.path.join(path, fname) suffix = os.path.splitext(fname)[1].lower() if os.path.isfile(fpath) and (suffix in exts): if path not in cat: cat[path] = len(cat) yield (i, os.path.relpath(fpath, root), cat[path]) i += 1 for k, v in sorted(cat.items(), key=lambda x: x[1]): print(os.path.relpath(k, root), v) else: for fname in sorted(os.listdir(root)): fpath = os.path.join(root, fname) suffix = os.path.splitext(fname)[1].lower() if os.path.isfile(fpath) and (suffix in exts): yield (i, os.path.relpath(fpath, root), 0) i += 1
[ "def", "list_image", "(", "root", ",", "recursive", ",", "exts", ")", ":", "i", "=", "0", "if", "recursive", ":", "cat", "=", "{", "}", "for", "path", ",", "dirs", ",", "files", "in", "os", ".", "walk", "(", "root", ",", "followlinks", "=", "True", ")", ":", "dirs", ".", "sort", "(", ")", "files", ".", "sort", "(", ")", "for", "fname", "in", "files", ":", "fpath", "=", "os", ".", "path", ".", "join", "(", "path", ",", "fname", ")", "suffix", "=", "os", ".", "path", ".", "splitext", "(", "fname", ")", "[", "1", "]", ".", "lower", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "(", "suffix", "in", "exts", ")", ":", "if", "path", "not", "in", "cat", ":", "cat", "[", "path", "]", "=", "len", "(", "cat", ")", "yield", "(", "i", ",", "os", ".", "path", ".", "relpath", "(", "fpath", ",", "root", ")", ",", "cat", "[", "path", "]", ")", "i", "+=", "1", "for", "k", ",", "v", "in", "sorted", "(", "cat", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")", ":", "print", "(", "os", ".", "path", ".", "relpath", "(", "k", ",", "root", ")", ",", "v", ")", "else", ":", "for", "fname", "in", "sorted", "(", "os", ".", "listdir", "(", "root", ")", ")", ":", "fpath", "=", "os", ".", "path", ".", "join", "(", "root", ",", "fname", ")", "suffix", "=", "os", ".", "path", ".", "splitext", "(", "fname", ")", "[", "1", "]", ".", "lower", "(", ")", "if", "os", ".", "path", ".", "isfile", "(", "fpath", ")", "and", "(", "suffix", "in", "exts", ")", ":", "yield", "(", "i", ",", "os", ".", "path", ".", "relpath", "(", "fpath", ",", "root", ")", ",", "0", ")", "i", "+=", "1" ]
34.638889
17.75
def set(self, val): """Set the value""" import time now = time.time() expected_value = [] new_val = {} new_val['timestamp'] = now if self._value != None: new_val['last_value'] = self._value expected_value = ['current_value', str(self._value)] new_val['current_value'] = val try: self.db.put_attributes(self.id, new_val, expected_value=expected_value) self.timestamp = new_val['timestamp'] except SDBResponseError, e: if e.status == 409: raise ValueError, "Sequence out of sync" else: raise
[ "def", "set", "(", "self", ",", "val", ")", ":", "import", "time", "now", "=", "time", ".", "time", "(", ")", "expected_value", "=", "[", "]", "new_val", "=", "{", "}", "new_val", "[", "'timestamp'", "]", "=", "now", "if", "self", ".", "_value", "!=", "None", ":", "new_val", "[", "'last_value'", "]", "=", "self", ".", "_value", "expected_value", "=", "[", "'current_value'", ",", "str", "(", "self", ".", "_value", ")", "]", "new_val", "[", "'current_value'", "]", "=", "val", "try", ":", "self", ".", "db", ".", "put_attributes", "(", "self", ".", "id", ",", "new_val", ",", "expected_value", "=", "expected_value", ")", "self", ".", "timestamp", "=", "new_val", "[", "'timestamp'", "]", "except", "SDBResponseError", ",", "e", ":", "if", "e", ".", "status", "==", "409", ":", "raise", "ValueError", ",", "\"Sequence out of sync\"", "else", ":", "raise" ]
34.473684
15.263158
def bootstrap_repl(which_ns: str) -> types.ModuleType: """Bootstrap the REPL with a few useful vars and returned the bootstrapped module so it's functions can be used by the REPL command.""" repl_ns = runtime.Namespace.get_or_create(sym.symbol("basilisp.repl")) ns = runtime.Namespace.get_or_create(sym.symbol(which_ns)) repl_module = importlib.import_module("basilisp.repl") ns.add_alias(sym.symbol("basilisp.repl"), repl_ns) ns.refer_all(repl_ns) return repl_module
[ "def", "bootstrap_repl", "(", "which_ns", ":", "str", ")", "->", "types", ".", "ModuleType", ":", "repl_ns", "=", "runtime", ".", "Namespace", ".", "get_or_create", "(", "sym", ".", "symbol", "(", "\"basilisp.repl\"", ")", ")", "ns", "=", "runtime", ".", "Namespace", ".", "get_or_create", "(", "sym", ".", "symbol", "(", "which_ns", ")", ")", "repl_module", "=", "importlib", ".", "import_module", "(", "\"basilisp.repl\"", ")", "ns", ".", "add_alias", "(", "sym", ".", "symbol", "(", "\"basilisp.repl\"", ")", ",", "repl_ns", ")", "ns", ".", "refer_all", "(", "repl_ns", ")", "return", "repl_module" ]
49.4
16
def parse_options(self, kwargs): """Validate the provided kwargs and return options as json string.""" kwargs = {camelize(key): value for key, value in kwargs.items()} for key in kwargs.keys(): assert key in self.valid_options, ( 'The option {} is not in the available options: {}.' .format(key, ', '.join(self.valid_options)) ) assert isinstance(kwargs[key], self.valid_options[key]), ( 'The option {} must be one of the following types: {}.' .format(key, self.valid_options[key]) ) return kwargs
[ "def", "parse_options", "(", "self", ",", "kwargs", ")", ":", "kwargs", "=", "{", "camelize", "(", "key", ")", ":", "value", "for", "key", ",", "value", "in", "kwargs", ".", "items", "(", ")", "}", "for", "key", "in", "kwargs", ".", "keys", "(", ")", ":", "assert", "key", "in", "self", ".", "valid_options", ",", "(", "'The option {} is not in the available options: {}.'", ".", "format", "(", "key", ",", "', '", ".", "join", "(", "self", ".", "valid_options", ")", ")", ")", "assert", "isinstance", "(", "kwargs", "[", "key", "]", ",", "self", ".", "valid_options", "[", "key", "]", ")", ",", "(", "'The option {} must be one of the following types: {}.'", ".", "format", "(", "key", ",", "self", ".", "valid_options", "[", "key", "]", ")", ")", "return", "kwargs" ]
48.384615
19.076923
def get_default_jprops_parsers(parser_finder: ParserFinder, conversion_finder: ConversionFinder) -> List[AnyParser]: """ Utility method to return the default parsers able to parse a dictionary from a properties file. :return: """ return [SingleFileParserFunction(parser_function=read_dict_from_properties, streaming_mode=True, custom_name='read_dict_from_properties', supported_exts={'.properties', '.txt'}, supported_types={dict}, function_args={'conversion_finder': conversion_finder}), # SingleFileParserFunction(parser_function=read_list_from_properties, # streaming_mode=True, # supported_exts={'.properties', '.txt'}, # supported_types={list}), ]
[ "def", "get_default_jprops_parsers", "(", "parser_finder", ":", "ParserFinder", ",", "conversion_finder", ":", "ConversionFinder", ")", "->", "List", "[", "AnyParser", "]", ":", "return", "[", "SingleFileParserFunction", "(", "parser_function", "=", "read_dict_from_properties", ",", "streaming_mode", "=", "True", ",", "custom_name", "=", "'read_dict_from_properties'", ",", "supported_exts", "=", "{", "'.properties'", ",", "'.txt'", "}", ",", "supported_types", "=", "{", "dict", "}", ",", "function_args", "=", "{", "'conversion_finder'", ":", "conversion_finder", "}", ")", ",", "# SingleFileParserFunction(parser_function=read_list_from_properties,", "# streaming_mode=True,", "# supported_exts={'.properties', '.txt'},", "# supported_types={list}),", "]" ]
62.466667
34.733333
def copen(filepath, flag='r', encoding=None): """ FIXME: How to test this ? >>> c = copen(__file__) >>> c is not None True """ if encoding is None: encoding = locale.getdefaultlocale()[1] return codecs.open(filepath, flag, encoding)
[ "def", "copen", "(", "filepath", ",", "flag", "=", "'r'", ",", "encoding", "=", "None", ")", ":", "if", "encoding", "is", "None", ":", "encoding", "=", "locale", ".", "getdefaultlocale", "(", ")", "[", "1", "]", "return", "codecs", ".", "open", "(", "filepath", ",", "flag", ",", "encoding", ")" ]
20.230769
17.769231
def venue_stocks(self): """List the stocks available for trading on the venue. https://starfighter.readme.io/docs/list-stocks-on-venue """ url = urljoin(self.base_url, 'venues/{0}/stocks'.format(self.venue)) return self.session.get(url).json()
[ "def", "venue_stocks", "(", "self", ")", ":", "url", "=", "urljoin", "(", "self", ".", "base_url", ",", "'venues/{0}/stocks'", ".", "format", "(", "self", ".", "venue", ")", ")", "return", "self", ".", "session", ".", "get", "(", "url", ")", ".", "json", "(", ")" ]
39.714286
17
def p_subidentifiers_defval(self, p): """subidentifiers_defval : subidentifiers_defval subidentifier_defval | subidentifier_defval""" n = len(p) if n == 3: p[0] = ('subidentifiers_defval', p[1][1] + [p[2]]) elif n == 2: p[0] = ('subidentifiers_defval', [p[1]])
[ "def", "p_subidentifiers_defval", "(", "self", ",", "p", ")", ":", "n", "=", "len", "(", "p", ")", "if", "n", "==", "3", ":", "p", "[", "0", "]", "=", "(", "'subidentifiers_defval'", ",", "p", "[", "1", "]", "[", "1", "]", "+", "[", "p", "[", "2", "]", "]", ")", "elif", "n", "==", "2", ":", "p", "[", "0", "]", "=", "(", "'subidentifiers_defval'", ",", "[", "p", "[", "1", "]", "]", ")" ]
42.75
12.625
def add_update_topology_db(self, **params): """Add or update an entry to the topology DB. """ topo_dict = params.get('columns') session = db.get_session() host = topo_dict.get('host') protocol_interface = topo_dict.get('protocol_interface') with session.begin(subtransactions=True): try: # Check if entry exists. session.query(DfaTopologyDb).filter_by( host=host, protocol_interface=protocol_interface).one() session.query(DfaTopologyDb).filter_by( host=host, protocol_interface=protocol_interface).update( topo_dict) except orm_exc.NoResultFound: LOG.info("Creating new topology entry for host " "%(host)s on Interface %(intf)s", {'host': host, 'intf': protocol_interface}) topo_disc = DfaTopologyDb( host=host, protocol_interface=protocol_interface, phy_interface=topo_dict.get('phy_interface'), created=topo_dict.get('created'), heartbeat=topo_dict.get('heartbeat'), remote_mgmt_addr=topo_dict.get('remote_mgmt_addr'), remote_system_name=topo_dict.get('remote_system_name'), remote_system_desc=topo_dict.get('remote_system_desc'), remote_port_id_mac=topo_dict.get('remote_port_id_mac'), remote_chassis_id_mac=topo_dict.get( 'remote_chassis_id_mac'), remote_port=topo_dict.get('remote_port'), remote_evb_cfgd=topo_dict.get('remote_evb_cfgd'), remote_evb_mode=topo_dict.get('remote_evb_mode'), configurations=topo_dict.get('configurations')) session.add(topo_disc) except orm_exc.MultipleResultsFound: LOG.error("More than one enty found for agent %(host)s." "Interface %(intf)s", {'host': host, 'intf': protocol_interface}) except Exception as exc: LOG.error("Exception in add_update_topology_db %s", exc)
[ "def", "add_update_topology_db", "(", "self", ",", "*", "*", "params", ")", ":", "topo_dict", "=", "params", ".", "get", "(", "'columns'", ")", "session", "=", "db", ".", "get_session", "(", ")", "host", "=", "topo_dict", ".", "get", "(", "'host'", ")", "protocol_interface", "=", "topo_dict", ".", "get", "(", "'protocol_interface'", ")", "with", "session", ".", "begin", "(", "subtransactions", "=", "True", ")", ":", "try", ":", "# Check if entry exists.", "session", ".", "query", "(", "DfaTopologyDb", ")", ".", "filter_by", "(", "host", "=", "host", ",", "protocol_interface", "=", "protocol_interface", ")", ".", "one", "(", ")", "session", ".", "query", "(", "DfaTopologyDb", ")", ".", "filter_by", "(", "host", "=", "host", ",", "protocol_interface", "=", "protocol_interface", ")", ".", "update", "(", "topo_dict", ")", "except", "orm_exc", ".", "NoResultFound", ":", "LOG", ".", "info", "(", "\"Creating new topology entry for host \"", "\"%(host)s on Interface %(intf)s\"", ",", "{", "'host'", ":", "host", ",", "'intf'", ":", "protocol_interface", "}", ")", "topo_disc", "=", "DfaTopologyDb", "(", "host", "=", "host", ",", "protocol_interface", "=", "protocol_interface", ",", "phy_interface", "=", "topo_dict", ".", "get", "(", "'phy_interface'", ")", ",", "created", "=", "topo_dict", ".", "get", "(", "'created'", ")", ",", "heartbeat", "=", "topo_dict", ".", "get", "(", "'heartbeat'", ")", ",", "remote_mgmt_addr", "=", "topo_dict", ".", "get", "(", "'remote_mgmt_addr'", ")", ",", "remote_system_name", "=", "topo_dict", ".", "get", "(", "'remote_system_name'", ")", ",", "remote_system_desc", "=", "topo_dict", ".", "get", "(", "'remote_system_desc'", ")", ",", "remote_port_id_mac", "=", "topo_dict", ".", "get", "(", "'remote_port_id_mac'", ")", ",", "remote_chassis_id_mac", "=", "topo_dict", ".", "get", "(", "'remote_chassis_id_mac'", ")", ",", "remote_port", "=", "topo_dict", ".", "get", "(", "'remote_port'", ")", ",", "remote_evb_cfgd", "=", "topo_dict", ".", "get", 
"(", "'remote_evb_cfgd'", ")", ",", "remote_evb_mode", "=", "topo_dict", ".", "get", "(", "'remote_evb_mode'", ")", ",", "configurations", "=", "topo_dict", ".", "get", "(", "'configurations'", ")", ")", "session", ".", "add", "(", "topo_disc", ")", "except", "orm_exc", ".", "MultipleResultsFound", ":", "LOG", ".", "error", "(", "\"More than one enty found for agent %(host)s.\"", "\"Interface %(intf)s\"", ",", "{", "'host'", ":", "host", ",", "'intf'", ":", "protocol_interface", "}", ")", "except", "Exception", "as", "exc", ":", "LOG", ".", "error", "(", "\"Exception in add_update_topology_db %s\"", ",", "exc", ")" ]
55.95
18.025
def start(self): """ Start the installation wizard """ self.log.debug('Starting the installation process') self.browser.open(self.url) self.system_check()
[ "def", "start", "(", "self", ")", ":", "self", ".", "log", ".", "debug", "(", "'Starting the installation process'", ")", "self", ".", "browser", ".", "open", "(", "self", ".", "url", ")", "self", ".", "system_check", "(", ")" ]
21.777778
16
def _validate_no_rels(param, rels): """ Ensure the sortable field is not on a relationship """ if param.field in rels: raise InvalidQueryParams(**{ 'detail': 'The sort query param value of "%s" is not ' 'supported. Sorting on relationships is not ' 'currently supported' % param.raw_field, 'links': LINK, 'parameter': PARAM, })
[ "def", "_validate_no_rels", "(", "param", ",", "rels", ")", ":", "if", "param", ".", "field", "in", "rels", ":", "raise", "InvalidQueryParams", "(", "*", "*", "{", "'detail'", ":", "'The sort query param value of \"%s\" is not '", "'supported. Sorting on relationships is not '", "'currently supported'", "%", "param", ".", "raw_field", ",", "'links'", ":", "LINK", ",", "'parameter'", ":", "PARAM", ",", "}", ")" ]
38.363636
17.272727
def get_filtered_devices( self, model_name, device_types="upnp:rootdevice", timeout=2 ): """ returns a dict of devices that contain the given model name """ # get list of all UPNP devices in the network upnp_devices = self.discover_upnp_devices(st=device_types) # go through all UPNP devices and filter wanted devices filtered_devices = collections.defaultdict(dict) for dev in upnp_devices.values(): try: # download XML file with information about the device # from the device's location r = requests.get(dev.location, timeout=timeout) if r.status_code == requests.codes.ok: # parse returned XML root = ET.fromstring(r.text) # add shortcut for XML namespace to access sub nodes ns = {"upnp": "urn:schemas-upnp-org:device-1-0"} # get device element device = root.find("upnp:device", ns) if model_name in device.find( "upnp:modelName", ns ).text: # model name is wanted => add to list # get unique UDN of the device that is used as key udn = device.find("upnp:UDN", ns).text # add url base url_base = root.find("upnp:URLBase", ns) if url_base is not None: filtered_devices[udn][ "URLBase" ] = url_base.text # add interesting device attributes and # use unique UDN as key for attr in ( "deviceType", "friendlyName", "manufacturer", "manufacturerURL", "modelDescription", "modelName", "modelNumber" ): el = device.find("upnp:%s" % attr, ns) if el is not None: filtered_devices[udn][ attr ] = el.text.strip() except ET.ParseError: # just skip devices that are invalid xml pass except requests.exceptions.ConnectTimeout: # just skip devices that are not replying in time print("Timeout for '%s'. Skipping." % dev.location) return filtered_devices
[ "def", "get_filtered_devices", "(", "self", ",", "model_name", ",", "device_types", "=", "\"upnp:rootdevice\"", ",", "timeout", "=", "2", ")", ":", "# get list of all UPNP devices in the network", "upnp_devices", "=", "self", ".", "discover_upnp_devices", "(", "st", "=", "device_types", ")", "# go through all UPNP devices and filter wanted devices", "filtered_devices", "=", "collections", ".", "defaultdict", "(", "dict", ")", "for", "dev", "in", "upnp_devices", ".", "values", "(", ")", ":", "try", ":", "# download XML file with information about the device", "# from the device's location", "r", "=", "requests", ".", "get", "(", "dev", ".", "location", ",", "timeout", "=", "timeout", ")", "if", "r", ".", "status_code", "==", "requests", ".", "codes", ".", "ok", ":", "# parse returned XML", "root", "=", "ET", ".", "fromstring", "(", "r", ".", "text", ")", "# add shortcut for XML namespace to access sub nodes", "ns", "=", "{", "\"upnp\"", ":", "\"urn:schemas-upnp-org:device-1-0\"", "}", "# get device element", "device", "=", "root", ".", "find", "(", "\"upnp:device\"", ",", "ns", ")", "if", "model_name", "in", "device", ".", "find", "(", "\"upnp:modelName\"", ",", "ns", ")", ".", "text", ":", "# model name is wanted => add to list", "# get unique UDN of the device that is used as key", "udn", "=", "device", ".", "find", "(", "\"upnp:UDN\"", ",", "ns", ")", ".", "text", "# add url base", "url_base", "=", "root", ".", "find", "(", "\"upnp:URLBase\"", ",", "ns", ")", "if", "url_base", "is", "not", "None", ":", "filtered_devices", "[", "udn", "]", "[", "\"URLBase\"", "]", "=", "url_base", ".", "text", "# add interesting device attributes and", "# use unique UDN as key", "for", "attr", "in", "(", "\"deviceType\"", ",", "\"friendlyName\"", ",", "\"manufacturer\"", ",", "\"manufacturerURL\"", ",", "\"modelDescription\"", ",", "\"modelName\"", ",", "\"modelNumber\"", ")", ":", "el", "=", "device", ".", "find", "(", "\"upnp:%s\"", "%", "attr", ",", "ns", ")", "if", "el", 
"is", "not", "None", ":", "filtered_devices", "[", "udn", "]", "[", "attr", "]", "=", "el", ".", "text", ".", "strip", "(", ")", "except", "ET", ".", "ParseError", ":", "# just skip devices that are invalid xml", "pass", "except", "requests", ".", "exceptions", ".", "ConnectTimeout", ":", "# just skip devices that are not replying in time", "print", "(", "\"Timeout for '%s'. Skipping.\"", "%", "dev", ".", "location", ")", "return", "filtered_devices" ]
40.453125
19.515625
def managed_wrapper_class_factory(zos_obj): """Creates and returns a wrapper class of a ZOS object, exposing the ZOS objects methods and propertis, and patching custom specialized attributes @param zos_obj: ZOS API Python COM object """ cls_name = repr(zos_obj).split()[0].split('.')[-1] dispatch_attr = '_' + cls_name.lower() # protocol to be followed to store the ZOS COM object cdict = {} # class dictionary # patch the properties of the base objects base_cls_list = inheritance_dict.get(cls_name, None) if base_cls_list: for base_cls_name in base_cls_list: getters, setters = get_properties(_CastTo(zos_obj, base_cls_name)) for each in getters: exec("p{} = ZOSPropMapper('{}', '{}', cast_to='{}')".format(each, dispatch_attr, each, base_cls_name), globals(), cdict) for each in setters: exec("p{} = ZOSPropMapper('{}', '{}', setter=True, cast_to='{}')".format(each, dispatch_attr, each, base_cls_name), globals(), cdict) # patch the property attributes of the given ZOS object getters, setters = get_properties(zos_obj) for each in getters: exec("p{} = ZOSPropMapper('{}', '{}')".format(each, dispatch_attr, each), globals(), cdict) for each in setters: exec("p{} = ZOSPropMapper('{}', '{}', setter=True)".format(each, dispatch_attr, each), globals(), cdict) def __init__(self, zos_obj): # dispatcher attribute cls_name = repr(zos_obj).split()[0].split('.')[-1] dispatch_attr = '_' + cls_name.lower() # protocol to be followed to store the ZOS COM object self.__dict__[dispatch_attr] = zos_obj self._dispatch_attr_value = dispatch_attr # used in __getattr__ # Store base class object self._base_cls_list = inheritance_dict.get(cls_name, None) # patch the methods of the base class(s) of the given ZOS object if self._base_cls_list: for base_cls_name in self._base_cls_list: replicate_methods(_CastTo(zos_obj, base_cls_name), self) # patch the methods of given ZOS object replicate_methods(zos_obj, self) # mark object as wrapped to prevent it from being wrapped subsequently self._wrapped = True 
# Provide a way to make property calls without the prefix p def __getattr__(self, attrname): return wrapped_zos_object(getattr(self.__dict__[self._dispatch_attr_value], attrname)) def __repr__(self): if type(self).__name__ == 'IZOSAPI_Application': repr_str = "{.__name__}(NumberOfOpticalSystems = {})".format(type(self), self.pNumberOfOpticalSystems) else: repr_str = "{.__name__}".format(type(self)) return repr_str cdict['__init__'] = __init__ cdict['__getattr__'] = __getattr__ cdict['__repr__'] = __repr__ # patch custom methods from python files imported as modules module_import_str = """ try: from pyzos.zos_obj_override.{module:} import * except ImportError: pass """.format(module=cls_name.lower() + '_methods') exec(module_import_str, globals(), cdict) _ = cdict.pop('print_function', None) _ = cdict.pop('division', None) return type(cls_name, (), cdict)
[ "def", "managed_wrapper_class_factory", "(", "zos_obj", ")", ":", "cls_name", "=", "repr", "(", "zos_obj", ")", ".", "split", "(", ")", "[", "0", "]", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "dispatch_attr", "=", "'_'", "+", "cls_name", ".", "lower", "(", ")", "# protocol to be followed to store the ZOS COM object", "cdict", "=", "{", "}", "# class dictionary", "# patch the properties of the base objects ", "base_cls_list", "=", "inheritance_dict", ".", "get", "(", "cls_name", ",", "None", ")", "if", "base_cls_list", ":", "for", "base_cls_name", "in", "base_cls_list", ":", "getters", ",", "setters", "=", "get_properties", "(", "_CastTo", "(", "zos_obj", ",", "base_cls_name", ")", ")", "for", "each", "in", "getters", ":", "exec", "(", "\"p{} = ZOSPropMapper('{}', '{}', cast_to='{}')\"", ".", "format", "(", "each", ",", "dispatch_attr", ",", "each", ",", "base_cls_name", ")", ",", "globals", "(", ")", ",", "cdict", ")", "for", "each", "in", "setters", ":", "exec", "(", "\"p{} = ZOSPropMapper('{}', '{}', setter=True, cast_to='{}')\"", ".", "format", "(", "each", ",", "dispatch_attr", ",", "each", ",", "base_cls_name", ")", ",", "globals", "(", ")", ",", "cdict", ")", "# patch the property attributes of the given ZOS object", "getters", ",", "setters", "=", "get_properties", "(", "zos_obj", ")", "for", "each", "in", "getters", ":", "exec", "(", "\"p{} = ZOSPropMapper('{}', '{}')\"", ".", "format", "(", "each", ",", "dispatch_attr", ",", "each", ")", ",", "globals", "(", ")", ",", "cdict", ")", "for", "each", "in", "setters", ":", "exec", "(", "\"p{} = ZOSPropMapper('{}', '{}', setter=True)\"", ".", "format", "(", "each", ",", "dispatch_attr", ",", "each", ")", ",", "globals", "(", ")", ",", "cdict", ")", "def", "__init__", "(", "self", ",", "zos_obj", ")", ":", "# dispatcher attribute", "cls_name", "=", "repr", "(", "zos_obj", ")", ".", "split", "(", ")", "[", "0", "]", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "dispatch_attr", "=", "'_'", "+", 
"cls_name", ".", "lower", "(", ")", "# protocol to be followed to store the ZOS COM object", "self", ".", "__dict__", "[", "dispatch_attr", "]", "=", "zos_obj", "self", ".", "_dispatch_attr_value", "=", "dispatch_attr", "# used in __getattr__", "# Store base class object ", "self", ".", "_base_cls_list", "=", "inheritance_dict", ".", "get", "(", "cls_name", ",", "None", ")", "# patch the methods of the base class(s) of the given ZOS object", "if", "self", ".", "_base_cls_list", ":", "for", "base_cls_name", "in", "self", ".", "_base_cls_list", ":", "replicate_methods", "(", "_CastTo", "(", "zos_obj", ",", "base_cls_name", ")", ",", "self", ")", "# patch the methods of given ZOS object ", "replicate_methods", "(", "zos_obj", ",", "self", ")", "# mark object as wrapped to prevent it from being wrapped subsequently", "self", ".", "_wrapped", "=", "True", "# Provide a way to make property calls without the prefix p", "def", "__getattr__", "(", "self", ",", "attrname", ")", ":", "return", "wrapped_zos_object", "(", "getattr", "(", "self", ".", "__dict__", "[", "self", ".", "_dispatch_attr_value", "]", ",", "attrname", ")", ")", "def", "__repr__", "(", "self", ")", ":", "if", "type", "(", "self", ")", ".", "__name__", "==", "'IZOSAPI_Application'", ":", "repr_str", "=", "\"{.__name__}(NumberOfOpticalSystems = {})\"", ".", "format", "(", "type", "(", "self", ")", ",", "self", ".", "pNumberOfOpticalSystems", ")", "else", ":", "repr_str", "=", "\"{.__name__}\"", ".", "format", "(", "type", "(", "self", ")", ")", "return", "repr_str", "cdict", "[", "'__init__'", "]", "=", "__init__", "cdict", "[", "'__getattr__'", "]", "=", "__getattr__", "cdict", "[", "'__repr__'", "]", "=", "__repr__", "# patch custom methods from python files imported as modules", "module_import_str", "=", "\"\"\"\ntry: \n from pyzos.zos_obj_override.{module:} import *\nexcept ImportError:\n pass\n\"\"\"", ".", "format", "(", "module", "=", "cls_name", ".", "lower", "(", ")", "+", "'_methods'", ")", 
"exec", "(", "module_import_str", ",", "globals", "(", ")", ",", "cdict", ")", "_", "=", "cdict", ".", "pop", "(", "'print_function'", ",", "None", ")", "_", "=", "cdict", ".", "pop", "(", "'division'", ",", "None", ")", "return", "type", "(", "cls_name", ",", "(", ")", ",", "cdict", ")" ]
41.769231
24.602564
def _max_lengths(): """ The length of the largest magic string + its offset""" max_header_length = max([len(x.byte_match) + x.offset for x in magic_header_array]) max_footer_length = max([len(x.byte_match) + abs(x.offset) for x in magic_footer_array]) return max_header_length, max_footer_length
[ "def", "_max_lengths", "(", ")", ":", "max_header_length", "=", "max", "(", "[", "len", "(", "x", ".", "byte_match", ")", "+", "x", ".", "offset", "for", "x", "in", "magic_header_array", "]", ")", "max_footer_length", "=", "max", "(", "[", "len", "(", "x", ".", "byte_match", ")", "+", "abs", "(", "x", ".", "offset", ")", "for", "x", "in", "magic_footer_array", "]", ")", "return", "max_header_length", ",", "max_footer_length" ]
51.857143
14.714286
def _serializeParamsUniq_eval(parentUnit, obj, isDeclaration, priv): """ Decide to serialize only objs with uniq parameters and class :param priv: private data for this function ({frozen_params: obj}) :return: tuple (do serialize this object, next priv) """ params = paramsToValTuple(parentUnit) if priv is None: priv = {} if isDeclaration: try: prevUnit = priv[params] except KeyError: priv[params] = parentUnit return True, priv prepareEntity(obj, prevUnit._entity.name, prevUnit) return False, priv return priv[params] is parentUnit, priv
[ "def", "_serializeParamsUniq_eval", "(", "parentUnit", ",", "obj", ",", "isDeclaration", ",", "priv", ")", ":", "params", "=", "paramsToValTuple", "(", "parentUnit", ")", "if", "priv", "is", "None", ":", "priv", "=", "{", "}", "if", "isDeclaration", ":", "try", ":", "prevUnit", "=", "priv", "[", "params", "]", "except", "KeyError", ":", "priv", "[", "params", "]", "=", "parentUnit", "return", "True", ",", "priv", "prepareEntity", "(", "obj", ",", "prevUnit", ".", "_entity", ".", "name", ",", "prevUnit", ")", "return", "False", ",", "priv", "return", "priv", "[", "params", "]", "is", "parentUnit", ",", "priv" ]
24.730769
20.269231
def on_message(self, message): """Message from the backend has been received. :param message: Message string received. """ work_unit = SelenolMessage(message) request_id = work_unit.request_id if message['reason'] == ['selenol', 'request']: try: result = self.on_request(work_unit) if result is not None: return { 'reason': ['request', 'result'], 'request_id': request_id, 'content': { 'content': result, }, } except SelenolException as e: logging.exception(e) return { 'reason': ['request', 'exception'], 'request_id': request_id, 'content': { 'message': str(e), }, } except Exception as e: logging.exception(e) return { 'reason': ['request', 'exception'], 'request_id': request_id, 'content': { 'message': 'Not a Selenol exception', }, }
[ "def", "on_message", "(", "self", ",", "message", ")", ":", "work_unit", "=", "SelenolMessage", "(", "message", ")", "request_id", "=", "work_unit", ".", "request_id", "if", "message", "[", "'reason'", "]", "==", "[", "'selenol'", ",", "'request'", "]", ":", "try", ":", "result", "=", "self", ".", "on_request", "(", "work_unit", ")", "if", "result", "is", "not", "None", ":", "return", "{", "'reason'", ":", "[", "'request'", ",", "'result'", "]", ",", "'request_id'", ":", "request_id", ",", "'content'", ":", "{", "'content'", ":", "result", ",", "}", ",", "}", "except", "SelenolException", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "{", "'reason'", ":", "[", "'request'", ",", "'exception'", "]", ",", "'request_id'", ":", "request_id", ",", "'content'", ":", "{", "'message'", ":", "str", "(", "e", ")", ",", "}", ",", "}", "except", "Exception", "as", "e", ":", "logging", ".", "exception", "(", "e", ")", "return", "{", "'reason'", ":", "[", "'request'", ",", "'exception'", "]", ",", "'request_id'", ":", "request_id", ",", "'content'", ":", "{", "'message'", ":", "'Not a Selenol exception'", ",", "}", ",", "}" ]
34.837838
11.945946
def __parse(self) -> object: """Selects the appropriate method to decode next bencode element and returns the result.""" char = self.data[self.idx: self.idx + 1] if char in [b'1', b'2', b'3', b'4', b'5', b'6', b'7', b'8', b'9', b'0']: str_len = int(self.__read_to(b':')) return self.__read(str_len) elif char == b'i': self.idx += 1 return int(self.__read_to(b'e')) elif char == b'd': return self.__parse_dict() elif char == b'l': return self.__parse_list() elif char == b'': raise bencodepy.DecodingError('Unexpected End of File at index position of {0}.'.format(str(self.idx))) else: raise bencodepy.DecodingError( 'Invalid token character ({0}) at position {1}.'.format(str(char), str(self.idx)))
[ "def", "__parse", "(", "self", ")", "->", "object", ":", "char", "=", "self", ".", "data", "[", "self", ".", "idx", ":", "self", ".", "idx", "+", "1", "]", "if", "char", "in", "[", "b'1'", ",", "b'2'", ",", "b'3'", ",", "b'4'", ",", "b'5'", ",", "b'6'", ",", "b'7'", ",", "b'8'", ",", "b'9'", ",", "b'0'", "]", ":", "str_len", "=", "int", "(", "self", ".", "__read_to", "(", "b':'", ")", ")", "return", "self", ".", "__read", "(", "str_len", ")", "elif", "char", "==", "b'i'", ":", "self", ".", "idx", "+=", "1", "return", "int", "(", "self", ".", "__read_to", "(", "b'e'", ")", ")", "elif", "char", "==", "b'd'", ":", "return", "self", ".", "__parse_dict", "(", ")", "elif", "char", "==", "b'l'", ":", "return", "self", ".", "__parse_list", "(", ")", "elif", "char", "==", "b''", ":", "raise", "bencodepy", ".", "DecodingError", "(", "'Unexpected End of File at index position of {0}.'", ".", "format", "(", "str", "(", "self", ".", "idx", ")", ")", ")", "else", ":", "raise", "bencodepy", ".", "DecodingError", "(", "'Invalid token character ({0}) at position {1}.'", ".", "format", "(", "str", "(", "char", ")", ",", "str", "(", "self", ".", "idx", ")", ")", ")" ]
47.611111
17.222222
def get_matching_service_template_file(service_name, template_files): """ Return the template file that goes with the given service name, or return None if there's no match. Subservices return the parent service's file. """ # If this is a subservice, use the parent service's template service_name = service_name.split('.')[0] if service_name in template_files: return template_files[service_name] return None
[ "def", "get_matching_service_template_file", "(", "service_name", ",", "template_files", ")", ":", "# If this is a subservice, use the parent service's template", "service_name", "=", "service_name", ".", "split", "(", "'.'", ")", "[", "0", "]", "if", "service_name", "in", "template_files", ":", "return", "template_files", "[", "service_name", "]", "return", "None" ]
44.1
16.1
def resource_property(klass, name, **kwargs): """Builds a resource object property.""" klass.PROPERTIES[name] = kwargs def getter(self): return getattr(self, '_%s' % name, kwargs.get('default', None)) if kwargs.get('readonly', False): setattr(klass, name, property(getter)) else: def setter(self, value): setattr(self, '_%s' % name, value) setattr(klass, name, property(getter, setter))
[ "def", "resource_property", "(", "klass", ",", "name", ",", "*", "*", "kwargs", ")", ":", "klass", ".", "PROPERTIES", "[", "name", "]", "=", "kwargs", "def", "getter", "(", "self", ")", ":", "return", "getattr", "(", "self", ",", "'_%s'", "%", "name", ",", "kwargs", ".", "get", "(", "'default'", ",", "None", ")", ")", "if", "kwargs", ".", "get", "(", "'readonly'", ",", "False", ")", ":", "setattr", "(", "klass", ",", "name", ",", "property", "(", "getter", ")", ")", "else", ":", "def", "setter", "(", "self", ",", "value", ")", ":", "setattr", "(", "self", ",", "'_%s'", "%", "name", ",", "value", ")", "setattr", "(", "klass", ",", "name", ",", "property", "(", "getter", ",", "setter", ")", ")" ]
33.846154
16
def get_parameter_value(self, parameter, from_cache=True, timeout=10): """ Retrieve the current value of the specified parameter. :param str parameter: Either a fully-qualified XTCE name or an alias in the format ``NAMESPACE/NAME``. :param bool from_cache: If ``False`` this call will block until a fresh value is received on the processor. If ``True`` the server returns the latest value instead (which may be ``None``). :param float timeout: The amount of seconds to wait for a fresh value. (ignored if ``from_cache=True``). :rtype: .ParameterValue """ params = { 'fromCache': from_cache, 'timeout': int(timeout * 1000), } parameter = adapt_name_for_rest(parameter) url = '/processors/{}/{}/parameters{}'.format( self._instance, self._processor, parameter) response = self._client.get_proto(url, params=params) proto = pvalue_pb2.ParameterValue() proto.ParseFromString(response.content) # Server returns ParameterValue with only 'id' set if no # value existed. Convert this to ``None``. if proto.HasField('rawValue') or proto.HasField('engValue'): return ParameterValue(proto) return None
[ "def", "get_parameter_value", "(", "self", ",", "parameter", ",", "from_cache", "=", "True", ",", "timeout", "=", "10", ")", ":", "params", "=", "{", "'fromCache'", ":", "from_cache", ",", "'timeout'", ":", "int", "(", "timeout", "*", "1000", ")", ",", "}", "parameter", "=", "adapt_name_for_rest", "(", "parameter", ")", "url", "=", "'/processors/{}/{}/parameters{}'", ".", "format", "(", "self", ".", "_instance", ",", "self", ".", "_processor", ",", "parameter", ")", "response", "=", "self", ".", "_client", ".", "get_proto", "(", "url", ",", "params", "=", "params", ")", "proto", "=", "pvalue_pb2", ".", "ParameterValue", "(", ")", "proto", ".", "ParseFromString", "(", "response", ".", "content", ")", "# Server returns ParameterValue with only 'id' set if no", "# value existed. Convert this to ``None``.", "if", "proto", ".", "HasField", "(", "'rawValue'", ")", "or", "proto", ".", "HasField", "(", "'engValue'", ")", ":", "return", "ParameterValue", "(", "proto", ")", "return", "None" ]
47.033333
20.1
def set_skips(self, skips): """Set the line skips.""" skips.sort() internal_assert(lambda: len(set(skips)) == len(skips), "duplicate line skip(s) in skips", skips) self.skips = skips
[ "def", "set_skips", "(", "self", ",", "skips", ")", ":", "skips", ".", "sort", "(", ")", "internal_assert", "(", "lambda", ":", "len", "(", "set", "(", "skips", ")", ")", "==", "len", "(", "skips", ")", ",", "\"duplicate line skip(s) in skips\"", ",", "skips", ")", "self", ".", "skips", "=", "skips" ]
42
22.2
def del_cells(self, name): """Implementation of cells deletion ``del space.name`` where name is a cells, or ``del space.cells['name']`` """ if name in self.cells: cells = self.cells[name] self.cells.del_item(name) self.inherit() self.model.spacegraph.update_subspaces(self) elif name in self.dynamic_spaces: cells = self.dynamic_spaces.pop(name) self.dynamic_spaces.set_update() else: raise KeyError("Cells '%s' does not exist" % name) NullImpl(cells)
[ "def", "del_cells", "(", "self", ",", "name", ")", ":", "if", "name", "in", "self", ".", "cells", ":", "cells", "=", "self", ".", "cells", "[", "name", "]", "self", ".", "cells", ".", "del_item", "(", "name", ")", "self", ".", "inherit", "(", ")", "self", ".", "model", ".", "spacegraph", ".", "update_subspaces", "(", "self", ")", "elif", "name", "in", "self", ".", "dynamic_spaces", ":", "cells", "=", "self", ".", "dynamic_spaces", ".", "pop", "(", "name", ")", "self", ".", "dynamic_spaces", ".", "set_update", "(", ")", "else", ":", "raise", "KeyError", "(", "\"Cells '%s' does not exist\"", "%", "name", ")", "NullImpl", "(", "cells", ")" ]
29.2
15.9
def get_port_profile_for_intf_output_has_more(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") get_port_profile_for_intf = ET.Element("get_port_profile_for_intf") config = get_port_profile_for_intf output = ET.SubElement(get_port_profile_for_intf, "output") has_more = ET.SubElement(output, "has-more") has_more.text = kwargs.pop('has_more') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "get_port_profile_for_intf_output_has_more", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "get_port_profile_for_intf", "=", "ET", ".", "Element", "(", "\"get_port_profile_for_intf\"", ")", "config", "=", "get_port_profile_for_intf", "output", "=", "ET", ".", "SubElement", "(", "get_port_profile_for_intf", ",", "\"output\"", ")", "has_more", "=", "ET", ".", "SubElement", "(", "output", ",", "\"has-more\"", ")", "has_more", ".", "text", "=", "kwargs", ".", "pop", "(", "'has_more'", ")", "callback", "=", "kwargs", ".", "pop", "(", "'callback'", ",", "self", ".", "_callback", ")", "return", "callback", "(", "config", ")" ]
42.5
14.416667
def is_token_expired(self, margin=None): """Determine if the token is expired. :returns: ``True`` if the token is expired, ``False`` if not, and ``None`` if there is no token set. :param margin: A security time margin in seconds before real expiration. Will return ``True`` if the token expires in less than ``margin`` seconds of time. A default margin can be set by the TOKEN_TIMEOUT_MARGIN in the django settings. """ if self.token is None: return None return not utils.is_token_valid(self.token, margin)
[ "def", "is_token_expired", "(", "self", ",", "margin", "=", "None", ")", ":", "if", "self", ".", "token", "is", "None", ":", "return", "None", "return", "not", "utils", ".", "is_token_valid", "(", "self", ".", "token", ",", "margin", ")" ]
34.833333
19.944444
def get_alpha_or_number(number, template): """Returns an Alphanumber that represents the number passed in, expressed as defined in the template. Otherwise, returns the number """ match = re.match(r".*\{alpha:(\d+a\d+d)\}$", template.strip()) if match and match.groups(): format = match.groups()[0] return to_alpha(number, format) return number
[ "def", "get_alpha_or_number", "(", "number", ",", "template", ")", ":", "match", "=", "re", ".", "match", "(", "r\".*\\{alpha:(\\d+a\\d+d)\\}$\"", ",", "template", ".", "strip", "(", ")", ")", "if", "match", "and", "match", ".", "groups", "(", ")", ":", "format", "=", "match", ".", "groups", "(", ")", "[", "0", "]", "return", "to_alpha", "(", "number", ",", "format", ")", "return", "number" ]
41.666667
9.666667
def _get_mid_and_update_msg(self, msg, use_mid): """Get message ID for current request and assign to msg.mid if needed. Parameters ---------- msg : katcp.Message ?request message use_mid : bool or None If msg.mid is None, a new message ID will be created. msg.mid will be filled with this ID if use_mid is True or if use_mid is None and the server supports message ids. If msg.mid is already assigned, it will not be touched, and will be used as the active message ID. Return value ------------ The active message ID """ if use_mid is None: use_mid = self._server_supports_ids if msg.mid is None: mid = self._next_id() if use_mid: msg.mid = mid # An internal mid may be needed for the request/inform/response # machinery to work, so we return it return mid else: return msg.mid
[ "def", "_get_mid_and_update_msg", "(", "self", ",", "msg", ",", "use_mid", ")", ":", "if", "use_mid", "is", "None", ":", "use_mid", "=", "self", ".", "_server_supports_ids", "if", "msg", ".", "mid", "is", "None", ":", "mid", "=", "self", ".", "_next_id", "(", ")", "if", "use_mid", ":", "msg", ".", "mid", "=", "mid", "# An internal mid may be needed for the request/inform/response", "# machinery to work, so we return it", "return", "mid", "else", ":", "return", "msg", ".", "mid" ]
32.7
20.733333
def get_default_object_parsers(parser_finder: ParserFinder, conversion_finder: ConversionFinder) -> List[AnyParser]: """ Utility method to return the default parsers able to parse an object from a file. Note that MultifileObjectParser is not provided in this list, as it is already added in a hardcoded way in RootParser :return: """ return [SingleFileParserFunction(parser_function=read_object_from_pickle, streaming_mode=False, supported_exts={'.pyc'}, supported_types={AnyObject}), MultifileObjectParser(parser_finder, conversion_finder) ]
[ "def", "get_default_object_parsers", "(", "parser_finder", ":", "ParserFinder", ",", "conversion_finder", ":", "ConversionFinder", ")", "->", "List", "[", "AnyParser", "]", ":", "return", "[", "SingleFileParserFunction", "(", "parser_function", "=", "read_object_from_pickle", ",", "streaming_mode", "=", "False", ",", "supported_exts", "=", "{", "'.pyc'", "}", ",", "supported_types", "=", "{", "AnyObject", "}", ")", ",", "MultifileObjectParser", "(", "parser_finder", ",", "conversion_finder", ")", "]" ]
53.307692
30.846154
def call_script(self, key, tmp_key, key_type, start, end, exclude, *args): """Call the lua scripts with given keys and args Parameters ----------- key: str The key of the index sorted-set tmp_key: str The final temporary key where to store the filtered primary keys key_type: str The type of temporary key to use, either 'set' or 'zset' start: str The "start" argument to pass to the filtering sorted-set command end: str The "end" argument to pass to the filtering sorted-set command exclude: any A value to exclude from the filtered pks to save to the temporary key args: list Any other argument to be passed by a subclass will be passed as addition args to the script. """ self.model.database.call_script( # be sure to use the script dict at the class level # to avoid registering it many times script_dict=self.__class__.lua_filter_script, keys=[key, tmp_key], args=[key_type, start, end, exclude] + list(args) )
[ "def", "call_script", "(", "self", ",", "key", ",", "tmp_key", ",", "key_type", ",", "start", ",", "end", ",", "exclude", ",", "*", "args", ")", ":", "self", ".", "model", ".", "database", ".", "call_script", "(", "# be sure to use the script dict at the class level", "# to avoid registering it many times", "script_dict", "=", "self", ".", "__class__", ".", "lua_filter_script", ",", "keys", "=", "[", "key", ",", "tmp_key", "]", ",", "args", "=", "[", "key_type", ",", "start", ",", "end", ",", "exclude", "]", "+", "list", "(", "args", ")", ")" ]
39.655172
22.310345
def get_leaf_children(gos_user, go2obj_arg): """Find all the GO descendants under all user GO IDs. Return leaf-level GO IDs.""" childgoid2obj = {} for goid_usr in gos_user: goobj_usr = go2obj_arg[goid_usr] fill_childgoid2obj(childgoid2obj, goobj_usr) return set(go for go, o in childgoid2obj.items() if not o.children)
[ "def", "get_leaf_children", "(", "gos_user", ",", "go2obj_arg", ")", ":", "childgoid2obj", "=", "{", "}", "for", "goid_usr", "in", "gos_user", ":", "goobj_usr", "=", "go2obj_arg", "[", "goid_usr", "]", "fill_childgoid2obj", "(", "childgoid2obj", ",", "goobj_usr", ")", "return", "set", "(", "go", "for", "go", ",", "o", "in", "childgoid2obj", ".", "items", "(", ")", "if", "not", "o", ".", "children", ")" ]
49.142857
10.857143
def unitary_operator(state_vector):
    """
    Uses QR factorization to create a unitary operator that can encode an
    arbitrary normalized vector into the wavefunction of a quantum state.

    Assumes that the state of the input qubits is to be expressed as

    .. math::

        (1, 0, \\ldots, 0)^T

    :param 1d array state_vector: Normalized vector whose length is at least
                                  two and a power of two.
    :return: Unitary operator that encodes state_vector
    :rtype: 2d array
    """
    if not np.allclose([np.linalg.norm(state_vector)], [1]):
        raise ValueError("Vector must be normalized")

    dim = len(state_vector)
    if 2 ** get_bits_needed(dim) != dim:
        raise ValueError("Vector length must be a power of two and at least two")

    # Start from the identity and overwrite the first column with the
    # target vector; QR then yields a unitary whose first column spans it.
    mat = np.identity(dim, dtype=complex)
    for row in range(dim):
        mat[row, 0] = state_vector[row]
    U = np.linalg.qr(mat)[0]

    # QR may flip the overall sign; correct the phase so that U|0> = |v>.
    zero_state = np.zeros(dim)
    zero_state[0] = 1
    if np.allclose(U.dot(zero_state), state_vector):
        return U
    return -1 * U
[ "def", "unitary_operator", "(", "state_vector", ")", ":", "if", "not", "np", ".", "allclose", "(", "[", "np", ".", "linalg", ".", "norm", "(", "state_vector", ")", "]", ",", "[", "1", "]", ")", ":", "raise", "ValueError", "(", "\"Vector must be normalized\"", ")", "if", "2", "**", "get_bits_needed", "(", "len", "(", "state_vector", ")", ")", "!=", "len", "(", "state_vector", ")", ":", "raise", "ValueError", "(", "\"Vector length must be a power of two and at least two\"", ")", "mat", "=", "np", ".", "identity", "(", "len", "(", "state_vector", ")", ",", "dtype", "=", "complex", ")", "for", "i", "in", "range", "(", "len", "(", "state_vector", ")", ")", ":", "mat", "[", "i", ",", "0", "]", "=", "state_vector", "[", "i", "]", "U", "=", "np", ".", "linalg", ".", "qr", "(", "mat", ")", "[", "0", "]", "# make sure U|0> = |v>", "zero_state", "=", "np", ".", "zeros", "(", "len", "(", "U", ")", ")", "zero_state", "[", "0", "]", "=", "1", "if", "np", ".", "allclose", "(", "U", ".", "dot", "(", "zero_state", ")", ",", "state_vector", ")", ":", "return", "U", "else", ":", "# adjust phase if needed", "return", "-", "1", "*", "U" ]
30.891892
21.054054
def read_csv(self, dtype=False, parse_dates=True, *args, **kwargs):
    """Fetch the target and pass it through to :func:`pandas.read_csv`.

    The first positional argument of ``read_csv()`` (the file path) is
    supplied internally from the resolved target; do not provide it.
    """
    import pandas

    target = self.resolved_url.get_resource().get_target()
    merged_kwargs = self._update_pandas_kwargs(dtype, parse_dates, kwargs)
    return pandas.read_csv(target.fspath, *args, **merged_kwargs)
[ "def", "read_csv", "(", "self", ",", "dtype", "=", "False", ",", "parse_dates", "=", "True", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "import", "pandas", "t", "=", "self", ".", "resolved_url", ".", "get_resource", "(", ")", ".", "get_target", "(", ")", "kwargs", "=", "self", ".", "_update_pandas_kwargs", "(", "dtype", ",", "parse_dates", ",", "kwargs", ")", "return", "pandas", ".", "read_csv", "(", "t", ".", "fspath", ",", "*", "args", ",", "*", "*", "kwargs", ")" ]
33
27.153846
def _compute_needed_metrics(self, instance, available_metrics):
    """
    Compare the available metrics for one MOR we have computed and
    intersect them with the set of metrics we want to report.
    """
    i_key = self._instance_key(instance)
    if not self.in_compatibility_mode(instance):
        # The metadata cache contains only metrics of the desired level,
        # so use it to filter the metrics to keep.
        return [
            vim.PerformanceManager.MetricId(counterId=counter_id, instance="*")
            for counter_id in available_metrics
            if self.metadata_cache.contains(i_key, counter_id)
        ]

    if instance.get('all_metrics', False):
        return available_metrics

    # Compatibility mode without all_metrics: keep only the basic metrics.
    wanted = []
    for counter_id in available_metrics:
        if not self.metadata_cache.contains(i_key, counter_id):
            # No cache yet, skip it for now.
            self.log.debug(
                "No metadata found for counter {}, will not collect it".format(ensure_unicode(counter_id))
            )
            continue
        metadata = self.metadata_cache.get_metadata(i_key, counter_id)
        if metadata.get('name') in BASIC_METRICS:
            wanted.append(vim.PerformanceManager.MetricId(counterId=counter_id, instance="*"))
    return wanted
[ "def", "_compute_needed_metrics", "(", "self", ",", "instance", ",", "available_metrics", ")", ":", "i_key", "=", "self", ".", "_instance_key", "(", "instance", ")", "if", "self", ".", "in_compatibility_mode", "(", "instance", ")", ":", "if", "instance", ".", "get", "(", "'all_metrics'", ",", "False", ")", ":", "return", "available_metrics", "wanted_metrics", "=", "[", "]", "# Get only the basic metrics", "for", "counter_id", "in", "available_metrics", ":", "# No cache yet, skip it for now", "if", "not", "self", ".", "metadata_cache", ".", "contains", "(", "i_key", ",", "counter_id", ")", ":", "self", ".", "log", ".", "debug", "(", "\"No metadata found for counter {}, will not collect it\"", ".", "format", "(", "ensure_unicode", "(", "counter_id", ")", ")", ")", "continue", "metadata", "=", "self", ".", "metadata_cache", ".", "get_metadata", "(", "i_key", ",", "counter_id", ")", "if", "metadata", ".", "get", "(", "'name'", ")", "in", "BASIC_METRICS", ":", "wanted_metrics", ".", "append", "(", "vim", ".", "PerformanceManager", ".", "MetricId", "(", "counterId", "=", "counter_id", ",", "instance", "=", "\"*\"", ")", ")", "return", "wanted_metrics", "else", ":", "# The metadata cache contains only metrics of the desired level, so use it to filter the metrics to keep", "return", "[", "vim", ".", "PerformanceManager", ".", "MetricId", "(", "counterId", "=", "counter_id", ",", "instance", "=", "\"*\"", ")", "for", "counter_id", "in", "available_metrics", "if", "self", ".", "metadata_cache", ".", "contains", "(", "i_key", ",", "counter_id", ")", "]" ]
48.966667
22.033333
def rover_lat_accel(VFR_HUD, SERVO_OUTPUT_RAW):
    '''return lateral acceleration in m/s/s'''
    # a_lat = omega * v, with the yaw rate converted from degrees to radians.
    yaw_rate_rad = radians(rover_yaw_rate(VFR_HUD, SERVO_OUTPUT_RAW))
    return yaw_rate_rad * VFR_HUD.groundspeed
[ "def", "rover_lat_accel", "(", "VFR_HUD", ",", "SERVO_OUTPUT_RAW", ")", ":", "speed", "=", "VFR_HUD", ".", "groundspeed", "yaw_rate", "=", "rover_yaw_rate", "(", "VFR_HUD", ",", "SERVO_OUTPUT_RAW", ")", "accel", "=", "radians", "(", "yaw_rate", ")", "*", "speed", "return", "accel" ]
38.833333
10.833333
def setdefault(pb_or_dict, key, value):
    """Set the key on the object to the value if the current value is falsy.

    Because protobuf Messages do not distinguish between unset values and
    falsy ones particularly well, this method treats any falsy value
    (e.g. 0, empty list) as a target to be overwritten, on both Messages
    and dictionaries.

    Args:
        pb_or_dict (Union[~google.protobuf.message.Message, Mapping]): the object.
        key (str): The key on the object in question.
        value (Any): The value to set.

    Raises:
        TypeError: If pb_or_dict is not a Message or Mapping.
    """
    # NOTE: ``get`` and ``set`` are this module's protobuf-aware helpers,
    # not the builtins.
    current = get(pb_or_dict, key, default=None)
    if not current:
        set(pb_or_dict, key, value)
[ "def", "setdefault", "(", "pb_or_dict", ",", "key", ",", "value", ")", ":", "if", "not", "get", "(", "pb_or_dict", ",", "key", ",", "default", "=", "None", ")", ":", "set", "(", "pb_or_dict", ",", "key", ",", "value", ")" ]
36.947368
20.789474
def get_version(self, extra=None):
    """
    Return a string usable as a prefix for django's cache key, e.g.
    ``key.1`` or ``key.1.2``.

    If a version was not found, ``1`` will be stored and returned as the
    number for that key.

    :param extra: the minor version to get. When given, a version is
        returned for that value; otherwise the major version is returned.
        Defaults to None.
    """
    if extra:
        key = self._get_extra_key(extra)
    else:
        key = self.key
    v = self._get_cache(key).get(key)
    # Identity check, not equality: only a true cache miss (None) should
    # trigger initialization of the version counter.
    if v is None:
        v = self._increment_version(extra=extra)
    return "%s.%s" % (key, v)
[ "def", "get_version", "(", "self", ",", "extra", "=", "None", ")", ":", "if", "extra", ":", "key", "=", "self", ".", "_get_extra_key", "(", "extra", ")", "else", ":", "key", "=", "self", ".", "key", "v", "=", "self", ".", "_get_cache", "(", "key", ")", ".", "get", "(", "key", ")", "if", "v", "==", "None", ":", "v", "=", "self", ".", "_increment_version", "(", "extra", "=", "extra", ")", "return", "\"%s.%s\"", "%", "(", "key", ",", "v", ")" ]
29.791667
20.875
def render_to_string(template, extra=None):
    """
    Render the given Jinja2 template to a string.

    :param template: template name, resolved via ``find_template``.
    :param extra: optional dict of values layered on top of the module
        level ``env`` context.
    :return: the rendered content, with HTML-encoded ``&quot;`` entities
        replaced by literal double quotes.
    """
    from jinja2 import Template
    extra = extra or {}
    final_fqfn = find_template(template)
    assert final_fqfn, 'Template not found: %s' % template
    # Read through a context manager so the file handle is closed promptly
    # instead of leaking until garbage collection.
    with open(final_fqfn, 'r') as fh:
        template_content = fh.read()
    t = Template(template_content)
    if extra:
        context = env.copy()
        context.update(extra)
    else:
        context = env
    rendered_content = t.render(**context)
    rendered_content = rendered_content.replace('&quot;', '"')
    return rendered_content
[ "def", "render_to_string", "(", "template", ",", "extra", "=", "None", ")", ":", "from", "jinja2", "import", "Template", "extra", "=", "extra", "or", "{", "}", "final_fqfn", "=", "find_template", "(", "template", ")", "assert", "final_fqfn", ",", "'Template not found: %s'", "%", "template", "template_content", "=", "open", "(", "final_fqfn", ",", "'r'", ")", ".", "read", "(", ")", "t", "=", "Template", "(", "template_content", ")", "if", "extra", ":", "context", "=", "env", ".", "copy", "(", ")", "context", ".", "update", "(", "extra", ")", "else", ":", "context", "=", "env", "rendered_content", "=", "t", ".", "render", "(", "*", "*", "context", ")", "rendered_content", "=", "rendered_content", ".", "replace", "(", "'&quot;'", ",", "'\"'", ")", "return", "rendered_content" ]
31.555556
11.333333
async def lookup_session(self, topic_name):
    """
    Attempt to find the session id for a given topic and log the result.

    See http://crossbar.io/docs/Subscription-Meta-Events-and-Procedures/
    """
    result = await self.call("wamp.subscription.lookup", topic_name)
    self.log.info(result)
[ "async", "def", "lookup_session", "(", "self", ",", "topic_name", ")", ":", "res", "=", "await", "self", ".", "call", "(", "\"wamp.subscription.lookup\"", ",", "topic_name", ")", "self", ".", "log", ".", "info", "(", "res", ")" ]
36.125
16.875
def decrypt_file(filename, set_env=True, override_env=False):
    """
    Decrypts a JSON file containing encrypted secrets. This file should contain an object mapping the key names to
    encrypted secrets. This encrypted file can be created using `credkeep.encrypt_file` or the commandline utility.

    :param filename: filename of the JSON file
    :param set_env: If True, an environment variable representing the key is created.
    :param override_env: If True, an existing environment variable with the same key name will be overridden with the
        new decrypted value. If False, the environment variable will not be set.
    :return: Dict containing the decrypted keys
    """
    # Close the file handle deterministically instead of leaking it.
    with open(filename) as fh:
        data = json.load(fh)
    results = {}
    # dict.items(): `iteritems()` is Python-2-only and raises AttributeError on Python 3.
    for key, v in data.items():
        v_decrypt = decrypt_secret(v)
        results[key] = v_decrypt
        if set_env:
            if key in os.environ and not override_env:
                # Skip only this key; the previous `break` silently stopped
                # exporting every remaining key, contradicting the docstring.
                continue
            os.environ[str(key)] = v_decrypt
    return results
[ "def", "decrypt_file", "(", "filename", ",", "set_env", "=", "True", ",", "override_env", "=", "False", ")", ":", "data", "=", "json", ".", "load", "(", "open", "(", "filename", ")", ")", "results", "=", "{", "}", "for", "key", ",", "v", "in", "data", ".", "iteritems", "(", ")", ":", "v_decrypt", "=", "decrypt_secret", "(", "v", ")", "results", "[", "key", "]", "=", "v_decrypt", "if", "set_env", ":", "if", "key", "in", "os", ".", "environ", "and", "not", "override_env", ":", "break", "os", ".", "environ", "[", "str", "(", "key", ")", "]", "=", "v_decrypt", "return", "results" ]
39.68
26.8
def _clean_pivot_attributes(self, model): """ Get the pivot attributes from a model. :type model: eloquent.Model """ values = {} delete_keys = [] for key, value in model.get_attributes().items(): if key.find('pivot_') == 0: values[key[6:]] = value delete_keys.append(key) for key in delete_keys: delattr(model, key) return values
[ "def", "_clean_pivot_attributes", "(", "self", ",", "model", ")", ":", "values", "=", "{", "}", "delete_keys", "=", "[", "]", "for", "key", ",", "value", "in", "model", ".", "get_attributes", "(", ")", ".", "items", "(", ")", ":", "if", "key", ".", "find", "(", "'pivot_'", ")", "==", "0", ":", "values", "[", "key", "[", "6", ":", "]", "]", "=", "value", "delete_keys", ".", "append", "(", "key", ")", "for", "key", "in", "delete_keys", ":", "delattr", "(", "model", ",", "key", ")", "return", "values" ]
23.368421
16.105263
def intermediates(self):
    """
    A list of asn1crypto.x509.Certificate objects that were presented as
    intermediates by the server
    """
    # A missing session context means the connection was closed.
    if self._session_context is None:
        self._raise_closed()
    # Lazily fetch the certificate chain on first access.
    if self._certificate is None:
        self._read_certificates()
    return self._intermediates
[ "def", "intermediates", "(", "self", ")", ":", "if", "self", ".", "_session_context", "is", "None", ":", "self", ".", "_raise_closed", "(", ")", "if", "self", ".", "_certificate", "is", "None", ":", "self", ".", "_read_certificates", "(", ")", "return", "self", ".", "_intermediates" ]
26
15.230769
# PLY parser rule: the docstring below IS the grammar production
# ("paramlist : ID") and must not be edited.  The action wraps the single
# matched identifier into a one-element list of ID nodes, recording the
# token's line number and the current source file.
def p_paramlist_single(p): """ paramlist : ID """ p[0] = [ID(p[1], value='', args=None, lineno=p.lineno(1), fname=CURRENT_FILE[-1])]
[ "def", "p_paramlist_single", "(", "p", ")", ":", "p", "[", "0", "]", "=", "[", "ID", "(", "p", "[", "1", "]", ",", "value", "=", "''", ",", "args", "=", "None", ",", "lineno", "=", "p", ".", "lineno", "(", "1", ")", ",", "fname", "=", "CURRENT_FILE", "[", "-", "1", "]", ")", "]" ]
31
7.2
# Converts every <disp-formula> element in the parsed document into
# conforming display HTML.  Formulas without a <graphic> child become a
# <span class="disp-formula"> built from the formula's children; formulas
# with a <graphic> become an <img> whose src is derived from the graphic's
# xlink:href under images-<doi-suffix>/.  In both cases the id attribute is
# carried over, any <label> is re-tagged as <b> and inserted first, and the
# original <disp-formula> element is removed.
def convert_disp_formula_elements(self): """ <disp-formula> elements must be converted to conforming elements """ for disp in self.main.getroot().findall('.//disp-formula'): #find label element label_el = disp.find('label') graphic_el = disp.find('graphic') if graphic_el is None: # No graphic, assume math as text instead text_span = etree.Element('span', {'class': 'disp-formula'}) if 'id' in disp.attrib: text_span.attrib['id'] = disp.attrib['id'] append_all_below(text_span, disp) #Insert the text span before the disp-formula insert_before(disp, text_span) #If a label exists, modify and insert before text_span if label_el is not None: label_el.tag = 'b' insert_before(text_span, label_el) #Remove the disp-formula remove(disp) #Skip the rest, which deals with the graphic element continue #The graphic element is present #Create a file reference for the image xlink_href = ns_format(graphic_el, 'xlink:href') graphic_xlink_href = graphic_el.attrib[xlink_href] file_name = graphic_xlink_href.split('.')[-1] + '.png' img_dir = 'images-' + self.doi_suffix() img_path = '/'.join([img_dir, file_name]) #Create the img element img_element = etree.Element('img', {'alt': 'A Display Formula', 'class': 'disp-formula', 'src': img_path}) #Transfer the id attribute if 'id' in disp.attrib: img_element.attrib['id'] = disp.attrib['id'] #Insert the img element insert_before(disp, img_element) #Create content for the label if label_el is not None: label_el.tag = 'b' insert_before(img_element, label_el) #Remove the old disp-formula element remove(disp)
[ "def", "convert_disp_formula_elements", "(", "self", ")", ":", "for", "disp", "in", "self", ".", "main", ".", "getroot", "(", ")", ".", "findall", "(", "'.//disp-formula'", ")", ":", "#find label element", "label_el", "=", "disp", ".", "find", "(", "'label'", ")", "graphic_el", "=", "disp", ".", "find", "(", "'graphic'", ")", "if", "graphic_el", "is", "None", ":", "# No graphic, assume math as text instead", "text_span", "=", "etree", ".", "Element", "(", "'span'", ",", "{", "'class'", ":", "'disp-formula'", "}", ")", "if", "'id'", "in", "disp", ".", "attrib", ":", "text_span", ".", "attrib", "[", "'id'", "]", "=", "disp", ".", "attrib", "[", "'id'", "]", "append_all_below", "(", "text_span", ",", "disp", ")", "#Insert the text span before the disp-formula", "insert_before", "(", "disp", ",", "text_span", ")", "#If a label exists, modify and insert before text_span", "if", "label_el", "is", "not", "None", ":", "label_el", ".", "tag", "=", "'b'", "insert_before", "(", "text_span", ",", "label_el", ")", "#Remove the disp-formula", "remove", "(", "disp", ")", "#Skip the rest, which deals with the graphic element", "continue", "#The graphic element is present", "#Create a file reference for the image", "xlink_href", "=", "ns_format", "(", "graphic_el", ",", "'xlink:href'", ")", "graphic_xlink_href", "=", "graphic_el", ".", "attrib", "[", "xlink_href", "]", "file_name", "=", "graphic_xlink_href", ".", "split", "(", "'.'", ")", "[", "-", "1", "]", "+", "'.png'", "img_dir", "=", "'images-'", "+", "self", ".", "doi_suffix", "(", ")", "img_path", "=", "'/'", ".", "join", "(", "[", "img_dir", ",", "file_name", "]", ")", "#Create the img element", "img_element", "=", "etree", ".", "Element", "(", "'img'", ",", "{", "'alt'", ":", "'A Display Formula'", ",", "'class'", ":", "'disp-formula'", ",", "'src'", ":", "img_path", "}", ")", "#Transfer the id attribute", "if", "'id'", "in", "disp", ".", "attrib", ":", "img_element", ".", "attrib", "[", "'id'", "]", "=", "disp", ".", 
"attrib", "[", "'id'", "]", "#Insert the img element", "insert_before", "(", "disp", ",", "img_element", ")", "#Create content for the label", "if", "label_el", "is", "not", "None", ":", "label_el", ".", "tag", "=", "'b'", "insert_before", "(", "img_element", ",", "label_el", ")", "#Remove the old disp-formula element", "remove", "(", "disp", ")" ]
47.152174
13.76087
def _resource_deletion(resource):
    """Recalculate consumption details for a deleted resource.

    Resources whose class is not registered for cost tracking are ignored;
    otherwise the price estimate is updated with an empty configuration and
    its details are re-initialized.
    """
    if resource.__class__ not in CostTrackingRegister.registered_resources:
        return
    # An empty configuration reflects that the resource no longer consumes anything.
    estimate = models.PriceEstimate.update_resource_estimate(resource, {})
    estimate.init_details()
[ "def", "_resource_deletion", "(", "resource", ")", ":", "if", "resource", ".", "__class__", "not", "in", "CostTrackingRegister", ".", "registered_resources", ":", "return", "new_configuration", "=", "{", "}", "price_estimate", "=", "models", ".", "PriceEstimate", ".", "update_resource_estimate", "(", "resource", ",", "new_configuration", ")", "price_estimate", ".", "init_details", "(", ")" ]
49.285714
20.571429
def default_subprocess_runner(cmd, cwd=None, extra_environ=None):
    """The default method of calling the wrapper subprocess.

    Runs ``cmd`` with a copy of the current environment, optionally
    augmented by ``extra_environ``, and raises if it exits non-zero.
    """
    env = dict(os.environ)
    env.update(extra_environ or {})
    check_call(cmd, cwd=cwd, env=env)
[ "def", "default_subprocess_runner", "(", "cmd", ",", "cwd", "=", "None", ",", "extra_environ", "=", "None", ")", ":", "env", "=", "os", ".", "environ", ".", "copy", "(", ")", "if", "extra_environ", ":", "env", ".", "update", "(", "extra_environ", ")", "check_call", "(", "cmd", ",", "cwd", "=", "cwd", ",", "env", "=", "env", ")" ]
35.142857
15.285714
def task(self, total: int, name=None, message=None):
    """Generator that brackets wrapped code with begin()/done() on this monitor.

    ``begin`` is called before yielding the monitor, and ``done`` is always
    called afterwards, even if the wrapped code raises.
    """
    self.begin(total, name, message)
    try:
        yield self
    finally:
        self.done()
[ "def", "task", "(", "self", ",", "total", ":", "int", ",", "name", "=", "None", ",", "message", "=", "None", ")", ":", "self", ".", "begin", "(", "total", ",", "name", ",", "message", ")", "try", ":", "yield", "self", "finally", ":", "self", ".", "done", "(", ")" ]
32.857143
14.142857
def __is_current(filepath):
    '''Checks whether file is current'''
    # With auto-updates disabled, any existing copy counts as current.
    if not __DOWNLOAD_PARAMS['auto_update']:
        return True
    if not os.path.isfile(filepath):
        return False
    mtime = datetime.datetime.utcfromtimestamp(os.path.getmtime(filepath))
    return mtime > __get_last_update_time()
[ "def", "__is_current", "(", "filepath", ")", ":", "if", "not", "__DOWNLOAD_PARAMS", "[", "'auto_update'", "]", ":", "return", "True", "if", "not", "os", ".", "path", ".", "isfile", "(", "filepath", ")", ":", "return", "False", "return", "datetime", ".", "datetime", ".", "utcfromtimestamp", "(", "os", ".", "path", ".", "getmtime", "(", "filepath", ")", ")", ">", "__get_last_update_time", "(", ")" ]
29.5
18.3
def experiments_predictions_get(self, resource_url):
    """Get handle for model run resource at given Url.

    Parameters
    ----------
    resource_url : string
        Url for model run resource at SCO-API

    Returns
    -------
    scoserv.ModelRunHandle
        Handle for local copy of model run resource
    """
    # Resource directory, Json representation, active flag, and cache id.
    obj_dir, obj_json, _, cache_id = self.get_object(resource_url)
    # Constructing the handle raises if the resource is neither cached
    # locally nor downloadable.
    handle = ModelRunHandle(obj_json, obj_dir, self)
    # Register the resource in the cache on first sight.
    if cache_id not in self.cache:
        self.cache_add(resource_url, cache_id)
    return handle
[ "def", "experiments_predictions_get", "(", "self", ",", "resource_url", ")", ":", "# Get resource directory, Json representation, active flag, and cache id", "obj_dir", ",", "obj_json", ",", "is_active", ",", "cache_id", "=", "self", ".", "get_object", "(", "resource_url", ")", "# Create model run handle. Will raise an exception if resource is not", "# in cache and cannot be downloaded.", "run", "=", "ModelRunHandle", "(", "obj_json", ",", "obj_dir", ",", "self", ")", "# Add resource to cache if not exists", "if", "not", "cache_id", "in", "self", ".", "cache", ":", "self", ".", "cache_add", "(", "resource_url", ",", "cache_id", ")", "# Return model run handle", "return", "run" ]
37.652174
17.782609
def natural_name(self) -> str:
    """Valid python identifier representation of the expression."""
    ident = self.expression.strip()
    # Replace each operator symbol with its identifier-safe spelling.
    for symbol in operators:
        ident = ident.replace(symbol, operator_to_identifier[symbol])
    return wt_kit.string2identifier(ident)
[ "def", "natural_name", "(", "self", ")", "->", "str", ":", "name", "=", "self", ".", "expression", ".", "strip", "(", ")", "for", "op", "in", "operators", ":", "name", "=", "name", ".", "replace", "(", "op", ",", "operator_to_identifier", "[", "op", "]", ")", "return", "wt_kit", ".", "string2identifier", "(", "name", ")" ]
45.666667
8.666667
def find_ab_params(spread, min_dist):
    """Fit a, b params for the differentiable curve used in lower
    dimensional fuzzy simplicial complex construction. We want the smooth
    curve (from a pre-defined family with simple gradient) that best
    matches an offset exponential decay.
    """

    def curve(x, a, b):
        return 1.0 / (1.0 + a * x ** (2 * b))

    # Sample the target shape: 1 inside min_dist, exponential decay beyond.
    xv = np.linspace(0, spread * 3, 300)
    yv = np.zeros(xv.shape)
    yv[xv < min_dist] = 1.0
    yv[xv >= min_dist] = np.exp(-(xv[xv >= min_dist] - min_dist) / spread)
    (a, b), _ = curve_fit(curve, xv, yv)
    return a, b
[ "def", "find_ab_params", "(", "spread", ",", "min_dist", ")", ":", "def", "curve", "(", "x", ",", "a", ",", "b", ")", ":", "return", "1.0", "/", "(", "1.0", "+", "a", "*", "x", "**", "(", "2", "*", "b", ")", ")", "xv", "=", "np", ".", "linspace", "(", "0", ",", "spread", "*", "3", ",", "300", ")", "yv", "=", "np", ".", "zeros", "(", "xv", ".", "shape", ")", "yv", "[", "xv", "<", "min_dist", "]", "=", "1.0", "yv", "[", "xv", ">=", "min_dist", "]", "=", "np", ".", "exp", "(", "-", "(", "xv", "[", "xv", ">=", "min_dist", "]", "-", "min_dist", ")", "/", "spread", ")", "params", ",", "covar", "=", "curve_fit", "(", "curve", ",", "xv", ",", "yv", ")", "return", "params", "[", "0", "]", ",", "params", "[", "1", "]" ]
37.5625
14.9375
def pdf2png(file_in, file_out):
    """
    Uses `ImageMagick <http://www.imagemagick.org/>`_ to convert an input
    *file_in* pdf to a *file_out* png. (Untested with other formats.)

    Parameters
    ----------
    file_in : str
        The path to the pdf file to be converted.
    file_out : str
        The path to the png file to be written.
    """
    cmd = 'convert -display 37.5 {} -resize 600 -append {}'.format(file_in, file_out)
    argv = _shlex.split(cmd)
    _subprocess.call(argv)
[ "def", "pdf2png", "(", "file_in", ",", "file_out", ")", ":", "command", "=", "'convert -display 37.5 {} -resize 600 -append {}'", ".", "format", "(", "file_in", ",", "file_out", ")", "_subprocess", ".", "call", "(", "_shlex", ".", "split", "(", "command", ")", ")" ]
33.928571
25.214286
def create_application(self, team_id, name, url=None):
    """
    Creates an application under a given team.

    :param team_id: Team identifier.
    :param name: The name of the new application being created.
    :param url: The url of where the application is located.
    """
    params = {'name': name}
    if url:
        params['url'] = url
    endpoint = 'rest/teams/' + str(team_id) + '/applications/new'
    return self._request('POST', endpoint, params)
[ "def", "create_application", "(", "self", ",", "team_id", ",", "name", ",", "url", "=", "None", ")", ":", "params", "=", "{", "'name'", ":", "name", "}", "if", "url", ":", "params", "[", "'url'", "]", "=", "url", "return", "self", ".", "_request", "(", "'POST'", ",", "'rest/teams/'", "+", "str", "(", "team_id", ")", "+", "'/applications/new'", ",", "params", ")" ]
42.727273
15.818182
def delete(self):
    """
    Remove the document and all of its bundles from ProvStore.

    .. warning::
       Cannot be undone.

    :raises AbstractDocumentException: if this document is abstract.
    :return: True on success.
    """
    if self.abstract:
        raise AbstractDocumentException()
    # Delete remotely first, then drop the local id so this handle no
    # longer refers to a stored document.
    self._api.delete_document(self.id)
    self._id = None
    return True
[ "def", "delete", "(", "self", ")", ":", "if", "self", ".", "abstract", ":", "raise", "AbstractDocumentException", "(", ")", "self", ".", "_api", ".", "delete_document", "(", "self", ".", "id", ")", "self", ".", "_id", "=", "None", "return", "True" ]
21.928571
18.642857
# Parses a tblmd/MDCORE atoms.dat file into an ase.Atoms object (pbc=True).
# Layout: lines starting with '#' or '<' are skipped as headers; then an
# atom count; then one record per atom where s[0] is the element symbol or
# atomic number, s[2:5] are x/y/z (scaled by `conv`), s[5] is the group,
# s[6] is gamma and s[7] is T.  Unless pos_only, named trailing sections
# are then read: VELOCITIES (stored as momenta using the atom mass),
# FORCES, CHARGES, CELL / BOX VECTORS, or any other per-atom array stored
# via set_array under the section's name.  The "forces", "groups", "gamma"
# and "T" arrays are always attached; if `cycfn` is given, read_cyc() is
# applied last before returning.
# NOTE(review): the bare `except:` around int(s[0]) also swallows
# unrelated errors — consider narrowing to `except ValueError`.
def read_atoms(fn, cycfn=None, pos_only=False, conv=1.0): """ Read atom information from an atoms.dat file (i.e., tblmd, MDCORE input file) """ f = paropen(fn, "r") l = f.readline().lstrip() while len(l) > 0 and ( l[0] == '#' or l[0] == '<' ): l = f.readline().lstrip() n_atoms = int(l) l = f.readline().lstrip() while len(l) > 0 and ( l[0] == '#' or l[0] == '<' ): l = f.readline().lstrip() l = f.readline().lstrip() while len(l) > 0 and ( l[0] == '#' or l[0] == '<' ): l = f.readline().lstrip() # # Read positions # forces = np.zeros( [ n_atoms, 3 ] ) groups = np.zeros( [ n_atoms ] ) gamma = np.zeros( [ n_atoms ] ) T = np.zeros( [ n_atoms ] ) ats = [ ] for i in range(n_atoms): s = l.split() # type x y z sym = None try: Z = int(s[0]) sym = ase.data.chemical_symbols[Z] except: sym = s[0] a = ase.Atom(sym, ( float(s[2])*conv, float(s[3])*conv, float(s[4])*conv ) ) groups[i] = int(s[5]) gamma[i] = float(s[6]) T[i] = float(s[7]) ats += [ a ] l = f.readline() this = ase.Atoms(ats, pbc=True) if not pos_only: while l and l == "": l = f.readline().strip() while l: key = l.strip(" <-#\r\n") if key.upper() == "VELOCITIES": for i in range(n_atoms): s = f.readline().split() m = this[i].mass if m is None: m = ase.data.atomic_masses[ase.data.chemical_symbols.index(this[i].symbol)] this[i].momentum = ( m*float(s[0]), m*float(s[1]), m*float(s[2]) ) l = None elif key.upper() == "FORCES": for i in range(n_atoms): s = f.readline().split() forces[i] = np.array( [ float(s[0]), float(s[1]), float(s[2]) ] ) l = None elif key.upper() == "CHARGES": for i in this: l = f.readline() if l and len(l.split()) == 1: i.charge = float(l) l = None elif key.upper() == "CELL" or key.upper().split()[0:2] == ("BOX", "VECTORS" ): l1 = f.readline() l2 = f.readline() l3 = f.readline() this.set_cell( [ [float(x) for x in l1.split()], [float(x) for x in l2.split()], [float(x) for x in l3.split()] ] ) l = None else: aux = [ ] l = f.readline().strip() while l and l[0] not in [ '<', '#' ]: s = l.split() aux += [ 
[float(x) for x in s] ] l = f.readline().strip() if len(aux) == n_atoms: this.set_array(key, np.asarray(aux)) else: print("Warning: Encountered field '%s' which does not seem to be per-atom data." % key) if l is None: l = f.readline().strip() while l and l == "": l = f.readline().strip() f.close() this.set_array("forces", forces) this.set_array("groups", groups) this.set_array("gamma", gamma) this.set_array("T", T) if cycfn: read_cyc(this, cycfn, conv=conv) return this
[ "def", "read_atoms", "(", "fn", ",", "cycfn", "=", "None", ",", "pos_only", "=", "False", ",", "conv", "=", "1.0", ")", ":", "f", "=", "paropen", "(", "fn", ",", "\"r\"", ")", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "while", "len", "(", "l", ")", ">", "0", "and", "(", "l", "[", "0", "]", "==", "'#'", "or", "l", "[", "0", "]", "==", "'<'", ")", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "n_atoms", "=", "int", "(", "l", ")", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "while", "len", "(", "l", ")", ">", "0", "and", "(", "l", "[", "0", "]", "==", "'#'", "or", "l", "[", "0", "]", "==", "'<'", ")", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "while", "len", "(", "l", ")", ">", "0", "and", "(", "l", "[", "0", "]", "==", "'#'", "or", "l", "[", "0", "]", "==", "'<'", ")", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "lstrip", "(", ")", "#", "# Read positions", "#", "forces", "=", "np", ".", "zeros", "(", "[", "n_atoms", ",", "3", "]", ")", "groups", "=", "np", ".", "zeros", "(", "[", "n_atoms", "]", ")", "gamma", "=", "np", ".", "zeros", "(", "[", "n_atoms", "]", ")", "T", "=", "np", ".", "zeros", "(", "[", "n_atoms", "]", ")", "ats", "=", "[", "]", "for", "i", "in", "range", "(", "n_atoms", ")", ":", "s", "=", "l", ".", "split", "(", ")", "# type x y z", "sym", "=", "None", "try", ":", "Z", "=", "int", "(", "s", "[", "0", "]", ")", "sym", "=", "ase", ".", "data", ".", "chemical_symbols", "[", "Z", "]", "except", ":", "sym", "=", "s", "[", "0", "]", "a", "=", "ase", ".", "Atom", "(", "sym", ",", "(", "float", "(", "s", "[", "2", "]", ")", "*", "conv", ",", "float", "(", "s", "[", "3", "]", ")", "*", "conv", ",", "float", "(", "s", "[", "4", "]", ")", "*", "conv", ")", ")", "groups", "[", "i", "]", "=", "int", "(", "s", "[", "5", "]", ")", "gamma", "[", "i", "]", "=", "float", "(", "s", "[", 
"6", "]", ")", "T", "[", "i", "]", "=", "float", "(", "s", "[", "7", "]", ")", "ats", "+=", "[", "a", "]", "l", "=", "f", ".", "readline", "(", ")", "this", "=", "ase", ".", "Atoms", "(", "ats", ",", "pbc", "=", "True", ")", "if", "not", "pos_only", ":", "while", "l", "and", "l", "==", "\"\"", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "while", "l", ":", "key", "=", "l", ".", "strip", "(", "\" <-#\\r\\n\"", ")", "if", "key", ".", "upper", "(", ")", "==", "\"VELOCITIES\"", ":", "for", "i", "in", "range", "(", "n_atoms", ")", ":", "s", "=", "f", ".", "readline", "(", ")", ".", "split", "(", ")", "m", "=", "this", "[", "i", "]", ".", "mass", "if", "m", "is", "None", ":", "m", "=", "ase", ".", "data", ".", "atomic_masses", "[", "ase", ".", "data", ".", "chemical_symbols", ".", "index", "(", "this", "[", "i", "]", ".", "symbol", ")", "]", "this", "[", "i", "]", ".", "momentum", "=", "(", "m", "*", "float", "(", "s", "[", "0", "]", ")", ",", "m", "*", "float", "(", "s", "[", "1", "]", ")", ",", "m", "*", "float", "(", "s", "[", "2", "]", ")", ")", "l", "=", "None", "elif", "key", ".", "upper", "(", ")", "==", "\"FORCES\"", ":", "for", "i", "in", "range", "(", "n_atoms", ")", ":", "s", "=", "f", ".", "readline", "(", ")", ".", "split", "(", ")", "forces", "[", "i", "]", "=", "np", ".", "array", "(", "[", "float", "(", "s", "[", "0", "]", ")", ",", "float", "(", "s", "[", "1", "]", ")", ",", "float", "(", "s", "[", "2", "]", ")", "]", ")", "l", "=", "None", "elif", "key", ".", "upper", "(", ")", "==", "\"CHARGES\"", ":", "for", "i", "in", "this", ":", "l", "=", "f", ".", "readline", "(", ")", "if", "l", "and", "len", "(", "l", ".", "split", "(", ")", ")", "==", "1", ":", "i", ".", "charge", "=", "float", "(", "l", ")", "l", "=", "None", "elif", "key", ".", "upper", "(", ")", "==", "\"CELL\"", "or", "key", ".", "upper", "(", ")", ".", "split", "(", ")", "[", "0", ":", "2", "]", "==", "(", "\"BOX\"", ",", "\"VECTORS\"", ")", ":", "l1", "=", "f", 
".", "readline", "(", ")", "l2", "=", "f", ".", "readline", "(", ")", "l3", "=", "f", ".", "readline", "(", ")", "this", ".", "set_cell", "(", "[", "[", "float", "(", "x", ")", "for", "x", "in", "l1", ".", "split", "(", ")", "]", ",", "[", "float", "(", "x", ")", "for", "x", "in", "l2", ".", "split", "(", ")", "]", ",", "[", "float", "(", "x", ")", "for", "x", "in", "l3", ".", "split", "(", ")", "]", "]", ")", "l", "=", "None", "else", ":", "aux", "=", "[", "]", "l", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "while", "l", "and", "l", "[", "0", "]", "not", "in", "[", "'<'", ",", "'#'", "]", ":", "s", "=", "l", ".", "split", "(", ")", "aux", "+=", "[", "[", "float", "(", "x", ")", "for", "x", "in", "s", "]", "]", "l", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "if", "len", "(", "aux", ")", "==", "n_atoms", ":", "this", ".", "set_array", "(", "key", ",", "np", ".", "asarray", "(", "aux", ")", ")", "else", ":", "print", "(", "\"Warning: Encountered field '%s' which does not seem to be per-atom data.\"", "%", "key", ")", "if", "l", "is", "None", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "while", "l", "and", "l", "==", "\"\"", ":", "l", "=", "f", ".", "readline", "(", ")", ".", "strip", "(", ")", "f", ".", "close", "(", ")", "this", ".", "set_array", "(", "\"forces\"", ",", "forces", ")", "this", ".", "set_array", "(", "\"groups\"", ",", "groups", ")", "this", ".", "set_array", "(", "\"gamma\"", ",", "gamma", ")", "this", ".", "set_array", "(", "\"T\"", ",", "T", ")", "if", "cycfn", ":", "read_cyc", "(", "this", ",", "cycfn", ",", "conv", "=", "conv", ")", "return", "this" ]
28.300813
20.593496
def fetch_request_ids(item_ids, cls, attr_name, verification_list=None):
    """Return a list of cls instances for all the ids provided in item_ids.

    :param item_ids: The list of ids to fetch objects for
    :param cls: The class to fetch the ids from
    :param attr_name: The name of the attribute for exception purposes
    :param verification_list: If provided, a list of acceptable instances

    Raise InvalidId exception using attr_name if any do not exist, or are
    not present in the verification_list.
    """
    if not item_ids:
        return []
    fetched = []
    for one_id in item_ids:
        obj = cls.fetch_by_id(one_id)
        # Reject missing objects outright.
        if not obj:
            raise InvalidId(attr_name)
        # When a whitelist was supplied, the object must appear in it.
        if verification_list is not None and obj not in verification_list:
            raise InvalidId(attr_name)
        fetched.append(obj)
    return fetched
[ "def", "fetch_request_ids", "(", "item_ids", ",", "cls", ",", "attr_name", ",", "verification_list", "=", "None", ")", ":", "if", "not", "item_ids", ":", "return", "[", "]", "items", "=", "[", "]", "for", "item_id", "in", "item_ids", ":", "item", "=", "cls", ".", "fetch_by_id", "(", "item_id", ")", "if", "not", "item", "or", "(", "verification_list", "is", "not", "None", "and", "item", "not", "in", "verification_list", ")", ":", "raise", "InvalidId", "(", "attr_name", ")", "items", ".", "append", "(", "item", ")", "return", "items" ]
37.681818
19.590909
def add_result(self, result):
    """Record *result* for a finished job and decrement the active count.

    Results arriving after the job set has already completed are silently
    discarded. When the last active job reports in, the completion
    callback fires.
    """
    remaining = self._active_jobs
    if remaining == 0:
        # Job set already complete; drop late results.
        return
    self._results.add(result)
    remaining -= 1
    self._active_jobs = remaining
    if remaining == 0:
        self._done()
[ "def", "add_result", "(", "self", ",", "result", ")", ":", "if", "self", ".", "_active_jobs", "==", "0", ":", "return", "self", ".", "_results", ".", "add", "(", "result", ")", "self", ".", "_active_jobs", "-=", "1", "if", "self", ".", "_active_jobs", "==", "0", ":", "self", ".", "_done", "(", ")" ]
29.571429
17.285714
def _parse_chance(self, element):
    """
    Parse a chance element

    :param element: The XML Element object
    :type  element: etree._Element
    """
    # element.text may be missing/None or non-numeric; all of those are
    # reported and skipped rather than raised.
    try:
        percent = float(element.text)
    except (ValueError, TypeError, AttributeError):
        self._log.warn('Invalid Chance string: {chance}'.format(chance=element.text))
        return

    # Only values in the 0-100 percentage range are accepted.
    if not (0 <= percent <= 100):
        self._log.warn('Chance percent must contain an integer or float between 0 and 100')
        return

    self.chance = percent
    self.chance_blocking = bool_attribute(element, 'blocking', self.chance_blocking)
[ "def", "_parse_chance", "(", "self", ",", "element", ")", ":", "try", ":", "chance", "=", "float", "(", "element", ".", "text", ")", "except", "(", "ValueError", ",", "TypeError", ",", "AttributeError", ")", ":", "self", ".", "_log", ".", "warn", "(", "'Invalid Chance string: {chance}'", ".", "format", "(", "chance", "=", "element", ".", "text", ")", ")", "return", "# Make sure the chance is a valid percentage", "if", "not", "(", "0", "<=", "chance", "<=", "100", ")", ":", "self", ".", "_log", ".", "warn", "(", "'Chance percent must contain an integer or float between 0 and 100'", ")", "return", "self", ".", "chance", "=", "chance", "self", ".", "chance_blocking", "=", "bool_attribute", "(", "element", ",", "'blocking'", ",", "self", ".", "chance_blocking", ")" ]
36.842105
19.578947
def LargestComponent(self):
    """
    Returns (i, val) where i is the component index (0 - 2) which has
    largest absolute value and val is the value of the component.

    Ties are resolved in favor of the higher index (z over y over x).
    """
    ax, ay, az = abs(self.x), abs(self.y), abs(self.z)
    if ax > ay and ax > az:
        return (0, self.x)
    if ax <= ay and ay > az:
        return (1, self.y)
    return (2, self.z)
[ "def", "LargestComponent", "(", "self", ")", ":", "if", "abs", "(", "self", ".", "x", ")", ">", "abs", "(", "self", ".", "y", ")", ":", "if", "abs", "(", "self", ".", "x", ")", ">", "abs", "(", "self", ".", "z", ")", ":", "return", "(", "0", ",", "self", ".", "x", ")", "else", ":", "return", "(", "2", ",", "self", ".", "z", ")", "else", ":", "if", "abs", "(", "self", ".", "y", ")", ">", "abs", "(", "self", ".", "z", ")", ":", "return", "(", "1", ",", "self", ".", "y", ")", "else", ":", "return", "(", "2", ",", "self", ".", "z", ")" ]
26
14.375
def parse(
    source: SourceType, no_location=False, experimental_fragment_variables=False
) -> DocumentNode:
    """Parse a GraphQL source (string or Source object) into a Document AST.

    Raises GraphQLError when the source contains a syntax error.

    AST nodes normally record the source location they came from; pass
    `no_location=True` to disable that, e.g. for performance or testing.

    Experimental: when `experimental_fragment_variables` is True, variable
    definitions attached to a fragment definition are parsed and stored in
    the `variable_definitions` field of the `FragmentDefinitionNode`. The
    syntax matches normal, query-defined variables::

        fragment A($var: Boolean = false) on T {
            ...
        }
    """
    # Accept plain strings by wrapping them; anything else must already
    # be a Source.
    if isinstance(source, str):
        source = Source(source)
    elif not isinstance(source, Source):
        raise TypeError(f"Must provide Source. Received: {inspect(source)}")
    lexer_options = dict(
        no_location=no_location,
        experimental_fragment_variables=experimental_fragment_variables,
    )
    return parse_document(Lexer(source, **lexer_options))
[ "def", "parse", "(", "source", ":", "SourceType", ",", "no_location", "=", "False", ",", "experimental_fragment_variables", "=", "False", ")", "->", "DocumentNode", ":", "if", "isinstance", "(", "source", ",", "str", ")", ":", "source", "=", "Source", "(", "source", ")", "elif", "not", "isinstance", "(", "source", ",", "Source", ")", ":", "raise", "TypeError", "(", "f\"Must provide Source. Received: {inspect(source)}\"", ")", "lexer", "=", "Lexer", "(", "source", ",", "no_location", "=", "no_location", ",", "experimental_fragment_variables", "=", "experimental_fragment_variables", ",", ")", "return", "parse_document", "(", "lexer", ")" ]
36.060606
27.181818
def __scanPlugins(self):
    """Scan and load plugins from third-party packages and the local plugin directory.

    Third-party packages listed in ``self.plugin_packages`` are imported by
    name; each subdirectory of ``self.plugins_abspath`` containing an
    ``__init__.py`` is imported as ``<plugins_folder>.<package_name>``.
    Every successfully imported plugin is handed to ``self.__loadPlugin``.

    :returns: None; successful loads update internal plugin state via
        ``self.__loadPlugin`` (presumably ``self.__plugins`` — confirm there)
    :raises PluginError: when a configured third-party plugin package cannot
        be imported. ``self.__loadPlugin`` may raise further PluginError
        subclasses (e.g. CSSLoadError, VersionError) — TODO confirm.
    """
    self.logger.info("Initialization Plugins Start, local plugins path: %s, third party plugins: %s" % (self.plugins_abspath, self.plugin_packages))
    #: Load third-party plugins: each entry is imported by package name.
    if self.plugin_packages and isinstance(self.plugin_packages, (list, tuple)):
        for package_name in self.plugin_packages:
            try:
                plugin = __import__(package_name)
            except ImportError as e:
                # A misconfigured third-party plugin is a hard error.
                raise PluginError("ImportError for %s, detail is %s" %(package_name, e))
            else:
                # Resolve the package's on-disk location from its __file__.
                # NOTE(review): namespace packages have __file__ = None and
                # would break here — confirm plugins are regular packages.
                plugin_abspath = os.path.dirname(os.path.abspath(plugin.__file__))
                self.__loadPlugin(plugin, plugin_abspath, package_name)
    #: Load local plug-in directory; only scanned if it is itself a package.
    if os.path.isdir(self.plugins_abspath) and os.path.isfile(os.path.join(self.plugins_abspath, "__init__.py")):
        for package_name in os.listdir(self.plugins_abspath):
            package_abspath = os.path.join(self.plugins_abspath, package_name)
            # Only subdirectories that are themselves packages count as plugins.
            if os.path.isdir(package_abspath) and os.path.isfile(os.path.join(package_abspath, "__init__.py")):
                self.logger.info("find plugin package: %s" % package_name)
                #: Dynamic load module (plugins.package): you can query custom information and get the plugin's class definition through getPluginClass
                plugin = __import__("{0}.{1}".format(self.plugins_folder, package_name), fromlist=[self.plugins_folder, ])
                self.__loadPlugin(plugin, package_abspath, package_name)
63.482759
38.448276
def add_global_response_interceptor(self, response_interceptor):
    # type: (AbstractResponseInterceptor) -> None
    """Register input to the global response interceptors list.

    :param response_interceptor: Response Interceptor instance to be
        registered.
    :type response_interceptor: AbstractResponseInterceptor
    :return: None
    :raises RuntimeConfigException: if the input is None or not an
        AbstractResponseInterceptor instance
    """
    interceptor = response_interceptor
    # None must be rejected before the isinstance check so the caller
    # gets the more specific error message.
    if interceptor is None:
        raise RuntimeConfigException(
            "Valid Response Interceptor instance to be provided")
    if not isinstance(interceptor, AbstractResponseInterceptor):
        raise RuntimeConfigException(
            "Input should be a ResponseInterceptor instance")
    self.global_response_interceptors.append(interceptor)
[ "def", "add_global_response_interceptor", "(", "self", ",", "response_interceptor", ")", ":", "# type: (AbstractResponseInterceptor) -> None", "if", "response_interceptor", "is", "None", ":", "raise", "RuntimeConfigException", "(", "\"Valid Response Interceptor instance to be provided\"", ")", "if", "not", "isinstance", "(", "response_interceptor", ",", "AbstractResponseInterceptor", ")", ":", "raise", "RuntimeConfigException", "(", "\"Input should be a ResponseInterceptor instance\"", ")", "self", ".", "global_response_interceptors", ".", "append", "(", "response_interceptor", ")" ]
43.166667
20.277778
def neighbors(self):
    '''
    Returns the left and right neighbors as Word instance. If the word
    is the first one in the sentence only the right neighbor is returned
    and vice versa.
    '''
    n = len(self._sentence)
    # A one-word sentence has no neighbors on either side.
    if n == 1:
        return {'left': None, 'right': None}
    p = self._position
    if not (-1 < p < n):
        raise IndexError()
    left = self._sentence.word(p - 1) if p > 0 else None
    right = self._sentence.word(p + 1) if p < n - 1 else None
    return {'left': left, 'right': right}
[ "def", "neighbors", "(", "self", ")", ":", "if", "len", "(", "self", ".", "_sentence", ")", "==", "1", ":", "return", "{", "'left'", ":", "None", ",", "'right'", ":", "None", "}", "else", ":", "p", "=", "self", ".", "_position", "if", "-", "1", "<", "p", "<", "len", "(", "self", ".", "_sentence", ")", ":", "if", "0", "==", "self", ".", "_position", ":", "return", "{", "'left'", ":", "None", ",", "'right'", ":", "self", ".", "_sentence", ".", "word", "(", "p", "+", "1", ")", "}", "elif", "0", "<", "self", ".", "_position", "<", "len", "(", "self", ".", "_sentence", ")", "-", "1", ":", "return", "{", "'left'", ":", "self", ".", "_sentence", ".", "word", "(", "p", "-", "1", ")", ",", "'right'", ":", "self", ".", "_sentence", ".", "word", "(", "p", "+", "1", ")", "}", "else", ":", "return", "{", "'left'", ":", "self", ".", "_sentence", ".", "word", "(", "p", "-", "1", ")", ",", "'right'", ":", "None", "}", "else", ":", "raise", "IndexError", "(", ")" ]
34.966667
17.366667
def fix_module(job):
    """
    Fix for tasks without a module. Provides backwards compatibility with < 0.1.5

    Scans each module listed in ``settings.RQ_JOBS_MODULE`` for an attribute
    named after ``job.task``; on the first match the task is rewritten to the
    fully qualified ``module.task`` form.

    :param job: job instance whose ``task`` attribute may lack a module path
    :return: the (possibly updated) job
    """
    modules = settings.RQ_JOBS_MODULE
    # Accept a single module path (str) as well as a tuple/list of them.
    # The old ``type(modules) == tuple`` check wrapped a list setting into a
    # nested list, which then crashed importlib.import_module.
    if not isinstance(modules, (tuple, list)):
        modules = [modules]
    for module in modules:
        try:
            module_match = importlib.import_module(module)
            if hasattr(module_match, job.task):
                job.task = '{}.{}'.format(module, job.task)
                break
        except ImportError:
            # Unimportable entries are skipped; later modules may still match.
            continue
    return job
[ "def", "fix_module", "(", "job", ")", ":", "modules", "=", "settings", ".", "RQ_JOBS_MODULE", "if", "not", "type", "(", "modules", ")", "==", "tuple", ":", "modules", "=", "[", "modules", "]", "for", "module", "in", "modules", ":", "try", ":", "module_match", "=", "importlib", ".", "import_module", "(", "module", ")", "if", "hasattr", "(", "module_match", ",", "job", ".", "task", ")", ":", "job", ".", "task", "=", "'{}.{}'", ".", "format", "(", "module", ",", "job", ".", "task", ")", "break", "except", "ImportError", ":", "continue", "return", "job" ]
31.0625
15.4375
def toggle_codes(self, event):
    """ Show/hide method code explanation widget on button click """
    btn = event.GetEventObject()
    # The button label doubles as the current visibility state.
    reveal = btn.Label == 'Show method codes'
    self.code_msg_boxsizer.ShowItems(reveal)
    btn.SetLabel('Hide method codes' if reveal else 'Show method codes')
    self.do_fit(None)
[ "def", "toggle_codes", "(", "self", ",", "event", ")", ":", "btn", "=", "event", ".", "GetEventObject", "(", ")", "if", "btn", ".", "Label", "==", "'Show method codes'", ":", "self", ".", "code_msg_boxsizer", ".", "ShowItems", "(", "True", ")", "btn", ".", "SetLabel", "(", "'Hide method codes'", ")", "else", ":", "self", ".", "code_msg_boxsizer", ".", "ShowItems", "(", "False", ")", "btn", ".", "SetLabel", "(", "'Show method codes'", ")", "self", ".", "do_fit", "(", "None", ")" ]
35.416667
9.583333
async def restart(request: web.Request) -> web.Response:
    """ Restart the robot. Blocks while the restart lock is held. """
    restart_lock = request.app[RESTART_LOCK_NAME]
    async with restart_lock:
        # Schedule the actual restart slightly in the future so this
        # response can be delivered first.
        loop = asyncio.get_event_loop()
        loop.call_later(1, _do_restart)
    return web.json_response({'message': 'Restarting in 1s'}, status=200)
[ "async", "def", "restart", "(", "request", ":", "web", ".", "Request", ")", "->", "web", ".", "Response", ":", "async", "with", "request", ".", "app", "[", "RESTART_LOCK_NAME", "]", ":", "asyncio", ".", "get_event_loop", "(", ")", ".", "call_later", "(", "1", ",", "_do_restart", ")", "return", "web", ".", "json_response", "(", "{", "'message'", ":", "'Restarting in 1s'", "}", ",", "status", "=", "200", ")" ]
37.444444
11.555556
def read_10xgenomics(cls, tarball_fpath: str, prefix: str, use_ensembl_ids: bool = False):
    """Read a 10X genomics compressed tarball containing expression data.

    The tarball must contain three members under ``prefix``:
    ``matrix.mtx`` (sparse MatrixMarket counts, genes x cells),
    ``genes.tsv`` (tab-separated; column 0 appears to hold Ensembl IDs and
    column 1 gene symbols — confirm against the 10X format docs), and
    ``barcodes.tsv`` (one cell barcode per row, column 0).

    Note: common prefix patterns:
        - "filtered_gene_bc_matrices/[annotations]/"
        - "filtered_matrices_mex/[annotations]/"

    :param tarball_fpath: path to the ``.tar.gz`` archive
    :param prefix: member-name prefix inside the archive (see above)
    :param use_ensembl_ids: if True, use genes.tsv column 0 as gene names
        instead of column 1
    :returns: a ``cls`` instance built from the dense matrix, gene names,
        and barcodes
    :raises KeyError: if an expected member is missing from the archive
    """
    _LOGGER.info('Reading file: %s', tarball_fpath)
    with tarfile.open(tarball_fpath, mode='r:gz') as tf:
        # Sparse count matrix in MatrixMarket format.
        ti = tf.getmember('%smatrix.mtx' % prefix)
        with tf.extractfile(ti) as fh:
            mtx = scipy.io.mmread(fh)
        # Gene annotations; pick the ID column based on use_ensembl_ids.
        ti = tf.getmember('%sgenes.tsv' % prefix)
        with tf.extractfile(ti) as fh:
            # extractfile yields a binary stream; wrap for text csv parsing.
            wrapper = io.TextIOWrapper(fh, encoding='ascii')
            i = 1
            if use_ensembl_ids:
                i = 0
            gene_names = \
                    [row[i] for row in csv.reader(wrapper, delimiter='\t')]
        # Cell barcodes, one per matrix column.
        ti = tf.getmember('%sbarcodes.tsv' % prefix)
        with tf.extractfile(ti) as fh:
            wrapper = io.TextIOWrapper(fh, encoding='ascii')
            barcodes = \
                    [row[0] for row in csv.reader(wrapper, delimiter='\t')]
    # Sanity checks: annotation files must line up with the matrix shape.
    # NOTE(review): asserts are stripped under ``python -O``.
    assert mtx.shape[0] == len(gene_names)
    assert mtx.shape[1] == len(barcodes)
    _LOGGER.info('Matrix dimensions: %s', str(mtx.shape))
    # Densify before handing off to the constructor.
    X = mtx.todense()
    matrix = cls(X=X, genes=gene_names, cells=barcodes)
    return matrix
[ "def", "read_10xgenomics", "(", "cls", ",", "tarball_fpath", ":", "str", ",", "prefix", ":", "str", ",", "use_ensembl_ids", ":", "bool", "=", "False", ")", ":", "_LOGGER", ".", "info", "(", "'Reading file: %s'", ",", "tarball_fpath", ")", "with", "tarfile", ".", "open", "(", "tarball_fpath", ",", "mode", "=", "'r:gz'", ")", "as", "tf", ":", "ti", "=", "tf", ".", "getmember", "(", "'%smatrix.mtx'", "%", "prefix", ")", "with", "tf", ".", "extractfile", "(", "ti", ")", "as", "fh", ":", "mtx", "=", "scipy", ".", "io", ".", "mmread", "(", "fh", ")", "ti", "=", "tf", ".", "getmember", "(", "'%sgenes.tsv'", "%", "prefix", ")", "with", "tf", ".", "extractfile", "(", "ti", ")", "as", "fh", ":", "wrapper", "=", "io", ".", "TextIOWrapper", "(", "fh", ",", "encoding", "=", "'ascii'", ")", "i", "=", "1", "if", "use_ensembl_ids", ":", "i", "=", "0", "gene_names", "=", "[", "row", "[", "i", "]", "for", "row", "in", "csv", ".", "reader", "(", "wrapper", ",", "delimiter", "=", "'\\t'", ")", "]", "ti", "=", "tf", ".", "getmember", "(", "'%sbarcodes.tsv'", "%", "prefix", ")", "with", "tf", ".", "extractfile", "(", "ti", ")", "as", "fh", ":", "wrapper", "=", "io", ".", "TextIOWrapper", "(", "fh", ",", "encoding", "=", "'ascii'", ")", "barcodes", "=", "[", "row", "[", "0", "]", "for", "row", "in", "csv", ".", "reader", "(", "wrapper", ",", "delimiter", "=", "'\\t'", ")", "]", "assert", "mtx", ".", "shape", "[", "0", "]", "==", "len", "(", "gene_names", ")", "assert", "mtx", ".", "shape", "[", "1", "]", "==", "len", "(", "barcodes", ")", "_LOGGER", ".", "info", "(", "'Matrix dimensions: %s'", ",", "str", "(", "mtx", ".", "shape", ")", ")", "X", "=", "mtx", ".", "todense", "(", ")", "matrix", "=", "cls", "(", "X", "=", "X", ",", "genes", "=", "gene_names", ",", "cells", "=", "barcodes", ")", "return", "matrix" ]
36.878049
19.439024
def credential_share_simulate(self, cred_id, *user_ids):
    """Shares a given credential to the specified Users.

    :param cred_id: Credential ID
    :param user_ids: List of User IDs
    """
    payload = {
        'id': cred_id,
        'users': [{'id': uid} for uid in user_ids],
    }
    return self.raw_query("credential", "shareSimulate", data=payload)
[ "def", "credential_share_simulate", "(", "self", ",", "cred_id", ",", "*", "user_ids", ")", ":", "return", "self", ".", "raw_query", "(", "\"credential\"", ",", "\"shareSimulate\"", ",", "data", "=", "{", "'id'", ":", "cred_id", ",", "'users'", ":", "[", "{", "'id'", ":", "i", "}", "for", "i", "in", "user_ids", "]", ",", "}", ")" ]
35.9
14.2
def from_string(cls, token_string):
    """
    Build a token from `token_string`, the hex string representation
    received from the server.
    """
    raw = token_string
    # unhexlify accepts unicode input everywhere except pypy3, which raises
    # "TypeError: 'str' does not support the buffer interface" -- so encode
    # text input down to bytes first.
    if isinstance(raw, six.text_type):
        raw = raw.encode('ascii')
    # The BOP stores a hex string
    return cls(unhexlify(raw))
[ "def", "from_string", "(", "cls", ",", "token_string", ")", ":", "# unhexlify works fine with unicode input in everythin but pypy3, where it Raises \"TypeError: 'str' does not support the buffer interface\"", "if", "isinstance", "(", "token_string", ",", "six", ".", "text_type", ")", ":", "token_string", "=", "token_string", ".", "encode", "(", "'ascii'", ")", "# The BOP stores a hex string", "return", "cls", "(", "unhexlify", "(", "token_string", ")", ")" ]
64.285714
20.428571
async def commit(
        request: web.Request,
        session: UpdateSession) -> web.Response:
    """ Serves /update/:session/commit

    Rejects the request with 409 unless the session has finished its
    download stage; otherwise writes the boot file and advances the
    session to the restart-ready stage.
    """
    if session.stage != Stages.DONE:
        payload = {
            'error': 'not-ready',
            'message': f'System is not ready to commit the update '
                       f'(currently {session.stage.value.short})'}
        return web.json_response(data=payload, status=409)

    # The boot partition must be unmounted while we write to it.
    with dbus_actions.unmount_boot():
        boot_src = os.path.join(session.download_path, BOOT_NAME)
        write_file(boot_src, constants.BOOT_PARTITION_NAME, lambda x: None)

    session.set_stage(Stages.READY_FOR_RESTART)
    return web.json_response(data=session.state, status=200)
[ "async", "def", "commit", "(", "request", ":", "web", ".", "Request", ",", "session", ":", "UpdateSession", ")", "->", "web", ".", "Response", ":", "if", "session", ".", "stage", "!=", "Stages", ".", "DONE", ":", "return", "web", ".", "json_response", "(", "data", "=", "{", "'error'", ":", "'not-ready'", ",", "'message'", ":", "f'System is not ready to commit the update '", "f'(currently {session.stage.value.short})'", "}", ",", "status", "=", "409", ")", "with", "dbus_actions", ".", "unmount_boot", "(", ")", ":", "write_file", "(", "os", ".", "path", ".", "join", "(", "session", ".", "download_path", ",", "BOOT_NAME", ")", ",", "constants", ".", "BOOT_PARTITION_NAME", ",", "lambda", "x", ":", "None", ")", "session", ".", "set_stage", "(", "Stages", ".", "READY_FOR_RESTART", ")", "return", "web", ".", "json_response", "(", "data", "=", "session", ".", "state", ",", "status", "=", "200", ")" ]
37
16.473684