text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def convert_softmax(net, node, module, builder):
    """Convert an MXNet softmax layer into a CoreML softmax layer.

    Parameters
    ----------
    net : network
        The MXNet network object.
    node : dict
        The node (layer) to convert; its ``'name'`` entry names the layer.
    module : module
        The MXNet module (unused here, kept for converter-API uniformity).
    builder : NeuralNetworkBuilder
        CoreML neural-network builder that receives the new layer.
    """
    layer_input, layer_output = _get_input_output_name(net, node)
    builder.add_softmax(name=node['name'],
                        input_name=layer_input,
                        output_name=layer_output)
[ "def", "convert_softmax", "(", "net", ",", "node", ",", "module", ",", "builder", ")", ":", "input_name", ",", "output_name", "=", "_get_input_output_name", "(", "net", ",", "node", ")", "name", "=", "node", "[", "'name'", "]", "builder", ".", "add_softmax...
25.136364
18
def update_screen(self):
    """Refresh the whole screen once per frame.

    You only need to override this to redraw small portions of the
    screen instead of everything.
    """
    # Cap the frame rate first, then flush the full display buffer.
    frame_clock = self.clock
    frame_clock.tick(self.FPS)
    pygame.display.update()
[ "def", "update_screen", "(", "self", ")", ":", "self", ".", "clock", ".", "tick", "(", "self", ".", "FPS", ")", "pygame", ".", "display", ".", "update", "(", ")" ]
51
8
def path(self, which=None):
    """Extend ``nailgun.entity_mixins.Entity.path``.

    The format of the returned path depends on the value of ``which``:

    incremental_update
        /content_view_versions/incremental_update
    promote
        /content_view_versions/<id>/promote

    ``super`` is called otherwise.
    """
    # Map each special action to the base-path prefix it needs.
    special_prefixes = {'incremental_update': 'base', 'promote': 'self'}
    if which in special_prefixes:
        base = super(ContentViewVersion, self).path(special_prefixes[which])
        return '{0}/{1}'.format(base, which)
    return super(ContentViewVersion, self).path(which)
[ "def", "path", "(", "self", ",", "which", "=", "None", ")", ":", "if", "which", "in", "(", "'incremental_update'", ",", "'promote'", ")", ":", "prefix", "=", "'base'", "if", "which", "==", "'incremental_update'", "else", "'self'", "return", "'{0}/{1}'", "....
33.1
20.15
def cross_v2(vec1, vec2):
    """Return the scalar "cross product" of two 2D vectors.

    A true cross product only exists in 3D; this returns the Z component
    of the 3D result when both vectors are lifted into the z=0 plane.

    NOTE(review): this computes ``y1*x2 - x1*y2``, the negation of the
    more common ``x1*y2 - y1*x2`` convention -- callers rely on this sign.
    """
    z_pos = vec1.y * vec2.x
    z_neg = vec1.x * vec2.y
    return z_pos - z_neg
[ "def", "cross_v2", "(", "vec1", ",", "vec2", ")", ":", "return", "vec1", ".", "y", "*", "vec2", ".", "x", "-", "vec1", ".", "x", "*", "vec2", ".", "y" ]
33.428571
16.428571
def _init_map(self, record_types=None, **kwargs):
    """Initialize the underlying form map."""
    # Let the OsidObjectForm parent build the base map first.
    osid_objects.OsidObjectForm._init_map(self, record_types=record_types)
    # Record which agency this form is assigned to.
    agency_id = str(kwargs['agency_id'])
    self._my_map['assignedAgencyIds'] = [agency_id]
[ "def", "_init_map", "(", "self", ",", "record_types", "=", "None", ",", "*", "*", "kwargs", ")", ":", "osid_objects", ".", "OsidObjectForm", ".", "_init_map", "(", "self", ",", "record_types", "=", "record_types", ")", "self", ".", "_my_map", "[", "'assign...
57.5
19.25
def blockingSave(self, path):
    """Save the session to a file and return only after the save finishes.

    Only called by interactiveTutorial-save at the moment.

    :param path: destination file path for the saved session
    """
    # Scratch subdirectory created for this blocking save.
    self.tmp_dir_save_session = self.tmp_dir_session.join('block').mkdir()
    # Snapshot of everything the save thread needs.
    state = {'session': dict(self.opts), 'dialogs': self.dialogs.saveState()}
    # NOTE(review): the thread is handed self.tmp_dir_session, not the
    # 'block' subdirectory created above -- confirm that is intended.
    self.saveThread.prepare('0', path, self.tmp_dir_session, state)
    self.sigSave.emit(self)
    # run() (not start()) executes the save synchronously on this thread,
    # which is what makes this call blocking.
    self.saveThread.run()
[ "def", "blockingSave", "(", "self", ",", "path", ")", ":", "self", ".", "tmp_dir_save_session", "=", "self", ".", "tmp_dir_session", ".", "join", "(", "'block'", ")", ".", "mkdir", "(", ")", "state", "=", "{", "'session'", ":", "dict", "(", "self", "."...
43.636364
14
def use_any_adapter(self, use_any_adapter):
    """
    Allows GNS3 to use any VMware adapter on this instance.

    :param use_any_adapter: boolean
    """
    # Pick the message first so there is a single log call site.
    if use_any_adapter:
        message = "VMware VM '{name}' [{id}] is allowed to use any adapter"
    else:
        message = "VMware VM '{name}' [{id}] is not allowed to use any adapter"
    log.info(message.format(name=self.name, id=self.id))
    self._use_any_adapter = use_any_adapter
[ "def", "use_any_adapter", "(", "self", ",", "use_any_adapter", ")", ":", "if", "use_any_adapter", ":", "log", ".", "info", "(", "\"VMware VM '{name}' [{id}] is allowed to use any adapter\"", ".", "format", "(", "name", "=", "self", ".", "name", ",", "id", "=", "...
40.5
25.5
def shortened_character_name(self_or_cls, c, eliminations=(), substitutions=None, transforms=()):
    """
    Given a unicode character c, return the shortened unicode name
    (tokens joined by underscores) by applying the eliminations,
    substitutions and transforms.

    :param c: a single unicode character
    :param eliminations: substrings removed from the lowercased name
    :param substitutions: mapping of substring -> replacement
    :param transforms: callables applied to the name, in order
    :returns: str, e.g. 'latin_small_letter_a'
    """
    # FIX: defaults were mutable ([] / {}); use immutable/None defaults
    # to avoid the shared-mutable-default pitfall (behavior unchanged).
    substitutions = substitutions if substitutions is not None else {}
    name = unicodedata.name(c).lower()
    # Filtering
    for elim in eliminations:
        name = name.replace(elim, '')
    # Substitution
    for i, o in substitutions.items():
        name = name.replace(i, o)
    for transform in transforms:
        name = transform(name)
    # Collapse whitespace, then normalize separators to underscores.
    return ' '.join(name.strip().split()).replace(' ', '_').replace('-', '_')
[ "def", "shortened_character_name", "(", "self_or_cls", ",", "c", ",", "eliminations", "=", "[", "]", ",", "substitutions", "=", "{", "}", ",", "transforms", "=", "[", "]", ")", ":", "name", "=", "unicodedata", ".", "name", "(", "c", ")", ".", "lower", ...
41.6875
13.0625
def on_start(self, host, port, channel, nickname, password):
    """
    A WebSocket session has started: build the IRC client that backs it
    and run the client in its own greenlet.
    """
    client = WebSocketIRCClient(host, port, channel, nickname,
                                password, self)
    self.client = client
    self.spawn(client.start)
[ "def", "on_start", "(", "self", ",", "host", ",", "port", ",", "channel", ",", "nickname", ",", "password", ")", ":", "self", ".", "client", "=", "WebSocketIRCClient", "(", "host", ",", "port", ",", "channel", ",", "nickname", ",", "password", ",", "se...
43.75
12.5
def clone(self, newname, config_path=None, flags=0, bdevtype=None,
          bdevdata=None, newsize=0, hookargs=()):
    """Clone the current container under ``newname``.

    Returns a new Container on success, False otherwise.
    """
    # Mandatory arguments are always passed; optional ones only when truthy.
    args = {'newname': newname, 'flags': flags,
            'newsize': newsize, 'hookargs': hookargs}
    for key, value in (('config_path', config_path),
                       ('bdevtype', bdevtype),
                       ('bdevdata', bdevdata)):
        if value:
            args[key] = value
    if not _lxc.Container.clone(self, **args):
        return False
    return Container(newname, config_path=config_path)
[ "def", "clone", "(", "self", ",", "newname", ",", "config_path", "=", "None", ",", "flags", "=", "0", ",", "bdevtype", "=", "None", ",", "bdevdata", "=", "None", ",", "newsize", "=", "0", ",", "hookargs", "=", "(", ")", ")", ":", "args", "=", "{"...
29.954545
13.954545
def delete(self, path, data, **options):
    """Parse DELETE request options and dispatch the request."""
    payload, opts = self._update_request(data, options)
    return self.request('delete', path, data=payload, **opts)
[ "def", "delete", "(", "self", ",", "path", ",", "data", ",", "*", "*", "options", ")", ":", "data", ",", "options", "=", "self", ".", "_update_request", "(", "data", ",", "options", ")", "return", "self", ".", "request", "(", "'delete'", ",", "path",...
41.333333
11
def _scan(positions):
    """Score 17-long windows (step 5) to find the region of the vector
    with the most expression.

    :param positions: sequence of per-position values
    :returns: list of per-window enrichment scores
    """
    scores = []
    for start in range(0, len(positions) - 17, 5):
        # BUG FIX: was ``end = start = 17`` (clobbered start, window never
        # advanced) and ``scores.add(...)`` (lists have no .add, so the
        # first iteration raised AttributeError).
        end = start + 17
        scores.append(_enrichment(positions[start:end],
                                  positions[:start],
                                  positions[end:]))
    # BUG FIX: the computed scores were previously discarded (no return).
    return scores
[ "def", "_scan", "(", "positions", ")", ":", "scores", "=", "[", "]", "for", "start", "in", "range", "(", "0", ",", "len", "(", "positions", ")", "-", "17", ",", "5", ")", ":", "end", "=", "start", "=", "17", "scores", ".", "add", "(", "_enrichm...
43.666667
19.833333
def kwargs_as_assignments(call_node, parent):
    """Yield NoDeclAssign nodes from kwargs in a Call node.

    :param call_node: an ``ast.Call`` whose keyword arguments become
        assignment targets; positional arguments are rejected.
    :param parent: AST node used as the attribute owner for
        non-builtin targets.
    :raises TypeError: if ``call_node`` is not an ``ast.Call``.
    :raises ValueError: if the call has positional arguments.
    """
    if not isinstance(call_node, ast.Call):
        raise TypeError('node must be an ast.Call')
    if len(call_node.args) > 0:
        raise ValueError('positional args not allowed')
    for keyword in call_node.keywords:
        dst_name = keyword.arg
        if dst_name.startswith('gl_'):
            # Write to builtins directly.
            # NOTE(review): this branch uses ctx=ast.Load() while the other
            # uses ast.Store() -- confirm Load is intended for assignment
            # targets of builtins (the downstream codegen may not care).
            target = [ast.Name(id=keyword.arg, ctx=ast.Load())]
        else:
            # Non-builtins are part of an interface block.
            target = [ast.Attribute(value=parent, attr=keyword.arg, ctx=ast.Store())]
        yield NoDeclAssign(targets=target, value=keyword.value)
[ "def", "kwargs_as_assignments", "(", "call_node", ",", "parent", ")", ":", "if", "not", "isinstance", "(", "call_node", ",", "ast", ".", "Call", ")", ":", "raise", "TypeError", "(", "'node must be an ast.Call'", ")", "if", "len", "(", "call_node", ".", "args...
37.45
17.35
def increment(self, key, value=1):
    """
    Increment the value of an item in the cache.

    :param key: The cache key
    :type key: str

    :param value: The increment value
    :type value: int

    :rtype: int or bool
    """
    # BUG FIX: the docstring promises an int-or-bool return value, but the
    # store's result was previously discarded (method returned None).
    return self._store.increment(self.tagged_item_key(key), value)
[ "def", "increment", "(", "self", ",", "key", ",", "value", "=", "1", ")", ":", "self", ".", "_store", ".", "increment", "(", "self", ".", "tagged_item_key", "(", "key", ")", ",", "value", ")" ]
24.461538
16.615385
def split_by_connected_component(self, idents):
    '''Split idents into equivalence classes based on connected components.

    :param idents: iterable of identifiers to partition
    :returns: list of sorted lists, one per connected component
    '''
    # Identifiers not yet assigned to any component.
    idents_remaining = set(idents)
    connected_components = []
    for ident in idents:
        if ident not in idents_remaining:
            # Already swept into an earlier component.
            continue
        idents_remaining.remove(ident)
        connected_component = [ident]
        # Walk every label reachable from this ident; absorb any endpoint
        # that is still unassigned so it is not revisited later.
        for label in self.connected_component(ident):
            cids = label.content_id1, label.content_id2
            for cid in cids:
                if cid in idents_remaining:
                    connected_component.append(cid)
                    idents_remaining.remove(cid)
        connected_components.append(sorted(connected_component))
    return connected_components
[ "def", "split_by_connected_component", "(", "self", ",", "idents", ")", ":", "idents_remaining", "=", "set", "(", "idents", ")", "connected_components", "=", "[", "]", "for", "ident", "in", "idents", ":", "if", "ident", "not", "in", "idents_remaining", ":", ...
36.363636
16.363636
def _paramiko_tunnel(lport, rport, server, remoteip, keyfile=None, password=None):
    """Function for actually starting a paramiko tunnel, to be passed
    to multiprocessing.Process(target=this), and not called directly.

    :param lport: local port to forward from
    :param rport: remote port to forward to
    :param remoteip: remote host to forward to
    :param server: ssh server spec, parsed by _split_server
    :param keyfile: optional private key file for authentication
    :param password: optional password for authentication

    Runs in a child process: failures terminate it via sys.exit with a
    nonzero code; SIGINT exits cleanly with code 0.
    """
    username, server, port = _split_server(server)
    client = paramiko.SSHClient()
    client.load_system_host_keys()
    client.set_missing_host_key_policy(paramiko.WarningPolicy())

    try:
        client.connect(server, port, username=username, key_filename=keyfile,
                       look_for_keys=True, password=password)
#    except paramiko.AuthenticationException:
#        if password is None:
#            password = getpass("%s@%s's password: "%(username, server))
#            client.connect(server, port, username=username, password=password)
#        else:
#            raise
    except Exception as e:
        print ('*** Failed to connect to %s:%d: %r' % (server, port, e))
        sys.exit(1)

    # print ('Now forwarding port %d to %s:%d ...' % (lport, server, rport))

    try:
        # Blocks for the lifetime of the tunnel.
        forward_tunnel(lport, remoteip, rport, client.get_transport())
    except KeyboardInterrupt:
        print ('SIGINT: Port forwarding stopped cleanly')
        sys.exit(0)
    except Exception as e:
        print ("Port forwarding stopped uncleanly: %s"%e)
        sys.exit(255)
[ "def", "_paramiko_tunnel", "(", "lport", ",", "rport", ",", "server", ",", "remoteip", ",", "keyfile", "=", "None", ",", "password", "=", "None", ")", ":", "username", ",", "server", ",", "port", "=", "_split_server", "(", "server", ")", "client", "=", ...
40.34375
22.71875
def validate(self, instance, value):
    """Check the class of the container and validate each element.

    This returns a copy of the container to prevent unwanted sharing of
    pointers.

    :param instance: owning object the property is attached to
    :param value: candidate container (or scalar, when coercion is on)
    :returns: a new container of the expected class with validated items
    """
    # Without coercion the container class must already match.
    if not self.coerce and not isinstance(value, self._class_container):
        self.error(instance, value)
    # With coercion, wrap a bare scalar into a one-element list.
    if self.coerce and not isinstance(value, CONTAINERS):
        value = [value]
    # Keep the incoming container class when acceptable; otherwise fall
    # back to the declared container class.
    if not isinstance(value, self._class_container):
        out_class = self._class_container
    else:
        out_class = value.__class__
    out = []
    for val in value:
        try:
            out += [self.prop.validate(instance, val)]
        except ValueError:
            self.error(instance, val, extra='This item is invalid.')
    # Building a new container guarantees no shared references with the
    # caller's input.
    return out_class(out)
[ "def", "validate", "(", "self", ",", "instance", ",", "value", ")", ":", "if", "not", "self", ".", "coerce", "and", "not", "isinstance", "(", "value", ",", "self", ".", "_class_container", ")", ":", "self", ".", "error", "(", "instance", ",", "value", ...
38.571429
16.952381
def scalar(cls, value):
    """Create an ArgumentType with a single scalar in range(value)."""
    def factory(i, name):
        # One dimension of size ``value``; the decoder just unpacks it.
        return cls(i, name, (value,), lambda a: a[0], None)
    return factory
[ "def", "scalar", "(", "cls", ",", "value", ")", ":", "return", "lambda", "i", ",", "name", ":", "cls", "(", "i", ",", "name", ",", "(", "value", ",", ")", ",", "lambda", "a", ":", "a", "[", "0", "]", ",", "None", ")" ]
54.666667
16
def register_dependency(self, data_src, data_sink):
    """
    registers a dependency of data_src -> data_sink by placing
    appropriate entries in provides_for and depends_on
    """
    pdebug("registering dependency %s -> %s" % (data_src, data_sink))
    # Look each task up once; append only if the edge is not present yet.
    sink_task = self._gettask(data_sink)
    if data_src not in sink_task.depends_on:
        sink_task.depends_on.append(data_src)
    src_task = self._gettask(data_src)
    if data_sink not in src_task.provides_for:
        src_task.provides_for.append(data_sink)
[ "def", "register_dependency", "(", "self", ",", "data_src", ",", "data_sink", ")", ":", "pdebug", "(", "\"registering dependency %s -> %s\"", "%", "(", "data_src", ",", "data_sink", ")", ")", "if", "(", "data_src", "not", "in", "self", ".", "_gettask", "(", ...
44.083333
24.916667
def duration(cls, seconds, first=True):
    """
    Constructs a human readable string to indicate the time duration
    for the given seconds

    :param int seconds:
    :param bool first: Just return the first unit instead of all
    :rtype: str
    """
    parts = []
    remaining = seconds
    # Walk units from largest to smallest, peeling off whole counts.
    for unit in reversed(TimeUnit):
        if remaining < unit.seconds:
            continue
        unit_name = unit.name.lower()
        count = int(remaining / unit.seconds)
        remaining -= count * unit.seconds
        parts.append(f'{count} {plural(unit_name, count=count)}')
        if first or not remaining:
            break
    return ' '.join(parts)
[ "def", "duration", "(", "cls", ",", "seconds", ",", "first", "=", "True", ")", ":", "num_units", "=", "[", "]", "for", "unit", "in", "reversed", "(", "TimeUnit", ")", ":", "if", "seconds", ">=", "unit", ".", "seconds", ":", "name", "=", "unit", "."...
32.285714
17.52381
def remap_index_fn(ref_file):
    """minimap2 can build indexes on the fly but will also store common
    ones next to the reference, in a sibling ``minimap2`` directory.
    """
    ref_dir = os.path.dirname(ref_file)
    index_dir = os.path.join(ref_dir, os.pardir, "minimap2")
    # isdir already implies existence, so a single check suffices.
    if os.path.isdir(index_dir):
        return index_dir
    return ref_dir
[ "def", "remap_index_fn", "(", "ref_file", ")", ":", "index_dir", "=", "os", ".", "path", ".", "join", "(", "os", ".", "path", ".", "dirname", "(", "ref_file", ")", ",", "os", ".", "pardir", ",", "\"minimap2\"", ")", "if", "os", ".", "path", ".", "e...
40.875
14.75
def filename(self):
    """ Name of the file on the client file system, but normalized to ensure
        file system compatibility. An empty filename is returned as 'empty'.

        Only ASCII letters, digits, dashes, underscores and dots are
        allowed in the final filename. Accents are removed, if possible.
        Whitespace is replaced by a single dash. Leading or tailing dots
        or dashes are removed. The filename is limited to 255 characters.
    """
    fname = self.raw_filename
    # NOTE(review): ``unicode`` implies Python 2 here; byte-string names
    # are decoded as UTF-8, silently dropping undecodable bytes.
    if not isinstance(fname, unicode):
        fname = fname.decode('utf8', 'ignore')
    # Decompose accents (NFKD), then strip anything outside ASCII.
    fname = normalize('NFKD', fname)
    fname = fname.encode('ASCII', 'ignore').decode('ASCII')
    # Drop any client-supplied directory part (both separator styles).
    fname = os.path.basename(fname.replace('\\', os.path.sep))
    # Whitelist of safe characters, then collapse dashes/whitespace runs
    # and trim leading/trailing dots and dashes.
    fname = re.sub(r'[^a-zA-Z0-9-_.\s]', '', fname).strip()
    fname = re.sub(r'[-\s]+', '-', fname).strip('.-')
    return fname[:255] or 'empty'
[ "def", "filename", "(", "self", ")", ":", "fname", "=", "self", ".", "raw_filename", "if", "not", "isinstance", "(", "fname", ",", "unicode", ")", ":", "fname", "=", "fname", ".", "decode", "(", "'utf8'", ",", "'ignore'", ")", "fname", "=", "normalize"...
52.333333
19.611111
def hide_zeroes(self):
    '''
    Sometimes it makes sense to hide the labels for subsets whose size
    is zero. This utility method does this.
    '''
    zero_labels = (label for label in self.subset_labels
                   if label is not None and label.get_text() == '0')
    for label in zero_labels:
        label.set_visible(False)
[ "def", "hide_zeroes", "(", "self", ")", ":", "for", "v", "in", "self", ".", "subset_labels", ":", "if", "v", "is", "not", "None", "and", "v", ".", "get_text", "(", ")", "==", "'0'", ":", "v", ".", "set_visible", "(", "False", ")" ]
36.25
17.75
def _maybe_validate_rightmost_transposed_ndims(
    rightmost_transposed_ndims, validate_args, name=None):
  """Checks that `rightmost_transposed_ndims` is valid.

  Args:
    rightmost_transposed_ndims: integer `Tensor`; must be a non-negative
      scalar.
    validate_args: Python `bool`; whether to emit runtime assertion ops
      for properties that cannot be verified statically.
    name: Optional name for the created ops.

  Returns:
    A (possibly empty) list of assertion ops to run before use.

  Raises:
    TypeError: if the dtype is not an integer type.
    ValueError: if the value is statically known to be non-scalar or
      negative.
  """
  with tf.name_scope(name or 'maybe_validate_rightmost_transposed_ndims'):
    assertions = []
    if not dtype_util.is_integer(rightmost_transposed_ndims.dtype):
      raise TypeError('`rightmost_transposed_ndims` must be integer type.')

    # Scalar-ness: check statically when the rank is known, otherwise
    # (if requested) defer to a runtime assertion.
    if tensorshape_util.rank(rightmost_transposed_ndims.shape) is not None:
      if tensorshape_util.rank(rightmost_transposed_ndims.shape) != 0:
        raise ValueError('`rightmost_transposed_ndims` must be a scalar, '
                         'saw rank: {}.'.format(
                             tensorshape_util.rank(
                                 rightmost_transposed_ndims.shape)))
    elif validate_args:
      assertions += [assert_util.assert_rank(rightmost_transposed_ndims, 0)]

    # Non-negativity: same static-first, runtime-fallback pattern.
    rightmost_transposed_ndims_ = tf.get_static_value(
        rightmost_transposed_ndims)
    msg = '`rightmost_transposed_ndims` must be non-negative.'
    if rightmost_transposed_ndims_ is not None:
      if rightmost_transposed_ndims_ < 0:
        raise ValueError(msg[:-1] + ', saw: {}.'.format(
            rightmost_transposed_ndims_))
    elif validate_args:
      assertions += [
          assert_util.assert_non_negative(
              rightmost_transposed_ndims, message=msg)
      ]

    return assertions
[ "def", "_maybe_validate_rightmost_transposed_ndims", "(", "rightmost_transposed_ndims", ",", "validate_args", ",", "name", "=", "None", ")", ":", "with", "tf", ".", "name_scope", "(", "name", "or", "'maybe_validate_rightmost_transposed_ndims'", ")", ":", "assertions", "...
44.741935
20.354839
def nonblock_recv(self):
    """Non blocking receive"""
    # Serve from the frame buffer first, if anything is queued.
    if self.buffered_frames():
        return self.get_frame()

    # Otherwise do one non-blocking read from the socket, then restore
    # blocking mode.
    self.set_nonblock(True)
    packet = L2bpfListenSocket.recv(self)
    self.set_nonblock(False)
    return packet
[ "def", "nonblock_recv", "(", "self", ")", ":", "if", "self", ".", "buffered_frames", "(", ")", ":", "# Get a frame from the buffer", "return", "self", ".", "get_frame", "(", ")", "# Set the non blocking flag, read from the socket, and unset the flag", "self", ".", "set_...
30.666667
15.5
def _get_url(url):
    """Retrieve requested URL"""
    try:
        response = HTTP_SESSION.get(url, stream=True)
        response.raise_for_status()
    except requests.exceptions.RequestException as exc:
        # Normalize transport/HTTP errors into our own exception type.
        raise FetcherException(exc)
    return response
[ "def", "_get_url", "(", "url", ")", ":", "try", ":", "data", "=", "HTTP_SESSION", ".", "get", "(", "url", ",", "stream", "=", "True", ")", "data", ".", "raise_for_status", "(", ")", "except", "requests", ".", "exceptions", ".", "RequestException", "as", ...
27
17.444444
def tf_action_exploration(self, action, exploration, action_spec):
    """
    Applies optional exploration to the action (post-processor for action outputs).

    Args:
        action (tf.Tensor): The original output action tensor (to be post-processed).
        exploration (Exploration): The Exploration object to use.
        action_spec (dict): Dict specifying the action space.

    Returns:
        The post-processed action output tensor.
    """
    action_shape = tf.shape(input=action)
    exploration_value = exploration.tf_explore(
        episode=self.global_episode,
        timestep=self.global_timestep,
        shape=action_spec['shape']
    )
    # Prepend a batch axis so the exploration value broadcasts over the batch.
    exploration_value = tf.expand_dims(input=exploration_value, axis=0)

    if action_spec['type'] == 'bool':
        # With probability exploration_value, replace the action by a coin flip.
        action = tf.where(
            condition=(tf.random_uniform(shape=action_shape) < exploration_value),
            x=(tf.random_uniform(shape=action_shape) < 0.5),
            y=action
        )
    elif action_spec['type'] == 'int':
        # With probability exploration_value, replace by a uniform random action.
        action = tf.where(
            condition=(tf.random_uniform(shape=action_shape) < exploration_value),
            x=tf.random_uniform(shape=action_shape, maxval=action_spec['num_actions'], dtype=util.tf_dtype('int')),
            y=action
        )
    elif action_spec['type'] == 'float':
        # Additive Gaussian noise, scaled by the exploration value.
        noise = tf.random_normal(shape=action_shape, dtype=util.tf_dtype('float'))
        action += noise * exploration_value
        # Clip only when the spec defines bounds.
        if 'min_value' in action_spec:
            action = tf.clip_by_value(
                t=action,
                clip_value_min=action_spec['min_value'],
                clip_value_max=action_spec['max_value']
            )

    return action
[ "def", "tf_action_exploration", "(", "self", ",", "action", ",", "exploration", ",", "action_spec", ")", ":", "action_shape", "=", "tf", ".", "shape", "(", "input", "=", "action", ")", "exploration_value", "=", "exploration", ".", "tf_explore", "(", "episode",...
40.931818
22.386364
def load(stream, fmt='lha'):
    """Load a parameter file in DSixTools SLHA-like format or its JSON or
    YAML representation.

    :param stream: file-like object, or (for JSON) optionally a string
    :param fmt: one of 'lha', 'json', 'yaml'
    :raises ValueError: if ``fmt`` is not a recognized format
    """
    if fmt == 'lha':
        return pylha.load(stream)
    elif fmt == 'json':
        # Accept either a JSON string or an open file-like object.
        if isinstance(stream, str):
            return json.loads(stream)
        else:
            return json.load(stream)
    elif fmt == 'yaml':
        # SECURITY NOTE: yaml.load without an explicit Loader can run
        # arbitrary constructors; only use on trusted input. Kept as-is
        # to preserve behavior for any custom YAML tags.
        return yaml.load(stream)
    # BUG FIX: an unrecognized fmt previously returned None silently.
    raise ValueError("unknown format: %r" % (fmt,))
[ "def", "load", "(", "stream", ",", "fmt", "=", "'lha'", ")", ":", "if", "fmt", "==", "'lha'", ":", "return", "pylha", ".", "load", "(", "stream", ")", "elif", "fmt", "==", "'json'", ":", "if", "isinstance", "(", "stream", ",", "str", ")", ":", "r...
31.666667
10
def integer_list_file(cls, filename):
    """
    Read a list of integers from a file.

    The file format is:

     - # anywhere in the line begins a comment
     - leading and trailing spaces are ignored
     - empty lines are ignored
     - integers can be specified as:
        - decimal numbers ("100" is 100)
        - hexadecimal numbers ("0x100" is 256)
        - binary numbers ("0b100" is 4)
        - octal numbers ("0100" is 64)

    @type  filename: str
    @param filename: Name of the file to read.

    @rtype:  list( int )
    @return: List of integers read from the file.
    """
    count = 0
    result = list()
    # BUG FIX: use a context manager so the file is closed even when a
    # line fails to parse (the handle previously leaked on error).
    with open(filename, 'r') as fd:
        for line in fd:
            count = count + 1
            # Strip trailing comments and surrounding whitespace.
            if '#' in line:
                line = line[: line.find('#')]
            line = line.strip()
            if line:
                try:
                    value = cls.integer(line)
                except ValueError:
                    e = sys.exc_info()[1]
                    msg = "Error in line %d of %s: %s"
                    msg = msg % (count, filename, str(e))
                    raise ValueError(msg)
                result.append(value)
    return result
[ "def", "integer_list_file", "(", "cls", ",", "filename", ")", ":", "count", "=", "0", "result", "=", "list", "(", ")", "fd", "=", "open", "(", "filename", ",", "'r'", ")", "for", "line", "in", "fd", ":", "count", "=", "count", "+", "1", "if", "'#...
31.923077
12.282051
def calibrate_container_with_instrument(self, container: Container,
                                        instrument, save: bool
                                        ):
    '''Calibrates a container using the bottom of the first well.

    :param container: container to calibrate (well 0 is the reference)
    :param instrument: pipette/instrument used as the pose source
    :param save: whether the resulting calibration should be persisted
    '''
    well = container[0]
    # Get the relative position of well with respect to instrument
    delta = pose_tracker.change_base(
        self.poses,
        src=instrument,
        dst=well
    )

    if fflags.calibrate_to_bottom():
        delta_x = delta[0]
        delta_y = delta[1]
        # Tip racks keep the top-of-well reference; other containers get
        # the well depth added so the reference is the well bottom.
        if 'tiprack' in container.get_type():
            delta_z = delta[2]
        else:
            delta_z = delta[2] + well.z_size()
    else:
        delta_x = delta[0]
        delta_y = delta[1]
        delta_z = delta[2]

    self.poses = self._calibrate_container_with_delta(
        self.poses,
        container,
        delta_x, delta_y, delta_z,
        save
    )

    # Deck height depends on container calibration; drop the cached value.
    self.max_deck_height.cache_clear()
[ "def", "calibrate_container_with_instrument", "(", "self", ",", "container", ":", "Container", ",", "instrument", ",", "save", ":", "bool", ")", ":", "well", "=", "container", "[", "0", "]", "# Get the relative position of well with respect to instrument", "delta", "=...
29.894737
18.842105
def groups_delete(self, room_id=None, group=None, **kwargs):
    """Delete a private group."""
    # Prefer the room id; fall back to the room name; otherwise error.
    if room_id:
        return self.__call_api_post('groups.delete', roomId=room_id,
                                    kwargs=kwargs)
    if group:
        return self.__call_api_post('groups.delete', roomName=group,
                                    kwargs=kwargs)
    raise RocketMissingParamException('roomId or group required')
[ "def", "groups_delete", "(", "self", ",", "room_id", "=", "None", ",", "group", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "room_id", ":", "return", "self", ".", "__call_api_post", "(", "'groups.delete'", ",", "roomId", "=", "room_id", ",", ...
49.375
27
def fit(self, X, y=None, **fit_params):
    """Fit base estimators.

    Parameters
    ----------
    X : array-like, shape = (n_samples, n_features)
        Training data.

    y : array-like, optional
        Target data if base estimators are supervised.

    Returns
    -------
    self
    """
    data = numpy.asarray(X)
    # Fit the base estimators first, then train the meta estimator on
    # their stacked predictions.
    self._fit_estimators(data, y, **fit_params)
    stacked = self._predict_estimators(data)
    self.meta_estimator.fit(stacked, y)
    return self
[ "def", "fit", "(", "self", ",", "X", ",", "y", "=", "None", ",", "*", "*", "fit_params", ")", ":", "X", "=", "numpy", ".", "asarray", "(", "X", ")", "self", ".", "_fit_estimators", "(", "X", ",", "y", ",", "*", "*", "fit_params", ")", "Xt", "...
23.952381
18.142857
def prepare_samples(job, fastqs, univ_options):
    """
    This module will accept a dict object holding the 3 input prefixes and the patient id and will
    attempt to store the fastqs to the jobstore. The input files must satisfy the following
    1. File extensions can only be fastq or fq (.gz is also allowed)
    2. Forward and reverse reads MUST be in the same folder with the same prefix
    3. The files must be on the form
                    <prefix_for_file>1.<fastq/fq>[.gz]
                    <prefix_for_file>2.<fastq/fq>[.gz]
    The input dict is:
    tumor_dna: prefix_to_tumor_dna
    tumor_rna: prefix_to_tumor_rna
    normal_dna: prefix_to_normal_dna
    patient_id: patient ID

    The input dict is updated to
    tumor_dna: [jobStoreID_for_fastq1, jobStoreID_for_fastq2]
    tumor_rna: [jobStoreID_for_fastq1, jobStoreID_for_fastq2]
    normal_dna: [jobStoreID_for_fastq1, jobStoreID_for_fastq2]
    patient_id: patient ID
    gzipped: True/False

    This module corresponds to node 1 in the tree
    """
    job.fileStore.logToMaster('Downloading Inputs for %s' % univ_options['patient'])
    allowed_samples = {'tumor_dna_fastq_prefix', 'tumor_rna_fastq_prefix',
                       'normal_dna_fastq_prefix'}
    # Exactly the three sample prefixes plus patient_id must be present.
    if set(fastqs.keys()).difference(allowed_samples) != {'patient_id'}:
        raise ParameterError('Sample with the following parameters has an error:\n' +
                             '\n'.join(fastqs.values()))
    # For each sample type, check if the prefix is an S3 link or a regular file
    # Download S3 files.
    for sample_type in ['tumor_dna', 'tumor_rna', 'normal_dna']:
        prefix = fastqs[''.join([sample_type, '_fastq_prefix'])]
        # if the file was an xml, process it before moving further.
        if prefix.endswith('.xml'):
            prefix = get_file_from_cghub(job, fastqs[''.join([sample_type, '_fastq_prefix'])],
                                         univ_options['cghub_key'], univ_options,
                                         write_to_jobstore=False)
        # If gzipped, remember that
        if prefix.endswith('.gz'):
            prefix = os.path.splitext(prefix)[0]
            fastqs['gzipped'] = True
        else:
            fastqs['gzipped'] = False
        # Is the file .fastq or .fq
        if prefix.endswith(('.fastq', '.fq')):
            prefix, extn = os.path.splitext(prefix)
            # If the file was gzipped, add that to the extension
            if fastqs['gzipped']:
                extn = ''.join([extn, '.gz'])
        else:
            raise ParameterError('Are the inputs for patient (%s) fastq/fq?' %
                                 fastqs['patient_id'])
        # Handle the R1/R2 identifiers in the prefix. That is added by the program.
        assert prefix.endswith('1'), 'Prefix didn\'t end with 1.<fastq/fq>[.gz]: (%s.%s)' % (prefix, extn)
        prefix = prefix[:-1]
        # If it is a weblink, it HAS to be from S3
        if prefix.startswith('https://s3') or prefix.startswith('S3://'):
            fastqs[sample_type] = [
                get_file_from_s3(job, ''.join([prefix, '1', extn]), univ_options['sse_key'],
                                 per_file_encryption=univ_options['sse_key_is_master']),
                get_file_from_s3(job, ''.join([prefix, '2', extn]), univ_options['sse_key'],
                                 per_file_encryption=univ_options['sse_key_is_master'])]
        else:
            # Relies heavily on the assumption that the pair will be in the same
            # folder
            assert os.path.exists(''.join([prefix, '1', extn])), 'Bogus input: %s' % ''.join(
                [prefix, '1', extn])
            # Python lists maintain order hence the values are always guaranteed to be
            # [fastq1, fastq2]
            fastqs[sample_type] = [
                job.fileStore.writeGlobalFile(''.join([prefix, '1', extn])),
                job.fileStore.writeGlobalFile(''.join([prefix, '2', extn]))]
    # Drop the raw prefix entries now that the job-store IDs are in place.
    [fastqs.pop(x) for x in allowed_samples]
    return fastqs
[ "def", "prepare_samples", "(", "job", ",", "fastqs", ",", "univ_options", ")", ":", "job", ".", "fileStore", ".", "logToMaster", "(", "'Downloading Inputs for %s'", "%", "univ_options", "[", "'patient'", "]", ")", "allowed_samples", "=", "{", "'tumor_dna_fastq_pre...
53.039474
23.828947
def writeWarp(self, warpDict):
    """ Write a list of (in, out) values for a warpmap """
    warpElement = ET.Element("warp")
    # Axes are emitted in sorted-name order for deterministic output.
    for axisName in sorted(warpDict):
        axisElement = ET.Element("axis")
        axisElement.attrib['name'] = axisName
        # One <map input=... output=...> per (in, out) pair.
        for inValue, outValue in warpDict[axisName]:
            mapElement = ET.Element("map")
            mapElement.attrib['input'] = str(inValue)
            mapElement.attrib['output'] = str(outValue)
            axisElement.append(mapElement)
        warpElement.append(axisElement)
    self.root.append(warpElement)
[ "def", "writeWarp", "(", "self", ",", "warpDict", ")", ":", "warpElement", "=", "ET", ".", "Element", "(", "\"warp\"", ")", "axisNames", "=", "sorted", "(", "warpDict", ".", "keys", "(", ")", ")", "for", "name", "in", "axisNames", ":", "axisElement", "...
42.285714
4.142857
def rev(self, i):
    """Return a clone with a different revision."""
    # Shallow-copy, then point the clone at the requested revision.
    clone = copy(self)
    clone.revision = i
    return clone
[ "def", "rev", "(", "self", ",", "i", ")", ":", "on", "=", "copy", "(", "self", ")", "on", ".", "revision", "=", "i", "return", "on" ]
27
16
def __select_nearest_ws(jsondata, latitude, longitude):
    """Select the nearest weatherstation.

    :param jsondata: parsed Buienradar feed
    :param latitude: reference latitude
    :param longitude: reference longitude
    :returns: the station-measurement dict closest to the coordinates,
        or None when the data is missing or no station qualifies
    """
    log.debug("__select_nearest_ws: latitude: %s, longitude: %s",
              latitude, longitude)
    dist = 0
    dist2 = 0
    loc_data = None

    try:
        ws_json = jsondata[__ACTUAL]
        ws_json = ws_json[__STATIONMEASUREMENTS]
    except (KeyError, TypeError):
        # The feed drops this section around midnight CE(S)T.
        log.warning("Missing section in Buienradar xmldata (%s)."
                    "Can happen 00:00-01:00 CE(S)T",
                    __STATIONMEASUREMENTS)
        return None

    for wstation in ws_json:
        dist2 = __get_ws_distance(wstation, latitude, longitude)
        if dist2 is not None:
            # Keep the closest station seen so far.
            if ((loc_data is None) or (dist2 < dist)):
                dist = dist2
                loc_data = wstation

    if loc_data is None:
        log.warning("No weatherstation selected; aborting...")
        return None
    else:
        try:
            log.debug("Selected weatherstation: code='%s', "
                      "name='%s', lat='%s', lon='%s'.",
                      loc_data[__STATIONID],
                      loc_data[__STATIONNAME],
                      loc_data[__LAT],
                      loc_data[__LON])
        except KeyError:
            # Station record lacks one of the display fields; selection
            # itself is still valid.
            log.debug("Selected weatherstation")
        return loc_data
[ "def", "__select_nearest_ws", "(", "jsondata", ",", "latitude", ",", "longitude", ")", ":", "log", ".", "debug", "(", "\"__select_nearest_ws: latitude: %s, longitude: %s\"", ",", "latitude", ",", "longitude", ")", "dist", "=", "0", "dist2", "=", "0", "loc_data", ...
32.948718
17.410256
def dict(self, iss_list=None):
    """
    Return the bundle of keys as a dictionary with the issuer IDs as
    the keys and the key sets represented as JWKS instances.

    :param iss_list: List of Issuer IDs that should be part of the output
    :rtype: Dictionary
    """
    result = {}
    for issuer, keyjar in self.bundle.items():
        # Skip issuers outside the requested subset (None means all).
        if iss_list is not None and issuer not in iss_list:
            continue
        try:
            result[issuer] = keyjar.export_jwks_as_json(issuer=issuer)
        except KeyError:
            # Fall back to the default export when the issuer key is absent.
            result[issuer] = keyjar.export_jwks_as_json()
    return result
[ "def", "dict", "(", "self", ",", "iss_list", "=", "None", ")", ":", "_int", "=", "{", "}", "for", "iss", ",", "kj", "in", "self", ".", "bundle", ".", "items", "(", ")", ":", "if", "iss_list", "is", "None", "or", "iss", "in", "iss_list", ":", "t...
36.117647
17.294118
def create_message(self):
    """Returns a message body to send in this email.

    Should be from email.mime.*
    """
    # The template is dedented so the source indentation does not leak
    # into the email body; fields come from the failing job's attributes.
    body = dedent("""\
        Received exception {exception} on {queue} from worker {worker}:

        {traceback}

        Payload:
        {payload}

        """).format(exception=self._exception,
                    traceback=self._traceback,
                    queue=self._queue,
                    payload=self._payload,
                    worker=self._worker)
    return MIMEText(body)
[ "def", "create_message", "(", "self", ")", ":", "body", "=", "dedent", "(", "\"\"\"\\\n Received exception {exception} on {queue} from worker {worker}:\n\n {traceback}\n\n Payload:\n {payload}\n\n \"\"\"", ")", ".", "format", "(", "exception", "=", ...
27.666667
18.611111
def make_proxy_method(cls, name): """Creates a proxy function that can be used by Flasks routing. The proxy instantiates the Mocha subclass and calls the appropriate method. :param name: the name of the method to create a proxy for """ i = cls() view = getattr(i, name) for decorator in cls.decorators: view = decorator(view) @functools.wraps(view) def proxy(**forgettable_view_args): # Always use the global request object's view_args, because they # can be modified by intervening function before an endpoint or # wrapper gets called. This matches Flask's behavior. del forgettable_view_args if hasattr(i, "before_request"): response = i.before_request(name, **request.view_args) if response is not None: return response before_view_name = "before_" + name if hasattr(i, before_view_name): before_view = getattr(i, before_view_name) response = before_view(**request.view_args) if response is not None: return response response = view(**request.view_args) # You can also return a dict or None, it will pass it to render if isinstance(response, dict) or response is None: response = response or {} if hasattr(i, "_renderer"): response = i._renderer(response) else: _template = build_endpoint_route_name(cls, view.__name__) _template = utils.list_replace([".", ":"], "/", _template) _template = "%s.%s" % (_template, cls.template_markup) # Set the title from the nav title, if not set _meta_title = getattr(g, "__META__", {}).get("title") if (not _meta_title or _meta_title == "") and get_view_attr(view, "title"): page_attr(title=get_view_attr(view, "title")) response.setdefault("_template", _template) response = i.render(**response) if not isinstance(response, Response): response = make_response(response) for ext in cls._ext: response = ext(response) after_view_name = "after_" + name if hasattr(i, after_view_name): after_view = getattr(i, after_view_name) response = after_view(response) if hasattr(i, "after_request"): response = i.after_request(name, response) return response return proxy
[ "def", "make_proxy_method", "(", "cls", ",", "name", ")", ":", "i", "=", "cls", "(", ")", "view", "=", "getattr", "(", "i", ",", "name", ")", "for", "decorator", "in", "cls", ".", "decorators", ":", "view", "=", "decorator", "(", "view", ")", "@", ...
38.971014
20.710145
def transaction_search(self, **kwargs): """Shortcut for the TransactionSearch method. Returns a PayPalResponseList object, which merges the L_ syntax list to a list of dictionaries with properly named keys. Note that the API will limit returned transactions to 100. Required Kwargs --------------- * STARTDATE Optional Kwargs --------------- STATUS = one of ['Pending','Processing','Success','Denied','Reversed'] """ plain = self._call('TransactionSearch', **kwargs) return PayPalResponseList(plain.raw, self.config)
[ "def", "transaction_search", "(", "self", ",", "*", "*", "kwargs", ")", ":", "plain", "=", "self", ".", "_call", "(", "'TransactionSearch'", ",", "*", "*", "kwargs", ")", "return", "PayPalResponseList", "(", "plain", ".", "raw", ",", "self", ".", "config...
33.722222
22.388889
def get(self, request, *args, **kwargs): """ Do the login and password protection. """ response = super(EntryProtectionMixin, self).get( request, *args, **kwargs) if self.object.login_required and not request.user.is_authenticated: return self.login() if (self.object.password and self.object.password != self.request.session.get(self.session_key % self.object.pk)): return self.password() return response
[ "def", "get", "(", "self", ",", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "response", "=", "super", "(", "EntryProtectionMixin", ",", "self", ")", ".", "get", "(", "request", ",", "*", "args", ",", "*", "*", "kwargs", ")", "...
41.833333
12.5
def transform(self, maps): """ This function transforms from component masses and caretsian spins to chi_p. Parameters ---------- maps : a mapping object Examples -------- Convert a dict of numpy.array: Returns ------- out : dict A dict with key as parameter name and value as numpy.array or float of transformed values. """ out = {} out["chi_p"] = conversions.chi_p( maps[parameters.mass1], maps[parameters.mass2], maps[parameters.spin1x], maps[parameters.spin1y], maps[parameters.spin2x], maps[parameters.spin2y]) return self.format_output(maps, out)
[ "def", "transform", "(", "self", ",", "maps", ")", ":", "out", "=", "{", "}", "out", "[", "\"chi_p\"", "]", "=", "conversions", ".", "chi_p", "(", "maps", "[", "parameters", ".", "mass1", "]", ",", "maps", "[", "parameters", ".", "mass2", "]", ",",...
31.791667
21.583333
def _do_subst(node, subs): """ Fetch the node contents and replace all instances of the keys with their values. For example, if subs is {'%VERSION%': '1.2345', '%BASE%': 'MyProg', '%prefix%': '/bin'}, then all instances of %VERSION% in the file will be replaced with 1.2345 and so forth. """ contents = node.get_text_contents() if subs: for (k, val) in subs: contents = re.sub(k, val, contents) if 'b' in TEXTFILE_FILE_WRITE_MODE: try: contents = bytearray(contents, 'utf-8') except UnicodeDecodeError: # contents is already utf-8 encoded python 2 str i.e. a byte array contents = bytearray(contents) return contents
[ "def", "_do_subst", "(", "node", ",", "subs", ")", ":", "contents", "=", "node", ".", "get_text_contents", "(", ")", "if", "subs", ":", "for", "(", "k", ",", "val", ")", "in", "subs", ":", "contents", "=", "re", ".", "sub", "(", "k", ",", "val", ...
34.238095
17
def date_map(doc, datemap_list, time_format=None): ''' For all the datetime fields in "datemap" find that key in doc and map the datetime object to a strftime string. This pprint and others will print out readable datetimes. ''' if datemap_list: for i in datemap_list: if isinstance(i, datetime): doc=CursorFormatter.date_map_field(doc, i, time_format=time_format) return doc
[ "def", "date_map", "(", "doc", ",", "datemap_list", ",", "time_format", "=", "None", ")", ":", "if", "datemap_list", ":", "for", "i", "in", "datemap_list", ":", "if", "isinstance", "(", "i", ",", "datetime", ")", ":", "doc", "=", "CursorFormatter", ".", ...
46.2
26.6
def stop_consumer(self): """Stop the consumer object and allow it to do a clean shutdown if it has the ability to do so. """ try: LOGGER.info('Shutting down the consumer') self.consumer.shutdown() except AttributeError: LOGGER.debug('Consumer does not have a shutdown method')
[ "def", "stop_consumer", "(", "self", ")", ":", "try", ":", "LOGGER", ".", "info", "(", "'Shutting down the consumer'", ")", "self", ".", "consumer", ".", "shutdown", "(", ")", "except", "AttributeError", ":", "LOGGER", ".", "debug", "(", "'Consumer does not ha...
34.4
14.6
def peng_mant(snum): r""" Return the mantissa of a number represented in engineering notation. :param snum: Number :type snum: :ref:`EngineeringNotationNumber` :rtype: float .. [[[cog cog.out(exobj_eng.get_sphinx_autodoc()) ]]] .. Auto-generated exceptions documentation for .. peng.functions.peng_mant :raises: RuntimeError (Argument \`snum\` is not valid) .. [[[end]]] For example: >>> import peng >>> peng.peng_mant(peng.peng(1235.6789E3, 3, False)) 1.236 """ snum = snum.rstrip() return float(snum if snum[-1].isdigit() else snum[:-1])
[ "def", "peng_mant", "(", "snum", ")", ":", "snum", "=", "snum", ".", "rstrip", "(", ")", "return", "float", "(", "snum", "if", "snum", "[", "-", "1", "]", ".", "isdigit", "(", ")", "else", "snum", "[", ":", "-", "1", "]", ")" ]
24.12
23.28
def iso_name_increment(name, is_dir=False, max_length=8): """Increment an ISO name to avoid name collision. Example: >>> iso_name_increment('foo.txt') 'foo1.txt' >>> iso_name_increment('bar10') 'bar11' >>> iso_name_increment('bar99', max_length=5) 'ba100' """ # Split the extension if needed if not is_dir and '.' in name: name, ext = name.rsplit('.') ext = '.{}'.format(ext) else: ext = '' # Find the position of the last letter for position, char in reversed(list(enumerate(name))): if char not in string.digits: break # Extract the numbers and the text from the name base, tag = name[:position+1], name[position+1:] tag = str(int(tag or 0) + 1) # Crop the text if the numbers are too long if len(tag) + len(base) > max_length: base = base[:max_length - len(tag)] # Return the name with the extension return ''.join([base, tag, ext])
[ "def", "iso_name_increment", "(", "name", ",", "is_dir", "=", "False", ",", "max_length", "=", "8", ")", ":", "# Split the extension if needed", "if", "not", "is_dir", "and", "'.'", "in", "name", ":", "name", ",", "ext", "=", "name", ".", "rsplit", "(", ...
29.333333
15.272727
def validate_bam(self, input_bam): """ Wrapper for Picard's ValidateSamFile. :param str input_bam: Path to file to validate. :return str: Command to run for the validation. """ cmd = self.tools.java + " -Xmx" + self.pm.javamem cmd += " -jar " + self.tools.picard + " ValidateSamFile" cmd += " INPUT=" + input_bam return cmd
[ "def", "validate_bam", "(", "self", ",", "input_bam", ")", ":", "cmd", "=", "self", ".", "tools", ".", "java", "+", "\" -Xmx\"", "+", "self", ".", "pm", ".", "javamem", "cmd", "+=", "\" -jar \"", "+", "self", ".", "tools", ".", "picard", "+", "\" Val...
35.090909
13.454545
def format_mode(sres): """ Format a line in the directory list based on the file's type and other attributes. """ mode = sres.st_mode root = (mode & 0o700) >> 6 group = (mode & 0o070) >> 3 user = (mode & 0o7) def stat_type(md): ''' stat type''' if stat.S_ISDIR(md): return 'd' elif stat.S_ISSOCK(md): return 's' else: return '-' def triple(md): ''' triple ''' return '%c%c%c' % ( 'r' if md & 0b100 else '-', 'w' if md & 0b010 else '-', 'x' if md & 0b001 else '-') return ''.join([stat_type(mode), triple(root), triple(group), triple(user)])
[ "def", "format_mode", "(", "sres", ")", ":", "mode", "=", "sres", ".", "st_mode", "root", "=", "(", "mode", "&", "0o700", ")", ">>", "6", "group", "=", "(", "mode", "&", "0o070", ")", ">>", "3", "user", "=", "(", "mode", "&", "0o7", ")", "def",...
22.37037
21.111111
def com_google_fonts_check_kerning_for_non_ligated_sequences(ttFont, ligatures, has_kerning_info): """Is there kerning info for non-ligated sequences?""" def look_for_nonligated_kern_info(table): for pairpos in table.SubTable: for i, glyph in enumerate(pairpos.Coverage.glyphs): if not hasattr(pairpos, 'PairSet'): continue for pairvalue in pairpos.PairSet[i].PairValueRecord: kern_pair = (glyph, pairvalue.SecondGlyph) if kern_pair in ligature_pairs: ligature_pairs.remove(kern_pair) def ligatures_str(pairs): result = [f"\t- {first} + {second}" for first, second in pairs] return "\n".join(result) if ligatures == -1: yield FAIL, Message("malformed", "Failed to lookup ligatures." " This font file seems to be malformed." " For more info, read:" " https://github.com" "/googlefonts/fontbakery/issues/1596") else: ligature_pairs = [] for first, comp in ligatures.items(): for components in comp: while components: pair = (first, components[0]) if pair not in ligature_pairs: ligature_pairs.append(pair) first = components[0] components.pop(0) for record in ttFont["GSUB"].table.FeatureList.FeatureRecord: if record.FeatureTag == 'kern': for index in record.Feature.LookupListIndex: lookup = ttFont["GSUB"].table.LookupList.Lookup[index] look_for_nonligated_kern_info(lookup) if ligature_pairs: yield WARN, Message("lacks-kern-info", ("GPOS table lacks kerning info for the following" " non-ligated sequences:\n" "{}\n\n ").format(ligatures_str(ligature_pairs))) else: yield PASS, ("GPOS table provides kerning info for " "all non-ligated sequences.")
[ "def", "com_google_fonts_check_kerning_for_non_ligated_sequences", "(", "ttFont", ",", "ligatures", ",", "has_kerning_info", ")", ":", "def", "look_for_nonligated_kern_info", "(", "table", ")", ":", "for", "pairpos", "in", "table", ".", "SubTable", ":", "for", "i", ...
40.145833
17.479167
def manager_required(func=None): """ Decorator for views that require not only a team but also that a user be logged in and be the manager or owner of that team. """ def decorator(view_func): @team_required @login_required @functools.wraps(view_func, assigned=available_attrs(view_func)) def _wrapped_view(request, *args, **kwargs): role = request.team.role_for(request.user) if role not in [Membership.ROLE_MANAGER, Membership.ROLE_OWNER]: raise Http404() return view_func(request, *args, **kwargs) return _wrapped_view if func: return decorator(func) return decorator
[ "def", "manager_required", "(", "func", "=", "None", ")", ":", "def", "decorator", "(", "view_func", ")", ":", "@", "team_required", "@", "login_required", "@", "functools", ".", "wraps", "(", "view_func", ",", "assigned", "=", "available_attrs", "(", "view_...
37.777778
16.222222
def makedirs_safe(fulldir): """Creates a directory if it does not exists. Takes into consideration concurrent access support. Works like the shell's 'mkdir -p'. """ try: if not os.path.exists(fulldir): os.makedirs(fulldir) except OSError as exc: # Python >2.5 import errno if exc.errno == errno.EEXIST: pass else: raise
[ "def", "makedirs_safe", "(", "fulldir", ")", ":", "try", ":", "if", "not", "os", ".", "path", ".", "exists", "(", "fulldir", ")", ":", "os", ".", "makedirs", "(", "fulldir", ")", "except", "OSError", "as", "exc", ":", "# Python >2.5", "import", "errno"...
30.545455
16.272727
def sanitize(self, table_name, column_name, value): """ Sanitizes given value extracted from the database according to the sanitation configuration. TODO: Add support for dates, booleans and other types found in SQL than string. :param table_name: Name of the database table from which the value is from. :type table_name: str :param column_name: Name of the database column from which the value is from. :type column_name: str :param value: Value from the database, either in text form or None if the value is null. :type value: str|None :return: Sanitized version of the given value. :rtype: str|None """ sanitizer_callback = self.get_sanitizer_for(table_name, column_name) return sanitizer_callback(value) if sanitizer_callback else value
[ "def", "sanitize", "(", "self", ",", "table_name", ",", "column_name", ",", "value", ")", ":", "sanitizer_callback", "=", "self", ".", "get_sanitizer_for", "(", "table_name", ",", "column_name", ")", "return", "sanitizer_callback", "(", "value", ")", "if", "sa...
37.08
23
def _UnserializableObjectFallback(self, obj): """Handles sanitizing of unserializable objects for Json. For instances of heap types, we take the class dict, augment it with the instance's __dict__, tag it and transmit it over to the RPC client to be reconstructed there. (Works with both old and new style classes) Args: obj: The object to Json-serialize Returns: A Json-serializable version of the parameter """ if isinstance(obj, libpython.PyInstanceObjectPtr): # old-style classes use 'classobj'/'instance' # get class attribute dictionary in_class = obj.pyop_field('in_class') result_dict = in_class.pyop_field('cl_dict').proxyval(set()) # let libpython.py do the work of getting the instance dict instanceproxy = obj.proxyval(set()) result_dict.update(instanceproxy.attrdict) result_dict['__pyringe_type_name__'] = instanceproxy.cl_name result_dict['__pyringe_address__'] = instanceproxy.address return result_dict if isinstance(obj, libpython.HeapTypeObjectPtr): # interestingly enough, HeapTypeObjectPtr seems to handle all pointers to # heap type PyObjects, not only pointers to PyHeapTypeObject. This # corresponds to new-style class instances. However, as all instances of # new-style classes are simple PyObject pointers to the interpreter, # libpython.py tends to give us HeapTypeObjectPtrs for things we can't # handle properly. 
try: # get class attribute dictionary type_ptr = obj.field('ob_type') tp_dict = type_ptr.cast(GdbCache.TYPE)['tp_dict'].cast(GdbCache.DICT) result_dict = libpython.PyDictObjectPtr(tp_dict).proxyval(set()) except gdb.error: # There was probably a type mismatch triggered by wrong assumptions in # libpython.py result_dict = {} try: # get instance attributes result_dict.update(obj.get_attr_dict().proxyval(set())) result_dict['__pyringe_type_name__'] = obj.safe_tp_name() result_dict['__pyringe_address__'] = long(obj._gdbval) # pylint: disable=protected-access return result_dict except TypeError: # This happens in the case where we're not really looking at a heap type # instance. There isn't really anything we can do, so we fall back to # the default handling. pass # Default handler -- this does not result in proxy objects or fancy dicts, # but most of the time, we end up emitting strings of the format # '<object at remote 0x345a235>' try: proxy = obj.proxyval(set()) # json doesn't accept non-strings as keys, so we're helping along if isinstance(proxy, dict): return {str(key): val for key, val in proxy.iteritems()} return proxy except AttributeError: return str(obj)
[ "def", "_UnserializableObjectFallback", "(", "self", ",", "obj", ")", ":", "if", "isinstance", "(", "obj", ",", "libpython", ".", "PyInstanceObjectPtr", ")", ":", "# old-style classes use 'classobj'/'instance'", "# get class attribute dictionary", "in_class", "=", "obj", ...
44.015625
22.515625
def loadXMLGenericData(filename): # not tested """Read any type of vtk data object encoded in XML format. Return an ``Actor(vtkActor)`` object.""" reader = vtk.vtkXMLGenericDataObjectReader() reader.SetFileName(filename) reader.Update() return Actor(reader.GetOutput())
[ "def", "loadXMLGenericData", "(", "filename", ")", ":", "# not tested", "reader", "=", "vtk", ".", "vtkXMLGenericDataObjectReader", "(", ")", "reader", ".", "SetFileName", "(", "filename", ")", "reader", ".", "Update", "(", ")", "return", "Actor", "(", "reader...
47.5
8
def parse_ls(out): """ Parser for Android's ls -lR output. Takes a string, returns parsed structure. """ # assumed ls -lR line format: # -rw-r--r-- root shell 0 2013-07-05 02:26 tasks # drwxr-xr-x root root 2013-07-05 02:26 log # brw------- root root 179, 0 2013-07-05 02:26 mmcblk0 # lrwxrwxrwx root root 2013-07-05 02:34 subsystem -> ../class/bdi # CAVE: format may change through versions. # TODO: implement plausibility test. mode = r'^(.)' field = r'([^ ]+) +' dev = r'(\d+), +(\d+) ' date = r'(\d{4}\-\d{2}\-\d{2} \d{2}:\d{2}) ' name = r'(.+)$' link = r'(.+) -> (.+)$' logger = get_default_logger() # adb returns newline as \r\n # but mozdevice uses \n for dirstr in out[2:-2].split('\r\n\r\n'): lines = dirstr.split('\r\n') dirname = lines[0][:-1] if len(lines) == 2 and lines[1].startswith("opendir failed"): continue for filestr in lines[1:]: if filestr.endswith(": No such file or directory"): continue if filestr.endswith(": Permission denied"): continue specs = None if filestr[0] in 'dsp': # directory, socket, pipe regexp = mode + field * 3 + date + name m = re.search(regexp, filestr) specs = { 'mode': m.group(1), 'perm': m.group(2), 'uid': m.group(3), 'gid': m.group(4), 'date': m.group(5), 'name': m.group(6) } elif filestr[0] == 'l': # symbolic link regexp = mode + field * 3 + date + link m = re.search(regexp, filestr) specs = { 'mode': m.group(1), 'perm': m.group(2), 'uid': m.group(3), 'gid': m.group(4), 'date': m.group(5), 'name': m.group(6), 'link': m.group(7) } elif filestr[0] in 'cb': # device regexp = mode + field * 3 + dev + date + name m = re.search(regexp, filestr) specs = { 'mode': m.group(1), 'perm': m.group(2), 'uid': m.group(3), 'gid': m.group(4), 'major': m.group(5), 'minor': m.group(6), 'date': m.group(7), 'name': m.group(8) } else: # rest try: regexp = mode + field * 4 + date + name m = re.search(regexp, filestr) specs = { 'mode': m.group(1), 'perm': m.group(2), 'uid': m.group(3), 'gid': m.group(4), 'size': m.group(5), 'date': m.group(6), 'name': 
m.group(7) } except: logger.error("parse error on %s" % filestr) try: specs['name'] = '/' + os.path.relpath("%s/%s" % (dirname, specs['name']), '/') if 'link' in specs.keys(): specs['link'] = '/' + os.path.relpath("%s/%s" % (dirname, specs['link']), '/') except: logger.warning("no name from %s" % filestr) yield specs
[ "def", "parse_ls", "(", "out", ")", ":", "# assumed ls -lR line format:", "# -rw-r--r-- root shell 0 2013-07-05 02:26 tasks", "# drwxr-xr-x root root 2013-07-05 02:26 log", "# brw------- root root 179, 0 2013-07-05 02:26 mmcblk0", "# lrwxrwxrwx root r...
35.412371
14.443299
def verify_mass_range_options(opts, parser, nonSpin=False): """ Parses the metric calculation options given and verifies that they are correct. Parameters ---------- opts : argparse.Values instance Result of parsing the input options with OptionParser parser : object The OptionParser instance. nonSpin : boolean, optional (default=False) If this is provided the spin-related options will not be checked. """ # Mass1 must be the heavier! if opts.min_mass1 < opts.min_mass2: parser.error("min-mass1 cannot be less than min-mass2!") if opts.max_mass1 < opts.max_mass2: parser.error("max-mass1 cannot be less than max-mass2!") # If given are min/max total mass/chirp mass possible? if opts.min_total_mass \ and (opts.min_total_mass > opts.max_mass1 + opts.max_mass2): err_msg = "Supplied minimum total mass %f " %(opts.min_total_mass,) err_msg += "greater than the sum of the two max component masses " err_msg += " %f and %f." %(opts.max_mass1,opts.max_mass2) parser.error(err_msg) if opts.max_total_mass \ and (opts.max_total_mass < opts.min_mass1 + opts.min_mass2): err_msg = "Supplied maximum total mass %f " %(opts.max_total_mass,) err_msg += "smaller than the sum of the two min component masses " err_msg += " %f and %f." %(opts.min_mass1,opts.min_mass2) parser.error(err_msg) if opts.max_total_mass and opts.min_total_mass \ and (opts.max_total_mass < opts.min_total_mass): parser.error("Min total mass must be larger than max total mass.") # Warn the user that his/her setup is such that EM dim NS-BH binaries # will not be targeted by the template bank that is being built. Also # inform him/her about the caveats involved in this. if hasattr(opts, 'remnant_mass_threshold') \ and opts.remnant_mass_threshold is not None: logging.info("""You have asked to exclude EM dim NS-BH systems from the target parameter space. The script will assume that m1 is the BH and m2 is the NS: make sure that your settings respect this convention. 
The script will also treat the NS as non-spinning: use NS spins in the template bank at your own risk!""") if opts.use_eos_max_ns_mass: logging.info("""You have asked to take into account the maximum NS mass value for the EOS in use.""") # Find out if the EM constraint surface data already exists or not # and inform user whether this will be read from file or generated. # This is the minumum eta as a function of BH spin and NS mass # required to produce an EM counterpart if os.path.isfile('constraint_em_bright.npz'): logging.info("""The constraint surface for EM bright binaries will be read in from constraint_em_bright.npz.""") # Assign min/max total mass from mass1, mass2 if not specified if (not opts.min_total_mass) or \ ((opts.min_mass1 + opts.min_mass2) > opts.min_total_mass): opts.min_total_mass = opts.min_mass1 + opts.min_mass2 if (not opts.max_total_mass) or \ ((opts.max_mass1 + opts.max_mass2) < opts.max_total_mass): opts.max_total_mass = opts.max_mass1 + opts.max_mass2 # It is vital that min and max total mass be set correctly. # This is becasue the heavily-used function get_random_mass will place # points first in total mass (to some power), and then in eta. If the total # mass limits are not well known ahead of time it will place unphysical # points and fail. # This test is a bit convoluted as we identify the maximum and minimum # possible total mass from chirp mass and/or eta restrictions. if opts.min_chirp_mass is not None: # Need to get the smallest possible min_tot_mass from this chirp mass # There are 4 possibilities for where the min_tot_mass is found on the # line of min_chirp_mass that interacts with the component mass limits. # Either it is found at max_m2, or at min_m1, or it starts on the equal # mass line within the parameter space, or it doesn't intersect # at all. 
# First let's get the masses at both of these possible points m1_at_max_m2 = pnutils.mchirp_mass1_to_mass2(opts.min_chirp_mass, opts.max_mass2) if m1_at_max_m2 < opts.max_mass2: # Unphysical, remove m1_at_max_m2 = -1 m2_at_min_m1 = pnutils.mchirp_mass1_to_mass2(opts.min_chirp_mass, opts.min_mass1) if m2_at_min_m1 > opts.min_mass1: # Unphysical, remove m2_at_min_m1 = -1 # Get the values on the equal mass line m1_at_equal_mass, m2_at_equal_mass = pnutils.mchirp_eta_to_mass1_mass2( opts.min_chirp_mass, 0.25) # Are any of these possible? if m1_at_max_m2 <= opts.max_mass1 and m1_at_max_m2 >= opts.min_mass1: min_tot_mass = opts.max_mass2 + m1_at_max_m2 elif m2_at_min_m1 <= opts.max_mass2 and m2_at_min_m1 >= opts.min_mass2: min_tot_mass = opts.min_mass1 + m2_at_min_m1 elif m1_at_equal_mass <= opts.max_mass1 and \ m1_at_equal_mass >= opts.min_mass1 and \ m2_at_equal_mass <= opts.max_mass2 and \ m2_at_equal_mass >= opts.min_mass2: min_tot_mass = m1_at_equal_mass + m2_at_equal_mass # So either the restriction is low enough to be redundant, or is # removing all the parameter space elif m2_at_min_m1 < opts.min_mass2: # This is the redundant case, ignore min_tot_mass = opts.min_total_mass else: # And this is the bad case err_msg = "The minimum chirp mass provided is not possible given " err_msg += "restrictions on component masses." raise ValueError(err_msg) # Is there also an eta restriction? if opts.max_eta: # Get the value of m1,m2 at max_eta, min_chirp_mass max_eta_m1, max_eta_m2 = pnutils.mchirp_eta_to_mass1_mass2( opts.min_chirp_mass, opts.max_eta) max_eta_min_tot_mass = max_eta_m1 + max_eta_m2 if max_eta_min_tot_mass > min_tot_mass: # Okay, eta does restrict this further. Still physical? min_tot_mass = max_eta_min_tot_mass if max_eta_m1 > opts.max_mass1: err_msg = "The combination of component mass, chirp " err_msg += "mass, eta and (possibly) total mass limits " err_msg += "have precluded all systems." 
raise ValueError(err_msg) # Update min_tot_mass if needed if min_tot_mass > opts.min_total_mass: opts.min_total_mass = float(min_tot_mass) # Then need to do max_chirp_mass and min_eta if opts.max_chirp_mass is not None: # Need to get the largest possible maxn_tot_mass from this chirp mass # There are 3 possibilities for where the max_tot_mass is found on the # line of max_chirp_mass that interacts with the component mass limits. # Either it is found at min_m2, or at max_m1, or it doesn't intersect # at all. # First let's get the masses at both of these possible points m1_at_min_m2 = pnutils.mchirp_mass1_to_mass2(opts.max_chirp_mass, opts.min_mass2) m2_at_max_m1 = pnutils.mchirp_mass1_to_mass2(opts.max_chirp_mass, opts.max_mass1) # Are either of these possible? if m1_at_min_m2 <= opts.max_mass1 and m1_at_min_m2 >= opts.min_mass1: max_tot_mass = opts.min_mass2 + m1_at_min_m2 elif m2_at_max_m1 <= opts.max_mass2 and m2_at_max_m1 >= opts.min_mass2: max_tot_mass = opts.max_mass1 + m2_at_max_m1 # So either the restriction is low enough to be redundant, or is # removing all the paramter space elif m2_at_max_m1 > opts.max_mass2: # This is the redundant case, ignore max_tot_mass = opts.max_total_mass else: # And this is the bad case err_msg = "The maximum chirp mass provided is not possible given " err_msg += "restrictions on component masses." raise ValueError(err_msg) # Is there also an eta restriction? if opts.min_eta: # Get the value of m1,m2 at max_eta, min_chirp_mass min_eta_m1, min_eta_m2 = pnutils.mchirp_eta_to_mass1_mass2( opts.max_chirp_mass, opts.min_eta) min_eta_max_tot_mass = min_eta_m1 + min_eta_m2 if min_eta_max_tot_mass < max_tot_mass: # Okay, eta does restrict this further. Still physical? max_tot_mass = min_eta_max_tot_mass if min_eta_m1 < opts.min_mass1: err_msg = "The combination of component mass, chirp " err_msg += "mass, eta and (possibly) total mass limits " err_msg += "have precluded all systems." 
raise ValueError(err_msg) # Update min_tot_mass if needed if max_tot_mass < opts.max_total_mass: opts.max_total_mass = float(max_tot_mass) # Need to check max_eta alone for minimum and maximum mass if opts.max_eta: # Similar to above except this can affect both the minimum and maximum # total mass. Need to identify where the line of max_eta intersects # the parameter space, and if it affects mass restrictions. m1_at_min_m2 = pnutils.eta_mass1_to_mass2(opts.max_eta, opts.min_mass2, return_mass_heavier=True) m2_at_min_m1 = pnutils.eta_mass1_to_mass2(opts.max_eta, opts.min_mass1, return_mass_heavier=False) m1_at_max_m2 = pnutils.eta_mass1_to_mass2(opts.max_eta, opts.max_mass2, return_mass_heavier=True) m2_at_max_m1 = pnutils.eta_mass1_to_mass2(opts.max_eta, opts.max_mass1, return_mass_heavier=False) # Check for restrictions on the minimum total mass # Are either of these possible? if m1_at_min_m2 <= opts.max_mass1 and m1_at_min_m2 >= opts.min_mass1: min_tot_mass = opts.min_mass2 + m1_at_min_m2 elif m2_at_min_m1 <= opts.max_mass2 and m2_at_min_m1 >= opts.min_mass2: # This case doesn't change the minimal total mass min_tot_mass = opts.min_total_mass # So either the restriction is low enough to be redundant, or is # removing all the paramter space elif m2_at_min_m1 > opts.max_mass2: # This is the redundant case, ignore min_tot_mass = opts.min_total_mass elif opts.max_eta == 0.25 and (m1_at_min_m2 < opts.min_mass2 or \ m2_at_min_m1 > opts.min_mass1): # This just catches potential roundoff issues in the case that # max-eta is not used min_tot_mass = opts.min_total_mass else: # And this is the bad case err_msg = "The maximum eta provided is not possible given " err_msg += "restrictions on component masses." 
print(m1_at_min_m2, m2_at_min_m1, m1_at_max_m2, m2_at_max_m1) print(opts.min_mass1, opts.max_mass1, opts.min_mass2, opts.max_mass2) raise ValueError(err_msg) # Update min_tot_mass if needed if min_tot_mass > opts.min_total_mass: opts.min_total_mass = float(min_tot_mass) # Check for restrictions on the maximum total mass # Are either of these possible? if m2_at_max_m1 <= opts.max_mass2 and m2_at_max_m1 >= opts.min_mass2: max_tot_mass = opts.max_mass1 + m2_at_max_m1 elif m1_at_max_m2 <= opts.max_mass1 and m1_at_max_m2 >= opts.min_mass1: # This case doesn't change the maximal total mass max_tot_mass = opts.max_total_mass # So either the restriction is low enough to be redundant, or is # removing all the paramter space, the latter case is already tested else: # This is the redundant case, ignore max_tot_mass = opts.max_total_mass if max_tot_mass < opts.max_total_mass: opts.max_total_mass = float(max_tot_mass) # Need to check min_eta alone for maximum and minimum total mass if opts.min_eta: # Same as max_eta. # Need to identify where the line of max_eta intersects # the parameter space, and if it affects mass restrictions. m1_at_min_m2 = pnutils.eta_mass1_to_mass2(opts.min_eta, opts.min_mass2, return_mass_heavier=True) m2_at_min_m1 = pnutils.eta_mass1_to_mass2(opts.min_eta, opts.min_mass1, return_mass_heavier=False) m1_at_max_m2 = pnutils.eta_mass1_to_mass2(opts.min_eta, opts.max_mass2, return_mass_heavier=True) m2_at_max_m1 = pnutils.eta_mass1_to_mass2(opts.min_eta, opts.max_mass1, return_mass_heavier=False) # Check for restrictions on the maximum total mass # Are either of these possible? 
if m1_at_max_m2 <= opts.max_mass1 and m1_at_max_m2 >= opts.min_mass1: max_tot_mass = opts.max_mass2 + m1_at_max_m2 elif m2_at_max_m1 <= opts.max_mass2 and m2_at_max_m1 >= opts.min_mass2: # This case doesn't affect the maximum total mass max_tot_mass = opts.max_total_mass # So either the restriction is low enough to be redundant, or is # removing all the paramter space elif m2_at_max_m1 < opts.min_mass2: # This is the redundant case, ignore max_tot_mass = opts.max_total_mass else: # And this is the bad case err_msg = "The minimum eta provided is not possible given " err_msg += "restrictions on component masses." raise ValueError(err_msg) # Update min_tot_mass if needed if max_tot_mass < opts.max_total_mass: opts.max_total_mass = float(max_tot_mass) # Check for restrictions on the minimum total mass # Are either of these possible? if m2_at_min_m1 <= opts.max_mass2 and m2_at_min_m1 >= opts.min_mass2: min_tot_mass = opts.min_mass1 + m2_at_min_m1 elif m1_at_min_m2 <= opts.max_mass1 and m1_at_min_m2 >= opts.min_mass1: # This case doesn't change the maximal total mass min_tot_mass = opts.min_total_mass # So either the restriction is low enough to be redundant, or is # removing all the paramter space, which is tested above else: # This is the redundant case, ignore min_tot_mass = opts.min_total_mass if min_tot_mass > opts.min_total_mass: opts.min_total_mass = float(min_tot_mass) if opts.max_total_mass < opts.min_total_mass: err_msg = "After including restrictions on chirp mass, component mass, " err_msg += "eta and total mass, no physical systems are possible." 
raise ValueError(err_msg) if opts.max_eta and opts.min_eta and (opts.max_eta < opts.min_eta): parser.error("--max-eta must be larger than --min-eta.") if nonSpin: return if opts.max_ns_spin_mag is None: if opts.nsbh_flag: parser.error("Must supply --max_ns_spin_mag with --nsbh-flag") # Can ignore this if no NSs will be generated elif opts.min_mass2 < (opts.ns_bh_boundary_mass or massRangeParameters.default_nsbh_boundary_mass): parser.error("Must supply --max-ns-spin-mag for the chosen" " value of --min_mass2") else: opts.max_ns_spin_mag = opts.max_bh_spin_mag if opts.max_bh_spin_mag is None: if opts.nsbh_flag: parser.error("Must supply --max_bh_spin_mag with --nsbh-flag") # Can ignore this if no BHs will be generated if opts.max_mass1 >= (opts.ns_bh_boundary_mass or massRangeParameters.default_nsbh_boundary_mass): parser.error("Must supply --max-bh-spin-mag for the chosen" " value of --max_mass1") else: opts.max_bh_spin_mag = opts.max_ns_spin_mag
[ "def", "verify_mass_range_options", "(", "opts", ",", "parser", ",", "nonSpin", "=", "False", ")", ":", "# Mass1 must be the heavier!", "if", "opts", ".", "min_mass1", "<", "opts", ".", "min_mass2", ":", "parser", ".", "error", "(", "\"min-mass1 cannot be less tha...
53.512739
21.33121
def get_source(self, objtxt): """Get object source""" from spyder_kernels.utils.dochelpers import getsource obj, valid = self._eval(objtxt) if valid: return getsource(obj)
[ "def", "get_source", "(", "self", ",", "objtxt", ")", ":", "from", "spyder_kernels", ".", "utils", ".", "dochelpers", "import", "getsource", "obj", ",", "valid", "=", "self", ".", "_eval", "(", "objtxt", ")", "if", "valid", ":", "return", "getsource", "(...
30
14.714286
def kill(self, exit_code: Any = None): """ Stops the behaviour Args: exit_code (object, optional): the exit code of the behaviour (Default value = None) """ self._force_kill.set() if exit_code is not None: self._exit_code = exit_code logger.info("Killing behavior {0} with exit code: {1}".format(self, exit_code))
[ "def", "kill", "(", "self", ",", "exit_code", ":", "Any", "=", "None", ")", ":", "self", ".", "_force_kill", ".", "set", "(", ")", "if", "exit_code", "is", "not", "None", ":", "self", ".", "_exit_code", "=", "exit_code", "logger", ".", "info", "(", ...
31.833333
20
def add_in_filter(self, *values): """ Add a filter using "IN" logic. This is typically the primary filter that will be used to find a match and generally combines other filters to get more granular. An example of usage would be searching for an IP address (or addresses) in a specific log field. Or looking for an IP address in multiple log fields. .. seealso:: :class:`smc_monitoring.models.filters.InFilter` for examples. :param values: optional constructor args for :class:`smc_monitoring.models.filters.InFilter` :rtype: InFilter """ filt = InFilter(*values) self.update_filter(filt) return filt
[ "def", "add_in_filter", "(", "self", ",", "*", "values", ")", ":", "filt", "=", "InFilter", "(", "*", "values", ")", "self", ".", "update_filter", "(", "filt", ")", "return", "filt" ]
42.176471
20.176471
def _create_command_file(self, expect, send): """Internal function. Do not use. Takes a long command, and puts it in an executable file ready to run. Returns the filename. """ shutit = self.shutit random_id = shutit_util.random_id() fname = shutit_global.shutit_global_object.shutit_state_dir + '/tmp_' + random_id working_str = send # truncate -s must be used as --size is not supported everywhere (eg busybox) assert not self.sendline(ShutItSendSpec(self, send=' truncate -s 0 '+ fname, ignore_background=True)), shutit_util.print_debug() self.expect(expect) size = shutit_global.shutit_global_object.line_limit while working_str: curr_str = working_str[:size] working_str = working_str[size:] assert not self.sendline(ShutItSendSpec(self, send=' ' + shutit.get_command('head') + ''' -c -1 >> ''' + fname + """ << 'END_""" + random_id + """'\n""" + curr_str + """\nEND_""" + random_id, ignore_background=True)), shutit_util.print_debug() self.expect(expect) assert not self.sendline(ShutItSendSpec(self, send=' chmod +x ' + fname, ignore_background=True)), shutit_util.print_debug() self.expect(expect) return fname
[ "def", "_create_command_file", "(", "self", ",", "expect", ",", "send", ")", ":", "shutit", "=", "self", ".", "shutit", "random_id", "=", "shutit_util", ".", "random_id", "(", ")", "fname", "=", "shutit_global", ".", "shutit_global_object", ".", "shutit_state_...
51.518519
22.037037
def config_value_keys(self, sortkey = False): """ Return configuration keys directly stored in this node. Configurations in child nodes are not included. """ if sortkey: items = sorted(self.items()) else: items = self.items() return (k for k,v in items if not isinstance(v,ConfigTree))
[ "def", "config_value_keys", "(", "self", ",", "sortkey", "=", "False", ")", ":", "if", "sortkey", ":", "items", "=", "sorted", "(", "self", ".", "items", "(", ")", ")", "else", ":", "items", "=", "self", ".", "items", "(", ")", "return", "(", "k", ...
38.777778
17.666667
def wait_lock(path, lock_fn=None, timeout=5, sleep=0.1, time_start=None): ''' Obtain a write lock. If one exists, wait for it to release first ''' if not isinstance(path, six.string_types): raise FileLockError('path must be a string') if lock_fn is None: lock_fn = path + '.w' if time_start is None: time_start = time.time() obtained_lock = False def _raise_error(msg, race=False): ''' Raise a FileLockError ''' raise FileLockError(msg, time_start=time_start) try: if os.path.exists(lock_fn) and not os.path.isfile(lock_fn): _raise_error( 'lock_fn {0} exists and is not a file'.format(lock_fn) ) open_flags = os.O_CREAT | os.O_EXCL | os.O_WRONLY while time.time() - time_start < timeout: try: # Use os.open() to obtain filehandle so that we can force an # exception if the file already exists. Concept found here: # http://stackoverflow.com/a/10979569 fh_ = os.open(lock_fn, open_flags) except (IOError, OSError) as exc: if exc.errno != errno.EEXIST: _raise_error( 'Error {0} encountered obtaining file lock {1}: {2}' .format(exc.errno, lock_fn, exc.strerror) ) log.trace( 'Lock file %s exists, sleeping %f seconds', lock_fn, sleep ) time.sleep(sleep) else: # Write the lock file with os.fdopen(fh_, 'w'): pass # Lock successfully acquired log.trace('Write lock %s obtained', lock_fn) obtained_lock = True # Transfer control back to the code inside the with block yield # Exit the loop break else: _raise_error( 'Timeout of {0} seconds exceeded waiting for lock_fn {1} ' 'to be released'.format(timeout, lock_fn) ) except FileLockError: raise except Exception as exc: _raise_error( 'Error encountered obtaining file lock {0}: {1}'.format( lock_fn, exc ) ) finally: if obtained_lock: os.remove(lock_fn) log.trace('Write lock for %s (%s) released', path, lock_fn)
[ "def", "wait_lock", "(", "path", ",", "lock_fn", "=", "None", ",", "timeout", "=", "5", ",", "sleep", "=", "0.1", ",", "time_start", "=", "None", ")", ":", "if", "not", "isinstance", "(", "path", ",", "six", ".", "string_types", ")", ":", "raise", ...
33.445946
21.364865
def cmd_up(self, args): '''adjust TRIM_PITCH_CD up by 5 degrees''' if len(args) == 0: adjust = 5.0 else: adjust = float(args[0]) old_trim = self.get_mav_param('TRIM_PITCH_CD', None) if old_trim is None: print("Existing trim value unknown!") return new_trim = int(old_trim + (adjust*100)) if math.fabs(new_trim - old_trim) > 1000: print("Adjustment by %d too large (from %d to %d)" % (adjust*100, old_trim, new_trim)) return print("Adjusting TRIM_PITCH_CD from %d to %d" % (old_trim, new_trim)) self.param_set('TRIM_PITCH_CD', new_trim)
[ "def", "cmd_up", "(", "self", ",", "args", ")", ":", "if", "len", "(", "args", ")", "==", "0", ":", "adjust", "=", "5.0", "else", ":", "adjust", "=", "float", "(", "args", "[", "0", "]", ")", "old_trim", "=", "self", ".", "get_mav_param", "(", ...
41.5
18.375
def register(cls, name, klass, content_type='text/plain'): """ Register an emitter. Parameters:: - `name`: The name of the emitter ('json', 'xml', 'yaml', ...) - `klass`: The emitter class. - `content_type`: The content type to serve response as. """ cls.EMITTERS[name] = (klass, content_type)
[ "def", "register", "(", "cls", ",", "name", ",", "klass", ",", "content_type", "=", "'text/plain'", ")", ":", "cls", ".", "EMITTERS", "[", "name", "]", "=", "(", "klass", ",", "content_type", ")" ]
35.2
15.8
def commit_hash(dir='.'): """ Return commit hash for HEAD of checked out branch of the specified directory. """ cmd = ['git', 'rev-parse', 'HEAD'] try: with open(os.devnull, 'w') as devnull: revision_hash = subprocess.check_output( cmd, cwd=dir, stderr=devnull ) if sys.version_info.major > 2: revision_hash = revision_hash.decode('ascii') return revision_hash.strip() except subprocess.CalledProcessError: return None
[ "def", "commit_hash", "(", "dir", "=", "'.'", ")", ":", "cmd", "=", "[", "'git'", ",", "'rev-parse'", ",", "'HEAD'", "]", "try", ":", "with", "open", "(", "os", ".", "devnull", ",", "'w'", ")", "as", "devnull", ":", "revision_hash", "=", "subprocess"...
24.772727
17.318182
def read(self, uri=None, resources=None, index_only=False): """Read sitemap from a URI including handling sitemapindexes. If index_only is True then individual sitemaps references in a sitemapindex will not be read. This will result in no resources being returned and is useful only to read the metadata and links listed in the sitemapindex. Includes the subtlety that if the input URI is a local file and is a sitemapindex which contains URIs for the individual sitemaps, then these are mapped to the filesystem also. """ try: fh = URLopener().open(uri) self.num_files += 1 except IOError as e: raise IOError( "Failed to load sitemap/sitemapindex from %s (%s)" % (uri, str(e))) # Get the Content-Length if we can (works fine for local files) try: self.content_length = int(fh.info()['Content-Length']) self.bytes_read += self.content_length self.logger.debug( "Read %d bytes from %s" % (self.content_length, uri)) except KeyError: # If we don't get a length then c'est la vie self.logger.debug("Read ????? bytes from %s" % (uri)) pass self.logger.info("Read sitemap/sitemapindex from %s" % (uri)) s = self.new_sitemap() s.parse_xml(fh=fh, resources=self, capability=self.capability_name) # what did we read? sitemap or sitemapindex? 
if (s.parsed_index): # sitemapindex if (not self.allow_multifile): raise ListBaseIndexError( "Got sitemapindex from %s but support for sitemapindex disabled" % (uri)) self.logger.info( "Parsed as sitemapindex, %d sitemaps" % (len( self.resources))) sitemapindex_is_file = self.is_file_uri(uri) if (index_only): # don't read the component sitemaps self.sitemapindex = True return # now loop over all entries to read each sitemap and add to # resources sitemaps = self.resources self.resources = self.resources_class() self.logger.info("Now reading %d sitemaps" % len(sitemaps.uris())) for sitemap_uri in sorted(sitemaps.uris()): self.read_component_sitemap( uri, sitemap_uri, s, sitemapindex_is_file) else: # sitemap self.logger.info("Parsed as sitemap, %d resources" % (len(self.resources)))
[ "def", "read", "(", "self", ",", "uri", "=", "None", ",", "resources", "=", "None", ",", "index_only", "=", "False", ")", ":", "try", ":", "fh", "=", "URLopener", "(", ")", ".", "open", "(", "uri", ")", "self", ".", "num_files", "+=", "1", "excep...
44.483333
18.216667
def get_prefix(): """Global prefix to identify ncluster created resources name used to identify ncluster created resources, (name of EFS, VPC, keypair prefixes), can be changed through $NCLUSTER_PREFIX for debugging purposes. """ name = os.environ.get('NCLUSTER_PREFIX', DEFAULT_PREFIX) if name != DEFAULT_PREFIX: validate_prefix(name) return name
[ "def", "get_prefix", "(", ")", ":", "name", "=", "os", ".", "environ", ".", "get", "(", "'NCLUSTER_PREFIX'", ",", "DEFAULT_PREFIX", ")", "if", "name", "!=", "DEFAULT_PREFIX", ":", "validate_prefix", "(", "name", ")", "return", "name" ]
44.375
16.875
def rgb_to_name(rgb_triplet, spec=u'css3'): """ Convert a 3-tuple of integers, suitable for use in an ``rgb()`` color triplet, to its corresponding normalized color name, if any such name exists. The optional keyword argument ``spec`` determines which specification's list of color names will be used; valid values are ``html4``, ``css2``, ``css21`` and ``css3``, and the default is ``css3``. If there is no matching name, ``ValueError`` is raised. """ return hex_to_name( rgb_to_hex( normalize_integer_triplet( rgb_triplet ) ), spec=spec )
[ "def", "rgb_to_name", "(", "rgb_triplet", ",", "spec", "=", "u'css3'", ")", ":", "return", "hex_to_name", "(", "rgb_to_hex", "(", "normalize_integer_triplet", "(", "rgb_triplet", ")", ")", ",", "spec", "=", "spec", ")" ]
28.818182
22.181818
def get_change(self, change_id): """ Get information about a proposed set of changes, as submitted by the change_rrsets method. Returns a Python data structure with status information about the changes. :type change_id: str :param change_id: The unique identifier for the set of changes. This ID is returned in the response to the change_rrsets method. """ uri = '/%s/change/%s' % (self.Version, change_id) response = self.make_request('GET', uri) body = response.read() boto.log.debug(body) if response.status >= 300: raise exception.DNSServerError(response.status, response.reason, body) e = boto.jsonresponse.Element() h = boto.jsonresponse.XmlHandler(e, None) h.parse(body) return e
[ "def", "get_change", "(", "self", ",", "change_id", ")", ":", "uri", "=", "'/%s/change/%s'", "%", "(", "self", ".", "Version", ",", "change_id", ")", "response", "=", "self", ".", "make_request", "(", "'GET'", ",", "uri", ")", "body", "=", "response", ...
37.958333
17.041667
def GET_AUTH(self): """ GET request """ return self.template_helper.get_renderer().queue(*self.submission_manager.get_job_queue_snapshot(), datetime.fromtimestamp)
[ "def", "GET_AUTH", "(", "self", ")", ":", "return", "self", ".", "template_helper", ".", "get_renderer", "(", ")", ".", "queue", "(", "*", "self", ".", "submission_manager", ".", "get_job_queue_snapshot", "(", ")", ",", "datetime", ".", "fromtimestamp", ")" ...
59
37.333333
def _upsert(context, params, data): """Insert or update data and add/update appropriate timestamps""" table = params.get("table") table = datastore.get_table(table, primary_id=False) unique_keys = ensure_list(params.get("unique")) data["__last_seen"] = datetime.datetime.utcnow() if len(unique_keys): updated = table.update(data, unique_keys, return_count=True) if updated: return data["__first_seen"] = data["__last_seen"] table.insert(data)
[ "def", "_upsert", "(", "context", ",", "params", ",", "data", ")", ":", "table", "=", "params", ".", "get", "(", "\"table\"", ")", "table", "=", "datastore", ".", "get_table", "(", "table", ",", "primary_id", "=", "False", ")", "unique_keys", "=", "ens...
40.916667
13.666667
def _make_pkh_address(pubkey_hash, witness=False, cashaddr=True): ''' bytes, bool -> str ''' addr_bytes = bytearray() if riemann.network.CASHADDR_P2PKH is not None and cashaddr: addr_bytes.extend(riemann.network.CASHADDR_P2PKH) addr_bytes.extend(pubkey_hash) return riemann.network.CASHADDR_ENCODER.encode(addr_bytes) if witness: addr_bytes.extend(riemann.network.P2WPKH_PREFIX) addr_bytes.extend(pubkey_hash) return riemann.network.SEGWIT_ENCODER.encode(addr_bytes) else: addr_bytes.extend(riemann.network.P2PKH_PREFIX) addr_bytes.extend(pubkey_hash) return riemann.network.LEGACY_ENCODER.encode(addr_bytes)
[ "def", "_make_pkh_address", "(", "pubkey_hash", ",", "witness", "=", "False", ",", "cashaddr", "=", "True", ")", ":", "addr_bytes", "=", "bytearray", "(", ")", "if", "riemann", ".", "network", ".", "CASHADDR_P2PKH", "is", "not", "None", "and", "cashaddr", ...
40.705882
19.294118
def gen_random_mobile(): """ 随机生成一个手机号 :return: * str: (string) 手机号 举例如下:: print('--- gen_random_mobile demo ---') print(gen_random_mobile()) print(gen_random_mobile()) print('---') 执行结果:: --- gen_random_mobile demo --- 16706146773 14402633925 --- """ prefix_list = ["13", "1400", "1410", "1440", "145", "146", "147", "148", "15", "162", "165", "166", "167", "170", "171", "172", "173", "175", "176", "177", "178", "1740", "18", "191", "198", "199"] prefix_str = random.choice(prefix_list) return prefix_str + "".join(random.choice("0123456789") for _ in range(11 - len(prefix_str)))
[ "def", "gen_random_mobile", "(", ")", ":", "prefix_list", "=", "[", "\"13\"", ",", "\"1400\"", ",", "\"1410\"", ",", "\"1440\"", ",", "\"145\"", ",", "\"146\"", ",", "\"147\"", ",", "\"148\"", ",", "\"15\"", ",", "\"162\"", ",", "\"165\"", ",", "\"166\"", ...
25.193548
22.032258
def create_cylinder(rows, cols, radius=[1.0, 1.0], length=1.0, offset=False): """Create a cylinder Parameters ---------- rows : int Number of rows. cols : int Number of columns. radius : tuple of float Cylinder radii. length : float Length of the cylinder. offset : bool Rotate each row by half a column. Returns ------- cylinder : MeshData Vertices and faces computed for a cylindrical surface. """ verts = np.empty((rows+1, cols, 3), dtype=np.float32) if isinstance(radius, int): radius = [radius, radius] # convert to list # compute vertices th = np.linspace(2 * np.pi, 0, cols).reshape(1, cols) # radius as a function of z r = np.linspace(radius[0], radius[1], num=rows+1, endpoint=True).reshape(rows+1, 1) verts[..., 2] = np.linspace(0, length, num=rows+1, endpoint=True).reshape(rows+1, 1) # z if offset: # rotate each row by 1/2 column th = th + ((np.pi / cols) * np.arange(rows+1).reshape(rows+1, 1)) verts[..., 0] = r * np.cos(th) # x = r cos(th) verts[..., 1] = r * np.sin(th) # y = r sin(th) # just reshape: no redundant vertices... verts = verts.reshape((rows+1)*cols, 3) # compute faces faces = np.empty((rows*cols*2, 3), dtype=np.uint32) rowtemplate1 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 0]])) % cols) + np.array([[0, 0, cols]])) rowtemplate2 = (((np.arange(cols).reshape(cols, 1) + np.array([[0, 1, 1]])) % cols) + np.array([[cols, 0, cols]])) for row in range(rows): start = row * cols * 2 faces[start:start+cols] = rowtemplate1 + row * cols faces[start+cols:start+(cols*2)] = rowtemplate2 + row * cols return MeshData(vertices=verts, faces=faces)
[ "def", "create_cylinder", "(", "rows", ",", "cols", ",", "radius", "=", "[", "1.0", ",", "1.0", "]", ",", "length", "=", "1.0", ",", "offset", "=", "False", ")", ":", "verts", "=", "np", ".", "empty", "(", "(", "rows", "+", "1", ",", "cols", ",...
36.365385
17.346154
def make_frame(self, frame, birthframe, startframe, stopframe, deathframe, noiseframe=None): """ :param frame: current frame :param birthframe: frame where this animation starts returning something other than None :param startframe: frame where animation starts to evolve :param stopframe: frame where animation is completed :param deathframe: frame where animation starts to return None :return: """ newx = self.anim_x.make_frame(frame, birthframe, startframe, stopframe, deathframe, noiseframe) newy = self.anim_y.make_frame(frame, birthframe, startframe, stopframe, deathframe, noiseframe) if self.xy_noise_fn is not None: if noiseframe is not None: t = noiseframe else: t = Tween.tween2(frame, startframe, stopframe) addx, addy = self.xy_noise_fn(newx, newy, t) else: addx, addy = 0, 0 return newx + addx, newy + addy
[ "def", "make_frame", "(", "self", ",", "frame", ",", "birthframe", ",", "startframe", ",", "stopframe", ",", "deathframe", ",", "noiseframe", "=", "None", ")", ":", "newx", "=", "self", ".", "anim_x", ".", "make_frame", "(", "frame", ",", "birthframe", "...
47.047619
23.238095
def get_valid_statements_for_modeling(sts: List[Influence]) -> List[Influence]: """ Select INDRA statements that can be used to construct a Delphi model from a given list of statements. """ return [ s for s in sts if is_grounded_statement(s) and (s.subj_delta["polarity"] is not None) and (s.obj_delta["polarity"] is not None) ]
[ "def", "get_valid_statements_for_modeling", "(", "sts", ":", "List", "[", "Influence", "]", ")", "->", "List", "[", "Influence", "]", ":", "return", "[", "s", "for", "s", "in", "sts", "if", "is_grounded_statement", "(", "s", ")", "and", "(", "s", ".", ...
34.090909
19.727273
def fulltext(search, lang=Lang.English, ignore_case=True): """Full text search. Example:: filters = Text.fulltext("python pymongo_mate") .. note:: This field doesn't need to specify field. """ return { "$text": { "$search": search, "$language": lang, "$caseSensitive": not ignore_case, "$diacriticSensitive": False, } }
[ "def", "fulltext", "(", "search", ",", "lang", "=", "Lang", ".", "English", ",", "ignore_case", "=", "True", ")", ":", "return", "{", "\"$text\"", ":", "{", "\"$search\"", ":", "search", ",", "\"$language\"", ":", "lang", ",", "\"$caseSensitive\"", ":", ...
24.473684
20.105263
def get_version_details(path): """Parses version file :param path: path to version file :return: version details """ with open(path, "r") as reader: lines = reader.readlines() data = { line.split(" = ")[0].replace("__", ""): line.split(" = ")[1].strip().replace("'", "") for line in lines } return data
[ "def", "get_version_details", "(", "path", ")", ":", "with", "open", "(", "path", ",", "\"r\"", ")", "as", "reader", ":", "lines", "=", "reader", ".", "readlines", "(", ")", "data", "=", "{", "line", ".", "split", "(", "\" = \"", ")", "[", "0", "]"...
25.466667
15.666667
def get_snapshot(self, hosts, macromodulations, timeperiods): # pragma: no cover, not yet! """ Raise snapshot event handlers if NONE of the following conditions is met:: * snapshot_command is None * snapshot_enabled is disabled * snapshot_criteria does not matches current state * last_snapshot > now - snapshot_interval * interval_length (previous snapshot too early) * snapshot_period is not valid :param hosts: hosts objects, used to get data for macros :type hosts: alignak.objects.host.Hosts :param macromodulations: Macro modulations objects, used in commands (notif, check) :type macromodulations: alignak.objects.macromodulation.Macromodulations :param timeperiods: Timeperiods objects, used for snapshot period and macros evaluation :type timeperiods: alignak.objects.timeperiod.Timeperiods :return: None """ # We should have a snapshot_command, to be enabled and of course # in the good time and state :D if self.snapshot_command is None: return if not self.snapshot_enabled: return # look at if one state is matching the criteria boolmap = [self.is_state(s) for s in self.snapshot_criteria] if True not in boolmap: return # Time based checks now, we should be in the period and not too far # from the last_snapshot now = int(time.time()) cls = self.__class__ if self.last_snapshot > now - self.snapshot_interval * cls.interval_length: # too close return # no period means 24x7 :) timeperiod = timeperiods[self.snapshot_period] if timeperiod is not None and not timeperiod.is_time_valid(now): return cls = self.__class__ macroresolver = MacroResolver() data = self.get_data_for_event_handler(hosts) cmd = macroresolver.resolve_command(self.snapshot_command, data, macromodulations, timeperiods) reac_tag = self.snapshot_command.reactionner_tag event_h = EventHandler({ 'command': cmd, 'timeout': cls.event_handler_timeout, 'ref': self.uuid, 'reactionner_tag': reac_tag, 'is_snapshot': True }) self.raise_snapshot_log_entry(self.snapshot_command) # we save the time we launch the snap 
self.last_snapshot = now # ok we can put it in our temp action queue self.actions.append(event_h)
[ "def", "get_snapshot", "(", "self", ",", "hosts", ",", "macromodulations", ",", "timeperiods", ")", ":", "# pragma: no cover, not yet!", "# We should have a snapshot_command, to be enabled and of course", "# in the good time and state :D", "if", "self", ".", "snapshot_command", ...
39.6875
22
def run(self): """ Run the EventHubClient in blocking mode. Opens the connection and starts running all Sender/Receiver clients. Returns a list of the start up results. For a succcesful client start the result will be `None`, otherwise the exception raised. If all clients failed to start, then run will fail, shut down the connection and raise an exception. If at least one client starts up successfully the run command will succeed. :rtype: list[~azure.eventhub.common.EventHubError] """ log.info("%r: Starting %r clients", self.container_id, len(self.clients)) try: self._start_clients() redirects = [c.redirected for c in self.clients if c.redirected] failed = [c.error for c in self.clients if c.error] if failed and len(failed) == len(self.clients): log.warning("%r: All clients failed to start.", self.container_id) raise failed[0] if failed: log.warning("%r: %r clients failed to start.", self.container_id, len(failed)) elif redirects: self._handle_redirect(redirects) except EventHubError: self.stop() raise except Exception as e: self.stop() raise EventHubError(str(e)) return failed
[ "def", "run", "(", "self", ")", ":", "log", ".", "info", "(", "\"%r: Starting %r clients\"", ",", "self", ".", "container_id", ",", "len", "(", "self", ".", "clients", ")", ")", "try", ":", "self", ".", "_start_clients", "(", ")", "redirects", "=", "["...
44.16129
22.032258
def add_pool(arg, opts, shell_opts): """ Add a pool. """ p = Pool() p.name = opts.get('name') p.description = opts.get('description') p.default_type = opts.get('default-type') p.ipv4_default_prefix_length = opts.get('ipv4_default_prefix_length') p.ipv6_default_prefix_length = opts.get('ipv6_default_prefix_length') if 'tags' in opts: tags = list(csv.reader([opts.get('tags', '')], escapechar='\\'))[0] p.tags = {} for tag_name in tags: tag = Tag() tag.name = tag_name p.tags[tag_name] = tag for avp in opts.get('extra-attribute', []): try: key, value = avp.split('=', 1) except ValueError: print("ERROR: Incorrect extra-attribute: %s. Accepted form: 'key=value'\n" % avp, file=sys.stderr) return p.avps[key] = value try: p.save() except pynipap.NipapError as exc: print("Could not add pool to NIPAP: %s" % str(exc), file=sys.stderr) sys.exit(1) print("Pool '%s' created." % (p.name))
[ "def", "add_pool", "(", "arg", ",", "opts", ",", "shell_opts", ")", ":", "p", "=", "Pool", "(", ")", "p", ".", "name", "=", "opts", ".", "get", "(", "'name'", ")", "p", ".", "description", "=", "opts", ".", "get", "(", "'description'", ")", "p", ...
30.941176
20.764706
def lint(fix_imports): """ Run flake8. """ from glob import glob from subprocess import call # FIXME: should support passing these in an option skip = [ 'ansible', 'db', 'flask_sessions', 'node_modules', 'requirements', ] root_files = glob('*.py') root_dirs = [name for name in next(os.walk('.'))[1] if not name.startswith('.')] files_and_dirs = [x for x in root_files + root_dirs if x not in skip] def execute_tool(desc, *args): command = list(args) + files_and_dirs click.echo(f"{desc}: {' '.join(command)}") ret = call(command) if ret != 0: exit(ret) if fix_imports: execute_tool('Fixing import order', 'isort', '-rc') execute_tool('Checking code style', 'flake8')
[ "def", "lint", "(", "fix_imports", ")", ":", "from", "glob", "import", "glob", "from", "subprocess", "import", "call", "# FIXME: should support passing these in an option", "skip", "=", "[", "'ansible'", ",", "'db'", ",", "'flask_sessions'", ",", "'node_modules'", "...
26.866667
18.266667
def mark(self, value=1): """Updates the dictionary.""" self['count'] += value for m in self._meters: m.update(value)
[ "def", "mark", "(", "self", ",", "value", "=", "1", ")", ":", "self", "[", "'count'", "]", "+=", "value", "for", "m", "in", "self", ".", "_meters", ":", "m", ".", "update", "(", "value", ")" ]
21.666667
17.166667
def retrieve_outputs(self): """ Declare the outputs of the algorithms as attributes: x_final, y_final, metrics. """ metrics = {} for obs in self._observers['cv_metrics']: metrics[obs.name] = obs.retrieve_metrics() self.metrics = metrics
[ "def", "retrieve_outputs", "(", "self", ")", ":", "metrics", "=", "{", "}", "for", "obs", "in", "self", ".", "_observers", "[", "'cv_metrics'", "]", ":", "metrics", "[", "obs", ".", "name", "]", "=", "obs", ".", "retrieve_metrics", "(", ")", "self", ...
32.111111
13.444444
def read(self, length=1000, blocking=False): """ Read ``length`` bytes from current process output stream. Note: This method is not fully non-blocking, however it behaves like one. """ size_p = PLARGE_INTEGER(LARGE_INTEGER(0)) if not blocking: windll.kernel32.GetFileSizeEx(self.conout_pipe, size_p) size = size_p[0] length = min(size, length) data = ctypes.create_string_buffer(length) if length > 0: num_bytes = PLARGE_INTEGER(LARGE_INTEGER(0)) ReadFile(self.conout_pipe, data, length, num_bytes, None) return data.value
[ "def", "read", "(", "self", ",", "length", "=", "1000", ",", "blocking", "=", "False", ")", ":", "size_p", "=", "PLARGE_INTEGER", "(", "LARGE_INTEGER", "(", "0", ")", ")", "if", "not", "blocking", ":", "windll", ".", "kernel32", ".", "GetFileSizeEx", "...
38.058824
15.352941
def if_then_else(cls, condition: 'TensorFluent', true_case: 'TensorFluent', false_case: 'TensorFluent') -> 'TensorFluent': '''Returns a TensorFluent for the control op if-then-else. Args: condition: Boolean fluent for the if condition. true_case: Fluent returned in the true clause. false_case: Fluent returned in the false clause. Returns: A TensorFluent wrapping the if-then-else control statement. Raises: ValueError: If cases don't have same shape. ''' true = TensorFluent.constant(True, tf.bool) false = TensorFluent.constant(False, tf.bool) ite = (condition == true) * true_case + (condition == false) * false_case if true_case.dtype == tf.bool and false_case.dtype == tf.bool: ite = ite.cast(tf.bool) return ite
[ "def", "if_then_else", "(", "cls", ",", "condition", ":", "'TensorFluent'", ",", "true_case", ":", "'TensorFluent'", ",", "false_case", ":", "'TensorFluent'", ")", "->", "'TensorFluent'", ":", "true", "=", "TensorFluent", ".", "constant", "(", "True", ",", "tf...
38.565217
22.478261
def svm_score(self, x): x = x[1:] ''' original_X = self.svm_processor.train_X[:, 1:] score = 0 for i in range(len(self.svm_processor.sv_alpha)): score += self.svm_processor.sv_alpha[i] * self.svm_processor.sv_Y[i] * utility.Kernel.gaussian_kernel(self, original_X[self.svm_processor.sv_index[i]], x) score = score + self.svm_processor.sv_avg_b ''' score = np.sum(self.svm_processor.sv_alpha * self.svm_processor.sv_Y * utility.Kernel.kernel_matrix_xX(self, x, self.svm_processor.sv_X)) + self.svm_processor.sv_avg_b return score
[ "def", "svm_score", "(", "self", ",", "x", ")", ":", "x", "=", "x", "[", "1", ":", "]", "score", "=", "np", ".", "sum", "(", "self", ".", "svm_processor", ".", "sv_alpha", "*", "self", ".", "svm_processor", ".", "sv_Y", "*", "utility", ".", "Kern...
40.133333
40.266667
def add_edge(self, id1, id2, weight=0.0, length=1.0, label="", properties={}): """ Add weighted (0.0-1.0) edge between nodes, creating them if necessary. The weight represents the importance of the connection (not the cost). """ if id1 == id2: return None if not self.has_key(id1): self.add_node(id1) if not self.has_key(id2): self.add_node(id2) n1 = self[id1] n2 = self[id2] # If a->b already exists, don't re-create it. # However, b->a may still pass. if n1 in n2.links: if n2.links.edge(n1).node1 == n1: return self.edge(id1, id2) weight = max(0.0, min(weight, 1.0)) e = edge(n1, n2, weight, length, label, properties) self.edges.append(e) n1.links.append(n2, e) n2.links.append(n1, e) return e
[ "def", "add_edge", "(", "self", ",", "id1", ",", "id2", ",", "weight", "=", "0.0", ",", "length", "=", "1.0", ",", "label", "=", "\"\"", ",", "properties", "=", "{", "}", ")", ":", "if", "id1", "==", "id2", ":", "return", "None", "if", "not", "...
32.518519
18.481481
def pseudo_with_symbol(self, symbol, allow_multi=False): """ Return the pseudo with the given chemical symbol. Args: symbols: String with the chemical symbol of the element allow_multi: By default, the method raises ValueError if multiple occurrences are found. Use allow_multi to prevent this. Raises: ValueError if symbol is not found or multiple occurences are present and not allow_multi """ pseudos = self.select_symbols(symbol, ret_list=True) if not pseudos or (len(pseudos) > 1 and not allow_multi): raise ValueError("Found %d occurrences of symbol %s" % (len(pseudos), symbol)) if not allow_multi: return pseudos[0] else: return pseudos
[ "def", "pseudo_with_symbol", "(", "self", ",", "symbol", ",", "allow_multi", "=", "False", ")", ":", "pseudos", "=", "self", ".", "select_symbols", "(", "symbol", ",", "ret_list", "=", "True", ")", "if", "not", "pseudos", "or", "(", "len", "(", "pseudos"...
39.4
26
def is_consecutive(self): """ Determine whether or not the non-zero labels in the segmentation image are consecutive (i.e. no missing values). """ if (self.labels[-1] - self.labels[0] + 1) == self.nlabels: return True else: return False
[ "def", "is_consecutive", "(", "self", ")", ":", "if", "(", "self", ".", "labels", "[", "-", "1", "]", "-", "self", ".", "labels", "[", "0", "]", "+", "1", ")", "==", "self", ".", "nlabels", ":", "return", "True", "else", ":", "return", "False" ]
30
18.8
def get_stored_cert_serials(store):
    '''
    Get all of the certificate serials in the specified store

    store
        The store to get all the certificate serials from

    CLI Example:

    .. code-block:: bash

        salt '*' certutil.get_stored_cert_serials <store>
    '''
    output = __salt__['cmd.run']("certutil.exe -store {0}".format(store))
    # Serials are matched by their header position so the parsing works
    # regardless of the localized field names certutil prints.
    serial_pattern = re.compile(r"={16}\r\n.*:\s*(\w*)\r\n")
    return serial_pattern.findall(output)
[ "def", "get_stored_cert_serials", "(", "store", ")", ":", "cmd", "=", "\"certutil.exe -store {0}\"", ".", "format", "(", "store", ")", "out", "=", "__salt__", "[", "'cmd.run'", "]", "(", "cmd", ")", "# match serial numbers by header position to work with multiple langua...
28.277778
24.944444
def add_ref_annotation(self, id_tier, tier2, time, value='', prev=None, svg=None):
    """Add a reference annotation.

    .. note:: When a timepoint matches two annotations the new reference
              annotation will reference to the first annotation. To
              circumvent this it's always safer to take the middle of the
              annotation you want to reference to.

    :param str id_tier: Name of the tier.
    :param str tier2: Tier of the referenced annotation.
    :param int time: Time of the referenced annotation.
    :param str value: Value of the annotation.
    :param str prev: Id of the previous annotation.
    :param str svg: Svg reference.
    :raises KeyError: If the tier is non existent.
    :raises ValueError: If the tier already contains normal annotations or
                        if there is no annotation in the tier on the time
                        to reference to.
    """
    if self.tiers[id_tier][0]:
        raise ValueError('This tier already contains normal annotations.')
    # Find the first annotation on tier2 whose interval covers `time`.
    target = None
    for ref_id, (start_ts, end_ts, _, _) in self.tiers[tier2][0].items():
        start = self.timeslots[start_ts]
        end = self.timeslots[end_ts]
        if start <= time <= end:
            target = ref_id
            break
    if not target:
        raise ValueError('There is no annotation to reference to.')
    new_id = self.generate_annotation_id()
    self.annotations[new_id] = id_tier
    self.tiers[id_tier][1][new_id] = (target, value, prev, svg)
[ "def", "add_ref_annotation", "(", "self", ",", "id_tier", ",", "tier2", ",", "time", ",", "value", "=", "''", ",", "prev", "=", "None", ",", "svg", "=", "None", ")", ":", "if", "self", ".", "tiers", "[", "id_tier", "]", "[", "0", "]", ":", "raise...
47.6875
17.78125
def C(w, Xs):
    '''Calculate the cylinder center given the cylinder direction and
    a list of data points.
    '''
    # NOTE: a dead local (n = len(Xs)) was removed; it was never used.
    # Project each data point onto the plane orthogonal to the axis w.
    P = projection_matrix(w)
    Ys = [np.dot(P, X) for X in Xs]
    A = calc_A(Ys)
    A_hat = calc_A_hat(A, skew_matrix(w))
    # np.dot(Y, Y) is the squared norm of the projected point.
    return np.dot(A_hat, sum(np.dot(Y, Y) * Y for Y in Ys)) / np.trace(np.dot(A_hat, A))
[ "def", "C", "(", "w", ",", "Xs", ")", ":", "n", "=", "len", "(", "Xs", ")", "P", "=", "projection_matrix", "(", "w", ")", "Ys", "=", "[", "np", ".", "dot", "(", "P", ",", "X", ")", "for", "X", "in", "Xs", "]", "A", "=", "calc_A", "(", "...
31
23.363636
def firmware_download_input_protocol_type_usb_protocol_usb_directory(self, **kwargs): """Auto Generated Code """ config = ET.Element("config") firmware_download = ET.Element("firmware_download") config = firmware_download input = ET.SubElement(firmware_download, "input") protocol_type = ET.SubElement(input, "protocol-type") usb_protocol = ET.SubElement(protocol_type, "usb-protocol") usb = ET.SubElement(usb_protocol, "usb") directory = ET.SubElement(usb, "directory") directory.text = kwargs.pop('directory') callback = kwargs.pop('callback', self._callback) return callback(config)
[ "def", "firmware_download_input_protocol_type_usb_protocol_usb_directory", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "firmware_download", "=", "ET", ".", "Element", "(", "\"firmware_download\"", ")",...
45.066667
15.4
def request(self, req_and_resp, opt=None, headers=None): """ preprocess before performing a request, usually some patching. authorization also applied here. :param req_and_resp: tuple of Request and Response :type req_and_resp: (Request, Response) :param opt: customized options :type opt: dict :param headers: customized headers :type headers: dict of 'string', or list of tuple: (string, string) for multiple values for one key :return: patched request and response :rtype: Request, Response """ req, resp = req_and_resp # dump info for debugging logger.info('request.url: {0}'.format(req.url)) logger.info('request.header: {0}'.format(req.header)) logger.info('request.query: {0}'.format(req.query)) logger.info('request.file: {0}'.format(req.files)) logger.info('request.schemes: {0}'.format(req.schemes)) # apply authorizations if self.__security: self.__security(req) return req, resp
[ "def", "request", "(", "self", ",", "req_and_resp", ",", "opt", "=", "None", ",", "headers", "=", "None", ")", ":", "req", ",", "resp", "=", "req_and_resp", "# dump info for debugging", "logger", ".", "info", "(", "'request.url: {0}'", ".", "format", "(", ...
38.814815
17
def make_class_method_decorator(classkey, modname=None):
    """
    register a class to be injectable
    classkey is a key that identifies the injected class
    REMEMBER to call inject_instance in __init__

    Args:
        classkey : the class to be injected into
        modname : the global __name__ of the module youa re injecting from

    Returns:
        closure_decorate_class_method (func): decorator for injectable methods

    Example:
        >>> # ENABLE_DOCTEST
        >>> import utool as ut
        >>> class CheeseShop(object):
        ...     def __init__(self):
        ...         import utool as ut
        ...         ut.inject_all_external_modules(self)
        >>> cheeseshop_method = ut.make_class_method_decorator(CheeseShop)
        >>> shop1 = CheeseShop()
        >>> assert not hasattr(shop1, 'has_cheese'), 'have not injected yet'
        >>> @cheeseshop_method
        >>> def has_cheese(self):
        >>>     return False
        >>> shop2 = CheeseShop()
        >>> assert shop2.has_cheese() is False, 'external method not injected'
        >>> print('Cheese shop does not have cheese. All is well.')
    """
    # NOTE(review): a dead `global __APP_MODNAME_REGISTER__` declaration was
    # removed -- this function never assigns that global, only appends to
    # the (mutable) __CLASSNAME_CLASSKEY_REGISTER__ entries.
    #if util_arg.VERBOSE or VERBOSE_CLASS:
    if VERBOSE_CLASS:
        print('[util_class] register via make_class_method_decorator classkey=%r, modname=%r' % (classkey, modname))
    if modname == '__main__':
        # skips reinjects into main
        print('WARNING: cannot register classkey=%r functions as __main__' % (classkey,))
        return lambda func: func
    # register that this module was injected into
    if isinstance(classkey, tuple):
        classname, _ = classkey
        __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname)
    elif isinstance(classkey, type):
        classname = classkey.__name__
        if modname is not None:
            # sanity check: an explicit modname must agree with the class
            assert modname == classkey.__module__, (
                'modname=%r does not agree with __module__=%r' % (
                    modname, classkey.__module__))
        modname = classkey.__module__
        # Convert to new classkey format
        classkey = (classname, modname)
        __CLASSNAME_CLASSKEY_REGISTER__[classname].append(modname)
    else:
        print('Warning not using classkey for %r %r' % (classkey, modname))
        # BUGFIX: this string literal was broken across a line break in the
        # source; rejoined into a single valid literal.
        raise AssertionError('classkey no longer supported. Use class_inject_key instead')
    closure_decorate_class_method = functools.partial(decorate_class_method,
                                                      classkey=classkey)
    return closure_decorate_class_method
[ "def", "make_class_method_decorator", "(", "classkey", ",", "modname", "=", "None", ")", ":", "global", "__APP_MODNAME_REGISTER__", "#if util_arg.VERBOSE or VERBOSE_CLASS:", "if", "VERBOSE_CLASS", ":", "print", "(", "'[util_class] register via make_class_method_decorator classkey...
42.830508
17.644068
def use_comparative_agent_view(self):
    """Pass through to provider ResourceAgentSession.use_comparative_agent_view"""
    self._object_views['agent'] = COMPARATIVE
    # self._get_provider_session('resource_agent_session') # To make sure the session is tracked
    # Propagate the view to every tracked provider session; sessions that
    # do not implement the method are simply skipped.
    for provider_session in self._get_provider_sessions():
        try:
            provider_session.use_comparative_agent_view()
        except AttributeError:
            pass
[ "def", "use_comparative_agent_view", "(", "self", ")", ":", "self", ".", "_object_views", "[", "'agent'", "]", "=", "COMPARATIVE", "# self._get_provider_session('resource_agent_session') # To make sure the session is tracked", "for", "session", "in", "self", ".", "_get_provid...
49.666667
16.333333
def async_mail_task(subject_or_message, to=None, template=None, **kwargs):
    """
    Celery task to send emails asynchronously using the mail bundle.
    """
    # Fall back to a legacy `recipients` keyword only when `to` is empty;
    # the pop keeps it out of the kwargs forwarded to make_message.
    recipients = to if to else kwargs.pop('recipients', [])
    message = make_message(subject_or_message, recipients, template, **kwargs)
    with mail.connect() as connection:
        connection.send(message)
[ "def", "async_mail_task", "(", "subject_or_message", ",", "to", "=", "None", ",", "template", "=", "None", ",", "*", "*", "kwargs", ")", ":", "to", "=", "to", "or", "kwargs", ".", "pop", "(", "'recipients'", ",", "[", "]", ")", "msg", "=", "make_mess...
41.375
13.125
def wait(self, task_id):
    """
    Blocking method which wait end of task.
    It's prefered to use :class:`carotte.Task` object directly

    :param string task_id: Task ID
    :returns: Task dict
    :rtype: dict
    """
    # Ask the worker to block until the task finishes, then read the reply
    # without a timeout (the wait can be arbitrarily long).
    self.__send_pyobj({'action': 'wait', 'id': task_id})
    return self.__recv_pyobj(notimeout=True)
[ "def", "wait", "(", "self", ",", "task_id", ")", ":", "data", "=", "{", "'action'", ":", "'wait'", ",", "'id'", ":", "task_id", "}", "self", ".", "__send_pyobj", "(", "data", ")", "task", "=", "self", ".", "__recv_pyobj", "(", "notimeout", "=", "True...
24.764706
16.647059