text
stringlengths
89
104k
code_tokens
list
avg_line_len
float64
7.91
980
score
float64
0
630
def _ParseRecord(self, parser_mediator, file_object):
  """Parses an event record.

  Args:
    parser_mediator (ParserMediator): mediates interactions between parsers
        and other components, such as storage and dfvfs.
    file_object (dfvfs.FileIO): file-like object.

  Raises:
    ParseError: if the event record cannot be read.
  """
  header_record_offset = file_object.tell()

  # Check the header token type before reading the token data to prevent
  # variable size tokens to consume a large amount of memory.
  token_type = self._ParseTokenType(file_object, header_record_offset)
  if token_type not in self._HEADER_TOKEN_TYPES:
    raise errors.ParseError(
        'Unsupported header token type: 0x{0:02x}'.format(token_type))

  token_type, token_data = self._ParseToken(file_object, header_record_offset)

  if token_data.format_version != 11:
    raise errors.ParseError('Unsupported format version type: {0:d}'.format(
        token_data.format_version))

  # Header carries a second-resolution timestamp plus a microseconds part.
  timestamp = token_data.microseconds + (
      token_data.timestamp * definitions.MICROSECONDS_PER_SECOND)

  event_type = token_data.event_type
  header_record_size = token_data.record_size
  record_end_offset = header_record_offset + header_record_size

  event_tokens = []
  return_token_values = None

  # Walk the tokens that follow the header until the record end offset or a
  # trailer token is reached.
  file_offset = file_object.tell()
  while file_offset < record_end_offset:
    token_type, token_data = self._ParseToken(file_object, file_offset)
    if not token_data:
      raise errors.ParseError('Unsupported token type: 0x{0:02x}'.format(
          token_type))

    file_offset = file_object.tell()

    # The trailer token terminates the record; it is validated below.
    if token_type == self._TOKEN_TYPE_AUT_TRAILER:
      break

    token_type_string = self._TOKEN_TYPES.get(token_type, 'UNKNOWN')
    token_values = self._FormatTokenData(token_type, token_data)
    event_tokens.append({token_type_string: token_values})

    if token_type in (
        self._TOKEN_TYPE_AUT_RETURN32, self._TOKEN_TYPE_AUT_RETURN64):
      return_token_values = token_values

  # At this point token_data is the trailer token (loop exits via break) or
  # the last token read before the record end offset.
  if token_data.signature != self._TRAILER_TOKEN_SIGNATURE:
    raise errors.ParseError('Unsupported signature in trailer token.')

  if token_data.record_size != header_record_size:
    raise errors.ParseError(
        'Mismatch of event record size between header and trailer token.')

  event_data = BSMEventData()
  event_data.event_type = event_type
  event_data.extra_tokens = event_tokens
  event_data.offset = header_record_offset
  event_data.record_length = header_record_size
  event_data.return_value = return_token_values

  date_time = dfdatetime_posix_time.PosixTimeInMicroseconds(
      timestamp=timestamp)
  event = time_events.DateTimeValuesEvent(
      date_time, definitions.TIME_DESCRIPTION_CREATION)
  parser_mediator.ProduceEventWithEventData(event, event_data)
[ "def", "_ParseRecord", "(", "self", ",", "parser_mediator", ",", "file_object", ")", ":", "header_record_offset", "=", "file_object", ".", "tell", "(", ")", "# Check the header token type before reading the token data to prevent", "# variable size tokens to consume a large amount...
37.466667
22.013333
def image_channel_compress_top(body_output, targets, model_hparams, vocab_size):
    """Project decompressed body output to per-channel logits.

    Args:
        body_output: Tensor of shape [batch, img_len, img_len, depth].
        targets: unused.
        model_hparams: HParams, model hyperparmeters.
        vocab_size: int, vocabulary size.

    Returns:
        Tensor of shape [batch, img_len, img_len, channels, vocab_size].
    """
    del targets  # unused arg
    with tf.variable_scope("image_channel_compress_modality"):
        channels = 3  # RGB
        img_len = model_hparams.img_len
        hidden_size = model_hparams.hidden_size
        batch = common_layers.shape_list(body_output)[0]

        # Widen the depth so every RGB channel gets its own hidden vector.
        decompressed = tf.layers.conv2d(
            body_output,
            hidden_size * channels,
            kernel_size=(1, 1),
            strides=(1, 1),
            padding="VALID",
            activation=tf.nn.relu,
            name="decompress_conv")
        decompressed = tf.reshape(
            decompressed, [batch, img_len, img_len * channels, hidden_size])
        decompressed = common_layers.layer_preprocess(
            decompressed, model_hparams)

        logits = tf.layers.dense(
            decompressed,
            vocab_size,
            use_bias=True,
            activation=None,
            name="output_conv")
        return tf.reshape(
            logits, [batch, img_len, img_len, channels, vocab_size])
[ "def", "image_channel_compress_top", "(", "body_output", ",", "targets", ",", "model_hparams", ",", "vocab_size", ")", ":", "del", "targets", "# unused arg", "with", "tf", ".", "variable_scope", "(", "\"image_channel_compress_modality\"", ")", ":", "hidden_size", "=",...
34.166667
16.555556
def _set_cache_(self, attr):
    """Lazily fill in *attr* from the object database."""
    if attr != "size":
        super(Object, self)._set_cache_(attr)
    else:
        # Only the size is fetched here; everything else is delegated.
        info = self.repo.odb.info(self.binsha)
        self.size = info.size
[ "def", "_set_cache_", "(", "self", ",", "attr", ")", ":", "if", "attr", "==", "\"size\"", ":", "oinfo", "=", "self", ".", "repo", ".", "odb", ".", "info", "(", "self", ".", "binsha", ")", "self", ".", "size", "=", "oinfo", ".", "size", "# assert oi...
43.5
18.125
def children_types(record, index, key='refs', stop_types=STOP_TYPES):
    """Group all children of *record* into a dict keyed by their type."""
    grouped = {}
    for kid in children(record, index, key, stop_types=stop_types):
        bucket = grouped.setdefault(kid['type'], [])
        bucket.append(kid)
    return grouped
[ "def", "children_types", "(", "record", ",", "index", ",", "key", "=", "'refs'", ",", "stop_types", "=", "STOP_TYPES", ")", ":", "types", "=", "{", "}", "for", "child", "in", "children", "(", "record", ",", "index", ",", "key", ",", "stop_types", "=", ...
50.833333
21.666667
def squish_infinite(x, range=(0, 1)):
    """
    Truncate infinite values to a range.

    Parameters
    ----------
    x : array_like
        Values that should have infinities squished.
    range : tuple
        The range onto which to squish the infinites.
        Must be of size 2.

    Returns
    -------
    out : array_like
        Values with infinites squished.

    Examples
    --------
    >>> squish_infinite([0, .5, .25, np.inf, .44])
    [0.0, 0.5, 0.25, 1.0, 0.44]

    >>> squish_infinite([0, -np.inf, .5, .25, np.inf], (-10, 9))
    [0.0, -10.0, 0.5, 0.25, 9.0]
    """
    original_type = type(x)
    values = x if hasattr(x, 'dtype') else np.asarray(x)

    # Replace each infinity with the matching range endpoint (in place
    # when the caller passed an ndarray, as the original does).
    low, high = range
    values[values == -np.inf] = low
    values[values == np.inf] = high

    # Restore the caller's container type when we had to convert.
    if not isinstance(values, original_type):
        values = original_type(values)
    return values
[ "def", "squish_infinite", "(", "x", ",", "range", "=", "(", "0", ",", "1", ")", ")", ":", "xtype", "=", "type", "(", "x", ")", "if", "not", "hasattr", "(", "x", ",", "'dtype'", ")", ":", "x", "=", "np", ".", "asarray", "(", "x", ")", "x", "...
21.857143
19.628571
def _adaptive(self, gamma=1.0, relative_tolerance=1.0e-8, maximum_iterations=1000, verbose=True, print_warning=True):
    """
    Determine dimensionless free energies by a combination of Newton-Raphson iteration and self-consistent iteration.
    Picks whichever method gives the lowest gradient.
    Is slower than NR (approximated, not calculated) since it calculates the log norms twice each iteration.

    OPTIONAL ARGUMENTS
      gamma (float between 0 and 1) - incrementor for NR iterations.
      relative_tolerance (float between 0 and 1) - relative tolerance for convergence (default 1.0e-6)
      maximum_iterations (int) - maximum number of Newton-Raphson iterations (default 1000)
      verbose (boolean) - verbosity level for debug output

    NOTES

    This method determines the dimensionless free energies by minimizing a convex function whose solution is the desired
    estimator. The original idea came from the construction of a likelihood function that independently reproduced the
    work of Geyer (see [1] and Section 6 of [2]). This can alternatively be formulated as a root-finding algorithm for
    the Z-estimator. More details of this procedure will follow in a subsequent paper. Only those states with nonzero
    counts are include in the estimation procedure.

    REFERENCES
    See Appendix C.2 of [1].

    """
    if verbose:
        print("Determining dimensionless free energies by Newton-Raphson iteration.")

    # keep track of Newton-Raphson and self-consistent iterations
    nr_iter = 0
    sci_iter = 0

    N_k = self.N_k[self.states_with_samples]
    K = len(N_k)

    f_k_sci = np.zeros([K], dtype=np.float64)
    f_k_new = np.zeros([K], dtype=np.float64)

    # Perform Newton-Raphson iterations (with sci computed on the way)
    for iteration in range(0, maximum_iterations):

        # Store for new estimate of dimensionless relative free energies.
        f_k = self.f_k[self.states_with_samples].copy()

        # compute weights for gradients: the denominators and free energies are from the previous
        # iteration in most cases.
        (W_nk, f_k_sci) = self._computeWeights(
            recalc_denom=(iteration == 0), return_f_k = True)

        # Compute gradient and Hessian of last (K-1) states.
        #
        # gradient (defined by Eq. C6 of [1])
        # g_i(theta) = N_i - \sum_n N_i W_ni
        #
        # Hessian (defined by Eq. C9 of [1])
        # H_ii(theta) = - \sum_n N_i W_ni (1 - N_i W_ni)
        # H_ij(theta) = \sum_n N_i W_ni N_j W_nj
        #

        # NOTE: the string below is the original element-wise implementation,
        # kept (as dead text) for reference; the vectorized form follows.
        """
        g = np.matrix(np.zeros([K-1,1], dtype=np.float64)) # gradient
        H = np.matrix(np.zeros([K-1,K-1], dtype=np.float64)) # Hessian
        for i in range(1,K):
            g[i-1] = N_k[i] - N_k[i] * W_nk[:,i].sum()
            H[i-1,i-1] = - (N_k[i] * W_nk[:,i] * (1.0 - N_k[i] * W_nk[:,i])).sum()
            for j in range(1,i):
                H[i-1,j-1] = (N_k[i] * W_nk[:,i] * N_k[j] * W_nk[:,j]).sum()
                H[j-1,i-1] = H[i-1,j-1]

        # Update the free energy estimate (Eq. C11 of [1]).
        Hinvg = linalg.lstsq(H,g)[0]
        #
        # Hinvg = linalg.solve(H,g)       # This might be faster if we can guarantee full rank.
        for k in range(0,K-1):
            f_k_new[k+1] = f_k[k+1] - gamma*Hinvg[k]
        """

        g = N_k - N_k * W_nk.sum(axis=0)
        NW = N_k * W_nk
        H = np.dot(NW.T, NW)
        H += (g.T - N_k) * np.eye(K)

        # Update the free energy estimate (Eq. C11 of [1]).
        # will always have lower rank the way it is set up
        Hinvg = linalg.lstsq(H, g)[0]
        Hinvg -= Hinvg[0]
        f_k_new = f_k - gamma * Hinvg

        # self-consistent iteration gradient norm and saved log sums.
        g_sci = self._gradientF(f_k_sci)
        gnorm_sci = np.dot(g_sci, g_sci)
        # save this so we can switch it back in if g_sci is lower.
        log_weight_denom = self.log_weight_denom.copy()

        # newton raphson gradient norm and saved log sums.
        g_nr = self._gradientF(f_k_new)
        gnorm_nr = np.dot(g_nr, g_nr)

        # we could save the gradient, too, but it's not too expensive to
        # compute since we are doing the Hessian anyway.
        if verbose:
            print("self consistent iteration gradient norm is %10.5g, Newton-Raphson gradient norm is %10.5g" % (gnorm_sci, gnorm_nr))

        # decide which directon to go depending on size of gradient norm
        # (the first two iterations always use self-consistent iteration)
        if (gnorm_sci < gnorm_nr or sci_iter < 2):
            sci_iter += 1
            self.log_weight_denom = log_weight_denom.copy()
            if verbose:
                if sci_iter < 2:
                    print("Choosing self-consistent iteration on iteration %d" % iteration)
                else:
                    print("Choosing self-consistent iteration for lower gradient on iteration %d" % iteration)

            f_k_new = f_k_sci.copy()
        else:
            nr_iter += 1
            if verbose:
                print("Newton-Raphson used on iteration %d" % iteration)

        # get rid of big matrices that are not used.
        del(log_weight_denom, NW, W_nk)

        # have to set the free energies back in self, since the gradient
        # routine changes them.
        self.f_k[self.states_with_samples] = f_k
        if (self._amIdoneIterating(f_k_new, relative_tolerance, iteration, maximum_iterations, print_warning, verbose)):
            if verbose:
                print('Of %d iterations, %d were Newton-Raphson iterations and %d were self-consistent iterations' % (iteration + 1, nr_iter, sci_iter))
            break

    return
[ "def", "_adaptive", "(", "self", ",", "gamma", "=", "1.0", ",", "relative_tolerance", "=", "1.0e-8", ",", "maximum_iterations", "=", "1000", ",", "verbose", "=", "True", ",", "print_warning", "=", "True", ")", ":", "if", "verbose", ":", "print", "(", "\"...
45.069767
27.813953
def clear(self):
    """Restore the cached configuration to its pristine, empty layout."""
    pristine = {
        section: {'last': defaultdict(float), 'intl': {}}
        for section in (CacheConfig.Morlist, CacheConfig.Metadata)
    }
    with self._lock:
        self._config = pristine
[ "def", "clear", "(", "self", ")", ":", "with", "self", ".", "_lock", ":", "self", ".", "_config", "=", "{", "CacheConfig", ".", "Morlist", ":", "{", "'last'", ":", "defaultdict", "(", "float", ")", ",", "'intl'", ":", "{", "}", "}", ",", "CacheConf...
34.666667
18.666667
def shift(self, time: int) -> 'Timeslot':
    """Return a copy of this Timeslot whose interval is moved by `time`.

    Args:
        time: time to be shifted
    """
    moved_interval = self.interval.shift(time)
    return Timeslot(moved_interval, self.channel)
[ "def", "shift", "(", "self", ",", "time", ":", "int", ")", "->", "'Timeslot'", ":", "return", "Timeslot", "(", "self", ".", "interval", ".", "shift", "(", "time", ")", ",", "self", ".", "channel", ")" ]
30.857143
13.714286
def eas2tas(eas, h):
    """
    Equivalent airspeed to true airspeed
    """
    # TAS = EAS * sqrt(rho0 / rho) with rho the density at altitude h.
    return eas * np.sqrt(rho0 / density(h))
[ "def", "eas2tas", "(", "eas", ",", "h", ")", ":", "rho", "=", "density", "(", "h", ")", "tas", "=", "eas", "*", "np", ".", "sqrt", "(", "rho0", "/", "rho", ")", "return", "tas" ]
27.4
14.2
def _async_route(self, msg, in_stream=None):
    """
    Arrange for `msg` to be forwarded towards its destination. If its
    destination is the local context, then arrange for it to be dispatched
    using the local handlers.

    This is a lower overhead version of :meth:`route` that may only be
    called from the :class:`Broker` thread.

    :param Stream in_stream:
        If not :data:`None`, the stream the message arrived on. Used for
        performing source route verification, to ensure sensitive messages
        such as ``CALL_FUNCTION`` arrive only from trusted contexts.
    """
    _vv and IOLOG.debug('%r._async_route(%r, %r)', self, msg, in_stream)

    # Refuse oversized messages before any routing work.
    if len(msg.data) > self.max_message_size:
        self._maybe_send_dead(msg, self.too_large_msg % (
            self.max_message_size,
        ))
        return

    # Perform source verification.
    if in_stream:
        parent = self._stream_by_id.get(mitogen.parent_id)
        expect = self._stream_by_id.get(msg.auth_id, parent)
        if in_stream != expect:
            LOG.error('%r: bad auth_id: got %r via %r, not %r: %r',
                      self, msg.auth_id, in_stream, expect, msg)
            return

        # When src_id differs from auth_id, verify the claimed source too.
        if msg.src_id != msg.auth_id:
            expect = self._stream_by_id.get(msg.src_id, parent)
            if in_stream != expect:
                LOG.error('%r: bad src_id: got %r via %r, not %r: %r',
                          self, msg.src_id, in_stream, expect, msg)
                return

        # Streams with a fixed auth_id stamp it onto every message.
        if in_stream.auth_id is not None:
            msg.auth_id = in_stream.auth_id

        # Maintain a set of IDs the source ever communicated with.
        in_stream.egress_ids.add(msg.dst_id)

    # Destination is this context: dispatch locally instead of forwarding.
    if msg.dst_id == mitogen.context_id:
        return self._invoke(msg, in_stream)

    # No direct route: fall back to the parent stream.
    out_stream = self._stream_by_id.get(msg.dst_id)
    if out_stream is None:
        out_stream = self._stream_by_id.get(mitogen.parent_id)
        if out_stream is None:
            self._maybe_send_dead(msg, self.no_route_msg,
                                  msg.dst_id, mitogen.context_id)
            return

    # In unidirectional mode, only privileged streams may relay traffic.
    if in_stream and self.unidirectional and not \
            (in_stream.is_privileged or out_stream.is_privileged):
        self._maybe_send_dead(msg, self.unidirectional_msg,
                              in_stream.remote_id, out_stream.remote_id)
        return

    out_stream._send(msg)
[ "def", "_async_route", "(", "self", ",", "msg", ",", "in_stream", "=", "None", ")", ":", "_vv", "and", "IOLOG", ".", "debug", "(", "'%r._async_route(%r, %r)'", ",", "self", ",", "msg", ",", "in_stream", ")", "if", "len", "(", "msg", ".", "data", ")", ...
39.650794
22.126984
def openpty():
    """Open a pty master/slave pair and return (master_fd, slave_fd).

    Prefers the platform's os.openpty(); falls back to the portable
    implementation when that is unavailable or fails.
    """
    try:
        pair = os.openpty()
    except (AttributeError, OSError):
        pass
    else:
        return pair
    master_fd, slave_name = _open_terminal()
    return master_fd, slave_open(slave_name)
[ "def", "openpty", "(", ")", ":", "try", ":", "return", "os", ".", "openpty", "(", ")", "except", "(", "AttributeError", ",", "OSError", ")", ":", "pass", "master_fd", ",", "slave_name", "=", "_open_terminal", "(", ")", "slave_fd", "=", "slave_open", "(",...
28.909091
14.454545
def flatten(l, unique=True):
    """flatten a list of lists

    Parameters
    ----------
    l : list of lists
    unique : boolean
        whether or not only unique items are wanted (default=True).
        Note: when True the result order is that of ``set`` and is
        therefore unspecified.

    Returns
    -------
    list of single items

    Examples
    --------
    Creating a sample list whose elements are lists of integers

    >>> l = [[1, 2], [3, 4, ], [5, 6]]

    Applying flatten function

    >>> flatten(l, unique=False)
    [1, 2, 3, 4, 5, 6]
    """
    # chain.from_iterable is O(n) whereas reduce(+) re-copies the growing
    # list on every step (O(n^2)); it also handles an empty input, which
    # reduce() without an initializer would reject with a TypeError.
    from itertools import chain
    flat = list(chain.from_iterable(l))
    if not unique:
        return flat
    return list(set(flat))
[ "def", "flatten", "(", "l", ",", "unique", "=", "True", ")", ":", "l", "=", "reduce", "(", "lambda", "x", ",", "y", ":", "x", "+", "y", ",", "l", ")", "if", "not", "unique", ":", "return", "list", "(", "l", ")", "return", "list", "(", "set", ...
18.28125
24
def _gei8(ins):
    """Emit code for a signed 8-bit >= comparison.

    Pops the top two operands off the stack and pushes 1 when the 1st
    operand >= the 2nd operand (top of the stack), 0 otherwise.
    Implemented by swapping the operands and reusing the signed <=
    runtime routine.
    """
    REQUIRES.add('lei8.asm')
    asm = _8bit_oper(ins.quad[2], ins.quad[3], reversed_=True)
    asm.extend(['call __LEI8', 'push af'])
    return asm
[ "def", "_gei8", "(", "ins", ")", ":", "output", "=", "_8bit_oper", "(", "ins", ".", "quad", "[", "2", "]", ",", "ins", ".", "quad", "[", "3", "]", ",", "reversed_", "=", "True", ")", "output", ".", "append", "(", "'call __LEI8'", ")", "output", "...
29.615385
17
def itemStyle(self):
    """
    Returns the item style information for this item.

    :return     <XGanttWidgetItem.ItemStyle>
    """
    use_group = self.useGroupStyleWithChildren() and self.childCount()
    if not use_group:
        return self._itemStyle
    return XGanttWidgetItem.ItemStyle.Group
[ "def", "itemStyle", "(", "self", ")", ":", "if", "(", "self", ".", "useGroupStyleWithChildren", "(", ")", "and", "self", ".", "childCount", "(", ")", ")", ":", "return", "XGanttWidgetItem", ".", "ItemStyle", ".", "Group", "return", "self", ".", "_itemStyle...
32.3
16.1
def needs(self):
    """Returns a unique list of module names that this module depends on."""
    # dict.fromkeys keeps first-seen order while dropping duplicates.
    module_names = (dep.split(".")[0].lower() for dep in self.dependencies)
    return list(dict.fromkeys(module_names))
[ "def", "needs", "(", "self", ")", ":", "result", "=", "[", "]", "for", "dep", "in", "self", ".", "dependencies", ":", "module", "=", "dep", ".", "split", "(", "\".\"", ")", "[", "0", "]", ".", "lower", "(", ")", "if", "module", "not", "in", "re...
32.444444
13.333333
def tuples_as_dict(_list):
    """Translate a list of tuples to OrderedDict with key and val as strings.

    Parameters
    ----------
    _list : list of tuples

    Returns
    -------
    collections.OrderedDict

    Example
    -------
    ::

        >>> tuples_as_dict([('cmd', 'val'), ('cmd2', 'val2')])
        OrderedDict([('cmd', 'val'), ('cmd2', 'val2')])
    """
    return OrderedDict((str(key), str(val)) for key, val in _list)
[ "def", "tuples_as_dict", "(", "_list", ")", ":", "_dict", "=", "OrderedDict", "(", ")", "for", "key", ",", "val", "in", "_list", ":", "key", "=", "str", "(", "key", ")", "val", "=", "str", "(", "val", ")", "_dict", "[", "key", "]", "=", "val", ...
19.72
23.4
def view(self):
    """A list of view specs"""
    specs = []
    for key, value in six.iteritems(self._p4dict):
        if not key.startswith('view'):
            continue
        found = RE_FILESPEC.search(value)
        if found:
            specs.append(FileSpec(value[:found.end() - 1], value[found.end():]))
    return specs
[ "def", "view", "(", "self", ")", ":", "spec", "=", "[", "]", "for", "k", ",", "v", "in", "six", ".", "iteritems", "(", "self", ".", "_p4dict", ")", ":", "if", "k", ".", "startswith", "(", "'view'", ")", ":", "match", "=", "RE_FILESPEC", ".", "s...
31.8
18
def accel_toggle_hide_on_lose_focus(self, *args):
    """Callback toggle whether the window should hide when it loses focus.
    Called by the accel key.

    Returns:
        True, signalling the accelerator was handled.
    """
    # A single negated write replaces the previous if/else that set the
    # flag to the opposite literal in each branch.
    current = self.settings.general.get_boolean('window-losefocus')
    self.settings.general.set_boolean('window-losefocus', not current)
    return True
[ "def", "accel_toggle_hide_on_lose_focus", "(", "self", ",", "*", "args", ")", ":", "if", "self", ".", "settings", ".", "general", ".", "get_boolean", "(", "'window-losefocus'", ")", ":", "self", ".", "settings", ".", "general", ".", "set_boolean", "(", "'win...
45.555556
16.222222
def ds_add(ds, days):
    """
    Add or subtract days from a YYYY-MM-DD

    :param ds: anchor date in ``YYYY-MM-DD`` format to add to
    :type ds: str
    :param days: number of days to add to the ds, you can use negative values
    :type days: int

    >>> ds_add('2015-01-01', 5)
    '2015-01-06'
    >>> ds_add('2015-01-06', -5)
    '2015-01-01'
    """
    anchor = datetime.strptime(ds, '%Y-%m-%d')
    shifted = anchor + timedelta(days) if days else anchor
    return shifted.isoformat()[:10]
[ "def", "ds_add", "(", "ds", ",", "days", ")", ":", "ds", "=", "datetime", ".", "strptime", "(", "ds", ",", "'%Y-%m-%d'", ")", "if", "days", ":", "ds", "=", "ds", "+", "timedelta", "(", "days", ")", "return", "ds", ".", "isoformat", "(", ")", "[",...
24.368421
18.684211
def transpose(vari):
    """
    Transpose a shapeable quantety.

    Args:
        vari (chaospy.poly.base.Poly, numpy.ndarray):
            Quantety of interest.

    Returns:
        (chaospy.poly.base.Poly, numpy.ndarray):
            Same type as ``vari``.

    Examples:
        >>> P = chaospy.reshape(chaospy.prange(4), (2,2))
        >>> print(P)
        [[1, q0], [q0^2, q0^3]]
        >>> print(chaospy.transpose(P))
        [[1, q0^2], [q0, q0^3]]
    """
    if not isinstance(vari, Poly):
        return numpy.transpose(vari)
    # Transpose every coefficient array, then rebuild with reversed shape.
    flipped = vari.A.copy()
    for key in vari.keys:
        flipped[key] = transpose(flipped[key])
    return Poly(flipped, vari.dim, vari.shape[::-1], vari.dtype)
[ "def", "transpose", "(", "vari", ")", ":", "if", "isinstance", "(", "vari", ",", "Poly", ")", ":", "core", "=", "vari", ".", "A", ".", "copy", "(", ")", "for", "key", "in", "vari", ".", "keys", ":", "core", "[", "key", "]", "=", "transpose", "(...
26.038462
16.576923
def random_id(size=8, chars=string.ascii_letters + string.digits):
    """Generates a random string of given size from the given chars.

    @param size: The size of the random string.
    @param chars: Constituent pool of characters to draw random characters from.
    @type size: number
    @type chars: string
    @rtype: string
    @return: The string of random characters.
    """
    picks = [random.choice(chars) for _ in range(size)]
    return ''.join(picks)
[ "def", "random_id", "(", "size", "=", "8", ",", "chars", "=", "string", ".", "ascii_letters", "+", "string", ".", "digits", ")", ":", "return", "''", ".", "join", "(", "random", ".", "choice", "(", "chars", ")", "for", "_", "in", "range", "(", "siz...
38.636364
17.272727
def routing_solution_to_ding0_graph(graph, solution):
    """ Insert `solution` from routing into `graph`

    Args
    ----
    graph: :networkx:`NetworkX Graph Obj< >`
        NetworkX graph object with nodes
    solution: BaseSolution
        Instance of `BaseSolution` or child class (e.g. `LocalSearchSolution`)
        (=solution from routing)

    Returns
    -------
    :networkx:`NetworkX Graph Obj< >`
        NetworkX graph object with nodes and edges
    """
    # TODO: Current approach (this function): branches are created after routing in order to keep the routing tool
    # TODO: usable for the test cases as well. It may become necessary to create them directly within the routing.

    # build node dict (name: obj) from graph nodes to map node names on node objects
    node_list = {str(n): n for n in graph.nodes()}

    # add edges from solution to graph
    try:
        depot = solution._nodes[solution._problem._depot.name()]
        depot_node = node_list[depot.name()]
        for r in solution.routes():
            circ_breaker_pos = None

            # if route has only one node and is not aggregated, it wouldn't be possible to add two lines from and to
            # this node (undirected graph of NetworkX). So, as workaround, an additional MV cable distributor is added
            # at nodes' position (resulting route: HV/MV_subst --- node --- cable_dist --- HV/MV_subst.
            if len(r._nodes) == 1:
                if not solution._problem._is_aggregated[r._nodes[0]._name]:
                    # create new cable dist
                    cable_dist = MVCableDistributorDing0(geo_data=node_list[r._nodes[0]._name].geo_data,
                                                         grid=depot_node.grid)
                    depot_node.grid.add_cable_distributor(cable_dist)

                    # create new node (as dummy) an allocate to route r
                    r.allocate([Node(name=repr(cable_dist), demand=0)])

                    # add it to node list and allocated-list manually
                    node_list[str(cable_dist)] = cable_dist
                    solution._problem._is_aggregated[str(cable_dist)] = False

                    # set circ breaker pos manually
                    circ_breaker_pos = 1

            # build edge list (consecutive route nodes, closed into a ring via the depot)
            n1 = r._nodes[0:len(r._nodes)-1]
            n2 = r._nodes[1:len(r._nodes)]
            edges = list(zip(n1, n2))
            edges.append((depot, r._nodes[0]))
            edges.append((r._nodes[-1], depot))

            # create MV Branch object for every edge in `edges`
            mv_branches = [BranchDing0() for _ in edges]
            edges_with_branches = list(zip(edges, mv_branches))

            # recalculate circuit breaker positions for final solution, create it and set associated branch.
            # if circ. breaker position is not set manually (routes with more than one load area, see above)
            if not circ_breaker_pos:
                circ_breaker_pos = r.calc_circuit_breaker_position()

            node1 = node_list[edges[circ_breaker_pos - 1][0].name()]
            node2 = node_list[edges[circ_breaker_pos - 1][1].name()]

            # ALTERNATIVE TO METHOD ABOVE: DO NOT CREATE 2 BRANCHES (NO RING) -> LA IS CONNECTED AS SATELLITE
            # IF THIS IS COMMENTED-IN, THE IF-BLOCK IN LINE 87 HAS TO BE COMMENTED-OUT
            # See issue #114
            # ===============================
            # do not add circuit breaker for routes which are aggregated load areas or
            # routes that contain only one load area
            # if not (node1 == depot_node and solution._problem._is_aggregated[edges[circ_breaker_pos - 1][1].name()] or
            #         node2 == depot_node and solution._problem._is_aggregated[edges[circ_breaker_pos - 1][0].name()] or
            #         len(r._nodes) == 1):
            # ===============================

            # do not add circuit breaker for routes which are aggregated load areas
            if not (node1 == depot_node and solution._problem._is_aggregated[edges[circ_breaker_pos - 1][1].name()] or
                    node2 == depot_node and solution._problem._is_aggregated[edges[circ_breaker_pos - 1][0].name()]):
                branch = mv_branches[circ_breaker_pos - 1]
                circ_breaker = CircuitBreakerDing0(grid=depot_node.grid, branch=branch,
                                                   geo_data=calc_geo_centre_point(node1, node2))
                branch.circuit_breaker = circ_breaker

            # create new ring object for route
            ring = RingDing0(grid=depot_node.grid)

            # translate solution's node names to graph node objects using dict created before
            # note: branch object is assigned to edge using an attribute ('branch' is used here), it can be accessed
            # using the method `graph_edges()` of class `GridDing0`
            edges_graph = []
            for ((n1, n2), b) in edges_with_branches:
                # get node objects
                node1 = node_list[n1.name()]
                node2 = node_list[n2.name()]

                # set branch's ring attribute
                b.ring = ring
                # set LVLA's ring attribute
                if isinstance(node1, LVLoadAreaCentreDing0):
                    node1.lv_load_area.ring = ring

                # set branch length
                b.length = calc_geo_dist_vincenty(node1, node2)

                # set branch kind and type
                # 1) default
                b.kind = depot_node.grid.default_branch_kind
                b.type = depot_node.grid.default_branch_type
                # 2) aggregated load area types
                if node1 == depot_node and solution._problem._is_aggregated[n2.name()]:
                    b.connects_aggregated = True
                    b.kind = depot_node.grid.default_branch_kind_aggregated
                    b.type = depot_node.grid.default_branch_type_aggregated
                elif node2 == depot_node and solution._problem._is_aggregated[n1.name()]:
                    b.connects_aggregated = True
                    b.kind = depot_node.grid.default_branch_kind_aggregated
                    b.type = depot_node.grid.default_branch_type_aggregated

                # append to branch list
                edges_graph.append((node1, node2, dict(branch=b)))

            # add branches to graph
            graph.add_edges_from(edges_graph)
    # NOTE(review): bare except swallows every error (incl. KeyboardInterrupt)
    # and returns a possibly half-built graph — consider 'except Exception'.
    except:
        logger.exception(
            'unexpected error while converting routing solution to DING0 graph (NetworkX).')

    return graph
[ "def", "routing_solution_to_ding0_graph", "(", "graph", ",", "solution", ")", ":", "# TODO: Bisherige Herangehensweise (diese Funktion): Branches werden nach Routing erstellt um die Funktionsfähigkeit", "# TODO: des Routing-Tools auch für die TestCases zu erhalten. Es wird ggf. notwendig, diese di...
49.227273
28.454545
def to_string(self, cart_coords=False):
    """
    Return GaussianInput string

    Option: when cart_coords is set to True, return the cartesian
    coordinates instead of the z-matrix
    """
    def para_dict_to_string(para, joiner=" "):
        # Render a (possibly nested) parameter dict as 'key=value' pairs;
        # nested dicts become 'key=(a=1,b=2)'.
        para_str = []
        # sorted is only done to make unittests work reliably
        for par, val in sorted(para.items()):
            if val is None or val == "":
                para_str.append(par)
            elif isinstance(val, dict):
                val_str = para_dict_to_string(val, joiner=",")
                para_str.append("{}=({})".format(par, val_str))
            else:
                para_str.append("{}={}".format(par, val))
        return joiner.join(para_str)

    output = []
    # Link 0 section (one '%'-style directive per line), if any.
    if self.link0_parameters:
        output.append(para_dict_to_string(self.link0_parameters, "\n"))
    # Route line: dieze tag, functional/basis set and route parameters.
    output.append("{diez} {func}/{bset} {route}"
                  .format(diez=self.dieze_tag,
                          func=self.functional,
                          bset=self.basis_set,
                          route=para_dict_to_string(self.route_parameters))
                  )
    output.append("")
    output.append(self.title)
    output.append("")
    output.append("{} {}".format(self.charge, self.spin_multiplicity))
    # Molecule section: cartesian coordinates, z-matrix, or raw string.
    if isinstance(self._mol, Molecule):
        if cart_coords is True:
            output.append(self.get_cart_coords())
        else:
            output.append(self.get_zmatrix())
    else:
        output.append(str(self._mol))
    output.append("")
    if self.gen_basis is not None:
        output.append("{:s}\n".format(self.gen_basis))
    output.append(para_dict_to_string(self.input_parameters, "\n"))
    output.append("\n")
    return "\n".join(output)
[ "def", "to_string", "(", "self", ",", "cart_coords", "=", "False", ")", ":", "def", "para_dict_to_string", "(", "para", ",", "joiner", "=", "\" \"", ")", ":", "para_str", "=", "[", "]", "# sorted is only done to make unittests work reliably", "for", "par", ",", ...
40.108696
15.978261
async def restart(request):
    """Respond immediately, then restart the container ~1 second later.

    The delay runs on a background thread so the HTTP response can be
    delivered before the process is killed.
    """
    def _delayed_kill():
        log.info('Restarting server')
        sleep(1)
        os.system('kill 1')

    worker = Thread(target=_delayed_kill)
    worker.start()
    return web.json_response({"message": "restarting"})
[ "async", "def", "restart", "(", "request", ")", ":", "def", "wait_and_restart", "(", ")", ":", "log", ".", "info", "(", "'Restarting server'", ")", "sleep", "(", "1", ")", "os", ".", "system", "(", "'kill 1'", ")", "Thread", "(", "target", "=", "wait_a...
31.8
11.6
def get_geometry_from_name(self, name):
    """
    Returns the coordination geometry of the given name.

    :param name: The name of the coordination geometry.
    :raise LookupError: if no geometry matches either the primary or an
        alternative name.
    """
    for geometry in self.cg_list:
        if name == geometry.name or name in geometry.alternative_names:
            return geometry
    raise LookupError(
        'No coordination geometry found with name "{name}"'.format(
            name=name))
[ "def", "get_geometry_from_name", "(", "self", ",", "name", ")", ":", "for", "gg", "in", "self", ".", "cg_list", ":", "if", "gg", ".", "name", "==", "name", "or", "name", "in", "gg", ".", "alternative_names", ":", "return", "gg", "raise", "LookupError", ...
38.454545
13.181818
def match_tweet(self, tweet, user_stream):
    """
    Check if a tweet matches the defined criteria

    :param tweet: The tweet in question
    :type tweet: :class:`~responsebot.models.Tweet`
    :return: True if matched, False otherwise
    """
    if not user_stream:
        # Public stream: match either by tracked keywords or by follows.
        return self.is_tweet_match_track(tweet) or self.is_tweet_match_follow(tweet)
    if len(self.track) > 0:
        return self.is_tweet_match_track(tweet)
    return True
[ "def", "match_tweet", "(", "self", ",", "tweet", ",", "user_stream", ")", ":", "if", "user_stream", ":", "if", "len", "(", "self", ".", "track", ")", ">", "0", ":", "return", "self", ".", "is_tweet_match_track", "(", "tweet", ")", "return", "True", "re...
32.266667
17.333333
def has_no_dangling_branch(neuron):
    '''Check if the neuron has dangling neurites'''
    # Soma geometry: centre of the soma points and the largest distance of
    # any soma point from that centre.
    soma_center = neuron.soma.points[:, COLS.XYZ].mean(axis=0)
    recentered_soma = neuron.soma.points[:, COLS.XYZ] - soma_center
    radius = np.linalg.norm(recentered_soma, axis=1)
    soma_max_radius = radius.max()

    def is_dangling(neurite):
        '''Is the neurite dangling ?'''
        starting_point = neurite.points[1][COLS.XYZ]

        # Close enough to the soma surface: not dangling.
        # NOTE(review): 12. appears to be a distance threshold in the file's
        # length units (presumably microns) — confirm against the caller.
        if np.linalg.norm(starting_point - soma_center) - soma_max_radius <= 12.:
            return False

        if neurite.type != NeuriteType.axon:
            return True

        # An axon is dangling only if its start is also far from every
        # non-axon point (farther than 2*radius + 2 of that point).
        all_points = list(chain.from_iterable(n.points[1:]
                                              for n in iter_neurites(neurite)
                                              if n.type != NeuriteType.axon))
        res = [np.linalg.norm(starting_point - p[COLS.XYZ]) >= 2 * p[COLS.R] + 2
               for p in all_points]
        return all(res)

    bad_ids = [(n.root_node.id, [n.root_node.points[1]])
               for n in iter_neurites(neuron) if is_dangling(n)]
    return CheckResult(len(bad_ids) == 0, bad_ids)
[ "def", "has_no_dangling_branch", "(", "neuron", ")", ":", "soma_center", "=", "neuron", ".", "soma", ".", "points", "[", ":", ",", "COLS", ".", "XYZ", "]", ".", "mean", "(", "axis", "=", "0", ")", "recentered_soma", "=", "neuron", ".", "soma", ".", "...
41.222222
21.814815
def get_tempdir():
    """ Get the temporary directory where pyelastix stores its temporary
    files. The directory is specific to the current process and the
    calling thread. Generally, the user does not need this; directories
    are automatically cleaned up. Though Elastix log files are also
    written here.
    """
    tempdir = os.path.join(tempfile.gettempdir(), 'pyelastix')
    # Make sure it exists; exist_ok avoids a race with concurrent processes
    # creating it between an isdir() check and makedirs().
    os.makedirs(tempdir, exist_ok=True)

    # Clean up all directories for which the owning process no longer exists
    for fname in os.listdir(tempdir):
        dir_name = os.path.join(tempdir, fname)
        # Check if it is the right kind of dir (one we created: "id_<pid>_<tid>")
        if not (os.path.isdir(dir_name) and fname.startswith('id_')):
            continue
        # Get pid; skip (don't delete) entries whose name we cannot parse,
        # catching only the parse errors rather than a blanket Exception.
        try:
            pid = int(fname.split('_')[1])
        except (IndexError, ValueError):
            continue
        if not _is_pid_running(pid):
            _clear_dir(dir_name)

    # Select dir that includes process and thread id.
    # current_thread() is preferred; currentThread() is the pre-2.6 spelling.
    tid = id(threading.current_thread() if hasattr(threading, 'current_thread')
             else threading.currentThread())
    # Renamed from `dir` so the builtin is not shadowed.
    this_dir = os.path.join(tempdir, 'id_%i_%i' % (os.getpid(), tid))
    os.makedirs(this_dir, exist_ok=True)
    return this_dir
[ "def", "get_tempdir", "(", ")", ":", "tempdir", "=", "os", ".", "path", ".", "join", "(", "tempfile", ".", "gettempdir", "(", ")", ",", "'pyelastix'", ")", "# Make sure it exists", "if", "not", "os", ".", "path", ".", "isdir", "(", "tempdir", ")", ":",...
38.205882
18.294118
def insert_value(self, agg, value, idx, name=''):
    """
    Insert *value* into member number *idx* from aggregate.
    """
    # Normalise a scalar index into the sequence form InsertValue expects;
    # tuples and lists are passed through unchanged.
    indices = idx if isinstance(idx, (tuple, list)) else [idx]
    inserted = instructions.InsertValue(self.block, agg, value, indices, name=name)
    self._insert(inserted)
    return inserted
[ "def", "insert_value", "(", "self", ",", "agg", ",", "value", ",", "idx", ",", "name", "=", "''", ")", ":", "if", "not", "isinstance", "(", "idx", ",", "(", "tuple", ",", "list", ")", ")", ":", "idx", "=", "[", "idx", "]", "instr", "=", "instru...
36.666667
14.222222
def create_namespaced_resource_quota(self, namespace, body, **kwargs):  # noqa: E501
    """create_namespaced_resource_quota  # noqa: E501

    create a ResourceQuota  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_namespaced_resource_quota(namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1ResourceQuota body: (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1ResourceQuota
             If the method is called asynchronously,
             returns the request thread.
    """
    # Always ask the low-level helper for only the response data.
    kwargs['_return_http_data_only'] = True
    helper = self.create_namespaced_resource_quota_with_http_info
    if kwargs.get('async_req'):
        # Asynchronous: the helper returns the request thread directly.
        return helper(namespace, body, **kwargs)  # noqa: E501
    # Synchronous: return the unwrapped response data.
    return helper(namespace, body, **kwargs)  # noqa: E501
[ "def", "create_namespaced_resource_quota", "(", "self", ",", "namespace", ",", "body", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":", "...
62.68
35.56
def has_permission(self, user):
    """
    Return True when the given user may use the tool.

    Can be overridden by the user in subclasses.
    """
    # Permissions are namespaced by the model's app label: "<app>.<perm>".
    permission_name = self.model._meta.app_label + '.' + self.get_permission()
    return user.has_perm(permission_name)
[ "def", "has_permission", "(", "self", ",", "user", ")", ":", "return", "user", ".", "has_perm", "(", "self", ".", "model", ".", "_meta", ".", "app_label", "+", "'.'", "+", "self", ".", "get_permission", "(", ")", ")" ]
35.375
15.375
def read_resp_data(service_name, implementation_name, url, response):
    """
    Read the (DELETE, PATCH, POST, PUT) response body and header if exist.
    """
    # Kept for parity with the original; the return value is not used here.
    RR = _mockdata_path_root(service_name, implementation_name)
    for resource_dir in app_resource_dirs:
        lookup_path = os.path.join(resource_dir['path'], service_name,
                                   implementation_name)
        # Header is probed first, then body; either hit means we are done.
        header_hit = _read_resp_header(lookup_path, url, response)
        body_hit = _read_resp_body(lookup_path, url, response)
        if body_hit or header_hit:
            return response
    # No mock data matched anywhere: answer "not found".
    response.status = 404
    return response
[ "def", "read_resp_data", "(", "service_name", ",", "implementation_name", ",", "url", ",", "response", ")", ":", "RR", "=", "_mockdata_path_root", "(", "service_name", ",", "implementation_name", ")", "for", "resource_dir", "in", "app_resource_dirs", ":", "path", ...
41.8
13
def all_properties(self):
    """Get all properties of a given index"""
    properties = {}
    resp = self.requests.get(self.index_url + "/_mapping",
                             headers=HEADER_JSON, verify=False)
    try:
        resp.raise_for_status()
        mappings = resp.json()[self.index]['mappings']
        # An index without an 'items' mapping (or without properties on it)
        # simply has no attributes to report.
        if 'items' not in mappings:
            return properties
        if 'properties' not in mappings['items']:
            return properties
        properties = mappings['items']['properties']
    except requests.exceptions.HTTPError as ex:
        logger.error("Error all attributes for %s.", self.anonymize_url(self.index_url))
        logger.error(ex)
        # NOTE: on HTTP error this returns None, unlike the empty-dict paths.
        return
    return properties
[ "def", "all_properties", "(", "self", ")", ":", "properties", "=", "{", "}", "r", "=", "self", ".", "requests", ".", "get", "(", "self", ".", "index_url", "+", "\"/_mapping\"", ",", "headers", "=", "HEADER_JSON", ",", "verify", "=", "False", ")", "try"...
34.454545
25.136364
def set_result(self, result):
    """
    Sets the result of the Future.

    :param result: Result of the Future.
    """
    # A None result is stored as the NONE_RESULT sentinel so "no result yet"
    # and "result is None" remain distinguishable.
    self._result = NONE_RESULT if result is None else result
    self._event.set()
    self._invoke_callbacks()
[ "def", "set_result", "(", "self", ",", "result", ")", ":", "if", "result", "is", "None", ":", "self", ".", "_result", "=", "NONE_RESULT", "else", ":", "self", ".", "_result", "=", "result", "self", ".", "_event", ".", "set", "(", ")", "self", ".", ...
25
10.833333
def to_postfix(tokens):
    """
    Convert a list of evaluatable tokens to postfix format.
    """
    precedence = {'/': 4, '*': 4, '+': 3, '-': 3, '^': 2, '(': 1}
    output = []
    operators = []
    for token in tokens:
        if is_int(token):
            output.append(int(token))
        elif is_float(token):
            output.append(float(token))
        elif token in mathwords.CONSTANTS:
            output.append(mathwords.CONSTANTS[token])
        elif is_unary(token):
            operators.append(token)
        elif token == '(':
            operators.append(token)
        elif token == ')':
            # Pop everything back to the matching opening parenthesis.
            top = operators.pop()
            while top != '(':
                output.append(top)
                top = operators.pop()
        else:
            # Binary operator: flush operators of equal or higher precedence.
            while operators and precedence[operators[-1]] >= precedence[token]:
                output.append(operators.pop())
            operators.append(token)
    # Drain whatever operators remain.
    while operators:
        output.append(operators.pop())
    return output
[ "def", "to_postfix", "(", "tokens", ")", ":", "precedence", "=", "{", "'/'", ":", "4", ",", "'*'", ":", "4", ",", "'+'", ":", "3", ",", "'-'", ":", "3", ",", "'^'", ":", "2", ",", "'('", ":", "1", "}", "postfix", "=", "[", "]", "opstack", "...
25.023256
16.325581
def dispatch(self, args):
    """ Calls proper method depending on command-line arguments. """
    if not (args.list or args.group):
        if not (args.font or args.char or args.block):
            # Nothing requested at all: print general information only.
            self.info()
            return
        # A font/char/block was given without display flags: enable both.
        args.list = args.group = True
    display_keys = ('list', 'group', 'omit_summary')
    self._display = {key: args.__dict__[key] for key in display_keys}
    if args.char:
        resolved_char = self._getChar(args.char)
        if args.font:
            self.fontChar(args.font, resolved_char)
        else:
            self.char(resolved_char)
        return
    self.chars(args.font, self._getBlock(args.block))
[ "def", "dispatch", "(", "self", ",", "args", ")", ":", "if", "not", "args", ".", "list", "and", "not", "args", ".", "group", ":", "if", "not", "args", ".", "font", "and", "not", "args", ".", "char", "and", "not", "args", ".", "block", ":", "self"...
35.1
14.2
def invert(self):
    """ Invert the transform """
    # Resolve the precision/dimension-specific native inverse function.
    invert_fn = utils.get_lib_fn('inverseTransform%s' % self._libsuffix)
    inverse_ptr = invert_fn(self.pointer)
    return ANTsTransform(precision=self.precision,
                         dimension=self.dimension,
                         transform_type=self.transform_type,
                         pointer=inverse_ptr)
[ "def", "invert", "(", "self", ")", ":", "libfn", "=", "utils", ".", "get_lib_fn", "(", "'inverseTransform%s'", "%", "(", "self", ".", "_libsuffix", ")", ")", "inv_tx_ptr", "=", "libfn", "(", "self", ".", "pointer", ")", "new_tx", "=", "ANTsTransform", "(...
39.666667
27.222222
def sorted_enums(self) -> List[Tuple[str, int]]:
    """Return list of enum items sorted by value."""
    items = list(self.enum.items())
    items.sort(key=lambda pair: pair[1])
    return items
[ "def", "sorted_enums", "(", "self", ")", "->", "List", "[", "Tuple", "[", "str", ",", "int", "]", "]", ":", "return", "sorted", "(", "self", ".", "enum", ".", "items", "(", ")", ",", "key", "=", "lambda", "x", ":", "x", "[", "1", "]", ")" ]
54.666667
9.333333
def read_sizes(self):
    """Read the device's storage/usage counters.

    Sends CMD_GET_FREE_SIZES and unpacks the reply into counters for
    users, fingers, records and cards, their capacities, and (when the
    reply is long enough) face information.

    :return: True on success
    :raises ZKErrorResponse: if the device rejects the command
    """
    command = const.CMD_GET_FREE_SIZES
    response_size = 1024
    cmd_response = self.__send_command(command,b'', response_size)
    if cmd_response.get('status'):
        if self.verbose: print(codecs.encode(self.__data,'hex'))
        size = len(self.__data)  # NOTE(review): assigned but unused; kept as-is
        if len(self.__data) >= 80:
            # First 80 bytes: twenty 32-bit ints (native byte order per '20i').
            # Only every other slot carries a meaningful counter below.
            fields = unpack('20i', self.__data[:80])
            self.users = fields[4]
            self.fingers = fields[6]
            self.records = fields[8]
            self.dummy = fields[10] #??? meaning unknown — TODO confirm against protocol docs
            self.cards = fields[12]
            self.fingers_cap = fields[14]
            self.users_cap = fields[15]
            self.rec_cap = fields[16]
            self.fingers_av = fields[17]
            self.users_av = fields[18]
            self.rec_av = fields[19]
            # Drop the consumed 80 bytes; a possible face block follows.
            self.__data = self.__data[80:]
        if len(self.__data) >= 12: #face info
            fields = unpack('3i', self.__data[:12]) #dirty hack! we need more information
            self.faces = fields[0]
            self.faces_cap = fields[2]
        return True
    else:
        raise ZKErrorResponse("can't read sizes")
[ "def", "read_sizes", "(", "self", ")", ":", "command", "=", "const", ".", "CMD_GET_FREE_SIZES", "response_size", "=", "1024", "cmd_response", "=", "self", ".", "__send_command", "(", "command", ",", "b''", ",", "response_size", ")", "if", "cmd_response", ".", ...
40.677419
8.935484
def heartbeat_timeout(self):
    """ Called by heartbeat_monitor on timeout """
    assert not self._closed, "Did we not stop heartbeat_monitor on close?"
    log.error("Heartbeat time out")
    # Poison all pending operations so waiters fail fast with a clear cause.
    pill = frames.PoisonPillFrame(ConnectionLostError('Heartbeat timed out'))
    self.dispatcher.dispatch_all(pill)
    # Spec says to just close socket without ConnectionClose handshake.
    self.close()
[ "def", "heartbeat_timeout", "(", "self", ")", ":", "assert", "not", "self", ".", "_closed", ",", "\"Did we not stop heartbeat_monitor on close?\"", "log", ".", "error", "(", "\"Heartbeat time out\"", ")", "poison_exc", "=", "ConnectionLostError", "(", "'Heartbeat timed ...
51.555556
17.333333
def _set_capabilities(self, v, load=False):
    """
    Setter method for capabilities, mapped from YANG variable /capabilities (container)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_capabilities is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_capabilities() directly.
    """
    # Typed wrapper values convert themselves to the underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Wrap the value in the generated YANG container class; a failure to
        # coerce raises below with a descriptive structured error.
        t = YANGDynClass(v,base=capabilities.capabilities, is_container='container', presence=False, yang_name="capabilities", rest_name="capabilities", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'CapabilitiesCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-system-capabilities', defining_module='brocade-system-capabilities', yang_type='container', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """capabilities must be of a type compatible with container""",
            'defined-type': "container",
            'generated-type': """YANGDynClass(base=capabilities.capabilities, is_container='container', presence=False, yang_name="capabilities", rest_name="capabilities", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'CapabilitiesCallpoint'}}, namespace='urn:brocade.com:mgmt:brocade-system-capabilities', defining_module='brocade-system-capabilities', yang_type='container', is_config=True)""",
        })
    self.__capabilities = t
    # Notify the parent object graph, when supported, that a value changed.
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_capabilities", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "b...
75.818182
34.727273
def Render(self, rdf_data):
    """Processes data according to formatting rules."""
    # Only show up to max_results items; each is formatted by the hinter.
    shown = rdf_data[:self.max_results]
    rendered = [self.hinter.Render(item) for item in shown]
    # When results were truncated, report how many were omitted.
    overflow = len(rdf_data) - len(shown)
    if overflow > 0:
        rendered.append("...plus another %d issues." % overflow)
    return rendered
[ "def", "Render", "(", "self", ",", "rdf_data", ")", ":", "report_data", "=", "rdf_data", "[", ":", "self", ".", "max_results", "]", "results", "=", "[", "self", ".", "hinter", ".", "Render", "(", "rdf", ")", "for", "rdf", "in", "report_data", "]", "e...
40.75
13.375
def _set_circuit_type(self, v, load=False):
    """
    Setter method for circuit_type, mapped from YANG variable /routing_system/interface/ve/intf_isis/interface_isis/circuit_type (enumeration)
    If this variable is read-only (config: false) in the
    source YANG file, then _set_circuit_type is considered as a private
    method. Backends looking to populate this variable should
    do so via calling thisObj._set_circuit_type() directly.
    """
    # Typed wrapper values convert themselves to the underlying type first.
    if hasattr(v, "_utype"):
        v = v._utype(v)
    try:
        # Coerce into the generated enumeration leaf; allowed values are
        # level-1, level-2 and level-1-2 per the restriction dict below.
        t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="circuit-type", rest_name="circuit-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Define inter-area/intra area operation mode', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='enumeration', is_config=True)
    except (TypeError, ValueError):
        raise ValueError({
            'error-string': """circuit_type must be of a type compatible with enumeration""",
            'defined-type': "brocade-isis:enumeration",
            'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_type="dict_key", restriction_arg={u'level-1-2': {'value': 3}, u'level-2': {'value': 2}, u'level-1': {'value': 1}},), is_leaf=True, yang_name="circuit-type", rest_name="circuit-type", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-full-command': None, u'info': u'Define inter-area/intra area operation mode', u'cli-full-no': None}}, namespace='urn:brocade.com:mgmt:brocade-isis', defining_module='brocade-isis', yang_type='enumeration', is_config=True)""",
        })
    self.__circuit_type = t
    # Notify the parent object graph, when supported, that a value changed.
    if hasattr(self, '_set'):
        self._set()
[ "def", "_set_circuit_type", "(", "self", ",", "v", ",", "load", "=", "False", ")", ":", "if", "hasattr", "(", "v", ",", "\"_utype\"", ")", ":", "v", "=", "v", ".", "_utype", "(", "v", ")", "try", ":", "t", "=", "YANGDynClass", "(", "v", ",", "b...
99.363636
47.863636
def view_extreme_groups(token, dstore):
    """
    Show the source groups contributing the most to the highest IML
    """
    grp_data = dstore['disagg_by_grp'].value
    grp_data.sort(order='extreme_poe')
    # Reverse the ascending sort so the largest contributors come first.
    return rst_table(grp_data[::-1])
[ "def", "view_extreme_groups", "(", "token", ",", "dstore", ")", ":", "data", "=", "dstore", "[", "'disagg_by_grp'", "]", ".", "value", "data", ".", "sort", "(", "order", "=", "'extreme_poe'", ")", "return", "rst_table", "(", "data", "[", ":", ":", "-", ...
32.285714
6
def tx_max(tasmax, freq='YS'):
    r"""Highest max temperature

    The maximum value of daily maximum temperature.

    Parameters
    ----------
    tasmax : xarray.DataArray
      Maximum daily temperature [℃] or [K]
    freq : str, optional
      Resampling frequency

    Returns
    -------
    xarray.DataArray
      Maximum value of daily maximum temperature.

    Notes
    -----
    Let :math:`TX_{ij}` be the maximum temperature at day :math:`i` of period
    :math:`j`. Then the maximum daily maximum temperature for period
    :math:`j` is:

    .. math::

        TXx_j = max(TX_{ij})
    """
    # Group by the requested period, then take the per-period maximum.
    resampled = tasmax.resample(time=freq)
    return resampled.max(dim='time', keep_attrs=True)
[ "def", "tx_max", "(", "tasmax", ",", "freq", "=", "'YS'", ")", ":", "return", "tasmax", ".", "resample", "(", "time", "=", "freq", ")", ".", "max", "(", "dim", "=", "'time'", ",", "keep_attrs", "=", "True", ")" ]
23.107143
24.75
def decodebytes(input):
    """Decode base64 string to byte array."""
    # Dispatch on the interpreter's major version.
    if sys.version_info[0] >= 3:
        return _decodebytes_py3(input)
    return _decodebytes_py2(input)
[ "def", "decodebytes", "(", "input", ")", ":", "py_version", "=", "sys", ".", "version_info", "[", "0", "]", "if", "py_version", ">=", "3", ":", "return", "_decodebytes_py3", "(", "input", ")", "return", "_decodebytes_py2", "(", "input", ")" ]
33.166667
7.666667
def example_metadata(study_name, draft_name):
    """Example of building a metadata doc.

    Builds an ODM snapshot containing one study with measurement units,
    study events, a Demography form, item groups, items, code lists,
    edit checks, custom functions and a derivation, then returns the
    assembled :class:`ODM` root.
    """
    odm = ODM("SYSTEM_NAME", filetype=ODM.FILETYPE_SNAPSHOT)
    study = Study(study_name, project_type=Study.PROJECT)
    # Push study element into odm
    odm << study
    # Create global variables and set them into study.
    study << GlobalVariables(study_name)  # Expected that protocol name will match the Study OID.
    # Create some basic definitions
    bd = BasicDefinitions()
    # Add some measurement units to the basic definitions. This time using the call () syntax:
    bd(
        MeasurementUnit("KG", "Kilograms")(
            Symbol()(TranslatedText("Kilograms"))
        ),
        MeasurementUnit("CM", "Centimeters")(
            Symbol()(TranslatedText("Centimeters"))
        )
    )
    # Add basic definitions to study
    study << bd
    # Now metadata which will contain all our form and field defs eventually
    meta = MetaDataVersion('META1', draft_name)
    study << meta
    # Protocol contains StudyEventRefs
    protocol = Protocol()
    # Add some StudyEventRefs
    protocol << StudyEventRef("FLDR1", 1, True)  # Order 1, Mandatory
    # protocol << StudyEventRef("FLDR2", 2, False) # Order 2, Not Mandatory
    # protocol << StudyEventRef("AE", 3, True)
    meta << protocol
    # Add Study Event Defs with some child FormRefs
    fldr1 = StudyEventDef("FLDR1", "Folder 1", False, StudyEventDef.SCHEDULED)
    fldr1 << FormRef("DM", 1, True)
    fldr1 << FormRef("VS", 2, True)
    meta << fldr1
    meta << StudyEventDef("FLDR2", "Folder 2", False, StudyEventDef.UNSCHEDULED)(
        FormRef("VS", 1, True)
    )
    meta << StudyEventDef("AE", "Adverse Events", False, StudyEventDef.COMMON)(
        FormRef("AE", 1, False)
    )
    # Demography form with restrictions and its two item groups.
    dm_form = FormDef("DM","Demography")
    dm_form << MdsolHelpText("en","Some help text for Demography")
    dm_form << MdsolViewRestriction('Data Manager')
    dm_form << MdsolEntryRestriction('Batch Upload')
    dm_form << ItemGroupRef("DM_IG1", 1)
    dm_form << ItemGroupRef("DM_IG2", 2)
    # Add to metadata
    meta << dm_form
    # Define item group
    meta << ItemGroupDef("DM_IG1", "DM Item Group 1")(
        MdsolLabelRef("LBL1", 1),
        ItemRef("SEX", 2)(
            MdsolAttribute("Standards","CDASH","SEX"),
            MdsolAttribute("Standards","STDNUMBER","1120")
        ),
        ItemRef("RACE", 3),
        ItemRef("RACE_OTH", 4),
        ItemRef("DOB", 5),
        ItemRef("AGE", 6)
    )
    # Add the ItemDefs
    meta << ItemDef("SEX", "Gender", DataType.Text, 1,
                    control_type=ControlType.RadioButton)(
        Question()(TranslatedText("Gender at Birth")),
        CodeListRef("CL_SEX")
    )
    meta << ItemDef("RACE", "Race", DataType.Text, 2,
                    control_type=ControlType.RadioButtonVertical)(
        Question()(TranslatedText("Race")),
        CodeListRef("CL_RACE")
    )
    meta << ItemDef("RACE_OTH", "RaceOther", DataType.Text, 20) \
        << Question() << TranslatedText("If Race Other, please specify")
    meta << ItemDef("DOB", "DateOfBirth", DataType.Date, 10,
                    control_type=ControlType.DateTime,
                    date_time_format="dd/mm/yyyy")(
        Question()(TranslatedText("Date of Birth")),
        MdsolHelpText("en","If month unknown, enter January")
    )
    # AGE carries soft range checks: 18 <= AGE <= 65.
    meta << ItemDef("AGE", "Age in Years", DataType.Integer, 4,
                    significant_digits=3, control_type=ControlType.Text)(
        Question()(TranslatedText("Age in Years")),
        RangeCheck(RangeCheckComparatorType.GreaterThanEqualTo, RangeCheckType.Soft)(
            CheckValue("18")
        ),
        RangeCheck(RangeCheckComparatorType.LessThanEqualTo, RangeCheckType.Soft)(
            CheckValue("65")
        )
    )
    # Add a Label
    meta.add(MdsolLabelDef("LBL1", "Label1")(TranslatedText("Please answer all questions.")))
    # As well as () and << you can use add()
    meta.add(
        CodeList("CL_SEX", "SEX", datatype=DataType.Text)(
            CodeListItem("M").add(
                Decode().add(
                    TranslatedText("Male"))
            ),
            CodeListItem("F").add(
                Decode().add(
                    TranslatedText("Female"))
            ),
        ),
        CodeList("CL_RACE", "RACE", datatype=DataType.Text)(
            CodeListItem("AS")(Decode()(TranslatedText("Asian"))),
            CodeListItem("CA")(Decode()(TranslatedText("White"))),
            CodeListItem("OT")(Decode()(TranslatedText("Other"))),
        )
    )
    # Edit check: query when RACE == "OT" but RACE_OTH is empty.
    meta.add(MdsolEditCheckDef('CHECK1')(
        # Static value required to make this stick, gets ignored but won't load without it
        MdsolCheckStep(form_oid="DM", field_oid="RACE", data_format='CodedValue', static_value="1"),
        MdsolCheckStep(static_value="OT", data_format="$2"),
        MdsolCheckStep(function=StepType.IsEqualTo),
        MdsolCheckStep(form_oid="DM", field_oid="RACE_OTH"),
        MdsolCheckStep(function=StepType.IsEmpty),
        MdsolCheckStep(function=StepType.And),
        MdsolCheckAction(form_oid="DM", field_oid="RACE_OTH",
                         check_action_type=ActionType.OpenQuery,
                         check_string="Race is set as OTHER but not specified. Please correct.",
                         check_options="Site from System,RequiresResponse,RequiresManualClose"
                         )
    ),
        MdsolEditCheckDef('CHECK2')
    )
    meta.add(MdsolCustomFunctionDef("CF1","SELECT 1,2 FROM DataPoints", language="SQ"))
    meta.add(MdsolCustomFunctionDef("CF2","return true;", language="C#"))
    meta.add(
        # Variable OID required
        MdsolDerivationDef("AGE", form_oid="DM", field_oid="AGE")(
            # Variable OID required to be identified as a data step
            MdsolDerivationStep(form_oid="DM", field_oid="DOB", data_format="StandardValue", variable_oid="DOB"),
            MdsolDerivationStep(function=StepType.Age)
        )
    )
    return odm
[ "def", "example_metadata", "(", "study_name", ",", "draft_name", ")", ":", "odm", "=", "ODM", "(", "\"SYSTEM_NAME\"", ",", "filetype", "=", "ODM", ".", "FILETYPE_SNAPSHOT", ")", "study", "=", "Study", "(", "study_name", ",", "project_type", "=", "Study", "."...
35.708333
25.422619
def all_status(self):
    """Return names, hall numbers, and the washers/dryers available for all
    rooms in the system

    >>> all_laundry = l.all_status()
    """
    # Parse every known hall into a {room: status} mapping.
    return {room: self.parse_a_hall(room) for room in self.hall_to_link}
[ "def", "all_status", "(", "self", ")", ":", "laundry_rooms", "=", "{", "}", "for", "room", "in", "self", ".", "hall_to_link", ":", "laundry_rooms", "[", "room", "]", "=", "self", ".", "parse_a_hall", "(", "room", ")", "return", "laundry_rooms" ]
29.727273
14.272727
def auto_decompress_stream(src):
    """Decompress data from `src` if required.

    If the first block of `src` appears to be compressed, then the entire
    stream will be uncompressed. Otherwise the stream will be passed along
    as-is.

    Args:
        src (iterable): iterable that yields blocks of data

    Yields:
        blocks of uncompressed data
    """
    # Sniff the first block, then stitch it back onto the stream.
    first = next(src)
    stream = chain([first], src)
    kind = guess_compression(first)
    if kind == 'bz2':
        stream = bz2_decompress_stream(stream)
    elif kind == 'xz':
        stream = xz_decompress_stream(stream)
    for chunk in stream:
        yield chunk
[ "def", "auto_decompress_stream", "(", "src", ")", ":", "block", "=", "next", "(", "src", ")", "compression", "=", "guess_compression", "(", "block", ")", "if", "compression", "==", "'bz2'", ":", "src", "=", "bz2_decompress_stream", "(", "chain", "(", "[", ...
26.76
21.68
def set_mute(self, mute):
    """Send mute command."""
    url = ENDPOINTS["setMute"].format(self.ip_address, self.zone_id)
    # The device expects the boolean as a lowercase string flag.
    flag = "true" if mute else "false"
    return request(url, params={"enable": flag})
[ "def", "set_mute", "(", "self", ",", "mute", ")", ":", "req_url", "=", "ENDPOINTS", "[", "\"setMute\"", "]", ".", "format", "(", "self", ".", "ip_address", ",", "self", ".", "zone_id", ")", "params", "=", "{", "\"enable\"", ":", "\"true\"", "if", "mute...
47
14.6
def firmware_download_output_cluster_output_fwdl_status(self, **kwargs):
    """Auto Generated Code
    """
    root = ET.Element("config")
    fw_elem = ET.Element("firmware_download")
    # Mirrors the generated original: the initial config element is
    # discarded and the firmware_download element becomes the payload.
    root = fw_elem
    out_elem = ET.SubElement(fw_elem, "output")
    cluster_elem = ET.SubElement(out_elem, "cluster-output")
    status_elem = ET.SubElement(cluster_elem, "fwdl-status")
    status_elem.text = kwargs.pop('fwdl_status')
    handler = kwargs.pop('callback', self._callback)
    return handler(root)
[ "def", "firmware_download_output_cluster_output_fwdl_status", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "firmware_download", "=", "ET", ".", "Element", "(", "\"firmware_download\"", ")", "config", ...
44
15.923077
def _toggle_transparency(self, changed_from_config_window=False, force_value=None):
    """ Toggles theme transparency.

        changed_from_config_window is used to inhibit toggling from within
        Config Window when 'T' is pressed.

        force_value will set transparency if True or False, or toggle
        transparency if None
    """
    # While the Config Window is open, only it may toggle transparency.
    if self.window_mode == CONFIG_MODE and not changed_from_config_window:
        return
    self._theme.toggleTransparency(force_value)
    # Cache the effective value the theme actually applied.
    self._cnf.use_transparency = self._theme.getTransparency()
    if self.operation_mode == THEME_MODE:
        self._theme_slector.transparent = self._cnf.use_transparency
    # Redraw all three window regions with the new setting.
    self.headWin.refresh()
    self.bodyWin.refresh()
    self.footerWin.refresh()
    if self._config_win:
        # Keep the Config Window's option model in sync.
        self._config_win._config_options['use_transparency'][1] = self._cnf.use_transparency
        if not changed_from_config_window:
            # External toggles also update the saved baseline so the Config
            # Window does not report a spurious unsaved change.
            self._config_win._saved_config_options['use_transparency'][1] = self._cnf.use_transparency
            self._config_win._old_use_transparency = self._cnf.use_transparency
[ "def", "_toggle_transparency", "(", "self", ",", "changed_from_config_window", "=", "False", ",", "force_value", "=", "None", ")", ":", "if", "self", ".", "window_mode", "==", "CONFIG_MODE", "and", "not", "changed_from_config_window", ":", "return", "self", ".", ...
49.565217
23.26087
def DefaultSelector(sock):
    "Return the best selector for the platform"
    global _DEFAULT_SELECTOR
    # Resolve the selector class once and cache it at module level.
    if _DEFAULT_SELECTOR is None:
        if has_selector('poll'):
            selector_cls = PollSelector
        elif hasattr(select, 'select'):
            selector_cls = SelectSelector
        else:
            raise RedisError('Platform does not support any selectors')
        _DEFAULT_SELECTOR = selector_cls
    return _DEFAULT_SELECTOR(sock)
[ "def", "DefaultSelector", "(", "sock", ")", ":", "global", "_DEFAULT_SELECTOR", "if", "_DEFAULT_SELECTOR", "is", "None", ":", "if", "has_selector", "(", "'poll'", ")", ":", "_DEFAULT_SELECTOR", "=", "PollSelector", "elif", "hasattr", "(", "select", ",", "'select...
37.545455
11.181818
def cache_file(app_name=APPNAME, app_author=APPAUTHOR, filename=DATABASENAME):
    """Returns the filename (including path) for the data cache.

    The path will depend on the operating system, certain environmental
    variables and whether it is being run inside a virtual environment.
    See `homebase <https://github.com/dwavesystems/homebase>`_.

    Args:
        app_name (str, optional): The application name. Default is given by
            :obj:`.APPNAME`.
        app_author (str, optional): The application author. Default is given
            by :obj:`.APPAUTHOR`.
        filename (str, optional): The name of the database file. Default is
            given by :obj:`DATABASENAME`.

    Returns:
        str: The full path to the file that can be used as a cache.

    Notes:
        Creates the directory if it does not already exist.

        If run inside of a virtual environment, the cache will be stored
        in `/path/to/virtualenv/data/app_name`
    """
    # homebase resolves the platform-specific user data dir and creates it.
    data_dir = homebase.user_data_dir(app_name=app_name,
                                      app_author=app_author,
                                      create=True)
    return os.path.join(data_dir, filename)
[ "def", "cache_file", "(", "app_name", "=", "APPNAME", ",", "app_author", "=", "APPAUTHOR", ",", "filename", "=", "DATABASENAME", ")", ":", "user_data_dir", "=", "homebase", ".", "user_data_dir", "(", "app_name", "=", "app_name", ",", "app_author", "=", "app_au...
40.814815
25.518519
def monitored(name, **params):
    '''
    Makes sure an URL is monitored by uptime. Checks if URL is already
    monitored, and if not, adds it.
    '''
    ret = {'name': name, 'changes': {}, 'result': None, 'comment': ''}
    # Already monitored: report success without changes.
    if __salt__['uptime.check_exists'](name=name):
        ret['result'] = True
        ret['comment'] = 'URL {0} is already monitored'.format(name)
        ret['changes'] = {}
        return ret
    # Test mode: describe what would happen, make no change.
    if __opts__['test']:
        msg = 'URL {0} is going to be added to uptime'
        ret.update(result=None, comment=msg.format(name))
        return ret
    url_monitored = __salt__['uptime.create'](name, **params)
    if url_monitored:
        msg = 'Successfully added the URL {0} to uptime'
        ret['result'] = True
        ret['comment'] = msg.format(name)
        ret['changes'] = {'url_monitored': url_monitored}
    else:
        ret['result'] = False
        ret['comment'] = 'Failed to add {0} to uptime'.format(name)
        ret['changes'] = {}
    return ret
[ "def", "monitored", "(", "name", ",", "*", "*", "params", ")", ":", "ret", "=", "{", "'name'", ":", "name", ",", "'changes'", ":", "{", "}", ",", "'result'", ":", "None", ",", "'comment'", ":", "''", "}", "if", "__salt__", "[", "'uptime.check_exists'...
35.37931
19.655172
def fetch(self, start=False, full_data=True):
    """ Get the current job data and possibly flag it as started.

    :param start: when True, atomically claim the job (set status to
        "started") while fetching it.
    :param full_data: True for all fields, a dict for a custom MongoDB
        projection, anything else for the minimal execution projection.
    :return: self, with ``self.data`` populated (or None if not found).
    """
    # Without an id there is nothing to fetch.
    if self.id is None:
        return self
    if full_data is True:
        fields = None  # None means "all fields" to MongoDB
    elif isinstance(full_data, dict):
        fields = full_data  # caller-supplied projection
    else:
        # Minimal projection needed to execute the job.
        fields = {
            "_id": 0,
            "path": 1,
            "params": 1,
            "status": 1,
            "retry_count": 1,
        }
    if start:
        self.datestarted = datetime.datetime.utcnow()
        # Atomic claim: only transition to "started" when the job has not
        # been cancelled/aborted or exhausted its retries.
        self.set_data(self.collection.find_and_modify(
            {
                "_id": self.id,
                "status": {"$nin": ["cancel", "abort", "maxretries"]}
            },
            {"$set": {
                "status": "started",
                "datestarted": self.datestarted,
                "worker": self.worker.id
            },
             "$unset": {
                "dateexpires": 1  # we don't want started jobs to expire unexpectedly
            }},
            projection=fields)
        )
        context.metric("jobs.status.started")
    else:
        # Read-only fetch; no status transition.
        self.set_data(self.collection.find_one({
            "_id": self.id
        }, projection=fields))
    if self.data is None:
        context.log.info(
            "Job %s not found in MongoDB or status was cancelled!" % self.id)
    self.stored = True
    return self
[ "def", "fetch", "(", "self", ",", "start", "=", "False", ",", "full_data", "=", "True", ")", ":", "if", "self", ".", "id", "is", "None", ":", "return", "self", "if", "full_data", "is", "True", ":", "fields", "=", "None", "elif", "isinstance", "(", ...
28.730769
19.25
def _function(self): """ Waits until stopped to keep script live. Gui must handle calling of Toggle_NV function on mouse click. """ start_time = datetime.datetime.now() # calculate stop time if self.settings['wait_mode'] == 'absolute': stop_time = start_time + datetime.timedelta(seconds= self.settings['wait_time']) elif self.settings['wait_mode'] == 'loop_interval': if self.last_execution is None: stop_time = start_time else: loop_time = start_time - self.last_execution wait_time = datetime.timedelta(seconds= self.settings['wait_time']) if wait_time.total_seconds() <0: stop_time = start_time else: stop_time = start_time + wait_time else: TypeError('unknown wait_mode') current_time = start_time while current_time<stop_time: if self._abort: break current_time = datetime.datetime.now() time.sleep(1) self.progress = 100.*(current_time- start_time).total_seconds() / (stop_time - start_time).total_seconds() self.updateProgress.emit(int(self.progress)) if self.settings['wait_mode'] == 'absolute': self.last_execution = None else: self.last_execution = start_time
[ "def", "_function", "(", "self", ")", ":", "start_time", "=", "datetime", ".", "datetime", ".", "now", "(", ")", "# calculate stop time", "if", "self", ".", "settings", "[", "'wait_mode'", "]", "==", "'absolute'", ":", "stop_time", "=", "start_time", "+", ...
36.921053
20.973684
def build_toc_line_without_indentation(header: dict,
                                       ordered: bool = False,
                                       no_links: bool = False,
                                       index: int = 1,
                                       parser: str = 'github',
                                       list_marker: str = '-') -> str:
    r"""Return a single, unindented list element of the table of contents.

    :parameter header: data structure holding the original text, the trimmed
         text and the header type.
    :parameter ordered: use numbers as list ids instead of a dash character.
         Defaults to ``False``.
    :parameter no_links: disables the use of links. Defaults to ``False``.
    :parameter index: the number used as list id for an ordered table of
         contents. Defaults to ``1``.
    :parameter parser: decides rules on how to compute indentations.
         Defaults to ``github``.
    :parameter list_marker: string with some of the first characters of the
         list element. Defaults to ``-``.
    :type header: dict
    :type ordered: bool
    :type no_links: bool
    :type index: int
    :type parser: str
    :type list_marker: str
    :returns: a single line of the table of contents without indentation.
    :rtype: str
    :raises: a built-in exception.
    """
    assert 'type' in header
    assert 'text_original' in header
    assert 'text_anchor_link' in header
    assert isinstance(header['type'], int)
    assert isinstance(header['text_original'], str)
    assert isinstance(header['text_anchor_link'], str)
    assert header['type'] >= 1
    assert index >= 1

    # All these parsers share the same (github-like) marker rules.
    github_like = parser in ('github', 'cmark', 'gitlab', 'commonmarker',
                             'redcarpet')
    if github_like:
        if ordered:
            assert list_marker in md_parser[parser]['list']['ordered'][
                'closing_markers']
        else:
            assert list_marker in md_parser[parser]['list']['unordered'][
                'bullet_markers']

    toc_line_no_indent = str()
    if github_like:
        if ordered:
            list_marker = str(index) + list_marker

        # FIXME: is this always correct?
        if no_links:
            line = header['text_original']
        else:
            line = ('[' + header['text_original'] + ']'
                    + '(#' + header['text_anchor_link'] + ')')
        toc_line_no_indent = list_marker + ' ' + line

    return toc_line_no_indent
[ "def", "build_toc_line_without_indentation", "(", "header", ":", "dict", ",", "ordered", ":", "bool", "=", "False", ",", "no_links", ":", "bool", "=", "False", ",", "index", ":", "int", "=", "1", ",", "parser", ":", "str", "=", "'github'", ",", "list_mar...
40.9375
19
def launch(url, wait=False, locate=False):
    """Launch *url* (or a filename) in the default viewer application for
    its file type; executables may start in a new session.

    Returns the exit code of the launched application (usually ``0`` on
    success).

    Examples::

        click.launch('https://click.palletsprojects.com/')
        click.launch('/my/downloaded/file', locate=True)

    .. versionadded:: 2.0

    :param url: URL or filename of the thing to launch.
    :param wait: waits for the program to stop.
    :param locate: if `True`, launch a file manager with the file selected
                   instead of the associated application.  This might have
                   weird effects if the URL does not point to the filesystem.
    """
    # Imported lazily to keep the terminal implementation out of the
    # module's import path until actually needed.
    from ._termui_impl import open_url

    exit_code = open_url(url, wait=wait, locate=locate)
    return exit_code
[ "def", "launch", "(", "url", ",", "wait", "=", "False", ",", "locate", "=", "False", ")", ":", "from", ".", "_termui_impl", "import", "open_url", "return", "open_url", "(", "url", ",", "wait", "=", "wait", ",", "locate", "=", "locate", ")" ]
42.125
21.916667
def joint(self, table, fields, join_table, join_fields, condition_field,
          condition_join_field, join_method='left_join'):
    """Build and execute a two-table join query.

    Usage::

        >>> joint('user', 'name, id_number', 'medical_card', 'number',
                  'id', 'user_id', 'inner_join')
        select u.name, u.id_number, v.number from user as u inner join
        medical_card as v on u.id=v.user_id;

    :param table: left table name (aliased ``u``).
    :param fields: comma-separated column list from the left table.
    :param join_table: right table name (aliased ``v``).
    :param join_fields: comma-separated column list from the right table.
    :param condition_field: join column on the left table.
    :param condition_join_field: join column on the right table.
    :param join_method: SQL join keyword, default ``'left_join'``.
    :returns: query results from the underlying ``execute`` call.
    """
    # BUG FIX: the original used ``map(string.strip, ...)`` — the function
    # form ``string.strip`` was removed in Python 3; ``str.strip`` works on
    # both Python 2 and 3.
    field_names = [f.strip() for f in fields.split(',')]
    select = ', '.join('u.{}'.format(f) for f in field_names)

    join_field_names = [f.strip() for f in join_fields.split(',')]
    join_select = ', '.join('v.{}'.format(f) for f in join_field_names)

    sql = "select {select}, {join_select} from {table} as u {join_method}"\
          " {join_table} as v on u.{condition_field}="\
          "v.{condition_join_field};".format(
              select=select,
              join_select=join_select,
              table=table,
              join_method=join_method,
              join_table=join_table,
              condition_field=condition_field,
              condition_join_field=condition_join_field)
    return super(PGWrapper, self).execute(sql, result=True).results
[ "def", "joint", "(", "self", ",", "table", ",", "fields", ",", "join_table", ",", "join_fields", ",", "condition_field", ",", "condition_join_field", ",", "join_method", "=", "'left_join'", ")", ":", "import", "string", "fields", "=", "map", "(", "string", "...
52.142857
28.964286
def callback(self, request, **kwargs):
    """
    Called from the Service when the user accepts activation.

    Builds the combined ``access_token`` (OAuth token and id joined by the
    ``#TH#`` separator) and delegates to the parent class.

    :param request: request object
    :return: callback url
    :rtype: string, path to the template
    """
    parts = [request.session['oauth_token'],
             str(request.session['oauth_id'])]
    kwargs = {'access_token': "#TH#".join(parts)}
    return super(ServiceGithub, self).callback(request, **kwargs)
[ "def", "callback", "(", "self", ",", "request", ",", "*", "*", "kwargs", ")", ":", "access_token", "=", "request", ".", "session", "[", "'oauth_token'", "]", "+", "\"#TH#\"", "access_token", "+=", "str", "(", "request", ".", "session", "[", "'oauth_id'", ...
44.454545
11.363636
def build_sdist(sdist_directory, config_settings=None):
    """Build an sdist for the project in the current directory and place
    it in *sdist_directory*; return the archive's file name."""
    poetry = Poetry.create(".")
    builder = SdistBuilder(poetry, SystemEnv(Path(sys.prefix)), NullIO())
    archive_path = builder.build(Path(sdist_directory))
    return unicode(archive_path.name)
[ "def", "build_sdist", "(", "sdist_directory", ",", "config_settings", "=", "None", ")", ":", "poetry", "=", "Poetry", ".", "create", "(", "\".\"", ")", "path", "=", "SdistBuilder", "(", "poetry", ",", "SystemEnv", "(", "Path", "(", "sys", ".", "prefix", ...
31.222222
22
def cleanup(self):
    "Remove the directory containing the clone and virtual environment."
    # TemporaryDirectory.cleanup() deletes the whole tree.
    log.info('Removing temp dir %s', self._tempdir.name)
    self._tempdir.cleanup()
[ "def", "cleanup", "(", "self", ")", ":", "log", ".", "info", "(", "'Removing temp dir %s'", ",", "self", ".", "_tempdir", ".", "name", ")", "self", ".", "_tempdir", ".", "cleanup", "(", ")" ]
46
21.5
def sflow_source_ip(self, **kwargs):
    """Build the NETCONF config XML for the sFlow ``source-ip`` node and
    hand it to the callback.

    :key source_ip: the sFlow source IP address (required).
    :key callback: override the instance's default callback.
    :returns: whatever the callback returns for the built config element.
    """
    cfg = ET.Element("config")
    sflow_el = ET.SubElement(
        cfg, "sflow", xmlns="urn:brocade.com:mgmt:brocade-sflow")
    ip_el = ET.SubElement(sflow_el, "source-ip")
    ip_el.text = kwargs.pop('source_ip')
    handler = kwargs.pop('callback', self._callback)
    return handler(cfg)
[ "def", "sflow_source_ip", "(", "self", ",", "*", "*", "kwargs", ")", ":", "config", "=", "ET", ".", "Element", "(", "\"config\"", ")", "sflow", "=", "ET", ".", "SubElement", "(", "config", ",", "\"sflow\"", ",", "xmlns", "=", "\"urn:brocade.com:mgmt:brocad...
39.3
14.4
def _request_status(self):
    """Poll the monitor endpoint for async-job progress.

    Updates ``status``, ``completion_percentage`` and ``item_id`` from the
    response; returns True once a resource id is known.
    """
    if self.item_id:
        # Already resolved on a previous poll.
        return True

    response = self.con.get(self.monitor_url)
    if not response:
        return False

    payload = response.json()
    self.status = payload.get('status', 'inProgress')
    self.completion_percentage = payload.get(
        self._cc('percentageComplete'), 0)
    self.item_id = payload.get(self._cc('resourceId'), None)
    return self.item_id is not None
[ "def", "_request_status", "(", "self", ")", ":", "if", "self", ".", "item_id", ":", "return", "True", "response", "=", "self", ".", "con", ".", "get", "(", "self", ".", "monitor_url", ")", "if", "not", "response", ":", "return", "False", "data", "=", ...
32.529412
19.941176
def is_valid(self, t: URIRef) -> bool:
    """Return True if *t* is a recognized FHIR metadata URI.

    :param t: metadata URI
    :raises TypeError: if 't' is unrecognized
    """
    if self.has_type(t):
        return True
    raise TypeError("Unrecognized FHIR type: {}".format(t))
[ "def", "is_valid", "(", "self", ",", "t", ":", "URIRef", ")", "->", "bool", ":", "if", "not", "self", ".", "has_type", "(", "t", ")", ":", "raise", "TypeError", "(", "\"Unrecognized FHIR type: {}\"", ".", "format", "(", "t", ")", ")", "return", "True" ...
32.125
9.625
def validate_inputs(u_kn, N_k, f_k):
    """Check types and return inputs for MBAR calculations.

    Parameters
    ----------
    u_kn or q_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
        The reduced potential energies or unnormalized probabilities
    N_k : np.ndarray, shape=(n_states), dtype='int'
        The number of samples in each state
    f_k : np.ndarray, shape=(n_states), dtype='float'
        The reduced free energies of each state

    Returns
    -------
    u_kn or q_kn : np.ndarray, shape=(n_states, n_samples), dtype='float'
        The reduced potential energies or unnormalized probabilities
    N_k : np.ndarray, shape=(n_states), dtype='float'
        The number of samples in each state.  Converted to float because
        this cast is required when log is calculated.
    f_k : np.ndarray, shape=(n_states), dtype='float'
        The reduced free energies of each state
    """
    # All shape checks are keyed off u_kn's dimensions.
    n_states, n_samples = u_kn.shape

    u_kn = ensure_type(u_kn, 'float', 2, "u_kn or Q_kn", shape=(n_states, n_samples))
    N_k = ensure_type(N_k, 'float', 1, "N_k", shape=(n_states,), warn_on_cast=False)  # Autocast to float because will be eventually used in float calculations.
    f_k = ensure_type(f_k, 'float', 1, "f_k", shape=(n_states,))

    return u_kn, N_k, f_k
[ "def", "validate_inputs", "(", "u_kn", ",", "N_k", ",", "f_k", ")", ":", "n_states", ",", "n_samples", "=", "u_kn", ".", "shape", "u_kn", "=", "ensure_type", "(", "u_kn", ",", "'float'", ",", "2", ",", "\"u_kn or Q_kn\"", ",", "shape", "=", "(", "n_sta...
45.321429
26.75
def _do_anchor(self, anchor):
    """ Collects preposition anchors and attachments in a dictionary.
        Once the dictionary has an entry for both the anchor and the
        attachment, they are linked: the PNP's ``anchor`` is set and the
        PNP is appended to the anchor chunk's ``attachments``.

        :param anchor: tag such as ``"A1"``, ``"P1"`` or a ``-``-separated
            combination; may be empty/None (then nothing happens).
    """
    if anchor:
        for x in anchor.split("-"):
            A, P = None, None
            if x.startswith("A") and len(self.chunks) > 0: # anchor
                # Latest chunk is the anchor for id x; derive the paired
                # attachment id by swapping the A prefix for P.
                A, P = x, x.replace("A","P")
                self._anchors[A] = self.chunks[-1]
            if x.startswith("P") and len(self.pnp) > 0: # attachment (PNP)
                A, P = x.replace("P","A"), x
                self._anchors[P] = self.pnp[-1]
            # Link only when both sides are known and the PNP has not
            # already been anchored.
            if A in self._anchors and P in self._anchors and not self._anchors[P].anchor:
                pnp = self._anchors[P]
                pnp.anchor = self._anchors[A]
                pnp.anchor.attachments.append(pnp)
[ "def", "_do_anchor", "(", "self", ",", "anchor", ")", ":", "if", "anchor", ":", "for", "x", "in", "anchor", ".", "split", "(", "\"-\"", ")", ":", "A", ",", "P", "=", "None", ",", "None", "if", "x", ".", "startswith", "(", "\"A\"", ")", "and", "...
52.647059
17.235294
def compat_get_paginated_response(view, page):
    """DRF-version shim: ``get_paginated_response`` is unknown to DRF 3.0,
    so fall back to ``get_pagination_serializer`` there."""
    if DRFVLIST[0] == 3 and DRFVLIST[1] >= 1:
        # Imported here to avoid a circular import at module load time.
        from rest_messaging.serializers import ComplexMessageSerializer
        serializer = ComplexMessageSerializer(page, many=True)
        return view.get_paginated_response(serializer.data)
    serializer = view.get_pagination_serializer(page)
    return Response(serializer.data)
[ "def", "compat_get_paginated_response", "(", "view", ",", "page", ")", ":", "if", "DRFVLIST", "[", "0", "]", "==", "3", "and", "DRFVLIST", "[", "1", "]", ">=", "1", ":", "from", "rest_messaging", ".", "serializers", "import", "ComplexMessageSerializer", "# c...
51.555556
16.666667
def _get_indexes(self, schema, **kwargs):
    """Return all indexes for *schema* as ``{index_name: [column, ...]}``.

    Uses SQLite's ``index_list``/``index_info`` PRAGMAs:
    https://www.sqlite.org/pragma.html#pragma_index_list
    """
    indexes = {}
    table = self._normalize_table_name(schema)
    index_rows = self._query('PRAGMA index_list({})'.format(table), **kwargs)
    if index_rows:
        for row in index_rows:
            columns = indexes.setdefault(row['name'], [])
            info_rows = self._query(
                'PRAGMA index_info({})'.format(row['name']), **kwargs)
            for info in info_rows:
                columns.append(info['name'])
    return indexes
[ "def", "_get_indexes", "(", "self", ",", "schema", ",", "*", "*", "kwargs", ")", ":", "# http://www.sqlite.org/pragma.html#schema", "# http://www.mail-archive.com/sqlite-users@sqlite.org/msg22055.html", "# http://stackoverflow.com/questions/604939/", "ret", "=", "{", "}", "rs",...
43.875
20.0625
def badnick(self, me=None, nick=None, **kw):
    """On a nick error ('*'), fall back to an alternative nick and
    schedule an attempt to regain the original one in 30 seconds."""
    if me != '*':
        return
    self.bot.set_nick(self.bot.nick + '_')
    self.bot.log.debug('Trying to regain nickname in 30s...')
    self.nick_handle = self.bot.loop.call_later(
        30, self.bot.set_nick, self.bot.original_nick)
[ "def", "badnick", "(", "self", ",", "me", "=", "None", ",", "nick", "=", "None", ",", "*", "*", "kw", ")", ":", "if", "me", "==", "'*'", ":", "self", ".", "bot", ".", "set_nick", "(", "self", ".", "bot", ".", "nick", "+", "'_'", ")", "self", ...
47.142857
12.571429
def get(self, key, namespace=None):
    """Retrieve the value for *key* (optionally namespaced) from the
    environment-override stack; ``NO_VALUE`` when nothing is overridden."""
    if not _CONFIG_OVERRIDE:
        # Short-circuit to reduce overhead: no overrides, no key lookup.
        return NO_VALUE
    env_key = generate_uppercase_key(key, namespace)
    logger.debug('Searching %s for %s', self, env_key)
    # Later overrides win, hence the reversed iteration order.
    return get_key_from_envs(reversed(_CONFIG_OVERRIDE), env_key)
[ "def", "get", "(", "self", ",", "key", ",", "namespace", "=", "None", ")", ":", "# Short-circuit to reduce overhead.", "if", "not", "_CONFIG_OVERRIDE", ":", "return", "NO_VALUE", "full_key", "=", "generate_uppercase_key", "(", "key", ",", "namespace", ")", "logg...
45
11.875
def is_method(arg):
    """Check whether *arg* is a method: a bound method, a
    ``NonInstanceMethod``, or a plain function that follows the instance
    method convention."""
    if inspect.ismethod(arg) or isinstance(arg, NonInstanceMethod):
        return True
    # An instance method that has not yet been attached to a class is
    # indistinguishable from a regular function, so we rely on the strong
    # (but interpreter-unenforced) convention that its first argument is
    # named ``self``.
    return inspect.isfunction(arg) and _get_first_arg_name(arg) == 'self'
[ "def", "is_method", "(", "arg", ")", ":", "if", "inspect", ".", "ismethod", "(", "arg", ")", ":", "return", "True", "if", "isinstance", "(", "arg", ",", "NonInstanceMethod", ")", ":", "return", "True", "# Unfortunately, there is no disctinction between instance me...
40.0625
22.8125
def run(self):
    """
    Write five JSON records, one per line, into the task's output target.

    Each record has:

    * `_id`: the default Elasticsearch id field,
    * `text`: the text,
    * `date`: the day when the data was created.
    """
    today = str(datetime.date.today())
    with self.output().open('w') as sink:
        for i in range(5):
            record = {'_id': i, 'text': 'Hi %s' % i, 'date': today}
            sink.write(json.dumps(record))
            sink.write('\n')
[ "def", "run", "(", "self", ")", ":", "today", "=", "datetime", ".", "date", ".", "today", "(", ")", "with", "self", ".", "output", "(", ")", ".", "open", "(", "'w'", ")", "as", "output", ":", "for", "i", "in", "range", "(", "5", ")", ":", "ou...
33.529412
17.882353
def _last_commit(self):
    """
    Retrieve the most recent commit message (with ``svn log -l1``)

    Returns:
        tuple: (datestr, (revno, user, None, desc))

    ::

        $ svn log -l1
        ------------------------------------------------------------------------
        r25701 | bhendrix | 2010-08-02 12:14:25 -0500 (Mon, 02 Aug 2010) | 1 line

        added selection range traits to make it possible for users to replace
        ------------------------------------------------------------------------

    .. note:: svn log references the svn server
    """
    # BUG FIX: the original list was ['svn', 'log' '-l1'] -- implicit string
    # concatenation produced the single bogus argument 'log-l1' instead of
    # the subcommand plus its limit flag.
    cmd = ['svn', 'log', '-l1']
    op = self.sh(cmd, shell=False)
    data, rest = op.split('\n', 2)[1:]
    revno, user, datestr, lc = data.split(' | ', 3)
    desc = '\n'.join(rest.split('\n')[1:-2])
    revno = revno[1:]  # strip the leading 'r' from e.g. 'r25701'
    return datestr, (revno, user, None, desc)
[ "def", "_last_commit", "(", "self", ")", ":", "cmd", "=", "[", "'svn'", ",", "'log'", "'-l1'", "]", "op", "=", "self", ".", "sh", "(", "cmd", ",", "shell", "=", "False", ")", "data", ",", "rest", "=", "op", ".", "split", "(", "'\\n'", ",", "2",...
35.08
19.96
def insertBlock(self):
    """
    API to insert a block into DBS

    :param blockObj: Block object
    :type blockObj: dict
    :key open_for_writing: Open For Writing (1/0) (Optional, default 1)
    :key block_size: Block Size (Optional, default 0)
    :key file_count: File Count (Optional, default 0)
    :key block_name: Block Name (Required)
    :key origin_site_name: Origin Site Name (Required)
    """
    try:
        # The block payload arrives as JSON in the request body.
        body = request.body.read()
        indata = cjson.decode(body)
        indata = validateJSONInputNoCopy("block", indata)
        self.dbsBlock.insertBlock(indata)
    except cjson.DecodeError as dc:
        # Malformed JSON from the client.
        dbsExceptionHandler("dbsException-invalid-input2", "Wrong format/data from insert Block input", self.logger.exception, str(dc))
    except dbsException as de:
        # Known DBS error: propagate its code/message unchanged.
        dbsExceptionHandler(de.eCode, de.message, self.logger.exception, de.message)
    except Exception as ex:
        # Anything else is a server-side failure; include the traceback.
        sError = "DBSWriterModel/insertBlock. %s\n. Exception trace: \n %s" \
            % (ex, traceback.format_exc())
        dbsExceptionHandler('dbsException-server-error', dbsExceptionCode['dbsException-server-error'], self.logger.exception, sError)
[ "def", "insertBlock", "(", "self", ")", ":", "try", ":", "body", "=", "request", ".", "body", ".", "read", "(", ")", "indata", "=", "cjson", ".", "decode", "(", "body", ")", "indata", "=", "validateJSONInputNoCopy", "(", "\"block\"", ",", "indata", ")"...
47.576923
22.269231
def get_selected(self):
    """Return the current selection.

    With ``multi_select`` enabled the result is a list of
    ``(option, index)`` tuples (one per selected entry); otherwise a
    single ``(option, index)`` tuple.
    """
    if not self.multi_select:
        return self.options[self.index], self.index
    return [(self.options[i], i) for i in self.all_selected]
[ "def", "get_selected", "(", "self", ")", ":", "if", "self", ".", "multi_select", ":", "return_tuples", "=", "[", "]", "for", "selected", "in", "self", ".", "all_selected", ":", "return_tuples", ".", "append", "(", "(", "self", ".", "options", "[", "selec...
40.545455
13.454545
def cut_gmail_quote(html_message):
    ''' Cuts the outermost block element with class gmail_quote
    (unless its text marks a forwarded message); returns True if
    something was removed. '''
    matches = cssselect('div.gmail_quote', html_message)
    if not matches:
        return
    quote = matches[0]
    if quote.text is not None and RE_FWD.match(quote.text):
        # Forwarded messages keep their quoted block.
        return
    quote.getparent().remove(quote)
    return True
[ "def", "cut_gmail_quote", "(", "html_message", ")", ":", "gmail_quote", "=", "cssselect", "(", "'div.gmail_quote'", ",", "html_message", ")", "if", "gmail_quote", "and", "(", "gmail_quote", "[", "0", "]", ".", "text", "is", "None", "or", "not", "RE_FWD", "."...
55.333333
24.333333
def import_from_filename(obj, filename, silent=False):  # pragma: no cover
    """If settings_module is a filename path import it.

    :param obj: settings object; may provide a custom ``find_file`` hook.
    :param filename: path of the module to load (``.py`` appended if
        missing).
    :param silent: when True, missing/unreadable files return None instead
        of being logged/marked as errors.
    :returns: a ``types.ModuleType`` (``_is_error`` set on failure), or
        None for a silently skipped missing file.
    :raises ImportError: if *filename* is already on the interpreter stack
        (circular self-load).
    """
    if filename in [item.filename for item in inspect.stack()]:
        raise ImportError(
            "Looks like you are loading dynaconf "
            "from inside the {} file and then it is trying "
            "to load itself entering in a circular reference "
            "problem. To solve it you have to "
            "invoke your program from another root folder "
            "or rename your program file.".format(filename)
        )

    _find_file = getattr(obj, "find_file", find_file)
    if not filename.endswith(".py"):
        filename = "{0}.py".format(filename)

    if filename in default_settings.SETTINGS_FILE_FOR_DYNACONF:
        # Default settings files are allowed to be absent.
        silent = True
    # NOTE(review): rstrip(".py") strips any trailing '.', 'p', 'y'
    # characters, so e.g. "happy.py" becomes "ha" -- looks like a
    # removesuffix was intended; confirm before changing.
    mod = types.ModuleType(filename.rstrip(".py"))
    mod.__file__ = filename
    mod._is_error = False
    try:
        with io.open(
            _find_file(filename),
            encoding=default_settings.ENCODING_FOR_DYNACONF,
        ) as config_file:
            # Execute the file's code with the module namespace as globals.
            exec(compile(config_file.read(), filename, "exec"), mod.__dict__)
    except IOError as e:
        e.strerror = ("py_loader: error loading file (%s %s)\n") % (
            e.strerror,
            filename,
        )
        if silent and e.errno in (errno.ENOENT, errno.EISDIR):
            return
        raw_logger().debug(e.strerror)
        mod._is_error = True
    return mod
[ "def", "import_from_filename", "(", "obj", ",", "filename", ",", "silent", "=", "False", ")", ":", "# pragma: no cover", "if", "filename", "in", "[", "item", ".", "filename", "for", "item", "in", "inspect", ".", "stack", "(", ")", "]", ":", "raise", "Imp...
38.243243
19.054054
def _get_assistive_access():
    '''
    Get a list of all of the assistive access applications installed,
    returned as (app, enabled) pairs showing whether each app is enabled.
    '''
    cmd = 'sqlite3 "/Library/Application Support/com.apple.TCC/TCC.db" "SELECT * FROM access"'
    call = __salt__['cmd.run_all'](
        cmd,
        output_loglevel='debug',
        python_shell=False
    )

    if call['retcode'] != 0:
        # Combine whatever output channels are present into the error text.
        comment = call.get('stderr', '') + call.get('stdout', '')
        raise CommandExecutionError('Error: {0}'.format(comment))

    return re.findall(
        r'kTCCServiceAccessibility\|(.*)\|[0-9]{1}\|([0-9]{1})\|[0-9]{1}\|',
        call['stdout'],
        re.MULTILINE)
[ "def", "_get_assistive_access", "(", ")", ":", "cmd", "=", "'sqlite3 \"/Library/Application Support/com.apple.TCC/TCC.db\" \"SELECT * FROM access\"'", "call", "=", "__salt__", "[", "'cmd.run_all'", "]", "(", "cmd", ",", "output_loglevel", "=", "'debug'", ",", "python_shell"...
33
24.826087
def _handle_indent_between_paren(self, column, line, parent_impl, tc):
    """
    Handle indent between symbols such as parenthesis, braces,...

    :param column: cursor column in the current line.
    :param line: current line text.
    :param parent_impl: (pre, post) indentation strings computed by the
        caller; refined here.
    :param tc: QTextCursor positioned at the edit point.
    :returns: the adjusted (pre, post) pair.
    """
    pre, post = parent_impl
    next_char = self._get_next_char(tc)
    prev_char = self._get_prev_char(tc)
    prev_open = prev_char in ['[', '(', '{']
    next_close = next_char in [']', ')', '}']
    (open_line, open_symbol_col), (close_line, close_col) = \
        self._get_paren_pos(tc, column)
    open_line_txt = self._helper.line_text(open_line)
    open_line_indent = len(open_line_txt) - len(open_line_txt.lstrip())
    if prev_open:
        # Cursor right after an opening symbol: indent one level deeper
        # than the line that opened it.
        post = (open_line_indent + self.editor.tab_length) * ' '
    elif next_close and prev_char != ',':
        # Cursor right before the closing symbol: align with the opener's
        # line indentation.
        post = open_line_indent * ' '
    elif tc.block().blockNumber() == open_line:
        # Still on the opening line: align under the opening symbol.
        post = open_symbol_col * ' '

    # adapt indent if cursor on closing line and next line have same
    # indent -> PEP8 compliance
    if close_line and close_col:
        txt = self._helper.line_text(close_line)
        bn = tc.block().blockNumber()
        flg = bn == close_line
        next_indent = self._helper.line_indent(bn + 1) * ' '
        if flg and txt.strip().endswith(':') and next_indent == post:
            # | look at how the previous line ( ``':'):`` ) was
            # over-indented, this is actually what we are trying to
            # achieve here
            post += self.editor.tab_length * ' '

    # breaking string
    if next_char in ['"', "'"]:
        tc.movePosition(tc.Left)
    is_string = self._helper.is_comment_or_string(tc, formats=['string'])
    if next_char in ['"', "'"]:
        tc.movePosition(tc.Right)
    if is_string:
        # Breaking inside a string literal: close it before the break and
        # reopen it after, using the same quote character.
        trav = QTextCursor(tc)
        while self._helper.is_comment_or_string(
                trav, formats=['string']):
            trav.movePosition(trav.Left)
        trav.movePosition(trav.Right)
        symbol = '%s' % self._get_next_char(trav)
        pre += symbol
        post += symbol

    return pre, post
[ "def", "_handle_indent_between_paren", "(", "self", ",", "column", ",", "line", ",", "parent_impl", ",", "tc", ")", ":", "pre", ",", "post", "=", "parent_impl", "next_char", "=", "self", ".", "_get_next_char", "(", "tc", ")", "prev_char", "=", "self", ".",...
42.38
14.54
def _join_sequence(seq, last_separator=''):
    """Join *seq* into a comma-separated string, formatting every element
    (the last one may use *last_separator*)."""
    total = len(seq)
    formatted = (_format_element(item, total, position, last_separator)
                 for position, item in enumerate(seq))
    return ', '.join(formatted)
[ "def", "_join_sequence", "(", "seq", ",", "last_separator", "=", "''", ")", ":", "count", "=", "len", "(", "seq", ")", "return", "', '", ".", "join", "(", "_format_element", "(", "element", ",", "count", ",", "i", ",", "last_separator", ")", "for", "i"...
45.6
13.6
def service_enable(s_name, **connection_args):
    '''
    Enable a service

    CLI Example:

    .. code-block:: bash

        salt '*' netscaler.service_enable 'serviceName'
    '''
    service = _service_get(s_name, **connection_args)
    if service is None:
        return False

    nitro = _connect(**connection_args)
    if nitro is None:
        return False

    ret = True
    try:
        NSService.enable(nitro, service)
    except NSNitroError as error:
        log.debug('netscaler module error - NSService.enable() failed: %s', error)
        ret = False
    _disconnect(nitro)
    return ret
[ "def", "service_enable", "(", "s_name", ",", "*", "*", "connection_args", ")", ":", "ret", "=", "True", "service", "=", "_service_get", "(", "s_name", ",", "*", "*", "connection_args", ")", "if", "service", "is", "None", ":", "return", "False", "nitro", ...
23.32
22.76
async def update_offer(self, **params):
    """Mark an offer as confirmed after its transaction is confirmed.

    Accepts (directly or wrapped in a JSON ``message``):
        - txid: transaction id
        - coinid: coin identifier (any ``TEST`` suffix is stripped)

    :returns: the updated offer document without ``_id``, or an error dict
        (400 for missing fields, 404 when no offer matches the txid).
    """
    logging.debug("\n\n -- Update offer. ")
    if params.get("message"):
        params = json.loads(params.get("message", "{}"))

    if not params:
        return {"error":400, "reason":"Missed required fields"}

    # Check if required fields exists
    txid = params.get("txid")
    coinid = params.get("coinid")
    # BUG FIX: the original called params.get("coinid").upper() unguarded
    # and crashed with AttributeError when "coinid" (or "txid") was absent
    # despite claiming to validate required fields.
    if not txid or not coinid:
        return {"error":400, "reason":"Missed required fields"}
    coinid = coinid.upper().replace("TEST", "")

    # Try to find offer with account id and cid
    database = client[coinid]
    offer_db = database[settings.OFFER]
    offer = await offer_db.find_one({"txid":txid})
    logging.debug("\n\n -- Try to get offer. ")
    logging.debug(offer)
    if not offer:
        return {"error":404,
                "reason":"Offer with txid %s not found" % txid }

    # Update offer
    await offer_db.find_one_and_update(
        {"txid":txid}, {"$set":{"confirmed":1}})

    # Get updated offer
    updated = await offer_db.find_one({"txid":txid})
    return {i:updated[i] for i in updated if i != "_id"}
[ "async", "def", "update_offer", "(", "self", ",", "*", "*", "params", ")", ":", "logging", ".", "debug", "(", "\"\\n\\n -- Update offer. \"", ")", "if", "params", ".", "get", "(", "\"message\"", ")", ":", "params", "=", "json", ".", "loads", "(", "params...
25.829268
18.02439
def GenerateKeys(config, overwrite_keys=False):
    """Generate the keys we need for a GRR server.

    Writes the executable-signing, CA and server key/cert pairs plus a CSRF
    secret into *config*.

    :param config: GRR config object to read and update.
    :param overwrite_keys: regenerate even if server keys already exist.
    :raises OpenSourceKeyUtilsRequiredError: when the open source key_utils
        implementation is unavailable.
    :raises KeysAlreadyExistError: when keys exist and overwrite_keys is
        False.
    """
    if not hasattr(key_utils, "MakeCACert"):
        raise OpenSourceKeyUtilsRequiredError(
            "Generate keys can only run with open source key_utils.")
    if (config.Get("PrivateKeys.server_key", default=None) and
            not overwrite_keys):
        # Refuse to silently clobber an existing deployment's keys.
        print(config.Get("PrivateKeys.server_key"))
        raise KeysAlreadyExistError(
            "Config %s already has keys, use --overwrite_keys to "
            "override." % config.parser)

    length = grr_config.CONFIG["Server.rsa_key_length"]
    print("All keys will have a bit length of %d." % length)

    print("Generating executable signing key")
    executable_key = rdf_crypto.RSAPrivateKey.GenerateKey(bits=length)
    config.Set("PrivateKeys.executable_signing_private_key",
               executable_key.AsPEM())
    config.Set("Client.executable_signing_public_key",
               executable_key.GetPublicKey().AsPEM())

    print("Generating CA keys")
    ca_key = rdf_crypto.RSAPrivateKey.GenerateKey(bits=length)
    ca_cert = key_utils.MakeCACert(ca_key)
    config.Set("CA.certificate", ca_cert.AsPEM())
    config.Set("PrivateKeys.ca_key", ca_key.AsPEM())

    print("Generating Server keys")
    server_key = rdf_crypto.RSAPrivateKey.GenerateKey(bits=length)
    # The server cert is signed by the CA generated above.
    server_cert = key_utils.MakeCASignedCert(u"grr", server_key, ca_cert, ca_key)
    config.Set("Frontend.certificate", server_cert.AsPEM())
    config.Set("PrivateKeys.server_key", server_key.AsPEM())

    print("Generating secret key for csrf protection.")
    _GenerateCSRFKey(config)
[ "def", "GenerateKeys", "(", "config", ",", "overwrite_keys", "=", "False", ")", ":", "if", "not", "hasattr", "(", "key_utils", ",", "\"MakeCACert\"", ")", ":", "raise", "OpenSourceKeyUtilsRequiredError", "(", "\"Generate keys can only run with open source key_utils.\"", ...
43.6
15.971429
def create_namespaced_controller_revision(self, namespace, body, **kwargs):  # noqa: E501
    """create_namespaced_controller_revision  # noqa: E501

    create a ControllerRevision  # noqa: E501
    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.create_namespaced_controller_revision(namespace, body, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str namespace: object name and auth scope, such as for teams and projects (required)
    :param V1ControllerRevision body: (required)
    :param bool include_uninitialized: If true, partially initialized resources are included in the response.
    :param str pretty: If 'true', then the output is pretty printed.
    :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed
    :return: V1ControllerRevision
             If the method is called asynchronously,
             returns the request thread.
    """
    kwargs['_return_http_data_only'] = True
    # With _return_http_data_only set, the *_with_http_info variant already
    # yields the right object in both modes: the request thread when
    # async_req is truthy, the deserialized V1ControllerRevision otherwise.
    return self.create_namespaced_controller_revision_with_http_info(
        namespace, body, **kwargs)  # noqa: E501
[ "def", "create_namespaced_controller_revision", "(", "self", ",", "namespace", ",", "body", ",", "*", "*", "kwargs", ")", ":", "# noqa: E501", "kwargs", "[", "'_return_http_data_only'", "]", "=", "True", "if", "kwargs", ".", "get", "(", "'async_req'", ")", ":"...
64.28
36.56
def getClassInModuleFromName(className, module):
    """ Return the class object named *className* from *module*.

    Relies on getAvClassNamesInModule and getAvailableClassesInModule
    returning their entries in the same order.
    """
    names = getAvClassNamesInModule(module)
    classes = getAvailableClassesInModule(module)
    return classes[names.index(className)]
[ "def", "getClassInModuleFromName", "(", "className", ",", "module", ")", ":", "n", "=", "getAvClassNamesInModule", "(", "module", ")", "i", "=", "n", ".", "index", "(", "className", ")", "c", "=", "getAvailableClassesInModule", "(", "module", ")", "return", ...
28.25
6.5
def _from_args(args):
    """Factory method to create a new instance from command line args.

    :param args: instance of :class:`argparse.Namespace`
    """
    return bugzscout.BugzScout(
        args.url, args.user, args.project, args.area)
[ "def", "_from_args", "(", "args", ")", ":", "return", "bugzscout", ".", "BugzScout", "(", "args", ".", "url", ",", "args", ".", "user", ",", "args", ".", "project", ",", "args", ".", "area", ")" ]
38.333333
18.5
def HelloWorld(handler, t):
    """
    The traditional "Hello, World" function, illustrating the idiom used
    throughout the API: flesh out the Tropo object with "action" calls
    (here, say), render it to JSON, and write the JSON to the response so
    it gets POSTed back to the API.
    """
    t.say(["Hello, World", "How ya doing?"])
    payload = t.RenderJson()
    logging.info("HelloWorld json: %s" % payload)
    handler.response.out.write(payload)
[ "def", "HelloWorld", "(", "handler", ",", "t", ")", ":", "t", ".", "say", "(", "[", "\"Hello, World\"", ",", "\"How ya doing?\"", "]", ")", "json", "=", "t", ".", "RenderJson", "(", ")", "logging", ".", "info", "(", "\"HelloWorld json: %s\"", "%", "json"...
72.375
48.625
def _le_annot_parms(self, annot, p1, p2):
    """ Get common parameters for making line end symbols.

    :param annot: the line annotation whose border/colors/rect are read.
    :param p1: line start point.
    :param p2: line end point.
    :returns: (m, im, L, R, w, scol, fcol, opacity) where *m* maps the
        line to the horizontal, *im* is its inverse, *L*/*R* are the
        transformed end points, *w* the border width, *scol*/*fcol* the
        stroke/fill color PDF operator strings and *opacity* an optional
        graphics-state operator.
    """
    w = annot.border["width"]  # line width
    sc = annot.colors["stroke"]  # stroke color
    if not sc:  # black if missing
        sc = (0,0,0)
    scol = " ".join(map(str, sc)) + " RG\n"
    fc = annot.colors["fill"]  # fill color
    if not fc:
        fc = (0,0,0)
    fcol = " ".join(map(str, fc)) + " rg\n"
    nr = annot.rect
    np1 = p1                   # point coord relative to annot rect
    np2 = p2                   # point coord relative to annot rect
    m = self._hor_matrix(np1, np2)  # matrix makes the line horizontal
    im = ~m                            # inverted matrix
    L = np1 * m                        # converted start (left) point
    R = np2 * m                        # converted end (right) point
    if 0 <= annot.opacity < 1:
        opacity = "/Alp0 gs\n"
    else:
        opacity = ""
    return m, im, L, R, w, scol, fcol, opacity
[ "def", "_le_annot_parms", "(", "self", ",", "annot", ",", "p1", ",", "p2", ")", ":", "w", "=", "annot", ".", "border", "[", "\"width\"", "]", "# line width", "sc", "=", "annot", ".", "colors", "[", "\"stroke\"", "]", "# stroke color", "if", "not", "sc"...
47.318182
15.863636
def deleteEvent(self, physicalInterfaceId, eventId):
    """
    Delete an event mapping from a physical interface.
    Parameters: physicalInterfaceId (string), eventId (string).
    Throws APIException on failure.
    """
    url = ApiClient.oneEventUrl % (self.host, "/draft", physicalInterfaceId, eventId)
    response = requests.delete(url, auth=self.credentials, verify=self.verify)
    # Anything other than 204 No Content is treated as an API error.
    if response.status_code != 204:
        raise ibmiotf.APIException(response.status_code, "HTTP error deleting event mapping", response)
    self.logger.debug("Event mapping deleted")
    return response
[ "def", "deleteEvent", "(", "self", ",", "physicalInterfaceId", ",", "eventId", ")", ":", "req", "=", "ApiClient", ".", "oneEventUrl", "%", "(", "self", ".", "host", ",", "\"/draft\"", ",", "physicalInterfaceId", ",", "eventId", ")", "resp", "=", "requests", ...
48.076923
20.846154
def _has_ipv6(host):
    """ Returns True if the system can bind an IPv6 address. """
    sock = None
    bound = False

    # App Engine doesn't support IPV6 sockets and actually has a quota on the
    # number of sockets that can be used, so just early out here instead of
    # creating a socket needlessly.
    # See https://github.com/urllib3/urllib3/issues/1446
    if _appengine_environ.is_appengine_sandbox():
        return False

    if socket.has_ipv6:
        # socket.has_ipv6 only tells us cPython was compiled with IPv6
        # support; to learn whether the running system has it enabled we
        # must actually bind an IPv6 address.
        # https://github.com/shazow/urllib3/pull/611
        # https://bugs.python.org/issue658327
        try:
            sock = socket.socket(socket.AF_INET6)
            sock.bind((host, 0))
            bound = True
        except Exception:
            # Best-effort probe: any failure simply means "no usable IPv6".
            pass

    if sock:
        sock.close()
    return bound
[ "def", "_has_ipv6", "(", "host", ")", ":", "sock", "=", "None", "has_ipv6", "=", "False", "# App Engine doesn't support IPV6 sockets and actually has a quota on the", "# number of sockets that can be used, so just early out here instead of", "# creating a socket needlessly.", "# See ht...
34.5
21.071429
def occupy(self, address, size, sort):
    """
    Include a block, specified by (address, size), in this segment list.

    :param int address: The starting address of the block.
    :param int size:    Size of the block.
    :param str sort:    Type of the block.
    :return: None
    """
    # A missing or non-positive size means there is no block to record.
    if size is None or size <= 0:
        return

    if not self._list:
        # First block ever: seed the list directly, no merging needed.
        self._list.append(Segment(address, address + size, sort))
        self._bytes_occupied += size
        return

    # Locate the insertion point among existing segments, then insert the
    # new block and merge it with any adjacent/overlapping neighbours.
    idx = self._search(address)
    self._insert_and_merge(address, size, sort, idx)
[ "def", "occupy", "(", "self", ",", "address", ",", "size", ",", "sort", ")", ":", "if", "size", "is", "None", "or", "size", "<=", "0", ":", "# Cannot occupy a non-existent block", "return", "# l.debug(\"Occpuying 0x%08x-0x%08x\", address, address + size)", "if", "no...
33.375
18.125
def merge_section(key, prnt_sec, child_sec):
    """ Synthesize a output numpy docstring section.

    Parameters
    ----------
    key: str
        The numpy-section being merged.
    prnt_sec: Optional[str]
        The docstring section from the parent's attribute.
    child_sec: Optional[str]
        The docstring section from the child's attribute.

    Returns
    -------
    Optional[str]
        The output docstring section."""
    if prnt_sec is None and child_sec is None:
        return None

    if key == "Short Summary":
        # The summary section carries no header line.
        header = ''
    else:
        # Numpy-style header: the key underlined with dashes, then a blank line.
        header = "\n".join((key, "-" * len(key), ""))

    # The child's section overrides the parent's whenever it is present.
    body = prnt_sec if child_sec is None else child_sec

    return header + body
[ "def", "merge_section", "(", "key", ",", "prnt_sec", ",", "child_sec", ")", ":", "if", "prnt_sec", "is", "None", "and", "child_sec", "is", "None", ":", "return", "None", "if", "key", "==", "\"Short Summary\"", ":", "header", "=", "''", "else", ":", "head...
26.758621
20.137931
def node_to_evenly_discretized(node):
    """
    Parses the evenly discretized mfd node to an instance of the :class:
    openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD, or
    to None if not all parameters are available
    """
    min_mag = node.attrib["minMag"]
    bin_width = node.attrib["binWidth"]
    rate_text = node.nodes[0].text
    # All three pieces must be non-empty for a valid MFD definition.
    if not (min_mag and bin_width and rate_text):
        return None
    # Whitespace-separated occurrence rates, converted to floats
    occurrence_rates = [float(value) for value in rate_text.split()]
    return mfd.evenly_discretized.EvenlyDiscretizedMFD(
        float(min_mag), float(bin_width), occurrence_rates)
[ "def", "node_to_evenly_discretized", "(", "node", ")", ":", "if", "not", "all", "(", "[", "node", ".", "attrib", "[", "\"minMag\"", "]", ",", "node", ".", "attrib", "[", "\"binWidth\"", "]", ",", "node", ".", "nodes", "[", "0", "]", ".", "text", "]",...
38.8
13.6
def pstdev(data):
    """Calculates the population standard deviation."""
    count = len(data)
    if count < 2:
        # Too few points to compute a spread; original deliberately returns 0
        # rather than raising "variance requires at least two data points".
        return 0
    variance = TableExtraction._ss(data) / count  # population variance
    return variance ** 0.5
[ "def", "pstdev", "(", "data", ")", ":", "n", "=", "len", "(", "data", ")", "if", "n", "<", "2", ":", "return", "0", "# raise ValueError('variance requires at least two data points')", "ss", "=", "TableExtraction", ".", "_ss", "(", "data", ")", "pvar", "=", ...
35.222222
16
async def mount(self, mount_point, *, mount_options=None):
    """Mount this partition."""
    # Assemble the handler call's arguments up front, then dispatch once.
    params = dict(
        system_id=self.block_device.node.system_id,
        device_id=self.block_device.id,
        id=self.id,
        mount_point=mount_point,
        mount_options=mount_options,
    )
    self._data = await self._handler.mount(**params)
[ "async", "def", "mount", "(", "self", ",", "mount_point", ",", "*", ",", "mount_options", "=", "None", ")", ":", "self", ".", "_data", "=", "await", "self", ".", "_handler", ".", "mount", "(", "system_id", "=", "self", ".", "block_device", ".", "node",...
46.571429
8.428571
def render(self, file_path, **kwargs):
    """
    Save the content of the .tex file in the PDF.

    Parameters
    ----------
    file_path: str
        Path to the output file.

    Raises
    ------
    Exception
        Re-raises whatever the underlying render function raised, after
        logging it.
    """
    temp = get_tempfile(suffix='.tex')
    self.save_content(temp.name)
    try:
        self._render_function(temp.name, file_path, output_format='pdf')
    # Narrowed from a bare `except:` so SystemExit/KeyboardInterrupt are
    # not intercepted; the original exception is still logged and re-raised.
    except Exception:
        log.exception('Error exporting file {} to PDF.'.format(file_path))
        raise
[ "def", "render", "(", "self", ",", "file_path", ",", "*", "*", "kwargs", ")", ":", "temp", "=", "get_tempfile", "(", "suffix", "=", "'.tex'", ")", "self", ".", "save_content", "(", "temp", ".", "name", ")", "try", ":", "self", ".", "_render_function", ...
29.8125
19
def add_path_segment(self, value):
    """
    Add a new path segment to the end of the current string

    :param string value: the new path segment to use

    Example::

        >>> u = URL('http://example.com/foo/')
        >>> u.add_path_segment('bar').as_string()
        'http://example.com/foo/bar'
    """
    current = self.path_segments()
    extended = current + (to_unicode(value),)
    return self.path_segments(extended)
[ "def", "add_path_segment", "(", "self", ",", "value", ")", ":", "segments", "=", "self", ".", "path_segments", "(", ")", "+", "(", "to_unicode", "(", "value", ")", ",", ")", "return", "self", ".", "path_segments", "(", "segments", ")" ]
31.428571
16.857143
def makevAndvPfuncs(self,policyFunc):
    '''
    Constructs the marginal value function for this period.

    Parameters
    ----------
    policyFunc : function
        Consumption and medical care function for this period, defined over
        market resources, permanent income level, and the medical need shock.

    Returns
    -------
    vFunc : function
        Value function for this period, defined over market resources and
        permanent income.
    vPfunc : function
        Marginal value (of market resources) function for this period, defined
        over market resources and permanent income.
    '''
    # Get state dimension sizes
    mCount = self.aXtraGrid.size
    pCount = self.pLvlGrid.size
    MedCount = self.MedShkVals.size

    # Make temporary grids to evaluate the consumption function.
    # All grids below are built to the same 3D shape (mCount, pCount, MedCount)
    # via reshape + tile, so they can be combined elementwise.
    temp_grid = np.tile(np.reshape(self.aXtraGrid,(mCount,1,1)),(1,pCount,MedCount))
    aMinGrid = np.tile(np.reshape(self.mLvlMinNow(self.pLvlGrid),(1,pCount,1)), (mCount,1,MedCount))
    pGrid = np.tile(np.reshape(self.pLvlGrid,(1,pCount,1)),(mCount,1,MedCount))
    # Market resources: normalized grid scaled by permanent income, shifted by
    # the lower bound of market resources at each income level.
    mGrid = temp_grid*pGrid + aMinGrid
    if self.pLvlGrid[0] == 0:
        # Special case: at pLvl == 0 the scaled grid would collapse to zero,
        # so use the raw aXtraGrid values directly for that income slice.
        mGrid[:,0,:] = np.tile(np.reshape(self.aXtraGrid,(mCount,1)),(1,MedCount))
    MedShkGrid = np.tile(np.reshape(self.MedShkVals,(1,1,MedCount)),(mCount,pCount,1))
    probsGrid = np.tile(np.reshape(self.MedShkPrbs,(1,1,MedCount)),(mCount,pCount,1))

    # Get optimal consumption (and medical care) for each state
    cGrid,MedGrid = policyFunc(mGrid,pGrid,MedShkGrid)

    # Calculate expected value by "integrating" across medical shocks
    # (weighted sum over the MedShk axis using the shock probabilities)
    if self.vFuncBool:
        MedGrid = np.maximum(MedGrid,1e-100) # interpolation error sometimes makes Med < 0 (barely)
        aGrid = np.maximum(mGrid - cGrid - self.MedPrice*MedGrid, aMinGrid) # interpolation error sometimes makes tiny violations
        vGrid = self.u(cGrid) + MedShkGrid*self.uMed(MedGrid) + self.EndOfPrdvFunc(aGrid,pGrid)
        vNow = np.sum(vGrid*probsGrid,axis=2)

    # Calculate expected marginal value by "integrating" across medical shocks
    vPgrid = self.uP(cGrid)
    vPnow = np.sum(vPgrid*probsGrid,axis=2)

    # Add vPnvrs=0 at m=mLvlMin to close it off at the bottom (and vNvrs=0)
    # NOTE(review): mGrid[:,:,0] is used as the m-grid here — presumably the
    # m values are identical across medical shocks, so any slice works; confirm.
    mGrid_small = np.concatenate((np.reshape(self.mLvlMinNow(self.pLvlGrid),(1,pCount)),mGrid[:,:,0]))
    vPnvrsNow = np.concatenate((np.zeros((1,pCount)),self.uPinv(vPnow)))
    if self.vFuncBool:
        vNvrsNow = np.concatenate((np.zeros((1,pCount)),self.uinv(vNow)),axis=0)
        # Marginal of the pseudo-inverse value: chain rule through uinv
        vNvrsPnow = vPnow*self.uinvP(vNow)
        vNvrsPnow = np.concatenate((np.zeros((1,pCount)),vNvrsPnow),axis=0)

    # Construct the pseudo-inverse value and marginal value functions over mLvl,pLvl
    vPnvrsFunc_by_pLvl = []
    vNvrsFunc_by_pLvl = []
    for j in range(pCount): # Make a pseudo inverse marginal value function for each pLvl
        pLvl = self.pLvlGrid[j]
        # Shift m to be relative to this income level's lower bound
        m_temp = mGrid_small[:,j] - self.mLvlMinNow(pLvl)
        vPnvrs_temp = vPnvrsNow[:,j]
        vPnvrsFunc_by_pLvl.append(LinearInterp(m_temp,vPnvrs_temp))
        if self.vFuncBool:
            vNvrs_temp = vNvrsNow[:,j]
            vNvrsP_temp = vNvrsPnow[:,j]
            # Cubic interpolation uses the derivative info computed above
            vNvrsFunc_by_pLvl.append(CubicInterp(m_temp,vNvrs_temp,vNvrsP_temp))
    vPnvrsFuncBase = LinearInterpOnInterp1D(vPnvrsFunc_by_pLvl,self.pLvlGrid)
    vPnvrsFunc = VariableLowerBoundFunc2D(vPnvrsFuncBase,self.mLvlMinNow) # adjust for the lower bound of mLvl
    if self.vFuncBool:
        vNvrsFuncBase = LinearInterpOnInterp1D(vNvrsFunc_by_pLvl,self.pLvlGrid)
        vNvrsFunc = VariableLowerBoundFunc2D(vNvrsFuncBase,self.mLvlMinNow) # adjust for the lower bound of mLvl

    # "Re-curve" the (marginal) value function: apply the CRRA utility
    # transformation back onto the pseudo-inverse interpolants
    vPfunc = MargValueFunc2D(vPnvrsFunc,self.CRRA)
    if self.vFuncBool:
        vFunc = ValueFunc2D(vNvrsFunc,self.CRRA)
    else:
        vFunc = NullFunc()

    return vFunc, vPfunc
[ "def", "makevAndvPfuncs", "(", "self", ",", "policyFunc", ")", ":", "# Get state dimension sizes", "mCount", "=", "self", ".", "aXtraGrid", ".", "size", "pCount", "=", "self", ".", "pLvlGrid", ".", "size", "MedCount", "=", "self", ".", "MedShkVals", ".", "si...
49.819277
28.46988