code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def init_pool(self):
    """Create the multiprocessing pool on first use.

    No-op (pool stays None) when nproc <= 1; warns if already created.
    """
    if self.pool is not None:
        print('pool already initialized?')
        return
    if self.nproc > 1:
        self.pool = mp.Pool(processes=self.nproc)
    else:
        self.pool = None
Initialize multiprocessing pool if necessary.
def _build_install_command_list(cmd_prefix, to_install, to_downgrade, to_reinstall): cmds = [] if to_install: cmd = copy.deepcopy(cmd_prefix) cmd.extend(to_install) cmds.append(cmd) if to_downgrade: cmd = copy.deepcopy(cmd_prefix) cmd.append('--force-downgrade') cmd.extend(to_downgrade) cmds.append(cmd) if to_reinstall: cmd = copy.deepcopy(cmd_prefix) cmd.append('--force-reinstall') cmd.extend(to_reinstall) cmds.append(cmd) return cmds
Builds a list of install commands to be executed in sequence in order to process each of the to_install, to_downgrade, and to_reinstall lists.
def reflected_binary_operator(op):
    """Factory function for making binary operator methods on a Factor.

    Returns a function suitable for implementing reflected operators
    like ``__radd__`` (i.e. ``other <op> self``).
    """
    assert not is_comparison(op)

    @with_name(method_name_for_op(op, commute=True))
    @coerce_numbers_to_my_dtype
    def reflected_binary_operator(self, other):
        if isinstance(self, NumericalExpression):
            self_expr, other_expr, new_inputs = self.build_binary_op(
                op, other
            )
            return NumExprFactor(
                # Operands are swapped relative to the plain operator,
                # because this implements the reflected form (other <op> self).
                "({left}) {op} ({right})".format(
                    left=other_expr,
                    right=self_expr,
                    op=op,
                ),
                new_inputs,
                dtype=binop_return_dtype(op, other.dtype, self.dtype)
            )
        elif isinstance(other, Number):
            return NumExprFactor(
                "{constant} {op} x_0".format(op=op, constant=other),
                binds=(self,),
                # NOTE(review): `other` is a plain Number in this branch, so
                # `other.dtype` looks suspicious -- confirm that upstream
                # coercion guarantees a dtype attribute here.
                dtype=binop_return_dtype(op, other.dtype, self.dtype),
            )
        raise BadBinaryOperator(op, other, self)
    return reflected_binary_operator
Factory function for making binary operator methods on a Factor. Returns a function, "reflected_binary_operator" suitable for implementing functions like __radd__.
def create_default_config():
    """Create a default configuration object with all parameters filled."""
    defaults = {
        'global': {'env_source_rc': False},
        'shell': {'bash': "true", 'zsh': "true", 'gui': "true"},
    }
    config = configparser.RawConfigParser()
    for section, options in defaults.items():
        config.add_section(section)
        for option, value in options.items():
            config.set(section, option, value)
    return config
Create a default configuration object, with all parameters filled
def plotSkymapCatalog(lon, lat, **kwargs):
    """Plot a catalog of coordinates on a full-sky map.

    :param lon: longitudes of catalog objects
    :param lat: latitudes of catalog objects
    :param kwargs: forwarded to drawSkymapCatalog
    """
    fig = plt.figure()
    # NOTE(review): `projection` is not defined in this scope; it must be
    # a module-level global -- confirm, or accept it as a parameter.
    ax = plt.subplot(111, projection=projection)
    drawSkymapCatalog(ax, lon, lat, **kwargs)
Plot a catalog of coordinates on a full-sky map.
def _callable_func(self, func, axis, *args, **kwargs):
    """Apply a callable function across the given axis.

    Args:
        func: The function to apply.
        axis: Target axis to apply the function along (0 applies over
            rows, otherwise over columns).

    Returns:
        A new PandasQueryCompiler with the result of applying `func`.
    """
    def callable_apply_builder(df, axis=0):
        # Restore the real labels on the applied axis and positional
        # labels on the other, since partitions carry internal indexing.
        if not axis:
            df.index = index
            df.columns = pandas.RangeIndex(len(df.columns))
        else:
            df.columns = index
            df.index = pandas.RangeIndex(len(df.index))
        result = df.apply(func, axis=axis, *args, **kwargs)
        return result
    # `index` is closed over by the builder above.
    index = self.index if not axis else self.columns
    func_prepared = self._build_mapreduce_func(callable_apply_builder,
                                               axis=axis)
    result_data = self._map_across_full_axis(axis, func_prepared)
    return self._post_process_apply(result_data, axis)
Apply callable functions across given axis. Args: func: The functions to apply. axis: Target axis to apply the function along. Returns: A new PandasQueryCompiler.
def render_label(self, cairo_context, shape_id, text=None, label_scale=.9):
    """Draw a label on the specified shape.

    Parameters
    ----------
    cairo_context : cairo.Context
        Cairo context to draw text with; may be preconfigured (font
        style, etc.).
    shape_id : str
        Shape identifier.
    text : str, optional
        Label text; defaults to the shape identifier.
    label_scale : float, optional
        Fraction of the shape bounding box to scale the text to.
    """
    text = shape_id if text is None else text
    # `.ix` was removed in modern pandas; `.loc` is the label-based
    # equivalent for these label lookups.
    shape = self.canvas.df_bounding_shapes.loc[shape_id]
    shape_center = self.canvas.df_shape_centers.loc[shape_id]
    font_size, text_shape = \
        aspect_fit_font_size(text, shape * label_scale,
                             cairo_context=cairo_context)
    cairo_context.set_font_size(font_size)
    # Center the text on the shape's center point.
    cairo_context.move_to(shape_center[0] - .5 * text_shape.width,
                          shape_center[1] + .5 * text_shape.height)
    cairo_context.show_text(text)
Draw label on specified shape. Parameters ---------- cairo_context : cairo.Context Cairo context to draw text width. Can be preconfigured, for example, to set font style, etc. shape_id : str Shape identifier. text : str, optional Label text. If not specified, shape identifier is used. label_scale : float, optional Fraction of limiting dimension of shape bounding box to scale text to.
def archive(cls):
    """Delete all Todo items that are done, inside a datastore transaction."""
    # Begin a transaction so the query and the deletes are atomic.
    req = datastore.BeginTransactionRequest()
    resp = datastore.begin_transaction(req)
    tx = resp.transaction
    # Key-only query for done Todos under the default list.
    req = datastore.RunQueryRequest()
    req.read_options.transaction = tx
    q = req.query
    set_kind(q, kind='Todo')
    add_projection(q, '__key__')
    set_composite_filter(
        q.filter,
        datastore.CompositeFilter.AND,
        set_property_filter(
            datastore.Filter(),
            'done', datastore.PropertyFilter.EQUAL, True),
        set_property_filter(
            datastore.Filter(),
            '__key__', datastore.PropertyFilter.HAS_ANCESTOR,
            default_todo_list.key))
    resp = datastore.run_query(req)
    # Delete every matched entity in a single commit.
    req = datastore.CommitRequest()
    req.transaction = tx
    for result in resp.batch.entity_results:
        req.mutations.add().delete.CopyFrom(result.entity.key)
    resp = datastore.commit(req)
    return ''
Delete all Todo items that are done.
def ignore_path(path, ignore_list=None, whitelist=None):
    """Return whether *path* should be ignored.

    :param path: path to test
    :param ignore_list: glob patterns of paths to ignore; when None,
        everything is ignored
    :param whitelist: glob patterns exempting matching paths
    """
    if ignore_list is None:
        return True
    should_ignore = matches_glob_list(path, ignore_list)
    if whitelist is None:
        return should_ignore
    # A whitelist match overrides the ignore decision.
    return should_ignore and not matches_glob_list(path, whitelist)
Returns a boolean indicating if a path should be ignored given an ignore_list and a whitelist of glob patterns.
def language(self, language):
    """Set the language of this KernelPushRequest.

    :param language: the language the kernel is written in
    :raises ValueError: if *language* is None or not an allowed value
    """
    if language is None:
        raise ValueError("Invalid value for `language`, must not be `None`")
    allowed_values = ["python", "r", "rmarkdown"]
    if language not in allowed_values:
        raise ValueError(
            "Invalid value for `language` ({0}), must be one of {1}"
            .format(language, allowed_values)
        )
    self._language = language
Sets the language of this KernelPushRequest. The language that the kernel is written in # noqa: E501 :param language: The language of this KernelPushRequest. # noqa: E501 :type: str
def sg_max(tensor, opt):
    r"""Compute the maximum of elements across axis of a tensor.

    See `tf.reduce_max()` in tensorflow.

    Args:
        tensor: A `Tensor` (automatically given by chain).
        opt:
            axis: A tuple/list of integers or an integer. The axis to reduce.
            keep_dims: If true, retains reduced dimensions with length 1.
            name: If provided, replace current tensor's name.

    Returns:
        A `Tensor`.
    """
    return tf.reduce_max(tensor, axis=opt.axis, keep_dims=opt.keep_dims,
                         name=opt.name)
r"""Computes the maximum of elements across axis of a tensor. See `tf.reduce_max()` in tensorflow. Args: tensor: A `Tensor` (automatically given by chain). opt: axis : A tuple/list of integers or an integer. The axis to reduce. keep_dims: If true, retains reduced dimensions with length 1. name: If provided, replace current tensor's name. Returns: A `Tensor`.
def bellman_ford(graph, weight, source=0):
    """Single-source shortest paths by Bellman-Ford.

    :param graph: directed graph in listlist or listdict format
    :param weight: edge weights (may be negative), matrix or listdict
    :returns: (distance table, precedence table, bool); the bool is True
        when a negative cycle is reachable from the source
    :complexity: O(|V| * |E|)
    """
    n = len(graph)
    dist = [float('inf')] * n
    prec = [None] * n
    dist[source] = 0
    for _ in range(n):
        improved = False
        for u in range(n):
            for v in graph[u]:
                candidate = dist[u] + weight[u][v]
                if candidate < dist[v]:
                    dist[v] = candidate
                    prec[v] = u
                    improved = True
        if not improved:
            # Converged before the n-th round: no reachable negative cycle.
            return dist, prec, False
    # Still relaxing after n rounds: a negative cycle is reachable.
    return dist, prec, True
Single source shortest paths by Bellman-Ford :param graph: directed graph in listlist or listdict format :param weight: can be negative. in matrix format or same listdict graph :returns: distance table, precedence table, bool :explanation: bool is True if a negative circuit is reachable from the source, circuits can have length 2. :complexity: `O(|V|*|E|)`
def subjects_download(self, subject_id):
    """Get the data file for the subject with the given identifier.

    Parameters
    ----------
    subject_id : string
        Unique subject identifier

    Returns
    -------
    FileInfo
        Information about the subject's data file on disk, or None if
        the identifier is unknown.
    """
    subject = self.subjects_get(subject_id)
    if subject is None:
        return None
    else:
        return FileInfo(
            subject.data_file,
            subject.properties[datastore.PROPERTY_MIMETYPE],
            subject.properties[datastore.PROPERTY_FILENAME]
        )
Get data file for subject with given identifier. Parameters ---------- subject_id : string Unique subject identifier Returns ------- FileInfo Information about subject's data file on disk or None if identifier is unknown
async def end(self):
    """End process execution.

    Waits for the process to exit, closes any temporary files, and
    returns the process's exit code.
    """
    try:
        await self.proc.wait()
    finally:
        # Always release temporary files, even if wait() raises.
        for temporary_file in self.temporary_files:
            temporary_file.close()
        self.temporary_files = []
    return self.proc.returncode
End process execution.
def set_annotation(self):
    """Append the context's ``pending_symbol`` to its ``annotations``
    sequence and reset the pending-symbol state.
    """
    assert self.pending_symbol is not None
    assert not self.value
    annotations = (_as_symbol(self.pending_symbol, is_symbol_value=False),)
    self.annotations = annotations if not self.annotations else self.annotations + annotations
    # Reset per-symbol state now that the symbol became an annotation.
    self.ion_type = None
    self.pending_symbol = None
    self.quoted_text = False
    self.line_comment = False
    self.is_self_delimiting = False
    return self
Appends the context's ``pending_symbol`` to its ``annotations`` sequence.
def start_patching(name=None):
    """Initiate mocking of the functions listed in `_factory_map`.

    For this to work reliably, mocked helpers must be used via module
    attribute access (``import x as y; y.func()``), not
    ``from x import func``.

    Kwargs:
        name (Optional[str]): if given, only patch the specified path,
            else all defined default mocks.
    """
    global _factory_map, _patchers, _mocks
    if _patchers and name is None:
        warnings.warn('start_patching() called again, already patched')
    _pre_import()
    if name is not None:
        factory = _factory_map[name]
        items = [(name, factory)]
    else:
        items = _factory_map.items()
    for name, factory in items:
        patcher = mock.patch(name, new=factory())
        mocked = patcher.start()
        # Keep both so stop_patching() can undo and tests can inspect.
        _patchers[name] = patcher
        _mocks[name] = mocked
Initiate mocking of the functions listed in `_factory_map`. For this to work reliably all mocked helper functions should be imported and used like this: import dp_paypal.client as paypal res = paypal.do_paypal_express_checkout(...) (i.e. don't use `from dp_paypal.client import x` import style) Kwargs: name (Optional[str]): if given, only patch the specified path, else all defined default mocks
def get_utm_epsg(longitude, latitude, crs=None):
    """Return the EPSG code of the UTM zone for the given coordinates.

    If `crs` is provided and is not WGS84, the point is first
    reprojected to EPSG:4326. Based on
    http://gis.stackexchange.com/questions/34401

    :param longitude: The longitude.
    :type longitude: float
    :param latitude: The latitude.
    :type latitude: float
    :param crs: CRS of the coordinates; EPSG:4326 is assumed when None.
    :type crs: QgsCoordinateReferenceSystem
    """
    if crs is None or crs.authid() == 'EPSG:4326':
        epsg = 32600  # base code for northern-hemisphere UTM zones
        if latitude < 0.0:
            epsg += 100  # southern-hemisphere zones are 327xx
        epsg += get_utm_zone(longitude)
        return epsg
    else:
        # Reproject to WGS84, then recurse with crs=None.
        epsg_4326 = QgsCoordinateReferenceSystem('EPSG:4326')
        transform = QgsCoordinateTransform(
            crs, epsg_4326, QgsProject.instance())
        geom = QgsGeometry.fromPointXY(QgsPointXY(longitude, latitude))
        geom.transform(transform)
        point = geom.asPoint()
        return get_utm_epsg(point.x(), point.y())
Return epsg code of the utm zone according to X, Y coordinates. By default, the CRS is EPSG:4326. If the CRS is provided, first X,Y will be reprojected from the input CRS to WGS84. The code is based on the code: http://gis.stackexchange.com/questions/34401 :param longitude: The longitude. :type longitude: float :param latitude: The latitude. :type latitude: float :param crs: The coordinate reference system of the latitude, longitude. :type crs: QgsCoordinateReferenceSystem
def rows_above_layout(self):
    """Return the number of rows visible in the terminal above the layout.

    :raises HeightIsUnknownError: when the height cannot be determined.
    """
    if self._in_alternate_screen:
        return 0
    if self._min_available_height > 0:
        total = self.output.get_size().rows
        previous_height = self._last_screen.height if self._last_screen else 0
        return total - max(self._min_available_height, previous_height)
    raise HeightIsUnknownError('Rows above layout is unknown.')
Return the number of rows visible in the terminal above the layout.
def get_resource_form(self, *args, **kwargs):
    """Dispatch to the create- or update-form getter.

    Uses the create form when the last positional argument is a list or
    ``resource_record_types`` is supplied; otherwise the update form.
    """
    wants_create = (isinstance(args[-1], list) or
                    'resource_record_types' in kwargs)
    if wants_create:
        return self.get_resource_form_for_create(*args, **kwargs)
    return self.get_resource_form_for_update(*args, **kwargs)
Pass through to provider ResourceAdminSession.get_resource_form_for_create or get_resource_form_for_update, depending on whether the arguments indicate a create request.
def dataSetUnit(h5Dataset):
    """Return the unit of *h5Dataset* by looking in its attributes.

    Searches for one of several capitalizations of 'unit(s)'; returns
    the empty string when none is present. Always returns a string.
    """
    attributes = h5Dataset.attrs
    if not attributes:
        return ''
    candidates = ('unit', 'units', 'Unit', 'Units', 'UNIT', 'UNITS')
    return next((to_string(attributes[key])
                 for key in candidates if key in attributes), '')
Returns the unit of the h5Dataset by looking in the attributes. It searches in the attributes for one of the following keys: 'unit', 'units', 'Unit', 'Units', 'UNIT', 'UNITS'. If these are not found, the empty string is returned. Always returns a string
def parseLayoutFeatures(font):
    """Parse OpenType layout features in the UFO and return a
    feaLib.ast.FeatureFile instance.
    """
    featxt = tounicode(font.features.text or "", "utf-8")
    if not featxt:
        return ast.FeatureFile()
    buf = UnicodeIO(featxt)
    # Name the buffer after the UFO path so include() statements resolve
    # relative to the UFO itself.
    ufoPath = font.path
    if ufoPath is not None:
        buf.name = ufoPath
    glyphNames = set(font.keys())
    try:
        parser = Parser(buf, glyphNames)
        doc = parser.parse()
    except IncludedFeaNotFound as e:
        # Give a helpful hint when the include path looks relative to
        # features.fea instead of the UFO, then re-raise.
        if ufoPath and os.path.exists(os.path.join(ufoPath, e.args[0])):
            logger.warning(
                "Please change the file name in the include(...); "
                "statement to be relative to the UFO itself, "
                "instead of relative to the 'features.fea' file "
                "contained in it.")
        raise
    return doc
Parse OpenType layout features in the UFO and return a feaLib.ast.FeatureFile instance.
def blackbox_and_coarse_grain(blackbox, coarse_grain):
    """Validate that a coarse-graining properly combines the outputs of
    a blackboxing.

    :raises ValueError: if a box has multiple outputs that are not
        grouped into one macro-element of the coarse-graining.
    """
    if blackbox is None:
        return
    output_indices = set(blackbox.output_indices)
    for box in blackbox.partition:
        box_outputs = set(box) & output_indices
        if coarse_grain is None and len(box_outputs) > 1:
            raise ValueError(
                'A blackboxing with multiple outputs per box must be '
                'coarse-grained.')
        if (coarse_grain and
                not any(box_outputs.issubset(part)
                        for part in coarse_grain.partition)):
            raise ValueError(
                'Multiple outputs from a blackbox must be partitioned into '
                'the same macro-element of the coarse-graining')
Validate that a coarse-graining properly combines the outputs of a blackboxing.
def loadhex(self, fobj):
    """Load a hex file into the internal buffer.

    Not necessary if the object was initialized with a source. This
    will overwrite addresses already present in the buffer.

    @param fobj: file name or file-like object
    """
    if getattr(fobj, "read", None) is None:
        # Got a path: open it ourselves and remember to close it.
        fobj = open(fobj, "r")
        fclose = fobj.close
    else:
        fclose = None
    self._offset = 0
    line = 0
    try:
        decode = self._decode_record  # hoisted for the per-line loop
        try:
            for s in fobj:
                line += 1
                decode(s, line)
        except _EndOfFile:
            # Normal termination via the EOF record.
            pass
    finally:
        if fclose:
            fclose()
Load hex file into internal buffer. This is not necessary if object was initialized with source set. This will overwrite addresses if object was already initialized. @param fobj file name or file-like object
def set_xlabels(self, label=None, **kwargs):
    """Label the x axis on the bottom row of the grid.

    :param label: axis label; derived from the x variable's attrs when None
    :param kwargs: forwarded to ``Axes.set_xlabel``
    :return: self, to allow method chaining
    """
    if label is None:
        label = label_from_attrs(self.data[self._x_var])
    for ax in self._bottom_axes:
        ax.set_xlabel(label, **kwargs)
    return self
Label the x axis on the bottom row of the grid.
def update_record(self, record, data=None, priority=None, ttl=None,
                  comment=None):
    """Modify an existing record for this domain.

    Delegates to the manager; see ``manager.update_record``.
    """
    return self.manager.update_record(self, record, data=data,
                                      priority=priority, ttl=ttl,
                                      comment=comment)
Modifies an existing record for this domain.
def get_minimum_size(self, data):
    """Return the minimum size of the managed element, clamped to be at
    least the manually configured minimum width/height.
    """
    size = self.element.get_minimum_size(data)
    return datatypes.Point(
        max(size.x, self.min_width),
        max(size.y, self.min_height)
    )
Returns the minimum size of the managed element, as long as it is larger than any manually set minima.
def get_serializer_class(self):
    """Get the serializer class for the object being handled.

    :return: a `rest_framework.Serializer` subclass
    :raises Http404: if an unknown ``doctype`` is requested
    """
    klass = None
    lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
    if lookup_url_kwarg in self.kwargs:
        # Detail view: use the concrete class of the fetched object.
        klass = self.get_object().__class__
    elif "doctype" in self.request.REQUEST:
        # List view filtered by doctype: resolve via the registry.
        base = self.model.get_base_class()
        doctypes = indexable_registry.families[base]
        try:
            klass = doctypes[self.request.REQUEST["doctype"]]
        except KeyError:
            raise Http404
    if hasattr(klass, "get_serializer_class"):
        return klass.get_serializer_class()
    return super(ContentViewSet, self).get_serializer_class()
gets the class type of the serializer :return: `rest_framework.Serializer`
def get_switched_form_field_attrs(self, prefix, input_type, name):
    """Create the attribute dict for a switchable theme form field."""
    switch_key = 'data-' + prefix + 'field'
    return {
        'class': 'switched',
        'data-switch-on': prefix + 'field',
        switch_key + '-' + input_type: name,
    }
Creates attribute dicts for the switchable theme form
def remove_hash_prefix_indices(self, threat_list, indices):
    """Remove records matching *indices* from a lexicographically-sorted
    local threat list.

    NOTE(review): ``q`` is bound to the same object as
    ``prefixes_to_remove`` but is later used as a SQL template via
    ``q.format(...)`` -- it looks like the DELETE statement string was
    lost from this source; confirm against upstream.
    """
    # Delete in batches to stay under SQLite's bound-parameter limit.
    batch_size = 40
    q = prefixes_to_remove = self.get_hash_prefix_values_to_remove(
        threat_list, indices)
    with self.get_cursor() as dbc:
        for i in range(0, len(prefixes_to_remove), batch_size):
            remove_batch = prefixes_to_remove[i:(i + batch_size)]
            params = [
                threat_list.threat_type,
                threat_list.platform_type,
                threat_list.threat_entry_type
            ] + [sqlite3.Binary(b) for b in remove_batch]
            dbc.execute(q.format(','.join(['?'] * len(remove_batch))),
                        params)
Remove records matching indices from a lexicographically-sorted local threat list.
def do_IHaveRequest(self, apdu):
    """Respond to an I-Have request.

    :raises MissingRequiredParameter: when any required APDU field is None.
    """
    if _debug: WhoHasIHaveServices._debug("do_IHaveRequest %r", apdu)

    # All three identification fields are required.
    for attr in ('deviceIdentifier', 'objectIdentifier', 'objectName'):
        if getattr(apdu, attr) is None:
            raise MissingRequiredParameter("%s required" % attr)
Respond to a I-Have request.
def on_delivery(self, name, channel, method, properties, body):
    """Process a message from Rabbit.

    :param str name: the connection name
    :param channel: the message's delivery channel
    :param method: the method frame
    :param properties: the message properties
    :param str body: the message body
    """
    message = data.Message(name, channel, method, properties, body)
    if self.is_processing:
        # Busy: queue the message for later instead of handling it now.
        return self.pending.append(message)
    self.invoke_consumer(message)
Process a message from Rabbit :param str name: The connection name :param pika.channel.Channel channel: The message's delivery channel :param pika.frames.MethodFrame method: The method frame :param pika.spec.BasicProperties properties: The message properties :param str body: The message body
def complete_node(arg):
    """Complete a node hostname using an externally configured command.

    The config attribute ``complete_node_cmd`` holds a shell command;
    the string ``%search_string%`` in it is replaced by the current
    search string.

    :param arg: the current search string
    :return: list of candidate hostnames (one per output line)
    """
    try:
        cmd = cfg.get('global', 'complete_node_cmd')
    except configparser.NoOptionError:
        return ['', ]
    cmd = re.sub('%search_string%', pipes.quote(arg), cmd)
    args = shlex.split(cmd)
    p = subprocess.Popen(args, stdout=subprocess.PIPE)
    res, err = p.communicate()
    # communicate() returns bytes on Python 3; decode before splitting
    # into lines (splitting bytes with a str separator raises TypeError).
    nodes = res.decode('utf-8').split('\n')
    return nodes
Complete node hostname This function is currently a bit special as it looks in the config file for a command to use to complete a node hostname from an external system. It is configured by setting the config attribute "complete_node_cmd" to a shell command. The string "%search_string%" in the command will be replaced by the current search string.
def patch_string(s):
    """Reorganize a string so surrogate pairs become printable and
    lonely surrogates are escaped.

    :param s: input string
    :return: string with lonely surrogates escaped as ``\\uXXXX`` and
        valid surrogate pairs combined into their code point
    """
    res = ''
    it = PeekIterator(s)
    for c in it:
        if (ord(c) >> 10) == 0b110110:
            # High surrogate: try to combine with a following low surrogate.
            n = it.peek()
            if n and (ord(n) >> 10) == 0b110111:
                res += chr(((ord(c) & 0x3ff) << 10 | (ord(n) & 0x3ff)) + 0x10000)
                next(it)  # consume the low surrogate we just merged
            else:
                # Lonely high surrogate: escape it.
                res += "\\u{:04x}".format(ord(c))
        elif (ord(c) >> 10) == 0b110111:
            # Low surrogate with no preceding high surrogate: escape it.
            res += "\\u{:04x}".format(ord(c))
        else:
            res += c
    return res
Reorganize a String in such a way that surrogates are printable and lonely surrogates are escaped. :param s: input string :return: string with escaped lonely surrogates and 32bit surrogates
def get_part_filenames(num_parts=None, start_num=0):
    """Get numbered PART.html filenames.

    :param num_parts: highest part number; counted from files when None
    :param start_num: part number to start after
    """
    count = get_num_part_files() if num_parts is None else num_parts
    return ['PART{0}.html'.format(n) for n in range(start_num + 1, count + 1)]
Get numbered PART.html filenames.
def _shuffle_single(fname, extra_fn=None):
    """Shuffle a single file of records.

    Args:
        fname: a string
        extra_fn: an optional function from list of TFRecords to list of
            TFRecords, applied after shuffling.
    """
    records = read_records(fname)
    random.shuffle(records)
    if extra_fn is not None:
        records = extra_fn(records)
    out_fname = fname.replace(UNSHUFFLED_SUFFIX, "")
    write_records(records, out_fname)
    # Remove the unshuffled source once the shuffled copy is written.
    tf.gfile.Remove(fname)
Shuffle a single file of records. Args: fname: a string extra_fn: an optional function from list of TFRecords to list of TFRecords to be called after shuffling.
def KL_divergence(P, Q):
    """Compute the KL divergence between distributions P and Q.

    P and Q are dicts mapping symbols to probabilities; they must have
    exactly the same keys.
    """
    assert(P.keys() == Q.keys())
    return sum(P[k] * log(P[k] / Q[k]) for k in P)
Compute the KL divergence between distributions P and Q P and Q should be dictionaries linking symbols to probabilities. the keys to P and Q should be the same.
def export_event_based_gateway_info(node_params, output_element):
    """Add EventBasedGateway node attributes to an exported XML element.

    :param node_params: dict with the event-based gateway parameters,
    :param output_element: BPMN XML 'eventBasedGateway' element.
    """
    output_element.set(consts.Consts.gateway_direction,
                       node_params[consts.Consts.gateway_direction])
    output_element.set(consts.Consts.instantiate,
                       node_params[consts.Consts.instantiate])
    output_element.set(consts.Consts.event_gateway_type,
                       node_params[consts.Consts.event_gateway_type])
Adds EventBasedGateway node attributes to exported XML element :param node_params: dictionary with given event based gateway parameters, :param output_element: object representing BPMN XML 'eventBasedGateway' element.
def init(self):
    """Initialize the URL used to connect to SABnzbd."""
    self.url = self.url.format(host=self.host, port=self.port,
                               api_key=self.api_key)
Initialize the URL used to connect to SABnzbd.
def import_submodules(package):
    """Recursively import every submodule beneath *package*.

    :param package: package name or an already-imported package module
    :return: dict mapping full module names to imported module objects
    """
    if isinstance(package, str):
        package = importlib.import_module(package)
    modules = {}
    prefix = package.__name__ + '.'
    for _, full_name, is_pkg in pkgutil.walk_packages(package.__path__,
                                                      prefix):
        modules[full_name] = importlib.import_module(full_name)
        if is_pkg:
            modules.update(import_submodules(full_name))
    return modules
Return a dict mapping full module names to imported module instances from beneath the given package, recursively.
def dump_commands(self, commands):
    """Dump failed commands; wrapper for external access.

    Writes the commands under a 'fails' directory next to the SQL
    script, named after the script's stem.
    """
    directory = os.path.join(os.path.dirname(self.sql_script), 'fails')
    # splitext strips only the final extension of the basename; the old
    # `basename(self.sql_script.rsplit('.')[0])` truncated everything
    # after the FIRST dot anywhere in the path.
    fname = os.path.splitext(os.path.basename(self.sql_script))[0]
    return dump_commands(commands, directory, fname)
Dump commands wrapper for external access.
def get_manhole_factory(namespace, **passwords):
    """Get a Manhole SSH factory exposing *namespace*.

    :param namespace: dict of names available in the manhole session
    :param passwords: username=password pairs used for authentication
    """
    realm = manhole_ssh.TerminalRealm()
    realm.chainedProtocolFactory.protocolFactory = (
        lambda _: EnhancedColoredManhole(namespace)
    )
    p = portal.Portal(realm)
    p.registerChecker(
        checkers.InMemoryUsernamePasswordDatabaseDontUse(**passwords)
    )
    return manhole_ssh.ConchFactory(p)
Get a Manhole Factory
def load_modules_from_python(self, route_list):
    """Load command modules from native python sources.

    Each entry of *route_list* is ``(name, "module.path[:attr]")``.
    """
    for name, modpath in route_list:
        attr = None
        if ':' in modpath:
            modpath, attr = modpath.split(':', 1)
        self.commands[name] = ModuleLoader(modpath, attr=attr)
Load modules from the native python source.
def validate(obj, schema):
    """Validate an object according to its own AND an externally imposed
    schema, returning the object unchanged.
    """
    if not framework.EvaluationContext.current().validate:
        # Validation is globally disabled for this evaluation context.
        return obj
    if hasattr(obj, 'tuple_schema'):
        obj.tuple_schema.validate(obj)
    if schema:
        schema.validate(obj)
    return obj
Validate an object according to its own AND an externally imposed schema.
def flat_model(tree):
    """Flatten the tree into a list of property names, adding parents as
    ``__``-joined prefixes for nested models.
    """
    names = []
    for columns in viewvalues(tree):
        for col in columns:
            if isinstance(col, dict):
                # Nested model: recurse and prefix with the parent name.
                col_name = list(col)[0]
                names += [col_name + '__' + c for c in flat_model(col)]
            else:
                names.append(col)
    return names
Flatten the tree into a list of properties adding parents as prefixes.
def layer(self, layer_name):
    """Get a QGIS layer by name, trying vector first then raster.

    :param layer_name: The name of the layer to fetch.
    :type layer_name: str

    :return: The QGIS layer, or False if neither interpretation is valid.
    :rtype: QgsMapLayer
    """
    uri = self.layer_uri(layer_name)
    layer = QgsVectorLayer(uri, layer_name, 'ogr')
    if not layer.isValid():
        # Not a vector layer; fall back to raster.
        layer = QgsRasterLayer(uri, layer_name)
        if not layer.isValid():
            return False
    monkey_patch_keywords(layer)
    return layer
Get QGIS layer. :param layer_name: The name of the layer to fetch. :type layer_name: str :return: The QGIS layer. :rtype: QgsMapLayer .. versionadded:: 4.0
def iter_halfs_bend(graph):
    """Yield ``(affected_atoms, (atom1, atom2, atom3))`` for pairs of
    consecutive bonds that divide the molecule in two parts.
    """
    for atom2 in range(graph.num_vertices):
        neighbors = list(graph.neighbors[atom2])
        for index1, atom1 in enumerate(neighbors):
            for atom3 in neighbors[index1+1:]:
                # Try splitting on the atom2-atom1 bond first.
                try:
                    affected_atoms = graph.get_halfs(atom2, atom1)[0]
                    yield affected_atoms, (atom1, atom2, atom3)
                    continue
                except GraphError:
                    pass
                # Fall back to splitting on the atom2-atom3 bond.
                try:
                    affected_atoms = graph.get_halfs(atom2, atom3)[0]
                    yield affected_atoms, (atom3, atom2, atom1)
                except GraphError:
                    pass
Iterate over all pairs of consecutive bonds that divide the molecule in two parts, yielding the affected atoms together with the bend's atom triple.
def update_global_secondary_index(table_name, global_indexes, region=None,
                                  key=None, keyid=None, profile=None):
    """Update the throughput of the given global secondary indexes.

    CLI Example:

    .. code-block:: bash

        salt myminion boto_dynamodb.update_global_secondary_index \\
            table_name indexes
    """
    conn = _get_conn(region=region, key=key, keyid=keyid, profile=profile)
    table = Table(table_name, connection=conn)
    return table.update_global_secondary_index(global_indexes)
Updates the throughput of the given global secondary indexes. CLI Example: .. code-block:: bash salt myminion boto_dynamodb.update_global_secondary_index table_name / indexes
def _add(self, error: "Err"): if self.trace_errs is True: self.errors.append(error)
Adds an error to the trace if required
def format_py3o_val(value):
    """Format a value to fit py3o's context.

    Escapes markup and converts linebreaks to ODF line-break elements.
    """
    value = force_unicode(value)
    value = escape(value)
    value = value.replace(u'\n', u'<text:line-break/>')
    return Markup(value)
format a value to fit py3o's context * Handle linebreaks
def page(self, number, **context):
    """Return the records for the given page number.

    :param number: page number <int>
    :param context: may supply ``pageSize``; otherwise this record
        set's page size is used
    :return: a copy of this record set restricted to the page, or an
        unrestricted copy when the effective page size is 0
    """
    size = max(0, self.context(**context).pageSize)
    if not size:
        return self.copy()
    else:
        return self.copy(page=number, pageSize=size)
Returns the records for the current page, or the specified page number. If a page size is not specified, then this record sets page size will be used. :param pageno | <int> pageSize | <int> :return <orb.RecordSet>
def _handle_child(
        self, node: SchemaNode, stmt: Statement,
        sctx: SchemaContext) -> None:
    """Add a child node to the receiver and handle substatements."""
    if not sctx.schema_data.if_features(stmt, sctx.text_mid):
        # Child is disabled by if-feature; skip it entirely.
        return
    node.name = stmt.argument
    node.ns = sctx.default_ns
    node._get_description(stmt)
    self._add_child(node)
    node._handle_substatements(stmt, sctx)
Add child node to the receiver and handle substatements.
def desc(self) -> str:
    """Describe this token as a short string for debugging."""
    kind = self.kind.value
    if self.value:
        return f"{kind} {self.value!r}"
    return kind
A helper property to describe a token as a string for debugging
def _deserialize(self, value, attr, obj):
    """Deserialize a string into a Pendulum object, enforcing the
    configured timezone when one is set.

    :raises ValidationError: if the datetime is not in the configured
        timezone.
    """
    if not self.context.get('convert_dates', True) or not value:
        # Conversion disabled or empty value: pass through untouched.
        return value
    value = super(PendulumField, self)._deserialize(value, attr, value)
    timezone = self.get_field_value('timezone')
    target = pendulum.instance(value)
    if (timezone and
            (text_type(target) != text_type(target.in_timezone(timezone)))):
        raise ValidationError(
            "The provided datetime is not in the "
            "{} timezone.".format(timezone)
        )
    return target
Deserializes a string into a Pendulum object.
def rule(self, key):
    """Decorator registering a function as the rule for *key* in the
    top-level JSON.
    """
    def register(handler):
        self.rules[key] = handler
        return handler
    return register
Decorate as a rule for a key in top level JSON.
def repair(self, volume_id_or_uri, timeout=-1):
    """Remove extra presentations from a specified volume on the
    storage system.

    Args:
        volume_id_or_uri: Can be either the volume id or the volume uri.
        timeout: Timeout in seconds. Waits for task completion by
            default; does not abort the operation in OneView, just
            stops waiting.

    Returns:
        dict: Storage volume.
    """
    data = {
        "type": "ExtraManagedStorageVolumePaths",
        "resourceUri": self._client.build_uri(volume_id_or_uri)
    }
    custom_headers = {'Accept-Language': 'en_US'}
    uri = self.URI + '/repair'
    return self._client.create(data, uri=uri, timeout=timeout,
                               custom_headers=custom_headers)
Removes extra presentations from a specified volume on the storage system. Args: volume_id_or_uri: Can be either the volume id or the volume uri. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: dict: Storage volume.
def install_middleware(middleware_name, lookup_names=None):
    """Install the specified middleware at the front of Django's
    middleware setting, unless any of *lookup_names* is already present.
    """
    if lookup_names is None:
        lookup_names = (middleware_name,)
    # Newer Django uses MIDDLEWARE; fall back to MIDDLEWARE_CLASSES.
    middleware_attr = 'MIDDLEWARE' if getattr(settings,
                                              'MIDDLEWARE',
                                              None) is not None \
        else 'MIDDLEWARE_CLASSES'
    middleware = getattr(settings, middleware_attr, ()) or ()
    if set(lookup_names).isdisjoint(set(middleware)):
        # Prepend while preserving the container type (list or tuple).
        setattr(settings, middleware_attr,
                type(middleware)((middleware_name,)) + middleware)
Install specified middleware
def mouseDoubleClickEvent(self, event):
    """Override Qt method to trigger the tab name editor on a left
    double-click, when tab renaming is enabled.
    """
    if self.rename_tabs is True and \
            event.buttons() == Qt.MouseButtons(Qt.LeftButton):
        index = self.tabAt(event.pos())
        if index >= 0:
            self.tab_name_editor.edit_tab(index)
    else:
        # Fall back to the default Qt behaviour.
        QTabBar.mouseDoubleClickEvent(self, event)
Override Qt method to trigger the tab name editor.
async def recv_message(self):
    """Coroutine to receive an incoming message from the server.

    For a unary response call this once; for a streaming response call
    repeatedly (or iterate the stream asynchronously) until it returns
    None.

    :returns: message
    """
    if not self._recv_initial_metadata_done:
        # Initial metadata must be consumed before the first message.
        await self.recv_initial_metadata()
    with self._wrapper:
        message = await recv_message(self._stream, self._codec,
                                     self._recv_type)
        self._recv_message_count += 1
        message, = await self._dispatch.recv_message(message)
        return message
Coroutine to receive incoming message from the server. If server sends UNARY response, then you can call this coroutine only once. If server sends STREAM response, then you should call this coroutine several times, until it returns None. To simplify you code in this case, :py:class:`Stream` implements async iterations protocol, so you can use it like this: .. code-block:: python3 async for massage in stream: do_smth_with(message) or even like this: .. code-block:: python3 messages = [msg async for msg in stream] HTTP/2 has flow control mechanism, so client will acknowledge received DATA frames as a message only after user consumes this coroutine. :returns: message
def set_source_variable(self, source_id, variable, value):
    """Change the value of a source variable via a SET command."""
    cmd = 'SET S[%d].%s="%s"' % (int(source_id), variable, value)
    return self._send_cmd(cmd)
Change the value of a source variable.
def _create_and_add_parameters(params):
    """Parse the configuration and create Parameter instances, attaching
    each one to the current option.
    """
    global _current_parameter
    if _is_simple_type(params):
        # Single scalar parameter.
        _current_parameter = SimpleParameter(params)
        _current_option.add_parameter(_current_parameter)
    else:
        for i in params:
            if _is_simple_type(i):
                _current_parameter = SimpleParameter(i)
            else:
                # Structured parameter: delegate parsing of its fields.
                _current_parameter = TypedParameter()
                _parse_typed_parameter(i)
            _current_option.add_parameter(_current_parameter)
Parses the configuration and creates Parameter instances.
def simulate(self, ts_length=100, random_state=None):
    r"""Simulate a time series of length *ts_length*, first drawing

    .. math:: x_0 \sim N(\mu_0, \Sigma_0)

    Parameters
    ----------
    ts_length : scalar(int), optional(default=100)
        The length of the simulation.
    random_state : int or np.random.RandomState, optional
        Seed or RandomState instance for reproducibility; a randomly
        initialized RandomState is used when None.

    Returns
    -------
    x : array_like(float)
        An n x ts_length array; column t is :math:`x_t`.
    y : array_like(float)
        A k x ts_length array; column t is :math:`y_t`.
    """
    random_state = check_random_state(random_state)
    x0 = multivariate_normal(self.mu_0.flatten(), self.Sigma_0)
    w = random_state.randn(self.m, ts_length-1)
    v = self.C.dot(w)  # shocks scaled into state space
    x = simulate_linear_model(self.A, x0, v, ts_length)
    if self.H is not None:
        # Observation equation includes measurement noise.
        v = random_state.randn(self.l, ts_length)
        y = self.G.dot(x) + self.H.dot(v)
    else:
        y = self.G.dot(x)
    return x, y
r""" Simulate a time series of length ts_length, first drawing .. math:: x_0 \sim N(\mu_0, \Sigma_0) Parameters ---------- ts_length : scalar(int), optional(default=100) The length of the simulation random_state : int or np.random.RandomState, optional Random seed (integer) or np.random.RandomState instance to set the initial state of the random number generator for reproducibility. If None, a randomly initialized RandomState is used. Returns ------- x : array_like(float) An n x ts_length array, where the t-th column is :math:`x_t` y : array_like(float) A k x ts_length array, where the t-th column is :math:`y_t`
def _check_time_fn(self, time_instance=False):
    """Check that ``time_fn`` is suitable for this object.

    If *time_instance* is True, ``time_fn`` must be a param.Time
    instance. If this object is time-dependent and uses the global
    param.Dynamic.time_fn, Dynamic parameters must not be ignoring time.
    """
    if time_instance and not isinstance(self.time_fn, param.Time):
        raise AssertionError("%s requires a Time object"
                             % self.__class__.__name__)
    if self.time_dependent:
        global_timefn = self.time_fn is param.Dynamic.time_fn
        if global_timefn and not param.Dynamic.time_dependent:
            raise AssertionError("Cannot use Dynamic.time_fn as"
                                 " parameters are ignoring time.")
If time_fn is the global time function supplied by param.Dynamic.time_fn, make sure Dynamic parameters are using this time function to control their behaviour. If time_instance is True, time_fn must be a param.Time instance.
def on(self, year, month, day):
    """Return a new instance with the current date set to a different date.

    :param year: The year
    :param month: The month
    :param day: The day
    :rtype: DateTime
    """
    return self.set(
        year=int(year),
        month=int(month),
        day=int(day),
    )
Returns a new instance with the current date set to a different date. :param year: The year :type year: int :param month: The month :type month: int :param day: The day :type day: int :rtype: DateTime
def _get_rom_firmware_version(self, data): firmware_details = self._get_firmware_embedded_health(data) if firmware_details: try: rom_firmware_version = ( firmware_details['HP ProLiant System ROM']) return {'rom_firmware_version': rom_firmware_version} except KeyError: return None
Gets the rom firmware version for server capabilities Parse the get_host_health_data() to retreive the firmware details. :param data: the output returned by get_host_health_data() :returns: a dictionary of rom firmware version.
def accept_line(self, logevent):
    """Return True when the log line reflects a replica-set state change.

    Matches lines containing 'is now in state' (other nodes' changes)
    or '[rsMgr] replSet <STATE>' lines (own state changes).
    """
    line = logevent.line_str
    # split_tokens is only consulted after a substring match, preserving
    # the original short-circuit behaviour.
    if "is now in state" in line and logevent.split_tokens[-1] in self.states:
        return True
    return ("replSet" in line and
            logevent.thread == "rsMgr" and
            logevent.split_tokens[-1] in self.states)
Return True on match. Only match log lines containing 'is now in state' (reflects other node's state changes) or of type "[rsMgr] replSet PRIMARY" (reflects own state changes).
def intcomma(value):
    """Convert an integer to a string containing commas every three
    digits.

    For example, 3000 becomes '3,000' and 45000 becomes '45,000'.
    Non-numeric input is returned unchanged; floats are also accepted
    for compatibility with Django's intcomma.
    """
    try:
        if isinstance(value, compat.string_types):
            float(value.replace(',', ''))
        else:
            float(value)
    except (TypeError, ValueError):
        # Not a number: return the input untouched.
        return value
    orig = str(value)
    # Raw strings: "\d" and "\g" are invalid escape sequences in
    # non-raw string literals on Python 3.
    new = re.sub(r"^(-?\d+)(\d{3})", r'\g<1>,\g<2>', orig)
    if orig == new:
        return new
    else:
        # Recurse until no more commas need inserting.
        return intcomma(new)
Converts an integer to a string containing commas every three digits. For example, 3000 becomes '3,000' and 45000 becomes '45,000'. To maintain some compatability with Django's intcomma, this function also accepts floats.
def disaggregate_wind(wind_daily, method='equal', a=None, b=None,
                      t_shift=None):
    """General function for windspeed disaggregation.

    Args:
        wind_daily: daily values
        method: 'equal', 'cosine' or 'random'
        a: parameter a for the cosine function
        b: parameter b for the cosine function
        t_shift: parameter t_shift for the cosine function

    Returns:
        Disaggregated hourly values of windspeed.
    """
    assert method in ('equal', 'cosine', 'random'), 'Invalid method'
    wind_eq = melodist.distribute_equally(wind_daily)
    if method == 'equal':
        wind_disagg = wind_eq
    elif method == 'cosine':
        # The cosine method needs all three shape parameters.
        assert None not in (a, b, t_shift)
        wind_disagg = _cosine_function(
            np.array([wind_eq.values, wind_eq.index.hour]), a, b, t_shift)
    elif method == 'random':
        wind_disagg = wind_eq * (-np.log(np.random.rand(len(wind_eq))))**0.3
    return wind_disagg
general function for windspeed disaggregation Args: wind_daily: daily values method: keyword specifying the disaggregation method to be used a: parameter a for the cosine function b: parameter b for the cosine function t_shift: parameter t_shift for the cosine function Returns: Disaggregated hourly values of windspeed.
def is_unix(name=None):
    """Return True if the platform *name* (default: current platform) is
    unix-like, False otherwise.
    """
    platform_name = name or sys.platform
    return any(check(platform_name) for check in
               (Platform.is_darwin, Platform.is_linux, Platform.is_freebsd))
Return true if the platform is a unix, False otherwise.
def pop(self):
    """Pop and return the value at the top of the stack.

    @raise StackError: raised on stack underflow.
    """
    if not self.stack:
        raise StackError('Stack empty')
    val, self.stack = self.stack[0], self.stack[1:]
    return val
Pops a value off the top of the stack. @return: Value popped off the stack. @rtype: * @raise StackError: Raised when there is a stack underflow.
def encode_offset_commit_request(cls, client_id, correlation_id, group, payloads):
    """Encode an OffsetCommitRequest struct into length-prefixed bytes.

    Arguments:
        client_id: string
        correlation_id: int
        group: string, the consumer group you are committing offsets for
        payloads: list of OffsetCommitRequest
    """
    # Group payloads so each topic appears once with all of its
    # partitions nested beneath it, as the wire format requires.
    grouped_payloads = group_by_topic_and_partition(payloads)
    message = []
    message.append(cls._encode_message_header(client_id, correlation_id,
                                              KafkaProtocol.OFFSET_COMMIT_KEY))
    message.append(write_short_string(group))
    # Topic count, then per topic: name, partition count, and per
    # partition the (partition id, offset) pair plus metadata string.
    message.append(struct.pack('>i', len(grouped_payloads)))
    for topic, topic_payloads in grouped_payloads.items():
        message.append(write_short_string(topic))
        message.append(struct.pack('>i', len(topic_payloads)))
        for partition, payload in topic_payloads.items():
            message.append(struct.pack('>iq', partition, payload.offset))
            message.append(write_short_string(payload.metadata))
    msg = b''.join(message)
    # Prefix the whole message with its 4-byte big-endian length.
    return struct.pack('>i%ds' % len(msg), len(msg), msg)
Encode some OffsetCommitRequest structs Arguments: client_id: string correlation_id: int group: string, the consumer group you are committing offsets for payloads: list of OffsetCommitRequest
def at_block_number(block_number: BlockNumber, chain: MiningChain) -> MiningChain:
    """Rewind the chain back to the given block number.

    Calls to things like ``get_canonical_head`` will still return the
    canonical head of the chain, however, you can use ``mine_block`` to
    mine fork chains.

    :param block_number: canonical block number to rewind to
    :param chain: the chain to rewind; must be a MiningChain
    :raises ValidationError: if ``chain`` is not a MiningChain
    """
    if not isinstance(chain, MiningChain):
        # Fixed: closing quote around MiningChain was missing.
        raise ValidationError("`at_block_number` may only be used with 'MiningChain'")
    at_block = chain.get_canonical_block_by_number(block_number)
    db = chain.chaindb.db
    # Build a new chain of the same concrete type, rooted at a header
    # whose parent is the requested historical block.
    chain_at_block = type(chain)(db, chain.create_header_from_parent(at_block.header))
    return chain_at_block
Rewind the chain back to the given block number. Calls to things like ``get_canonical_head`` will still return the canonical head of the chain, however, you can use ``mine_block`` to mine fork chains.
def unlock_password(self, ID, reason):
    """Unlock a password entry.

    :param ID: identifier of the password to unlock
    :param reason: audit reason recorded for the unlock
    """
    # Lazy %-style logging args: the message is only interpolated when
    # the INFO level is actually enabled (was eager '%' interpolation).
    log.info('Unlock password %s, Reason: %s', ID, reason)
    self.unlock_reason = reason
    self.put('passwords/%s/unlock.json' % ID)
Unlock a password.
def set_iscsi_info(self, target_name, lun, ip_address, port='3260',
                   auth_method=None, username=None, password=None):
    """Set iSCSI details of the system in UEFI boot mode.

    The initiator system is set with the target details like IQN, LUN,
    IP, Port etc.

    :param target_name: Target Name for iSCSI.
    :param lun: logical unit number.
    :param ip_address: IP address of the target.
    :param port: port of the target.
    :param auth_method: either None or CHAP.
    :param username: CHAP Username for authentication.
    :param password: CHAP secret.
    :raises: IloCommandNotSupportedInBiosError, if the system is in the
        bios boot mode.
    """
    if(self._is_boot_mode_uefi()):
        iscsi_info = {}
        iscsi_info['iSCSITargetName'] = target_name
        iscsi_info['iSCSILUN'] = lun
        iscsi_info['iSCSITargetIpAddress'] = ip_address
        iscsi_info['iSCSITargetTcpPort'] = int(port)
        iscsi_info['iSCSITargetInfoViaDHCP'] = False
        iscsi_info['iSCSIConnection'] = 'Enabled'
        if (auth_method == 'CHAP'):
            iscsi_info['iSCSIAuthenticationMethod'] = 'Chap'
            iscsi_info['iSCSIChapUsername'] = username
            iscsi_info['iSCSIChapSecret'] = password
        self._change_iscsi_target_settings(iscsi_info)
    else:
        msg = 'iSCSI boot is not supported in the BIOS boot mode'
        raise exception.IloCommandNotSupportedInBiosError(msg)
Set iSCSI details of the system in UEFI boot mode. The initiator system is set with the target details like IQN, LUN, IP, Port etc. :param target_name: Target Name for iSCSI. :param lun: logical unit number. :param ip_address: IP address of the target. :param port: port of the target. :param auth_method : either None or CHAP. :param username: CHAP Username for authentication. :param password: CHAP secret. :raises: IloCommandNotSupportedInBiosError, if the system is in the bios boot mode.
def expandf(m, format):
    """Expand the match using the FORMAT-style replace pattern or function.

    :param m: a regex match object.
    :param format: the replace template (or callable) to expand.
    """
    # Validate the template is expandable before applying it.
    _assert_expandable(format, True)
    return _apply_replace_backrefs(m, format, flags=FORMAT)
Expand the string using the format replace pattern or function.
def set_viewbox(self, x, y, w, h):
    """Set the origin and size of the viewbox, describing a virtual view area.

    Args:
        x (int): x coordinate of the viewbox origin
        y (int): y coordinate of the viewbox origin
        w (int): width of the viewbox
        h (int): height of the viewbox
    """
    viewbox_value = "%s %s %s %s" % (x, y, w, h)
    self.attributes['viewBox'] = viewbox_value
    # Stretch content to fill the viewport rather than preserving ratio.
    self.attributes['preserveAspectRatio'] = 'none'
Sets the origin and size of the viewbox, describing a virtual view area. Args: x (int): x coordinate of the viewbox origin y (int): y coordinate of the viewbox origin w (int): width of the viewbox h (int): height of the viewbox
def anchor(args):
    """
    %prog anchor map.bed markers.blast > anchored.bed

    Anchor scaffolds based on map.
    """
    from jcvi.formats.blast import bed
    p = OptionParser(anchor.__doc__)
    opts, args = p.parse_args(args)
    if len(args) != 2:
        sys.exit(not p.print_help())
    mapbed, blastfile = args
    # Convert BLAST hits to BED to obtain marker placements on scaffolds.
    bedfile = bed([blastfile])
    markersbed = Bed(bedfile)
    markers = markersbed.order
    mapbed = Bed(mapbed, sorted=False)
    for b in mapbed:
        m = b.accn
        if m not in markers:
            # Marker has no BLAST placement; skip it.
            continue
        i, mb = markers[m]
        # Rewrite the accession to the marker's scaffold coordinates.
        new_accn = "{0}:{1}-{2}".format(mb.seqid, mb.start, mb.end)
        b.accn = new_accn
        print(b)
%prog anchor map.bed markers.blast > anchored.bed Anchor scaffolds based on map.
def getServiceRequest(self, request, target):
    """Return a service request based on the message target.

    First tries ``target`` as a full service name; failing that, the
    last dotted component is treated as a method on the remaining prefix.

    @param request: The AMF request.
    @type request: L{Request<pyamf.remoting.Request>}
    @rtype: L{ServiceRequest}
    @raise UnknownServiceError: Unknown service.
    """
    # Exact match: the whole target is a registered service name.
    try:
        return self._request_class(
            request.envelope, self.services[target], None)
    except KeyError:
        pass
    # Otherwise interpret "pkg.service.method" as service + method.
    try:
        parts = target.split('.')
        service_name = '.'.join(parts[:-1])
        method_name = parts[-1]
        return self._request_class(
            request.envelope, self.services[service_name], method_name)
    except (ValueError, KeyError):
        pass
    raise UnknownServiceError("Unknown service %s" % target)
Returns a service based on the message. @raise UnknownServiceError: Unknown service. @param request: The AMF request. @type request: L{Request<pyamf.remoting.Request>} @rtype: L{ServiceRequest}
def add_port_forward_rule(self, is_ipv6, rule_name, proto, host_ip, host_port,
                          guest_ip, guest_port):
    """Add a NAT port-forwarding rule.

    :param is_ipv6: whether the rule applies to IPv6 traffic
    :param rule_name: name of the rule
    :param proto: :class:`NATProtocol` handled with the rule
    :param host_ip: IP of the host interface to which the rule should
        apply. An empty ip address is acceptable, in which case the NAT
        engine binds the handling socket to any interface.
    :param host_port: the port number to listen on
    :param guest_ip: IP address of the guest which the NAT engine will
        forward matching packets to. An empty IP address is not
        acceptable.
    :param guest_port: the port number to forward
    """
    # Defensive type checks mirror the generated VirtualBox API bindings;
    # basestring/baseinteger are compatibility shims defined elsewhere.
    if not isinstance(is_ipv6, bool):
        raise TypeError("is_ipv6 can only be an instance of type bool")
    if not isinstance(rule_name, basestring):
        raise TypeError("rule_name can only be an instance of type basestring")
    if not isinstance(proto, NATProtocol):
        raise TypeError("proto can only be an instance of type NATProtocol")
    if not isinstance(host_ip, basestring):
        raise TypeError("host_ip can only be an instance of type basestring")
    if not isinstance(host_port, baseinteger):
        raise TypeError("host_port can only be an instance of type baseinteger")
    if not isinstance(guest_ip, basestring):
        raise TypeError("guest_ip can only be an instance of type basestring")
    if not isinstance(guest_port, baseinteger):
        raise TypeError("guest_port can only be an instance of type baseinteger")
    self._call("addPortForwardRule",
               in_p=[is_ipv6, rule_name, proto, host_ip, host_port,
                     guest_ip, guest_port])
Adds a new NAT port-forwarding rule. in is_ipv6 of type bool Whether the rule applies to IPv6 traffic. in rule_name of type str Name of the rule. in proto of type :class:`NATProtocol` Protocol handled with the rule. in host_ip of type str IP of the host interface to which the rule should apply. An empty ip address is acceptable, in which case the NAT engine binds the handling socket to any interface. in host_port of type int The port number to listen on. in guest_ip of type str The IP address of the guest which the NAT engine will forward matching packets to. An empty IP address is not acceptable. in guest_port of type int The port number to forward.
def _tot_unhandled_hosts_by_state(self, state): return sum(1 for h in self.hosts if h.state == state and h.state_type == u'HARD' and h.is_problem and not h.problem_has_been_acknowledged)
Generic function to get the number of unhandled problem hosts in the specified state :param state: state to filter on :type state: :return: number of host in state *state* and which are not acknowledged problems :rtype: int
def _apply_key_type(self, keys): typed_key = () for dim, key in zip(self.kdims, keys): key_type = dim.type if key_type is None: typed_key += (key,) elif isinstance(key, slice): sl_vals = [key.start, key.stop, key.step] typed_key += (slice(*[key_type(el) if el is not None else None for el in sl_vals]),) elif key is Ellipsis: typed_key += (key,) elif isinstance(key, list): typed_key += ([key_type(k) for k in key],) else: typed_key += (key_type(key),) return typed_key
If a type is specified by the corresponding key dimension, this method applies the type to the supplied key.
def get_archive(self, archive_name):
    """Get a data archive given an archive name.

    Returns
    -------
    archive_specification : dict
        archive_name: name of the archive to be retrieved
        authority: name of the archive's authority
        archive_path: service path of archive
    """
    try:
        return self._get_archive_spec(archive_name)
    except KeyError:
        # Re-raise with an archive-specific message.
        raise KeyError('Archive "{}" not found'.format(archive_name))
Get a data archive given an archive name Returns ------- archive_specification : dict archive_name: name of the archive to be retrieved authority: name of the archive's authority archive_path: service path of archive
def is_config_container(v):
    """Check whether ``v`` is of type list, dict or Config.

    :param v: value to inspect
    :return: True when ``v`` is an instance (or subclass instance) of
        one of the container types, False otherwise
    """
    # isinstance is the idiomatic equivalent of issubclass(type(v), X)
    # and handles subclasses identically. Config stays in a separate,
    # short-circuited test so it is only referenced when needed, as in
    # the original or-chain.
    return isinstance(v, (list, dict)) or isinstance(v, Config)
checks whether v is of type list,dict or Config
def declare_queue(self, queue_name):
    """Declare a queue.  Has no effect if a queue with the given name
    already exists.

    Parameters:
        queue_name(str): The name of the new queue.

    Raises:
        ConnectionClosed: If the underlying channel or connection has
            been closed.
    """
    attempts = 1
    while True:
        try:
            if queue_name not in self.queues:
                self.emit_before("declare_queue", queue_name)
                self._declare_queue(queue_name)
                self.queues.add(queue_name)
                self.emit_after("declare_queue", queue_name)

                # Each queue gets a companion delay queue and a
                # dead-letter queue.
                delayed_name = dq_name(queue_name)
                self._declare_dq_queue(queue_name)
                self.delay_queues.add(delayed_name)
                self.emit_after("declare_delay_queue", delayed_name)

                self._declare_xq_queue(queue_name)
            break
        except (pika.exceptions.AMQPConnectionError,
                pika.exceptions.AMQPChannelError) as e:
            # Drop the broken channel/connection so they are lazily
            # re-created on the next attempt.
            del self.channel
            del self.connection
            attempts += 1
            if attempts > MAX_DECLARE_ATTEMPTS:
                raise ConnectionClosed(e) from None
            self.logger.debug(
                "Retrying declare due to closed connection. [%d/%d]",
                attempts, MAX_DECLARE_ATTEMPTS,
            )
Declare a queue. Has no effect if a queue with the given name already exists. Parameters: queue_name(str): The name of the new queue. Raises: ConnectionClosed: If the underlying channel or connection has been closed.
def _get_thumbnail_filename(filename, append_text="-thumbnail"): name, ext = os.path.splitext(filename) return ''.join([name, append_text, ext])
Returns a thumbnail version of the file name.
def types_of_specie(self):
    """List of types of specie, in first-occurrence order.

    Only works for ordered structures. Disordered structures will raise
    TypeError.
    """
    if not self.is_ordered:
        # Previously raised a bare TypeError with no message; give the
        # caller a reason.
        raise TypeError(
            "types_of_specie only works for ordered structures")
    types = []
    for site in self:
        if site.specie not in types:
            types.append(site.specie)
    return types
List of types of specie. Only works for ordered structures. Disordered structures will raise TypeError.
def get_SZ_orient(self):
    """Get the S and Z matrices using the specified orientation averaging.

    The cached matrices are recomputed only when a parameter affecting
    the T-matrix, the scattering geometry or the orientation averaging
    has changed since the last call.
    """
    # Has anything affecting the T-matrix itself changed?
    tm_outdated = self._tm_signature != (self.radius, self.radius_type,
                                         self.wavelength, self.m,
                                         self.axis_ratio, self.shape,
                                         self.ddelt, self.ndgs)
    # Has the scattering geometry changed?
    scatter_outdated = self._scatter_signature != (self.thet0, self.thet,
                                                   self.phi0, self.phi,
                                                   self.alpha, self.beta,
                                                   self.orient)
    # Has the orientation-averaging setup changed?
    orient_outdated = self._orient_signature != \
        (self.orient, self.or_pdf, self.n_alpha, self.n_beta)
    if orient_outdated:
        self._init_orient()
    outdated = tm_outdated or scatter_outdated or orient_outdated
    if outdated:
        # Recompute and cache; self.orient is the averaging callable.
        (self._S_orient, self._Z_orient) = self.orient(self)
        self._set_scatter_signature()
    return (self._S_orient, self._Z_orient)
Get the S and Z matrices using the specified orientation averaging.
def load_structure_path(self, structure_path, file_type):
    """Load a structure file and provide pointers to its location.

    Args:
        structure_path (str): Path to structure file
        file_type (str): Type of structure file

    Raises:
        ValueError: if ``file_type`` is empty or None.
    """
    if not file_type:
        raise ValueError('File type must be specified')
    self.file_type = file_type
    # op.split yields (dirname, basename) in one call.
    directory, basename = op.split(structure_path)
    self.structure_dir = directory
    self.structure_file = basename
Load a structure file and provide pointers to its location Args: structure_path (str): Path to structure file file_type (str): Type of structure file
async def oauth2_request(
    self, url: str, access_token: str = None, post_args: Dict[str, Any] = None,
    **args: Any
) -> Any:
    """Fetch the given URL, authenticating with an OAuth2 access token.

    If ``post_args`` is provided the request is a POST with those
    form-encoded arguments; otherwise it is a GET.  Extra keyword
    arguments are appended to the query string.

    Returns the JSON-decoded response body.
    """
    all_args = {}
    if access_token:
        all_args["access_token"] = access_token
    all_args.update(args)
    if all_args:
        # Token and extra args travel in the query string.
        url += "?" + urllib.parse.urlencode(all_args)
    http = self.get_auth_http_client()
    if post_args is not None:
        # POST: send post_args as a form-encoded body.
        response = await http.fetch(
            url, method="POST", body=urllib.parse.urlencode(post_args)
        )
    else:
        response = await http.fetch(url)
    return escape.json_decode(response.body)
Fetches the given URL auth an OAuth2 access token. If the request is a POST, ``post_args`` should be provided. Query string arguments should be given as keyword arguments. Example usage: ..testcode:: class MainHandler(tornado.web.RequestHandler, tornado.auth.FacebookGraphMixin): @tornado.web.authenticated async def get(self): new_entry = await self.oauth2_request( "https://graph.facebook.com/me/feed", post_args={"message": "I am posting from my Tornado application!"}, access_token=self.current_user["access_token"]) if not new_entry: # Call failed; perhaps missing permission? await self.authorize_redirect() return self.finish("Posted a message!") .. testoutput:: :hide: .. versionadded:: 4.3 .. versionchanged::: 6.0 The ``callback`` argument was removed. Use the returned awaitable object instead.
def addJsonDirectory(self, directory, test=None):
    """Add data from the JSON files in the given directory.

    Each file passing the optional ``test(filename, full_path)``
    predicate is parsed and registered under its extension-less name;
    entries that fail to parse as JSON are silently skipped.
    """
    for entry in os.listdir(directory):
        try:
            full_path = os.path.join(directory, entry)
            if test and not test(entry, full_path):
                continue
            with open(full_path) as handle:
                data = json.load(handle)
            source_name, _ = os.path.splitext(entry)
            self.addSource(source_name, data)
        except ValueError:
            # Not valid JSON -- skip this file, as before.
            continue
Adds data from json files in the given directory.
def update_tasks(self):
    """Handle timed-out and cancelled tasks.

    Each affected task is marked done with the appropriate error and
    its worker is stopped.
    """
    for task in self.task_manager.timeout_tasks():
        error = TimeoutError("Task timeout", task.timeout)
        self.task_manager.task_done(task.id, error)
        self.worker_manager.stop_worker(task.worker_id)

    for task in self.task_manager.cancelled_tasks():
        self.task_manager.task_done(task.id, CancelledError())
        self.worker_manager.stop_worker(task.worker_id)
Handles timing out Tasks.
def add_logging_args(parser: argparse.ArgumentParser, patch: bool = True,
                     erase_args: bool = True) -> None:
    """Add command line flags specific to logging.

    :param parser: `argparse` parser where to add new flags.
    :param patch: Patch parse_args() to automatically setup logging.
    :param erase_args: Automatically remove logging-related flags from
        parsed args.
    """
    parser.add_argument("--log-level", default="INFO",
                        choices=logging._nameToLevel,
                        help="Logging verbosity.")
    parser.add_argument("--log-structured", action="store_true",
                        help="Enable structured logging (JSON record per line).")
    parser.add_argument("--log-config",
                        help="Path to the file which sets individual log levels of domains.")

    def _patched_parse_args(args=None, namespace=None) -> argparse.Namespace:
        # Parse as usual, then configure logging from the parsed flags.
        args = parser._original_parse_args(args, namespace)
        setup(args.log_level, args.log_structured, args.log_config)
        if erase_args:
            # Hide the logging flags from downstream consumers.
            for log_arg in ("log_level", "log_structured", "log_config"):
                delattr(args, log_arg)
        return args

    # Guard against double-patching if called twice on the same parser.
    if patch and not hasattr(parser, "_original_parse_args"):
        parser._original_parse_args = parser.parse_args
        parser.parse_args = _patched_parse_args
Add command line flags specific to logging. :param parser: `argparse` parser where to add new flags. :param erase_args: Automatically remove logging-related flags from parsed args. :param patch: Patch parse_args() to automatically setup logging.
def register(cls, name: str, plugin: Type[ConnectionPlugin]) -> None:
    """Register a connection plugin with a specified name.

    Args:
        name: name of the connection plugin to register
        plugin: defined connection plugin class

    Raises:
        :obj:`nornir.core.exceptions.ConnectionPluginAlreadyRegistered`
            if another plugin with the specified name was already
            registered
    """
    registered = cls.available.get(name)
    if registered is None:
        cls.available[name] = plugin
        return
    # Re-registering the same plugin is a harmless no-op; a different
    # plugin under the same name is an error.
    if registered != plugin:
        raise ConnectionPluginAlreadyRegistered(
            f"Connection plugin {plugin.__name__} can't be registered as "
            f"{name!r} because plugin {registered.__name__} "
            f"was already registered under this name"
        )
Registers a connection plugin with a specified name Args: name: name of the connection plugin to register plugin: defined connection plugin class Raises: :obj:`nornir.core.exceptions.ConnectionPluginAlreadyRegistered` if another plugin with the specified name was already registered
def save_list(key, *values):
    """Convert the given list of parameters to a JSON object.

    The JSON object is of the form
    ``{ key: [values[0], values[1], ...] }``, where values represent the
    given list of parameters.
    """
    encoded = [_get_json(value) for value in values]
    return json.dumps({key: encoded})
Convert the given list of parameters to a JSON object. JSON object is of the form: { key: [values[0], values[1], ... ] }, where values represent the given list of parameters.
def spec(self):
    """Generate spec for the processor as a Python dictionary.

    A spec is a standard way to describe a MountainLab processor in a
    way that is easy to process, yet still understandable by humans.
    """
    pspec = {
        'name': self.NAME,
        'version': self.VERSION,
        'description': self.DESCRIPTION,
    }
    if self.COMMAND:
        pspec['exe_command'] = self.COMMAND
    else:
        # Default command: this script, the processor name and,
        # optionally, the arguments placeholder.
        parts = [sys.argv[0], self.NAME]
        if self.USE_ARGUMENTS:
            parts.append('$(arguments)')
        pspec['exe_command'] = ' '.join(parts)
    pspec['inputs'] = [entry.spec for entry in self.INPUTS]
    pspec['outputs'] = [entry.spec for entry in self.OUTPUTS]
    pspec['parameters'] = [entry.spec for entry in self.PARAMETERS]
    if hasattr(self, 'test') and callable(self.test):
        pspec['has_test'] = True
    return pspec
Generate spec for the processor as a Python dictionary. A spec is a standard way to describe a MountainLab processor in a way that is easy to process, yet still understandable by humans. This method generates a Python dictionary that complies with a spec definition.
def clone(self, substitutions, commit=True, **kwargs):
    """Clone a DAG, optionally skipping the commit.

    :param substitutions: substitutions to apply to the cloned DAG.
    :param commit: whether to commit the clone.
        NOTE(review): this parameter is currently ignored -- it is
        neither used here nor forwarded to ``self.store.clone``;
        confirm whether it should be passed through.
    :returns: the clone produced by the underlying store.
    """
    return self.store.clone(substitutions, **kwargs)
Clone a DAG, optionally skipping the commit.
def _env_filenames(filenames, env): env_filenames = [] for filename in filenames: filename_parts = filename.split('.') filename_parts.insert(1, env) env_filenames.extend([filename, '.'.join(filename_parts)]) return env_filenames
Extend filenames with environment-specific variants. :param list filenames: list of strings indicating filenames :param str env: environment indicator :returns: list of filenames extended with environment versions :rtype: list
def get_p2o_params_from_url(cls, url):
    """Get the p2o params given a URL for the data source.

    The URL may carry a ``name=value`` filter after the filter
    separator; only the first filter is honoured (extras log a
    warning) and a malformed filter raises ELKError.
    """
    if PRJ_JSON_FILTER_SEPARATOR not in url:
        return {"url": url}

    # The part before the first space is the plain URL.
    params = {'url': url.split(' ', 1)[0]}

    filters = url.split(PRJ_JSON_FILTER_SEPARATOR)[1:]
    if len(filters) > 1:
        cause = "Too many filters defined for %s, only the first one is considered" % url
        logger.warning(cause)

    first = filters[0]
    pieces = first.split(PRJ_JSON_FILTER_OP_ASSIGNMENT)
    if len(pieces) != 2:
        cause = "Too many tokens after splitting for %s in %s" % (first, url)
        logger.error(cause)
        raise ELKError(cause=cause)

    fltr_name, fltr_value = (piece.strip() for piece in pieces)
    params['filter-' + fltr_name] = fltr_value
    return params
Get the p2o params given a URL for the data source
def init_threads(t=None, s=None):
    """Install the global thread and signal classes used by the module.

    Falls back to the dummyThread/dummySignal placeholders when no
    implementations are supplied.
    """
    global THREAD, SIGNAL
    THREAD = t if t else dummyThread
    SIGNAL = s if s else dummySignal
Initialize the global THREAD and SIGNAL classes, defaulting to the dummyThread and dummySignal placeholder classes when no implementations are given.
def accounts(self):
    """A list of structures describing apps and pages owned by this user."""
    response = self.graph.get('%s/accounts' % self.id)
    return [
        Structure(
            page=Page(
                id=item['id'],
                name=item['name'],
                category=item['category'],
            ),
            access_token=item['access_token'],
            permissions=item['perms'],
        )
        for item in response['data']
    ]
A list of structures describing apps and pages owned by this user.
def get_valid_kwargs(func, potential_kwargs):
    """Return the subset of ``potential_kwargs`` accepted by ``func``.

    Only keys matching keyword argument names of *func* are kept.
    """
    accepted = {}
    for arg_name in get_kwarg_names(func):
        with suppress(KeyError):
            accepted[arg_name] = potential_kwargs[arg_name]
    return accepted
Return valid kwargs to function func