code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def to_utc_date(date):
    """Convert a naive local datetime to the equivalent naive UTC datetime.

    Args:
        date (datetime): Input datetime interpreted in the local timezone.
            May be ``None`` (or otherwise falsy), in which case ``None``
            is returned.

    Returns:
        datetime: Naive datetime expressed in UTC, or ``None``.
    """
    import time

    if not date:
        return None
    # Bug fix: the original used date.strftime('%s'), an undocumented,
    # platform-specific directive (it raises on Windows). time.mktime()
    # performs the same local-time -> epoch-seconds conversion portably
    # (both forms truncate microseconds).
    timestamp = time.mktime(date.timetuple())
    return datetime.utcfromtimestamp(timestamp).replace(tzinfo=None)
Convert a datetime object from local to UTC format >>> import datetime >>> d = datetime.datetime(2017, 8, 15, 18, 24, 31) >>> to_utc_date(d) datetime.datetime(2017, 8, 16, 1, 24, 31) Args: date (`datetime`): Input datetime object Returns: `datetime`
def add_matplotlib_cmaps(fail_on_import_error=True):
    """Add all matplotlib colormaps.

    Iterates matplotlib's colormap registry (``cm.cmap_d``) and registers
    each string-named map via ``add_matplotlib_cmap``.

    Args:
        fail_on_import_error (bool): If True, re-raise when matplotlib
            cannot be imported; otherwise return silently.
    """
    try:
        from matplotlib import cm as _cm
        from matplotlib.cbook import mplDeprecation
    except ImportError:
        if fail_on_import_error:
            raise
        return
    for name in _cm.cmap_d:
        # The registry may contain non-string keys; skip them.
        if not isinstance(name, str):
            continue
        try:
            with warnings.catch_warnings():
                # Escalate matplotlib deprecation warnings to errors so
                # deprecated colormap aliases are skipped, not registered.
                warnings.simplefilter('error', mplDeprecation)
                cm = _cm.get_cmap(name)
                add_matplotlib_cmap(cm, name=name)
        except Exception as e:
            # NOTE(review): when fail_on_import_error is set this only
            # *prints* the failure, it does not re-raise — confirm that
            # this asymmetry with the ImportError branch is intended.
            if fail_on_import_error:
                print("Error adding colormap '%s': %s" % (name, str(e)))
Add all matplotlib colormaps.
def fit(self, data, debug=False):
    """Fit each segment.

    Segments that have not already been explicitly added will be
    automatically added with default model and ytransform, provided they
    have more than ``min_segment_size`` rows.

    Parameters
    ----------
    data : pandas.DataFrame
        Must have a column with the same name as `segmentation_col`.
    debug : bool
        If set to true will pass debug to the fit method of each model.

    Returns
    -------
    fits : dict of statsmodels.regression.linear_model.OLSResults
        Keys are the segment names.
    """
    data = util.apply_filter_query(data, self.fit_filters)
    unique = data[self.segmentation_col].unique()
    value_counts = data[self.segmentation_col].value_counts()
    # Drop models for segments that no longer appear in the filtered data.
    gone = set(self._group.models) - set(unique)
    for g in gone:
        del self._group.models[g]
    # Register models for new segments that are large enough to fit.
    for x in unique:
        if x not in self._group.models and \
                value_counts[x] > self.min_segment_size:
            self.add_segment(x)
    with log_start_finish(
            'fitting models in segmented model {}'.format(self.name),
            logger):
        return self._group.fit(data, debug=debug)
Fit each segment. Segments that have not already been explicitly added will be automatically added with default model and ytransform. Parameters ---------- data : pandas.DataFrame Must have a column with the same name as `segmentation_col`. debug : bool If set to true will pass debug to the fit method of each model. Returns ------- fits : dict of statsmodels.regression.linear_model.OLSResults Keys are the segment names.
def assign(self, pm): if isinstance(pm, QPixmap): self._pm = pm else: self._xpmstr = pm self._pm = None self._icon = None
Reassign pixmap or xpm string array to wrapper
def _get_deleted_at_column(self, builder): if len(builder.get_query().joins) > 0: return builder.get_model().get_qualified_deleted_at_column() else: return builder.get_model().get_deleted_at_column()
Get the "deleted at" column for the builder. :param builder: The query builder :type builder: orator.orm.builder.Builder :rtype: str
def pause(self, message: Optional[Message_T] = None, **kwargs) -> None:
    """Pause the session for further interaction.

    :param message: optional message sent to the user before pausing
    :param kwargs: extra keyword arguments forwarded to ``send``
    :raises _PauseException: always, to unwind control back to the
        session loop
    """
    if message:
        # Fire-and-forget: schedule the send without awaiting it, since
        # we are about to raise out of this frame.
        asyncio.ensure_future(self.send(message, **kwargs))
    raise _PauseException
Pause the session for further interaction.
def _resolve_key(self, key): with self._reserve(key): factory = self.factory_for(key) with self._profiler(key): component = factory(self) invoke_resolve_hook(component) return self.assign(key, component)
Attempt to lazily create a component. :raises NotBoundError: if the component does not have a bound factory :raises CyclicGraphError: if the factory function requires a cycle :raises LockedGraphError: if the graph is locked
def _recursive_merged_items(self, index): subdirs = [os.path.join(d, "parts", str(index)) for d in self.localdirs] m = ExternalMerger(self.agg, self.memory_limit, self.serializer, subdirs, self.scale * self.partitions, self.partitions, self.batch) m.pdata = [{} for _ in range(self.partitions)] limit = self._next_limit() for j in range(self.spills): path = self._get_spill_dir(j) p = os.path.join(path, str(index)) with open(p, 'rb') as f: m.mergeCombiners(self.serializer.load_stream(f), 0) if get_used_memory() > limit: m._spill() limit = self._next_limit() return m._external_items()
Merge the partitioned items and return them as an iterator. If one partition cannot fit in memory, it will be partitioned and merged recursively.
def create_alarm_subscription(self, on_data=None, timeout=60): manager = WebSocketSubscriptionManager(self._client, resource='alarms') subscription = AlarmSubscription(manager) wrapped_callback = functools.partial( _wrap_callback_parse_alarm_data, subscription, on_data) manager.open(wrapped_callback, instance=self._instance, processor=self._processor) subscription.reply(timeout=timeout) return subscription
Create a new alarm subscription. :param on_data: Function that gets called with :class:`.AlarmEvent` updates. :param float timeout: The amount of seconds to wait for the request to complete. :return: A Future that can be used to manage the background websocket subscription. :rtype: .AlarmSubscription
def from_file(cls, h5_file): return cls({ country: HDF5DailyBarReader.from_file(h5_file, country) for country in h5_file.keys() })
Construct from an h5py.File. Parameters ---------- h5_file : h5py.File An HDF5 daily pricing file.
def geometricBar(weights, alldistribT):
    """Return the weighted geometric mean of the distributions.

    Each column of ``alldistribT`` is one distribution; ``weights`` holds
    one weight per distribution. Computed in log space for stability:
    exp(log(alldistribT) @ weights).
    """
    n_distributions = alldistribT.shape[1]
    assert len(weights) == n_distributions
    log_mean = np.dot(np.log(alldistribT), weights.T)
    return np.exp(log_mean)
return the weighted geometric mean of distributions
def read_plugin_config(self): folders = self.config["pluginfolders"] modules = plugins.get_plugin_modules(folders) for pluginclass in plugins.get_plugin_classes(modules): section = pluginclass.__name__ if self.has_section(section): self.config["enabledplugins"].append(section) self.config[section] = pluginclass.read_config(self)
Read plugin-specific configuration values.
def download(self, streamed=False, action=None, chunk_size=1024, **kwargs): path = '/projects/%s/export/download' % self.project_id result = self.manager.gitlab.http_get(path, streamed=streamed, raw=True, **kwargs) return utils.response_content(result, streamed, action, chunk_size)
Download the archive of a project export. Args: streamed (bool): If True the data will be processed by chunks of `chunk_size` and each chunk is passed to `action` for treatment action (callable): Callable responsible for dealing with each chunk of data chunk_size (int): Size of each chunk **kwargs: Extra options to send to the server (e.g. sudo) Raises: GitlabAuthenticationError: If authentication is not correct GitlabGetError: If the server failed to perform the request Returns: str: The blob content if streamed is False, None otherwise
def _build_idp_config_endpoints(self, config, providers): idp_endpoints = [] for endp_category in self.endpoints: for func, endpoint in self.endpoints[endp_category].items(): for provider in providers: _endpoint = "{base}/{provider}/{endpoint}".format( base=self.base_url, provider=provider, endpoint=endpoint) idp_endpoints.append((_endpoint, func)) config["service"]["idp"]["endpoints"][endp_category] = idp_endpoints return config
Builds the final frontend module config :type config: dict[str, Any] :type providers: list[str] :rtype: dict[str, Any] :param config: The module config :param providers: A list of backend names :return: The final config
def handle():
    """Main program execution handler.

    Runs the Zappa CLI, making sure ``on_exit`` cleanup runs on normal
    exit, Ctrl-C and unexpected errors, then exits with an appropriate
    status code.
    """
    # Bug fix: ``cli`` must be pre-bound — if ZappaCLI() itself raises,
    # the original handlers hit a NameError on ``cli.on_exit()`` that
    # masked the real error.
    cli = None
    try:
        cli = ZappaCLI()
        sys.exit(cli.handle())
    except SystemExit as e:
        if cli is not None:
            cli.on_exit()
        sys.exit(e.code)
    except KeyboardInterrupt:
        if cli is not None:
            cli.on_exit()
        sys.exit(130)  # conventional exit code for SIGINT
    except Exception:
        if cli is not None:
            cli.on_exit()
        click.echo("Oh no! An " + click.style("error occurred", fg='red', bold=True) + "! :(")
        click.echo("\n==============\n")
        import traceback
        traceback.print_exc()
        click.echo("\n==============\n")
        shamelessly_promote()
        sys.exit(-1)
Main program execution handler.
def get_instance(self, payload): return AuthCallsCredentialListMappingInstance( self._version, payload, account_sid=self._solution['account_sid'], domain_sid=self._solution['domain_sid'], )
Build an instance of AuthCallsCredentialListMappingInstance :param dict payload: Payload response from the API :returns: twilio.rest.api.v2010.account.sip.domain.auth_types.auth_calls_mapping.auth_calls_credential_list_mapping.AuthCallsCredentialListMappingInstance :rtype: twilio.rest.api.v2010.account.sip.domain.auth_types.auth_calls_mapping.auth_calls_credential_list_mapping.AuthCallsCredentialListMappingInstance
def build_cycle_time(self, build_id): json_form = self.__retrieve_as_json(self.builds_path % build_id) return BuildCycleTime( build_id, json_form[u'buildTypeId'], as_date(json_form, u'startDate'), (as_date(json_form, u'finishDate') - as_date(json_form, u'queuedDate')).seconds * 1000 )
Returns a BuildCycleTime object for the given build
def _get_cmd(cmd): check_cmd = "RunTHetA.py" try: local_cmd = subprocess.check_output(["which", check_cmd]).strip() except subprocess.CalledProcessError: return None return [sys.executable, "%s/%s" % (os.path.dirname(os.path.realpath(local_cmd)), cmd)]
Retrieve required commands for running THetA with our local bcbio python.
def process_wildcard(fractions):
    """Balance a composition containing wildcard (``'?'``) weight fractions.

    Every entry whose value is the string ``'?'`` receives an equal share
    of whatever fraction is needed to bring the total to 1.0. The input
    mapping is updated in place and also returned.
    """
    wildcard_keys = {z for z, fraction in fractions.items() if fraction == '?'}
    if not wildcard_keys:
        return fractions
    known_total = sum(f for f in fractions.values() if f != '?')
    share = (1.0 - known_total) / len(wildcard_keys)
    for z in wildcard_keys:
        fractions[z] = share
    return fractions
Processes element with a wildcard ``?`` weight fraction and returns composition balanced to 1.0.
def download_file(self, url, local_path):
    """Download a file from a remote host to ``local_path``.

    Args:
        url (str): Remote URL to fetch.
        local_path (str): Destination file path.

    Raises:
        requests.HTTPError: If the server responds with an error status
            (previously the error page was silently written to disk).
    """
    response = requests.get(url, stream=True)
    try:
        # Fail loudly on 4xx/5xx instead of saving the error body.
        response.raise_for_status()
        with open(local_path, 'wb') as f:
            for chunk in tqdm(response.iter_content(chunk_size=1024), unit='KB'):
                if chunk:  # skip keep-alive chunks
                    f.write(chunk)
                    f.flush()
    finally:
        # Release the streamed connection back to the pool.
        response.close()
Download a file from a remote host.
def list_values(hive, key=None, use_32bit_registry=False, include_default=True): r return __utils__['reg.list_values'](hive=hive, key=key, use_32bit_registry=use_32bit_registry, include_default=include_default)
r''' Enumerates the values in a registry key or hive. Args: hive (str): The name of the hive. Can be one of the following: - HKEY_LOCAL_MACHINE or HKLM - HKEY_CURRENT_USER or HKCU - HKEY_USER or HKU - HKEY_CLASSES_ROOT or HKCR - HKEY_CURRENT_CONFIG or HKCC key (str): The key (looks like a path) to the value name. If a key is not passed, the values under the hive will be returned. use_32bit_registry (bool): Accesses the 32bit portion of the registry on 64 bit installations. On 32bit machines this is ignored. include_default (bool): Toggle whether to include the '(Default)' value. Returns: list: A list of values under the hive or key. CLI Example: .. code-block:: bash salt '*' reg.list_values HKLM 'SYSTEM\\CurrentControlSet\\Services\\Tcpip'
def lazy_property(fn):
    """Decorator that makes a property lazy-evaluated whilst preserving docstrings.

    The wrapped function runs at most once per instance; its result is
    cached on the instance under a ``_lazy_``-prefixed attribute.

    Args:
        fn (function): the property in question

    Returns:
        property: read-only property computing ``fn`` on first access.
    """
    cache_name = '_lazy_' + fn.__name__

    @property
    @wraps(fn)
    def _lazy_property(self):
        try:
            return getattr(self, cache_name)
        except AttributeError:
            value = fn(self)
            setattr(self, cache_name, value)
            return value

    return _lazy_property
Decorator that makes a property lazy-evaluated whilst preserving docstrings. Args: fn (function): the property in question Returns: evaluated version of the property.
def get_opener(self, name):
    """Retrieve an opener for the given protocol.

    :param name: name of the opener to open
    :type name: string

    :raises NoOpenerError: if no opener has been registered of that name
    """
    if name in self.registry:
        return self.openers[self.registry[name]]
    raise NoOpenerError("No opener for %s" % name)
Retrieve an opener for the given protocol :param name: name of the opener to open :type name: string :raises NoOpenerError: if no opener has been registered of that name
def connect(self, nice_quit_ev): _logger.debug("Connecting to explicit server node: [%s]", self.server_host) try: c = self.primitive_connect() except gevent.socket.error: _logger.exception("Could not connect to explicit server: [%s]", self.server_host) raise nsq.exceptions.NsqConnectGiveUpError( "Could not connect to the nsqd server: [%s]" % (self.server_host,)) _logger.info("Explicit server-node connected: [%s]", self.server_host) return c
Connect the server. We expect this to implement connection logistics for servers that were explicitly prescribed to us.
def iterate_schema(fields, schema, path_prefix=''):
    """Iterate over all schema sub-fields.

    Recurses into ``group`` entries, yielding a triple for every leaf
    field definition. Some field values might be None.

    :param fields: field values to iterate over
    :type fields: dict
    :param schema: schema to iterate over
    :type schema: dict
    :param path_prefix: dot separated path prefix
    :type path_prefix: str
    :return: (field schema, field value, field path)
    :rtype: tuple
    """
    if path_prefix and path_prefix[-1] != '.':
        path_prefix += '.'
    for field_schema in schema:
        name = field_schema['name']
        path = '{}{}'.format(path_prefix, name)
        if 'group' in field_schema:
            subfields = fields.get(name, {})
            for item in iterate_schema(subfields, field_schema['group'], path):
                yield item
        else:
            yield (field_schema, fields, path)
Iterate over all schema sub-fields. This will iterate over all field definitions in the schema. Some field values might be None. :param fields: field values to iterate over :type fields: dict :param schema: schema to iterate over :type schema: dict :param path_prefix: dot separated path prefix :type path_prefix: str :return: (field schema, field value, field path) :rtype: tuple
def add_ds_ids_from_files(self): for file_handlers in self.file_handlers.values(): try: fh = file_handlers[0] avail_ids = fh.available_datasets() except NotImplementedError: continue for ds_id, ds_info in avail_ids: coordinates = ds_info.get('coordinates') if isinstance(coordinates, list): ds_info['coordinates'] = tuple(ds_info['coordinates']) self.ids.setdefault(ds_id, ds_info)
Check files for more dynamically discovered datasets.
def case_sensitive_name(self, package_name):
    """Return case-sensitive package name given any-case package name.

    Looks the name up in ``self.environment`` and returns the first
    distribution's ``project_name``; returns ``None`` when no
    distribution is known under that name.
    """
    candidates = self.environment[package_name]
    if not candidates:
        return None
    return candidates[0].project_name
Return case-sensitive package name given any-case package name @param project_name: PyPI project name @type project_name: string
def download_if_not_exists(url: str, filename: str, skip_cert_verify: bool = True, mkdir: bool = True) -> None: if os.path.isfile(filename): log.info("No need to download, already have: {}", filename) return if mkdir: directory, basename = os.path.split(os.path.abspath(filename)) mkdir_p(directory) download(url=url, filename=filename, skip_cert_verify=skip_cert_verify)
Downloads a URL to a file, unless the file already exists.
def is_valid(obj: JSGValidateable, log: Optional[Union[TextIO, Logger]] = None) -> bool: return obj._is_valid(log)
Determine whether obj is valid :param obj: Object to validate :param log: Logger to record validation failures. If absent, no information is recorded
def reset(self):
    """Reset the field to its default state."""
    self._current_index = -1  # sentinel: no mutation index selected yet
    self._current_value = self._default_value
    self._current_rendered = self._default_rendered
    self.offset = None
Reset the field to its default state
def _create_scaling_policies(conn, as_name, scaling_policies): 'helper function to create scaling policies' if scaling_policies: for policy in scaling_policies: policy = autoscale.policy.ScalingPolicy( name=policy["name"], as_name=as_name, adjustment_type=policy["adjustment_type"], scaling_adjustment=policy["scaling_adjustment"], min_adjustment_step=policy.get("min_adjustment_step", None), cooldown=policy["cooldown"]) conn.create_scaling_policy(policy)
helper function to create scaling policies
def load_xml(self, xmlfile): self.logger.info('Loading XML') for c in self.components: c.load_xml(xmlfile) for name in self.like.sourceNames(): self.update_source(name) self._fitcache = None self.logger.info('Finished Loading XML')
Load model definition from XML. Parameters ---------- xmlfile : str Name of the input XML file.
def blocks(self, lines):
    """Groups lines into markdown blocks.

    Uses a markdown blockparser ``State`` as a small state machine: the
    'start' state opens a new block, a fenced block opened by ``` stays
    in the '```' state, and anything else stays in the '\n' state.

    :param lines: iterable of lines without trailing newlines
    :return: list of block strings (newlines restored)
    """
    state = markdown.blockparser.State()
    blocks = []
    state.set('start')
    currblock = 0
    for line in lines:
        line += '\n'  # restore the newline stripped by the caller
        if state.isstate('start'):
            # First line of a new block decides its type.
            if line[:3] == '```':
                state.set('```')
            else:
                state.set('\n')
            blocks.append('')
            currblock = len(blocks) - 1
        else:
            marker = line[:3]
            # A line matching the current state marker (e.g. a closing
            # ``` fence) pops back to 'start' so the next line opens a
            # fresh block.
            if state.isstate(marker):
                state.reset()
        blocks[currblock] += line
    return blocks
Groups lines into markdown blocks
def graph_edges(self): edges = nx.get_edge_attributes(self._graph, 'branch').items() edges_sorted = sorted(list(edges), key=lambda _: (''.join(sorted([repr(_[0][0]),repr(_[0][1])])))) for edge in edges_sorted: yield {'adj_nodes': edge[0], 'branch': edge[1]}
Returns a generator for iterating over graph edges The edge of a graph is described by the two adjacent node and the branch object itself. Whereas the branch object is used to hold all relevant power system parameters. Yields ------ int Description #TODO check Note ---- There are generator functions for nodes (`Graph.nodes()`) and edges (`Graph.edges()`) in NetworkX but unlike graph nodes, which can be represented by objects, branch objects can only be accessed by using an edge attribute ('branch' is used here) To make access to attributes of the branch objects simpler and more intuitive for the user, this generator yields a dictionary for each edge that contains information about adjacent nodes and the branch object. Note, the construction of the dictionary highly depends on the structure of the in-going tuple (which is defined by the needs of networkX). If this changes, the code will break.
def incr(self, att, val=1):
    """Increment the counter attribute ``att`` by ``val``.

    Args:
        att: Name of the counter; must be listed in ``self.counters``.
        val (int): Amount to add (default 1).

    Raises:
        ValueError: If ``att`` is not a registered counter.
    """
    if att not in self.counters:
        # Bug fix: the original raised ValueError("%s is not a counter.")
        # without interpolating ``att``, leaving a literal "%s" in the
        # error message.
        raise ValueError("%s is not a counter." % att)
    self.db.hincrby(self.key(), att, val)
Increments a counter.
def _write_scalar(self, name:str, scalar_value, iteration:int)->None: "Writes single scalar value to Tensorboard." tag = self.metrics_root + name self.tbwriter.add_scalar(tag=tag, scalar_value=scalar_value, global_step=iteration)
Writes single scalar value to Tensorboard.
def GetClientOs(client_id, token=None): if data_store.RelationalDBEnabled(): kb = data_store.REL_DB.ReadClientSnapshot(client_id).knowledge_base else: with aff4.FACTORY.Open(client_id, token=token) as client: kb = client.Get(client.Schema.KNOWLEDGE_BASE) return kb.os
Returns last known operating system name that the client used.
def _resize(self, ratio_x, ratio_y, resampling): new_width = int(np.ceil(self.width * ratio_x)) new_height = int(np.ceil(self.height * ratio_y)) dest_affine = self.affine * Affine.scale(1 / ratio_x, 1 / ratio_y) if self.not_loaded(): window = rasterio.windows.Window(0, 0, self.width, self.height) resized_raster = self.get_window(window, xsize=new_width, ysize=new_height, resampling=resampling) else: resized_raster = self._reproject(new_width, new_height, dest_affine, resampling=resampling) return resized_raster
Return raster resized by ratio.
def listen_tta(self, target, timeout):
    """Listen as Type A Target is not supported on this device.

    :raises nfc.clf.UnsupportedTargetError: always
    """
    info = "{device} does not support listen as Type A Target"
    raise nfc.clf.UnsupportedTargetError(info.format(device=self))
Listen as Type A Target is not supported.
def deprecated(instructions): def decorator(func): @functools.wraps(func) def wrapper(*args, **kwargs): message = 'Call to deprecated function {}. {}'.format(func.__name__, instructions) frame = inspect.currentframe().f_back warnings.warn_explicit(message, category=DeprecatedWarning, filename=inspect.getfile(frame.f_code), lineno=frame.f_lineno) return func(*args, **kwargs) return wrapper return decorator
Flags a method as deprecated. :param instructions: A human-friendly string of instructions, such as: 'Please migrate to add_proxy() ASAP.' :return: DeprecatedWarning
def get_app_content_types(self): from django.contrib.contenttypes.models import ContentType return [ContentType.objects.get_for_model(c) for c in self.get_app_model_classes()]
Return a list of all content_types for this app.
def setup_rabbitmq(self): if not self.rabbitmq_key: self.rabbitmq_key = '{}:start_urls'.format(self.name) self.server = connection.from_settings(self.crawler.settings) self.crawler.signals.connect(self.spider_idle, signal=signals.spider_idle) self.crawler.signals.connect(self.item_scraped, signal=signals.item_scraped)
Setup RabbitMQ connection. Call this method after spider has set its crawler object. :return: None
def format_exc_skip(skip, limit=None):
    """Like ``traceback.format_exc`` but allow skipping the first `skip` frames."""
    etype, val, tb = sys.exc_info()
    # Walk past the frames the caller asked us to hide.
    for _ in range(skip):
        tb = tb.tb_next
    formatted = ''.join(format_exception(etype, val, tb, limit))
    return formatted.rstrip()
Like traceback.format_exc but allow skipping the first frames.
def _add_variable_proxy_methods(var, proxy_tensor): proxy_tensor.read_value = lambda: tf.identity(proxy_tensor) proxy_tensor.assign_sub = var.assign_sub proxy_tensor.assign = var.assign proxy_tensor.initialized_value = var.initialized_value
Proxy methods of underlying variable. This enables our custom getters to still work with, e.g., batch norm. Args: var: Variable to proxy proxy_tensor: Tensor that is identity of var
def has_cwd(state, dir, incorrect_msg="Your current working directory should be `{{dir}}`. Use `cd {{dir}}` to navigate there."): expr = "[[ $PWD == '{}' ]]".format(dir) _msg = state.build_message(incorrect_msg, fmt_kwargs={ 'dir': dir }) has_expr_exit_code(state, expr, output="0", incorrect_msg=_msg) return state
Check whether the student is in the expected directory. This check is typically used before using ``has_expr_output()`` to make sure the student didn't navigate somewhere else. Args: state: State instance describing student and solution code. Can be omitted if used with ``Ex()``. dir: Directory that the student should be in. Always use the absolute path. incorrect_msg: If specified, this overrides the automatically generated message in case the student is not in the expected directory. :Example: If you want to be sure that the student is in ``/home/repl/my_dir``: :: Ex().has_cwd('/home/repl/my_dir')
def timed(function):
    """Decorator timing the method call and printing the result to `stdout`.

    Args:
        function: Bound-method-style callable whose first argument is the
            instance (``obj``).

    Returns:
        The wrapped function; it prints ``Class.method: N.NNNN seconds``
        and returns the original result unchanged.
    """
    @wraps(function)
    def function_wrapper(obj, *args, **kwargs):
        name = obj.__class__.__name__ + '.' + function.__name__
        # Bug fix: time.clock() was removed in Python 3.8;
        # time.perf_counter() is the documented replacement for
        # measuring elapsed intervals.
        start = time.perf_counter()
        result = function(obj, *args, **kwargs)
        print('{}: {:.4f} seconds'.format(name, time.perf_counter() - start))
        return result
    return function_wrapper
Decorator timing the method call and printing the result to `stdout`
def create_datasource(jboss_config, name, datasource_properties, profile=None): log.debug("======================== MODULE FUNCTION: jboss7.create_datasource, name=%s, profile=%s", name, profile) ds_resource_description = __get_datasource_resource_description(jboss_config, name, profile) operation = '/subsystem=datasources/data-source="{name}":add({properties})'.format( name=name, properties=__get_properties_assignment_string(datasource_properties, ds_resource_description) ) if profile is not None: operation = '/profile="{profile}"'.format(profile=profile) + operation return __salt__['jboss7_cli.run_operation'](jboss_config, operation, fail_on_error=False)
Create datasource in running jboss instance jboss_config Configuration dictionary with properties specified above. name Datasource name datasource_properties A dictionary of datasource properties to be created: - driver-name: mysql - connection-url: 'jdbc:mysql://localhost:3306/sampleDatabase' - jndi-name: 'java:jboss/datasources/sampleDS' - user-name: sampleuser - password: secret - min-pool-size: 3 - use-java-context: True profile The profile name (JBoss domain mode only) CLI Example: .. code-block:: bash salt '*' jboss7.create_datasource '{"cli_path": "integration.modules.sysmod.SysModuleTest.test_valid_docs", "controller": "10.11.12.13:9999", "cli_user": "jbossadm", "cli_password": "jbossadm"}' 'my_datasource' '{"driver-name": "mysql", "connection-url": "jdbc:mysql://localhost:3306/sampleDatabase", "jndi-name": "java:jboss/datasources/sampleDS", "user-name": "sampleuser", "password": "secret", "min-pool-size": 3, "use-java-context": True}'
def start(self): scheduler_threads = [ Thread(target=self._monitor_events, daemon=True), Thread(target=self._processing_controller_status, daemon=True), Thread(target=self._schedule_processing_blocks, daemon=True), Thread(target=self._monitor_pbc_status, daemon=True) ] for thread in scheduler_threads: thread.start() try: for thread in scheduler_threads: thread.join() except KeyboardInterrupt: LOG.info('Keyboard interrupt!') sys.exit(0) finally: LOG.info('Finally!')
Start the scheduler threads.
def newAddress(self, currency='btc', label=''): request = '/v1/deposit/' + currency + '/newAddress' url = self.base_url + request params = { 'request': request, 'nonce': self.get_nonce() } if label != '': params['label'] = label return requests.post(url, headers=self.prepare(params))
Send a request for a new cryptocurrency deposit address with an optional label. Return the response. Arguements: currency -- a Gemini supported cryptocurrency (btc, eth) label -- optional label for the deposit address
def import_address(self, address, account="*", rescan=False):
    """Import a watch-only address via the ``importaddress`` RPC call.

    :param address: address to import
    :param account: account name to use (default ``"*"``)
    :param rescan: whether to rescan the blockchain for past transactions
    :raises Exception: if the RPC response carries a non-None error
    :return: the raw RPC response dict
    """
    response = self.make_request("importaddress", [address, account, rescan])
    error = response.get('error')
    if error is None:
        return response
    raise Exception(error)
param address = address to import param label= account name to use
async def _wrap_ws(self, handler, *args, **kwargs): try: method = self.request_method() data = await handler(self, *args, **kwargs) status = self.responses.get(method, OK) response = { 'type': 'response', 'key': getattr(self.request, 'key', None), 'status': status, 'payload': data } except Exception as ex: response = { 'type': 'response', 'key': getattr(self.request, 'key', None), 'status': getattr(ex, 'status', 500), 'payload': getattr(ex, 'msg', 'general error') } return self.format(method, response)
wraps a handler by receiving a websocket request and returning a websocket response
def deinstall(name): portpath = _check_portname(name) old = __salt__['pkg.list_pkgs']() result = __salt__['cmd.run_all']( ['make', 'deinstall', 'BATCH=yes'], cwd=portpath, python_shell=False ) __context__.pop('pkg.list_pkgs', None) new = __salt__['pkg.list_pkgs']() return salt.utils.data.compare_dicts(old, new)
De-install a port. CLI Example: .. code-block:: bash salt '*' ports.deinstall security/nmap
def create(self):
    """Create a normal field from the lazy field.

    Instantiates ``self.klass`` with the stored positional and keyword
    arguments.
    """
    # Guard: a lazy field is presumably materialized only once
    # (``self.called`` set elsewhere — confirm). NOTE(review): assert is
    # stripped under ``python -O``; raise explicitly if this guard matters.
    assert not self.called
    return self.klass(*self.args, **self.kw)
Create a normal field from the lazy field
def bisine_wave(frequency): f_hi = frequency f_lo = frequency / 2.0 with tf.name_scope('hi'): sine_hi = sine_wave(f_hi) with tf.name_scope('lo'): sine_lo = sine_wave(f_lo) return tf.concat([sine_lo, sine_hi], axis=2)
Emit two sine waves, in stereo at different octaves.
def column(self, column, option=None, **kw): config = False if option == 'type': return self._column_types[column] elif 'type' in kw: config = True self._column_types[column] = kw.pop('type') if kw: self._visual_drag.column(ttk.Treeview.column(self, column, 'id'), option, **kw) if kw or option: return ttk.Treeview.column(self, column, option, **kw) elif not config: res = ttk.Treeview.column(self, column, option, **kw) res['type'] = self._column_types[column] return res
Query or modify the options for the specified column. If `kw` is not given, returns a dict of the column option values. If `option` is specified then the value for that option is returned. Otherwise, sets the options to the corresponding values. :param id: the column's identifier (read-only option) :param anchor: "n", "ne", "e", "se", "s", "sw", "w", "nw", or "center": alignment of the text in this column with respect to the cell :param minwidth: minimum width of the column in pixels :type minwidth: int :param stretch: whether the column's width should be adjusted when the widget is resized :type stretch: bool :param width: width of the column in pixels :type width: int :param type: column's content type (for sorting), default type is `str` :type type: type
def run(self, args): jlink = self.create_jlink(args) mcu = args.name[0].lower() if pylink.unlock(jlink, mcu): print('Successfully unlocked device!') else: print('Failed to unlock device!')
Unlocks the target device. Args: self (UnlockCommand): the ``UnlockCommand`` instance args (Namespace): the arguments passed on the command-line Returns: ``None``
def create_send_message(self, string_message, controller, zone=None, parameter=None): cc = hex(int(controller) - 1).replace('0x', '') if zone is not None: zz = hex(int(zone) - 1).replace('0x', '') else: zz = '' if parameter is not None: pr = hex(int(parameter)).replace('0x', '') else: pr = '' string_message = string_message.replace('@cc', cc) string_message = string_message.replace('@zz', zz) string_message = string_message.replace('@kk', KEYPAD_CODE) string_message = string_message.replace('@pr', pr) send_msg = string_message.split() send_msg = self.calc_checksum(send_msg) return send_msg
Creates a message from a string, substituting the necessary parameters, that is ready to send to the socket
def update_firmware(self, file, data=None, progress=None, bank=None): self.oem_init() if progress is None: progress = lambda x: True return self._oem.update_firmware(file, data, progress, bank)
Send file to BMC to perform firmware update :param filename: The filename to upload to the target BMC :param data: The payload of the firmware. Default is to read from specified filename. :param progress: A callback that will be given a dict describing update process. Provide if :param bank: Indicate a target 'bank' of firmware if supported
def _get_snmp(self, oid): if self.snmp_version in ["v1", "v2c"]: return self._get_snmpv2c(oid) else: return self._get_snmpv3(oid)
Wrapper for generic SNMP call.
def extern_store_i64(self, context_handle, i64):
    """Given a context handle and a 64-bit integer, return a new value
    handle representing it."""
    # Recover the context object from its FFI handle, then wrap the value.
    c = self._ffi.from_handle(context_handle)
    return c.to_value(i64)
Given a context and int32_t, return a new Handle to represent the int32_t.
def pypi( click_ctx, requirements, index=None, python_version=3, exclude_packages=None, output=None, subgraph_check_api=None, no_transitive=True, no_pretty=False, ): requirements = [requirement.strip() for requirement in requirements.split("\\n") if requirement] if not requirements: _LOG.error("No requirements specified, exiting") sys.exit(1) if not subgraph_check_api: _LOG.info( "No subgraph check API provided, no queries will be done for dependency subgraphs that should be avoided" ) result = resolve_python( requirements, index_urls=index.split(",") if index else ("https://pypi.org/simple",), python_version=int(python_version), transitive=not no_transitive, exclude_packages=set(map(str.strip, (exclude_packages or "").split(","))), subgraph_check_api=subgraph_check_api, ) print_command_result( click_ctx, result, analyzer=analyzer_name, analyzer_version=analyzer_version, output=output or "-", pretty=not no_pretty, )
Manipulate with dependency requirements using PyPI.
def analyze(self, using=None, **kwargs):
    """Perform the analysis process on a text and return the tokens
    breakdown of the text.

    :param using: optional connection alias to use
    :param kwargs: passed to ``Elasticsearch.indices.analyze`` unchanged
    """
    return self._get_connection(using).indices.analyze(index=self._name, **kwargs)
Perform the analysis process on a text and return the tokens breakdown of the text. Any additional keyword arguments will be passed to ``Elasticsearch.indices.analyze`` unchanged.
def _normalize_server_settings(**settings): ret = dict() settings = salt.utils.args.clean_kwargs(**settings) for setting in settings: if isinstance(settings[setting], dict): value_from_key = next(six.iterkeys(settings[setting])) ret[setting] = "{{{0}}}".format(value_from_key) else: ret[setting] = settings[setting] return ret
Convert setting values that has been improperly converted to a dict back to a string.
def filename(self): if self._filename is None: self._filename = storage.get_file(self.basename, self.ccd, ext=self.extension, version=self.type, prefix=self.prefix) return self._filename
Name if the MOP formatted file to parse. @rtype: basestring @return: filename
def _should_resolve_subgraph(subgraph_check_api: str, package_name: str, package_version: str, index_url: str) -> bool: _LOGGER.info( "Checking if the given dependency subgraph for package %r in version %r from index %r should be resolved", package_name, package_version, index_url, ) response = requests.get( subgraph_check_api, params={"package_name": package_name, "package_version": package_version, "index_url": index_url}, ) if response.status_code == 200: return True elif response.status_code == 208: return False response.raise_for_status() raise ValueError( "Unreachable code - subgraph check API responded with unknown HTTP status " "code %s for package %r in version %r from index %r", package_name, package_version, index_url, )
Ask the given subgraph check API if the given package in the given version should be included in the resolution. This subgraph resolving avoidence serves two purposes - we don't need to resolve dependency subgraphs that were already analyzed and we also avoid analyzing of "core" packages (like setuptools) where not needed as they can break installation environment.
def render_build_args(options, ns):
    """Get docker build args dict, rendering any templated args.

    Args:
        options (dict): The dictionary for a given image from
            chartpress.yaml. Fields in ``options['buildArgs']`` are
            rendered and returned, if defined.
        ns (dict): the namespace used when rendering templated arguments.

    Returns:
        dict: A new dict of rendered build args. The input ``options`` is
        left untouched (the original wrote rendered values back into it,
        mutating the caller's configuration).
    """
    build_args = options.get('buildArgs', {})
    return {key: value.format(**ns) for key, value in build_args.items()}
Get docker build args dict, rendering any templated args. Args: options (dict): The dictionary for a given image from chartpress.yaml. Fields in `options['buildArgs']` will be rendered and returned, if defined. ns (dict): the namespace used when rendering templated arguments
def delete(self, *, auto_commit=False):
    """Removes a resource from the database

    Args:
        auto_commit (bool): Automatically commit the transaction. Default: `False`

    Returns:
        `None`
    """
    try:
        db.session.delete(self.resource)
        if not auto_commit:
            return
        db.session.commit()
    except SQLAlchemyError:
        # Roll back so the session stays usable after a failed delete/commit.
        self.log.exception('Failed deleting resource: {}'.format(self.id))
        db.session.rollback()
Removes a resource from the database Args: auto_commit (bool): Automatically commit the transaction. Default: `False` Returns: `None`
def _get_lt_from_user_by_id(self, user, lt_id):
    """Get a layertemplate owned by a user from the database by lt_id.

    Returns None when no matching row exists (or on any query error).
    """
    query = meta.Session.query(LayerTemplate).select_from(join(LayerTemplate, User))
    try:
        return query.filter(and_(User.login == user, LayerTemplate.id == lt_id)).one()
    # Bug fix: `except Exception, e` is Python-2-only syntax and `e` was
    # unused; this form works on both Python 2.6+ and Python 3.
    except Exception:
        return None
Get a layertemplate owned by a user from the database by lt_id.
def _notify_listeners(self, sender, message):
    """Notifies listeners of a new message.

    Matches the message topic against registered listener topic patterns
    (fnmatch-style), dispatches to all matching listeners and acknowledges
    each stage ('fire', 'notice', 'send') back to the sender.
    """
    uid = message['uid']
    msg_topic = message['topic']
    self._ack(sender, uid, 'fire')
    all_listeners = set()
    for lst_topic, listeners in self.__listeners.items():
        if fnmatch.fnmatch(msg_topic, lst_topic):
            all_listeners.update(listeners)
    self._ack(sender, uid, 'notice', 'ok' if all_listeners else 'none')
    try:
        results = []
        for listener in all_listeners:
            result = listener.handle_message(sender, message['topic'], message['content'])
            if result:
                results.append(result)
        self._ack(sender, uid, 'send', json.dumps(results))
    # Bug fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
    except Exception:
        self._ack(sender, uid, 'send', "Error")
Notifies listeners of a new message
def get_partial_DOS(self):
    """Return frequency points and partial DOS as a tuple.

    Deprecated: use :meth:`get_projected_dos_dict` instead; this wrapper
    only repackages its result.
    """
    warnings.warn("Phonopy.get_partial_DOS is deprecated. "
                  "Use Phonopy.get_projected_dos_dict.",
                  DeprecationWarning)
    result = self.get_projected_dos_dict()
    return result['frequency_points'], result['projected_dos']
Return frequency points and partial DOS as a tuple. Projection is done to atoms and may be also done along directions depending on the parameters at run_partial_dos. Returns ------- A tuple with (frequency_points, partial_dos). frequency_points: ndarray shape=(frequency_sampling_points, ), dtype='double' partial_dos: shape=(frequency_sampling_points, projections), dtype='double'
def confirm_login_allowed(self, user):
    """Controls whether the given User may log in.

    This is a policy setting independent of end-user authentication: active
    users may log in, inactive users raise ``forms.ValidationError``.
    Returns None when login is allowed.
    """
    if user.is_active:
        return
    raise forms.ValidationError(
        self.error_messages['inactive'],
        code='inactive',
    )
Controls whether the given User may log in. This is a policy setting, independent of end-user authentication. This default behavior is to allow login by active users, and reject login by inactive users. If the given user cannot log in, this method should raise a ``forms.ValidationError``. If the given user may log in, this method should return None.
def verify_registration(request):
    """Verify registration via signature.

    Optionally logs the user in right away when auto-login is enabled in
    the registration settings.
    """
    user = process_verify_registration_data(request.data)
    if registration_settings.REGISTER_VERIFICATION_AUTO_LOGIN:
        extra_data = perform_login(request, user)
    else:
        extra_data = None
    return get_ok_response('User verified successfully', extra_data=extra_data)
Verify registration via signature.
def calc_temperature_stats(self):
    """Calculates statistics in order to derive diurnal patterns of temperature"""
    # Time shift derived from the observed temperature series at this
    # location/timezone -- presumably the lag of the daily maximum; confirm
    # against melodist.get_shift_by_data.
    self.temp.max_delta = melodist.get_shift_by_data(self.data.temp, self._lon, self._lat, self._timezone)
    # Normalized mean daily temperature course, computed per month.
    self.temp.mean_course = melodist.util.calculate_mean_daily_course_by_month(self.data.temp, normalize=True)
Calculates statistics in order to derive diurnal patterns of temperature
def _get_data_from_bigquery(self, queries): all_df = [] for query in queries: all_df.append(query.execute().result().to_dataframe()) df = pd.concat(all_df, ignore_index=True) return df
Get data from bigquery table or query.
def create_directory(self, path=None):
    """Create the directory for the given path.

    If path is None use the path of this instance.  Bug fix: the previous
    exists-then-create sequence was racy; creation is now attempted
    directly and an already-existing directory is not an error.

    :param path: the path to create
    :type path: str
    :returns: None
    :rtype: None
    :raises: OSError
    """
    if path is None:
        path = self.get_path()
    try:
        os.makedirs(path)
    except OSError:
        # Another process may have created it concurrently; only swallow
        # the error if the directory now exists.
        if not os.path.isdir(path):
            raise
Create the directory for the given path. If path is None use the path of this instance :param path: the path to create :type path: str :returns: None :rtype: None :raises: OSError
def writeline(self, line=b'', sep=b'\n', echo=None):
    """Write a byte sequence to the channel followed by a separator.

    Thin wrapper around :meth:`writelines` for a single line.

    Args:
        line(bytes): The line to send.
        sep(bytes): The separator appended after the line (``b'\\n'`` by default).
        echo(bool): Whether to echo the written data to stdout.
    """
    self.writelines([line], sep, echo)
Write a byte sequence to the channel and terminate it with the given separator (line feed by default).

Args:
    line(bytes): The line to send.
    sep(bytes): The separator to use after each line.
    echo(bool): Whether to echo the written data to stdout.

Raises:
    EOFError: If the channel was closed before all data was sent.
def _fail_if_contains_errors(response, sync_uuid=None):
    """Raise a RequestError if the response does not denote a successful request.

    Checks both the HTTP status code and, when a sync_uuid is given, the
    per-sync error entries embedded in the JSON payload.
    """
    if response.status_code != _HTTP_OK:
        raise RequestError(response)
    payload = response.json()
    if sync_uuid:
        sync_status = payload.get('sync_status', {})
        if sync_uuid in sync_status and 'error' in sync_status[sync_uuid]:
            raise RequestError(response)
Raise a RequestError Exception if a given response does not denote a successful request.
def auth_from_hass_config(path=None, **kwargs):
    """Initialize auth from HASS config.

    When no path is given, the HASS config directory is located automatically.
    """
    base = config.find_hass_config() if path is None else path
    return Auth(os.path.join(base, ".storage/auth"), **kwargs)
Initialize auth from HASS config.
def merge_data(path_data, request_data):
    """Merge data from the URI path and the request.

    Path data wins on key conflicts.  Either argument may be None or empty;
    neither input dict is mutated.
    """
    combined = dict(request_data) if request_data else {}
    combined.update(path_data or {})
    return combined
Merge data from the URI path and the request. Path data wins.
def num_inputs(self):
    """Return the number of connected inputs.

    Inputs whose walker is an InvalidStreamWalker are not counted.

    Returns:
        int: The number of connected inputs
    """
    return sum(
        1 for walker, _ in self.inputs
        if not isinstance(walker, InvalidStreamWalker)
    )
Return the number of connected inputs. Returns: int: The number of connected inputs
def incident_exists(name, message, status):
    """Check if an incident with these attributes already exists

    Name and status must match exactly; messages are compared after
    stripping surrounding whitespace.
    """
    incidents = cachet.Incidents(endpoint=ENDPOINT)
    data = json.loads(incidents.get())['data']
    wanted_message = message.strip()
    return any(
        incident['name'] == name
        and incident['status'] == status
        and incident['message'].strip() == wanted_message
        for incident in data
    )
Check if an incident with these attributes already exists
def secondary_xi(mass1, mass2, spin1x, spin1y, spin2x, spin2y):
    """Returns the effective precession spin argument for the smaller mass."""
    sx = secondary_spin(mass1, mass2, spin1x, spin2x)
    sy = secondary_spin(mass1, mass2, spin1y, spin2y)
    return xi2_from_mass1_mass2_spin2x_spin2y(mass1, mass2, sx, sy)
Returns the effective precession spin argument for the smaller mass.
def remove(self, bw):
    """Removes a buffer watch identifier.

    @type  bw: L{BufferWatch}
    @param bw: Buffer watch identifier.

    @raise KeyError: The buffer watch identifier was already removed
        (suppressed for one-shot watches; presumably those auto-remove
        themselves after firing -- confirm against the watch lifecycle).
    """
    try:
        self.__ranges.remove(bw)
    except KeyError:
        # A missing entry is only an error for non-one-shot watches.
        if not bw.oneshot:
            raise
Removes a buffer watch identifier. @type bw: L{BufferWatch} @param bw: Buffer watch identifier. @raise KeyError: The buffer watch identifier was already removed.
def parse_type(field):
    """Render the parser call that reads this field from the binary payload.

    Returns a Java expression string such as ``parser.getString(8)``,
    ``parser.getInt()`` or ``new Foo().parse(parser)`` depending on the
    field's type_id and options.
    """
    if field.type_id == 'string':
        # Strings may carry a fixed 'size' option.
        if 'size' in field.options:
            return "parser.getString(%d)" % field.options['size'].value
        else:
            return "parser.getString()"
    elif field.type_id in JAVA_TYPE_MAP:
        # Primitive types map directly onto parser.getXxx() accessors.
        return "parser.get" + field.type_id.capitalize() + "()"
    if field.type_id == 'array':
        # 'fill' names the element type of the array.
        t = field.options['fill'].value
        if t in JAVA_TYPE_MAP:
            # Array of primitives, optionally fixed-size.
            if 'size' in field.options:
                return "parser.getArrayof%s(%d)" % (t.capitalize(), field.options['size'].value)
            else:
                return "parser.getArrayof%s()" % t.capitalize()
        else:
            # Array of user-defined types, referenced by class.
            if 'size' in field.options:
                return "parser.getArray(%s.class, %d)" % (t, field.options['size'].value)
            else:
                return "parser.getArray(%s.class)" % t
    else:
        # User-defined type: delegate to its own parse method.
        return "new %s().parse(parser)" % field.type_id
Function to pull a type from the binary payload.
def write_hyper_response(self, links=None, meta=None, entity_name=None, entity=None,
                         notifications=None, actions=None):
    """Writes a hyper media response object

    Bug fix: the previous signature used mutable default arguments, and
    ``meta.update(...)`` mutated both the shared default dict and any
    caller-supplied meta dict.  Defaults are now None and meta is copied.

    :param list links: A list of links to the resources
    :param dict meta: The meta data for this response
    :param str entity_name: The entity name
    :param object entity: The Entity itself
    :param list notifications: List of notifications
    :param list actions: List of actions
    """
    assert entity_name is not None
    assert entity is not None

    meta = dict(meta) if meta else {}
    meta["status"] = self.get_status()

    self.write({
        "links": links if links is not None else [],
        "meta": meta,
        entity_name: entity,
        "notifications": notifications if notifications is not None else [],
        "actions": actions if actions is not None else [],
    })
Writes a hyper media response object :param list links: A list of links to the resources :param dict meta: The meta data for this response :param str entity_name: The entity name :param object entity: The Entity itself :param list notifications: List of notifications :param list actions: List of actions
def broadcast_identifier(self):
    """Unique identifier for this service instance, appended to the queue
    name for BROADCAST-type handlers.

    Returns None for non-broadcast handler types.  The default value (a
    fresh uuid) changes across restarts, which is incompatible with
    reliable delivery, so that combination raises
    EventHandlerConfigurationError.
    """
    if self.handler_type is not BROADCAST:
        return None
    if self.reliable_delivery:
        raise EventHandlerConfigurationError(
            "You are using the default broadcast identifier "
            "which is not compatible with reliable delivery. See "
            ":meth:`nameko.events.EventHandler.broadcast_identifier` "
            "for details.")
    return uuid.uuid4().hex
A unique string to identify a service instance for `BROADCAST` type handlers. The `broadcast_identifier` is appended to the queue name when the `BROADCAST` handler type is used. It must uniquely identify service instances that receive broadcasts. The default `broadcast_identifier` is a uuid that is set when the service starts. It will change when the service restarts, meaning that any unconsumed messages that were broadcast to the 'old' service instance will not be received by the 'new' one. :: @property def broadcast_identifier(self): # use a uuid as the identifier. # the identifier will change when the service restarts and # any unconsumed messages will be lost return uuid.uuid4().hex The default behaviour is therefore incompatible with reliable delivery. An alternative `broadcast_identifier` that would survive service restarts is :: @property def broadcast_identifier(self): # use the machine hostname as the identifier. # this assumes that only one instance of a service runs on # any given machine return socket.gethostname() If neither of these approaches are appropriate, you could read the value out of a configuration file :: @property def broadcast_identifier(self): return self.config['SERVICE_IDENTIFIER'] # or similar Broadcast queues are exclusive to ensure that `broadcast_identifier` values are unique. Because this method is a descriptor, it will be called during container creation, regardless of the configured `handler_type`. See :class:`nameko.extensions.Extension` for more details.
def pastdate(self, prompt, default=None):
    """Prompts user to input a date in the past.

    The default date, when given, is shown in the prompt as 'DD MM YYYY'.
    """
    text = "Enter a past date" if prompt is None else prompt
    if default is not None:
        text = "{0} [{1}]".format(text, default.strftime('%d %m %Y'))
    text += ': '
    return self.input(curry(filter_pastdate, default=default), text)
Prompts user to input a date in the past.
def add_photometry(self, compare_to_existing=True, **kwargs):
    """Add a `Photometry` instance to this entry."""
    self._add_cat_dict(Photometry,
                       self._KEYS.PHOTOMETRY,
                       compare_to_existing=compare_to_existing,
                       **kwargs)
Add a `Photometry` instance to this entry.
def get_paginated_response(self, data):
    """Annotate the response with pagination information.

    Accepts either a result dict (which must contain 'results') or a plain
    list of results; raises TypeError for a dict without 'results'.
    """
    pagination = {
        'next': self.get_next_link(),
        'previous': self.get_previous_link(),
        'count': self.get_result_count(),
        'num_pages': self.get_num_pages(),
    }
    if not isinstance(data, dict):
        data = {'results': data, 'pagination': pagination}
    elif 'results' in data:
        data['pagination'] = pagination
    else:
        raise TypeError(u'Malformed result dict')
    return Response(data)
Annotate the response with pagination information
def input_option(message, options="yn", error_message=None):
    """Reads a single-character option from the user with the given prompt.

    Keeps asking until a valid option is entered.

    NOTE(review): ``input`` here appears to be a project-local prompt helper
    (it takes a validator, a repeat flag and a transform), not the builtin --
    confirm against its definition.
    """
    def _valid(character):
        # Validator passed to the prompt helper: prints the error message for
        # invalid characters (returning None presumably signals "invalid").
        if character not in options:
            print(error_message % character)

    return input("%s [%s]" % (message, options), _valid, True, lambda a: a.lower())
Reads an option from the screen, with a specified prompt. Keeps asking until a valid option is sent by the user.
def guess_content_type_and_encoding(path):
    """Guess the content type and encoding of a path.

    Checks the explicit extension map first, then falls back to
    ``mimetypes``; defaults to "application/binary" when no content type
    is found.

    Args:
        path (str): the path to guess the mimetype of

    Returns:
        tuple: ``(content_type, encoding)``; encoding may be None.
    """
    for ext, content_type in _EXTENSION_TO_MIME_TYPE.items():
        if path.endswith(ext):
            # Bug fix: this branch previously returned a bare string while
            # the fallback below returns a (content_type, encoding) tuple.
            return content_type, None
    content_type, encoding = mimetypes.guess_type(path)
    content_type = content_type or "application/binary"
    return content_type, encoding
Guess the content type and encoding of a path, using ``mimetypes``.

Falls back to "application/binary" if no content type is found.

Args:
    path (str): the path to guess the mimetype of

Returns:
    tuple: the ``(content_type, encoding)`` of the file; encoding may be None
def delete_network(self, network):
    """Deletes the specified network

    Returns the backend's result when truthy, otherwise True.
    """
    net_id = self._find_network_id(network)
    result = self.network_conn.delete_network(network=net_id)
    return result or True
Deletes the specified network
def update_extent_from_rectangle(self):
    """Update extent value in GUI based from the QgsMapTool rectangle.

    .. note:: Delegates to update_extent()
    """
    # Re-show the dialog (hidden while the user drew the rectangle).
    self.show()
    # Swap the rectangle-drawing tool back out for the regular pan tool.
    self.canvas.unsetMapTool(self.rectangle_map_tool)
    self.canvas.setMapTool(self.pan_tool)
    rectangle = self.rectangle_map_tool.rectangle()
    # Only update when the user actually drew a rectangle.
    if rectangle:
        self.bounding_box_group.setTitle(
            self.tr('Bounding box from rectangle'))
        # Convert the canvas rectangle into a geographic extent array.
        extent = rectangle_geo_array(rectangle, self.iface.mapCanvas())
        self.update_extent(extent)
Update extent value in GUI based from the QgsMapTool rectangle. .. note:: Delegates to update_extent()
def to_representation(self, obj):
    """Convert given internal object instance into representation dict.

    Loops over all fields (skipping write-only ones), resolves each value
    via ``self.get_attribute`` using the field's optional source, and
    converts it with ``field.to_representation()``.  Missing (None) values
    become ``[]`` for many-fields and None otherwise.

    Args:
        obj (object): internal object that needs to be represented

    Returns:
        dict: representation dictionary
    """
    result = {}
    for name, field in self.fields.items():
        if field.write_only:
            continue
        value = self.get_attribute(obj, field.source or name)
        if value is None:
            result[name] = [] if field.many else None
        elif field.many:
            result[name] = [field.to_representation(item) for item in value]
        else:
            result[name] = field.to_representation(value)
    return result
Convert given internal object instance into representation dict. Representation dict may be later serialized to the content-type of choice in the resource HTTP method handler. This loops over all fields and retrieves source keys/attributes as field values with respect to optional field sources and converts each one using ``field.to_representation()`` method. Args: obj (object): internal object that needs to be represented Returns: dict: representation dictionary
def config(name, config, write=True):
    """Builds syslog-ng configuration.

    Intended for use from the state module, not directly by users.

    name: the id of the Salt document, or the format <statement name>.id
    config: the parsed YAML code
    write: if True, write the rendered config into the configuration file,
        otherwise just return it
    """
    # Register this statement in the module-level config tree, then render
    # the whole accumulated tree into syslog-ng syntax.
    _build_config_tree(name, config)
    configs = _render_configuration()
    # In test mode, only report what would be written.
    if __opts__.get('test', False):
        comment = 'State syslog_ng will write \'{0}\' into {1}'.format(
            configs, __SYSLOG_NG_CONFIG_FILE
        )
        return _format_state_result(name, result=None, comment=comment)
    # succ defaults to the write flag so that write=False reports success
    # without touching the file.
    succ = write
    if write:
        succ = _write_config(config=configs)
    return _format_state_result(name, result=succ, changes={'new': configs, 'old': ''})
Builds syslog-ng configuration. This function is intended to be used from the state module, users should not use it directly! name : the id of the Salt document or it is the format of <statement name>.id config : the parsed YAML code write : if True, it writes the config into the configuration file, otherwise just returns it CLI Example: .. code-block:: bash salt '*' syslog_ng.config name='s_local' config="[{'tcp':[{'ip':'127.0.0.1'},{'port':1233}]}]"
def mach2tas(Mach, H):
    """Convert Mach number to true airspeed using the local speed of sound
    at altitude H."""
    return Mach * vsound(H)
Mach number to True Airspeed
def add_child_book(self, book_id, child_id):
    """Adds a child to a book.

    Delegates to the catalog session when one is configured, otherwise to
    the hierarchy session.

    arg:    book_id (osid.id.Id): the ``Id`` of a book
    arg:    child_id (osid.id.Id): the ``Id`` of the new child
    """
    if self._catalog_session is None:
        return self._hierarchy_session.add_child(id_=book_id, child_id=child_id)
    return self._catalog_session.add_child_catalog(catalog_id=book_id, child_id=child_id)
Adds a child to a book. arg: book_id (osid.id.Id): the ``Id`` of a book arg: child_id (osid.id.Id): the ``Id`` of the new child raise: AlreadyExists - ``book_id`` is already a parent of ``child_id`` raise: NotFound - ``book_id`` or ``child_id`` not found raise: NullArgument - ``book_id`` or ``child_id`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure *compliance: mandatory -- This method must be implemented.*
def progress_status(self):
    """Displays progress in statusbar"""
    # Throttle UI updates: only refresh every self.freq processed lines.
    if self.line % self.freq == 0:
        text = self.statustext.format(nele=self.line, totalele=self.total_lines)
        if self.main_window.grid.actions.pasting:
            # During a paste operation, post an event instead of touching
            # the status bar widget directly.
            try:
                post_command_event(self.main_window, self.main_window.StatusBarMsg, text=text)
            except TypeError:
                # NOTE(review): TypeError is silently swallowed here --
                # presumably raised while the window is shutting down; confirm.
                pass
        else:
            self.main_window.GetStatusBar().SetStatusText(text)
        # Yield to the GUI event loop so the update is actually painted
        # (GTK backend only).
        if is_gtk():
            try:
                wx.Yield()
            except:
                pass
    self.line += 1
Displays progress in statusbar
def generate_ppi_network(
    ppi_graph_path: str,
    dge_list: List[Gene],
    max_adj_p: float,
    max_log2_fold_change: float,
    min_log2_fold_change: float,
    ppi_edge_min_confidence: Optional[float] = None,
    current_disease_ids_path: Optional[str] = None,
    disease_associations_path: Optional[str] = None,
) -> Network:
    """Generate the protein-protein interaction network.

    Disease associations are only loaded when both the association file and
    the current disease id file are provided.

    :return Network: Protein-protein interaction network with information on differential expression.
    """
    graph = parsers.parse_ppi_graph(ppi_graph_path, ppi_edge_min_confidence).simplify()

    disease_associations = None
    if disease_associations_path is not None and current_disease_ids_path is not None:
        disease_ids = parsers.parse_disease_ids(current_disease_ids_path)
        disease_associations = parsers.parse_disease_associations(
            disease_associations_path, disease_ids,
        )

    network = Network(
        graph,
        max_adj_p=max_adj_p,
        max_l2fc=max_log2_fold_change,
        min_l2fc=min_log2_fold_change,
    )
    network.set_up_network(dge_list, disease_associations=disease_associations)
    return network
Generate the protein-protein interaction network. :return Network: Protein-protein interaction network with information on differential expression.
def read_float(self, registeraddress, functioncode=3, numberOfRegisters=2):
    """Read an IEEE 754 floating point number from the slave.

    Floats occupy two (single precision) or four (double precision)
    consecutive 16-bit registers.

    Args:
        * registeraddress (int): The slave register start address (decimal).
        * functioncode (int): Modbus function code. Can be 3 or 4.
        * numberOfRegisters (int): Registers allocated for the float (2-4).

    Returns:
        The numerical value (float).

    Raises:
        ValueError, TypeError, IOError
    """
    # Only 'read holding registers' (3) and 'read input registers' (4)
    # are valid for reading a float.
    _checkFunctioncode(functioncode, [3, 4])
    _checkInt(numberOfRegisters, minvalue=2, maxvalue=4, description='number of registers')
    return self._genericCommand(functioncode, registeraddress, numberOfRegisters=numberOfRegisters, payloadformat='float')
Read a floating point number from the slave. Floats are stored in two or more consecutive 16-bit registers in the slave. The encoding is according to the standard IEEE 754. There are differences in the byte order used by different manufacturers. A floating point value of 1.0 is encoded (in single precision) as 3f800000 (hex). In this implementation the data will be sent as ``'\\x3f\\x80'`` and ``'\\x00\\x00'`` to two consecutetive registers . Make sure to test that it makes sense for your instrument. It is pretty straight-forward to change this code if some other byte order is required by anyone (see support section). Args: * registeraddress (int): The slave register start address (use decimal numbers, not hex). * functioncode (int): Modbus function code. Can be 3 or 4. * numberOfRegisters (int): The number of registers allocated for the float. Can be 2 or 4. ====================================== ================= =========== ================= Type of floating point number in slave Size Registers Range ====================================== ================= =========== ================= Single precision (binary32) 32 bits (4 bytes) 2 registers 1.4E-45 to 3.4E38 Double precision (binary64) 64 bits (8 bytes) 4 registers 5E-324 to 1.8E308 ====================================== ================= =========== ================= Returns: The numerical value (float). Raises: ValueError, TypeError, IOError