code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def start(ctx, **kwargs):
    """Start a vaping process."""
    update_context(ctx, kwargs)
    daemon = mk_daemon(ctx)
    # run in the foreground when debugging or when explicitly asked not to fork
    if ctx.debug or kwargs['no_fork']:
        daemon.run()
    else:
        daemon.start()
start a vaping process
def arbiter(**params):
    """Obtain the ``arbiter``.

    Returns the arbiter instance only when called from the arbiter
    context domain; otherwise returns nothing.
    """
    arbiter = get_actor()
    if arbiter is None:
        # no actor yet in this domain: spawn and register the arbiter
        return set_actor(_spawn_actor('arbiter', None, **params))
    elif arbiter.is_arbiter():
        return arbiter
Obtain the ``arbiter``. It returns the arbiter instance only if we are on the arbiter context domain, otherwise it returns nothing.
def stop(self, check=True):
    """Return a dictionary with the termination status.

    With ``check==False`` the termination conditions are not checked and
    the status might not reflect the current situation.
    """
    # invoke a user-supplied termination callback first (only when it is a
    # callable, not a string-valued option)
    if (check and self.countiter > 0 and self.opts['termination_callback'] and
            self.opts['termination_callback'] != str(self.opts['termination_callback'])):
        self.callbackstop = self.opts['termination_callback'](self)
    return self._stopdict(self, check)
return a dictionary with the termination status. With ``check==False``, the termination conditions are not checked and the status might not reflect the current situation.
def getNucleotideCodon(sequence, x1):
    """Return the codon containing position ``x1`` of ``sequence`` and the
    position of that nucleotide within the codon, as a tuple.

    Returns None when ``x1`` falls outside the sequence.
    """
    if x1 < 0 or x1 >= len(sequence):
        return None
    # the codon starts at the nearest multiple of three at or before x1
    offset = x1 % 3
    codon_start = x1 - offset
    return (sequence[codon_start: codon_start + 3], offset)
Returns the entire codon of the nucleotide at position x1 in sequence, together with the position of that nucleotide within the codon, as a tuple
def _nanmedian(array, axis=None):
    """Bottleneck ``nanmedian`` wrapper that also handles a tuple ``axis``."""
    if isinstance(axis, tuple):
        # collapse the tuple of axes into a single leading axis
        array = _move_tuple_axes_first(array, axis=axis)
        axis = 0
    return bottleneck.nanmedian(array, axis=axis)
Bottleneck nanmedian function that handle tuple axis.
def in_SCAT_box(x, y, low_bound, high_bound, x_max, y_max):
    """Determine whether the point ``(x, y)`` falls within the SCAT box.

    The box is bounded below by ``low_bound(x)``, above by
    ``high_bound(x)``, and restricted to ``[0, x_max] x [0, y_max]``.
    """
    if x < 0 or y < 0:
        return False
    if x > x_max or y > y_max:
        return False
    # boundary values count as inside
    return low_bound(x) <= y <= high_bound(x)
determines if a particular point falls within a box
def urlencode_params(params):
    """URL-encode the given parameters.

    :param params: The parameters
    :type params: list of key/value tuples
    :rtype: string
    """
    params = [(key, normalize_for_urlencode(val)) for key, val in params]
    # keep unreserved characters readable in the encoded result
    return requests.utils.unquote_unreserved(urlencode(params))
URL encodes the parameters. :param params: The parameters :type params: list of key/value tuples. :rtype: string
def move(fname, folder, options):
    """Move ``fname`` into ``folder`` if the file exists.

    When the file is missing, print a message unless ``options.silent``.
    """
    if os.path.isfile(fname):
        shutil.move(fname, folder)
    else:
        if options.silent is False:
            print('{0} missing'.format(fname))
Move file to dir if existing
def _create_alias_map(im, alias=None):
    r"""Create an alias mapping between phases in the original image and
    identifiable names, used during network extraction.

    Parameters
    ----------
    im : ND-array
        Image of porous material where each phase is a unique integer
        (starting from 1).
    alias : dict, optional
        Maps an image label to a phase name, e.g. ``{1: 'Solid'}``.
        When None, default labelling ``{1: 'phase1', ...}`` is used.

    Returns
    -------
    dict mapping numerical phase labels to readable phase names.
    """
    phases_num = sp.unique(im * 1)
    # zero is background, not a phase
    phases_num = sp.trim_zeros(phases_num)
    al = {}
    for values in phases_num:
        al[values] = 'phase{}'.format(values)
    if alias is not None:
        alias_sort = dict(sorted(alias.items()))
        phase_labels = sp.array([*alias_sort])
        al = alias
        # alias keys must cover exactly the labels present in the image
        if set(phase_labels) != set(phases_num):
            raise Exception('Alias labels does not match with image labels '
                            'please provide correct image labels')
    return al
r""" Creates an alias mapping between phases in original image and identifyable names. This mapping is used during network extraction to label interconnection between and properties of each phase. Parameters ---------- im : ND-array Image of porous material where each phase is represented by unique integer. Phase integer should start from 1. Boolean image will extract only one network labeled with True's only. alias : dict (Optional) A dictionary that assigns unique image label to specific phase. For example {1: 'Solid'} will show all structural properties associated with label 1 as Solid phase properties. If ``None`` then default labelling will be used i.e {1: 'Phase1',..}. Returns ------- A dictionary with numerical phase labels as key, and readable phase names as valuies. If no alias is provided then default labelling is used i.e {1: 'Phase1',..}
def remove_workspace(self):
    """Pose a dialog to confirm removal, then remove the workspace."""
    def confirm_clicked():
        # never remove the last remaining workspace
        if len(self.document_model.workspaces) > 1:
            command = Workspace.RemoveWorkspaceCommand(self)
            command.perform()
            self.document_controller.push_undo_command(command)
    caption = _("Remove workspace named '{0}'?").format(self.__workspace.name)
    self.pose_confirmation_message_box(caption, confirm_clicked,
                                       accepted_text=_("Remove Workspace"),
                                       message_box_id="remove_workspace")
Pose a dialog to confirm removal then remove workspace.
def collectstatic(settings_module, bin_env=None, no_post_process=False, ignore=None,
                  dry_run=False, clear=False, link=False, no_default_ignore=False,
                  pythonpath=None, env=None, runas=None):
    """Collect static files from each application into a single location
    that can easily be served in production.

    CLI Example:

    .. code-block:: bash

        salt '*' django.collectstatic <settings_module>
    """
    # translate the boolean options into django-admin command flags
    args = ['noinput']
    kwargs = {}
    if no_post_process:
        args.append('no-post-process')
    if ignore:
        kwargs['ignore'] = ignore
    if dry_run:
        args.append('dry-run')
    if clear:
        args.append('clear')
    if link:
        args.append('link')
    if no_default_ignore:
        args.append('no-default-ignore')
    return command(settings_module, 'collectstatic', bin_env, pythonpath, env, runas, *args, **kwargs)
Collect static files from each of your applications into a single location that can easily be served in production. CLI Example: .. code-block:: bash salt '*' django.collectstatic <settings_module>
def sanitize_dict(input_dict):
    r"""Recursively convert a (possibly nested) mapping into plain Python
    dicts.

    Necessary for pickling, or for converting an 'auto-vivifying' dict
    into something that acts normal.
    """
    sanitized = dict()
    for key in input_dict:
        value = input_dict[key]
        # anything that quacks like a mapping is sanitized recursively
        sanitized[key] = sanitize_dict(value) if hasattr(value, 'keys') else value
    return sanitized
r""" Given a nested dictionary, ensures that all nested dicts are normal Python dicts. This is necessary for pickling, or just converting an 'auto-vivifying' dict to something that acts normal.
def normalize_feature_inputs(ctx, param, value):
    """Click callback that normalizes feature input values.

    Yields features from files/stdin containing GeoJSON, or builds Point
    features from string-encoded coordinate pairs such as "lng, lat".
    Reads from stdin ('-') when no value is provided.
    """
    for feature_like in value or ('-',):
        try:
            with click.open_file(feature_like) as src:
                for feature in iter_features(iter(src)):
                    yield feature
        except IOError:
            # not a readable file: treat the string as a coordinate pair
            coords = list(coords_from_query(feature_like))
            yield {
                'type': 'Feature',
                'properties': {},
                'geometry': {
                    'type': 'Point',
                    'coordinates': coords}}
Click callback that normalizes feature input values. Returns a generator over features from the input value. Parameters ---------- ctx: a Click context param: the name of the argument or option value: object The value argument may be one of the following: 1. A list of paths to files containing GeoJSON feature collections or feature sequences. 2. A list of string-encoded coordinate pairs of the form "[lng, lat]", or "lng, lat", or "lng lat". If no value is provided, features will be read from stdin.
def clear(self):
    """Delete and re-initialize all private key components to zero."""
    for field in self.__privfields__:
        delattr(self, field)
        setattr(self, field, MPI(0))
delete and re-initialize all private components to zero
def _format_eval_result(value, show_stdv=True): if len(value) == 4: return '%s\'s %s: %g' % (value[0], value[1], value[2]) elif len(value) == 5: if show_stdv: return '%s\'s %s: %g + %g' % (value[0], value[1], value[2], value[4]) else: return '%s\'s %s: %g' % (value[0], value[1], value[2]) else: raise ValueError("Wrong metric value")
Format metric string.
def dmag(self, band):
    """Magnitude difference between primary star and BG stars in ``band``.

    Raises ValueError when primary mags are not defined.
    """
    if self.mags is None:
        raise ValueError('dmag is not defined because primary mags are not defined for this population.')
    return self.stars['{}_mag'.format(band)] - self.mags[band]
Magnitude difference between primary star and BG stars
def get_banks(self):
    """Pass through to provider BankLookupSession.get_banks, wrapping each
    catalog in a Bank and returning them as a BankList."""
    catalogs = self._get_provider_session('bank_lookup_session').get_banks()
    cat_list = []
    for cat in catalogs:
        cat_list.append(Bank(self._provider_manager, cat, self._runtime, self._proxy))
    return BankList(cat_list)
Pass through to provider BankLookupSession.get_banks
def get_layout_context(self):
    """Return the template context used when rendering the form to HTML.

    Contains: form (`Form` instance), errors (non-field plus hidden-field
    errors), hidden_fields and visible_fields.
    """
    errors = self.non_field_errors()
    # hidden fields cannot render their own errors, so surface them here
    for field in self.hidden_fields():
        errors.extend(field.errors)
    return {
        'form': self,
        'errors': errors,
        'hidden_fields': self.hidden_fields(),
        'visible_fields': self.visible_fields(),
    }
Returns the context which is used when rendering the form to HTML. The generated template context will contain the following variables: * form: `Form` instance * errors: `ErrorList` instance with non field errors and hidden field errors * hidden_fields: All hidden fields to render. * visible_fields: All visible fields to render. :return: Template context for form rendering.
def save_cb(self):
    """Save the current plot to a file chosen via a save dialog."""
    w = Widgets.SaveDialog(title='Save plot')
    target = w.get_path()
    if target is None:
        # user canceled the save dialog
        return
    plot_ext = self.settings.get('file_suffix', '.png')
    if not target.endswith(plot_ext):
        target += plot_ext
    fig_dpi = 100
    try:
        fig = self.tab_plot.get_figure()
        fig.savefig(target, dpi=fig_dpi)
    except Exception as e:
        self.logger.error(str(e))
    else:
        self.logger.info('Table plot saved as {0}'.format(target))
Save plot to file.
def showdeletion(self, *objects):
    """Record a stack trace at the point where each ROOT TObject is deleted."""
    from ..memory import showdeletion as S
    for o in objects:
        S.monitor_object_cleanup(o)
Record a stack trace at the point when an ROOT TObject is deleted
def bool(self, item, default=None):
    """Return the value stored under ``item`` coerced to a boolean.

    :param item: key of the value to transform
    :param default: value to return if the key does not exist
    :return: approximated bool of the value
    """
    try:
        value = self.__getattr__(item)
    except AttributeError:
        # only a non-None default suppresses the error
        if default is not None:
            return default
        raise
    if isinstance(value, (bool, int)):
        return bool(value)
    falsy_words = ('n', 'no', 'false', 'f', '0')
    if isinstance(value, str) and value.lower() in falsy_words:
        return False
    return True if value else False
Return value of key as a boolean :param item: key of value to transform :param default: value to return if item does not exist :return: approximated bool of value
def _get_taxids(self, taxids=None): taxid_keys = set(self.taxid2asscs.keys()) return taxid_keys if taxids is None else set(taxids).intersection(taxid_keys)
Return user-specified taxids or taxids in self.taxid2asscs
def by_visits(self, event_kind=None):
    """Return Venues ordered by how many Events were held there.

    Annotates each venue with a ``num_visits`` field; ``event_kind``
    optionally filters by kind of Event, e.g. 'theatre', 'cinema'.
    """
    qs = self.get_queryset()
    if event_kind is not None:
        qs = qs.filter(event__kind=event_kind)
    qs = qs.annotate(num_visits=Count('event')) \
        .order_by('-num_visits', 'name_sort')
    return qs
Gets Venues in order of how many Events have been held there. Adds a `num_visits` field to each one. event_kind filters by kind of Event, e.g. 'theatre', 'cinema', etc.
def get_item(self):
    """If the item is publishable, get the visible version.

    Prefers the draft version when available; the result is cached on
    ``self._item_cache``.
    """
    if hasattr(self, 'get_draft'):
        draft = self.get_draft()
    else:
        draft = self
    if not hasattr(self, '_item_cache'):
        try:
            self._item_cache = draft.item.get_published_or_draft()
        except AttributeError:
            # item is not publishable; fall back to the raw item
            self._item_cache = draft.item
    return self._item_cache
If the item is publishable, get the visible version
def _collect_layer_outputs(mod, data, include_layer=None, max_num_examples=None, logger=None):
    """Collect layer outputs into a dictionary keyed by layer name.

    Returns a tuple ``(nd_dict, num_examples)``.
    """
    collector = _LayerOutputCollector(include_layer=include_layer, logger=logger)
    num_examples = _collect_layer_statistics(mod, data, collector, max_num_examples, logger)
    return collector.nd_dict, num_examples
Collect layer outputs and save them in a dictionary mapped by layer names.
def table_delete(self, table_name):
    """Issue a request to delete a table.

    Args:
        table_name: the name of the table as a tuple of components.
    Returns:
        The raw HTTP response.
    """
    url = Api._ENDPOINT + (Api._TABLES_PATH % table_name)
    return datalab.utils.Http.request(url, method='DELETE', credentials=self._credentials, raw_response=True)
Issues a request to delete a table. Args: table_name: the name of the table as a tuple of components. Returns: A parsed result object. Raises: Exception if there is an error performing the operation.
def trace_memory_clean_caches(self):
    """Clear builtin Python caches so they don't pollute memory-trace results."""
    urllib.parse.clear_cache()
    re.purge()
    linecache.clearcache()
    copyreg.clear_extension_cache()
    # the fnmatch cache-clearing API differs across Python versions
    if hasattr(fnmatch, "purge"):
        fnmatch.purge()
    elif hasattr(fnmatch, "_purge"):
        fnmatch._purge()
    if hasattr(encodings, "_cache") and len(encodings._cache) > 0:
        encodings._cache = {}
    for handler in context.log.handlers:
        handler.flush()
Avoid polluting results with some builtin python caches
def stop_and_destroy(self, sync=True):
    """Destroy the server and its storages, stopping the server first.

    Syncs the server state from the API; pass ``sync=False`` to disable.
    """
    def _self_destruct():
        # destroy the server itself, then each attached storage
        try_it_n_times(operation=self.destroy,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='destroying server failed')
        for storage in self.storage_devices:
            try_it_n_times(operation=storage.destroy,
                           expected_error_codes=['STORAGE_STATE_ILLEGAL'],
                           custom_error='destroying storage failed')
    if sync:
        self.populate()
    # wait out transient states before acting on the server
    if self.state in ['maintenance', 'error']:
        self._wait_for_state_change(['stopped', 'started'])
    if self.state == 'started':
        try_it_n_times(operation=self.stop,
                       expected_error_codes=['SERVER_STATE_ILLEGAL'],
                       custom_error='stopping server failed')
        self._wait_for_state_change(['stopped'])
    if self.state == 'stopped':
        _self_destruct()
    else:
        raise Exception('unknown server state: ' + self.state)
Destroy a server and its storages. Stops the server before destroying. Syncs the server state from the API, use sync=False to disable.
def get_sdk_version(self):
    """Get the Windows SDK version from VCVarsQueryRegistry.bat.

    Returns '' when no version string is found in the batch file; raises
    RuntimeError when the batch file itself cannot be read.
    """
    name = 'VCVarsQueryRegistry.bat'
    path = os.path.join(self.tool_dir, name)
    batch = read_file(path)
    if not batch:
        raise RuntimeError(_('failed to find the SDK version'))
    # the version sits between '\Microsoft SDKs\Windows\' and a closing quote
    regex = r'(?<=\\Microsoft SDKs\\Windows\\).+?(?=")'
    try:
        version = re.search(regex, batch).group()
    except AttributeError:
        return ''
    else:
        logging.debug(_('SDK version: %s'), version)
        return version
Get the version of Windows SDK from VCVarsQueryRegistry.bat.
def option(self, key, value=None, **kwargs):
    """Create a new option inside the current section.

    Args:
        key (str): key of the option
        value (str or None): value of the option
        **kwargs: passed to the Option constructor
    Returns:
        self for chaining
    Raises:
        ValueError: when the current container is not a Section.
    """
    if not isinstance(self._container, Section):
        raise ValueError("Options can only be added inside a section!")
    option = Option(key, value, container=self._container, **kwargs)
    option.value = value
    self._container.structure.insert(self._idx, option)
    self._idx += 1
    return self
Creates a new option inside a section Args: key (str): key of the option value (str or None): value of the option **kwargs: are passed to the constructor of :class:`Option` Returns: self for chaining
def authenticate_credentials(self, payload):
    """Get or create an active user with the username contained in ``payload``.

    The username comes from the ``preferred_username`` (or ``username``)
    claim; mapped user attributes are refreshed from JWT claims when they
    differ.

    Raises:
        AuthenticationFailed: when no username claim is present, or when
            user retrieval/creation fails for any reason.
    """
    username = payload.get('preferred_username') or payload.get('username')
    if username is None:
        raise exceptions.AuthenticationFailed('JWT must include a preferred_username or username claim!')
    try:
        user, __ = get_user_model().objects.get_or_create(username=username)
        attributes_updated = False
        for claim, attr in self.get_jwt_claim_attribute_map().items():
            payload_value = payload.get(claim)
            # only overwrite with concrete values that actually differ
            if getattr(user, attr) != payload_value and payload_value is not None:
                setattr(user, attr, payload_value)
                attributes_updated = True
        if attributes_updated:
            user.save()
    except Exception:
        # was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrow it to Exception
        msg = 'User retrieval failed.'
        logger.exception(msg)
        raise exceptions.AuthenticationFailed(msg)
    return user
Get or create an active user with the username contained in the payload.
def replace(self, v):
    """Replace an individual chosen by negative tournament selection with ``v``.

    Falls back to a plain add while the population is not yet full.
    """
    if self.popsize < self._popsize:
        return self.add(v)
    # negative tournament picks a poor individual to evict
    k = self.tournament(negative=True)
    self.clean(self.population[k])
    self.population[k] = v
    v.position = len(self._hist)
    self._hist.append(v)
    self.bsf = v
    self.estopping = v
    self._inds_replace += 1
    self._density += self.get_density(v)
    # a full round of replacements counts as one generation
    if self._inds_replace == self._popsize:
        self._inds_replace = 0
        self.generation += 1
    gc.collect()
Replace an individual selected by negative tournament selection with individual v
def evaluate_parameter_sets(self):
    """Evaluate the model instance's parameter sets, resolving any formulas
    into a fixed, full set of parameter values for each parameter set."""
    self.parameter_interpreter = LcoptParameterSet(self.modelInstance)
    self.modelInstance.evaluated_parameter_sets = self.parameter_interpreter.evaluated_parameter_sets
    self.modelInstance.bw2_export_params = self.parameter_interpreter.bw2_export_params
This takes the parameter sets of the model instance and evaluates any formulas using the parameter values to create a fixed, full set of parameters for each parameter set in the model
def maelstrom(args):
    """Run the maelstrom method from parsed CLI arguments."""
    infile = args.inputfile
    genome = args.genome
    outdir = args.outdir
    pwmfile = args.pwmfile
    methods = args.methods
    ncpus = args.ncpus
    if not os.path.exists(infile):
        raise ValueError("file {} does not exist".format(infile))
    if methods:
        # methods arrive as a comma-separated string of names
        methods = [x.strip() for x in methods.split(",")]
    run_maelstrom(infile, genome, outdir, pwmfile, methods=methods, ncpus=ncpus)
Run the maelstrom method.
def filter_belief():
    """HTTP endpoint: filter statements to beliefs above a given threshold."""
    if request.method == 'OPTIONS':
        # CORS preflight request
        return {}
    response = request.body.read().decode('utf-8')
    body = json.loads(response)
    stmts_json = body.get('statements')
    belief_cutoff = body.get('belief_cutoff')
    if belief_cutoff is not None:
        belief_cutoff = float(belief_cutoff)
    stmts = stmts_from_json(stmts_json)
    stmts_out = ac.filter_belief(stmts, belief_cutoff)
    return _return_stmts(stmts_out)
Filter to beliefs above a given threshold.
def check_nonbond(molecule, thresholds):
    """Check that all nonbonded atom pairs are well separated.

    ``thresholds`` maps ``frozenset([atom_number1, atom_number2])`` to a
    minimum distance. Returns False on the first violating pair,
    True otherwise.
    """
    for atom1 in range(molecule.graph.num_vertices):
        for atom2 in range(atom1):
            # pairs more than two bonds apart count as nonbonded
            if molecule.graph.distances[atom1, atom2] > 2:
                distance = np.linalg.norm(molecule.coordinates[atom1] - molecule.coordinates[atom2])
                if distance < thresholds[frozenset([molecule.numbers[atom1], molecule.numbers[atom2]])]:
                    return False
    return True
Check whether all nonbonded atoms are well separated. Returns False if a nonbonded atom pair is found whose interatomic distance is below the given threshold. The thresholds dictionary has the following format: {frozenset([atom_number1, atom_number2]): distance} When random geometries are generated for sampling the conformational space of a molecule without strong repulsive nonbonding interactions, try to underestimate the thresholds at first instance and exclude bond stretches and bending motions for the random manipulations. Then compute the forces projected on the nonbonding distance gradients. The distance for which the absolute value of these gradients drops below 100 kJ/mol is a coarse guess of a proper threshold value.
def is_correct(self):
    """Check that the group is valid, i.e. it has no unknown members.

    :return: True if the group is correct, otherwise False
    :rtype: bool
    """
    state = True
    if self.members:
        # drop duplicate members
        self.members = list(set(self.members))
    if self.unknown_members:
        for member in self.unknown_members:
            msg = "[%s::%s] as %s, got unknown member '%s'" % (
                self.my_type, self.get_name(), self.__class__.my_type, member
            )
            self.add_error(msg)
            state = False
    return super(Itemgroup, self).is_correct() and state
Check if a group is valid. Valid mean all members exists, so list of unknown_members is empty :return: True if group is correct, otherwise False :rtype: bool
def check_closed_streams(options):
    """Work around Python multiprocessing forking on closed std streams
    (https://bugs.python.org/issue28326) by reopening them on devnull.

    Returns False when stdin/stdout was requested ('-') but is closed.
    """
    if sys.version_info[0:3] >= (3, 6, 4):
        # fixed upstream from 3.6.4 onward
        return True
    if sys.stderr is None:
        sys.stderr = open(os.devnull, 'w')
    if sys.stdin is None:
        if options.input_file == '-':
            print("Trying to read from stdin but stdin seems closed", file=sys.stderr)
            return False
        sys.stdin = open(os.devnull, 'r')
    if sys.stdout is None:
        if options.output_file == '-':
            # NOTE(review): the dedent() argument (the original message text)
            # appears to have been lost in extraction — confirm against upstream
            print(
                textwrap.dedent(
                ),
                file=sys.stderr,
            )
            return False
        sys.stdout = open(os.devnull, 'w')
    return True
Work around Python issue with multiprocessing forking on closed streams https://bugs.python.org/issue28326 Attempting to a fork/exec a new Python process when any of std{in,out,err} are closed or not flushable for some reason may raise an exception. Fix this by opening devnull if the handle seems to be closed. Do this globally to avoid tracking places all places that fork. Seems to be specific to multiprocessing.Process not all Python process forkers. The error actually occurs when the stream object is not flushable, but replacing an open stream object that is not flushable with /dev/null is a bad idea since it will create a silent failure. Replacing a closed handle with /dev/null seems safe.
def _detect_notebook():
    """Best-effort check for running inside a Jupyter notebook kernel.

    Returns
    -------
    bool
        True if this looks like a notebook, otherwise False.
    """
    try:
        from IPython import get_ipython
        from ipykernel import zmqshell
    except ImportError:
        # IPython/ipykernel not installed: definitely not a notebook
        return False
    kernel = get_ipython()
    return isinstance(kernel, zmqshell.ZMQInteractiveShell)
This isn't 100% correct but seems good enough Returns ------- bool True if it detects this is a notebook, otherwise False.
async def set_dhw_setpoint(self, temperature, timeout=OTGW_DEFAULT_TIMEOUT):
    """Set the domestic hot water setpoint.

    Only available with boilers that support this function. Return the
    newly accepted setpoint, or None on failure. This method is a
    coroutine.
    """
    cmd = OTGW_CMD_SET_WATER
    status = {}
    ret = await self._wait_for_cmd(cmd, temperature, timeout)
    if ret is None:
        return
    ret = float(ret)
    status[DATA_DHW_SETPOINT] = ret
    self._update_status(status)
    return ret
Set the domestic hot water setpoint. This command is only available with boilers that support this function. Return the newly accepted setpoint, or None on failure. This method is a coroutine
def availableRoles(self):
    """Return the set of roles for this event.

    Looks at event-specific roles first, then falls back to the dance
    type's roles for Series; returns [] when no roles are found, in which
    case registration is not role-specific.
    """
    eventRoles = self.eventrole_set.filter(capacity__gt=0)
    if eventRoles.count() > 0:
        return [x.role for x in eventRoles]
    elif isinstance(self, Series):
        return self.classDescription.danceTypeLevel.danceType.roles.all()
    return []
Returns the set of roles for this event. Since roles are not always custom specified for event, this looks for the set of available roles in multiple places. If no roles are found, then the method returns an empty list, in which case it can be assumed that the event's registration is not role-specific.
def has_own_property(self, attr):
    """Return True when ``attr`` exists directly on the object (bypassing
    any ``__getattr__`` fallback), False otherwise."""
    try:
        # default attribute lookup, raises AttributeError when absent
        object.__getattribute__(self, attr)
        return True
    except AttributeError:
        return False
Returns True if the object itself has the given property (attribute), ignoring any ``__getattr__`` fallback; False otherwise.
def compute_laplacian_matrix(affinity_matrix, method='auto', **kwargs):
    """Compute the Laplacian matrix of ``affinity_matrix`` with the given
    method ('auto' resolves to 'geometric')."""
    if method == 'auto':
        method = 'geometric'
    return Laplacian.init(method, **kwargs).laplacian_matrix(affinity_matrix)
Compute the laplacian matrix with the given method
def _submit_rate(self, metric_name, val, metric, custom_tags=None, hostname=None):
    """Submit a metric as a rate under the check namespace.

    ``custom_tags`` is a list of 'tag:value' strings added to the tags
    derived from the metric's labels.
    """
    _tags = self._metric_tags(metric_name, val, metric, custom_tags, hostname)
    self.rate('{}.{}'.format(self.NAMESPACE, metric_name), val, _tags, hostname=hostname)
Submit a metric as a rate, additional tags provided will be added to the ones from the label provided via the metrics object. `custom_tags` is an array of 'tag:value' that will be added to the metric when sending the rate to Datadog.
def do_quit(self, arg):
    """quit || exit || q
    Stop and quit the current debugging session.
    """
    # restore the std streams that were replaced at startup
    for name, fh in self._backup:
        setattr(sys, name, fh)
    self.console.writeline('*** Aborting program ***\n')
    self.console.flush()
    self.console.close()
    WebPdb.active_instance = None
    return Pdb.do_quit(self, arg)
quit || exit || q Stop and quit the current debugging session
def force_bytes(value):
    """Force a Unicode string to become a UTF-8 bytestring (Python 2 and 3)."""
    if IS_PY3:
        if isinstance(value, str):
            value = value.encode('utf-8', 'backslashreplace')
    else:
        # Python 2: only `unicode` needs encoding; `str` is already bytes
        if isinstance(value, unicode):
            value = value.encode('utf-8')
    return value
Forces a Unicode string to become a bytestring.
def API520_B(Pset, Pback, overpressure=0.1):
    r"""Calculate the capacity correction Kb due to backpressure on
    balanced spring-loaded PRVs in vapor service (API 520 sizing).
    For pilot operated valves this is always 1.

    Parameters
    ----------
    Pset : float
        Set pressure for relief [Pa]
    Pback : float
        Backpressure [Pa]
    overpressure : float, optional
        Maximum fraction overpressure; one of 0.1, 0.16 or 0.21 []

    Returns
    -------
    Kb : float
        Correction due to vapor backpressure [-]
    """
    gauge_backpressure = (Pback-atm)/(Pset-atm)*100
    if overpressure not in [0.1, 0.16, 0.21]:
        raise Exception('Only overpressure of 10%, 16%, or 21% are permitted')
    # below these gauge-backpressure limits no correction applies
    if (overpressure == 0.1 and gauge_backpressure < 30) or (
            overpressure == 0.16 and gauge_backpressure < 38) or (
            overpressure == 0.21 and gauge_backpressure < 50):
        return 1
    elif gauge_backpressure > 50:
        raise Exception('Gauge pressure must be < 50%')
    # NOTE(review): overpressure == 0.21 with gauge backpressure exactly 50
    # falls through both interpolation branches, leaving Kb unbound — confirm
    # against the upstream library
    if overpressure == 0.16:
        Kb = interp(gauge_backpressure, Kb_16_over_x, Kb_16_over_y)
    elif overpressure == 0.1:
        Kb = interp(gauge_backpressure, Kb_10_over_x, Kb_10_over_y)
    return Kb
r'''Calculates capacity correction due to backpressure on balanced spring-loaded PRVs in vapor service. For pilot operated valves, this is always 1. Applicable up to 50% of the percent gauge backpressure, For use in API 520 relief valve sizing. 1D interpolation among a table with 53 backpressures is performed. Parameters ---------- Pset : float Set pressure for relief [Pa] Pback : float Backpressure, [Pa] overpressure : float, optional The maximum fraction overpressure; one of 0.1, 0.16, or 0.21, [] Returns ------- Kb : float Correction due to vapor backpressure [-] Notes ----- If the calculated gauge backpressure is less than 30%, 38%, or 50% for overpressures of 0.1, 0.16, or 0.21, a value of 1 is returned. Percent gauge backpressure must be under 50%. Examples -------- Custom examples from figure 30: >>> API520_B(1E6, 5E5) 0.7929945420944432 References ---------- .. [1] API Standard 520, Part 1 - Sizing and Selection.
def class_name(self, cls, parts=0, aliases=None):
    """Return a fully-qualified name for ``cls``, optionally truncated to
    the last ``parts`` dotted components and mapped through ``aliases``.

    Builtins are reported without a module prefix.
    """
    module = cls.__module__
    if module in ('__builtin__', 'builtins'):
        fullname = cls.__name__
    else:
        fullname = '%s.%s' % (module, cls.__name__)
    # parts == 0 keeps the full dotted path
    result = fullname if parts == 0 else '.'.join(fullname.split('.')[-parts:])
    if aliases is not None and result in aliases:
        return aliases[result]
    return result
Given a class object, return a fully-qualified name. This works for things I've tested in matplotlib so far, but may not be completely general.
def _rec_owner_number(self):
    """Get the rec owner's player number (player color index + 1)."""
    player = self._header.initial.players[self._header.replay.rec_player]
    return player.attributes.player_color + 1
Get rec owner number.
def check_classes(self, scope=-1):
    """Check whether pending identifiers in ``scope`` (default: current)
    are defined; emit a syntax error for each unknown one."""
    for entry in self[scope].values():
        if entry.class_ is None:
            # identifier was referenced but never declared
            syntax_error(entry.lineno, "Unknown identifier '%s'" % entry.name)
Check if pending identifiers are defined or not. If not, returns a syntax error. If no scope is given, the current one is checked.
def min_order_amount(self) -> Money:
    """Minimum amount to place an order on this market."""
    return self._fetch('minimum order amount', self.market.code)(self._min_order_amount)()
Minimum amount to place an order.
def parse_registries(filesystem, registries):
    """Return a dict with the content of the given registry hives, keyed by
    registry key path: ``{path: (timestamp, values)}``."""
    results = {}
    for path in registries:
        # download each hive to a temp file before parsing it
        with NamedTemporaryFile(buffering=0) as tempfile:
            filesystem.download(path, tempfile.name)
            registry = RegistryHive(tempfile.name)
            registry.rootkey = registry_root(path)
            results.update({k.path: (k.timestamp, k.values) for k in registry.keys()})
    return results
Returns a dictionary with the content of the given registry hives. {"\\Registry\\Key\\", (("ValueKey", "ValueType", ValueValue))}
async def add(ctx, left: int, right: int):
    """Add two numbers together and send the result to the channel."""
    await ctx.send(left + right)
Adds two numbers together.
def trie(self):
    """Link into IntentEngine's trie via the default (0) domain.

    .. warning:: kept only for backwards compatibility; do not use this
       if you intend to use domains.

    Return:
        the default domain's trie from its IntentEngine.
    """
    domain = 0
    if domain not in self.domains:
        self.register_domain(domain=domain)
    return self.domains[domain].trie
A property to link into IntentEngine's trie. warning:: this is only for backwards compatibility and should not be used if you intend on using domains. Return: the domains trie from its IntentEngine
def PushSection(self, name, pre_formatters):
    """Push the section ``name`` onto the top of the stack after running
    its pre-formatters.

    Returns:
        The new section value, or None if there is no such section.
    """
    if name == '@':
        # '@' refers to the current (cursor) context
        value = self.stack[-1].context
    else:
        value = self.stack[-1].context.get(name)
    for i, (f, args, formatter_type) in enumerate(pre_formatters):
        if formatter_type == ENHANCED_FUNC:
            value = f(value, self, args)
        elif formatter_type == SIMPLE_FUNC:
            value = f(value)
        else:
            assert False, 'Invalid formatter type %r' % formatter_type
    self.stack.append(_Frame(value))
    return value
Given a section name, push it on the top of the stack. Returns: The new section, or None if there is no such section.
def pascal_row(n):
    """Return the n-th row of Pascal's triangle as a list of ints.

    Uses the multiplicative identity C(n, k) = C(n, k-1) * (n-k+1) / k,
    which divides exactly at every step, so exact integer arithmetic is
    used throughout (the original ``x /= denominator`` produced floats on
    Python 3, losing exactness for large n).
    """
    result = [1]
    x, numerator = 1, n
    for denominator in range(1, n // 2 + 1):
        # exact: the running product is always divisible by denominator
        x = x * numerator // denominator
        result.append(x)
        numerator -= 1
    # mirror the first half; odd-length rows repeat the middle element
    if n & 1 == 0:
        result.extend(reversed(result[:-1]))
    else:
        result.extend(reversed(result))
    return result
Returns n-th row of Pascal's triangle
def list_datasources(self, source_id):
    """Return a filterable Query of Datasources for the given Source."""
    target_url = self.client.get_url('DATASOURCE', 'GET', 'multi', {'source_id': source_id})
    return base.Query(self.client.get_manager(Datasource), target_url)
Filterable list of Datasources for a Source.
def find_mappable(*axes):
    """Find the most recently added mappable layer in the given axes.

    Checks each axes' ``collections`` then ``images`` and returns the
    last entry of the first non-empty one. Raises ValueError when no
    axes carries a mappable.
    """
    for ax in axes:
        for layer_attr in ('collections', 'images'):
            layers = getattr(ax, layer_attr, None)
            if layers:
                # most recently added layer is last
                return layers[-1]
    raise ValueError("Cannot determine mappable layer on any axes "
                     "for this colorbar")
Find the most recently added mappable layer in the given axes Parameters ---------- *axes : `~matplotlib.axes.Axes` one or more axes to search for a mappable
def iter_links_param_element(cls, element):
    """Iterate a ``param`` element, yielding a LinkInfo for 'ref'-valued
    params that carry a 'value' attribute."""
    valuetype = element.attrib.get('valuetype', '')
    if valuetype.lower() == 'ref' and 'value' in element.attrib:
        link_type = identify_link_type(element.attrib.get('value'))
        yield LinkInfo(
            element=element, tag=element.tag, attrib='value',
            link=element.attrib.get('value'),
            inline=True, linked=False,
            base_link=None,
            value_type='plain',
            link_type=link_type
        )
Iterate a ``param`` element.
def get_all_names(self):
    """Return the set of all cached global names across every module."""
    result = set()
    for module_names in self.names.values():
        result.update(module_names)
    return result
Return the list of all cached global names
def _get_insert_commands(self, rows, cols):
    """Build a dictionary of insert statements (one entry per table) to be
    executed later; multi-row tables use ``insert_many``."""
    insert_queries = {}
    for table in tqdm(list(rows.keys()), total=len(list(rows.keys())), desc='Getting insert rows queries'):
        insert_queries[table] = {}
        # pop to release each table's data as it is processed
        _rows = rows.pop(table)
        _cols = cols.pop(table)
        if len(_rows) > 1:
            insert_queries[table]['insert_many'] = self.insert_many(table, _cols, _rows, execute=False)
        elif len(_rows) == 1:
            insert_queries[table]['insert'] = self.insert(table, _cols, _rows, execute=False)
    return insert_queries
Retrieve dictionary of insert statements to be executed.
def select_matchedfilter_class(curr_exe):
    """Return the Executable sub-class used to set up matched-filtering
    jobs in the workflow for the named executable.

    Raises NotImplementedError for unknown executables.
    """
    exe_to_class_map = {
        'pycbc_inspiral': PyCBCInspiralExecutable,
        'pycbc_inspiral_skymax': PyCBCInspiralExecutable,
        'pycbc_multi_inspiral': PyCBCMultiInspiralExecutable,
    }
    try:
        return exe_to_class_map[curr_exe]
    except KeyError:
        raise NotImplementedError(
            "No job class exists for executable %s, exiting" % curr_exe)
This function returns a class that is appropriate for setting up matched-filtering jobs within workflow. Parameters ---------- curr_exe : string The name of the matched filter executable to be used. Returns -------- exe_class : Sub-class of pycbc.workflow.core.Executable that holds utility functions appropriate for the given executable. Instances of the class ('jobs') **must** have methods * job.create_node() and * job.get_valid_times(ifo, )
def survival_function_at_times(self, times, label=None):
    """Return a Pandas Series of predicted survival values at ``times``.

    Parameters
    ----------
    times : iterable or float
        Values at which to evaluate the survival function.
    label : string, optional
        Name for the returned series (useful for plotting).
    """
    label = coalesce(label, self._label)
    return pd.Series(self._survival_function(self._fitted_parameters_, times), index=_to_array(times), name=label)
Return a Pandas series of the predicted survival value at specific times. Parameters ----------- times: iterable or float values to return the survival function at. label: string, optional Rename the series returned. Useful for plotting. Returns -------- pd.Series
def fire(self, sender=None, **params):
    """Fire all callbacks registered for ``sender`` (or for no sender)
    and return the list of their results."""
    # callbacks registered with no sender fire for every sender
    keys = (_make_id(None), _make_id(sender))
    return [callback(self, sender, **params)
            for (_, key), callback in self.callbacks
            if key in keys]
Fire callbacks from a ``sender``.
def recpgr(body, rectan, re, f):
    """Convert rectangular coordinates to planetographic coordinates.

    https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recpgr_c.html

    :param body: Body with which the coordinate system is associated.
    :type body: str
    :param rectan: Rectangular coordinates of a point (3 floats).
    :param re: Equatorial radius of the reference spheroid.
    :type re: float
    :param f: Flattening coefficient.
    :type f: float
    :return: (longitude [rad], latitude [rad], altitude above spheroid)
    :rtype: tuple
    """
    body = stypes.stringToCharP(body)
    rectan = stypes.toDoubleVector(rectan)
    re = ctypes.c_double(re)
    f = ctypes.c_double(f)
    # output parameters filled in by the C routine
    lon = ctypes.c_double()
    lat = ctypes.c_double()
    alt = ctypes.c_double()
    libspice.recpgr_c(body, rectan, re, f, ctypes.byref(lon), ctypes.byref(lat), ctypes.byref(alt))
    return lon.value, lat.value, alt.value
Convert rectangular coordinates to planetographic coordinates. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/recpgr_c.html :param body: Body with which coordinate system is associated. :type body: str :param rectan: Rectangular coordinates of a point. :type rectan: 3-Element Array of floats :param re: Equatorial radius of the reference spheroid. :type re: float :param f: Flattening coefficient. :type f: float :return: Planetographic longitude (radians), Planetographic latitude (radians), Altitude above reference spheroid :rtype: tuple
def DecoratorMixin(decorator):
    """Convert a decorator written for a function view into a mixin for a
    class-based view, e.g.::

        LoginRequiredMixin = DecoratorMixin(login_required)

        class MyView(LoginRequiredMixin):
            pass
    """
    class Mixin(object):
        __doc__ = decorator.__doc__

        @classmethod
        def as_view(cls, *args, **kwargs):
            # build the underlying view, then apply the function decorator
            view = super(Mixin, cls).as_view(*args, **kwargs)
            return decorator(view)

    Mixin.__name__ = str('DecoratorMixin(%s)' % decorator.__name__)
    return Mixin
Converts a decorator written for a function view into a mixin for a class-based view. :: LoginRequiredMixin = DecoratorMixin(login_required) class MyView(LoginRequiredMixin): pass class SomeView(DecoratorMixin(some_decorator), DecoratorMixin(something_else)): pass
def atlasdb_get_random_peer(con=None, path=None):
    """Select a peer from the db at random.

    Return None if the table is empty.
    """
    ret = {}
    with AtlasDBOpen(con=con, path=path) as dbcon:
        num_peers = atlasdb_num_peers(con=con, path=path)
        if num_peers is None or num_peers == 0:
            ret['peer_hostport'] = None
        else:
            # peer_index is 1-based
            r = random.randint(1, num_peers)
            sql = "SELECT * FROM peers WHERE peer_index = ?;"
            args = (r,)
            cur = dbcon.cursor()
            res = atlasdb_query_execute(cur, sql, args)
            ret = {'peer_hostport': None}
            for row in res:
                ret.update(row)
                break
    return ret['peer_hostport']
Select a peer from the db at random Return None if the table is empty
def share_secret(threshold, nshares, secret, identifier, hash_id=Hash.SHA256):
    """Create ``nshares`` shares of ``secret``.

    ``threshold`` is the number of shares needed to reconstruct the
    secret. A 0-16 byte identifier must be provided. Optionally the
    secret is hashed with ``hash_id`` (a Hash class attribute). Returns
    a list of formatted shares, or raises TSSError on failure.
    """
    if identifier is None:
        raise TSSError('an identifier must be provided')
    if not Hash.is_valid(hash_id):
        raise TSSError('invalid hash algorithm %s' % hash_id)
    secret = encode(secret)
    identifier = encode(identifier)
    if hash_id != Hash.NONE:
        # append a digest so reconstruction can be verified
        secret += Hash.to_func(hash_id)(secret).digest()
    shares = generate_shares(threshold, nshares, secret)
    header = format_header(identifier, hash_id, threshold, len(secret) + 1)
    return [format_share(header, share) for share in shares]
Create nshares of the secret. threshold specifies the number of shares needed for reconstructing the secret value. A 0-16 bytes identifier must be provided. Optionally the secret is hashed with the algorithm specified by hash_id, a class attribute of Hash. This function must return a list of formatted shares or raises a TSSError exception if anything went wrong.
def send_location(self, latitude, longitude, **options):
    """
    Send a point on the map to this chat.

    :param float latitude: Latitude of the location
    :param float longitude: Longitude of the location
    :param options: Additional sendLocation options
        (see https://core.telegram.org/bots/api#sendlocation)
    """
    return self.bot.api_call(
        "sendLocation",
        chat_id=self.id,
        latitude=latitude,
        longitude=longitude,
        **options
    )
Send a point on the map. :param float latitude: Latitude of the location :param float longitude: Longitude of the location :param options: Additional sendLocation options (see https://core.telegram.org/bots/api#sendlocation)
def check_id_idx_exclusivity(id, idx):
    """Make sure the user didn't provide both ids and idx values to subset by.

    Input:
        - id (list or None): if not None, a list of string id names
        - idx (list or None): if not None, a list of integer id indexes

    Output:
        - a tuple: first element is subset type, second is subset content;
          (None, []) when neither was provided.

    Raises:
        Exception: when both id and idx are given.
    """
    if id is not None and idx is not None:
        msg = ("'id' and 'idx' fields can't both not be None," +
               " please specify subset in only one of these fields")
        logger.error(msg)
        raise Exception("parse_gctx.check_id_idx_exclusivity: " + msg)
    if id is not None:
        return ("id", id)
    if idx is not None:
        return ("idx", idx)
    return (None, [])
Makes sure user didn't provide both ids and idx values to subset by. Input: - id (list or None): if not None, a list of string id names - idx (list or None): if not None, a list of integer id indexes Output: - a tuple: first element is subset type, second is subset content
def c_transform_entropic(b, M, reg, beta):
    """Compute the entropic c-transform of the dual variable ``beta``.

    For each source index i this evaluates, in a numerically stable way
    (shifting by the row minimum before exponentiating):

        alpha_i = -reg * log( sum_j b_j * exp(-(M_ij - beta_j) / reg) )

    Parameters
    ----------
    b : np.ndarray (nt,)
        target measure
    M : np.ndarray (ns, nt)
        cost matrix
    reg : float
        regularization term > 0
    beta : np.ndarray (nt,)
        dual variable

    Returns
    -------
    np.ndarray (ns,)
        the recovered dual variable alpha
    """
    n_rows = np.shape(M)[0]
    alpha = np.zeros(n_rows)

    for row in range(n_rows):
        shifted = M[row, :] - beta
        lo = np.min(shifted)
        # subtract the minimum before exponentiating for stability
        weights = np.exp(-(shifted - lo) / reg) * b
        alpha[row] = lo - reg * np.log(np.sum(weights))

    return alpha
The goal is to recover u from the c-transform. The function computes the c_transform of a dual variable from the other dual variable: .. math:: u = v^{c,reg} = -reg \sum_j exp((v - M)/reg) b_j Where : - M is the (ns,nt) metric cost matrix - u, v are dual variables in R^IxR^J - reg is the regularization term It is used to recover an optimal u from optimal v solving the semi dual problem, see Proposition 2.1 of [18]_ Parameters ---------- b : np.ndarray(nt,) target measure M : np.ndarray(ns, nt) cost matrix reg : float regularization term > 0 v : np.ndarray(nt,) dual variable Returns ------- u : np.ndarray(ns,) dual variable Examples -------- >>> n_source = 7 >>> n_target = 4 >>> reg = 1 >>> numItermax = 300000 >>> a = ot.utils.unif(n_source) >>> b = ot.utils.unif(n_target) >>> rng = np.random.RandomState(0) >>> X_source = rng.randn(n_source, 2) >>> Y_target = rng.randn(n_target, 2) >>> M = ot.dist(X_source, Y_target) >>> method = "ASGD" >>> asgd_pi = stochastic.solve_semi_dual_entropic(a, b, M, reg, method, numItermax) >>> print(asgd_pi) References ---------- [Genevay et al., 2016] : Stochastic Optimization for Large-scale Optimal Transport, Advances in Neural Information Processing Systems (2016), arXiv preprint arxiv:1605.08527.
def set_mode_auto(self):
    """Enter auto mode (start executing the loaded mission)."""
    if self.mavlink10():
        # MAVLink 1.0+: use the MISSION_START long command
        self.mav.command_long_send(self.target_system, self.target_component,
                                   mavlink.MAV_CMD_MISSION_START, 0, 0, 0, 0, 0, 0, 0, 0)
    else:
        # legacy protocol: fall back to the numeric SET_AUTO action
        MAV_ACTION_SET_AUTO = 13
        self.mav.action_send(self.target_system, self.target_component, MAV_ACTION_SET_AUTO)
enter auto mode
def add_load_constant(self, name, output_name, constant_value, shape):
    """
    Add a load constant layer.

    Parameters
    ----------
    name: str
        The name of this layer.
    output_name: str
        The output blob name of this layer.
    constant_value: numpy.array
        value of the constant as a numpy array.
    shape: [int]
        List of ints representing the shape of the constant. Must be of
        length 3: [C,H,W]

    Raises
    ------
    ValueError
        If ``shape`` is not of length 3, or does not match the number of
        elements in ``constant_value``.

    See Also
    --------
    add_elementwise
    """
    # Validate BEFORE mutating the spec: the original validated after
    # adding the layer, leaving a half-built layer in the protobuf when
    # the arguments were invalid.
    if len(shape) != 3:
        raise ValueError("'shape' must be of length 3")
    if np.prod(np.shape(constant_value)) != np.prod(shape):
        raise ValueError("Dimensions of 'shape' do not match the size of the provided constant")

    spec = self.spec
    nn_spec = self.nn_spec

    # Add a new layer
    spec_layer = nn_spec.layers.add()
    spec_layer.name = name
    spec_layer.output.append(output_name)
    spec_layer_params = spec_layer.loadConstant

    data = spec_layer_params.data
    data.floatValue.extend(map(float, constant_value.flatten()))

    spec_layer_params.shape.extend(shape)
Add a load constant layer. Parameters ---------- name: str The name of this layer. output_name: str The output blob name of this layer. constant_value: numpy.array value of the constant as a numpy array. shape: [int] List of ints representing the shape of the constant. Must be of length 3: [C,H,W] See Also -------- add_elementwise
def is_evalframeex(self):
    """Is this a PyEval_EvalFrameEx frame?"""
    frame = self._gdbframe
    # Both the symbol name and the frame kind must match.
    return (frame.name() == 'PyEval_EvalFrameEx'
            and frame.type() == gdb.NORMAL_FRAME)
Is this a PyEval_EvalFrameEx frame?
def get_window_size(window):
    """
    Retrieves the size of the client area of the specified window.

    Wrapper for:
        void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);

    Returns a (width, height) tuple of ints.
    """
    # glfw writes the result through these two out-pointers
    width_value = ctypes.c_int(0)
    width = ctypes.pointer(width_value)
    height_value = ctypes.c_int(0)
    height = ctypes.pointer(height_value)
    _glfw.glfwGetWindowSize(window, width, height)
    return width_value.value, height_value.value
Retrieves the size of the client area of the specified window. Wrapper for: void glfwGetWindowSize(GLFWwindow* window, int* width, int* height);
def get_remote_content(filepath):
    """
    A handy wrapper to get a remote file content via Fabric's ``get``,
    decoded as UTF-8 and stripped of surrounding whitespace.
    """
    with hide('running'):  # suppress Fabric's command echo
        temp = BytesIO()
        get(filepath, temp)
        content = temp.getvalue().decode('utf-8')

    return content.strip()
A handy wrapper to get a remote file content
def shawn_text(self):
    """The text displayed instead of the real one.

    Cached in ``self._shawn_text``; regenerated whenever its length no
    longer matches.  DOTS style returns a run of bullet characters; any
    other style gets random glyphs drawn from a few unicode ranges.
    """
    # NOTE(review): the cache check compares against len(self) while the
    # generation loop below fills up to len(self.text) — confirm the two
    # lengths are meant to agree.
    if len(self._shawn_text) == len(self):
        return self._shawn_text
    if self.style == self.DOTS:
        return chr(0x2022) * len(self)  # 0x2022 is the bullet character
    # codepoint ranges the masking glyphs may be drawn from
    ranges = [
        (902, 1366),
        (192, 683),
        (33, 122)
    ]
    s = ''
    while len(s) < len(self.text):
        apolo = randint(33, 1366)
        # keep the candidate only if it falls within one of the ranges
        for a, b in ranges:
            if a <= apolo <= b:
                s += chr(apolo)
                break
    self._shawn_text = s
    return s
The text displayed instead of the real one.
def set_computer_desc(desc=None):
    """
    Set the Windows computer description.

    Args:
        desc (str): The computer description

    Returns:
        dict: ``{'Computer Description': <new description>}`` if
        successful, otherwise ``False``.

    CLI Example:

    .. code-block:: bash

        salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!'
    """
    if six.PY2:
        desc = _to_unicode(desc)

    # info level 101 includes the 'comment' (description) field
    system_info = win32net.NetServerGetInfo(None, 101)
    if desc is None:
        return False

    system_info['comment'] = desc
    try:
        win32net.NetServerSetInfo(None, 101, system_info)
    except win32net.error as exc:
        # log the structured (number, context, message) error and bail
        (number, context, message) = exc.args
        log.error('Failed to update system')
        log.error('nbr: %s', number)
        log.error('ctx: %s', context)
        log.error('msg: %s', message)
        return False
    return {'Computer Description': get_computer_desc()}
Set the Windows computer description Args: desc (str): The computer description Returns: str: Description if successful, otherwise ``False`` CLI Example: .. code-block:: bash salt 'minion-id' system.set_computer_desc 'This computer belongs to Dave!'
def make_jagged_equity_info(num_assets,
                            start_date,
                            first_end,
                            frequency,
                            periods_between_ends,
                            auto_close_delta):
    """
    Create a DataFrame representing assets that all begin at the same
    start date, but have cascading end dates.

    Parameters
    ----------
    num_assets : int
        How many assets to create.
    start_date : pd.Timestamp
        The start date for all the assets.
    first_end : pd.Timestamp
        The date at which the first equity will end.
    frequency : str or pd.tseries.offsets.Offset (e.g. trading_day)
        Frequency used to interpret the next argument.
    periods_between_ends : int
        Starting after the first end date, end each asset every
        `frequency` * `periods_between_ends`.
    auto_close_delta : offset or None
        When given, adds an 'auto_close_date' column at
        end_date + auto_close_delta.

    Returns
    -------
    info : pd.DataFrame
        DataFrame representing newly-created assets.
    """
    # Symbols are simply 'A', 'B', 'C', ...
    symbols = [chr(ord('A') + i) for i in range(num_assets)]
    end_dates = pd.date_range(
        first_end,
        freq=(periods_between_ends * frequency),
        periods=num_assets,
    )
    info = pd.DataFrame(
        {
            'symbol': symbols,
            'start_date': start_date,
            'end_date': end_dates,
            'exchange': 'TEST',
        },
        index=range(num_assets),
    )

    if auto_close_delta is not None:
        info['auto_close_date'] = info['end_date'] + auto_close_delta

    return info
Create a DataFrame representing assets that all begin at the same start date, but have cascading end dates. Parameters ---------- num_assets : int How many assets to create. start_date : pd.Timestamp The start date for all the assets. first_end : pd.Timestamp The date at which the first equity will end. frequency : str or pd.tseries.offsets.Offset (e.g. trading_day) Frequency used to interpret the next argument. periods_between_ends : int Starting after the first end date, end each asset every `frequency` * `periods_between_ends`. Returns ------- info : pd.DataFrame DataFrame representing newly-created assets.
def glance(self, nitems=5):
    """
    Quick and partial glance of the data matrix.

    Parameters
    ----------
    nitems : int
        Number of items to glance from the dataset; clamped to the
        range [1, num_samples - 1]. Default : 5

    Returns
    -------
    dict
    """
    # clamp so we always take at least one item and never the whole set
    nitems = max([1, min([nitems, self.num_samples - 1])])
    return self.__take(nitems, iter(self.__data.items()))
Quick and partial glance of the data matrix. Parameters ---------- nitems : int Number of items to glance from the dataset. Default : 5 Returns ------- dict
def virtual_media(self):
    """Property to provide reference to `VirtualMediaCollection` instance.

    It is calculated once when the first time it is queried. On refresh,
    this property gets reset.
    """
    # NOTE(review): the once-only caching/reset behavior presumably comes
    # from the property decorator applied outside this view — confirm.
    return virtual_media.VirtualMediaCollection(
        self._conn, utils.get_subresource_path_by(self, 'VirtualMedia'),
        redfish_version=self.redfish_version)
Property to provide reference to `VirtualMediaCollection` instance. It is calculated once when the first time it is queried. On refresh, this property gets reset.
def reset_rf_samples():
    """Undo the changes produced by set_rf_samples, restoring
    scikit-learn's default bootstrap (n_samples draws with replacement)."""
    def _bootstrap_indices(rs, n_samples):
        return forest.check_random_state(rs).randint(0, n_samples, n_samples)
    forest._generate_sample_indices = _bootstrap_indices
Undoes the changes produced by set_rf_samples.
def close(self):
    """Close the pooled shared connection.

    Returns the shared connection to the pool rather than closing the
    underlying connection; safe to call more than once (no-op when the
    connection is already closed).
    """
    if self._con:
        self._pool.unshare(self._shared_con)
        self._shared_con = self._con = None
Close the pooled shared connection.
def rvpl(self, prompt, error='Entered value is invalid', intro=None,
         validator=lambda x: x != '', clean=lambda x: x.strip(),
         strict=True, default=None):
    """Start a read-validate-print loop.

    Reads user input, validates it with ``validator``, and loops until
    the entered value passes validation, then returns it.  With
    ``strict=False`` the loop aborts on the first invalid value and
    ``default`` is returned instead.  ``error`` may be a message or a
    callable that receives the bad value and returns a printable message
    (exceptions it raises are not trapped).  ``intro``, when given, is
    printed above the prompt.  ``clean`` post-processes the raw input
    before validation.
    """
    if intro:
        self.pstd(utils.rewrap_long(intro))
    val = self.read(prompt, clean)
    while not validator(val):
        if not strict:
            # non-strict mode: give up after the first invalid value
            return default
        if hasattr(error, '__call__'):
            self.perr(error(val))
        else:
            self.perr(error)
        val = self.read(prompt, clean)
    return val
Start a read-validate-print loop

The RVPL will read the user input, validate it, and loop until the entered value passes the validation, then return it. Error message can be customized using the ``error`` argument. If the value is a callable, it will be called with the value and it will be expected to return a printable message. Exceptions raised by the ``error`` function are not trapped. When ``intro`` is passed, it is printed above the prompt. The ``validator`` argument is a function that validates the user input. Default validator simply validates if user entered any value. The ``clean`` argument specifies a function for the ``read()`` method with the same semantics.
def matrix2cube(data_matrix, im_shape):
    r"""Matrix to Cube

    Transform a 2D matrix whose columns are flattened images back into a
    3D cube of images.

    Parameters
    ----------
    data_matrix : np.ndarray
        Input data, 2D array of shape (n_pixels, n_images)
    im_shape : tuple
        2D shape of the individual images

    Returns
    -------
    np.ndarray
        3D cube of shape (n_images,) + im_shape
    """
    n_images = data_matrix.shape[1]
    cube_shape = [n_images] + list(im_shape)
    # each column of data_matrix becomes one reshaped image
    return data_matrix.T.reshape(cube_shape)
r"""Matrix to Cube This method transforms a 2D matrix to a 3D cube Parameters ---------- data_matrix : np.ndarray Input data cube, 2D array im_shape : tuple 2D shape of the individual images Returns ------- np.ndarray 3D cube Examples -------- >>> from modopt.base.transform import matrix2cube >>> a = np.array([[0, 4, 8, 12], [1, 5, 9, 13], [2, 6, 10, 14], [3, 7, 11, 15]]) >>> matrix2cube(a, (2, 2)) array([[[ 0, 1], [ 2, 3]], [[ 4, 5], [ 6, 7]], [[ 8, 9], [10, 11]], [[12, 13], [14, 15]]])
def pipeline_name(self):
    """Get pipeline name of current stage instance.

    Stage instances can be created in different ways that store their
    data differently, so prefer the name recorded in this stage's own
    data and fall back to the parent pipeline's data.

    :return: pipeline name, or None when neither source provides it.
    """
    if 'pipeline_name' in self.data:
        return self.data.get('pipeline_name')
    elif self.pipeline is not None:
        return self.pipeline.data.name
Get pipeline name of current stage instance. Because instantiating stage instance could be performed in different ways and those return different results, we have to check where from to get name of the pipeline. :return: pipeline name.
def load(fname: str) -> 'Config':
    """Returns a Config object loaded from a YAML file.

    :param fname: Name of file to load the Config from.
    :return: Configuration.
    """
    with open(fname) as inp:
        # NOTE(review): yaml.load without an explicit Loader is unsafe on
        # untrusted input (and deprecated in PyYAML >= 5.1); consider
        # yaml.safe_load if the file may come from outside.
        obj = yaml.load(inp)
    # name-mangled call: resolves to obj._Config__add_frozen()
    obj.__add_frozen()
    return obj
Returns a Config object loaded from a file. The loaded object is not frozen. :param fname: Name of file to load the Config from. :return: Configuration.
def parse_port_from_tensorboard_output(tensorboard_output: str) -> int:
    """Parse the listening port from Tensorboard's startup message.

    :param tensorboard_output: Tensorboard output such as
        ``TensorBoard 1.8.0 at http://martin-VirtualBox:36869``
    :return: the port TensorBoard is listening on
    :raise UnexpectedOutputError: when no address/port can be found
    """
    match = re.search("at http://[^:]+:([0-9]+)", tensorboard_output)
    if match is None:
        raise UnexpectedOutputError(tensorboard_output,
                                    "Address and port where Tensorboard has started,"
                                    " e.g. TensorBoard 1.8.0 at http://martin-VirtualBox:36869")
    return int(match.group(1))
Parse tensorboard port from its outputted message. :param tensorboard_output: Output message of Tensorboard in format TensorBoard 1.8.0 at http://martin-VirtualBox:36869 :return: Returns the port TensorBoard is listening on. :raise UnexpectedOutputError
def from_cli(cls, opt):
    """Create an InjFilterRejector instance from command-line options."""
    injection_file = opt.injection_file
    chirp_time_window = \
        opt.injection_filter_rejector_chirp_time_window
    match_threshold = opt.injection_filter_rejector_match_threshold
    coarsematch_deltaf = opt.injection_filter_rejector_coarsematch_deltaf
    coarsematch_fmax = opt.injection_filter_rejector_coarsematch_fmax
    seg_buffer = opt.injection_filter_rejector_seg_buffer
    # fall back to the analysis low-frequency cutoff when no dedicated
    # rejector cutoff was supplied
    if opt.injection_filter_rejector_f_lower is not None:
        f_lower = opt.injection_filter_rejector_f_lower
    else:
        f_lower = opt.low_frequency_cutoff
    return cls(injection_file, chirp_time_window, match_threshold,
               f_lower, coarsematch_deltaf=coarsematch_deltaf,
               coarsematch_fmax=coarsematch_fmax,
               seg_buffer=seg_buffer)
Create an InjFilterRejector instance from command-line options.
def to_FIB(self, other):
    """Creates a ForwardInfluenceBlanket object representing the
    intersection of this model with the other input model.

    Args:
        other: The GroundedFunctionNetwork object to compare this model to.

    Returns:
        A ForwardInfluenceBlanket object to use for model comparison.

    Raises:
        TypeError: when ``other`` is not a GroundedFunctionNetwork.
    """
    if not isinstance(other, GroundedFunctionNetwork):
        raise TypeError(
            f"Expected GroundedFunctionNetwork, but got {type(other)}"
        )

    def shortname(var):
        # strip the namespace prefix up to '::' and the trailing '_<suffix>'
        return var[var.find("::") + 2 : var.rfind("_")]

    def shortname_vars(graph, shortname):
        # all full variable node names in graph containing the short name
        return [v for v in graph.nodes() if shortname in v]

    this_var_nodes = [
        shortname(n)
        for (n, d) in self.nodes(data=True)
        if d["type"] == "variable"
    ]
    other_var_nodes = [
        shortname(n)
        for (n, d) in other.nodes(data=True)
        if d["type"] == "variable"
    ]

    # variables present (by short name) in both networks, expanded back
    # to this network's full node names
    shared_vars = set(this_var_nodes).intersection(set(other_var_nodes))
    full_shared_vars = {
        full_var
        for shared_var in shared_vars
        for full_var in shortname_vars(self, shared_var)
    }

    return ForwardInfluenceBlanket(self, full_shared_vars)
Creates a ForwardInfluenceBlanket object representing the intersection of this model with the other input model. Args: other: The GroundedFunctionNetwork object to compare this model to. Returns: A ForwardInfluenceBlanket object to use for model comparison.
def _make_cache_key(key_prefix): if callable(key_prefix): cache_key = key_prefix() elif '%s' in key_prefix: cache_key = key_prefix % request.path else: cache_key = key_prefix cache_key = cache_key.encode('utf-8') return cache_key
Make cache key from prefix Borrowed from Flask-Cache extension
def communicate(args):
    """Uses qstat (via the job manager) to get the status of the
    requested jobs.

    Raises:
        ValueError: when the '--local' option was given, since
            communicate only applies to grid submissions.
    """
    if args.local:
        raise ValueError("The communicate command can only be used without the '--local' command line option")
    jm = setup(args)
    jm.communicate(job_ids=get_ids(args.job_ids))
Uses qstat to get the status of the requested jobs.
def from_json(json):
    """Creates a Track from a JSON map. No preprocessing is done.

    Arguments:
        json: map with the keys: name (optional) and segments.
    Return:
        A Track instance with its metrics computed.
    """
    segments = [Segment.from_json(s) for s in json['segments']]
    # NOTE(review): 'name' is documented as optional but accessed
    # unconditionally here — a missing key raises KeyError; confirm.
    return Track(json['name'], segments).compute_metrics()
Creates a Track from a JSON file. No preprocessing is done. Arguments: json: map with the keys: name (optional) and segments. Return: A track instance
def render_child(self, child, view_name=None, context=None):
    """A shortcut to render a child block from your own view function.

    If `view_name` is not provided, it defaults to the view name this
    block is being rendered with.  Returns the same value as
    :func:`render`.
    """
    effective_view = view_name or self._view_name
    return child.render(effective_view, context)
A shortcut to render a child block. Use this method to render your children from your own view function. If `view_name` is not provided, it will default to the view name you're being rendered with. Returns the same value as :func:`render`.
def create_ondemand_instances(ec2, image_id, spec, num_instances=1):
    """Requests the RunInstances EC2 API call but accounts for the race
    between recently created instance profiles, IAM roles and an
    instance creation that refers to them.

    :rtype: list[Instance]
    """
    instance_type = spec['instance_type']
    log.info('Creating %s instance(s) ... ', instance_type)
    # keep retrying while the IAM eventual-consistency errors persist
    for attempt in retry_ec2(retry_for=a_long_time,
                             retry_while=inconsistencies_detected):
        with attempt:
            return ec2.run_instances(image_id,
                                     min_count=num_instances,
                                     max_count=num_instances,
                                     **spec).instances
Requests the RunInstances EC2 API call but accounts for the race between recently created instance profiles, IAM roles and an instance creation that refers to them. :rtype: list[Instance]
def list_corpus_files(dotted_path):
    """Return a sorted list of file paths to each data file in the
    specified corpus."""
    corpus_path = get_file_path(dotted_path, extension=CORPUS_EXTENSION)

    if os.path.isdir(corpus_path):
        # recurse into subdirectories for every corpus data file
        file_paths = glob.glob(corpus_path + '/**/*.' + CORPUS_EXTENSION,
                               recursive=True)
    else:
        file_paths = [corpus_path]

    return sorted(file_paths)
Return a list of file paths to each data file in the specified corpus.
def fit_effective_mass(distances, energies, parabolic=True):
    """Fit the effective mass using either a parabolic or nonparabolic fit.

    Args:
        distances (:obj:`numpy.ndarray`): x-distances between k-points in
            reciprocal Angstroms, normalised to the band extremum.
        energies (:obj:`numpy.ndarray`): band eigenvalues normalised to
            the eigenvalue of the band extremum.
        parabolic (:obj:`bool`, optional): Use a parabolic fit of the band
            edges. If ``False`` a nonparabolic fit is attempted.
            Defaults to ``True``.

    Returns:
        float: The effective mass in units of electron rest mass, m0.
    """
    if parabolic:
        # E = a*x^2 + ... -> curvature c = d2E/dx2 = 2*a
        fit = np.polyfit(distances, energies, 2)
        c = 2 * fit[0]
    else:
        # nonparabolic dispersion: E(x) = (sqrt(4*alpha*d*x^2 + 1) - 1) / (2*alpha);
        # near x=0 this reduces to d*x^2, so the curvature is c = 2*d
        def f(x, alpha, d):
            top = np.sqrt(4 * alpha * d * x**2 + 1) - 1
            bot = 2 * alpha
            return top / bot

        # alpha is kept strictly positive so the sqrt stays well-defined
        bounds = ((1e-8, -np.inf), (np.inf, np.inf))
        popt, _ = curve_fit(f, distances, energies, p0=[1., 1.],
                            bounds=bounds)
        c = 2 * popt[1]

    # convert the eV/Angstrom^2 curvature into m0 units via atomic units
    eff_mass = (angstrom_to_bohr**2 / eV_to_hartree) / c
    return eff_mass
Fit the effective masses using either a parabolic or nonparabolic fit. Args: distances (:obj:`numpy.ndarray`): The x-distances between k-points in reciprocal Angstroms, normalised to the band extrema. energies (:obj:`numpy.ndarray`): The band eigenvalues normalised to the eigenvalue of the band extrema. parabolic (:obj:`bool`, optional): Use a parabolic fit of the band edges. If ``False`` then nonparabolic fitting will be attempted. Defaults to ``True``. Returns: float: The effective mass in units of electron rest mass, :math:`m_0`.
def tokens(self, tokenset='internal'):
    """Deserialize and return a YyTokenLattice object for the initial or
    internal token set, if provided, from the YY format or the
    JSON-formatted data; otherwise return the original value.

    Args:
        tokenset (str): return `'initial'` or `'internal'` tokens
            (default: `'internal'`)

    Returns:
        :class:`YyTokenLattice`, or None when the token set is absent.
    """
    toks = self.get('tokens', {}).get(tokenset)
    if toks is not None:
        if isinstance(toks, stringtypes):
            # YY serialization format
            toks = YyTokenLattice.from_string(toks)
        elif isinstance(toks, Sequence):
            # JSON-formatted list of tokens
            toks = YyTokenLattice.from_list(toks)
    return toks
Deserialize and return a YyTokenLattice object for the initial or internal token set, if provided, from the YY format or the JSON-formatted data; otherwise return the original string. Args: tokenset (str): return `'initial'` or `'internal'` tokens (default: `'internal'`) Returns: :class:`YyTokenLattice`
def maybe_add_child(self, fcoord):
    """Adds child node for flattened coordinate ``fcoord`` if it doesn't
    already exist, and returns it."""
    if fcoord not in self.children:
        # playing the move yields the child position
        new_position = self.position.play_move(
            coords.from_flat(fcoord))
        self.children[fcoord] = MCTSNode(
            new_position, fmove=fcoord, parent=self)
    return self.children[fcoord]
Adds child node for fcoord if it doesn't already exist, and returns it.
def list_virtual(hostname, username, password, name):
    """
    A function to list a specific virtual.

    hostname
        The host/address of the bigip device
    username
        The iControl REST username
    password
        The iControl REST password
    name
        The name of the virtual to list
    """
    ret = {'name': name, 'changes': {}, 'result': False, 'comment': ''}

    # in test mode, return the would-be action without calling the device
    if __opts__['test']:
        return _test_output(ret, 'list', params={
            'hostname': hostname,
            'username': username,
            'password': password,
            'name': name
            }
        )

    response = __salt__['bigip.list_virtual'](hostname, username, password, name)
    return _load_result(response, ret)
A function to list a specific virtual. hostname The host/address of the bigip device username The iControl REST username password The iControl REST password name The name of the virtual to list