def referenced_vertices(self):
    if len(self.entities) == 0:
        return np.array([], dtype=np.int64)
    referenced = np.concatenate([e.points for e in self.entities])
    referenced = np.unique(referenced.astype(np.int64))
    return referenced

Which vertices are referenced by an entity.

Returns
-----------
referenced_vertices: (n,) int, indexes of self.vertices
def drop(self, labels, dim=None):
    if utils.is_scalar(labels):
        labels = [labels]
    if dim is None:
        return self._drop_vars(labels)
    else:
        try:
            index = self.indexes[dim]
        except KeyError:
            raise ValueError(
                'dimension %r does not have coordinate labels' % dim)
        new_index = index.drop(labels)
        return self.loc[{dim: new_index}]

Drop variables or index labels from this dataset.

Parameters
----------
labels : scalar or list of scalars
    Name(s) of variables or index labels to drop.
dim : None or str, optional
    Dimension along which to drop index labels. By default (if
    ``dim is None``), drops variables rather than index labels.

Returns
-------
dropped : Dataset
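A minimal usage sketch of the method above, using a toy Dataset (the names and values here are purely illustrative):

import xarray as xr

ds = xr.Dataset({'temp': ('x', [10.0, 20.0, 30.0])}, coords={'x': [1, 2, 3]})
without_var = ds.drop('temp')           # drop a data variable
without_label = ds.drop([2], dim='x')   # drop the index label 2 along 'x'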
def _slice_bam(in_bam, region, tmp_dir, config):
    name_file = os.path.splitext(os.path.basename(in_bam))[0]
    # the original joined tmp_dir twice; one join is enough
    out_file = os.path.join(tmp_dir, name_file + _to_str(region) + ".bam")
    sambamba = config_utils.get_program("sambamba", config)
    region = _to_sambamba(region)
    with file_transaction(out_file) as tx_out_file:
        cmd = ("{sambamba} slice {in_bam} {region} -o {tx_out_file}")
        do.run(cmd.format(**locals()), "Slice region", {})
    return out_file

Use sambamba to slice a BAM region.
def trace_walker(module):
    for name, function in inspect.getmembers(module, inspect.isfunction):
        yield None, function
    for name, cls in inspect.getmembers(module, inspect.isclass):
        yield cls, None
        for name, method in inspect.getmembers(cls, inspect.ismethod):
            yield cls, method
        for name, function in inspect.getmembers(cls, inspect.isfunction):
            yield cls, function
        for name, accessor in inspect.getmembers(
                cls, lambda x: type(x) is property):
            yield cls, accessor.fget
            yield cls, accessor.fset
            yield cls, accessor.fdel

Defines a generator used to walk into modules.

:param module: Module to walk.
:type module: ModuleType
:return: Class / Function / Method.
:rtype: object or object
def resource_url(path):
    url = QtCore.QUrl.fromLocalFile(path)
    return str(url.toString())

Get a local filesystem url to a given resource.

.. versionadded:: 3.0
    Note that in version 3.0 we removed the use of Qt Resource files in
    favour of directly accessing on-disk resources.

:param path: Path to resource e.g. /home/timlinux/foo/bar.png
:type path: str

:return: A valid file url e.g. file:///home/timlinux/foo/bar.png
:rtype: str
def _initialize_context(self, trace_header):
    sampled = None
    if not global_sdk_config.sdk_enabled():
        sampled = False
    elif trace_header.sampled == 0:
        sampled = False
    elif trace_header.sampled == 1:
        sampled = True
    segment = FacadeSegment(
        name='facade',
        traceid=trace_header.root,
        entityid=trace_header.parent,
        sampled=sampled,
    )
    setattr(self._local, 'segment', segment)
    setattr(self._local, 'entities', [])
Create a facade segment based on environment variables set by AWS Lambda and initialize storage for subsegments.
def lst_avg(lst):
    salt.utils.versions.warn_until(
        'Neon',
        'The results of this function are currently being rounded. '
        'Beginning in the Salt Neon release, results will no longer be '
        'rounded and this warning will be removed.',
        stacklevel=3
    )
    if not isinstance(lst, collections.Hashable):
        return float(sum(lst) / len(lst))
    return float(lst)

Returns the average value of a list.

.. code-block:: jinja

    {% set my_list = [1,2,3,4] -%}
    {{ my_list | avg }}

will be rendered as:

.. code-block:: yaml

    2.5
def send_command(self, *args, **kwargs):
    if len(args) >= 2:
        expect_string = args[1]
    else:
        expect_string = kwargs.get("expect_string")
    if expect_string is None:
        expect_string = r"(OK|ERROR|Command not recognized\.)"
    expect_string = self.RETURN + expect_string + self.RETURN
    kwargs.setdefault("expect_string", expect_string)
    output = super(CiscoSSHConnection, self).send_command(*args, **kwargs)
    return output

Send command to network device; retrieve output until router_prompt or
expect_string is detected.

By default this method will keep waiting to receive data until the
network device prompt is detected. The current network device prompt
will be determined automatically.

command_string = command to execute
expect_string = pattern to search for (uses re.search; use raw strings)
delay_factor = decrease the initial delay before we start looking for data
max_loops = number of iterations before we give up and raise an exception
strip_prompt = strip the trailing prompt from the output
strip_command = strip the leading command from the output
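A hedged usage sketch for the override above; ConnectHandler is netmiko's standard entry point, and the device details are placeholders:

from netmiko import ConnectHandler

conn = ConnectHandler(device_type='cisco_ios', host='192.0.2.1',
                      username='admin', password='secret')
print(conn.send_command('show ip interface brief'))
conn.disconnect()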
def author_list(self):
    author_list = [self.submitter] + \
        [author for author in self.authors.all().exclude(pk=self.submitter.pk)]
    return ",\n".join([author.get_full_name() for author in author_list])

The list of authors as text, for the admin submission list overview.
def create_ellipse(width, height, angle):
    angle = angle / 180.0 * np.pi
    thetas = np.linspace(0, 2 * np.pi, 200)
    a = width / 2.0
    b = height / 2.0
    x = a * np.cos(thetas) * np.cos(angle) - b * np.sin(thetas) * np.sin(angle)
    y = a * np.cos(thetas) * np.sin(angle) + b * np.sin(thetas) * np.cos(angle)
    z = np.zeros(thetas.shape)
    return np.vstack((x, y, z)).T

Create a parametric ellipse sampled at 200 points.
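A quick sketch of calling the helper above (assuming numpy is imported as np alongside it):

points = create_ellipse(4.0, 2.0, 30.0)  # width 4, height 2, rotated 30 degrees
print(points.shape)                      # (200, 3): x, y, and an all-zero z column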
def find_by_name(self, item_name, items_list, name_list=None):
    if not name_list:
        names = [item.name for item in items_list if item]
    else:
        names = name_list
    if item_name in names:
        ind = names.index(item_name)
        return items_list[ind]
    return False
Return item from items_list with name item_name.
def error(self, message, rofi_args=None, **kwargs):
    rofi_args = rofi_args or []
    args = ['rofi', '-e', message]
    args.extend(self._common_args(allow_fullscreen=False, **kwargs))
    args.extend(rofi_args)
    self._run_blocking(args)

Show an error window.

This method blocks until the user presses a key. Fullscreen mode is not
supported for error windows, and if specified will be ignored.

Parameters
----------
message: string
    Error message to show.
def close(self):
    with self._lock:
        for server in self._servers.values():
            server.close()
        self._description = self._description.reset()
        self._update_servers()
        self._opened = False

        if self._publish_tp:
            self._events.put((self._listeners.publish_topology_closed,
                              (self._topology_id,)))
        if self._publish_server or self._publish_tp:
            self.__events_executor.close()
Clear pools and terminate monitors. Topology reopens on demand.
def real_time_sequencing(self, availability, oauth, event, target_calendars=()):
    args = {
        'oauth': oauth,
        'event': event,
        'target_calendars': target_calendars
    }

    if availability:
        options = {}
        options['sequence'] = self.map_availability_sequence(
            availability.get('sequence', None))
        if availability.get('available_periods', None):
            self.translate_available_periods(availability['available_periods'])
            options['available_periods'] = availability['available_periods']
        args['availability'] = options

    return self.request_handler.post(
        endpoint='real_time_sequencing', data=args, use_api_key=True).json()

Generates a real-time sequencing link to start the OAuth process with an
event to be automatically upserted.

:param dict availability: - A dict describing the availability details
    for the event:
    :sequence: An Array of dicts representing sequences to find
        availability for; each sequence can contain:
        :sequence_id - A string identifying this step in the sequence.
        :ordinal - An Integer defining the order of this step in the sequence.
        :participants - A dict stating who is required for the availability call
        :required_duration - A dict stating the length of time the event will last for
        :event - A dict describing the event
        :available_periods - A dict stating the available periods for the step
    :available_periods - A dict stating the available periods for the sequence
:param dict oauth: - A dict describing the OAuth flow required:
    :scope - A String representing the scopes to ask for within the OAuth flow
    :redirect_uri - A String containing a url to redirect the user to
        after completing the OAuth flow.
    :state - A String representing additional state to be passed within
        the OAuth flow.
:param dict event: - A dict describing the event
:param list target_calendars: - A list of dicts stating into which
    calendars to insert the created event

See http://www.cronofy.com/developers/api#upsert-event for reference.
def pause(msg="Press Enter to Continue..."):
    print('\n' + Fore.YELLOW + msg + Fore.RESET, end='')
    input()

Block until the user presses Enter to continue.
def atlas_init(blockstack_opts, db, recover=False, port=None):
    if port is None:
        port = blockstack_opts['rpc_port']

    atlas_state = None
    if is_atlas_enabled(blockstack_opts):
        atlas_seed_peers = filter(
            lambda x: len(x) > 0, blockstack_opts['atlas_seeds'].split(","))
        atlas_blacklist = filter(
            lambda x: len(x) > 0, blockstack_opts['atlas_blacklist'].split(","))
        zonefile_dir = blockstack_opts['zonefiles']
        my_hostname = blockstack_opts['atlas_hostname']
        my_port = blockstack_opts['atlas_port']

        initial_peer_table = atlasdb_init(
            blockstack_opts['atlasdb_path'], zonefile_dir, db,
            atlas_seed_peers, atlas_blacklist, validate=True, recover=recover)
        atlas_peer_table_init(initial_peer_table)

        atlas_state = atlas_node_init(
            my_hostname, my_port, blockstack_opts['atlasdb_path'],
            zonefile_dir, db.working_dir)

    return atlas_state
Start up atlas functionality
def values(self):
    return [self.policy.header_fetch_parse(k, v)
            for k, v in self._headers]
Return a list of all the message's header values. These will be sorted in the order they appeared in the original message, or were added to the message, and may contain duplicates. Any fields deleted and re-inserted are always appended to the header list.
def loadtxt_str(path: PathOrStr) -> np.ndarray:
    "Return `ndarray` of `str` of lines of text from `path`."
    with open(path, 'r') as f:
        lines = f.readlines()
    return np.array([l.strip() for l in lines])
Return `ndarray` of `str` of lines of text from `path`.
def open_download_stream(self, file_id):
    gout = GridOut(self._collection, file_id)
    gout._ensure_file()
    return gout

Opens a Stream from which the application can read the contents of the
stored file specified by file_id.

For example::

    my_db = MongoClient().test
    fs = GridFSBucket(my_db)
    # get _id of file to read.
    file_id = fs.upload_from_stream("test_file", "data I want to store!")
    grid_out = fs.open_download_stream(file_id)
    contents = grid_out.read()

Returns an instance of :class:`~gridfs.grid_file.GridOut`.

Raises :exc:`~gridfs.errors.NoFile` if no file with file_id exists.

:Parameters:
  - `file_id`: The _id of the file to be downloaded.
def from_pdb(cls, path, forcefield=None, loader=PDBFile, strict=True, **kwargs):
    pdb = loader(path)
    box = kwargs.pop('box', pdb.topology.getPeriodicBoxVectors())
    positions = kwargs.pop('positions', pdb.positions)
    velocities = kwargs.pop('velocities', getattr(pdb, 'velocities', None))

    if strict and not forcefield:
        from .md import FORCEFIELDS as forcefield
        logger.info('! Forcefields for PDB not specified. Using default: %s',
                    ', '.join(forcefield))
    pdb.forcefield = ForceField(*list(process_forcefield(*forcefield)))

    return cls(master=pdb.forcefield, topology=pdb.topology,
               positions=positions, velocities=velocities, box=box,
               path=path, **kwargs)

Loads topology, positions and, potentially, velocities and box vectors
from a PDB or PDBx file.

Parameters
----------
path : str
    Path to PDB/PDBx file
forcefield : list of str
    Paths to FFXML and/or FRCMOD forcefields. REQUIRED.

Returns
-------
pdb : SystemHandler
    SystemHandler with topology, positions, and, potentially, velocities
    and box vectors. Forcefields are embedded in the `master` attribute.
def serve(application, host='127.0.0.1', port=8080, threads=4, **kw):
    serve_(application, host=host, port=int(port), threads=int(threads), **kw)
The recommended development HTTP server. Note that this server performs additional buffering and will not honour chunked encoding breaks.
def check_errors(self):
    errors = ERROR_PATTTERN.findall(self.out)
    if errors:
        self.log.error('! Errors occurred:')
        self.log.error('\n'.join(
            [error.replace('\r', '').strip()
             for error in chain(*errors) if error.strip()]
        ))
        self.log.error('! See "%s.log" for details.' % self.project_name)
        if self.opt.exit_on_error:
            self.log.error('! Exiting...')
            sys.exit(1)

Check if errors occurred during a latex run by scanning the output.
def format_name(self, name, indent_size=4):
    name_block = ''
    if self.short_desc is None:
        name_block += name + '\n'
    else:
        name_block += name + ': ' + self.short_desc + '\n'
    if self.long_desc is not None:
        name_block += self.wrap_lines(self.long_desc, 1,
                                      indent_size=indent_size)
        name_block += '\n'
    return name_block

Format the name of this verifier.

The name will be formatted as:
<name>: <short description>
    long description if one is given followed by \n
    otherwise no long description

Args:
    name (string): A name for this validator
    indent_size (int): The number of spaces to indent the description

Returns:
    string: The formatted name block with a short and/or long
        description appended.
def add_package_dependency(self, package_name, version):
    if not PEP440_VERSION_PATTERN.match(version):
        raise ValueError('Invalid Version: "{}"'.format(version))
    self.dependencies.add(PackageDependency(package_name, version))

Add a package to the list of dependencies.

:param package_name: The name of the package dependency
:type package_name: str
:param version: The (minimum) version of the package
:type version: str
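A usage sketch; `manifest` is a hypothetical object exposing the method above:

manifest.add_package_dependency('requests', '2.20.0')
manifest.add_package_dependency('numpy', 'not-a-version')  # raises ValueError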
def get_info(self, symbol, as_of=None):
    version = self._read_metadata(symbol, as_of=as_of, read_preference=None)
    handler = self._read_handler(version, symbol)
    if handler and hasattr(handler, 'get_info'):
        return handler.get_info(version)
    return {}

Reads and returns information about the data stored for symbol.

Parameters
----------
symbol : `str`
    symbol name for the item
as_of : `str` or `int` or `datetime.datetime`
    Return the data as it was as_of the point in time.
    `int` : specific version number
    `str` : snapshot name which contains the version
    `datetime.datetime` : the version of the data that existed as_of
    the requested point in time

Returns
-------
dictionary of the information (specific to the type of data)
def add(self, name, value):
    normalized_name = normalize_name(name, self._normalize_overrides)
    self._map[normalized_name].append(value)
Append the name-value pair to the record.
def make_pattern(self, pattern, listsep=','):
    if self is Cardinality.one:
        return pattern
    elif self is Cardinality.zero_or_one:
        return self.schema % pattern
    else:
        return self.schema % (pattern, listsep, pattern)

Make pattern for a data type with the specified cardinality.

.. code-block:: python

    yes_no_pattern = r"yes|no"
    many_yes_no = Cardinality.one_or_more.make_pattern(yes_no_pattern)

:param pattern: Regular expression for type (as string).
:param listsep: List separator for multiple items (as string, optional)
:return: Regular expression pattern for type with cardinality.
def create(cls, name, dead_interval=40, hello_interval=10,
           hello_interval_type='normal', dead_multiplier=1,
           mtu_mismatch_detection=True, retransmit_interval=5,
           router_priority=1, transmit_delay=1, authentication_type=None,
           password=None, key_chain_ref=None):
    json = {'name': name,
            'authentication_type': authentication_type,
            'password': password,
            'key_chain_ref': element_resolver(key_chain_ref),
            'dead_interval': dead_interval,
            'dead_multiplier': dead_multiplier,
            'hello_interval': hello_interval,
            'hello_interval_type': hello_interval_type,
            'mtu_mismatch_detection': mtu_mismatch_detection,
            'retransmit_interval': retransmit_interval,
            'router_priority': router_priority,
            'transmit_delay': transmit_delay}

    return ElementCreator(cls, json)

Create custom OSPF interface settings profile.

:param str name: name of interface settings
:param int dead_interval: in seconds
:param str hello_interval: in seconds
:param str hello_interval_type: \|normal\|fast_hello
:param int dead_multiplier: fast hello packet multiplier
:param bool mtu_mismatch_detection: True|False
:param int retransmit_interval: in seconds
:param int router_priority: set priority
:param int transmit_delay: in seconds
:param str authentication_type: \|password\|message_digest
:param str password: max 8 chars (required when
    authentication_type='password')
:param str,Element key_chain_ref: OSPFKeyChain (required when
    authentication_type='message_digest')
:raises CreateElementFailed: create failed with reason
:return: instance with meta
:rtype: OSPFInterfaceSetting
def _setup_source_and_destination(self):
    super(FetchTransformSaveWithSeparateNewCrashSourceApp, self) \
        ._setup_source_and_destination()
    if self.config.new_crash_source.new_crash_source_class:
        self.new_crash_source = \
            self.config.new_crash_source.new_crash_source_class(
                self.config.new_crash_source,
                name=self.app_instance_name,
                quit_check_callback=self.quit_check
            )
    else:
        self.new_crash_source = self.source

Use the base class to set up the source and destinations, but add to
that setup the instantiation of the "new_crash_source".
def unique_otuids(groups):
    uniques = {key: set() for key in groups}
    for i, group in enumerate(groups):
        values = list(groups.values())  # list() so slicing works on Python 3
        to_combine = values[:i] + values[i+1:]
        combined = combine_sets(*to_combine)
        uniques[group] = groups[group].difference(combined)
    return uniques

Get unique OTUIDs of each category.

:type groups: dict
:param groups: {Category name: OTUIDs in category}
:rtype: dict
:return: Dict keyed on category name with unique OTUIDs as values.
def _print_app(self, app, models):
    self._print(self._app_start % app)
    self._print_models(models)
    self._print(self._app_end)
Print the models of app, showing them in a package.
def load_json_from_file(file_path):
    try:
        with open(file_path) as f:
            json_data = json.load(f)
    except ValueError as e:
        raise ValueError('Given file {} is not a valid JSON file: {}'.format(
            file_path, e))
    else:
        return json_data
Load schema from a JSON file
def add_graph(self, run_key, device_name, graph_def, debug=False):
    graph_dict = (self._run_key_to_debug_graphs if debug else
                  self._run_key_to_original_graphs)
    if run_key not in graph_dict:
        graph_dict[run_key] = dict()
    graph_dict[run_key][tf.compat.as_str(device_name)] = (
        debug_graphs_helper.DebugGraphWrapper(graph_def))

Add a GraphDef.

Args:
    run_key: A key for the run, containing information about the feeds,
        fetches, and targets.
    device_name: The name of the device that the `GraphDef` is for.
    graph_def: An instance of the `GraphDef` proto.
    debug: Whether `graph_def` consists of the debug ops.
def set_fun_prop(f, k, v):
    if not hasattr(f, _FUN_PROPS):
        setattr(f, _FUN_PROPS, {})
    if not isinstance(getattr(f, _FUN_PROPS), dict):
        raise InternalError("Invalid properties dictionary for %s" % str(f))
    getattr(f, _FUN_PROPS)[k] = v
Set the value of property `k` to be `v` in function `f`. We define properties as annotations added to a function throughout the process of defining a function for verification, e.g. the argument types. This sets function `f`'s property named `k` to be value `v`. Users should never access this function directly.
def execute(self, *args, **kwargs):
    try:
        return self.client.execute(*args, **kwargs)
    except requests.exceptions.HTTPError as err:
        res = err.response
        logger.error("%s response executing GraphQL." % res.status_code)
        logger.error(res.text)
        self.display_gorilla_error_if_found(res)
        six.reraise(*sys.exc_info())
Wrapper around execute that logs in cases of failure.
def do_we_have_enough_cookies(cj, class_name):
    domain = 'class.coursera.org'
    path = "/" + class_name

    return cj.get('csrf_token', domain=domain, path=path) is not None
Check whether we have all the required cookies to authenticate on class.coursera.org.
def load_cml(cml_filename):
    parser = make_parser()
    parser.setFeature(feature_namespaces, 0)
    dh = CMLMoleculeLoader()
    parser.setContentHandler(dh)
    parser.parse(cml_filename)
    return dh.molecules

Load the molecules from a CML file.

Argument:
  | ``cml_filename`` -- The filename of a CML file.

Returns a list of molecule objects with optional molecular graph
attribute and extra attributes.
def generate_id(self, obj):
    object_type = type(obj).__name__.lower()
    return '{}_{}'.format(object_type, self.get_object_id(obj))
Generate unique document id for ElasticSearch.
def get(self, block=True, timeout=None):
    return self._queue.get(block, timeout)
Get item from underlying queue.
def get_cached_data(
        step: 'projects.ProjectStep'
) -> typing.Union[None, STEP_DATA]:
    cache_path = step.report.results_cache_path
    if not os.path.exists(cache_path):
        return None

    out = create_data(step)

    try:
        with open(cache_path, 'r') as f:
            cached_data = json.load(f)
    except Exception:
        return None

    file_writes = [
        file_io.entry_from_dict(fw)
        for fw in cached_data['file_writes']
    ]

    return out \
        ._replace(**cached_data) \
        ._replace(file_writes=file_writes)

Attempts to load and return the cached step data for the specified step.
If no cached data exists, or the cached data is corrupt, a None value is
returned instead.

:param step: The step for which the cached data should be loaded
:return: Either a step data structure containing the cached step data
    or None if no cached data exists for the step
def check(degree, knot_vector, num_ctrlpts):
    try:
        if knot_vector is None or len(knot_vector) == 0:
            raise ValueError("Input knot vector cannot be empty")
    except TypeError as e:
        print("An error occurred: {}".format(e.args[-1]))
        raise TypeError("Knot vector must be a list or tuple")
    except Exception:
        raise

    if len(knot_vector) != degree + num_ctrlpts + 1:
        return False

    prev_knot = knot_vector[0]
    for knot in knot_vector:
        if prev_knot > knot:
            return False
        prev_knot = knot

    return True

Checks the validity of the input knot vector.

Please refer to The NURBS Book (2nd Edition), p.50 for details.

:param degree: degree of the curve or the surface
:type degree: int
:param knot_vector: knot vector to be checked
:type knot_vector: list, tuple
:param num_ctrlpts: number of control points
:type num_ctrlpts: int
:return: True if the knot vector is valid, False otherwise
:rtype: bool
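A worked sketch of the validity rule len(knot_vector) == degree + num_ctrlpts + 1 enforced above:

kv = [0, 0, 0, 0.5, 1, 1, 1]      # 7 knots for degree 2 and 4 control points
print(check(2, kv, 4))            # True: 7 == 2 + 4 + 1 and non-decreasing
print(check(2, [0, 0, 1, 1], 4))  # False: wrong length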
def _get_elements(mol, label):
    elements = [int(mol.GetAtom(i).GetAtomicNum()) for i in label]
    return elements

Get the elements of the atoms in the specified order.

Args:
    mol: The molecule. OpenBabel OBMol object.
    label: The atom indices. List of integers.

Returns:
    Elements. List of integers.
def kth_to_last(head, k):
    if not (head or k > -1):
        return False
    p1 = head
    p2 = head
    for i in range(1, k+1):
        if p1 is None:
            raise IndexError
        p1 = p1.next
    while p1:
        p1 = p1.next
        p2 = p2.next
    return p2
This is an optimal method using iteration. We move p1 k steps ahead into the list. Then we move p1 and p2 together until p1 hits the end.
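A small sketch exercising the two-pointer logic; this Node class is an illustrative stand-in for whatever list node the caller uses:

class Node:
    def __init__(self, val, next=None):
        self.val = val
        self.next = next

head = Node(1, Node(2, Node(3, Node(4))))
print(kth_to_last(head, 1).val)  # 4, the last node
print(kth_to_last(head, 2).val)  # 3, the second to last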
def get_bool(self, key: str) -> Optional[bool]:
    v = self.get(key)
    if v is None:
        return None
    if v in ['true', 'True']:
        return True
    if v in ['false', 'False']:
        return False
    raise ConfigTypeError(self.full_key(key), v, 'bool')

Returns an optional configuration value, as a bool, by its key, or None
if it doesn't exist. If the configuration value isn't a legal boolean,
this function will throw an error.

:param str key: The requested configuration key.
:return: The configuration key's value, or None if one does not exist.
:rtype: Optional[bool]
:raises ConfigTypeError: The configuration value existed but couldn't be
    coerced to bool.
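A usage sketch; `cfg` and its constructor are hypothetical stand-ins for the config class this method belongs to:

cfg = Config({'enabled': 'true'})   # hypothetical constructor
print(cfg.get_bool('enabled'))      # True
print(cfg.get_bool('missing'))      # None
# a stored value like 'yes' would raise ConfigTypeError rather than coerce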
def applyReferrerVouchersTemporarily(sender, **kwargs):
    if not getConstant('referrals__enableReferralProgram'):
        return

    logger.debug('Signal fired to temporarily apply referrer vouchers.')

    reg = kwargs.pop('registration')

    try:
        c = Customer.objects.get(user__email=reg.email)
        vouchers = c.getReferralVouchers()
    except ObjectDoesNotExist:
        vouchers = None

    if not vouchers:
        logger.debug('No referral vouchers found.')
        return

    for v in vouchers:
        TemporaryVoucherUse(voucher=v, registration=reg, amount=0).save()
Unlike voucher codes which have to be manually supplied, referrer discounts are automatically applied here, assuming that the referral program is enabled.
def folder2ver(folder):
    ver = folder.split('EnergyPlus')[-1]
    ver = ver[1:]
    splitapp = ver.split('-')
    ver = '.'.join(splitapp)
    return ver

Get the version number from the E+ install folder.
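A quick sketch of the parsing above (the install path is illustrative):

print(folder2ver('/Applications/EnergyPlus-8-9-0'))  # '8.9.0'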
def __connect(host, port, username, password, private_key):
    ssh = paramiko.SSHClient()
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    if private_key is not None and password is not None:
        # the password doubles as the private key passphrase
        private_key = paramiko.RSAKey.from_private_key_file(private_key, password)
    elif private_key is not None:
        # no passphrase; the original passed the (None) password here too
        private_key = paramiko.RSAKey.from_private_key_file(private_key)
    try:
        ssh.connect(host, port, username, password, private_key)
    except Exception as e:
        raise e
    return ssh

Establish remote connection.

:param host: Hostname or IP address to connect to
:param port: Port number to use for SSH
:param username: Username credentials for SSH access
:param password: Password credentials for SSH access (or private key passphrase)
:param private_key: Private key to bypass clear text password
:return: Paramiko SSH client instance if connection was established
:raises Exception: if connection was unsuccessful
def get_file(self, index, doc_type, id=None):
    data = self.get(index, doc_type, id)
    return data['_name'], base64.standard_b64decode(data['content'])
Return the filename and memory data stream
def save(self, specfiles=None, compress=True, path=None):
    if specfiles is None:
        specfiles = [_ for _ in viewkeys(self.info)]
    else:
        specfiles = aux.toList(specfiles)

    for specfile in specfiles:
        if specfile not in self.info:
            warntext = 'Error while calling "SiiContainer.save()": "%s" is'\
                       ' not present in "SiiContainer.info"!'\
                       % (specfile, )
            warnings.warn(warntext)
            continue
        else:
            path = self.info[specfile]['path'] if path is None else path

        with aux.PartiallySafeReplace() as msr:
            filename = specfile + '.siic'
            filepath = aux.joinpath(path, filename)
            with msr.open(filepath, mode='w+b') as openfile:
                self._writeContainer(openfile, specfile, compress)

Writes the specified specfiles to ``siic`` files on the hard disk.

.. note::
    If ``.save()`` is called and no ``siic`` files are present in the
    specified path new files are generated, otherwise old files are
    replaced.

:param specfiles: the name of an ms-run file or a list of names. If None
    all specfiles are selected.
:param compress: bool, True to use zip file compression
:param path: filedirectory to which the ``siic`` files are written. By
    default the parameter is set to ``None`` and the filedirectory is
    read from ``self.info[specfile]['path']``
def _firmware_update(firmwarefile='', host='', directory=''):
    dest = os.path.join(directory, firmwarefile[7:])
    __salt__['cp.get_file'](firmwarefile, dest)

    username = __pillar__['proxy']['admin_user']
    password = __pillar__['proxy']['admin_password']
    __salt__['dracr.update_firmware'](dest,
                                      host=host,
                                      admin_username=username,
                                      admin_password=password)
Update firmware for a single host
def table(self):
    if hasattr(self.data, 'table_on') and self.data.table_on:
        assert_index_sane(self.data.table, len(self.song.tables))
        return self.song.tables[self.data.table]
a ```pylsdj.Table``` referencing the instrument's table, or None if the instrument doesn't have a table
def die(self):
    if self.process:
        _log(self.logging,
             'Stopping {0} server with PID: {1} running at {2}.'.format(
                 self.__class__.__name__, self.process.pid, self.check_url))
        self._kill()
Stops the server if it is running.
def get_healthcheck(self, service_id, version_number, name):
    content = self._fetch("/service/%s/version/%d/healthcheck/%s" % (
        service_id, version_number, name))
    return FastlyHealthCheck(self, content)
Get the healthcheck for a particular service and version.
def load_wikiqa():
    dataset_path = _load('wikiqa')

    data = _load_csv(dataset_path, 'data', set_index=True)
    questions = _load_csv(dataset_path, 'questions', set_index=True)
    sentences = _load_csv(dataset_path, 'sentences', set_index=True)
    vocabulary = _load_csv(dataset_path, 'vocabulary', set_index=True)

    entities = {
        'data': (data, 'd3mIndex', None),
        'questions': (questions, 'qIndex', None),
        'sentences': (sentences, 'sIndex', None),
        'vocabulary': (vocabulary, 'index', None)
    }
    relationships = [
        ('questions', 'qIndex', 'data', 'qIndex'),
        ('sentences', 'sIndex', 'data', 'sIndex')
    ]

    target = data.pop('isAnswer').values

    return Dataset(load_wikiqa.__doc__, data, target, accuracy_score,
                   stratify=True,  # was "startify", an apparent typo
                   entities=entities, relationships=relationships)
A Challenge Dataset for Open-Domain Question Answering. WikiQA dataset is a publicly available set of question and sentence (QS) pairs, collected and annotated for research on open-domain question answering. source: "Microsoft" sourceURI: "https://www.microsoft.com/en-us/research/publication/wikiqa-a-challenge-dataset-for-open-domain-question-answering/#"
def loo_compare(psisloo1, psisloo2):
    loores = psisloo1.pointwise.join(
        psisloo2.pointwise, lsuffix='_m1', rsuffix='_m2')
    loores['pw_diff'] = loores.pointwise_elpd_m2 - loores.pointwise_elpd_m1

    sum_elpd_diff = loores.apply(numpy.sum).pw_diff
    sd_elpd_diff = loores.apply(numpy.std).pw_diff

    elpd_diff = {
        'diff': sum_elpd_diff,
        'se_diff': math.sqrt(len(loores.pw_diff)) * sd_elpd_diff
    }
    return elpd_diff

Compares two models using pointwise approximate leave-one-out
cross-validation. For the method to be valid, the two models should have
been fit on the same input data.

Parameters
-------------------
psisloo1 : Psisloo object for model1
psisloo2 : Psisloo object for model2

Returns
-------------------
Dict with two values:
    diff: difference in elpd (estimated log predictive density) between
        the two models, where a positive value indicates that model2 is
        a better fit than model1.
    se_diff: estimated standard error of the difference between
        model2 & model1.
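A hedged usage sketch, assuming two Psisloo objects from earlier PSIS-LOO fits (variable names are illustrative):

comparison = loo_compare(model1_psisloo, model2_psisloo)
print(comparison['diff'], comparison['se_diff'])  # a positive diff favors model2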
def get_user(self, username=None):
    username = username or self.username or ''
    url = self.url('GET_USER', username=username)
    response = self.dispatch('GET', url)
    try:
        return (response[0], response[1]['user'])
    except TypeError:
        pass
    return response

Returns user information. If username is not defined, tries to return
the authenticated user's own information.
def _inner_default(x1, x2):
    order = 'F' if all(a.data.flags.f_contiguous for a in (x1, x2)) else 'C'
    if is_real_dtype(x1.dtype):
        if x1.size > THRESHOLD_MEDIUM:
            return np.tensordot(x1, x2, [range(x1.ndim)] * 2)
        else:
            return np.dot(x1.data.ravel(order), x2.data.ravel(order))
    else:
        return np.vdot(x2.data.ravel(order), x1.data.ravel(order))
Default Euclidean inner product implementation.
def _make_default_header(self):
    td_max = 0
    for idx, tr in enumerate(self._tr_nodes):
        td_count = len(tr.contents.filter_tags(matches=ftag('td')))
        if td_count > td_max:
            td_max = td_count
    self._log('creating default header (%d columns)' % td_max)
    return ['column%d' % n for n in range(0, td_max)]

Return a generic placeholder header based on the table's column count.
def evaluate_feature_performance(project, force=False):
    if not force and not project.on_pr():
        raise SkippedValidationTest('Not on PR')

    out = project.build()
    X_df, y, features = out['X_df'], out['y'], out['features']
    proposed_feature = get_proposed_feature(project)
    accepted_features = get_accepted_features(features, proposed_feature)
    evaluator = GFSSFAcceptanceEvaluator(X_df, y, accepted_features)
    accepted = evaluator.judge(proposed_feature)

    if not accepted:
        raise FeatureRejected
Evaluate feature performance
def plot_string_match(sf, regex, field, **kwargs):
    index, shape_records = string_match(sf, regex, field)
    plot(shape_records, **kwargs)

Plot the geometry of a shapefile whose fields match a given regular
expression.

:param sf: shapefile
:type sf: shapefile object
:param regex: regular expression to match
:type regex: string
:param field: field number to be matched with the regex
:type field: integer
def load_names(self):
    self.all_male_first_names = load_csv_data('male-first-names.csv')
    self.all_female_first_names = load_csv_data('female-first-names.csv')
    self.all_last_names = load_csv_data('CSV_Database_of_Last_Names.csv')
Loads a name database from package data Uses data files sourced from http://www.quietaffiliate.com/free-first-name-and-last-name-databases-csv-and-sql/
def cancel(self):
    if (not self.cancelled) and (self._fn is not None):
        self._cancelled = True
        self._drop_fn()
Cancel the scheduled task.
def Copy(self):
    result = QueueManager(store=self.data_store, token=self.token)
    result.prev_frozen_timestamps = self.prev_frozen_timestamps
    result.frozen_timestamp = self.frozen_timestamp
    return result

Return a copy of the queue manager.

Returns:
    Copy of the QueueManager object.

NOTE: pending writes/deletions are not copied. On the other hand, if the
original object has a frozen timestamp, a copy will have it as well.
def discard(sample, embedding):
    unembeded = {}
    for v, chain in iteritems(embedding):
        vals = [sample[u] for u in chain]
        if _all_equal(vals):
            unembeded[v] = vals.pop()
        else:
            return
    yield unembeded

Discards the sample if any chain is broken.

Args:
    sample (dict): A sample of the form {v: val, ...} where v is a
        variable in the target graph and val is the associated value as
        determined by a binary quadratic model sampler.
    embedding (dict): The mapping from the source graph to the target
        graph. Should be of the form {v: {s, ...}, ...} where v is a
        node in the source graph and s is a node in the target graph.

Yields:
    dict: The unembedded sample if no chains were broken.
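A small sketch of unembedding one sample, assuming the iteritems import and _all_equal helper used above:

sample = {0: 1, 1: 1, 2: -1}
embedding = {'a': {0, 1}, 'b': {2}}
print(list(discard(sample, embedding)))             # [{'a': 1, 'b': -1}]
print(list(discard({0: 1, 1: -1}, {'a': {0, 1}})))  # []: broken chain, discarded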
def create_connection(self, alias='default', **kwargs):
    kwargs.setdefault('serializer', serializer)
    conn = self._conns[alias] = Elasticsearch(**kwargs)
    return conn
Construct an instance of ``elasticsearch.Elasticsearch`` and register it under given alias.
def sql_key(self, generation, sql, params, order, result_type,
            using='default'):
    suffix = self.keygen.gen_key(sql, params, order, result_type)
    using = settings.DB_CACHE_KEYS[using]
    return '%s_%s_query_%s.%s' % (self.prefix, using, generation, suffix)
Return the specific cache key for the sql query described by the pieces of the query and the generation key.
def get_single_outfile(directory, archive, extension=""):
    outfile = os.path.join(directory, stripext(archive))
    if os.path.exists(outfile + extension):
        # pick the first numbered name that is free; incrementing before
        # recomputing avoids re-checking the same candidate twice
        i = 1
        newfile = "%s%d" % (outfile, i)
        while os.path.exists(newfile + extension):
            i += 1
            newfile = "%s%d" % (outfile, i)
        outfile = newfile
    return outfile + extension
Get output filename if archive is in a single file format like gzip.
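A usage sketch, assuming stripext() drops the archive extension (e.g. 'data.gz' -> 'data'):

out = get_single_outfile('/tmp/out', 'data.gz')
# '/tmp/out/data' if free, otherwise '/tmp/out/data1', '/tmp/out/data2', ...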
def relation_set(relation_id=None, relation_settings=None, **kwargs):
    try:
        if relation_id in relation_ids('cluster'):
            return leader_set(settings=relation_settings, **kwargs)
        else:
            raise NotImplementedError
    except NotImplementedError:
        return _relation_set(relation_id=relation_id,
                             relation_settings=relation_settings, **kwargs)
Attempt to use leader-set if supported in the current version of Juju, otherwise falls back on relation-set. Note that we only attempt to use leader-set if the provided relation_id is a peer relation id or no relation id is provided (in which case we assume we are within the peer relation context).
def seek_in_frame(self, pos, *args, **kwargs):
    super().seek(self._total_offset + pos, *args, **kwargs)
Seeks relative to the total offset of the current contextual frames.
def iter_parents(self, paths='', **kwargs):
    skip = kwargs.get("skip", 1)
    if skip == 0:
        skip = 1
    kwargs['skip'] = skip
    return self.iter_items(self.repo, self, paths, **kwargs)

Iterate _all_ parents of this commit.

:param paths:
    Optional path or list of paths limiting the Commits to those that
    contain at least one of the paths
:param kwargs: All arguments allowed by git-rev-list
:return: Iterator yielding Commit objects which are parents of self
def set_state(self, state):
    for k, v in state.items():
        setattr(self, k, v)

Set the view state. The passed object is the persisted `self.state`
bunch. May be overridden.
def separators(self, reordered=True):
    if reordered:
        return [list(self.snrowidx[self.sncolptr[k] + self.snptr[k+1] - self.snptr[k]:self.sncolptr[k+1]])
                for k in range(self.Nsn)]
    else:
        return [list(self.__p[self.snrowidx[self.sncolptr[k] + self.snptr[k+1] - self.snptr[k]:self.sncolptr[k+1]]])
                for k in range(self.Nsn)]
Returns a list of separator sets
def get_avatar_url(self, size=2):
    hashbytes = self.get_ps('avatar_hash')

    if hashbytes != "\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000":
        ahash = hexlify(hashbytes).decode('ascii')
    else:
        ahash = 'fef49e7fa7e1997310d705b2a6158ff8dc1cdfeb'

    sizes = {
        0: '',
        1: '_medium',
        2: '_full',
    }
    url = "http://cdn.akamai.steamstatic.com/steamcommunity/public/images/avatars/%s/%s%s.jpg"

    return url % (ahash[:2], ahash, sizes[size])

Get URL to avatar picture.

:param size: possible values are ``0``, ``1``, or ``2`` corresponding to
    small, medium, large
:type size: :class:`int`
:return: url to avatar
:rtype: :class:`str`
def _generate_input(options):
    if options.input:
        fp = open(options.input) if options.input != "-" else sys.stdin
        for string in fp.readlines():
            yield string
    if options.strings:
        for string in options.strings:
            yield string

First sends strings from any given file, one string per line, then sends
any strings provided on the command line.

:param options: ArgumentParser or equivalent to provide options.input
    and options.strings.
:return: string
def store(self, thing):
    to_store = {'field1': thing.field1,
                'date_field': thing.date_field,
                }
    to_store['stuff'] = Binary(cPickle.dumps(thing.stuff))
    self._arctic_lib.check_quota()
    self._collection.insert_one(to_store)
Simple persistence method
async def list(self, **params) -> Mapping:
    response = await self.docker._query_json("images/json", "GET",
                                             params=params)
    return response
List of images
def list_sessions(self) -> List[Session]:
    data = self._client.get("/sessions")
    return [Session.from_json(item) for item in data["sessions"]]
List all the active sessions in Livy.
def to_java_rdd(jsc, features, labels, batch_size):
    data_sets = java_classes.ArrayList()
    num_batches = int(len(features) / batch_size)
    for i in range(num_batches):
        xi = ndarray(features[:batch_size].copy())
        yi = ndarray(labels[:batch_size].copy())
        data_set = java_classes.DataSet(xi.array, yi.array)
        data_sets.add(data_set)
        features = features[batch_size:]
        labels = labels[batch_size:]

    return jsc.parallelize(data_sets)

Convert numpy features and labels into a JavaRDD of DL4J DataSet type.

:param jsc: JavaSparkContext from pyjnius
:param features: numpy array with features
:param labels: numpy array with labels
:return: JavaRDD<DataSet>
def template_delete(call=None, kwargs=None):
    if call != 'function':
        raise SaltCloudSystemExit(
            'The template_delete function must be called with -f or --function.'
        )

    if kwargs is None:
        kwargs = {}

    name = kwargs.get('name', None)
    template_id = kwargs.get('template_id', None)

    if template_id:
        if name:
            log.warning(
                'Both the \'template_id\' and \'name\' arguments were provided. '
                '\'template_id\' will take precedence.'
            )
    elif name:
        template_id = get_template_id(kwargs={'name': name})
    else:
        raise SaltCloudSystemExit(
            'The template_delete function requires either a \'name\' or a '
            '\'template_id\' to be provided.'
        )

    server, user, password = _get_xml_rpc()
    auth = ':'.join([user, password])
    response = server.one.template.delete(auth, int(template_id))

    data = {
        'action': 'template.delete',
        'deleted': response[0],
        'template_id': response[1],
        'error_code': response[2],
    }

    return data

Deletes the given template from OpenNebula. Either a name or a
template_id must be supplied.

.. versionadded:: 2016.3.0

name
    The name of the template to delete. Can be used instead of
    ``template_id``.

template_id
    The ID of the template to delete. Can be used instead of ``name``.

CLI Example:

.. code-block:: bash

    salt-cloud -f template_delete opennebula name=my-template
    salt-cloud --function template_delete opennebula template_id=5
def _node_is_match(qualified_name, package_names, fqn):
    if len(qualified_name) == 1 and fqn[-1] == qualified_name[0]:
        return True

    if qualified_name[0] in package_names:
        if is_selected_node(fqn, qualified_name):
            return True

    for package_name in package_names:
        local_qualified_node_name = [package_name] + qualified_name
        if is_selected_node(fqn, local_qualified_node_name):
            return True

    return False

Determine if a qualified name matches an fqn, given the set of package
names in the graph.

:param List[str] qualified_name: The components of the selector or node
    name, split on '.'.
:param Set[str] package_names: The set of package names in the graph.
:param List[str] fqn: The node's fully qualified name in the graph.
def create_with_secret(self, name, secret, encryption):
    try:
        encryption = encryption or DEFAULT_ENCRYPTION
        enc = ENCRYPTION_MAP[encryption]
    except KeyError:
        raise TypeError('encryption must be one of "cleartext", "md5"'
                        ' or "sha512"')

    cmd = 'username %s secret %s %s' % (name, enc, secret)
    return self.configure(cmd)

Creates a new user on the local node.

Args:
    name (str): The name of the user to create
    secret (str): The secret (password) to assign to this user
    encryption (str): Specifies how the secret is encoded. Valid values
        are "cleartext", "md5", "sha512". The default is "cleartext"

Returns:
    True if the operation was successful otherwise False
def get_knowledge_category_metadata(self):
    metadata = dict(self._mdata['knowledge_category'])
    metadata.update({'existing_id_values': self._my_map['knowledgeCategoryId']})
    return Metadata(**metadata)

Gets the metadata for a knowledge category.

return: (osid.Metadata) - metadata for the knowledge category
*compliance: mandatory -- This method must be implemented.*
def instruction(self, val):
    self._instruction = val
    if isinstance(val, tuple):
        if len(val) == 2:  # "is 2" compared identity, not equality
            self._action, self.command = val
        else:
            self._action, self.command, self.extra = val
    else:
        split = val.split(" ", 1)
        if split[0] == "FROM":
            split = val.split(" ", 2)
        if len(split) == 3:
            self._action, self.command, self.extra = split
        else:
            self._action, self.command = split
Set the action and command from an instruction
def set_basic(self, realm='authentication required'):
    dict.clear(self)
    dict.update(self, {'__auth_type__': 'basic', 'realm': realm})
    if self.on_update:
        self.on_update(self)
Clear the auth info and enable basic auth.
def __do_filter_sub(self, scanline, result):
    ai = 0
    for i in range(self.fu, len(result)):
        x = scanline[i]
        a = scanline[ai]
        result[i] = (x - a) & 0xff
        ai += 1

Sub filter: each byte is stored as its difference (modulo 256) from the
corresponding byte of the previous pixel.
def prepare_sparse_params(self, param_rowids):
    if not self._kvstore:
        return
    assert isinstance(param_rowids, dict)
    for param_name, rowids in param_rowids.items():
        if isinstance(rowids, (tuple, list)):
            rowids_1d = []
            for r in rowids:
                rowids_1d.append(r.reshape((-1,)).astype(np.int64))
            rowid = mx.nd.concat(*rowids_1d, dim=0)
        else:
            rowid = rowids
        param_idx = self._exec_group.param_names.index(param_name)
        param_val = self._exec_group.param_arrays[param_idx]
        self._kvstore.row_sparse_pull(param_name, param_val, row_ids=rowid,
                                      priority=-param_idx)

Prepares the module for processing a data batch by pulling row_sparse
parameters from kvstore to all devices based on rowids.

Parameters
----------
param_rowids : dict of str to NDArray or list of NDArrays
def set_value(self, value):
    if self.__is_value_array:
        if len(value) == self.__report_count:
            for index, item in enumerate(value):
                self.__setitem__(index, item)
        else:
            raise ValueError("Value size should match report item size "
                             "length")
    else:
        self.__value = value & ((1 << self.__bit_size) - 1)
Set usage value within report
def _scheduled_check_for_summaries(self):
    if self._analysis_process is None:
        return

    timed_out = time.time() - self._analyze_start_time > self.time_limit
    if timed_out:
        self._handle_results('Analysis timed out but managed\n'
                             ' to get lower turn results.',
                             'Analysis timed out with no results.')
        return

    try:
        self._analysis_process.join(0.001)
    except AssertionError:
        pass

    if not self._analysis_process.is_alive():
        self._handle_results('Completed analysis.',
                             'Unable to find the game on screen.')
        return

    self._base.after(self._POLL_PERIOD_MILLISECONDS,
                     self._scheduled_check_for_summaries)
Present the results if they have become available or timed out.
def round_(values, decimals=None, width=0, lfill=None, rfill=None, **kwargs):
    if decimals is None:
        decimals = hydpy.pub.options.reprdigits
    with hydpy.pub.options.reprdigits(decimals):
        if isinstance(values, abctools.IterableNonStringABC):
            string = repr_values(values)
        else:
            string = repr_(values)
    if (lfill is not None) and (rfill is not None):
        raise ValueError(
            'For function `round_` values are passed for both arguments '
            '`lfill` and `rfill`. This is not allowed.')
    if (lfill is not None) or (rfill is not None):
        width = max(width, len(string))
        if lfill is not None:
            string = string.rjust(width, lfill)
        else:
            string = string.ljust(width, rfill)
    print(string, **kwargs)

Prints values with a maximum number of digits in doctests.

See the documentation on function |repr| for more details. And note that
the optional keyword arguments are passed to the print function.

Usually one would apply function |round_| on a single number or a vector
of numbers:

>>> from hydpy import round_
>>> round_(1./3., decimals=6)
0.333333
>>> round_((1./2., 1./3., 1./4.), decimals=4)
0.5, 0.3333, 0.25

Additionally, one can supply a `width` and a `rfill` argument:

>>> round_(1.0, width=6, rfill='0')
1.0000

Alternatively, one can use the `lfill` argument, which might e.g. be
useful for aligning different strings:

>>> round_('test', width=6, lfill='_')
__test

Using both the `lfill` and the `rfill` argument raises an error:

>>> round_(1.0, lfill='_', rfill='0')
Traceback (most recent call last):
...
ValueError: For function `round_` values are passed for both \
arguments `lfill` and `rfill`. This is not allowed.
def append(self, newconfig):
    for attr_name in ('title', 'body', 'author', 'date', 'strip',
                      'strip_id_or_class', 'strip_image_src',
                      'single_page_link', 'single_page_link_in_feed',
                      'next_page_link', 'http_header'):
        current_set = getattr(self, attr_name)
        for val in getattr(newconfig, attr_name):
            current_set.add(val)
        setattr(self, attr_name, current_set)

    for attr_name in ('parser', 'tidy', 'prune', 'autodetect_on_failure'):
        if getattr(self, attr_name) is None:
            if getattr(newconfig, attr_name) is None:
                setattr(self, attr_name, self.defaults[attr_name])
            else:
                setattr(self, attr_name, getattr(newconfig, attr_name))

    if self.parser == 'libxml':
        self.parser = 'lxml'

    for attr_name in ('find_string', 'replace_string'):
        getattr(self, attr_name).extend(getattr(newconfig, attr_name))

    if self.find_string:
        self.replace_patterns = zip(self.find_string, self.replace_string)
    else:
        self.replace_patterns = None
Append another site config to current instance. All ``newconfig`` attributes are appended one by one to ours. Order matters, eg. current instance values will come first when merging. Thus, if you plan to use some sort of global site config with more generic directives, append it last for specific directives to be tried first. .. note:: this method is also aliased to :meth:`merge`.
def stats(self, request):
    doc = HtmlDocument(title='Live server stats', media_path='/assets/')
    return doc.http_response(request)
Live stats for the server. Try sending lots of requests
def visit_tuple(self, node, parent):
    context = self._get_context(node)
    newnode = nodes.Tuple(
        ctx=context, lineno=node.lineno, col_offset=node.col_offset,
        parent=parent
    )
    newnode.postinit([self.visit(child, newnode) for child in node.elts])
    return newnode
visit a Tuple node by returning a fresh instance of it
def execute(self, fetchcommand, sql, params=None):
    cur = self.conn.cursor()
    if params:
        if not type(params).__name__ == 'tuple':
            raise ValueError('the params argument needs to be a tuple')
        cur.execute(sql, params)
    else:
        cur.execute(sql)
    self.conn.commit()
    if not fetchcommand or fetchcommand == 'none':
        return
    if fetchcommand == 'last' or fetchcommand == 'lastid':
        lastdata = cur.fetchall()
        self.conn.commit()
        return lastdata
    m = insertion_pattern.match(sql)
    if m:
        lastdata = cur.fetchone()
        self.conn.commit()
        return lastdata
    if fetchcommand == 'fetchone' or fetchcommand == 'one':
        return cur.fetchone()
    elif fetchcommand == 'fetchall' or fetchcommand == 'all':
        return cur.fetchall()
    else:
        msg = ("expecting <fetchcommand> argument to be either "
               "'fetchone'|'one'|'fetchall'|'all'")
        raise ValueError(msg)

Execute an SQL statement, where 'fetchcommand' is either 'fetchone' or
'fetchall' (or one of their aliases) and controls what is returned.
def rbridge_id(self, **kwargs):
    is_get_config = kwargs.pop('get', False)
    if not is_get_config:
        rbridge_id = kwargs.pop('rbridge_id')
    else:
        rbridge_id = ''
    callback = kwargs.pop('callback', self._callback)

    rid_args = dict(rbridge_id=rbridge_id)
    rid = getattr(self._rbridge, 'rbridge_id_rbridge_id')
    config = rid(**rid_args)

    if is_get_config:
        return callback(config, handler='get_config')
    return callback(config)

Configures device's rbridge ID. Setting this property will need a switch
reboot.

Args:
    rbridge_id (str): The rbridge ID of the device on which BGP will be
        configured in a VCS fabric.
    get (bool): Get config instead of editing config. (True, False)
    callback (function): A function executed upon completion of the
        method. The only parameter passed to `callback` will be the
        ``ElementTree`` `config`.

Returns:
    Return value of `callback`.

Raises:
    KeyError: if `rbridge_id` is not specified.

Examples:
    >>> import pynos.device
    >>> conn = ('10.24.39.211', '22')
    >>> auth = ('admin', 'password')
    >>> with pynos.device.Device(conn=conn, auth=auth) as dev:
    ...     output = dev.system.rbridge_id(rbridge_id='225')
    ...     output = dev.system.rbridge_id(rbridge_id='225', get=True)
    ...     dev.system.rbridge_id()
    ... # doctest: +IGNORE_EXCEPTION_DETAIL
    Traceback (most recent call last):
    KeyError
def emit(self, record):
    try:
        msg = self.format(record)
        if isinstance(msg, unicode):
            if hasattr(self.stream, "encoding") and self.stream.encoding:
                self.stream.write(msg.encode(self.stream.encoding))
            else:
                self.stream.write(msg.encode(encoding))
        else:
            self.stream.write(msg)
        terminator = getattr(record, 'terminator', '\n')
        if terminator is not None:
            self.stream.write(terminator)
        self.flush()
    except (KeyboardInterrupt, SystemExit):
        raise
    except:
        self.handleError(record)
Emit a record. Unless record.terminator is set, a trailing newline will be written to the output stream.
def add_noise_to_dict_values(dictionary: Dict[A, float],
                             noise_param: float) -> Dict[A, float]:
    new_dict = {}
    for key, value in dictionary.items():
        noise_value = value * noise_param
        noise = random.uniform(-noise_value, noise_value)
        new_dict[key] = value + noise
    return new_dict

Returns a new dictionary with noise added to every value in
``dictionary``. The noise is uniformly distributed within
``noise_param`` percent of the value for every value in the dictionary.
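A usage sketch; seeding random just makes the illustration repeatable:

import random

random.seed(0)
scores = {'a': 1.0, 'b': 2.0}
noisy = add_noise_to_dict_values(scores, 0.1)  # each value shifted by up to ±10%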
def _create_warm_start_tuner(self, additional_parents, warm_start_type,
                             estimator=None):
    all_parents = {self.latest_tuning_job.name}
    if additional_parents:
        all_parents = all_parents.union(additional_parents)

    return HyperparameterTuner(
        estimator=estimator if estimator else self.estimator,
        objective_metric_name=self.objective_metric_name,
        hyperparameter_ranges=self._hyperparameter_ranges,
        objective_type=self.objective_type,
        max_jobs=self.max_jobs,
        max_parallel_jobs=self.max_parallel_jobs,
        warm_start_config=WarmStartConfig(warm_start_type=warm_start_type,
                                          parents=all_parents))

Creates a new ``HyperparameterTuner`` with ``WarmStartConfig``, where
type will be equal to ``warm_start_type`` and ``parents`` will be equal
to the union of ``additional_parents`` and self.

Args:
    additional_parents (set{str}): Additional parents along with self,
        to be used for warm starting.
    warm_start_type (sagemaker.tuner.WarmStartTypes): Type of warm start
        job.

Returns:
    sagemaker.tuner.HyperparameterTuner: Instance with the request
        fields copied from self along with the warm start configuration.
def _find_usage_parameter_groups(self):
    num_groups = 0
    paginator = self.conn.get_paginator('describe_cache_parameter_groups')
    for page in paginator.paginate():
        for group in page['CacheParameterGroups']:
            num_groups += 1
    self.limits['Parameter Groups']._add_current_usage(
        num_groups,
        aws_type='AWS::ElastiCache::ParameterGroup'
    )

Find usage for ElastiCache parameter groups.
def set_step(self, value, block_events=False):
    if block_events:
        self.block_events()
    self._widget.setSingleStep(value)
    if block_events:
        self.unblock_events()
Sets the step of the number box. Setting block_events=True will temporarily block the widget from sending any signals when setting the value.
def attach(self, payload):
    if self._payload is None:
        self._payload = [payload]
    else:
        self._payload.append(payload)
Add the given payload to the current payload. The current payload will always be a list of objects after this method is called. If you want to set the payload to a scalar object, use set_payload() instead.