code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def char_code(columns, name=None):
    """Create a parser for a character set code field.

    :param columns: total width of the field in columns (must be > 0)
    :param name: optional name for the field; defaults to a name derived
        from ``columns``
    :return: a pyparsing parser instance for the character set code field
    :raises ValueError: if ``columns`` is not positive
    """
    if name is None:
        name = 'Char Code Field (' + str(columns) + ' columns)'
    if columns <= 0:
        # Raise a specific, catchable exception instead of bare BaseException.
        raise ValueError('columns must be positive, got %r' % (columns,))
    # Build an alternation of all known character-set codes, each padded
    # with leading spaces to a 15-column field.
    char_sets = None
    for char_set in _tables.get_data('character_set'):
        regex = '[ ]{' + str(15 - len(char_set)) + '}' + char_set
        if char_sets is None:
            char_sets = regex
        else:
            char_sets += '|' + regex
    _character_sets = pp.Regex(char_sets)
    # Raw strings so '\+' is an explicit regex escape, not a deprecated
    # string escape sequence.
    _unicode_1_16b = pp.Regex(r'U\+0[0-8,A-F]{3}[ ]{' + str(columns - 6) + '}')
    _unicode_2_21b = pp.Regex(r'U\+0[0-8,A-F]{4}[ ]{' + str(columns - 7) + '}')
    char_code_field = (_character_sets | _unicode_1_16b | _unicode_2_21b)
    # Strip the padding whitespace from the matched token.
    char_code_field = char_code_field.setParseAction(lambda s: s[0].strip())
    char_code_field.setName(name)
    return char_code_field
Character set code field. :param name: name for the field :return: an instance of the Character set code field rules
def _altaz_rotation(self, t):
    """Compute the rotation from the ICRF into the alt-az system.

    :param t: time object providing ``gast`` (Greenwich apparent sidereal
        time, in hours) and ``M`` (a rotation matrix attribute).
    :return: the composed rotation self.R_lat @ R_lon @ t.M, combined via
        einsum so that trailing array axes broadcast.
    """
    # Rotate about z by longitude plus the Earth rotation angle
    # (gast is in hours, hence the tau / 24 conversion to radians).
    R_lon = rot_z(- self.longitude.radians - t.gast * tau / 24.0)
    return einsum('ij...,jk...,kl...->il...', self.R_lat, R_lon, t.M)
Compute the rotation from the ICRF into the alt-az system.
def makeicons(source):
    """Create all the necessary icons from the source image.

    :param source: path to the source image file
    """
    im = Image.open(source)
    # .items() works on both Python 2 and 3; iteritems() is Python-2 only.
    for name, (_, w, h, func) in icon_sizes.items():
        print('Making icon %s...' % name)
        tn = func(im, (w, h))
        bg = Image.new('RGBA', (w, h), (255, 255, 255))
        # Integer (floor) division: Image.paste requires int coordinates,
        # and '/' yields floats on Python 3.
        x = (w // 2) - (tn.size[0] // 2)
        y = (h // 2) - (tn.size[1] // 2)
        bg.paste(tn, (x, y))
        bg.save(path.join(env.dir, name))
Create all the necessary icons from the source image
def _format_dates(self, start, end):
    """Split the start and end dates into their components.

    :param start: raw start date
    :param end: raw end date
    :return: tuple of (split start, split end)
    """
    return self._split_date(start), self._split_date(end)
Format start and end dates.
def get_command_class(name, exclude_packages=None, exclude_command_class=None):
    """Locate the named management command class.

    Searches "django.core" plus the apps in settings.INSTALLED_APPS,
    optionally skipping packages or a particular command class.
    """
    from django.conf import settings
    apps = settings.INSTALLED_APPS
    if "django.core" not in apps:
        # Ensure the built-in command package is always searched.
        apps = ("django.core",) + tuple(apps)
    return get_command_class_from_apps(
        name,
        apps,
        exclude_packages=exclude_packages,
        exclude_command_class=exclude_command_class,
    )
Searches "django.core" and the apps in settings.INSTALLED_APPS to find the named command class, optionally skipping packages or a particular command class.
def size(self):
    """The size of the ColorBar.

    Returns
    -------
    size: (major_axis_length, minor_axis_length)
        major and minor axis are defined by the orientation of the ColorBar
    """
    half_width, half_height = self._halfdim
    if self.orientation not in ("top", "bottom"):
        # Vertical bars report the axes swapped.
        half_width, half_height = half_height, half_width
    return (half_width * 2., half_height * 2.)
The size of the ColorBar Returns ------- size: (major_axis_length, minor_axis_length) major and minor axis are defined by the orientation of the ColorBar
def merge_config(self, user_config):
    """Merge user preferences into the default configurations.

    Take a dictionary of user preferences and use them to update the
    default data, model, and conversation configurations.  Each merged
    configuration is only adopted if it validates.

    :param user_config: dict of user-supplied configuration overrides
    """
    # BUG FIX: dict.update() mutates in place and returns None, so the
    # result of deepcopy(...).update(...) must not be assigned directly --
    # the original stored None in every temp config.
    temp_data_config = copy.deepcopy(self.data_config)
    temp_data_config.update(user_config)
    temp_model_config = copy.deepcopy(self.model_config)
    temp_model_config.update(user_config)
    temp_conversation_config = copy.deepcopy(self.conversation_config)
    temp_conversation_config.update(user_config)
    if validate_data_config(temp_data_config):
        self.data_config = temp_data_config
    if validate_model_config(temp_model_config):
        self.model_config = temp_model_config
    if validate_conversation_config(temp_conversation_config):
        self.conversation_config = temp_conversation_config
Take a dictionary of user preferences and use them to update the default data, model, and conversation configurations.
def get_tri_area(pts):
    """Compute the area of the triangle defined by three points.

    Args:
        pts: [a, b, c] three points, each a 3-coordinate sequence.

    Returns:
        float: the triangle area (half the cross-product magnitude).
    """
    a, b, c = pts[0], pts[1], pts[2]
    v1 = np.array(b) - np.array(a)
    v2 = np.array(c) - np.array(a)
    # scipy.cross / scipy.linalg.norm duplicated numpy and scipy.cross has
    # been removed from recent SciPy releases; use numpy directly.
    return abs(np.linalg.norm(np.cross(v1, v2)) / 2)
Given a list of coords for 3 points, Compute the area of this triangle. Args: pts: [a, b, c] three points
def humanize_hours(total_hours,
                   frmt='{hours:02d}:{minutes:02d}:{seconds:02d}',
                   negative_frmt=None):
    """Return a human-readable string for a time given in hours.

    :param total_hours: time expressed in hours (numeric or string)
    :param frmt: format string used for non-negative durations
    :param negative_frmt: optional format string for negative durations
    :return: formatted time string
    """
    total_seconds = int(float(total_hours) * 3600)
    return humanize_seconds(total_seconds, frmt, negative_frmt)
Given time in hours, return a string representing the time.
def _unichr(i):
    """Helper for converting a Unicode scalar value to a Unicode character.

    :param i: Unicode scalar value (int) to convert.
    :return: the corresponding Unicode character.
    :raises TypeError: if ``i`` is not an int.
    """
    if not isinstance(i, int):
        raise TypeError
    try:
        return six.unichr(i)
    except ValueError:
        # Narrow Python 2 builds cannot represent code points above 0xFFFF
        # with unichr(); fall back to decoding the raw scalar value.
        # NOTE(review): struct.pack("i", ...) uses native endianness while
        # "utf-32" decoding without a BOM has its own byte-order default --
        # this looks platform dependent; confirm on a big-endian target.
        return struct.pack("i", i).decode("utf-32")
Helper function for taking a Unicode scalar value and returning a Unicode character. :param i: Unicode scalar value to convert. :return: Unicode character
def create_tag_and_push(version):
    "Create a git tag for `version` and push it to origin."
    # Refuse to recreate an already-existing tag.
    assert version not in tags()
    git('config', 'user.name', 'Travis CI on behalf of Austin Bingham')
    git('config', 'user.email', 'austin@sixty-north.com')
    # Push over SSH using the repository deploy key.
    git('config', 'core.sshCommand', 'ssh -i deploy_key')
    git(
        'remote', 'add', 'ssh-origin',
        'git@github.com:sixty-north/cosmic-ray.git'
    )
    git('tag', version)
    # chmod first: ssh-add rejects keys with overly permissive file modes.
    subprocess.check_call([
        'ssh-agent', 'sh', '-c',
        'chmod 0600 deploy_key && ' +
        'ssh-add deploy_key && ' +
        'git push ssh-origin --tags'
    ])
Create a git tag for `version` and push it to origin.
def lookup_mac(self, ip):
    """Return the mac address of the lease with the given ip address.

    @type ip: str
    @rtype: str
    @raises OmapiErrorNotFound: if no lease object with the given ip exists
    @raises OmapiErrorAttributeNotFound: if the lease lacks a mac address
    """
    lease = self.lookup_by_lease(ip=ip)
    if "hardware-address" not in lease:
        raise OmapiErrorAttributeNotFound()
    return lease["hardware-address"]
Look up a lease object with given ip address and return the associated mac address. @type ip: str @rtype: str or None @raises ValueError: @raises OmapiError: @raises OmapiErrorNotFound: if no lease object with the given ip could be found @raises OmapiErrorAttributeNotFound: if lease could be found, but objects lacks a mac @raises socket.error:
def initialize(self):
    """Initialize grid cell modules.

    Idempotent: the location modules (and, for >2 dimensions, the
    per-module projection matrices) are only created on the first call.
    """
    if self._modules is None:
        self._modules = []
        # One location module per grid cell module, each with its own
        # scale and orientation taken from the per-module lists.
        for i in xrange(self.moduleCount):
            self._modules.append(ThresholdedGaussian2DLocationModule(
                cellsPerAxis=self.cellsPerAxis,
                scale=self.scale[i],
                orientation=self.orientation[i],
                anchorInputSize=self.anchorInputSize,
                activeFiringRate=self.activeFiringRate,
                bumpSigma=self.bumpSigma,
                activationThreshold=self.activationThreshold,
                initialPermanence=self.initialPermanence,
                connectedPermanence=self.connectedPermanence,
                learningThreshold=self.learningThreshold,
                sampleSize=self.sampleSize,
                permanenceIncrement=self.permanenceIncrement,
                permanenceDecrement=self.permanenceDecrement,
                maxSynapsesPerSegment=self.maxSynapsesPerSegment,
                bumpOverlapMethod=self.bumpOverlapMethod,
                seed=self.seed))
        if self.dimensions > 2:
            # One projection matrix per module maps higher-dimensional
            # movement into each module's 2D plane.
            self._projection = [
                self.createProjectionMatrix(dimensions=self.dimensions)
                for _ in xrange(self.moduleCount)]
Initialize grid cell modules
def save_map(dsp, path):
    """Write a Dispatcher graph object in Python pickle format.

    Pickles are a serialized byte stream of a Python object; this format
    preserves Python objects used as nodes or edges.

    :param dsp: A dispatcher that identifies the model adopted.
    :type dsp: schedula.Dispatcher
    :param path: File or filename to write.
    :type path: str, file
    """
    import dill
    with open(path, 'wb') as stream:
        dill.dump(dsp.dmap, stream)
Write Dispatcher graph object in Python pickle format. Pickles are a serialized byte stream of a Python object. This format will preserve Python objects used as nodes or edges. :param dsp: A dispatcher that identifies the model adopted. :type dsp: schedula.Dispatcher :param path: File or filename to write. File names ending in .gz or .bz2 will be compressed. :type path: str, file .. testsetup:: >>> from tempfile import mkstemp >>> file_name = mkstemp()[1] Example:: >>> from schedula import Dispatcher >>> dsp = Dispatcher() >>> dsp.add_function(function=max, inputs=['a', 'b'], outputs=['c']) 'max' >>> save_map(dsp, file_name)
def access_array(self, id_, lineno, scope=None, default_type=None):
    """Check that an accessed identifier is a declared array, then access it.

    ZX BASIC requires arrays to be declared before usage, so declaration
    and class are both verified; returns None if either check fails.
    """
    if not self.check_is_declared(id_, lineno, 'array', scope):
        return None
    if not self.check_class(id_, CLASS.array, lineno, scope):
        return None
    return self.access_id(id_, lineno, scope=scope,
                          default_type=default_type)
Called whenever an accessed variable is expected to be an array. ZX BASIC requires arrays to be declared before usage, so they're checked. Also checks for class array.
def set_keywords_creation_mode(self, layer=None, keywords=None):
    """Set the Wizard to the Keywords Creation mode.

    :param layer: Layer to set the keywords for; defaults to the current
        map canvas layer.
    :type layer: QgsMapLayer
    :param keywords: Keywords for the layer; if None, existing keywords
        are read from the layer (falling back to an empty dict on any
        known read failure).
    :type keywords: dict, None
    """
    self.layer = layer or self.iface.mapCanvas().currentLayer()
    if keywords is not None:
        self.existing_keywords = keywords
    else:
        try:
            self.existing_keywords = self.keyword_io.read_keywords(
                self.layer)
        except (HashNotFoundError,
                OperationalError,
                NoKeywordsFoundError,
                KeywordNotFoundError,
                InvalidParameterError,
                UnsupportedProviderError,
                MetadataReadError):
            # Any known failure to read keywords means we start fresh.
            self.existing_keywords = {}
    self.set_mode_label_to_keywords_creation()
    # Jump straight to the purpose step of the keyword wizard.
    step = self.step_kw_purpose
    step.set_widgets()
    self.go_to_step(step)
Set the Wizard to the Keywords Creation mode. :param layer: Layer to set the keywords for :type layer: QgsMapLayer :param keywords: Keywords for the layer. :type keywords: dict, None
def validate_arc_links_same_outline(sender, instance, *args, **kwargs):
    """Evaluate attempts to link an arc to a story node from another outline.

    :raises IntegrityError: if the node belongs to a different outline.
    """
    node = instance.story_element_node
    if not node:
        return
    if node.outline != instance.parent_outline:
        raise IntegrityError(_('An arc cannot be associated with an story element from another outline.'))
Evaluates attempts to link an arc to a story node from another outline.
def setM0Coast(self, device=DEFAULT_DEVICE_ID):
    """Set motor 0 to coast.

    :Keywords:
      device : `int`
        The integer number of the hardware device ID; only used with the
        Pololu Protocol.  Defaults to the hardware's default value.

    :Exceptions:
      * `SerialTimeoutException` if the low level serial package times out.
      * `SerialException` on IO error when the port is not open.
    """
    self._writeData(self._COMMAND.get('m0-coast'), device)
Set motor 0 to coast. :Keywords: device : `int` The device is the integer number of the hardware devices ID and is only used with the Pololu Protocol. Defaults to the hardware's default value. :Exceptions: * `SerialTimeoutException` If the low level serial package times out. * `SerialException` IO error when the port is not open.
def create_order(self, order_deets):
    """Create a new order transaction.

    :param order_deets: payload describing the order
    :return: the processed API response
    """
    response = self._post('transactions/orders', order_deets)
    return self.responder(response)
Creates a new order transaction.
def get_characters(self, *args, **kwargs):
    """Return a full CharacterDataWrapper object for this story.

    /stories/{storyId}/characters

    :returns: CharacterDataWrapper -- A new request to API.  Contains
        full results set.
    """
    from .character import Character, CharacterDataWrapper
    # args/kwargs are forwarded as two positional containers -- presumably
    # the calling convention get_related_resource expects; confirm there.
    return self.get_related_resource(Character, CharacterDataWrapper,
                                     args, kwargs)
Returns a full CharacterDataWrapper object for this story. /stories/{storyId}/characters :returns: CharacterDataWrapper -- A new request to API. Contains full results set.
def update_qos_aggregated_configuration(self, qos_configuration, timeout=-1):
    """Update the QoS aggregated configuration for the logical interconnect.

    Args:
        qos_configuration: QOS configuration.
        timeout: Timeout in seconds.  Waits for task completion by default;
            the timeout does not abort the operation in OneView, it only
            stops waiting for its completion.

    Returns:
        dict: Logical Interconnect.
    """
    uri = self.data["uri"] + self.QOS_AGGREGATED_CONFIGURATION
    return self._helper.update(qos_configuration, uri=uri, timeout=timeout)
Updates the QoS aggregated configuration for the logical interconnect. Args: qos_configuration: QOS configuration. timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation in OneView, just stops waiting for its completion. Returns: dict: Logical Interconnect.
def write_libxc_docs_json(xcfuncs, jpath):
    """Write a json file with libxc metadata to path jpath.

    Returns the normalized metadata dict that was written (prefixes
    stripped, keys reduced to Family/Kind/References plus optional
    descriptions).  The input dict is not modified.
    """
    from copy import deepcopy
    xcfuncs = deepcopy(xcfuncs)
    # Strip libxc prefixes from the family/kind identifiers.
    for entry in xcfuncs.values():
        entry["Family"] = entry["Family"].replace("XC_FAMILY_", "", 1)
        entry["Kind"] = entry["Kind"].replace("XC_", "", 1)
    # Keep only a fixed subset of keys, plus non-empty descriptions.
    for num, entry in xcfuncs.items():
        slim = {key: entry[key] for key in ("Family", "Kind", "References")}
        for opt in ("Description 1", "Description 2"):
            if entry.get(opt) is not None:
                slim[opt] = entry[opt]
        xcfuncs[num] = slim
    with open(jpath, "wt") as fh:
        json.dump(xcfuncs, fh)
    return xcfuncs
Write json file with libxc metadata to path jpath.
def set_include_rts(self, rts):
    """Set the 'include_rts' parameter.

    When set to False the timeline will strip any native retweets from
    the returned timeline.

    :param rts: Boolean triggering the usage of the parameter
    :raises: TwitterSearchException
    """
    if not isinstance(rts, bool):
        raise TwitterSearchException(1008)
    self.arguments['include_rts'] = 'true' if rts else 'false'
Sets 'include_rts' parameter. When set to False, the timeline will strip any native retweets from the returned timeline :param rts: Boolean triggering the usage of the parameter :raises: TwitterSearchException
def get_file(path, dest, saltenv='base', makedirs=False, template=None, gzip=None):
    """Send a file from the master to the specified location.

    .. note::
        gzip compression is not supported in the salt-ssh version of
        cp.get_file.  The argument is only accepted for interface
        compatibility.
    """
    if gzip is not None:
        log.warning('The gzip argument to cp.get_file in salt-ssh is '
                    'unsupported')
    if template is not None:
        (path, dest) = _render_filenames(path, dest, saltenv, template)
    # Cache the file locally before shipping it over the SSH shell.
    src = __context__['fileclient'].cache_file(
        path,
        saltenv,
        cachedir=os.path.join('salt-ssh', __salt__.kwargs['id_']))
    single = salt.client.ssh.Single(
        __opts__,
        '',
        **__salt__.kwargs)
    ret = single.shell.send(src, dest, makedirs)
    # Falsy third element of the result means success.
    # NOTE(review): ret[2] is presumably the exit status -- confirm
    # against shell.send's return contract.
    return not ret[2]
Send a file from the master to the location in specified .. note:: gzip compression is not supported in the salt-ssh version of cp.get_file. The argument is only accepted for interface compatibility.
def monitor(i):
    """Yield items from iterator ``i``, logging progress every 10,000 records."""
    for count, record in enumerate(i, start=1):
        if count % 10000 == 0:
            logger.info("%d records so far, current record is %s",
                        count, record["idx"])
        yield record
Given an iterator, yields data from it but prints progress every 10,000 records
def execute(self, args):
    """Execute the configured cmd, passing args in one or more rounds,
    xargs style.

    When the OS rejects the argument list as too long (E2BIG), the args
    are split in two and each half is executed recursively; the first
    non-zero status short-circuits.

    :param list args: Extra arguments to pass to cmd.
    :return: the command's exit status.
    """
    all_args = list(args)
    try:
        return self._cmd(all_args)
    except OSError as e:
        if e.errno != errno.E2BIG:
            raise
        first_half, second_half = self._split_args(all_args)
        result = self.execute(first_half)
        if result != 0:
            return result
        return self.execute(second_half)
Executes the configured cmd passing args in one or more rounds xargs style. :param list args: Extra arguments to pass to cmd.
def i3():
    """Install and customize the tiling window manager i3.

    Also installs the i3 config file and builds the hhpc helper from its
    git repository.
    """
    install_package('i3')
    install_file_legacy(path='~/.i3/config', username=env.user, repos_dir='repos')
    # Build dependencies required to compile hhpc.
    install_packages(['make', 'pkg-config', 'gcc', 'libc6-dev', 'libx11-dev'])
    checkup_git_repo_legacy(url='https://github.com/aktau/hhpc.git')
    run('cd ~/repos/hhpc && make')
Install and customize the tiling window manager i3.
def delete(self, response_choice=1, async=False, callback=None):
    """Delete object and call given callback in case of async call.

    NOTE(review): ``async`` became a reserved keyword in Python 3.7, so
    this definition only parses on Python 2 / Python <= 3.6.  Renaming
    the parameter would break existing keyword callers -- confirm the
    supported interpreter range before changing it.

    Args:
        response_choice (int): Automatically send a response choice when
            confirmation is needed
        async (bool): Boolean to make an asynchronous call. Default is False
        callback (function): Callback method that will be triggered in
            case of asynchronous call

    Example:
        >>> entity.delete()  # will delete the enterprise from the server
    """
    return self._manage_child_object(nurest_object=self,
                                     method=HTTP_METHOD_DELETE,
                                     async=async,
                                     callback=callback,
                                     response_choice=response_choice)
Delete object and call given callback in case of asynchronous call. Args: response_choice (int): Automatically send a response choice when confirmation is needed async (bool): Boolean to make an asynchronous call. Default is False callback (function): Callback method that will be triggered in case of asynchronous call Example: >>> entity.delete() # will delete the enterprise from the server
def add_node_to_network(self, node, network):
    """Add participant's node to a network.

    Connects the node to the environment and, depending on its learning
    gene, either to a randomly chosen parent from the previous
    generation ("social") or directly to an environment transmission
    ("asocial").
    """
    network.add_node(node)
    node.receive()
    environment = network.nodes(type=Environment)[0]
    environment.connect(whom=node)
    gene = node.infos(type=LearningGene)[0].contents
    if (gene == "social"):
        # Candidate parents: non-failed agents of the previous generation
        # in the same network.
        prev_agents = RogersAgent.query\
            .filter(and_(RogersAgent.failed == False,
                         RogersAgent.network_id == network.id,
                         RogersAgent.generation == node.generation - 1))\
            .all()
        parent = random.choice(prev_agents)
        parent.connect(whom=node)
        parent.transmit(what=Meme, to_whom=node)
    elif (gene == "asocial"):
        environment.transmit(to_whom=node)
    else:
        raise ValueError("{} has invalid learning gene value of {}"
                         .format(node, gene))
    node.receive()
Add participant's node to a network.
def OnMacroToolbarToggle(self, event):
    """Macro toolbar toggle event handler.

    Shows the toolbar gripper, toggles the AUI pane's visibility and
    lets the event propagate.
    """
    self.main_window.macro_toolbar.SetGripperVisible(True)
    macro_toolbar_info = self.main_window._mgr.GetPane("macro_toolbar")
    self._toggle_pane(macro_toolbar_info)
    event.Skip()
Macro toolbar toggle event handler
def from_string(address):
    """Convert a dotted FQDN string to a WFQDN object.

    :param address: address to convert
    :return: WFQDN
    :raises ValueError: if the address is too long or a label is invalid
    """
    if not address:
        return WFQDN()
    # Accept (and drop) a single trailing root dot.
    if address.endswith('.'):
        address = address[:-1]
    if len(address) > WFQDN.maximum_fqdn_length:
        raise ValueError('Invalid address')
    result = WFQDN()
    for label in address.split('.'):
        if not (isinstance(label, str) and WFQDN.re_label.match(label)):
            raise ValueError('Invalid address')
        result._labels.append(label)
    return result
Convert a dotted-notation FQDN address to a WFQDN object :param address: address to convert :return: WFQDN
def replace(self, replacements):
    """Replace variables with other variables.

    :param dict replacements: A dict of variable replacements
        (old variable -> new variable).
    :return: self
    """
    for old_var, new_var in replacements.items():
        old_var_id = id(old_var)
        # Only variables currently tracked in the object mapping are
        # replaced; unknown variables are silently skipped.
        if old_var_id in self._object_mapping:
            old_so = self._object_mapping[old_var_id]
            # Overwrite the old variable's stored region with the new one.
            self._store(old_so.start, new_var, old_so.size, overwrite=True)
    return self
Replace variables with other variables. :param dict replacements: A dict of variable replacements. :return: self
def schema_map(schema):
    """Return a valid ICachedItemMapper.map for the given schema.

    Every field name in the schema is mapped onto itself.
    """
    return {field: field for field in getFieldNames(schema)}
Return a valid ICachedItemMapper.map for schema
def format_datetime(self, format='medium', locale='en_US'):
    """Return a date string formatted to the given pattern.

    :param format: one of "full", "long", "medium", "short", or a custom
        datetime pattern
    :param locale: a locale identifier
    """
    # Delegates to the module-level format_datetime helper (the method
    # name shadows it only inside the class namespace).
    return format_datetime(self._dt, locale=locale, format=format)
Return a date string formatted to the given pattern. .. testsetup:: from delorean import Delorean .. doctest:: >>> d = Delorean(datetime(2015, 1, 1, 12, 30), timezone='US/Pacific') >>> d.format_datetime(locale='en_US') u'Jan 1, 2015, 12:30:00 PM' >>> d.format_datetime(format='long', locale='de_DE') u'1. Januar 2015 12:30:00 -0800' :param format: one of "full", "long", "medium", "short", or a custom datetime pattern :param locale: a locale identifier
def transfer_funds(self, to, amount, denom, msg):
    """Transfer SBD or STEEM to the given account.

    :return: True on success, False if the transfer raised an error
        (the error is reported through self.msg).
    """
    try:
        self.steem_instance().commit.transfer(to, float(amount), denom,
                                              msg, self.mainaccount)
    except Exception as error:
        self.msg.error_message(error)
        return False
    return True
Transfer SBD or STEEM to the given account
def _get_resize_target(img, crop_target, do_crop=False) -> 'TensorImageSize':
    "Calc size of `img` to fit in `crop_target` - adjust based on `do_crop`."
    # Annotation quoted as a forward reference so defining this function
    # does not require TensorImageSize to be resolvable at def time.
    if crop_target is None:
        return None
    ch, r, c = img.shape
    target_r, target_c = crop_target
    # Cropping scales by the smaller ratio (image covers the target);
    # fitting scales by the larger (image fits inside the target).
    ratio = (min if do_crop else max)(r / target_r, c / target_c)
    return ch, int(round(r / ratio)), int(round(c / ratio))
Calc size of `img` to fit in `crop_target` - adjust based on `do_crop`.
def sort(iterable):
    """Sort a list of IP addresses.

    :type iterable: Iterator
    :param iterable: An IP address list.
    :rtype: list
    :return: The sorted IP address list.
    """
    normalized = sorted(normalize_ip(address) for address in iterable)
    return [clean_ip(address) for address in normalized]
Given an IP address list, this function sorts the list. :type iterable: Iterator :param iterable: An IP address list. :rtype: list :return: The sorted IP address list.
def get_route_name(resource_uri):
    """Get a route name from a RAML resource URI.

    :param resource_uri: String representing RAML resource URI.
    :returns string: the URI stripped of surrounding slashes and all
        non-word characters.
    """
    resource_uri = resource_uri.strip('/')
    # Raw string: '\W' in a plain literal is an invalid escape sequence
    # (SyntaxWarning on modern Python).
    return re.sub(r'\W', '', resource_uri)
Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters.
def kernel_type(self, kernel_type):
    """Set the kernel_type of this KernelPushRequest.

    The type of kernel; cannot be changed once the kernel has been created.

    :param kernel_type: The kernel_type of this KernelPushRequest.
    :type: str
    :raises ValueError: if kernel_type is None or not an allowed value.
    """
    allowed_values = ["script", "notebook"]
    if kernel_type is None:
        raise ValueError("Invalid value for `kernel_type`, must not be `None`")
    if kernel_type not in allowed_values:
        raise ValueError(
            "Invalid value for `kernel_type` ({0}), must be one of {1}"
            .format(kernel_type, allowed_values)
        )
    self._kernel_type = kernel_type
Sets the kernel_type of this KernelPushRequest. The type of kernel. Cannot be changed once the kernel has been created # noqa: E501 :param kernel_type: The kernel_type of this KernelPushRequest. # noqa: E501 :type: str
def read(self, *args, **kwargs):
    """Read the node as a file, forwarding any arguments to file.read()."""
    with self.open('r') as handle:
        return handle.read(*args, **kwargs)
Reads the node as a file
def dpt_timeseries(adata, color_map=None, show=None, save=None, as_heatmap=True):
    """Heatmap of pseudotime series.

    Parameters
    ----------
    as_heatmap : bool (default: True)
        Plot the timeseries as heatmap.
    """
    if adata.n_vars > 100:
        # Join the message fragments with a space; previously they
        # concatenated to "...some while,consider selecting...".
        logg.warn('Plotting more than 100 genes might take some while, '
                  'consider selecting only highly variable genes, for example.')
    if as_heatmap:
        timeseries_as_heatmap(adata.X[adata.obs['dpt_order_indices'].values],
                              var_names=adata.var_names,
                              highlightsX=adata.uns['dpt_changepoints'],
                              color_map=color_map)
    else:
        timeseries(adata.X[adata.obs['dpt_order_indices'].values],
                   var_names=adata.var_names,
                   highlightsX=adata.uns['dpt_changepoints'],
                   xlim=[0, 1.3 * adata.X.shape[0]])
    pl.xlabel('dpt order')
    utils.savefig_or_show('dpt_timeseries', save=save, show=show)
Heatmap of pseudotime series. Parameters ---------- as_heatmap : bool (default: True) Plot the timeseries as heatmap.
def run_py(self, cmd, cwd=os.curdir):
    """Run a python command in the environment context.

    :param cmd: A command to run in the environment - runs with `python -c`
    :type cmd: str or list
    :param str cwd: The working directory in which to execute the command,
        defaults to :data:`os.curdir`
    :return: A finished command object
    """
    c = None
    if isinstance(cmd, six.string_types):
        # A string command is interpolated into one `python -c <cmd>` line.
        script = vistir.cmdparse.Script.parse("{0} -c {1}".format(self.python, cmd))
    else:
        script = vistir.cmdparse.Script.parse([self.python, "-c"] + list(cmd))
    # Run with the environment activated so its interpreter/paths apply.
    with self.activated():
        c = vistir.misc.run(script._parts, return_object=True, nospin=True,
                            cwd=cwd, write_to_stdout=False)
    return c
Run a python command in the environment context. :param cmd: A command to run in the environment - runs with `python -c` :type cmd: str or list :param str cwd: The working directory in which to execute the command, defaults to :data:`os.curdir` :return: A finished command object :rtype: :class:`~subprocess.Popen`
def format(self, record):
    """Format the record as tersely as possible but preserve info.

    Produces '<level-initial> HH:MM:SS <short-logger-name> - <message>'.
    """
    # Base-class format populates record.message from msg/args.
    super(CliFormatter, self).format(record)
    localized_time = datetime.datetime.fromtimestamp(record.created)
    terse_time = localized_time.strftime(u'%H:%M:%S')
    # Single-letter level plus the last component of the logger name.
    terse_level = record.levelname[0]
    terse_name = record.name.split('.')[-1]
    match = RECORD_LOGGER_RE.match(record.name)
    if match:
        subsys_match = SUBSYSTEM_LOGGER_RE.match(record.name)
        if subsys_match:
            # Subsystem records render as '<subsys: id>'.
            terse_name = '<{subsys}: {id}>'.format(
                subsys=subsys_match.group('subsys'),
                id=subsys_match.group('id'))
        else:
            # Test records render as '<test xxxxx>' with the uid's tail.
            terse_name = '<test %s>' % match.group('test_uid')[-5:]
    return '{lvl} {time} {logger} - {msg}'.format(lvl=terse_level,
                                                  time=terse_time,
                                                  logger=terse_name,
                                                  msg=record.message)
Format the record as tersely as possible but preserve info.
def revisit(self, node, include_self=True):
    """Revisit a node in the future.  As a result, the successors to this
    node will be revisited as well.

    :param node: The node to revisit in the future.
    :param include_self: Also re-add the node itself, not just successors.
    :return: None
    """
    successors = self.successors(node)
    if include_self:
        self._sorted_nodes.add(node)
    for succ in successors:
        self._sorted_nodes.add(succ)
    # Re-sort the worklist by each node's original index so visitation
    # order is preserved.
    self._sorted_nodes = OrderedSet(
        sorted(self._sorted_nodes, key=lambda n: self._node_to_index[n]))
Revisit a node in the future. As a result, the successors to this node will be revisited as well. :param node: The node to revisit in the future. :return: None
def _validate_nested_list_type(self, name, obj, nested_level, *args):
    """Recursively check that ``obj`` is a list nested ``nested_level`` deep.

    Delegates to ``_validate_list_type`` once the innermost level is
    reached; ``None`` is accepted at any intermediate level.

    :param name: Name of the object.
    :param obj: Object to check the type of.
    :param nested_level: Integer with the current nested level.
    :param args: List of classes.
    :raises TypeError: if the input object is not of any of the allowed
        types.
    """
    if nested_level <= 1:
        self._validate_list_type(name, obj, *args)
        return
    if obj is None:
        return
    if not isinstance(obj, list):
        raise TypeError(self.__class__.__name__ + '.' + name
                        + ' contains value of type ' + type(obj).__name__
                        + ' where a list is expected')
    for element in obj:
        self._validate_nested_list_type(name, element, nested_level - 1, *args)
Helper function that checks the input object as a list then recursively until nested_level is 1. :param name: Name of the object. :param obj: Object to check the type of. :param nested_level: Integer with the current nested level. :param args: List of classes. :raises TypeError: if the input object is not of any of the allowed types.
def from_string(cls, model_id, default_project=None):
    """Construct a model reference from a model ID string.

    Args:
        model_id (str): A model ID in standard SQL format.  If
            ``default_project`` is not specified, this must include a
            project ID, dataset ID, and model ID, each separated by ``.``.
        default_project (str): Optional.  The project ID to use when
            ``model_id`` does not include a project ID.

    Returns:
        google.cloud.bigquery.model.ModelReference: Model reference
            parsed from ``model_id``.

    Raises:
        ValueError: If ``model_id`` is not a fully-qualified ID in
            standard SQL format.
    """
    proj, dset, model = _helpers._parse_3_part_id(
        model_id, default_project=default_project, property_name="model_id"
    )
    return cls.from_api_repr(
        {"projectId": proj, "datasetId": dset, "modelId": model}
    )
Construct a model reference from model ID string. Args: model_id (str): A model ID in standard SQL format. If ``default_project`` is not specified, this must included a project ID, dataset ID, and model ID, each separated by ``.``. default_project (str): Optional. The project ID to use when ``model_id`` does not include a project ID. Returns: google.cloud.bigquery.model.ModelReference: Model reference parsed from ``model_id``. Raises: ValueError: If ``model_id`` is not a fully-qualified table ID in standard SQL format.
def _run_workflow(items, paired, workflow_file, work_dir):
    """Run manta SV analysis inside a prepared workflow directory.

    The workflow 'workspace' directory is removed both before and after
    the run, so stale state from a previous attempt cannot interfere.
    """
    utils.remove_safe(os.path.join(work_dir, "workspace"))
    # Use the tumor sample for paired analyses, otherwise the first item.
    data = paired.tumor_data if paired else items[0]
    cmd = [utils.get_program_python("configManta.py"), workflow_file,
           "-m", "local", "-j", dd.get_num_cores(data)]
    do.run(cmd, "Run manta SV analysis")
    utils.remove_safe(os.path.join(work_dir, "workspace"))
Run manta analysis inside prepared workflow directory.
def diet_adam_optimizer_params():
    """Default hyperparameters for a DietAdamOptimizer.

    Returns:
      a hyperparameters object.
    """
    return hparam.HParams(
        quantize=True,
        quantization_scale=10.0 / tf.int16.max,
        optimizer="DietAdam",
        learning_rate=1.0,
        learning_rate_warmup_steps=2000,
        learning_rate_decay_scheme="noam",
        epsilon=1e-10,
        beta1=0.0,
        beta2=0.98,
        factored_second_moment_accumulator=True,
    )
Default hyperparameters for a DietAdamOptimizer. Returns: a hyperparameters object.
def build_vars(path=None):
    """Build the initial variables dict for executing a script.

    :param path: optional script path stored under ``__file__``.
    :return: dict of initial globals.
    """
    init_vars = {
        "__name__": "__main__",
        "__package__": None,
        "reload": reload,
    }
    if path is not None:
        init_vars["__file__"] = fixpath(path)
    # Reserved vars start out undefined.
    init_vars.update((var, None) for var in reserved_vars)
    return init_vars
Build initial vars.
def SetBackingStore(cls, backing):
    """Set the global backing type used by the ComponentRegistry from this
    point forward.

    This function must be called before any operations that use the
    registry are initiated, otherwise they will work from different
    registries that will likely contain different data.

    :raises ArgumentError: if backing is not json, sqlite or memory.
    """
    if backing == 'json':
        cls.BackingType = JSONKVStore
        cls.BackingFileName = 'component_registry.json'
    elif backing == 'memory':
        cls.BackingType = InMemoryKVStore
        cls.BackingFileName = None
    elif backing == 'sqlite':
        cls.BackingType = SQLiteKVStore
        cls.BackingFileName = 'component_registry.db'
    else:
        raise ArgumentError("Unknown backing store type that is not json or sqlite", backing=backing)
Set the global backing type used by the ComponentRegistry from this point forward This function must be called before any operations that use the registry are initiated otherwise they will work from different registries that will likely contain different data
def _init():
    """Build the adb connection and initialize device tracking.

    :raises RuntimeError: if the adb server does not answer OKAY.
    """
    connection.connect()
    # Ask the adb server to stream device connect/disconnect events.
    ready_data = utils.encode_data('host:track-devices')
    connection.adb_socket.send(ready_data)
    # The server acknowledges with a fixed 4-byte status token.
    status = connection.adb_socket.recv(4)
    if status != b'OKAY':
        raise RuntimeError('adb server return "{}", not OKAY'.format(str(status)))
build connection and init it
def build_archive(cls, **kwargs):
    """Return the singleton `JobArchive` instance, building it if needed."""
    archive = cls._archive
    if archive is None:
        archive = cls(**kwargs)
        cls._archive = archive
    return archive
Return the singleton `JobArchive` instance, building it if needed
def line_math(fx=None, fy=None, axes='gca'):
    """Apply function fx to all xdata and fy to all ydata of plotted lines.

    :param fx: callable mapping the x data array to new x data
        (None = leave x unchanged)
    :param fy: callable mapping the y data array to new y data
        (None = leave y unchanged)
    :param axes: axes instance to act upon, or 'gca' for the current axes
    """
    if axes=='gca': axes = _pylab.gca()
    lines = axes.get_lines()
    for line in lines:
        # Only transform actual data lines, not other artists.
        if isinstance(line, _mpl.lines.Line2D):
            xdata, ydata = line.get_data()
            if not fx==None: xdata = fx(xdata)
            if not fy==None: ydata = fy(ydata)
            line.set_data(xdata,ydata)
    # Redraw so the transformed data becomes visible.
    _pylab.draw()
applies function fx to all xdata and fy to all ydata.
def smart_import(mpath):
    """Import ``mpath`` and return the module or attribute it refers to.

    Imports the deepest importable module in the dotted path and then
    resolves any remaining components as attributes.

    :param mpath: dotted path, e.g. ``"os.path.join"``.
    :return: the imported module, or the attribute referred to.
    """
    import importlib
    try:
        # importlib.import_module returns the *leaf* module, unlike
        # __import__, which returns the top-level package and therefore
        # broke attribute lookup on sub-modules (e.g. 'os.path.join').
        return importlib.import_module(mpath)
    except ImportError:
        parent, _, attr = mpath.rpartition('.')
        return getattr(smart_import(parent), attr)
Given a path smart_import will import the module and return the attr reffered to.
def on_valid(valid_content_type, on_invalid=json):
    """Render as the specified content type only if no errors are found in
    the provided data object.

    :param valid_content_type: content type used for error-free output.
    :param on_invalid: output formatter used when the data dict contains
        an 'errors' key (defaults to json).
    """
    # Pre-compute which keyword arguments the invalid formatter accepts.
    invalid_kwargs = introspect.generate_accepted_kwargs(on_invalid, 'request', 'response')
    invalid_takes_response = introspect.takes_all_arguments(on_invalid, 'response')

    def wrapper(function):
        # Same introspection for the wrapped (valid-path) formatter.
        valid_kwargs = introspect.generate_accepted_kwargs(function, 'request', 'response')
        valid_takes_response = introspect.takes_all_arguments(function, 'response')

        @content_type(valid_content_type)
        @wraps(function)
        def output_content(content, response, **kwargs):
            # A dict containing 'errors' is routed to the invalid formatter
            # and its content type overrides the response's.
            if type(content) == dict and 'errors' in content:
                response.content_type = on_invalid.content_type
                if invalid_takes_response:
                    kwargs['response'] = response
                return on_invalid(content, **invalid_kwargs(kwargs))
            if valid_takes_response:
                kwargs['response'] = response
            return function(content, **valid_kwargs(kwargs))
        return output_content
    return wrapper
Renders as the specified content type only if no errors are found in the provided data object
def typed_range(type_func, minimum, maximum):
    """Build an argparse ``type=`` callable enforcing a closed range.

    Requires values to convert via ``type_func`` and to lie between
    ``minimum`` and ``maximum`` (inclusive).

    :param type_func: conversion callable, e.g. ``int`` or ``float``.
    :param minimum: smallest accepted value.
    :param maximum: largest accepted value.
    :return: a callable suitable for argparse's ``type=`` argument.
    """
    @functools.wraps(type_func)
    def inner(string):
        result = type_func(string)
        # BUG FIX: the original `not result >= minimum and result <= maximum`
        # bound `not` to the first comparison only, so too-large values
        # slipped through unvalidated.
        if not (minimum <= result <= maximum):
            raise argparse.ArgumentTypeError(
                "Please provide a value between {0} and {1}".format(
                    minimum, maximum))
        return result
    # BUG FIX: the factory previously fell off the end and returned None.
    return inner
Require variables to be of the specified type, between minimum and maximum
def delete_dagobah(self, dagobah_id):
    """Delete the Dagobah and all child Jobs from the database.

    Related run logs are deleted as well.

    :param dagobah_id: id of the Dagobah document to remove.
    """
    rec = self.dagobah_coll.find_one({'_id': dagobah_id})
    # Delete child jobs first so no orphaned job documents remain.
    for job in rec.get('jobs', []):
        if 'job_id' in job:
            self.delete_job(job['job_id'])
    self.log_coll.remove({'parent_id': dagobah_id})
    self.dagobah_coll.remove({'_id': dagobah_id})
Deletes the Dagobah and all child Jobs from the database. Related run logs are deleted as well.
def default_rotations(*qubits):
    """Generate the Quil programs for the tomographic pre- and
    post-rotations of any number of qubits.

    Yields one Program per element of the cartesian product of the
    tomography gates over all qubits.

    :param list qubits: A list of qubits to perform tomography on.
    """
    for gates in cartesian_product(TOMOGRAPHY_GATES.keys(), repeat=len(qubits)):
        tomography_program = Program()
        # Apply one gate from the combination to each qubit.
        for qubit, gate in izip(qubits, gates):
            tomography_program.inst(gate(qubit))
        yield tomography_program
Generates the Quil programs for the tomographic pre- and post-rotations of any number of qubits. :param list qubits: A list of qubits to perform tomography on.
def make_github_markdown_collector(opts):
    """Create a Collector object used for parsing Markdown files with a
    GitHub style anchor transformation.

    :param opts: Namespace object of options for the AnchorHub program,
        usually created from command-line arguments.  Must contain a
        'wrapper_regex' attribute.
    :return: a Collector object designed for collecting tag/anchor pairs
        from Markdown files using GitHub style anchors.
    """
    assert hasattr(opts, 'wrapper_regex')
    # Both ATX ('# Header') and Setext (underlined) headers are collected.
    atx = MarkdownATXCollectorStrategy(opts)
    setext = MarkdownSetextCollectorStrategy(opts)
    # Switch disables collection inside fenced code blocks.
    code_block_switch = ghswitches.code_block_switch
    strategies = [atx, setext]
    switches = [code_block_switch]
    return Collector(converter.create_anchor_from_header, strategies,
                     switches=switches)
Creates a Collector object used for parsing Markdown files with a GitHub style anchor transformation :param opts: Namespace object of options for the AnchorHub program. Usually created from command-line arguments. It must contain a 'wrapper_regex' attribute :return: a Collector object designed for collecting tag/anchor pairs from Markdown files using GitHub style anchors
def binary_size(self):
    """Return the number of bytes needed to store this parameter."""
    name_bytes = len(self.name.encode('utf-8'))
    desc_bytes = len(self.desc.encode('utf-8'))
    # Six single/double-byte fixed fields surround the variable parts.
    fixed_overhead = 1 + 2 + 1 + 1 + 1 + 1
    return (fixed_overhead + name_bytes + len(self.dimensions)
            + self.total_bytes + desc_bytes)
Return the number of bytes needed to store this parameter.
def _match_value_filter(self, p, value):
    """Return True/False whether ``value`` matches the filter pattern ``p``.

    ``p`` is (operator key, field, operand); the operator is looked up in
    self._VALUE_FILTER_MAP and applied to value[field].
    """
    op_key, field, operand = p[0], p[1], p[2]
    predicate = self._VALUE_FILTER_MAP[op_key]
    return predicate(value[field], operand)
Returns True of False if value in the pattern p matches the filter.
def compare(dicts):
    """Compare dicts by iteration, returning their common members.

    Keys present in every dict are kept; for each such key, the value is
    the list of elements shared by every dict's value for that key.
    """
    common_keys = reduce(lambda x, y: x & y, map(dict.keys, dicts))
    return {
        key: list(reduce(lambda x, y: x & y, [set(d[key]) for d in dicts]))
        for key in common_keys
    }
Compare by iteration
def sleep(self, seconds):
    """Sleep in simulated time.

    Waits in ``sim_time`` increments until ``seconds`` of simulated time
    have elapsed or a stop has been requested.
    """
    started_at = self.time()
    while not self.need_to_stop.is_set():
        if self.time() - started_at >= seconds:
            break
        self.need_to_stop.wait(self.sim_time)
Sleep in simulated time.
def match(self, url):
    """Try to find if url matches against any of the schemes within this
    endpoint.

    Args:
        url: The url to match against each scheme

    Returns:
        True if a matching scheme was found for the url, False otherwise
    """
    try:
        # Python 2 dictionaries expose itervalues().
        schemes = self._urlSchemes.itervalues()
    except AttributeError:
        schemes = self._urlSchemes.values()
    return any(scheme.match(url) for scheme in schemes)
Try to find if url matches against any of the schemes within this endpoint. Args: url: The url to match against each scheme Returns: True if a matching scheme was found for the url, False otherwise
def get_user(self, request):
    """Return the active user named in the request, or ``None``."""
    username = request.data.get('username')
    try:
        return User.objects.get(username=username, is_active=True)
    except User.DoesNotExist:
        return None
return active user or ``None``
def is_legacy_server():
    """Determine execution mode.

    Legacy mode: server version <= v4.20181215.
    """
    with Session() as session:
        ret = session.Kernel.hello()
        # Lexicographic comparison of the 'vX.YYYYMMDD' version strings.
        return ret['version'] <= 'v4.20181215'
Determine execution mode. Legacy mode: <= v4.20181215
def probability_density(self, X):
    """Compute the Gaussian probability density at X.

    Arguments:
        X: `np.ndarray` of shape (n, 1).

    Returns:
        np.ndarray
    """
    self.check_fit()
    density = norm.pdf(X, loc=self.mean, scale=self.std)
    return density
Compute probability density. Arguments: X: `np.ndarray` of shape (n, 1). Returns: np.ndarray
def prefix(self, keys):
    """Set a new prefix key sequence and rebind it.

    :param keys: tuple of key names forming the prefix.
    """
    assert isinstance(keys, tuple)
    self._prefix = keys
    self._load_prefix_binding()
Set a new prefix key.
def _get_correlated_reports_page_generator(self, indicators, enclave_ids=None,
                                           is_enclave=True, start_page=0,
                                           page_size=None):
    """Create a generator from the |get_correlated_reports_page| method
    that returns each successive page.

    :param indicators: A list of indicator values to retrieve correlated
        reports for.
    :param enclave_ids: enclave ids to restrict the query to (optional).
    :param is_enclave: whether to restrict results to enclave reports.
    :param start_page: first page index for the generator.
    :param page_size: number of results per page.
    :return: The generator.
    """
    # Bind the fixed query arguments; the page generator supplies page
    # number and size per iteration.
    get_page = functools.partial(self.get_correlated_reports_page,
                                 indicators, enclave_ids, is_enclave)
    return Page.get_page_generator(get_page, start_page, page_size)
Creates a generator from the |get_correlated_reports_page| method that returns each successive page. :param indicators: A list of indicator values to retrieve correlated reports for. :param enclave_ids: :param is_enclave: :return: The generator.
def _sign_response(self, response):
    """Sign a response with a Server-Authorization header when possible.

    Responses to requests without an Authorization header, or whose Hawk
    header fails verification, are returned unsigned.
    """
    if 'Authorization' not in request.headers:
        # Nothing to sign against -- the request was not Hawk-authenticated.
        return response
    try:
        mohawk_receiver = mohawk.Receiver(
            credentials_map=self._client_key_loader_func,
            request_header=request.headers['Authorization'],
            url=request.url,
            method=request.method,
            content=request.get_data(),
            content_type=request.mimetype,
            accept_untrusted_content=current_app.config['HAWK_ACCEPT_UNTRUSTED_CONTENT'],
            localtime_offset_in_seconds=current_app.config['HAWK_LOCALTIME_OFFSET_IN_SECONDS'],
            timestamp_skew_in_seconds=current_app.config['HAWK_TIMESTAMP_SKEW_IN_SECONDS']
        )
    except mohawk.exc.HawkFail:
        # Invalid credentials or header: fall back to an unsigned response.
        return response
    response.headers['Server-Authorization'] = mohawk_receiver.respond(
        content=response.data,
        content_type=response.mimetype
    )
    return response
Signs a response if it's possible.
def add_layer_to_canvas(layer, name):
    """Helper method to add layer to QGIS.

    :param layer: The layer.
    :type layer: QgsMapLayer
    :param name: Layer name.
    :type name: str
    """
    # The rename API changed at QGIS 2.18 (setLayerName -> setName).
    if qgis_version() >= 21800:
        layer.setName(name)
    else:
        layer.setLayerName(name)
    # NOTE(review): second argument False presumably skips auto-adding
    # the layer to the legend/layer tree -- confirm against the QGIS API.
    QgsProject.instance().addMapLayer(layer, False)
Helper method to add layer to QGIS. :param layer: The layer. :type layer: QgsMapLayer :param name: Layer name. :type name: str
def write_file(self, filename):
    """Write the PWSCF input file.

    Args:
        filename (str): The string filename to output to.
    """
    with open(filename, "w") as output:
        output.write(str(self))
Write the PWSCF input file. Args: filename (str): The string filename to output to.
def get_magnitude_scaling_term(self, C, rup):
    """Return the magnitude scaling term in equations 1 and 2.

    Scaling is linear in magnitude below the corner magnitude ``m_c``
    and continues with a second slope ``dcr`` above it.
    """
    m_c = self.CONSTANTS["m_c"]
    if rup.mag <= m_c:
        return C["ccr"] * rup.mag
    return C["ccr"] * m_c + C["dcr"] * (rup.mag - m_c)
Returns the magnitude scaling term in equations 1 and 2
def flush(self):
    """Emit the queued items as a tuple, then clear the queue."""
    pending = tuple(self._queue)
    self.notify(pending)
    self._queue.clear()
Emits the current queue and clears the queue
def getDigitalActionData(self, action, unActionDataSize, ulRestrictToDevice):
    """Read the state of a digital action given its handle.

    This will return VRInputError_WrongType if the type of action is
    something other than digital.

    :return: tuple of (error code, filled InputDigitalActionData_t struct)
    """
    fn = self.function_table.getDigitalActionData
    pActionData = InputDigitalActionData_t()
    # The struct is filled in place via its pointer (byref).
    result = fn(action, byref(pActionData), unActionDataSize, ulRestrictToDevice)
    return result, pActionData
Reads the state of a digital action given its handle. This will return VRInputError_WrongType if the type of action is something other than digital
def get_info(self, info):
    """Build a calltip/docstring response dict from *info* (fallback backend).

    Falls back to the default response when no docstring is available.
    """
    if not info['docstring']:
        return default_info_response()
    if info['filename']:
        base = os.path.basename(info['filename'])
        name = os.path.splitext(base)[0]
    else:
        name = '<module>'
    return dict(docstring=info['docstring'], name=name, note='',
                argspec='', calltip=None)
Get a formatted calltip and docstring from Fallback
def get_default_prefix(self, instance=None):
    """Return the prefix for this form.

    :param instance: the form model instance.  Usually left as None so the
        bound ``self.instance`` is used instead.
    """
    if instance is None:
        instance = getattr(self, 'instance', None)
    # Saved instances get the instance prefix plus their primary key.
    if instance and instance.id is not None:
        prefix = self.default_instance_prefix
        if prefix is None:
            prefix = self.__class__.__name__.lower() + 'i-'
        return '{0}{1}'.format(prefix, instance.id)
    # Unsaved: use the explicit "new" prefix when set, else a derived one.
    if self.default_new_prefix is not None:
        return self.default_new_prefix
    return self.__class__.__name__.lower() + 'new-'
Gets the prefix for this form. :param instance: the form model instance. When calling this method directly this should almost always stay None so it looks for self.instance.
def save(self, file_name, model_name='default', overwrite=False, save_streaming_chain=False):
    r"""Save the current state of this object to the given file and name.

    Parameters
    ----------
    file_name: str
        path to desired output file
    model_name: str, default='default'
        creates a group named 'model_name' in the given file, which will
        contain all of the data.  If the name already exists and overwrite
        is False (default), a RuntimeError is raised.
    overwrite: bool, default=False
        Should overwrite existing model names?
    save_streaming_chain : boolean, default=False
        if True, the data_producer(s) of this object will also be saved in
        the given file.
    """
    from pyemma._base.serialization.h5file import H5File
    try:
        with H5File(file_name=file_name, mode='a') as f:
            f.add_serializable(model_name, obj=self, overwrite=overwrite,
                               save_streaming_chain=save_streaming_chain)
    except Exception as e:
        # Bug fix: the message previously contained a stray '")' after {obj}.
        msg = ('During saving the object {obj} '
               'the following error occurred: {error}'.format(obj=self, error=e))
        if isinstance(self, Loggable):
            self.logger.exception(msg)
        else:
            logger.exception(msg)
        raise
r""" saves the current state of this object to given file and name. Parameters ----------- file_name: str path to desired output file model_name: str, default='default' creates a group named 'model_name' in the given file, which will contain all of the data. If the name already exists, and overwrite is False (default) will raise a RuntimeError. overwrite: bool, default=False Should overwrite existing model names? save_streaming_chain : boolean, default=False if True, the data_producer(s) of this object will also be saved in the given file. Examples -------- >>> import pyemma, numpy as np >>> from pyemma.util.contexts import named_temporary_file >>> m = pyemma.msm.MSM(P=np.array([[0.1, 0.9], [0.9, 0.1]])) >>> with named_temporary_file() as file: # doctest: +SKIP ... m.save(file, 'simple') # doctest: +SKIP ... inst_restored = pyemma.load(file, 'simple') # doctest: +SKIP >>> np.testing.assert_equal(m.P, inst_restored.P) # doctest: +SKIP
def invalidate(self):
    """Reset the state of every key on every row to 0."""
    all_keys = (key for row in self.rows for key in row.keys)
    for key in all_keys:
        key.state = 0
Resets all key states.
def open(self, writeAccess=False):
    """Return a pooled database connection for the current thread.

    :param writeAccess: when True, connect to the database's write host
                        instead of the default host.
    :return: <variant> connection || None
    """
    host = self.database().writeHost() if writeAccess else self.database().host()
    pool = self.__pool[host]
    # Reuse a pooled connection when the pool has reached its maximum size
    # or an idle connection is already available.
    if self.__poolSize[host] >= self.__maxSize or pool.qsize():
        if pool.qsize() == 0:
            # Pool is saturated with no idle connection; pool.get() below
            # will block until one is released.
            log.warning('Waiting for connection to database!!!')
        return pool.get()
    else:
        db = self.database()
        event = orb.events.ConnectionEvent()
        db.onPreConnect(event)
        # Reserve a slot before connecting so concurrent callers see the
        # updated size; rolled back below if the connect fails.
        self.__poolSize[host] += 1
        try:
            conn = self._open(self.database(), writeAccess=writeAccess)
        except Exception:
            self.__poolSize[host] -= 1
            raise
        else:
            event = orb.events.ConnectionEvent(success=conn is not None, native=conn)
            db.onPostConnect(event)
            return conn
Returns the sqlite database for the current thread. :return <variant> || None
def cli_form(self, *args):
    """Display a schema's form definition; '*' lists every schema's form."""
    if args[0] == '*':
        for schema_name in schemastore:
            self.log(schema_name, ':', schemastore[schema_name]['form'], pretty=True)
    else:
        self.log(schemastore[args[0]]['form'], pretty=True)
Display a schema's form definition
def get_partition_vrfProf(self, org_name, part_name=None, part_info=None):
    """Get the VRF profile for a partition from the DCNM.

    :param org_name: name of organization
    :param part_name: name of partition
    :param part_info: pre-fetched partition info; queried when None
    :return: the VRF profile name, or None if not present
    """
    if part_info is None:
        part_info = self._get_partition(org_name, part_name)
    LOG.info("query result from dcnm for partition info is %s", part_info)
    if "vrfProfileName" in part_info:
        return part_info.get("vrfProfileName")
    return None
get VRF Profile for the partition from the DCNM. :param org_name: name of organization :param part_name: name of partition
def with_exit_condition(self, exit_condition: Optional[bool]=True) -> 'MonitorTask':
    """Set whether the task should also run after the optimisation ends.

    :return: self, so calls can be chained fluently.
    """
    self._exit_condition = exit_condition
    return self
Sets the flag indicating that the task should also run after the optimisation is ended.
def _assemble_translocation(stmt):
    """Assemble a Translocation statement into an English sentence."""
    parts = [_assemble_agent_str(stmt.agent), 'translocates']
    if stmt.from_location is not None:
        parts.append('from the ' + stmt.from_location)
    if stmt.to_location is not None:
        parts.append('to the ' + stmt.to_location)
    return _make_sentence(' '.join(parts))
Assemble Translocation statements into text.
def Popen(self, cmd, **kwargs):
    """Remote Popen: prefix *cmd* via ``_prepare_cmd`` and spawn a subprocess."""
    return subprocess.Popen(self._prepare_cmd(cmd), **kwargs)
Remote Popen.
def yaml_dquote(text):
    """Return *text* as a double-quoted YAML string.

    Special characters are escaped and the opening/closing double quote
    characters are included in the result.
    """
    with io.StringIO() as buf:
        emitter = yaml.emitter.Emitter(buf, width=six.MAXSIZE)
        emitter.write_double_quoted(six.text_type(text))
        return buf.getvalue()
Make text into a double-quoted YAML string with correct escaping for special characters. Includes the opening and closing double quote characters.
def set_author(self):
    """Parse the <author> tag from ``self.soup`` into ``self.author``.

    ``self.author`` is set to None when the tag is absent.
    """
    found = None
    try:
        found = self.soup.find('author').string
    except AttributeError:
        pass
    self.author = found
Parses author and set value.
def sorted_maybe_numeric(x):
    """Sort *x* numerically when every key is a nonnegative integer string,
    otherwise use standard string sorting.

    Uses ``str.isdecimal`` rather than ``str.isdigit``: isdigit also accepts
    characters such as superscripts (e.g. '\u00b2') that ``int()`` rejects,
    which previously raised ValueError on such input.
    """
    if all(s.isdecimal() for s in x):
        return sorted(x, key=int)
    return sorted(x)
Sorts x with numeric semantics if all keys are nonnegative integers. Otherwise uses standard string sorting.
def clean(self):
    """Auto-populate ``urlhash`` from ``url`` when missing or when the url
    field has changed."""
    url_was_changed = 'url' in self._get_changed_fields()
    if not self.urlhash or url_was_changed:
        self.urlhash = hash_url(self.url)
    super(Reuse, self).clean()
Auto populate urlhash from url
def model_post_save(sender, instance, created=False, **kwargs):
    """Signal handler run after any model is saved via the Django ORM.

    Schedules an observer notification for when the surrounding
    transaction commits.

    :param sender: Model class that was saved
    :param instance: The actual instance that was saved
    :param created: True if a new row was created
    """
    # Skip our own bookkeeping models to avoid notification loops.
    if sender._meta.app_label == 'rest_framework_reactive':
        return

    def notify():
        # Evaluated at commit time, mirroring the save-time state lazily.
        table = sender._meta.db_table
        kind = ORM_NOTIFY_KIND_CREATE if created else ORM_NOTIFY_KIND_UPDATE
        notify_observers(table, kind, instance.pk)

    transaction.on_commit(notify)
Signal emitted after any model is saved via Django ORM. :param sender: Model class that was saved :param instance: The actual instance that was saved :param created: True if a new row was created
def add_ticks_to_x(ax, newticks, newnames):
    """Append extra ticks and labels to an axis.

    Useful e.g. for plotting resonance names on the right-hand side.
    """
    ax.set_xticks(list(ax.get_xticks()) + list(newticks))
    ax.set_xticklabels(list(ax.get_xticklabels()) + list(newnames))
Add new ticks to an axis. I use this for the right-hand plotting of resonance names in my plots.
def valid_config_exists(config_path=CONFIG_PATH):
    """Verify that a valid config file exists.

    Args:
        config_path (str): Path to the config file.

    Returns:
        boolean: True if there is a valid config file, False if not.
    """
    if not os.path.isfile(config_path):
        return False
    try:
        check_config(read_config(config_path))
    except (ConfigurationError, IOError):
        return False
    return True
Verify that a valid config file exists. Args: config_path (str): Path to the config file. Returns: boolean: True if there is a valid config file, false if not.
def update_channels(cls, installation_id, channels_to_add=None, channels_to_remove=None, **kw):
    """Manually subscribe or unsubscribe an installation to/from push
    channels in a single unified operation.

    Based on: https://www.parse.com/docs/rest#installations-updating

    :param installation_id: the installation id to modify
    :param channels_to_add: iterable of channel names to subscribe to
    :param channels_to_remove: iterable of channel names to unsubscribe from
    """
    # Bug fix: the defaults were mutable ``set()`` literals shared across
    # calls; use a None sentinel instead (backward compatible).
    if channels_to_add is None:
        channels_to_add = set()
    if channels_to_remove is None:
        channels_to_remove = set()
    installation_url = cls._get_installation_url(installation_id)
    current_config = cls.GET(installation_url)
    new_channels = list(
        set(current_config['channels'])
        .union(channels_to_add)
        .difference(channels_to_remove))
    cls.PUT(installation_url, channels=new_channels)
Allow an application to manually subscribe or unsubscribe an installation to certain push channels in a unified operation. This is based on: https://www.parse.com/docs/rest#installations-updating installation_id: the installation id you'd like to modify channels_to_add: the names of the channels you'd like to subscribe the installation to channels_to_remove: the names of the channels you'd like to unsubscribe the installation from
def _clean_data(self, str_value, file_data, obj_value): str_value = str_value or None obj_value = obj_value or None return (str_value, None, obj_value)
This overwrite is neccesary for work with multivalues
def to_glyphs_master_user_data(self, ufo, master):
    """Set the GSFontMaster userData from the UFO's master-specific lib data
    and data files."""
    user_data = master.userData
    for key, value in ufo.lib.items():
        if _user_data_has_no_special_meaning(key):
            user_data[key] = value
    if ufo.data.fileNames:
        from glyphsLib.types import BinaryData
        # Store UFO data files keyed by their POSIX-style relative path.
        ufo_data = {
            posixpath.join(*os_filename.split(os.path.sep)):
                BinaryData(ufo.data[os_filename])
            for os_filename in ufo.data.fileNames
        }
        master.userData[UFO_DATA_KEY] = ufo_data
Set the GSFontMaster userData from the UFO master-specific lib data.
def get_git_isolation():
    """Return the Git isolation flag from the current Click context, or None
    when no context or flag is present."""
    ctx = click.get_current_context(silent=True)
    if not ctx:
        return None
    if GIT_ISOLATION not in ctx.meta:
        return None
    return ctx.meta[GIT_ISOLATION]
Get Git isolation from the current context.
def arp_packet(opcode, src_mac, src_ip, dst_mac, dst_ip):
    """Generate a serialized ARP packet encapsulated in an Ethernet frame."""
    pkt = packet.Packet()
    pkt.add_protocol(ethernet.ethernet(dst_mac, src_mac, ETH_TYPE_ARP))
    pkt.add_protocol(arp.arp_ip(opcode, src_mac, src_ip, dst_mac, dst_ip))
    pkt.serialize()
    return pkt.data
Generate ARP packet with ethernet encapsulated.
def _translate_space(self, space):
    """Translate a list of descriptor dictionaries into the internal list of
    variables.

    Populates ``self.space``, ``self.dimensionality`` and ``self.has_types``.
    Raises InvalidConfigError on a missing domain or a mixed bandit/non-bandit
    configuration.
    """
    self.space = []
    self.dimensionality = 0
    self.has_types = {t: False for t in self.supported_types}

    for idx, descr in enumerate(space):
        descriptor = deepcopy(descr)
        descriptor.setdefault('name', 'var_' + str(idx))
        descriptor.setdefault('type', 'continuous')
        if 'domain' not in descriptor:
            raise InvalidConfigError('Domain attribute is missing for variable ' + descriptor['name'])
        variable = create_variable(descriptor)
        self.space.append(variable)
        self.dimensionality += variable.dimensionality
        self.has_types[variable.type] = True

    # Bandit variables must be all-or-nothing across the space.
    bandit_flags = [v.is_bandit() for v in self.space]
    if any(bandit_flags) and not all(bandit_flags):
        raise InvalidConfigError('Invalid mixed domain configuration. Bandit variables cannot be mixed with other types.')
Translates a list of dictionaries into internal list of variables
def undo(self, change=None, drop=False, task_handle=taskhandle.NullTaskHandle()):
    """Undo done changes from the history.

    When `change` is `None`, the last done change will be undone.  If
    `change` is not `None` it should be an item from `self.undo_list`;
    this change and all changes that depend on it will be undone.  In
    both cases the list of undone changes will be returned.

    If `drop` is `True`, the undone changes will not be appended to the
    redo list.
    """
    if not self._undo_list:
        raise exceptions.HistoryError('Undo list is empty')
    if change is None:
        change = self.undo_list[-1]
    # Changes depending on `change` must be undone along with it; move
    # them to the front so they are performed as one batch.
    dependencies = self._find_dependencies(self.undo_list, change)
    self._move_front(self.undo_list, dependencies)
    self._perform_undos(len(dependencies), task_handle)
    # _perform_undos appends the undone changes to the redo list; the
    # tail of that list is the batch just undone.
    result = self.redo_list[-len(dependencies):]
    if drop:
        del self.redo_list[-len(dependencies):]
    return result
Undo done changes from the history. When `change` is `None`, the last done change will be undone. If change is not `None` it should be an item from `self.undo_list`; this change and all changes that depend on it will be undone. In both cases the list of undone changes will be returned. If `drop` is `True`, the undone changes will not be appended to the redo list.
def unmount(self):
    """Run every unmount step in the required order, then clean up dirs.

    Order matters: bind mounts sit on top of regular mounts, which sit on
    volume groups, loopbacks and base images.
    """
    steps = (
        self.unmount_bindmounts,
        self.unmount_mounts,
        self.unmount_volume_groups,
        self.unmount_loopbacks,
        self.unmount_base_images,
        self.clean_dirs,
    )
    for step in steps:
        step()
Calls all unmount methods in the correct order.