code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _play(self):
    """Relay items from the internal buffer to the app source, forever."""
    # NOTE(review): busy-waits while self._buffered is falsy — confirm
    # whether a sleep/condition variable is provided upstream.
    while True:
        if self._buffered:
            self._source.push(self._buffer.get())
Relay buffer to app source.
def _select_phase_left_bound(self, epoch_number): idx = bisect.bisect_left(self.ladder, epoch_number) if idx >= len(self.ladder): return len(self.ladder) - 1 elif self.ladder[idx] > epoch_number: return idx - 1 else: return idx
Return number of current phase. Return index of first phase not done after all up to epoch_number were done.
def npedln(a, b, c, linept, linedr): a = ctypes.c_double(a) b = ctypes.c_double(b) c = ctypes.c_double(c) linept = stypes.toDoubleVector(linept) linedr = stypes.toDoubleVector(linedr) pnear = stypes.emptyDoubleVector(3) dist = ctypes.c_double() libspice.npedln_c(a, b, c, linept, linedr, ...
Find nearest point on a triaxial ellipsoid to a specified line and the distance from the ellipsoid to the line. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/npedln_c.html :param a: Length of ellipsoid's semi-axis in the x direction :type a: float :param b: Length of ellipsoid's semi-axi...
def get_results_as_xarray(self, parameter_space, result_parsing_function, output_labels, runs): np_array = np.array( self.get_space( self.db.get_complete_results(), {}, collections.OrderedDict([(k, v) for k, ...
Return the results relative to the desired parameter space in the form of an xarray data structure. Args: parameter_space (dict): The space of parameters to export. result_parsing_function (function): user-defined function, taking a result dictionary as argument,...
def convertShape(shapeString): cshape = [] for pointString in shapeString.split(): p = [float(e) for e in pointString.split(",")] if len(p) == 2: cshape.append((p[0], p[1], 0.)) elif len(p) == 3: cshape.append(tuple(p)) else: raise ValueError( ...
Convert an xml shape string into float tuples. This method converts the 2d or 3d shape string from SUMO's xml file into a list containing 3d float-tuples. Non-existent z coordinates default to zero. If shapeString is empty, an empty list will be returned.
def get_xid_device(device_number):
    """Return the XID device at the given index.

    Opens the device's COM connection before wrapping it.
    Raises if the device at the passed-in index doesn't exist.
    """
    scanner = XidScanner()
    com = scanner.device_at_index(device_number)
    com.open()
    return XidDevice(com)
Returns the device at a given index. Raises ValueError if the device at the passed-in index doesn't exist.
def DeleteAttributes(self, subject, attributes, start=None, end=None, sync=True): _ = sync if not attributes: return if isinstance(attributes, string_types): raise ValueError( ...
Remove some attributes from a subject.
def get_grounded_agent(gene_name): db_refs = {'TEXT': gene_name} if gene_name in hgnc_map: gene_name = hgnc_map[gene_name] hgnc_id = hgnc_client.get_hgnc_id(gene_name) if hgnc_id: db_refs['HGNC'] = hgnc_id up_id = hgnc_client.get_uniprot_id(hgnc_id) if up_id: ...
Return a grounded Agent based on an HGNC symbol.
def expect(obj, strict=None, times=None, atleast=None, atmost=None, between=None): if strict is None: strict = True theMock = _get_mock(obj, strict=strict) verification_fn = _get_wanted_verification( times=times, atleast=atleast, atmost=atmost, between=between) class Expect(ob...
Stub a function call, and set up an expected call count. Usage:: # Given `dog` is an instance of a `Dog` expect(dog, times=1).bark('Wuff').thenReturn('Miau') dog.bark('Wuff') dog.bark('Wuff') # will throw at call time: too many invocations # maybe if you need to ensure th...
def update_edges(self, elev_fn, dem_proc):
    """Update this tile's neighbors and todo-list after a calculation."""
    # Build the interpolator once and reuse it for the neighbor update.
    interp = self.build_interpolator(dem_proc)
    self.update_edge_todo(elev_fn, dem_proc)
    self.set_neighbor_data(elev_fn, dem_proc, interp)
After finishing a calculation, this will update the neighbors and the todo for that tile
def visit_BoolOp(self, node: ast.BoolOp) -> Any: values = [self.visit(value_node) for value_node in node.values] if isinstance(node.op, ast.And): result = functools.reduce(lambda left, right: left and right, values, True) elif isinstance(node.op, ast.Or): result = functoo...
Recursively visit the operands and apply the operation on them.
def to_date(ts: float) -> datetime.date:
    """Convert a POSIX timestamp to a UTC calendar date.

    >>> to_date(978393600.0)
    datetime.date(2001, 1, 2)
    """
    moment = datetime.datetime.fromtimestamp(ts, tz=datetime.timezone.utc)
    return moment.date()
Convert timestamp to date. >>> to_date(978393600.0) datetime.date(2001, 1, 2)
def _check_if_downloaded(self): if not os.path.isfile(self.path + self.file_name): print("") self.msg.template(78) print("| Download '{0}' file [ {1}FAILED{2} ]".format( self.file_name, self.meta.color["RED"], self.meta.color["ENDC"])) ...
Check if file downloaded
def llen(key, host=None, port=None, db=None, password=None):
    """Return the length of a list in Redis.

    CLI Example::

        salt '*' redis.llen foo_list
    """
    server = _connect(host, port, db, password)
    return server.llen(key)
Get the length of a list in Redis CLI Example: .. code-block:: bash salt '*' redis.llen foo_list
def write_template_to_file(conf, template_body): template_file_name = _get_stack_name(conf) + '-generated-cf-template.json' with open(template_file_name, 'w') as opened_file: opened_file.write(template_body) print('wrote cf-template for %s to disk: %s' % ( get_env(), template_file_name)) ...
Writes the template to disk
def targets_by_artifact_set(self, targets):
    """Partition *targets* by the pinned artifact set managing each.

    :param collections.Iterable targets: the input targets.
    :return: mapping of artifact set -> list of targets it manages.
    :rtype: dict
    """
    partitioned = {}
    for tgt in targets:
        partitioned.setdefault(self.for_target(tgt), []).append(tgt)
    return partitioned
Partitions the input targets by the sets of pinned artifacts they are managed by. :param collections.Iterable targets: the input targets (typically just JarLibrary targets). :return: a mapping of PinnedJarArtifactSet -> list of targets. :rtype: dict
def record_prefix(required_type, factory):
    """Create a record prefix for the specified record type.

    :param required_type: the type of the record using this prefix
    :param factory: field factory
    :return: the record prefix
    """
    field = record_type(required_type)
    # Sequence-number rules are accumulated onto the base field;
    # `+=` may be in-place on the field object, so order matters.
    field += factory.get_rule('transaction_sequence_n')
    field += factory.get_rule('record_sequence_n')
    return field
Creates a record prefix for the specified record type. :param required_type: the type of the record using this prefix :param factory: field factory :return: the record prefix
def extraterrestrial_direct_normal_radiation(self, value=9999.0): if value is not None: try: value = float(value) except ValueError: raise ValueError( 'value {} need to be of type float ' 'for field `extraterrestrial...
Corresponds to IDD Field `extraterrestrial_direct_normal_radiation` Args: value (float): value for IDD Field `extraterrestrial_direct_normal_radiation` Unit: Wh/m2 value >= 0.0 Missing value: 9999.0 if `value` is None it will not be ch...
def update_context(self, context, app=None): if (app is None and self._context is _CONTEXT_MISSING and not in_app_context()): raise RuntimeError("Attempted to update component context without" " a bound app context or eager app set! Please" ...
Replace the component's context with a new one. Args: context (dict): The new context to set this component's context to. Keyword Args: app (flask.Flask, optional): The app to update this context for. If not provided, the result of ``Component.app`` will be used...
def create(self, client=None, project=None, location=None): if self.user_project is not None: raise ValueError("Cannot create bucket with 'user_project' set.") client = self._require_client(client) if project is None: project = client.project if project is None: ...
Creates current bucket. If the bucket already exists, will raise :class:`google.cloud.exceptions.Conflict`. This implements "storage.buckets.insert". If :attr:`user_project` is set, bills the API request to that project. :type client: :class:`~google.cloud.storage.client.Clie...
def rlmb_grid(rhp):
    """Grid over games and frame counts, and 5 runs each for variance."""
    rhp.set_categorical("loop.game", ["breakout", "pong", "freeway"])
    base = 100000
    # base, half and quarter frame budgets.
    frame_choices = [base, base // 2, base // 4]
    rhp.set_discrete("loop.num_real_env_frames", frame_choices)
    rhp.set_discrete("model.moe_loss_coef", list(range(5)))
Grid over games and frames, and 5 runs each for variance.
def properties(self):
    """Android properties parsed from `adb shell getprop`.

    Returns:
        dict of props, e.g. {'ro.bluetooth.dun': 'true'}
    """
    result = {}
    output = self.adb_shell(['getprop'])
    for raw_line in output.splitlines():
        match = _PROP_PATTERN.match(raw_line)
        if not match:
            continue
        result[match.group('key')] = match.group('value')
    return result
Android Properties, extracted from `adb shell getprop` Returns: dict of props, for example: {'ro.bluetooth.dun': 'true'}
def print_summaries(self): if hasattr(self, "fit_summary") and hasattr(self, "summary"): print("\n") print(self.fit_summary) print("=" * 30) print(self.summary) else: msg = "This {} object has not yet been estimated so there " msg_2...
Returns None. Will print the measures of fit and the estimation results for the model.
def cat_acc(y, z):
    """Weighted classification accuracy for the multi-categorical case.

    Compares argmax of targets *y* and predictions *z* along the last
    axis; per-sample weights come from ``_cat_sample_weights(y)``.
    """
    weights = _cat_sample_weights(y)
    _acc = K.cast(K.equal(K.argmax(y, axis=-1), K.argmax(z, axis=-1)), K.floatx())
    # Weighted mean: samples with weight 0 do not contribute.
    _acc = K.sum(_acc * weights) / K.sum(weights)
    return _acc
Classification accuracy for multi-categorical case
def srt_formatter(subtitles, padding_before=0, padding_after=0): sub_rip_file = pysrt.SubRipFile() for i, ((start, end), text) in enumerate(subtitles, start=1): item = pysrt.SubRipItem() item.index = i item.text = six.text_type(text) item.start.seconds = max(0, start - padding_be...
Serialize a list of subtitles according to the SRT format, with optional time padding.
def cli(env, account_id, origin_id):
    """Remove an origin pull mapping from a CDN account."""
    manager = SoftLayer.CDNManager(env.client)
    manager.remove_origin(account_id, origin_id)
Remove an origin pull mapping.
def _imread(self, file):
    """Proxy to skimage.io.imread with plugin fallback.

    Reads with the 'imageio' plugin first; if the result is not a 2-D
    image, re-reads with the 'matplotlib' plugin.
    """
    img = skimage_io.imread(file, as_gray=self.as_gray, plugin='imageio')
    if img is not None and len(img.shape) != 2:
        img = skimage_io.imread(file, as_gray=self.as_gray, plugin='matplotlib')
    return img
Proxy to skimage.io.imread with some fixes.
def contrib_phone(contrib_tag):
    """Given a contrib tag, return the text of its first phone tag.

    :param contrib_tag: parsed contrib element
    :return: phone text, or None when no phone tag is present
    """
    # Parse once instead of calling raw_parser.phone() twice.
    phone_tags = raw_parser.phone(contrib_tag)
    if phone_tags:
        return first(phone_tags).text
    return None
Given a contrib tag, look for a phone tag
def start(environment, opts):
    """Create containers and start serving the environment.

    If the environment is already fully running, report its address and
    return without reloading.
    """
    environment.require_data()
    if environment.fully_running():
        # Fixed: Python 2 `print` statement -> py3 print() for
        # consistency with the rest of the file.
        print('Already running at {0}'.format(environment.web_address()))
        return
    reload_(environment, opts)
Create containers and start serving environment Usage: datacats start [-b] [--site-url SITE_URL] [-p|--no-watch] [-s NAME] [-i] [--syslog] [--address=IP] [ENVIRONMENT [PORT]] datacats start -r [-b] [--site-url SITE_URL] [-s NAME] [--syslog] [-i] [--address=IP] [ENVIRONMENT] Optio...
def upgrade_tools(name, reboot=False, call=None): if call != 'action': raise SaltCloudSystemExit( 'The upgrade_tools action must be called with ' '-a or --action.' ) vm_ref = salt.utils.vmware.get_mor_by_property(_get_si(), vim.VirtualMachine, name) return _upg_tools_...
To upgrade VMware Tools on a specified virtual machine. .. note:: If the virtual machine is running Windows OS, use ``reboot=True`` to reboot the virtual machine after VMware tools upgrade. Default is ``reboot=False`` CLI Example: .. code-block:: bash salt-cloud -a upgra...
def newBuild(self, requests):
    """Create a new Build instance.

    @param requests: a list of buildrequest dictionaries describing
        what is to be built
    """
    b = self.buildClass(requests)
    b.useProgress = self.useProgress
    b.workdir = self.workdir
    b.setStepFactories(self.steps)
    return b
Create a new Build instance. @param requests: a list of buildrequest dictionaries describing what is to be built
def search(self, search_phrase, limit=None): from ambry.identity import ObjectNumber from ambry.orm.exc import NotFoundError from ambry.library.search_backends.base import SearchTermParser results = [] stp = SearchTermParser() parsed_terms = stp.parse(search_phrase) ...
Search for datasets, and expand to database records
def looks_like_a_filename(kernel_source): logging.debug('looks_like_a_filename called') result = False if isinstance(kernel_source, str): result = True if len(kernel_source) > 250: result = False for c in "();{}\\": if c in kernel_source: resul...
attempt to detect whether source code or a filename was passed
def is_valid(file_path):
    """Return True if *file_path* is an existing, openable, non-empty file.

    :param file_path: path to check
    :return: bool
    """
    from os import path, stat
    # Probe readability; unreadable/missing files are invalid.
    try:
        with open(file_path):
            pass
    except IOError:
        return False
    # Removed unused `can_open` flag; isfile() already implies exists().
    return path.isfile(file_path) and stat(file_path).st_size > 0
Check to see if a file exists or is empty.
def factory(obj, field, source_text, lang, context=""): obj_classname = obj.__class__.__name__ obj_module = obj.__module__ source_md5 = checksum(source_text) translation = "" field_lang = trans_attr(field,lang) if hasattr(obj,field_lang) and getattr(obj,field_lang)!="": translation = getattr(obj,field_la...
Static method that constructs a translation based on its contents.
def setup_shot_signals(self, ): log.debug("Setting up shot page signals.") self.shot_prj_view_pb.clicked.connect(self.shot_view_prj) self.shot_seq_view_pb.clicked.connect(self.shot_view_seq) self.shot_asset_view_pb.clicked.connect(self.shot_view_asset) self.shot_asset_create_pb.c...
Setup the signals for the shot page :returns: None :rtype: None :raises: None
def remove_available_work_units(self, work_spec_name, work_unit_names):
    """Remove work units from the available queue.

    ``priority_max=time.time()`` restricts removal to units that are
    already available now (not scheduled for the future).

    :param str work_spec_name: name of the work spec
    :param work_unit_names: names to remove, or None for all
    """
    return self._remove_some_work_units(
        work_spec_name, work_unit_names, priority_max=time.time())
Remove some work units in the available queue. If `work_unit_names` is :const:`None` (which must be passed explicitly), all available work units in `work_spec_name` are removed; otherwise only the specific named work units will be. :param str work_spec_name: name of the work spec ...
def check_election_status(self, config): try: record = self._get_record( config.get("record_kind", ""), config.get("record_name", ""), config.get("record_namespace", "") ) self._report_status(config, record) except Exception as e: self.warn...
Retrieves the leader-election annotation from a given object, and submits metrics and a service check. An integration warning is sent if the object is not retrievable, or no record is found. Monitors on the service-check should have no-data alerts enabled to account for this. T...
def _stringify_val(val): if isinstance(val, list): return " ".join([str(i) for i in val]) else: return str(val)
Convert the given value to string.
def set_sampler_info(self, sample): if sample.getSampler() and sample.getDateSampled(): return True sampler = self.get_form_value("Sampler", sample, sample.getSampler()) sampled = self.get_form_value("getDateSampled", sample, sample.getDateSample...
Updates the Sampler and the Sample Date with the values provided in the request. If neither Sampler nor SampleDate are present in the request, returns False
def save(self): for exp, d in dict(self).items(): if isinstance(d, dict): project_path = self.projects[d['project']]['root'] d = self.rel_paths(copy.deepcopy(d)) fname = osp.join(project_path, '.project', exp + '.yml') if not osp.exists...
Save the experiment configuration This method stores the configuration of each of the experiments in a file ``'<project-dir>/.project/<experiment>.yml'``, where ``'<project-dir>'`` corresponds to the project directory of the specific ``'<experiment>'``. Furthermore it dumps all experime...
def update_connected(self, status):
    """Record the new connected *status* and notify via the callback."""
    self._client['connected'] = status
    _LOGGER.info('updated connected status to %s on %s', status, self.friendly_name)
    self.callback()
Update connected.
def num_examples(self):
    """The number of examples this subset spans."""
    if self.is_list:
        return len(self.list_or_slice)
    else:
        # Normalize the slice against the original dataset size.
        start, stop, step = self.slice_to_numerical_args(
            self.list_or_slice, self.original_num_examples)
        # NOTE(review): `stop - start` ignores step — presumably
        # step is always 1 here; confirm against callers.
        return stop - start
The number of examples this subset spans.
def change_password(self, usrname, oldpwd, newpwd, callback=None):
    """Change a user's password via the 'changePassword' command.

    :param usrname: user name
    :param oldpwd: current password
    :param newpwd: new password
    :param callback: optional completion callback
    """
    params = {'usrName': usrname,
              'oldPwd': oldpwd,
              'newPwd': newpwd,
              }
    return self.execute_command('changePassword', params, callback=callback)
Change password.
def from_dict(cls, entries, **kwargs):
    """Create a Catalog from the given mapping of name -> entry.

    Parameters
    ----------
    entries : dict-like
        Mapping of name:entry.
    kwargs : passed on to the constructor.
    """
    from dask.base import tokenize
    cat = cls(**kwargs)
    cat._entries = entries
    # Token identifies this catalog's contents (used for caching).
    cat._tok = tokenize(kwargs, entries)
    return cat
Create Catalog from the given set of entries Parameters ---------- entries : dict-like A mapping of name:entry which supports dict-like functionality, e.g., is derived from ``collections.abc.Mapping``. kwargs : passed on the constructor Things like me...
def setActivities(self, *args, **kwargs): def activityDate(activity): try: return activity['activity']['timestamp'] except KeyError as kerr: return None try: activities = self.mambuactivitiesclass(groupId=self['encodedKey'], *args, **kw...
Adds the activities for this group to an 'activities' field. Activities are MambuActivity objects. Activities get sorted by activity timestamp. Returns the number of requests done to Mambu.
def display_replica_imbalance(cluster_topologies): assert cluster_topologies rg_ids = list(next(six.itervalues(cluster_topologies)).rgs.keys()) assert all( set(rg_ids) == set(cluster_topology.rgs.keys()) for cluster_topology in six.itervalues(cluster_topologies) ) rg_imbalances = [ ...
Display replica replication-group distribution imbalance statistics. :param cluster_topologies: A dictionary mapping a string name to a ClusterTopology object.
def set_and_get(self, new_value):
    """Set the reference to the new value and return it.

    :param new_value: (object), the new value.
    :return: (object), the new value.
    """
    return self._encode_invoke(atomic_reference_set_and_get_codec,
                               new_value=self._to_data(new_value))
Sets and gets the value. :param new_value: (object), the new value. :return: (object), the new value.
def ping(self):
    """Send a PING command to the device at this address."""
    msg = StandardSend(self._address, COMMAND_PING_0X0F_0X00)
    self._send_msg(msg)
Ping a device.
def read_d1_letter(fin_txt):
    """Read GO-id -> letter aliases from a GoDepth1LettersWr text file.

    Only non-indented lines are used; the alias is the line's first
    character and the key is the first GO id found on the line.
    """
    pattern = re.compile(r"(GO:\d{7})")
    aliases = {}
    with open(fin_txt) as stream:
        for text_line in stream:
            found = pattern.search(text_line)
            if found is None or text_line.startswith(' '):
                continue
            aliases[found.group(1)] = text_line[0]
    return aliases
Reads letter aliases from a text file created by GoDepth1LettersWr.
def get_operation_output_names(self, operation_name):
    """Generate the names of all output tensors of an operation.

    Args:
        operation_name: a string, the name of an operation in the graph.

    Yields:
        a string, the name of an output tensor.
    """
    for output_tensor in self._name_to_operation(operation_name).outputs:
        yield output_tensor.name
Generates the names of all output tensors of an operation. Args: operation_name: a string, the name of an operation in the graph. Yields: a string, the name of an output tensor.
def lp7(self, reaction_subset): if self._zl is None: self._add_maximization_vars() positive = set(reaction_subset) - self._flipped negative = set(reaction_subset) & self._flipped v = self._v.set(positive) zl = self._zl.set(positive) cs = self._prob.add_linear_...
Approximately maximize the number of reaction with flux. This is similar to FBA but approximately maximizing the number of reactions in subset with flux > epsilon, instead of just maximizing the flux of one particular reaction. LP7 prefers "flux splitting" over "flux concentrating".
def read(self, length=-1):
    """Read up to *length* items from the FIFO.

    A negative (or omitted) *length* drains everything currently
    buffered; an empty result is returned when no data is available.
    """
    if 0 <= length < len(self):
        newpos = self.pos + length
        data = self.buf[self.pos:newpos]
        self.pos = newpos
        # Reclaim consumed space from the front of the buffer.
        self.__discard()
        return data
    # Requested everything (or more than available): return remainder.
    data = self.buf[self.pos:]
    self.clear()
    return data
Reads from the FIFO. Reads as much data as possible from the FIFO up to the specified length. If the length argument is negative or ommited all data currently available in the FIFO will be read. If there is no data available in the FIFO an empty string is returned. Args: ...
def refresh_instruments(self): def list_access_nested_dict(dict, somelist): return reduce(operator.getitem, somelist, dict) def update(item): if item.isExpanded(): for index in range(item.childCount()): child = item.child(index) ...
if self.tree_settings has been expanded, ask instruments for their actual values
def fetch(self, plan_id, data={}, **kwargs):
    """Fetch the Plan for the given id.

    Args:
        plan_id: Id for which the Plan object has to be retrieved

    Returns:
        Plan dict for the given id
    """
    # NOTE(review): mutable default `data={}` is shared across calls;
    # safe only if the superclass never mutates it — confirm.
    return super(Plan, self).fetch(plan_id, data, **kwargs)
Fetch Plan for given Id Args: plan_id : Id for which Plan object has to be retrieved Returns: Plan dict for given subscription Id
def add_scheduling_block(config): try: DB.add_sbi(config) except jsonschema.ValidationError as error: error_dict = error.__dict__ for key in error_dict: error_dict[key] = error_dict[key].__str__() error_response = dict(message="Failed to add scheduling block", ...
Adds a scheduling block to the database, returning a response object
def filter_data(self, pattern=''): filtered_profiles = {} with open(self.abspath) as fobj: for idx, line in enumerate(fobj): if 'TIME SERIES' in line: break if pattern in line and (idx-self._attributes['CATALOG']-1) > 0: ...
Filter available variables
def multizone_member_removed(self, member_uuid): casts = self._casts if member_uuid not in casts: casts[member_uuid] = {'listeners': [], 'groups': set()} casts[member_uuid]['groups'].discard(self._group_uuid) for listener in list(casts[member_uuid]['listeners']): ...
Handle removed audio group member.
def display(self): for pkg in self.binary: name = GetFromInstalled(pkg).name() ver = GetFromInstalled(pkg).version() find = find_package("{0}{1}{2}".format(name, ver, self.meta.sp), self.meta.pkg_path) if find: packa...
Print the Slackware packages contents
def to_strings(self):
    """Convert the expansion node to a list of expansion strings.

    :return: expansion strings for the leaf nodes of the tree.
    :rtype: list
    """
    if not self.expansions:
        return [self.name]
    return [
        '{}.{}'.format(self.name, leaf)
        for child in self.expansions
        for leaf in child.to_strings()
    ]
Convert the expansion node to a list of expansion strings. :return: a list of expansion strings that represent the leaf nodes of the expansion tree. :rtype: list[union[str, unicode]]
def from_dataframe(df, name='df', client=None):
    """Construct an ibis table from a DataFrame (EXPERIMENTAL).

    If *client* is given, its dictionary is mutated with *name*;
    otherwise a fresh client is connected with just this frame.
    """
    if client is None:
        return connect({name: df}).table(name)
    client.dictionary[name] = df
    return client.table(name)
convenience function to construct an ibis table from a DataFrame EXPERIMENTAL API Parameters ---------- df : DataFrame name : str, default 'df' client : Client, default new PandasClient client dictionary will be mutated with the name of the DataFrame Returns ------...
def get_doc_from_docid(self, docid, doc_type_name=None, inst=True): assert(docid is not None) if docid in self._docs_by_id: return self._docs_by_id[docid] if not inst: return None doc = self.__inst_doc(docid, doc_type_name) if doc is None: retu...
Try to find a document based on its document id. if inst=True, if it hasn't been instantiated yet, it will be.
def send_command(self, cmd):
    """Send a newline-terminated command to the remote SSH server.

    :param cmd: The command to send
    """
    logger.debug('Sending {0} command.'.format(cmd))
    self.comm_chan.sendall(cmd + '\n')
Send a command to the remote SSH server. :param cmd: The command to send
def display_notes(self, notes):
    """Render "notes" reported by ENSIME, such as typecheck errors.

    Uses Syntastic when the :SyntasticCheck command exists in Vim,
    otherwise falls back to the plain renderer.
    """
    hassyntastic = bool(int(self._vim.eval('exists(":SyntasticCheck")')))
    if hassyntastic:
        self.__display_notes_with_syntastic(notes)
    else:
        self.__display_notes(notes)
    self._vim.command('redraw!')
Renders "notes" reported by ENSIME, such as typecheck errors.
def generators(self):
    """Connected generators within the grid.

    Returns
    -------
    list
        List of Generator objects.
    """
    if not self._generators:
        # NOTE(review): the freshly built list is returned but never
        # stored on self._generators, so it is rebuilt on every call
        # while the cache is empty — confirm whether caching was meant.
        generators = list(self.graph.nodes_by_attribute('generator'))
        generators.extend(list(self.graph.nodes_by_attribute(
            'generator_aggr')))
        return generators
    else:
        return self._generators
Connected Generators within the grid Returns ------- list List of Generator Objects
def get(self): attachment = {} if self.file_content is not None: attachment["content"] = self.file_content.get() if self.file_type is not None: attachment["type"] = self.file_type.get() if self.file_name is not None: attachment["filename"] = self.file_...
Get a JSON-ready representation of this Attachment. :returns: This Attachment, ready for use in a request body. :rtype: dict
def get_threshold(self): if self.threshold.startswith('+'): if self.threshold[1:].isdigit(): self._threshold = int(self.threshold[1:]) self._upper = True elif self.threshold.startswith('-'): if self.threshold[1:].isdigit(): self._th...
Get and validate raw RMS value from threshold
def find_by_name(collection, name, exact=True):
    """Search a collection by resource name.

    With ``exact=False`` the raw (possibly multi-item) index result is
    returned; otherwise only the resource whose soul name matches
    exactly is returned (or None).
    """
    query = {'filter[]': ['name==%s' % name]}
    matches = collection.index(params=query)
    if not exact and matches:
        return matches
    for candidate in matches:
        if candidate.soul['name'] == name:
            return candidate
Searches collection by resource name. :param rightscale.ResourceCollection collection: The collection in which to look for :attr:`name`. :param str name: The name to look for in collection. :param bool exact: A RightScale ``index`` search with a :attr:`name` filter can return multiple res...
def to_primitive(self, load_rels=None, sparse_fields=None, *args, **kwargs): if load_rels: for rel in load_rels: getattr(self, rel).load() data = super(Model, self).to_primitive(*args, **kwargs) if sparse_fields: for key in data.keys()...
Override the schematics native to_primitive method :param loads_rels: List of field names that are relationships that should be loaded for the serialization process. This needs to be run before the native schematics to_primitive is run so the proper data is seria...
def create_comment_browser(self, layout):
    """Create a comment browser and insert it into the given layout.

    :param layout: the layout to insert the browser into
    :returns: the created browser
    """
    brws = CommentBrowser(1, headers=['Comments:'])
    layout.insertWidget(1, brws)
    return brws
Create a comment browser and insert it into the given layout :param layout: the layout to insert the browser into :type layout: QLayout :returns: the created browser :rtype: :class:`jukeboxcore.gui.widgets.browser.ListBrowser` :raises: None
def getModifiers(chart): modifiers = [] asc = chart.getAngle(const.ASC) ascRulerID = essential.ruler(asc.sign) ascRuler = chart.getObject(ascRulerID) moon = chart.getObject(const.MOON) factors = [ [MOD_ASC, asc], [MOD_ASC_RULER, ascRuler], [MOD_MOON, moon] ] mars ...
Returns the factors of the temperament modifiers.
def update_invoice(self, invoice_id, invoice_dict):
    """Update an invoice via a PUT request.

    :param invoice_id: the invoice id
    :param invoice_dict: dict with the new invoice data
    :return: dict
    """
    return self._create_put_request(resource=INVOICES,
                                    billomat_id=invoice_id,
                                    send_data=invoice_dict)
Updates an invoice :param invoice_id: the invoice id :param invoice_dict: dict :return: dict
def _get_cached_file_name(bucket_name, saltenv, path):
    """Return the cached file name for a bucket path file, creating the
    containing directory if needed.

    :param bucket_name: S3 bucket name
    :param saltenv: salt environment
    :param path: file path within the bucket
    :return: absolute cache path
    """
    file_path = os.path.join(_get_cache_dir(), saltenv, bucket_name, path)
    # exist_ok avoids the race between the old exists() check and
    # makedirs() when another process creates the directory first.
    os.makedirs(os.path.dirname(file_path), exist_ok=True)
    return file_path
Return the cached file name for a bucket path file
def build_evenly_discretised_mfd(mfd):
    """Return the evenly discretized MFD as a Node.

    :param mfd: EvenlyDiscretizedMFD instance
    :returns: ``incrementalMFD`` Node carrying binWidth/minMag
        attributes and an ``occurRates`` child node
    """
    occur_rates = Node("occurRates", text=mfd.occurrence_rates)
    return Node("incrementalMFD",
                {"binWidth": mfd.bin_width, "minMag": mfd.min_mag},
                nodes=[occur_rates])
Returns the evenly discretized MFD as a Node :param mfd: MFD as instance of :class: `openquake.hazardlib.mfd.evenly_discretized.EvenlyDiscretizedMFD` :returns: Instance of :class:`openquake.baselib.node.Node`
def create_post(self, post_type, post_folders, post_subject, post_content, is_announcement=0, bypass_email=0, anonymous=False): params = { "anonymous": "yes" if anonymous else "no", "subject": post_subject, "content": post_content, "folders": post_folders, ...
Create a post It seems like if the post has `<p>` tags, then it's treated as HTML, but is treated as text otherwise. You'll want to provide `content` accordingly. :type post_type: str :param post_type: 'note', 'question' :type post_folders: str :param post_folde...
def color_text_boxes(ax, labels, colors, color_arrow=True): assert len(labels) == len(colors), \ "Equal no. of colors and lables must be given" boxes = ax.findobj(mpl.text.Annotation) box_labels = lineid_plot.unique_labels(labels) for box in boxes: l = box.get_label() try: ...
Color text boxes. Instead of this function, one can pass annotate_kwargs and plot_kwargs to plot_line_ids function.
def solvate_bilayer(self): solvent_number_density = self.solvent.n_particles / np.prod(self.solvent.periodicity) lengths = self.lipid_box.lengths water_box_z = self.solvent_per_layer / (lengths[0] * lengths[1] * solvent_number_density) mins = self.lipid_box.mins maxs = self.lipid...
Solvate the constructed bilayer.
def pass_verbosity(f):
    """Decorator marking a callback as wanting the verbosity passed in
    as a keyword argument.
    """
    def wrapper(*args, **kwargs):
        ctx = click.get_current_context()
        kwargs['verbosity'] = ctx.verbosity
        return f(*args, **kwargs)
    return update_wrapper(wrapper, f)
Marks a callback as wanting to receive the verbosity as a keyword argument.
def fromimportreg(cls, bundle, import_reg): exc = import_reg.get_exception() if exc: return RemoteServiceAdminEvent( RemoteServiceAdminEvent.IMPORT_ERROR, bundle, import_reg.get_import_container_id(), import_reg.get_remoteservic...
Creates a RemoteServiceAdminEvent object from an ImportRegistration
def _struct_get_field(expr, field_name):
    """Get the *field_name* field from the Struct expression *expr*.

    The resulting expression is named after the accessed field.
    """
    return ops.StructField(expr, field_name).to_expr().name(field_name)
Get the `field_name` field from the ``Struct`` expression `expr`. Parameters ---------- field_name : str The name of the field to access from the ``Struct`` typed expression `expr`. Must be a Python ``str`` type; programmatic struct field access is not yet supported. Returns ...
def get_flattened(dct, names, path_joiner="_"):
    """Flatten child dicts whose keys appear in *names*.

    Flattened keys are joined by *path_joiner*, e.g.
    {"valuation": {"currency": "USD"}} -> {"valuation_currency": "USD"}.
    Other keys are copied through unchanged.
    """
    new_dct = dict()
    for key, val in dct.items():
        if key in names:
            child = {path_joiner.join(k): v
                     for k, v in flatten_dict(val, (key, ))}
            new_dct.update(child)
        else:
            new_dct[key] = dct[key]
    return new_dct
Flatten a child dicts, whose resulting keys are joined by path_joiner. E.G. { "valuation": { "currency": "USD", "amount": "100" } } -> { "valuation_currency": "USD", "valuation_amount": "100" }
def expire_file(filepath):
    """Expire all database records that reference a missing file."""
    # Drop the memoized loader result so stale content is not served.
    load_message.cache_clear()
    orm.delete(pa for pa in model.PathAlias if pa.entry.file_path == filepath)
    orm.delete(item for item in model.Entry if item.file_path == filepath)
    orm.commit()
Expire a record for a missing file
def _merge_args_opts(args_opts_dict, **kwargs): merged = [] if not args_opts_dict: return merged for arg, opt in args_opts_dict.items(): if not _is_sequence(opt): opt = shlex.split(opt or '') merged += opt if not arg: continue if 'add_input_opt...
Merge options with their corresponding arguments. Iterates over the dictionary holding arguments (keys) and options (values). Merges each options string with its corresponding argument. :param dict args_opts_dict: a dictionary of arguments and options :param dict kwargs: *input_option* - if specified ...
def render(self):
    """Validate, process, clean and return the result of the call.

    :raises ValidationError: if validation fails
    """
    if not self.validate():
        raise ValidationError
    self.process_request()
    self.clean()
    return self.response
Validate, process, clean and return the result of the call.
def retry(exception_processor=generic_exception_processor, max_retries=100): max_retries = int(os.getenv('WALE_RETRIES', max_retries)) def yield_new_function_from(f): def shim(*args, **kwargs): exc_processor_cxt = None retries = 0 while True: gevent.sl...
Generic retry decorator Tries to call the decorated function. Should no exception be raised, the value is simply returned, otherwise, call an exception_processor function with the exception (type, value, traceback) tuple (with the intention that it could raise the exception without losing the trac...
def _onGlobal(self, name, line, pos, absPosition, level): for item in self.globals: if item.name == name: return self.globals.append(Global(name, line, pos, absPosition))
Memorizes a global variable
def show_setup(self): shell = os.getenv('SHELL') if not shell: raise SystemError("No $SHELL env var found") shell = os.path.basename(shell) if shell not in self.script_body: raise SystemError("Unsupported shell: %s" % shell) tplvars = { "prog":...
Provide a helper script for the user to setup completion.
def run(items, background=None): if not background: background = [] background_bams = [] paired = vcfutils.get_paired_bams([x["align_bam"] for x in items], items) if paired: inputs = [paired.tumor_data] if paired.normal_bam: background = [paired.normal_data] backg...
Detect copy number variations from batched set of samples using WHAM.
def prepare_relationship(config, model_name, raml_resource): if get_existing_model(model_name) is None: plural_route = '/' + pluralize(model_name.lower()) route = '/' + model_name.lower() for res in raml_resource.root.resources: if res.method.upper() != 'POST': co...
Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in RAML file must provide its schema in POST...
def get_display_types():
    """Return an OrderedDict of available display types keyed by luma
    sub-project namespace.

    :rtype: collections.OrderedDict
    """
    display_types = OrderedDict()
    for namespace in get_supported_libraries():
        display_types[namespace] = get_choices('luma.{0}.device'.format(
            namespace))
    return display_types
Get ordered dict containing available display types from available luma sub-projects. :rtype: collections.OrderedDict
def _check_with_retry(self): address = self._server_description.address retry = True if self._server_description.server_type == SERVER_TYPE.Unknown: retry = False start = _time() try: return self._check_once() except ReferenceError: rai...
Call ismaster once or twice. Reset server's pool on error. Returns a ServerDescription.
def kill_all_process(self):
    """Terminate all running processes.

    Only acts when 'cleanup_pending_process' is enabled in the active
    config (defaults to False).
    """
    if (runtime.get_active_config("cleanup_pending_process", False)):
        for process in self.get_processes():
            self.terminate(process.unique_id)
Terminates all the running processes. By default it is set to false. Users can set to true in config once the method to get_pid is done deterministically either using pid_file or an accurate keyword
def kick_job(self, job_id):
    """Kick the given job id (DELAYED or BURIED -> READY).

    Accepts either a raw id or an object with a ``job_id`` attribute.
    """
    if hasattr(job_id, 'job_id'):
        job_id = job_id.job_id
    with self._sock_ctx() as socket:
        self._send_message('kick-job {0}'.format(job_id), socket)
        self._receive_word(socket, b'KICKED')
Kick the given job id. The job must either be in the DELAYED or BURIED state and will be immediately moved to the READY state.
def profile(script, argv, profiler_factory, pickle_protocol, dump_filename, mono): filename, code, globals_ = script sys.argv[:] = [filename] + list(argv) __profile__(filename, code, globals_, profiler_factory, pickle_protocol=pickle_protocol, dump_filename=dump_filename, ...
Profile a Python script.
def update_workspace_config(namespace, workspace, cnamespace, configname, body):
    """Update a method configuration in a workspace.

    Args:
        namespace (str): project to which workspace belongs
        workspace (str): workspace name
        cnamespace (str): configuration namespace
        configname (str): configuration name
        body (json): new body (definition) of the method config
    """
    uri = "workspaces/{0}/{1}/method_configs/{2}/{3}".format(namespace,
                                                             workspace,
                                                             cnamespace,
                                                             configname)
    return __post(uri, json=body)
Update method configuration in workspace. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name cnamespace (str): Configuration namespace configname (str): Configuration name body (json): new body (definition) of the method config ...
def iterator(self):
    """Execute the API request and yield wrapped result objects.

    This method does not use the query cache.
    """
    for obj in (self.execute().json().get("items") or []):
        yield self.api_obj_class(self.api, obj)
Execute the API request and return an iterator over the objects. This method does not use the query cache.
def natural_name(self) -> str:
    """Valid python identifier representation of the expression."""
    name = self.expression.strip()
    # Replace each operator character with its identifier-safe token.
    for op in operators:
        name = name.replace(op, operator_to_identifier[op])
    return wt_kit.string2identifier(name)
Valid python identifier representation of the expression.
def to_integer(value, ctx): if isinstance(value, bool): return 1 if value else 0 elif isinstance(value, int): return value elif isinstance(value, Decimal): try: val = int(value.to_integral_exact(ROUND_HALF_UP)) if isinstance(val, int): return v...
Tries conversion of any value to an integer
def get_identifier(identifier, module_globals, module_name): if isinstance(identifier, six.string_types): fn = module_globals.get(identifier) if fn is None: raise ValueError('Unknown {}: {}'.format(module_name, identifier)) return fn elif callable(identifier): return ...
Helper utility to retrieve the callable function associated with a string identifier. Args: identifier: The identifier. Could be a string or function. module_globals: The global objects of the module. module_name: The module name Returns: The callable associated with the identi...
def toy_rbf_1d(seed=default_seed, num_samples=500): np.random.seed(seed=seed) num_in = 1 X = np.random.uniform(low= -1.0, high=1.0, size=(num_samples, num_in)) X.sort(axis=0) rbf = GPy.kern.RBF(num_in, variance=1., lengthscale=np.array((0.25,))) white = GPy.kern.White(num_in, variance=1e-2) ...
Samples values of a function from an RBF covariance with very small noise for inputs uniformly distributed between -1 and 1. :param seed: seed to use for random sampling. :type seed: int :param num_samples: number of samples to sample in the function (default 500). :type num_samples: int