Unnamed: 0
int64
0
389k
code
stringlengths
26
79.6k
docstring
stringlengths
1
46.9k
370,200
def get_my_contributions(self, *args, **kwargs): return self.get_content(self.config[], *args, **kwargs)
Return a get_content generator of subreddits. The Subreddits generated are those where the session's user is a contributor. The additional parameters are passed directly into :meth:`.get_content`. Note: the `url` parameter cannot be altered.
370,201
def encode(self, word, version=2): word = word.lower() word = .join(c for c in word if c in self._lc_set) def _squeeze_replace(word, char): while char * 2 in word: word = word.replace(char * 2, char) return word.replace(char, char.up...
Return the Caverphone code for a word. Parameters ---------- word : str The word to transform version : int The version of Caverphone to employ for encoding (defaults to 2) Returns ------- str The Caverphone value Exa...
370,202
def register_plugin(self): ipyconsole = self.main.ipyconsole treewidget = self.fileexplorer.treewidget self.main.add_dockwidget(self) self.fileexplorer.sig_open_file.connect(self.main.open_file) self.register_widget_shortcuts(treewidget) treewidget.sig...
Register plugin in Spyder's main window
370,203
def bootstrap_falsealarmprob(lspinfo, times, mags, errs, nbootstrap=250, magsarefluxes=False, sigclip=10.0, npeaks=No...
Calculates the false alarm probabilities of periodogram peaks using bootstrap resampling of the magnitude time series. The false alarm probability here is defined as:: (1.0 + sum(trialbestpeaks[i] > peak[j]))/(ntrialbestpeaks + 1) for each best periodogram peak j. The index i is for each bootstra...
370,204
def _harvest_lost_resources(self): with self._lock: for i in self._unavailable_range(): rtracker = self._reference_queue[i] if rtracker is not None and rtracker.available(): self.put_resource(rtracker.resource)
Return lost resources to pool.
370,205
def add_dataset_to_collection(dataset_id, collection_id, **kwargs): collection_i = _get_collection(collection_id) collection_item = _get_collection_item(collection_id, dataset_id) if collection_item is not None: raise HydraError("Dataset Collection %s already contains dataset %s", collection_id...
Add a single dataset to a dataset collection.
370,206
async def create_table(**data): table = data.get() try: clickhouse_queries.create_table(table, data) return except ServerException as e: exception_code = int(str(e)[5:8].strip()) if exception_code == 57: return elif exception_code == 50: ...
RPC method for creating table with custom name and fields :return event id
370,207
def serializer(metadata_prefix): metadataFormats = current_app.config[] serializer_ = metadataFormats[metadata_prefix][] if isinstance(serializer_, tuple): return partial(import_string(serializer_[0]), **serializer_[1]) return import_string(serializer_)
Return etree_dumper instances. :param metadata_prefix: One of the metadata identifiers configured in ``OAISERVER_METADATA_FORMATS``.
370,208
def add(self, media_type, media_file, title=None, introduction=None): params = { : self.access_token, : media_type } if media_type == : assert title, assert introduction, description = { : title, ...
新增其它类型永久素材 详情请参考 http://mp.weixin.qq.com/wiki/14/7e6c03263063f4813141c3e17dd4350a.html :param media_type: 媒体文件类型,分别有图片(image)、语音(voice)、视频(video)和缩略图(thumb) :param media_file: 要上传的文件,一个 File-object :param title: 视频素材标题,仅上传视频素材时需要 :param introduction: 视频素材简介,仅上传视频素材时需要 ...
370,209
def generate_s3_url(files): if files: vault = g.client.Vault.get_personal_vault() files = json.loads(files) objects = [] for i in xrange(len(files)): obj = g.client.Object.create( vault_id=vault.id, object_type=, filena...
Takes files from React side, creates SolveBio Object containing signed S3 URL.
370,210
def generate_url(self, name: str, **kwargs) -> str: return self.urlmapper.generate(name, **kwargs)
generate url with urlgenerator used by urldispatch
370,211
def process(self, metric): for rule in self.rules: rule.process(metric, self)
process a single metric @type metric: diamond.metric.Metric @param metric: metric to process @rtype None
370,212
def background_color(self, node, depth): if self.color_mapping is None: self.color_mapping = {} color = self.color_mapping.get(node.key) if color is None: depth = len(self.color_mapping) red = (depth * 10) % 255 green = 200 - ((depth * 5) ...
Create a (unique-ish) background color for each node
370,213
def name(self): return ffi.string( lib.EnvGetDefglobalName(self._env, self._glb)).decode()
Global name.
370,214
def form_invalid(self, form, prefix=None): response = super(FormAjaxMixin, self).form_invalid(form) if self.request.is_ajax(): data = { "errors_list": self.add_prefix(form.errors, prefix), } return self.json_to_response(status=400, json_data=d...
If form invalid return error list in JSON response
370,215
def scaleBy(self, value, origin=None): value = normalizers.normalizeTransformationScale(value) if origin is None: origin = (0, 0) origin = normalizers.normalizeCoordinateTuple(origin) self._scaleBy(value, origin=origin)
Scale the object. >>> obj.transformBy(2.0) >>> obj.transformBy((0.5, 2.0), origin=(500, 500)) **value** must be an iterable containing two :ref:`type-int-float` values defining the x and y values to scale the object by. **origin** defines the point at with the s...
370,216
def make_request_fn(): if FLAGS.cloud_mlengine_model_name: request_fn = serving_utils.make_cloud_mlengine_request_fn( credentials=GoogleCredentials.get_application_default(), model_name=FLAGS.cloud_mlengine_model_name, version=FLAGS.cloud_mlengine_model_version) else: request_fn ...
Returns a request function.
370,217
def rpc_get_subdomains_owned_by_address(self, address, **con_info): if not check_address(address): return {: , : 400} res = get_subdomains_owned_by_address(address) return self.success_response({: res})
Get the list of subdomains owned by an address. Return {'status': True, 'subdomains': ...} on success Return {'error': ...} on error
370,218
def get_oauth_data(self, code, client_id, client_secret, state): request = self._get_request() response = request.post(self.OAUTH_TOKEN_URL, { "state": state, "code": code, "grant_type": "authorization_code", "client_id": client_id, "c...
Get Oauth data from HelloSign Args: code (str): Code returned by HelloSign for our callback url client_id (str): Client id of the associated app client_secret (str): Secret token of the associated app Returns: A HSAccessTokenAuth...
370,219
def next_call(self, for_method=None): last_call_name = self._last_declared_call_name if for_method: if for_method not in self._declared_calls: raise FakeDeclarationError( "next_call(for_method=%r) is not possible; " ...
Start expecting or providing multiple calls. .. note:: next_call() cannot be used in combination with :func:`fudge.Fake.times_called` Up until calling this method, calls are infinite. For example, before next_call() ... :: >>> from fudge import Fake >>> f = Fake().pro...
370,220
def subgraph(self, nodelist): subcoupling = CouplingMap() subcoupling.graph = self.graph.subgraph(nodelist) for node in nodelist: if node not in subcoupling.physical_qubits: subcoupling.add_physical_qubit(node) return subcoupling
Return a CouplingMap object for a subgraph of self. nodelist (list): list of integer node labels
370,221
def _opt_soft(eigvectors, rot_matrix, n_clusters): eigvectors = eigvectors[:, :n_clusters] rot_crop_matrix = rot_matrix[1:][:, 1:] (x, y) = rot_crop_matrix.shape rot_crop_vec = np.reshape(rot_crop_matrix, x * y) def susanna_func(rot_crop_vec, eigvectors): ...
Optimizes the PCCA+ rotation matrix such that the memberships are exclusively nonnegative. Parameters ---------- eigenvectors : ndarray A matrix with the sorted eigenvectors in the columns. The stationary eigenvector should be first, then the one to the slowest relaxation process, etc. ...
370,222
def create_fresh_child_cgroup(self, *subsystems): assert set(subsystems).issubset(self.per_subsystem.keys()) createdCgroupsPerSubsystem = {} createdCgroupsPerParent = {} for subsystem in subsystems: parentCgroup = self.per_subsystem[subsystem] if parentCg...
Create child cgroups of the current cgroup for at least the given subsystems. @return: A Cgroup instance representing the new child cgroup(s).
370,223
def closure(self, relation, depth=float()): ancestors = [] unvisited_ancestors = [(synset,1) for synset in self.get_related_synsets(relation)] while len(unvisited_ancestors) > 0: ancestor_depth = unvisited_ancestors.pop() if ancestor_depth[1] > depth: ...
Finds all the ancestors of the synset using provided relation. Parameters ---------- relation : str Name of the relation which is recursively used to fetch the ancestors. Returns ------- list of Synsets Returns the ancestors of the synset via given r...
370,224
def PyParseIntCast(string, location, tokens): for index, token in enumerate(tokens): try: tokens[index] = int(token) except ValueError: logger.error(.format( token)) tokens[index] = 0 for key in tokens.keys(): try: tokens[key] = int(tokens[key], 10) except...
Return an integer from a string. This is a pyparsing callback method that converts the matched string into an integer. The method modifies the content of the tokens list and converts them all to an integer value. Args: string (str): original string. location (int): location in the string where the ...
370,225
def get_backward_star(self, node): if node not in self._node_attributes: raise ValueError("No such node exists.") return self._backward_star[node].copy()
Given a node, get a copy of that node's backward star. :param node: node to retrieve the backward-star of. :returns: set -- set of hyperedge_ids for the hyperedges in the node's backward star. :raises: ValueError -- No such node exists.
370,226
def t_B_SEQUENCE_COMPACT_START(self, t): r indent_status, curr_depth, next_depth = self.get_indent_status(t) if indent_status == : self.indent_stack.append(next_depth) return t msg = dedent().format(**vars()) raise YAMLUnknownSyntaxError(msg)
r""" \-\ + (?= -\ ) # ^ ^ sequence indicator | \-\ + (?= [\{\[]\ | [^:\n]*:\s ) # ^ ^ ^^^ map indicator # ^ ^ flow indicator
370,227
def GetCellValueNoFail (self, column, row = None): if row == None: (row, column) = ParseCellSpec(column) cell = GetCellValue(self, column, row) if cell == None: raise ValueError("cell %d:%d does not ex...
get a cell, if it does not exist fail note that column at row START AT 1 same as excel
370,228
def ParseOptions(cls, options, output_module): if not hasattr(output_module, ): raise errors.BadConfigObject() server = cls._ParseStringOption( options, , default_value=cls._DEFAULT_SERVER) port = cls._ParseNumericOption( options, , default_value=cls._DEFAULT_PORT) output_mo...
Parses and validates options. Args: options (argparse.Namespace): parser options. output_module (OutputModule): output module to configure. Raises: BadConfigObject: when the output module object does not have the SetServerInformation method.
370,229
def stop(self): if not self.active: return self.removeHandler(self.handlers[-1]) self.active = False return
Stop logging with this logger.
370,230
def process_files(self): file_names = [] for f in self.files: file_names.append(f.process_file()) if not self.has_thumbnail() and config.THUMBNAILS: file_names.append(self.derive_thumbnail()) return file_names
Processes all the files associated with this Node. Files are downloaded if not present in the local storage. Creates and processes a NodeFile containing this Node's metadata. :return: A list of names of all the processed files.
370,231
def GetColorfulSearchPropertiesStr(self, keyColor=, valueColor=) -> str: strs = [.format(keyColor if k in Control.ValidKeys else , k, valueColor, ControlTypeNames[v] if k == else repr(v)) for k, v in self.searchProperties.items()] return + .join(strs) +
keyColor, valueColor: str, color name in class ConsoleColor
370,232
def cudnn_compatible_lstm(units, n_hidden, n_layers=1, trainable_initial_states=None, seq_lengths=None, initial_h=None, initial_c=None, name=, reuse=False): with tf.variable_scope(name, reuse=reuse): if trainable_initial_states: init_h = tf.get_variable(, [n_layer...
CuDNN Compatible LSTM implementation. It should be used to load models saved with CudnnLSTMCell to run on CPU. Args: units: tf.Tensor with dimensions [B x T x F], where B - batch size T - number of tokens F - features n_hidden: dim...
370,233
def _reversedict(d): return dict(list(zip(list(d.values()), list(d.keys()))))
Internal helper for generating reverse mappings; given a dictionary, returns a new dictionary with keys and values swapped.
370,234
def date_range(data): start = min([parse(d[]) for d in data]) end = max([parse(d[]) for d in data]) start = remove_hours(start) end = remove_hours(end) + timedelta(days=1) return start, end
Returns the minimum activity start time and the maximum activity end time from the active entities response. These dates are modified in the following way. The hours (and minutes and so on) are removed from the start and end times and a *day* is added to the end time. These are the dates that should be ...
370,235
def deleteMultipleByPks(self, pks): if type(pks) == set: pks = list(pks) if len(pks) == 1: return self.deleteByPk(pks[0]) objs = self.mdl.objects.getMultipleOnlyIndexedFields(pks) return self.deleteMultiple(objs)
deleteMultipleByPks - Delete multiple objects given their primary keys @param pks - List of primary keys @return - Number of objects deleted
370,236
def imp_print(self, text, end): try: PRINT(text, end=end) except UnicodeEncodeError: for i in text: try: PRINT(i, end="") except UnicodeEncodeError: PRINT("?", end="") PRINT("", end=end)
Catch UnicodeEncodeError
370,237
def gauss_fltr_astropy(dem, size=None, sigma=None, origmask=False, fill_interior=False): import astropy.convolution dem = malib.checkma(dem) if size is not None: size = int(np.floor(size/2)*2 + 1) size = max(size, 3) truncate = 3.0 if size is not N...
Astropy gaussian filter properly handles convolution with NaN http://stackoverflow.com/questions/23832852/by-which-measures-should-i-set-the-size-of-my-gaussian-filter-in-matlab width1 = 3; sigma1 = (width1-1) / 6; Specify width for smallest feature of interest and determine sigma appropriately sigma...
370,238
def find_distributions(path_item, only=False): importer = get_importer(path_item) finder = _find_adapter(_distribution_finders, importer) return finder(importer, path_item, only)
Yield distributions accessible via `path_item`
370,239
def cancel_order(self, order_id: str) -> str: self.log.debug(f) if self.dry_run: return order_id
Cancel an order by ID.
370,240
def format_section(stream, section, options, doc=None): if doc: print(_comment(doc), file=stream) print("[%s]" % section, file=stream) _ini_format(stream, options)
format an options section using the INI format
370,241
def sanitize_config_loglevel(level): if sys.version_info[:2] != (2, 6) or isinstance(level, (int, long)): return level lvl = None if isinstance(level, basestring): lvl = logging._levelNames.get(level) if not lvl: raise ValueError( % level) return lvl
Kinda sorta backport of loglevel sanitization for Python 2.6.
370,242
def recompute_if_necessary(self, ui): self.__initialize_cache() if self.__cached_value_dirty: with self.__is_recomputing_lock: is_recomputing = self.__is_recomputing self.__is_recomputing = True if is_recomputing: pass ...
Recompute the data on a thread, if necessary. If the data has recently been computed, this call will be rescheduled for the future. If the data is currently being computed, it do nothing.
370,243
def predict(self, y, t=None, return_cov=True, return_var=False): y = self._process_input(y) if len(y.shape) > 1: raise ValueError("dimension mismatch") if t is None: xs = self._t else: xs = np.ascontiguousarray(t, dtype=float) if ...
Compute the conditional predictive distribution of the model You must call :func:`GP.compute` before this method. Args: y (array[n]): The observations at coordinates ``x`` from :func:`GP.compute`. t (Optional[array[ntest]]): The independent coordinates where the...
370,244
def from_tuple(self, t): if len(t) > 1: self.id = t[0] self.sitting = t[1] else: self.sitting = t[0] self.id = None return self
Set this person from tuple :param t: Tuple representing a person (sitting[, id]) :type t: (bool) | (bool, None | str | unicode | int) :rtype: Person
370,245
def parse_path(path): if not path: raise ValueError("Invalid path") if isinstance(path, str): if path == "/": raise ValueError("Invalid path") if path[0] != "/": raise ValueError("Invalid path") return path.split(_PATH_SEP)[1:] elif isinstance(path, (tuple, list)): return path ...
Parse a rfc 6901 path.
370,246
def repair(self, verbose=False, joincomp=False, remove_smallest_components=True): assert self.f.shape[1] == 3, assert self.f.ndim == 2, self.v, self.f = _meshfix.clean_from_arrays(self.v, self.f, verbose, joincomp, ...
Performs mesh repair using MeshFix's default repair process. Parameters ---------- verbose : bool, optional Enables or disables debug printing. Disabled by default. joincomp : bool, optional Attempts to join nearby open components. remove_small...
370,247
def setup(cls, configuration=None, **kwargs): if configuration is None: cls._configuration = Configuration(**kwargs) else: cls._configuration = configuration
Set up the HDX configuration Args: configuration (Optional[Configuration]): Configuration instance. Defaults to setting one up from passed arguments. **kwargs: See below user_agent (str): User agent string. HDXPythonLibrary/X.X.X- is prefixed. Must be supplied if remoteckan ...
370,248
def cloud_cover_to_ghi_linear(self, cloud_cover, ghi_clear, offset=35, **kwargs): offset = offset / 100. cloud_cover = cloud_cover / 100. ghi = (offset + (1 - offset) * (1 - cloud_cover)) * ghi_clear return ghi
Convert cloud cover to GHI using a linear relationship. 0% cloud cover returns ghi_clear. 100% cloud cover returns offset*ghi_clear. Parameters ---------- cloud_cover: numeric Cloud cover in %. ghi_clear: numeric GHI under clear sky conditions. ...
370,249
def insert_child(self, child_pid): self._check_child_limits(child_pid) try: with db.session.begin_nested(): if not isinstance(child_pid, PersistentIdentifier): child_pid = resolve_pid(child_pid) return PIDRelation.crea...
Add the given PID to the list of children PIDs.
370,250
def _to_dict(self): _dict = {} if hasattr(self, ) and self.interval is not None: _dict[] = self.interval if hasattr(self, ) and self.event_type is not None: _dict[] = self.event_type if hasattr(self, ) and self.results is not None: _dict[] = [...
Return a json dictionary representing this model.
370,251
def create_instances(self, config_list): tags = [conf.pop(, None) for conf in config_list] resp = self.guest.createObjects([self._generate_create_dict(**kwargs) for kwargs in config_list]) for instance, tag in zip(resp, tags): if ta...
Creates multiple virtual server instances. This takes a list of dictionaries using the same arguments as create_instance(). .. warning:: This will add charges to your account Example:: # Define the instance we want to create. new_vsi = { ...
370,252
def min(self, axis=None, skipna=True, *args, **kwargs): nv.validate_min(args, kwargs) nv.validate_minmax_axis(axis) result = nanops.nanmin(self.asi8, skipna=skipna, mask=self.isna()) if isna(result): return NaT return self._box_func(result)
Return the minimum value of the Array or minimum along an axis. See Also -------- numpy.ndarray.min Index.min : Return the minimum value in an Index. Series.min : Return the minimum value in a Series.
370,253
def _findLocation(self, reference_name, start, end): try: return self._locationMap[][reference_name][start][end] except: return None
return a location key form the locationMap
370,254
def columnInfo(self): code = "proc contents data=" + self.libref + + self.table + + self._dsopts() + ";ods select Variables;run;" if self.sas.nosub: print(code) return if self.results.upper() == : code = "proc contents data=%s.%s %s ;ods output Va...
display metadata about the table, size, number of rows, columns and their data type
370,255
def GetSortedEvents(self, time_range=None): filter_expression = None if time_range: filter_expression = [] if time_range.start_timestamp: filter_expression.append( .format(time_range.start_timestamp)) if time_range.end_timestamp: filter_expression.append( ...
Retrieves the events in increasing chronological order. Args: time_range (Optional[TimeRange]): time range used to filter events that fall in a specific period. Yield: EventObject: event.
370,256
def empty_line_count_at_the_end(self): count = 0 for line in self.lines[::-1]: if not line or line.isspace(): count += 1 else: break return count
Return number of empty lines at the end of the document.
370,257
def _add_params_docstring(params): p_string = "\nAccepts the following paramters: \n" for param in params: p_string += "name: %s, required: %s, description: %s \n" % (param[], param[], param[]) return p_string
Add params to doc string
370,258
def sc_zoom_coarse(self, viewer, event, msg=True): if not self.canzoom: return True zoom_accel = self.settings.get(, 1.0) amount = self._scale_adjust(1.2, event.amount, zoom_accel, max_limit=4.0) self._scale_image(viewer, event.direction, amount, msg=msg) ...
Interactively zoom the image by scrolling motion. This zooms by adjusting the scale in x and y coarsely.
370,259
def export_configuration_generator(self, sql, sql_args): self.con.execute(sql, sql_args) results = self.con.fetchall() output = [] for result in results: if result[] == "observation": search = mp.ObservationSearch.from_dict(json.loads(result[])) ...
Generator for :class:`meteorpi_model.ExportConfiguration` :param sql: A SQL statement which must return rows describing export configurations :param sql_args: Any variables required to populate the query provided in 'sql' :return: A generator which produces :...
370,260
def _mutect2_filter(broad_runner, in_file, out_file, ref_file): params = ["-T", "FilterMutectCalls", "--reference", ref_file, "--variant", in_file, "--output", out_file] return broad_runner.cl_gatk(params, os.path.dirname(out_file))
Filter of MuTect2 calls, a separate step in GATK4.
370,261
def decode_list_offset_response(cls, response): return [ kafka.structs.ListOffsetResponsePayload(topic, partition, error, timestamp, offset) for topic, partitions in response.topics for partition, error, timestamp, offset in partitions ]
Decode OffsetResponse_v2 into ListOffsetResponsePayloads Arguments: response: OffsetResponse_v2 Returns: list of ListOffsetResponsePayloads
370,262
def gatk_rnaseq_calling(data): from bcbio.bam import callable data = utils.deepish_copy(data) tools_on = dd.get_tools_on(data) if not tools_on: tools_on = [] tools_on.append("gvcf") data = dd.set_tools_on(data, tools_on) data = dd.set_jointcaller(data, ["%s-joint" % v for v in d...
Use GATK to perform gVCF variant calling on RNA-seq data
370,263
def doFindAny(self, WHAT={}, SORT=[], SKIP=None, MAX=None, LOP=, **params): self._preFind(WHAT, SORT, SKIP, MAX, LOP) for key in params: self._addDBParam(key, params[key]) return self._doAction()
This function will perform the command -findany.
370,264
def find_by_ids(ids, _connection=None, page_size=100, page_number=0, sort_by=enums.DEFAULT_SORT_BY, sort_order=enums.DEFAULT_SORT_ORDER): if not isinstance(ids, (list, tuple)): err = "Video.find_by_ids expects an iterable argument" raise exceptions.PyBrightcoveError(err)...
List all videos identified by a list of Brightcove video ids
370,265
def _try_switches(self, lines, index): for s in self._switches: s.switch(lines, index)
For each switch in the Collector object, pass a list of string, representing lines of text in a file, and an index to the current line to try to flip the switch. A switch will only flip on if the line passes its 'test_on' method, and will only flip off if the line passes its 'test_off' m...
370,266
def get_least_orbits(atom_index, cell, site_symmetry, symprec=1e-5): orbits = _get_orbits(atom_index, cell, site_symmetry, symprec) mapping = np.arange(cell.get_number_of_atoms()) for i, orb in enumerate(orbits): for num in np.unique(orb): if mapping[num] > mapping[i]: ...
Find least orbits for a centering atom
370,267
def _set_preferences(self, node): pref = etree.SubElement(node, ) pacman = etree.SubElement(pref, ) pacman.text = self._get_package_manager() p_version = etree.SubElement(pref, ) p_version.text = p_type = etree.SubElement(pref, ) p_type.set(, ) ...
Set preferences. :return:
370,268
def get_tms_layers(self, catid, bands=, gamma=1.3, highcutoff=0.98, lowcutoff=0.02, brightness=1.0, contrast=1.0): description = self.describ...
Get list of urls and bounding boxes corrsponding to idaho images for a given catalog id. Args: catid (str): Catalog id bands (str): Bands to display, separated by commas (0-7). gamma (float): gamma coefficient. This is for on-the-fly pansharpening. highcutoff (float)...
370,269
def getAltitudeFromLatLon(self, lat, lon): lat -= self.lat lon -= self.lon if lat < 0.0 or lat >= 1.0 or lon < 0.0 or lon >= 1.0: raise WrongTileError(self.lat, self.lon, self.lat+lat, self.lon+lon) x = lon * (self.size - 1) y = lat * (self....
Get the altitude of a lat lon pair, using the four neighbouring pixels for interpolation.
370,270
def _process_outgoing_msg(self, sink_iter): LOG.debug() from ryu.services.protocols.bgp.model import ( FlexinetOutgoingRoute) while self.is_connected: outgoing_msg.__class__) if rpc_msg: ...
For every message we construct a corresponding RPC message to be sent over the given socket inside given RPC session. This function should be launched in a new green thread as it loops forever.
370,271
def format_label(sl, fmt=None): if isinstance(sl, str): return sl if fmt: return fmt.format(*sl) return .join(str(s) for s in sl)
Combine a list of strings to a single str, joined by sep. Passes through single strings. :param sl: :return:
370,272
def get_confidence_interval(self, node, interval = (0.05, 0.95)): rate_contribution = self.date_uncertainty_due_to_rate(node, interval) if hasattr(node, "marginal_inverse_cdf"): min_date, max_date = [self.date2dist.to_numdate(x) for x in (node.marg...
If temporal reconstruction was done using the marginal ML mode, the entire distribution of times is available. This function determines the 90% (or other) confidence interval, defined as the range where 5% of probability is below and above. Note that this does not necessarily contain the highest...
370,273
def store_tmp(self, tmp, content, reg_deps=None, tmp_deps=None, deps=None): self.state._inspect(, BP_BEFORE, tmp_write_num=tmp, tmp_write_expr=content) tmp = self.state._inspect_getattr(, tmp) content = self.state._inspect_getattr(, content) if o.SYMBOLIC_TEMPS not in self.stat...
Stores a Claripy expression in a VEX temp value. If in symbolic mode, this involves adding a constraint for the tmp's symbolic variable. :param tmp: the number of the tmp :param content: a Claripy expression of the content :param reg_deps: the register dependencies of the content ...
370,274
def create_group(groupname, gid, system=True): sudo(addgroup(groupname, gid, system))
Creates a new user group with a specific id. :param groupname: Group name. :type groupname: unicode :param gid: Group id. :type gid: int or unicode :param system: Creates a system group.
370,275
def chimera_elimination_order(m, n=None, t=None): if n is None: n = m if t is None: t = 4 index_flip = m > n if index_flip: m, n = n, m def chimeraI(m0, n0, k0, l0): if index_flip: return m*2*t*n0 + 2*t*m0 + t*(1-k0) + l0 else: ...
Provides a variable elimination order for a Chimera graph. A graph defined by chimera_graph(m,n,t) has treewidth max(m,n)*t. This function outputs a variable elimination order inducing a tree decomposition of that width. Parameters ---------- m : int Number of rows in the Chimera latti...
370,276
def create_cli(create_app=None): def create_cli_app(info): if create_app is None: info.create_app = None app = info.load_app() else: app = create_app(debug=get_debug_flag()) return app @click.group(cls=FlaskGroup, create_app...
Create CLI for ``inveniomanage`` command. :param create_app: Flask application factory. :returns: Click command group. .. versionadded: 1.0.0
370,277
def _startProductionCrewNode(self, name, attrs): if name == : self._programId = attrs.get() elif name == : self._role = None self._givenname = None self._surname = None
Process the start of a node under xtvd/productionCrew
370,278
def validate_jsonschema_from_file(self, json_string, path_to_schema): schema = open(path_to_schema).read() load_input_json = self.string_to_json(json_string) try: load_schema = json.loads(schema) except ValueError as e: raise JsonValidatorError(.format(e...
Validate JSON according to schema, loaded from a file. *Args:*\n _json_string_ - JSON string;\n _path_to_schema_ - path to file with JSON schema; *Raises:*\n JsonValidatorError *Example:*\n | *Settings* | *Value* | | Library | JsonValidator | ...
370,279
def update_target(self, name, current, total): self.refresh(self._bar(name, current, total))
Updates progress bar for a specified target.
370,280
def calc_gs_kappa(b, ne, delta, sinth, nu): s = nu / calc_nu_b(b) return (ne / b * 1.4e-9 * 10**(-0.22 * delta) * sinth**(-0.09 + 0.72 * delta) * s**(-1.30 - 0.98 * delta))
Calculate the gyrosynchrotron absorption coefficient κ_ν. This is Dulk (1985) equation 36, which is a fitting function assuming a power-law electron population. Arguments are: b Magnetic field strength in Gauss ne The density of electrons per cubic centimeter with energies greater than 10 ...
370,281
def __search_ca_path(self): if "X509_CERT_DIR" in os.environ: self._ca_path = os.environ[] elif os.path.exists(): self._ca_path = else: raise ClientAuthException("Could not find a valid CA path")
Get CA Path to check the validity of the server host certificate on the client side
370,282
def apply_thresholds(input, thresholds, choices): condlist = [input <= threshold for threshold in thresholds] if len(condlist) == len(choices) - 1: condlist += [True] assert len(condlist) == len(choices), \ "apply_thresholds must be called with the same number of thresholds tha...
Return one of the choices depending on the input position compared to thresholds, for each input. >>> apply_thresholds(np.array([4]), [5, 7], [10, 15, 20]) array([10]) >>> apply_thresholds(np.array([5]), [5, 7], [10, 15, 20]) array([10]) >>> apply_thresholds(np.array([6]), [5, 7], [10, 15, 20]) ...
370,283
def RgbToYiq(r, g, b): (%g, %g, %g)(0.592263, 0.458874, -0.0499818) y = (r * 0.29895808) + (g * 0.58660979) + (b *0.11443213) i = (r * 0.59590296) - (g * 0.27405705) - (b *0.32184591) q = (r * 0.21133576) - (g * 0.52263517) + (b *0.31129940) return (y, i, q)
Convert the color from RGB to YIQ. Parameters: :r: The Red component value [0...1] :g: The Green component value [0...1] :b: The Blue component value [0...1] Returns: The color as an (y, i, q) tuple in the range: y[0...1], i[0...1], q[0...1] ...
370,284
def logprob(self, action_sample, pd_params): means = pd_params[:, :, 0] log_std = pd_params[:, :, 1] std = torch.exp(log_std) z_score = (action_sample - means) / std return - (0.5 * ((z_score**2 + self.LOG2PI).sum(dim=-1)) + log_std.sum(dim=-1))
Log-likelihood
370,285
def refresh_content(self, order=None, name=None): order = order or self.content.order if name is None: query = self.content.query else: query = None name = name or self.content.name if order == : order = N...
Re-download all submissions and reset the page index
370,286
def exec_context(self, **kwargs): import inspect import dateutil.parser import datetime import random from functools import partial from ambry.valuetype.types import parse_date, parse_time, parse_datetime import ambry.valuetype.types import ambry....
Base environment for evals, the stuff that is the same for all evals. Primarily used in the Caster pipe
370,287
def genslices_ndim(ndim, shape):
    """Generate all possible slice tuples for `shape`.

    Yields one tuple per combination of the per-axis slices produced by
    `genslices` over the first `ndim` axes of `shape`.
    """
    per_axis = [genslices(shape[axis]) for axis in range(ndim)]
    for combination in product(*per_axis):
        yield combination
Generate all possible slice tuples for 'shape'.
370,288
def _update_dates(self, **update_props): tree_to_update = update_props[] xpath_root = self._data_map[] if self.dates: date_type = self.dates[DATE_TYPE] remove_element(tree_to_update, xpath_root) if date_type == DATE_TYPE_MULTIPLE: ...
Update operation for ArcGIS Dates metadata :see: gis_metadata.utils._complex_definitions[DATES]
370,289
def start_update(self, draw=None, queues=None): if queues is not None: queues[0].get() try: for arr in self: arr.psy.start_update(draw=False) self.onupdate.emit() except Exception: self._finish_all(queues) raise...
Conduct the formerly registered updates This method conducts the updates that have been registered via the :meth:`update` method. You can call this method if the :attr:`auto_update` attribute of this instance is True and the `auto_update` parameter in the :meth:`update` method has been ...
370,290
def calculate_subgraph_edge_overlap( graph: BELGraph, annotation: str = ) -> Tuple[ Mapping[str, EdgeSet], Mapping[str, Mapping[str, EdgeSet]], Mapping[str, Mapping[str, EdgeSet]], Mapping[str, Mapping[str, float]], ]: sg2edge = defaultdict(set) for u, v, d in graph.edges(...
Build a DatafFame to show the overlap between different sub-graphs. Options: 1. Total number of edges overlap (intersection) 2. Percentage overlap (tanimoto similarity) :param graph: A BEL graph :param annotation: The annotation to group by and compare. Defaults to 'Subgraph' :return: {subgrap...
370,291
def prox_gradf_lim(xy, step, boundary=None):
    """Forward-backward step: take a gradient step, then project onto `boundary`.

    Equivalent to ``prox_lim(prox_gradf(xy, step), step, boundary=boundary)``.
    """
    after_gradient = prox_gradf(xy, step)
    return prox_lim(after_gradient, step, boundary=boundary)
Forward-backward step: gradient, followed by projection
370,292
def signature(self, name=None): self._ensure_loaded() if name is None: name = self.name num_args = len(self.arg_names) num_def = 0 if self.arg_defaults is not None: num_def = len(self.arg_defaults) num_no_def = num_args - num_def ...
Return our function signature as a string. By default this function uses the annotated name of the function however if you need to override that with a custom name you can pass name=<custom name> Args: name (str): Optional name to override the default name given ...
370,293
def getstate(self): if not os.path.exists(self.jobstorefile): logging.info( + self.run_id + ) return "QUEUED", -1 if os.path.exists(self.statcompletefile): logging.info( + self.run_id + ) return "COMPLETE", 0 i...
Returns QUEUED, -1 INITIALIZING, -1 RUNNING, -1 COMPLETE, 0 or EXECUTOR_ERROR, 255
370,294
def list_objects(self, path=, relative=False, first_level=False, max_request_entries=None): entries = 0 max_request_entries_arg = None if not relative: path = self.relpath(path) if not path: locators = self._list_locators()...
List objects. Args: path (str): Path or URL. relative (bool): Path is relative to current root. first_level (bool): It True, returns only first level objects. Else, returns full tree. max_request_entries (int): If specified, maximum entries return...
370,295
def prepare_cookies(self, cookies): if isinstance(cookies, cookielib.CookieJar): self._cookies = cookies else: self._cookies = cookiejar_from_dict(cookies) cookie_header = get_cookie_header(self._cookies, self) if cookie_header is not None: s...
Prepares the given HTTP cookie data. This function eventually generates a ``Cookie`` header from the given cookies using cookielib. Due to cookielib's design, the header will not be regenerated if it already exists, meaning this function can only be called once for the life of the ...
370,296
# NOTE(review): this row is internally elided -- `stats`, `filter_stats`,
# `countsDF` and `annotDF` are referenced but never defined in the visible
# text, so the original function body was lost during extraction.
# Do not treat this as runnable; restore from the upstream source.
def combine_sj_out( fns, external_db, total_jxn_cov_cutoff=20, define_sample_name=None, verbose=False, ): if verbose: import sys stats += filter_stats return countsDF, annotDF, stats
Combine SJ.out.tab files from STAR by filtering based on coverage and comparing to an external annotation to discover novel junctions. Parameters ---------- fns : list of strings Filenames of SJ.out.tab files to combine. external_db : str Filename of splice junction information fr...
370,297
def delete_branch(self, project, repository, name, end_point):
    """Delete a branch from the given repository.

    :param project: the project key
    :param repository: the repository slug
    :param name: the branch name to delete
    :param end_point: the commit hash the branch currently points to
    :return: the response of the DELETE request
    """
    # NOTE(review): the url literal was stripped in this dataset row; it has
    # been reconstructed from the Bitbucket Server branch-utils REST API --
    # confirm against the upstream source.
    url = 'rest/branch-utils/1.0/projects/{project}/repos/{repository}/branches'.format(
        project=project, repository=repository)
    data = {"name": str(name), "endPoint": str(end_point)}
    return self.delete(url, data=data)
Delete a branch from the related repository. :param self: :param project: the project key :param repository: the repository slug :param name: the name of the branch to delete :param end_point: the commit the branch currently points to :return: the response of the DELETE request
370,298
def rerun(version="3.7.0"):
    """Rerun the last example code block with the specified Python version."""
    from commandlib import Command
    interpreter = DIR.gen.joinpath("py{0}".format(version), "bin", "python")
    script = DIR.gen.joinpath("state", "examplepythoncode.py")
    working_dir = DIR.gen.joinpath("state")
    Command(interpreter)(script).in_dir(working_dir).run()
Rerun last example code block with specified version of python.
370,299
def config_default(dest):
    """Create a default configuration file.

    \b
    DEST: Path or file name for the configuration file.
    """
    conf_path = Path(dest).resolve()
    # A directory destination gets the default config file name appended.
    if conf_path.is_dir():
        conf_path = conf_path / LIGHTFLOW_CONFIG_NAME
    conf_path.write_text(Config.default())
    # NOTE(review): the echo message literal was stripped in this dataset row;
    # reconstructed below -- confirm against the upstream source.
    click.echo('Configuration written to {}'.format(conf_path))
Create a default configuration file. \b DEST: Path or file name for the configuration file.