code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def get_request(self, model_run, run_url):
    """Create a ModelRunRequest for the given model run.

    Requests are handled by SCO worker implementations.
    """
    return ModelRunRequest(
        model_run.identifier,
        model_run.experiment_id,
        run_url,
    )
Create request object to run model. Requests are handled by SCO worker implementations. Parameters ---------- model_run : ModelRunHandle Handle to model run run_url : string URL for model run information Returns ------- ModelRunRequest Object representing model run request
# Prompt the user for package names to exclude from removal and drop them
# from self.removed.  The prompt is skipped when "--checklist" was passed
# via self.extra (the list is then assumed to be set elsewhere).
# NOTE(review): uses raw_input, so this is Python 2 code; the nesting of the
# trailing for-loop is ambiguous in this flattened form — confirm upstream.
def _skip_remove(self): if "--checklist" not in self.extra: self.msg.template(78) print("| Insert packages to exception remove:") self.msg.template(78) try: self.skip = raw_input(" > ").split() except EOFError: print("") raise SystemExit() for s in self.skip: if s in self.removed: self.removed.remove(s)
Skip packages from remove
def ls_dir(dirname):
    """Return ``(files, dirs)``: the names of regular files and of
    subdirectories directly inside *dirname*.

    Raises OSError if the directory cannot be listed.
    """
    entries = os.listdir(dirname)
    files = [e for e in entries if os.path.isfile(os.path.join(dirname, e))]
    dirs = [e for e in entries if os.path.isdir(os.path.join(dirname, e))]
    return files, dirs
Returns files and subdirectories within a given directory. Returns a pair of lists, containing the names of files and directories in ``dirname`` (in that order). Raises ------ OSError : Accessing the given directory path failed Parameters ---------- dirname : str The path of the directory to be listed
def list_ape(archive, compression, cmd, verbosity, interactive):
    """List an APE archive (delegates to stripext with a .wav extension)."""
    return stripext(cmd, archive, verbosity, extension=".wav")
List an APE archive.
def enable_global_annotations_decorator(flag=True, retrospective=True):
    """Enable or disable global annotation mode via decorators.

    If *retrospective* is true, already imported modules are processed
    as well, not only future imports.
    """
    global global_annotations_decorator
    global_annotations_decorator = flag
    if import_hook_enabled:
        _install_import_hook()
    if global_annotations_decorator and retrospective:
        _catch_up_global_annotations_decorator()
    return global_annotations_decorator
Enables or disables global annotation mode via decorators. See flag global_annotations_decorator. In contrast to setting the flag directly, this function provides a retrospective option. If retrospective is true, this will also affect already imported modules, not only future imports.
def _normalize(self):
    """Normalize the basis function (THO eq. 2.2)."""
    l, m, n = self.powers
    # numerator and denominator of the normalization constant, squared
    numer = pow(2, 2 * (l + m + n) + 1.5) * pow(self.exponent, l + m + n + 1.5)
    denom = (fact2(2 * l - 1) * fact2(2 * m - 1) * fact2(2 * n - 1)
             * pow(np.pi, 1.5))
    self.norm = np.sqrt(numer / denom)
    return
Normalize basis function. From THO eq. 2.2
def copy(self, props=None, value=None):
    """Return a copy of the Overlay, optionally overriding props/value.

    Uses explicit ``is None`` checks so a falsy override (e.g. 0, {}, "")
    is honored; the original ``props or self.props`` silently discarded
    falsy overrides.
    """
    return Overlay(
        self.text,
        (self.start, self.end),
        props=self.props if props is None else props,
        value=self.value if value is None else value,
    )
Copy the Overlay possibly overriding props.
def latitude(self, dms: bool = False) -> Union[str, float]:
    """Generate a random latitude value.

    :param dms: Return the value in DMS format.
    :return: Value of latitude.
    """
    return self._get_fs('lt', dms)
Generate a random value of latitude. :param dms: DMS format. :return: Value of latitude.
# Build self.url / self.relpath from the configured wiki setting and create
# the mwclient.Site used for accessing and editing wiki pages (skipped in
# test mode).  NOTE(review): in this flattened form the attachment of the
# final "else" (outer len(parts) check vs. inner testmode check) is
# ambiguous — the sensible reading is that the else belongs to the
# len(parts) check; confirm against upstream before reformatting.
def _get_site(self): import mwclient parts = self.server.settings.wiki.replace("http", "").replace("://", "").split("/") self.url = parts[0] if len(parts) > 1 and parts[1].strip() != "": self.relpath = '/' + '/'.join(parts[1:len(parts)]) if self.relpath[-1] != "/": self.relpath += "/" if not self.testmode: self.site = mwclient.Site(self.url, path=self.relpath) else: if not self.testmode: self.site = mwclient.Site(self.url)
Returns the mwclient.Site for accessing and editing the wiki pages.
def _bind_variables(self, instance, space): instance.api = self if space: instance.space = space return instance
Bind related variables to the instance
def is_in_bounds(self, x):
    """Return True iff every coordinate of *x* lies within self.bounds.

    ``self.bounds`` is ``[lower, upper]``; a bound of None (overall or per
    coordinate) means unbounded on that side, and the last bound entry is
    reused for trailing coordinates.
    """
    if self.bounds is None:
        return True
    for ib in [0, 1]:
        bound = self.bounds[ib]
        if bound is None:
            continue
        sign = (-1) ** ib  # flips the comparison for the upper bound
        for i in rglen(x):
            idx = min([i, len(bound) - 1])
            if bound[idx] is not None and sign * x[i] < sign * bound[idx]:
                return False
    return True
Check whether ``x`` lies within ``self.bounds`` (not yet tested).
def with_img_type_and_preset(self, image_type, preset):
    """Return the metaimages matching both *image_type* and *preset*."""
    assert isinstance(image_type, ImageType)
    assert isinstance(preset, str)
    return [mi for mi in self.metaimages
            if mi.image_type == image_type and mi.preset == preset]
Returns the search results having both the specified image type and preset :param image_type: the desired image type (valid values are provided by the `pyowm.commons.enums.ImageTypeEnum` enum) :type image_type: `pyowm.commons.databoxes.ImageType` instance :param preset: the desired image preset (valid values are provided by the `pyowm.agroapi10.enums.PresetEnum` enum) :type preset: str :returns: a list of `pyowm.agroapi10.imagery.MetaImage` instances
def insert_runner(fun, args=None, kwargs=None, queue=None, backend=None):
    """Insert a reference to a runner into the queue so it can be run later.

    *args* may be a list or a comma-separated string; *kwargs* a dict.
    """
    if args is None:
        args = []
    elif isinstance(args, six.string_types):
        args = args.split(',')
    if kwargs is None:
        kwargs = {}
    queue_kwargs = __get_queue_opts(queue=queue, backend=backend)
    payload = {'fun': fun, 'args': args, 'kwargs': kwargs}
    return insert(items=payload, **queue_kwargs)
Insert a reference to a runner into the queue so that it can be run later. fun The runner function that is going to be run args list or comma-separated string of args to send to fun kwargs dictionary of keyword arguments to send to fun queue queue to insert the runner reference into backend backend that to use for the queue CLI Example: .. code-block:: bash salt-run queue.insert_runner test.stdout_print salt-run queue.insert_runner event.send test_insert_runner kwargs='{"data": {"foo": "bar"}}'
def _isFollowedByComma( wordID, clauseTokens ):
    """Return True if the word with the given ID is immediately followed
    by a comma in *clauseTokens*, otherwise False."""
    koma = WordTemplate({ROOT: '^,+$', POSTAG: 'Z'})
    for i, token in enumerate(clauseTokens):
        if token[WORD_ID] == wordID:
            # comma glued to the word itself, or a comma token right after
            if re.match('^.*,$', token[TEXT]):
                return True
            if i + 1 < len(clauseTokens) and koma.matches(clauseTokens[i + 1]):
                return True
            break
    return False
Teeb kindlaks, kas etteantud ID-ga s6nale j2rgneb vahetult koma; Tagastab True, kui eeltoodud tingimus on t2idetud, vastasel juhul False;
def workflow_script_cancel(self):
    """When the round is cancelled, cancel all of its associated Samples
    and Analysis Requests as well."""
    if skip(self, "cancel"):
        return
    self.reindexObject(idxs=["is_active", ])
    # portal_workflow is invariant across the loop; fetch it once instead
    # of once per AR.
    workflow = getToolByName(self, 'portal_workflow')
    for ar in self.getAnalysisRequests():
        ar_obj = ar.getObject()
        if workflow.getInfoFor(ar_obj, 'review_state') != 'cancelled':
            # reuse ar_obj instead of a second ar.getObject() round-trip
            doActionFor(ar_obj, 'cancel')
When the round is cancelled, all its associated Samples and ARs are cancelled by the system.
def initialize_constraint_table(cfg_list):
    """Initialize the global constraint table with every node of every
    CFG in *cfg_list* mapped to 0."""
    for cfg in cfg_list:
        for node in cfg.nodes:
            constraint_table[node] = 0
Collects all given cfg nodes and initializes the table with value 0.
def disconnect(self):
    """Disconnect the broker; called as part of the agency shutdown."""
    self.log("Disconnecting broker %r.", self)
    d = defer.succeed(None)
    if self.is_master():
        if self.listener is not None:
            d.addCallback(defer.drop_param, self.listener.stopListening)
        d.addCallback(defer.drop_param, self.factory.disconnect)
    elif self.is_slave():
        d = defer.maybeDeferred(self.factory.disconnect)
    elif self._cmp_state(BrokerRole.disconnected):
        # already disconnected: nothing to tear down
        return defer.succeed(None)
    d.addCallback(defer.drop_param, self.become_disconnected)
    return d
This is called as part of the agency shutdown.
def parse(link):
    """Parse a TV-series page and return its dataset as a dictionary."""
    html = get_html(link)
    data = {
        'rating': get_rating(html),
        'name': get_name_date(html)[0],
    }
    widget = html.find(id="title-episode-widget")
    episodes = {}
    for season_link in get_a(widget, find="season="):
        for episode in episode_list(season_link):
            season, info = parse_episode(episode)
            episodes.setdefault(season, []).append(info)
    data['episodes'] = episodes
    return data
Parses a Tv Series returns the dataset as a dictionary
def get_profile(self, user_id, timeout=None):
    """Call the LINE get-profile API and return a Profile instance.

    :param str user_id: User ID
    :param timeout: optional request timeout (float or (connect, read)
        tuple); defaults to the client's timeout.
    """
    response = self._get(
        '/v2/bot/profile/{user_id}'.format(user_id=user_id),
        timeout=timeout,
    )
    return Profile.new_from_json_dict(response.json)
Call get profile API. https://devdocs.line.me/en/#bot-api-get-profile Get user profile information. :param str user_id: User ID :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (connect timeout, read timeout) float tuple. Default is self.http_client.timeout :type timeout: float | tuple(float, float) :rtype: :py:class:`linebot.models.responses.Profile` :return: Profile instance
def register_mapper(self, mapper, content_type, shortname=None):
    """Register *mapper* for a content type and optional shortname.

    :param mapper: mapper object; must implement ``parse()`` and
        ``format()`` (validated by ``_check_mapper``).
    """
    self._check_mapper(mapper)
    names = self._get_content_type_names(content_type, shortname)
    # dict comprehension instead of dict([(k, v) for ...]) (flake8 C404)
    self._datamappers.update({name: mapper for name in names})
Register new mapper. :param mapper: mapper object needs to implement ``parse()`` and ``format()`` functions.
def readTraining(self, training_file):
    """Read training pairs from a previously built training-data file
    object and mark them."""
    logger.info('reading training from file')
    pairs = json.load(training_file, cls=serializer.dedupe_decoder)
    self.markPairs(pairs)
Read training from previously built training data file object Arguments: training_file -- file object containing the training data
def _cleanup(self):
    """Remove the saved todo list if its contents are inconsistent.

    A consistent file has an integer count on its first line, followed by
    "<item> <serial>" lines whose serials parse as integers.
    """
    if not self.todolist.exists():
        return
    try:
        # the original leaked the file handle; close it deterministically
        # (also required on Windows before unlink can succeed)
        with open(self.todolist, encoding="utf-8") as fh:
            lines = iter(fh)
            int(next(lines).strip())
            for line in lines:
                _, serial = line.strip().split()
                int(serial)
    except (StopIteration, ValueError):
        logger.info("Removing inconsistent todo list.")
        self.todolist.unlink()
Does a couple of cleanup tasks to ensure consistent data for later processing.
def status(self):
    """Fetch the status of the responding etcd member."""
    request = etcdrpc.StatusRequest()
    response = self.maintenancestub.Status(
        request,
        self.timeout,
        credentials=self.call_credentials,
        metadata=self.metadata,
    )
    # first known member whose id matches the reported leader, else None
    leader = next((m for m in self.members if m.id == response.leader), None)
    return Status(response.version,
                  response.dbSize,
                  leader,
                  response.raftIndex,
                  response.raftTerm)
Get the status of the responding member.
def count_partitions(self, topic):
    """Return how many of *topic*'s partitions this broker holds."""
    return len([p for p in topic.partitions if p in self.partitions])
Return count of partitions for given topic.
def is_promisc(ip, fake_bcast="ff:ff:00:00:00:00", **kargs):
    """Heuristically test whether the host at *ip* is in promiscuous mode
    by ARP-pinging it with a bogus broadcast MAC."""
    probe = Ether(dst=fake_bcast) / ARP(op="who-has", pdst=ip)
    reply = srp1(probe, type=ETH_P_ARP, iface_hint=ip,
                 timeout=1, verbose=0, **kargs)
    return reply is not None
Try to guess if target is in Promisc mode. The target is provided by its ip.
def set_mt_wcs(self, image):
    """Reset each chip's WCS from the matching chip of *image*."""
    for chip in range(1, self._numchips + 1):
        sci_chip = self._image[self.scienceExt, chip]
        ref_chip = image._image[image.scienceExt, chip]
        sci_chip.wcs = ref_chip.wcs.copy()
Reset the WCS for this image based on the WCS information from another imageObject.
def interact(banner=None, readfunc=None, local=None):
    """Closely emulate the interactive Python interpreter.

    Backwards-compatible wrapper around InteractiveConsole; when
    *readfunc* is None, tries to import readline for GNU readline support.
    """
    console = InteractiveConsole(local)
    if readfunc is None:
        try:
            import readline  # noqa: F401 — imported for its side effect
        except ImportError:
            pass
    else:
        console.raw_input = readfunc
    console.interact(banner)
Closely emulate the interactive Python interpreter. This is a backwards compatible interface to the InteractiveConsole class. When readfunc is not specified, it attempts to import the readline module to enable GNU readline if it is available. Arguments (all optional, all default to None): banner -- passed to InteractiveConsole.interact() readfunc -- if not None, replaces InteractiveConsole.raw_input() local -- passed to InteractiveInterpreter.__init__()
def canCommit(self, prepare: Prepare) -> (bool, str):
    """Return ``(ok, reason)``: whether the given PREPARE may proceed to
    the COMMIT step (prepare quorum reached and COMMIT not yet sent)."""
    quorum = self.quorums.prepare.value
    if not self.prepares.hasQuorum(prepare, quorum):
        return False, 'does not have prepare quorum for {}'.format(prepare)
    if self.hasCommitted(prepare):
        return False, 'has already sent COMMIT for {}'.format(prepare)
    return True, ''
Return whether the specified PREPARE can proceed to the Commit step. Decision criteria: - If this replica has got just n-f-1 PREPARE requests then commit request. - If less than n-f-1 PREPARE requests then probably there's no consensus on the request; don't commit - If more than n-f-1 then already sent COMMIT; don't commit :param prepare: the PREPARE
def close(self):
    """Close any open HTTP connection to the API server.

    Errors are swallowed unless ``fail_silently`` is false.
    """
    try:
        self.connection.close()
        self.connection = None
    except Exception:
        if not self.fail_silently:
            raise
Close any open HTTP connections to the API server.
def stub():
    """Just some left over code: reads userid/passwd/group from a CGI form
    and discards them."""
    form = cgi.FieldStorage()
    userid = form['userid'].value      # noqa: F841 — intentionally unused
    password = form['passwd'].value    # noqa: F841
    group = form['group'].value        # noqa: F841
Just some left over code
def updateRPYLocations(self):
    """Update the positions and font size of the roll/pitch/yaw text."""
    # all three labels share the same x offset
    x = self.leftPos + (self.vertSize / 10.0)
    self.rollText.set_position(
        (x, -0.97 + (2 * self.vertSize) - (self.vertSize / 10.0)))
    self.pitchText.set_position(
        (x, -0.97 + self.vertSize - (0.5 * self.vertSize / 10.0)))
    self.yawText.set_position((x, -0.97))
    for label in (self.rollText, self.pitchText, self.yawText):
        label.set_size(self.fontSize)
Update the locations of roll, pitch, yaw text.
def _total_rectangles(tree): return sum(len(sec.children) + sec.points.shape[0] - 1 for sec in tree.iter_sections())
Calculate the total number of segments that are required for the dendrogram. There is a vertical line for each segment and two horizontal line at each branching point
def returner(ret):
    """Send the salt return data as an XMPP message."""
    _options = _get_options(ret)
    from_jid = _options.get('from_jid')
    password = _options.get('password')
    recipient_jid = _options.get('recipient_jid')
    if not from_jid:
        log.error('xmpp.jid not defined in salt config')
        return
    if not password:
        log.error('xmpp.password not defined in salt config')
        return
    if not recipient_jid:
        log.error('xmpp.recipient not defined in salt config')
        return
    message = ('id: {0}\r\n'
               'function: {1}\r\n'
               'function args: {2}\r\n'
               'jid: {3}\r\n'
               'return: {4}\r\n').format(
        ret.get('id'), ret.get('fun'), ret.get('fun_args'),
        ret.get('jid'), pprint.pformat(ret.get('return')))
    xmpp = SendMsgBot(from_jid, password, recipient_jid, message)
    # service discovery + ping plugins
    xmpp.register_plugin('xep_0030')
    xmpp.register_plugin('xep_0199')
    if xmpp.connect():
        xmpp.process(block=True)
        return True
    return False
Send an xmpp message with the data
def rooms_info(self, room_id=None, room_name=None):
    """Retrieve room information by id or by name; raises
    RocketMissingParamException when neither is given."""
    if room_id is not None:
        return self.__call_api_get('rooms.info', roomId=room_id)
    if room_name is not None:
        return self.__call_api_get('rooms.info', roomName=room_name)
    raise RocketMissingParamException('roomId or roomName required')
Retrieves the information about the room.
def get_install_names(filename):
    """Return the tuple of install names for the library *filename*.

    The tuple is empty if there are no install names or *filename* is not
    an object file; when an install id is present it is dropped from the
    returned names.
    """
    lines = _cmd_out_err(['otool', '-L', filename])
    if not _line0_says_object(lines[0], filename):
        return ()
    names = tuple(parse_install_name(line)[0] for line in lines[1:])
    install_id = get_install_id(filename)
    if install_id is not None:  # PEP 8: "is not None", not "not ... is None"
        assert names[0] == install_id
        return names[1:]
    return names
Return install names from library named in `filename` Returns tuple of install names tuple will be empty if no install names, or if this is not an object file. Parameters ---------- filename : str filename of library Returns ------- install_names : tuple tuple of install names for library `filename`
def _compute_all_files(self): self._all_files = any(pat.all_files() for pat in self.patterns)
Handles lazy evaluation of self.all_files
def eof_received(self) -> bool:
    """Close the transport after receiving EOF.

    Returns False so behavior matches on TLS and plain connections and
    the websockets closing handshake stays in charge of teardown.
    """
    logger.debug("%s - event = eof_received()", self.side)
    super().eof_received()
    return False
Close the transport after receiving EOF. Since Python 3.5, `:meth:~StreamReaderProtocol.eof_received` returns ``True`` on non-TLS connections. See http://bugs.python.org/issue24539 for more information. This is inappropriate for websockets for at least three reasons: 1. The use case is to read data until EOF with self.reader.read(-1). Since websockets is a TLV protocol, this never happens. 2. It doesn't work on TLS connections. A falsy value must be returned to have the same behavior on TLS and plain connections. 3. The websockets protocol has its own closing handshake. Endpoints close the TCP connection after sending a close frame. As a consequence we revert to the previous, more useful behavior.
def get_rendition_url(self, width=0, height=0):
    """Return the rendition URL for the requested size, creating the
    rendition if it does not yet exist; (0, 0) yields the master URL."""
    if width == 0 and height == 0:
        return self.get_master_url()
    target_width, target_height = self.get_rendition_size(width, height)
    key = '%sx%s' % (target_width, target_height)
    if not self.renditions:
        self.renditions = {}
    rendition_name = self.renditions.get(key)
    if not rendition_name:
        rendition_name = self.make_rendition(target_width, target_height)
    return default_storage.url(rendition_name)
get the rendition URL for a specified size if the renditions does not exists it will be created
def rl_get_point() -> int:
    """Return the offset of the cursor within rl_line_buffer (0 when no
    supported readline implementation is active)."""
    if rl_type == RlType.GNU:
        return ctypes.c_int.in_dll(readline_lib, "rl_point").value
    if rl_type == RlType.PYREADLINE:
        return readline.rl.mode.l_buffer.point
    return 0
Returns the offset of the current cursor position in rl_line_buffer
def validate(self, value):
    """Return the validated value; raise ValidationError when a required
    column receives None."""
    if value is None and self.required:
        raise ValidationError('{} - None values are not allowed'.format(
            self.column_name or self.db_field))
    return value
Returns a cleaned and validated value. Raises a ValidationError if there's a problem
def dump(self):
    """Dump raw JSON output of the matching queryset.

    Returns:
        List of dicts.
    """
    # list() instead of a manual append loop (perflint PERF402)
    return list(self.data())
Dump raw JSON output of matching queryset objects. Returns: List of dicts.
def raise_not_found(self, environ, msg):
    """Raise werkzeug NotFound with the rewriter app's error response for
    the given WSGI environ and message."""
    response = self.rewriterapp._error_response(environ, msg)
    raise NotFound(response=response)
Utility function for raising a werkzeug.exceptions.NotFound execption with the supplied WSGI environment and message. :param dict environ: The WSGI environment dictionary for the request :param str msg: The error message
def _convert_units(obj, desired, guess=False):
    """Scale *obj* (via its ``apply_scale``) from its current units to
    *desired*; unset units are derived from metadata, guessing only when
    *guess* is true."""
    if obj.units is None:
        obj.units = units_from_metadata(obj, guess=guess)
    log.info('converting units from %s to %s', obj.units, desired)
    obj.apply_scale(unit_conversion(obj.units, desired))
    obj.units = desired
Given an object with scale and units try to scale to different units via the object's `apply_scale`. Parameters --------- obj : object With apply_scale method (i.e. Trimesh, Path2D, etc) desired : str Units desired (eg 'inches') guess: bool Whether we are allowed to guess the units if they are not specified.
def nsmap(self):
    """Return the current namespace mapping as a dict.

    Duplicate (prefix, uri) pairs are collapsed and entries whose prefix
    or URI resolves to an empty string are skipped.
    """
    mapping = {}
    for prefix_idx, uri_idx in set(self.namespaces):
        prefix = self.sb[prefix_idx]
        uri = self.sb[uri_idx]
        if uri != "" and prefix != "":
            mapping[prefix] = uri
    return mapping
Returns the current namespace mapping as a dictionary there are several problems with the map and we try to guess a few things here: 1) a URI can be mapped by many prefixes, so it is to decide which one to take 2) a prefix might map to an empty string (some packers) 3) uri+prefix mappings might be included several times 4) prefix might be empty
def get_correlated_reports_page(self, indicators, enclave_ids=None,
                                is_enclave=True, page_size=None,
                                page_number=None):
    """Retrieve one page of TruSTAR reports correlated with *indicators*,
    searching enclave or community reports per *is_enclave*."""
    distribution_type = (DistributionType.ENCLAVE if is_enclave
                         else DistributionType.COMMUNITY)
    params = {
        'indicators': indicators,
        'enclaveIds': enclave_ids,
        'distributionType': distribution_type,
        'pageNumber': page_number,
        'pageSize': page_size,
    }
    resp = self._client.get("reports/correlated", params=params)
    return Page.from_dict(resp.json(), content_type=Report)
Retrieves a page of all TruSTAR reports that contain the searched indicators. :param indicators: A list of indicator values to retrieve correlated reports for. :param enclave_ids: The enclaves to search in. :param is_enclave: Whether to search enclave reports or community reports. :param int page_number: the page number to get. :param int page_size: the size of the page to be returned. :return: The list of IDs of reports that correlated. Example: >>> reports = ts.get_correlated_reports_page(["wannacry", "www.evil.com"]).items >>> print([report.id for report in reports]) ["e3bc6921-e2c8-42eb-829e-eea8da2d3f36", "4d04804f-ff82-4a0b-8586-c42aef2f6f73"]
def ncores_used(self):
    """Return the number of cores currently used by running tasks."""
    total = 0
    for task in self:
        if task.status == task.S_RUN:
            total += task.manager.num_cores
    return total
Returns the number of cores used in this moment. A core is used if there's a job that is running on it.
def convert_http_request(request, referrer_host=None):
    """Build a :class:`urllib.request.Request` from a wpull HTTP request,
    copying its URL and header fields."""
    converted = urllib.request.Request(
        request.url_info.url,
        origin_req_host=referrer_host,
    )
    for name, value in request.fields.get_all():
        converted.add_header(name, value)
    return converted
Convert a HTTP request. Args: request: An instance of :class:`.http.request.Request`. referrer_host (str): The referrering hostname or IP address. Returns: Request: An instance of :class:`urllib.request.Request`
def param_fetch_one(self, name):
    """Initiate fetch of one parameter: by index when *name* parses as an
    int, otherwise by name (index -1)."""
    try:
        idx = int(name)
        self.mav.param_request_read_send(
            self.target_system, self.target_component, "", idx)
    except Exception:
        self.mav.param_request_read_send(
            self.target_system, self.target_component, name, -1)
initiate fetch of one parameter
def interfacesFor(self, powerup):
    """Yield the interfaces for which *powerup* is installed on this item
    (not implemented for in-memory powerups)."""
    pc = _PowerupConnector
    query = self.store.query(pc, AND(pc.item == self, pc.powerup == powerup))
    for iface in query.getColumn('interface'):
        yield namedAny(iface)
Return an iterator of the interfaces for which the given powerup is installed on this object. This is not implemented for in-memory powerups. It will probably fail in an unpredictable, implementation-dependent way if used on one.
def capture_moves(self, position):
    """Yield all possible diagonal capture moves (right then left)."""
    for shift in (self.location.shift_right, self.location.shift_left):
        try:
            diagonal = self.square_in_front(shift())
            for move in self._one_diagonal_capture_square(diagonal, position):
                yield move
        except IndexError:
            # diagonal falls off the board on that side
            continue
Finds out all possible capture moves :rtype: generator
def bind_license(self, license_item_id=None):
    """Auto-bind a license (dynamic when no POS is found).

    :param str license_item_id: license id
    :raises LicenseError: binding failed, possibly no licenses
    :return: None
    """
    self.make_request(
        LicenseError,
        method='create',
        resource='bind',
        params={'license_item_id': license_item_id})
Auto bind license, uses dynamic if POS is not found :param str license_item_id: license id :raises LicenseError: binding license failed, possibly no licenses :return: None
# Collect all peers that qualify for sharing new_best_path, based on its
# route targets (RTs): start from every configured peer filtered by origin
# AS; if the path carries RTs, restart from the non-RTC peers and add each
# RTC peer whose RT filter is absent or intersects the path's RTs (the
# default RT is appended so default-interest peers match).
# NOTE(review): inside the RT branch, qualified_peers is reset to
# _get_non_rtc_peers(), discarding the origin-AS filtering computed just
# above — confirm upstream whether that is intended.
def _collect_peers_of_interest(self, new_best_path): path_rts = new_best_path.get_rts() qualified_peers = set(self._peers.values()) qualified_peers = self._rt_manager.filter_by_origin_as( new_best_path, qualified_peers ) if path_rts: path_rts.append(RouteTargetMembershipNLRI.DEFAULT_RT) qualified_peers = set(self._get_non_rtc_peers()) peer_to_rtfilter_map = self._peer_to_rtfilter_map for peer, rt_filter in peer_to_rtfilter_map.items(): if peer is None: continue if rt_filter is None: qualified_peers.add(peer) elif rt_filter.intersection(path_rts): qualified_peers.add(peer) return qualified_peers
Collect all peers that qualify for sharing a path with given RTs.
def layer_norm_compute(x, epsilon, scale, bias, layer_collection=None):
    """Layer norm raw computation over the last axis of *x*.

    ``layer_collection`` is accepted for interface compatibility but not
    used here.
    """
    # the original bound an unused ``params = (scale, bias)`` tuple; dropped
    epsilon, scale, bias = [cast_like(t, x) for t in [epsilon, scale, bias]]
    mean = tf.reduce_mean(x, axis=[-1], keepdims=True)
    variance = tf.reduce_mean(
        tf.squared_difference(x, mean), axis=[-1], keepdims=True)
    norm_x = (x - mean) * tf.rsqrt(variance + epsilon)
    return norm_x * scale + bias
Layer norm raw computation.
def disable_pow_check(chain_class: Type[BaseChain]) -> Type[BaseChain]:
    """Disable proof-of-work seal validation for each of the chain's VMs,
    allowing block mining without generating the PoW seal.

    Blocks mined this way will not be importable on chains that still
    validate proof of work.
    """
    if not chain_class.vm_configuration:
        raise ValidationError("Chain class has no vm_configuration")
    if issubclass(chain_class, NoChainSealValidationMixin):
        no_seal_class = chain_class
    else:
        # mix in the no-seal-validation behavior under the same class name
        no_seal_class = type(
            chain_class.__name__,
            (chain_class, NoChainSealValidationMixin),
            {},
        )
    return no_seal_class.configure(
        vm_configuration=_mix_in_disable_seal_validation(
            no_seal_class.vm_configuration
        ),
    )
Disable the proof of work validation check for each of the chain's vms. This allows for block mining without generation of the proof of work seal. .. note:: blocks mined this way will not be importable on any chain that does not have proof of work disabled.
def plot(self):
    """Plot the data points and the interpolated gene curve."""
    pl.plot(self.x, self.y, '.')
    pl.plot(self.x_int, self.y_int)
    pl.grid(True)
    pl.show()
Plot the individual and the gene
def _set_folium_map(self):
    """Draw and cache a folium map containing only this feature."""
    feature_map = Map(features=[self], width=self._width, height=self._height)
    self._folium_map = feature_map.draw()
A map containing only the feature.
def _from_java(cls, java_stage):
    """Create a Python wrapper of a Java OneVsRestModel (ML persistence)."""
    features_col = java_stage.getFeaturesCol()
    label_col = java_stage.getLabelCol()
    prediction_col = java_stage.getPredictionCol()
    classifier = JavaParams._from_java(java_stage.getClassifier())
    models = [JavaParams._from_java(m) for m in java_stage.models()]
    py_stage = (cls(models=models)
                .setPredictionCol(prediction_col)
                .setLabelCol(label_col)
                .setFeaturesCol(features_col)
                .setClassifier(classifier))
    py_stage._resetUid(java_stage.uid())
    return py_stage
Given a Java OneVsRestModel, create and return a Python wrapper of it. Used for ML persistence.
def _decode_telegram_base64(string): try: return base64.urlsafe_b64decode(string + '=' * (len(string) % 4)) except (binascii.Error, ValueError, TypeError): return None
Decodes an url-safe base64-encoded string into its bytes by first adding the stripped necessary padding characters. This is the way Telegram shares binary data as strings, such as Bot API-style file IDs or invite links. Returns ``None`` if the input string was not valid.
def dens_alum_nanocluster(coag):
    """Return the density of aluminum in the nanocluster.

    Useful for determining the volume of nanoclusters given a
    concentration of aluminum.
    """
    return (coag.PrecipDensity * MOLEC_WEIGHT_ALUMINUM
            * coag.PrecipAluminumMPM / coag.PrecipMolecWeight)
Return the density of the aluminum in the nanocluster. This is useful for determining the volume of nanoclusters given a concentration of aluminum.
def clear_expired_cookies(self):
    """Discard all expired cookies.

    You probably don't need to call this directly: expired cookies are
    never sent back to the server, the jar calls this periodically, and
    save() skips expired cookies by default.
    """
    # use the lock as a context manager instead of manual acquire/release
    with self._cookies_lock:
        now = time.time()
        for cookie in self:
            if cookie.is_expired(now):
                self.clear(cookie.domain, cookie.path, cookie.name)
Discard all expired cookies. You probably don't need to call this method: expired cookies are never sent back to the server (provided you're using DefaultCookiePolicy), this method is called by CookieJar itself every so often, and the .save() method won't save expired cookies anyway (unless you ask otherwise by passing a true ignore_expires argument).
def has_key(self, key):
    """Return True iff *key* exists and has not expired (expiry is
    signalled by ``self[key]`` raising ValueError or KeyError)."""
    if key not in self._dict:
        return False
    try:
        self[key]
    except (ValueError, KeyError):
        return False
    return True
Does the key exist? This method will check to see if it has expired too.
def cancel(self):
    """Tell libdbus you no longer care about the pending incoming message,
    and cancel any local awaiter."""
    dbus.dbus_pending_call_cancel(self._dbobj)
    if self._awaiting is not None:  # PEP 8: identity test for None, not !=
        self._awaiting.cancel()
tells libdbus you no longer care about the pending incoming message.
def build_git_url(self):
    """Return the build git url, or None if not found."""
    if len(self.dutinformation) > 0:
        build = self.dutinformation.get(0).build
        if build is not None:
            return build.giturl
    return None
get build git url. :return: build git url or None if not found
def to_json(self):
    """Return the JSON representation of the UI extension."""
    result = super(UIExtension, self).to_json()
    result['extension'] = self.extension
    return result
Returns the JSON Representation of the UI extension.
def md5sum(fname, block_size=1048576):
    """Return the hexadecimal MD5 digest of the file *fname*, read in
    chunks of *block_size* bytes."""
    digest = hashlib.md5()
    with open(fname, 'rb') as fid:
        # iterate fixed-size chunks until the b'' sentinel (EOF)
        for chunk in iter(lambda: fid.read(block_size), b''):
            digest.update(chunk)
    return digest.hexdigest()
Calculate the md5sum for a file. Parameters ---------- fname : str Filename. block_size : int Block size to use when reading. Returns ------- hash_ : str The hexadecimal digest of the hash.
# %prog pile abedfile bbedfile > piles
# Call intersectBed on two BED files, group overlapping features with a
# union-find Grouper, and print "|"-joined piles.
# NOTE(review): OptionParser(pile.__doc__) consumes this function's
# docstring as usage text at runtime (the docstring was stripped into the
# paired dataset row), and the nesting of the print() relative to the
# ``len(group) > 1`` check is ambiguous in this flattened form — confirm
# upstream before reformatting.
def pile(args): from jcvi.utils.grouper import Grouper p = OptionParser(pile.__doc__) p.add_option("--minOverlap", default=0, type="int", help="Minimum overlap required [default: %default]") opts, args = p.parse_args(args) if len(args) != 2: sys.exit(not p.print_help()) abedfile, bbedfile = args iw = intersectBed_wao(abedfile, bbedfile, minOverlap=opts.minOverlap) groups = Grouper() for a, b in iw: groups.join(a.accn, b.accn) ngroups = 0 for group in groups: if len(group) > 1: ngroups += 1 print("|".join(group)) logging.debug("A total of {0} piles (>= 2 members)".format(ngroups))
%prog pile abedfile bbedfile > piles Call intersectBed on two bedfiles.
def write(self, obj: "BioCDocument | BioCPassage | BioCSentence"):
    """Encode and write a single object, validating it against the
    writer's configured level.

    Args:
        obj: a BioCDocument, BioCPassage, or BioCSentence matching
            ``self.level``.

    Raises:
        ValueError: if *obj* does not match the configured level.
    """
    # NOTE: the original annotation ``BioCDocument or BioCPassage or
    # BioCSentence`` evaluated to just BioCDocument; the string union
    # states the intended contract without changing runtime behavior.
    if self.level == DOCUMENT and not isinstance(obj, BioCDocument):
        raise ValueError
    if self.level == PASSAGE and not isinstance(obj, BioCPassage):
        raise ValueError
    if self.level == SENTENCE and not isinstance(obj, BioCSentence):
        raise ValueError
    self.writer.write(BioCJSONEncoder().default(obj))
Encode and write a single object. Args: obj: an instance of BioCDocument, BioCPassage, or BioCSentence Returns:
def load_window_opener(self, item):
    """Instantiate a Window from the JSON config *item* and register it."""
    self.add(Window.from_config(self.pyvlx, item))
Load window opener from JSON.
def get_contact(self, msisdn):
    """Return the WhatsApp ID for *msisdn*; on a miss, fire the
    failed-lookup event and return the falsy lookup result."""
    url = urllib_parse.urljoin(self.api_url, "/v1/contacts")
    response = self.session.post(
        url,
        json={"blocking": "wait", "contacts": [msisdn]},
    )
    response.raise_for_status()
    whatsapp_id = response.json()["contacts"][0].get("wa_id")
    if not whatsapp_id:
        self.fire_failed_contact_lookup(msisdn)
    return whatsapp_id
Returns the WhatsApp ID for the given MSISDN
def fetch(self, minutes=values.unset, start_date=values.unset,
          end_date=values.unset, task_channel=values.unset):
    """Fetch a WorkerStatisticsInstance, optionally filtered by minutes
    in the past, start/end dates, and TaskChannel."""
    return self._proxy.fetch(
        minutes=minutes,
        start_date=start_date,
        end_date=end_date,
        task_channel=task_channel,
    )
Fetch a WorkerStatisticsInstance :param unicode minutes: Filter cumulative statistics by up to 'x' minutes in the past. :param datetime start_date: Filter cumulative statistics by a start date. :param datetime end_date: Filter cumulative statistics by a end date. :param unicode task_channel: Filter cumulative statistics by TaskChannel. :returns: Fetched WorkerStatisticsInstance :rtype: twilio.rest.taskrouter.v1.workspace.worker.worker_statistics.WorkerStatisticsInstance
def beep(self, duration, frequency):
    """Generate a beep.

    :param duration: seconds, in the range 0.1 to 5.
    :param frequency: Hz, in the range 500 to 5000.
    """
    cmd = 'BEEP', [Float(min=0.1, max=5.0), Integer(min=500, max=5000)]
    self._write(cmd, duration, frequency)
Generates a beep. :param duration: The duration in seconds, in the range 0.1 to 5. :param frequency: The frequency in Hz, in the range 500 to 5000.
def _handle_signal_gracefully(cls, signum, signame, traceback_lines):
    """Handle a non-fatal signal: log the full backtrace, then exit with
    failure, printing the backtrace to the terminal only if configured."""
    full_traceback = cls._format_traceback(
        traceback_lines=traceback_lines, should_print_backtrace=True)
    cls.log_exception(cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
        signum=signum, signame=signame, formatted_traceback=full_traceback))
    terminal_traceback = cls._format_traceback(
        traceback_lines=traceback_lines,
        should_print_backtrace=cls._should_print_backtrace_to_terminal)
    cls._exit_with_failure(cls._CATCHABLE_SIGNAL_ERROR_LOG_FORMAT.format(
        signum=signum, signame=signame,
        formatted_traceback=terminal_traceback))
Signal handler for non-fatal signals which raises or logs an error and exits with failure.
def _get_bucket(self, client_kwargs):
    """Return an oss2.Bucket for the bucket named in *client_kwargs*."""
    return _oss.Bucket(self.client,
                       endpoint=self._endpoint,
                       bucket_name=client_kwargs['bucket_name'])
Get bucket object. Returns: oss2.Bucket
# Report HDFS namenode metrics scraped from the first JMX bean: each metric
# found on the bean is submitted, plus a derived capacity_in_use gauge.
# NOTE(review): ``if bean_name != bean_name`` is a tautology (always
# False), so the "Unexpected bean name" guard can never fire — it was
# presumably meant to compare against an expected bean name constant;
# confirm against upstream before fixing.
def _hdfs_namenode_metrics(self, beans, metrics, tags): bean = next(iter(beans)) bean_name = bean.get('name') if bean_name != bean_name: raise Exception("Unexpected bean name {}".format(bean_name)) for metric, (metric_name, metric_type) in iteritems(metrics): metric_value = bean.get(metric) if metric_value is not None: self._set_metric(metric_name, metric_type, metric_value, tags) if 'CapacityUsed' in bean and 'CapacityTotal' in bean: self._set_metric( 'hdfs.namenode.capacity_in_use', self.GAUGE, float(bean['CapacityUsed']) / float(bean['CapacityTotal']), tags, )
Get HDFS namenode metrics from JMX
def _param_toc_updated_cb(self):
    """Called when the parameter TOC has been fully updated."""
    logger.info('Param TOC finished updating')
    self.connected_ts = datetime.datetime.now()
    self.connected.call(self.link_uri)
    self.param.request_update_of_all_params()
Called when the param TOC has been fully updated
def _alert_malformed(self, msg, row_num): if self.error_bad_lines: raise ParserError(msg) elif self.warn_bad_lines: base = 'Skipping line {row_num}: '.format(row_num=row_num) sys.stderr.write(base + msg + '\n')
Alert a user about a malformed row. If `self.error_bad_lines` is True, the alert will be `ParserError`. If `self.warn_bad_lines` is True, the alert will be printed out. Parameters ---------- msg : The error message to display. row_num : The row number where the parsing error occurred. Because this row number is displayed, we 1-index, even though we 0-index internally.
def login(self, came_from=lurl('/')):
    """Start the user login, flashing a warning after a failed attempt."""
    login_counter = request.environ.get('repoze.who.logins', 0)
    if login_counter > 0:
        flash(_('Wrong credentials'), 'warning')
    return dict(page='login',
                login_counter=str(login_counter),
                came_from=came_from)
Start the user login.
def _calculate(self, field):
    """Return the position of *field* among its encloser's rendered fields.

    We want to avoid trouble, so if the field is not enclosed by any
    other field, we just return 0.
    """
    encloser = field.enclosing
    if not encloser:
        # No enclosing field -> nothing to index into.
        return 0
    rendered = encloser.get_rendered_fields(RenderContext(self))
    if field in rendered:
        return rendered.index(field)
    # Field was not rendered: report the count instead of an index.
    return len(rendered)
def create(self, resource):
    """Set all the labels for a resource.

    Args:
        resource: The object containing the resource URI and a list of labels

    Returns:
        dict: Resource Labels
    """
    target_uri = self.URI + self.RESOURCES_PATH
    return self._client.create(resource=resource, uri=target_uri)
def down(force):
    """Destroys an existing cluster."""
    try:
        controller = CloudController(CloudConfig())
        controller.down(force)
    except CloudComposeException as ex:
        # Surface the error to the user instead of a traceback.
        print(ex)
def autodoc_event_handlers(stream=sys.stdout):
    """Print to the given stream the documentation for the events and the
    associated handlers."""
    doc_lines = []
    for handler_cls in all_subclasses(EventHandler):
        if handler_cls in _ABC_EVHANDLER_CLASSES:
            continue
        # Presumably this attribute access doubles as a check that every
        # concrete handler declares an event_class — TODO confirm.
        _ = handler_cls.event_class
        doc_lines += handler_cls.cls2str().split("\n")
        if not hasattr(handler_cls, "can_change_physics"):
            raise RuntimeError("%s: can_change_physics must be defined" % handler_cls)
    stream.write("\n".join(doc_lines) + "\n")
def _parsecsv(x): for line in x: yield line.decode('utf-8').strip().split(config.DELIMITER)
Deserialize file-like object containing csv to a Python generator.
def _decode_symbols(symbols):
    """Generator of decoded symbol information.

    Args:
        symbols: iterable of instances of `POINTER(zbar_symbol)`

    Yields:
        Decoded: decoded symbol
    """
    for symbol in symbols:
        payload = string_at(zbar_symbol_get_data(symbol))
        kind = ZBarSymbol(symbol.contents.type).name
        # Collect the symbol's location points and reduce them to a
        # convex hull, from which the bounding rectangle is derived.
        points = (
            (zbar_symbol_get_loc_x(symbol, i), zbar_symbol_get_loc_y(symbol, i))
            for i in _RANGEFN(zbar_symbol_get_loc_size(symbol))
        )
        hull = convex_hull(points)
        yield Decoded(
            data=payload,
            type=kind,
            rect=bounding_box(hull),
            polygon=hull,
        )
def get_array_shape(self, key):
    """Return the shape of the array stored under *key* in the model's data."""
    return self.model.get_data()[key].shape
def resizeToContents(self):
    """Resizes the list widget to fit its contents vertically."""
    total = self.count()
    if not total:
        self.setFixedHeight(self.minimumHeight())
        return
    # Measure to the bottom of the last item plus a small margin,
    # never shrinking below 28 pixels.
    last_rect = self.visualItemRect(self.item(total - 1))
    self.setFixedHeight(max(28, last_rect.bottom() + 8))
def tojson(self) -> str:
    """Serialize an Event into JSON.

    Returns
    -------
    str
        JSON-serialized Event.
    """
    payload = {
        'event_id': str(self.id),
        'event_type': self.type,
        'schema_name': self.schema_name,
        'table_name': self.table_name,
        'row_id': self.row_id,
    }
    return json.dumps(payload)
def get_prep_value(self, value: LocalizedIntegerValue) -> dict:
    """Gets the value in a format to store into the database.

    Missing languages are filled from this field's default (mutating the
    incoming value in place); every per-language value is then validated
    as integer-like and stored as a string (None stays None).

    Raises:
        IntegrityError: when a per-language value is not integer-like.
    """
    # Defaults keyed per language, built from the field's configured default.
    default_values = LocalizedIntegerValue(self.default)
    if isinstance(value, LocalizedIntegerValue):
        for lang_code, _ in settings.LANGUAGES:
            local_value = value.get(lang_code)
            if local_value is None:
                # NOTE: mutates the caller's value object in place.
                value.set(lang_code, default_values.get(lang_code, None))
    prepped_value = super().get_prep_value(value)
    if prepped_value is None:
        return None
    for lang_code, _ in settings.LANGUAGES:
        local_value = prepped_value[lang_code]
        try:
            # Validation only; int() is not assigned back.
            if local_value is not None:
                int(local_value)
        except (TypeError, ValueError):
            raise IntegrityError('non-integer value in column "%s.%s" violates '
                                 'integer constraint' % (self.name, lang_code))
        # The database column stores strings, so stringify non-None values.
        prepped_value[lang_code] = str(local_value) if local_value is not None else None
    return prepped_value
def stderrHandler(level, object, category, file, line, message):
    """A log handler that writes to stderr.

    @type level: string
    @type object: string (or None)
    @type category: string
    @type message: string
    """
    quoted = '"' + object + '"' if object else ""
    where = "(%s:%d)" % (file, line)
    safeprintf(sys.stderr, '%s [%5d] %-32s %-17s %-15s ',
               getFormattedLevelName(level), os.getpid(), quoted,
               category, time.strftime("%b %d %H:%M:%S"))
    try:
        safeprintf(sys.stderr, '%-4s %s %s\n', "", message, where)
    except UnicodeEncodeError:
        # Fall back to a UTF-8 byte string when the message cannot be
        # encoded for the terminal.
        safeprintf(sys.stderr, '%-4s %s %s\n', "", message.encode('UTF-8'), where)
    sys.stderr.flush()
def space_references(document):
    """Ensure a space around reference links, so there's a gap when they
    are removed."""
    for ref in document.xpath('.//a/sup/span[@class="sup_ref"]'):
        link = ref.getparent().getparent()
        if link is None:
            continue
        tail = link.tail or ''
        # Only prepend a space when the tail does not already start with
        # punctuation or whitespace.
        if not tail.startswith((')', ',', ' ')):
            link.tail = ' ' + tail
    return document
def get_sigma(imt):
    """Return the value of the total sigma.

    :param imt:
        An :class:`openquake.hazardlib.imt.IMT` instance
    :returns:
        A float representing the total sigma value
    """
    period = imt.period
    if period < 0.2:
        exponent = 0.23
    elif period > 1.0:
        exponent = 0.27
    else:
        # Linear interpolation between the short-period (0.23) and
        # long-period (0.27) exponents over the 0.2-1.0 s range.
        exponent = 0.23 + (period - 0.2) / 0.8 * 0.04
    return np.log(10 ** exponent)
def get_dividend_sum_for_symbol(book: Book, symbol: str):
    """Calculates all income for a symbol."""
    security = SecuritiesAggregate(book).get_by_symbol(symbol)
    income_accounts = SecurityAggregate(book, security).get_income_accounts()
    # Sum dividend income over all income accounts, starting from Decimal 0
    # to keep Decimal arithmetic throughout.
    return sum((get_dividend_sum(book, account) for account in income_accounts),
               Decimal(0))
def _build_hash_string(self): if self.site_name in SITE_LIST or self.hash_string: if self.username and self.password: try: hash_string = self.hash_string.format(self.password) except TypeError: raise PybooruError("Pybooru can't add 'password' " "to 'hash_string'") self.password_hash = hashlib.sha1( hash_string.encode('utf-8')).hexdigest() else: raise PybooruError("Specify the 'username' and 'password' " "parameters of the Pybooru object, for " "setting 'password_hash' attribute.") else: raise PybooruError( "Specify the 'hash_string' parameter of the Pybooru" " object, for the functions that requires login.")
Function for build password hash string. Raises: PybooruError: When isn't provide hash string. PybooruError: When aren't provide username or password. PybooruError: When Pybooru can't add password to hash strring.
def get_instance(self, payload):
    """Build an instance of TaskQueueStatisticsInstance

    :param dict payload: Payload response from the API

    :returns: twilio.rest.taskrouter.v1.workspace.task_queue.task_queue_statistics.TaskQueueStatisticsInstance
    :rtype: twilio.rest.taskrouter.v1.workspace.task_queue.task_queue_statistics.TaskQueueStatisticsInstance
    """
    solution = self._solution
    return TaskQueueStatisticsInstance(
        self._version,
        payload,
        workspace_sid=solution['workspace_sid'],
        task_queue_sid=solution['task_queue_sid'],
    )
def divide(self, layer=WORDS, by=SENTENCES):
    """Divide the Text into pieces by keeping references to original
    elements, when possible. This is not possible only, if the _element_
    is a multispan.

    Parameters
    ----------
    layer: str
        The element to collect and distribute in resulting bins.
    by: str
        Each resulting bin is defined by spans of this element.

    Returns
    -------
    list of (list of dict)
    """
    # Make sure both layers are tagged before dividing (layer first,
    # matching the original order of side effects).
    for required in (layer, by):
        if not self.is_tagged(required):
            self.tag(required)
    # Delegates to the module-level divide() helper (not this method).
    return divide(self[layer], self[by])
def get(self, *, no_ack=False):
    """Synchronously get a message from the queue.

    This method is a :ref:`coroutine <coroutine>`.

    :keyword bool no_ack: if true, the broker does not require
        acknowledgement of receipt of the message.

    :return: an :class:`~asynqp.message.IncomingMessage`,
        or ``None`` if there were no messages on the queue.
    """
    if self.deleted:
        raise Deleted("Queue {} was deleted".format(self.name))
    self.sender.send_BasicGet(self.name, no_ack)
    # The broker replies with either BasicGetOK (a message follows) or
    # BasicGetEmpty (queue had no messages).
    tag_msg = yield from self.synchroniser.wait(spec.BasicGetOK, spec.BasicGetEmpty)
    if tag_msg is not None:
        consumer_tag, msg = tag_msg
        # BasicGet deliveries carry no consumer tag.
        assert consumer_tag is None
    else:
        msg = None
    # Allow the frame reader to process the next frame.
    self.reader.ready()
    return msg
def _get_tnames(self):
    """Get the names of the time coordinates of the objects in this list."""
    names = set()
    for entry in self:
        if isinstance(entry, InteractiveList):
            # Nested lists contribute all of their time coordinate names.
            names |= set(entry.get_tnames())
            continue
        base_var = next(entry.psy.iter_base_variables)
        names.add(entry.psy.decoder.get_tname(base_var, entry.coords))
    # ``None`` marks entries without a time coordinate; drop it.
    return names - {None}
def get_ancestor_processes():
    """Get a list of the executables of all ancestor processes.

    The result is computed once and memoised in the module-level
    ``_ANCESTOR_PROCESSES`` list; later calls return the cached list.
    Returns an empty list when psutil is unavailable.
    """
    if _ANCESTOR_PROCESSES or psutil is None:
        return _ANCESTOR_PROCESSES
    proc = psutil.Process(os.getpid())
    while proc.parent() is not None:
        try:
            _ANCESTOR_PROCESSES.append(proc.parent().exe())
            proc = proc.parent()
        except psutil.Error:
            # Stop climbing when a parent cannot be inspected (e.g.
            # insufficient permissions); keep what was gathered so far.
            break
    return _ANCESTOR_PROCESSES
def insert(self, key, value, data=None):
    """Insert the `key` into a bin based on the given `value`.

    Optionally, a `data` dictionary may be provided to attach arbitrary
    data to the key.

    Raises:
        BoundsError: if `value` is outside [self.min_value, self.max_value].
    """
    # The original default was the mutable literal `data={}`, which is
    # shared between all calls; use a None sentinel and build a fresh
    # dict per call instead.
    if data is None:
        data = {}
    if value < self.min_value or value > self.max_value:
        raise BoundsError('item value out of bounds')
    item = self.Item(key, value, data)
    index = self.get_bin_index(value)
    self.bins[index].append(item)
def _update_config(self,directory,filename): basefilename=os.path.splitext(filename)[0] ext=os.path.splitext(filename)[1].lower() if filename==SET_FILE: print("%s - Moving photos to album"%(filename)) return self._upload_media(directory,movealbum_request=True) elif filename==MEGAPIXEL_FILE: print("%s - Resizing photos"%(filename)) return self._upload_media(directory,resize_request=True) elif ext in self.FB_META_EXTENSIONS: print("%s - Changing photo title"%(basefilename)) return self._upload_media(directory,basefilename,changetitle_request=True) return False
Manages FB config files
def get_fallback_languages(self):
    """Return the fallback language codes, which are used in case there
    is no translation for the currently active language."""
    current = self._current_language
    config = get_language_settings(current)
    # The current language never falls back to itself.
    return [code for code in config['fallbacks'] if code != current]