code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def patternVector(self, vector):
    """Replace a vector with its registered pattern word(s), if patterned.

    Used for loading inputs or targets from a file while still preserving
    patterns. Strings pass through unchanged; numbers and lists are
    replaced by their pattern word when one is registered; list elements
    are translated element-wise (recursively) otherwise.
    """
    if not self.patterned:
        return vector
    # Exact type check (not isinstance) so bools intentionally fall through,
    # matching the original type() == int / == float behavior. The two
    # previously duplicated int/float branches are merged here.
    if type(vector) in (int, float):
        word = self.getWord(vector)
        return word if word != '' else vector
    if type(vector) == str:
        return vector
    if type(vector) == list:
        word = self.getWord(vector)
        if word != '':
            return word
        # No whole-list pattern: translate element-wise, recursing for
        # elements that have no direct word.
        vec = []
        for v in vector:
            word = self.getWord(v)
            vec.append(word if word != '' else self.patternVector(v))
        return vec
def _server_property(self, attr_name):
    """Return an attribute of the current server's description.

    If the client is not connected, this blocks until a connection is
    established, or raises ServerSelectionTimeoutError if no server is
    available. Not threadsafe if used multiple times in a single method:
    the selected server may change between calls. In that case, store a
    local reference to a ServerDescription first and use its properties.
    """
    server = self._topology.select_server(writable_server_selector)
    return getattr(server.description, attr_name)
def _parse_response(self, respond): mobj = self._max_qubit_error_re.match(respond.text) if mobj: raise RegisterSizeError( 'device register size must be <= {}'.format(mobj.group(1))) return True
parse text of response for HTTP errors This parses the text of the response to decide whether to retry request or raise exception. At the moment this only detects an exception condition. Args: respond (Response): requests.Response object Returns: bool: False if the request should be retried, True if not. Raises: RegisterSizeError
def resetScale(self):
    """Reset the scale on this image.

    Undoes any manual scaling by applying the inverse of the current
    scale factors, then records the identity scale.
    """
    sx, sy = self.imgScale
    self.img.scale(1. / sx, 1. / sy)
    self.imgScale = (1., 1.)
def find(self, start_address, end_address, byte_depth=20, instrs_depth=2):
    """Find gadgets in the range [start_address, end_address].

    :param byte_depth: maximum bytes scanned back per candidate.
    :param instrs_depth: maximum instructions per gadget.
    :returns: candidate gadgets sorted by address.
    :raises Exception: if the configured architecture is unsupported.
    """
    self._max_bytes = byte_depth
    self._instrs_depth = instrs_depth
    if self._architecture == ARCH_X86:
        found = self._find_x86_candidates(start_address, end_address)
    elif self._architecture == ARCH_ARM:
        found = self._find_arm_candidates(start_address, end_address)
    else:
        raise Exception("Architecture not supported.")
    return sorted(found, key=lambda gadget: gadget.address)
def _default_headers(self): headers = { "Authorization": 'Bearer {}'.format(self.api_key), "User-agent": self.useragent, "Accept": 'application/json' } if self.impersonate_subuser: headers['On-Behalf-Of'] = self.impersonate_subuser return headers
Set the default header for a Twilio SendGrid v3 API call
def ordc(item, inset):
    """Return the ordinal position of *item* in the character set *inset*,
    or -1 if the item does not appear in the set.

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/ordc_c.html

    :param item: An item to locate within a set.
    :type item: str
    :param inset: A set to search for a given item.
    :type inset: SpiceCharCell
    :return: the ordinal position of item within the set.
    :rtype: int
    """
    assert isinstance(inset, stypes.SpiceCell)
    assert inset.is_char()
    assert isinstance(item, str)
    c_item = stypes.stringToCharP(item)
    return libspice.ordc_c(c_item, ctypes.byref(inset))
def escape_query(query):
    """Escape LDAP filter metacharacters (backslash, *, parens) in *query*."""
    # Backslash must be replaced first so later escapes are not re-escaped.
    replacements = (("\\", r"\5C"), ("*", r"\2A"), ("(", r"\28"), (")", r"\29"))
    for old, new in replacements:
        query = query.replace(old, new)
    return query
def _lxml_el_to_data(lxml_el, ns, nsmap, snake=True):
    """Convert an ``lxml._Element`` instance to a Python tuple:
    (tag_name, [attributes], *children, [text])."""
    items = [_to_colon_ns(lxml_el.tag, default_ns=ns, nsmap=nsmap)]
    attributes = _get_el_attributes(lxml_el, ns=ns, nsmap=nsmap)
    if attributes:
        items.append(attributes)
    items.extend(_lxml_el_to_data(child, ns, nsmap, snake=snake)
                 for child in lxml_el)
    if lxml_el.text:
        items.append(lxml_el.text)
    return tuple(items)
def register_id(self, id_string):
    """Register a manually assigned id as used, to avoid collisions.

    Only ids of the form "<prefix>_<count>" matching this generator's
    prefix bump the internal counter; anything else is ignored.
    """
    try:
        prefix, count = id_string.rsplit("_", 1)
        count = int(count)
    except ValueError:
        # Not of the "<prefix>_<int>" form: nothing to register.
        pass
    else:
        if prefix == self.prefix:
            self.counter = max(count, self.counter)
def facts(self):
    """Iterate over the asserted Facts."""
    current = lib.EnvGetNextFact(self._env, ffi.NULL)
    while current != ffi.NULL:
        yield new_fact(self._env, current)
        current = lib.EnvGetNextFact(self._env, current)
def columns_in_filters(filters):
    """Return a list of the column names used in a set of query filters.

    Parameters
    ----------
    filters : list of str or str
        The filters as passed to ``apply_filter_query``.

    Returns
    -------
    columns : list of str
        All distinct names mentioned in the filters, in first-seen order.
    """
    if not filters:
        return []
    if not isinstance(filters, str):
        filters = ' '.join(filters)
    reserved = {'and', 'or', 'in', 'not'}
    columns = [tokval
               for toknum, tokval, _, _, _
               in generate_tokens(StringIO(filters).readline)
               if toknum == NAME and tokval not in reserved]
    # dict.fromkeys is an order-preserving stdlib dedupe; the third-party
    # tz.unique call is unnecessary here.
    return list(dict.fromkeys(columns))
def yml_fnc(fname, *args, **options):
    """A wrapper around yaml.safe_load, yaml.load, yaml.safe_dump and yaml.dump.

    :param fname: "load" or "dump" (not checked); see also :func:`yml_load`
        and :func:`yml_dump`.
    :param args: [stream] for load, or [cnf, stream] for dump.
    :param options: keyword args; may contain "ac_safe" to load/dump safely.
    """
    key = "ac_safe"
    name = "safe_" + fname if options.get(key) else fname
    fnc = getattr(yaml, name)
    return fnc(*args, **common.filter_from_options(key, options))
def set_attr_value(self, key, attr, value):
    """Set the value of attribute *attr* for the entry stored under *key*."""
    position = self._keys[key]
    self._attrs[attr][position].set(value)
def _(obj): tz_offset = obj.utcoffset() if not tz_offset or tz_offset == UTC_ZERO: iso_datetime = obj.strftime('%Y-%m-%dT%H:%M:%S.%fZ') else: iso_datetime = obj.isoformat() return iso_datetime
ISO 8601 format. Interprets naive datetime as UTC with zulu suffix.
def _check_panel(self, length): n = len(self.index) if divmod(n, length)[1] != 0: raise ValueError("Panel length '%g' must evenly divide length of series '%g'" % (length, n)) if n == length: raise ValueError("Panel length '%g' cannot be length of series '%g'" % (length, n))
Check that given fixed panel length evenly divides index. Parameters ---------- length : int Fixed length with which to subdivide index
def run_alias():
    """Quick aliases for the run command.

    Invoked as 'lcc' this becomes 'run c'; as 'lpython' it becomes
    'run python'. With no extra arguments, --help is appended.
    """
    mode = Path(sys.argv[0]).stem
    needs_help = len(sys.argv) <= 1
    if mode == 'lcc':
        sys.argv.insert(1, 'c')
    elif mode == 'lpython':
        sys.argv.insert(1, 'python')
    sys.argv.insert(1, 'run')
    if needs_help:
        sys.argv.append('--help')
    main.main(prog_name='backend.ai')
def knob_subgroup(self, cutoff=7.0):
    """Return a KnobGroup where all KnobsIntoHoles have
    max_kh_distance <= cutoff.

    Raises ValueError when *cutoff* exceeds self.cutoff.
    """
    if cutoff > self.cutoff:
        raise ValueError("cutoff supplied ({0}) cannot be greater than self.cutoff ({1})".format(cutoff, self.cutoff))
    selected = [k for k in self.get_monomers() if k.max_kh_distance <= cutoff]
    return KnobGroup(monomers=selected, ampal_parent=self.ampal_parent)
def _get_factor(self, belief_prop, evidence):
    """Extract the required factor from the junction tree, reduced by
    any observed evidence.

    Parameters
    ----------
    belief_prop: Belief Propagation
        Belief Propagation which needs to be updated.
    evidence: dict
        {var: state_of_var_observed} pairs.
    """
    final_factor = factor_product(*belief_prop.junction_tree.get_factors())
    if evidence:
        for var, state in evidence.items():
            if var in final_factor.scope():
                final_factor.reduce([(var, state)])
    return final_factor
def building(shape=None, gray=False):
    """Photo of the Centre for Mathematical Sciences in Cambridge.

    Returns
    -------
    A float64 image scaled to [0, 1]; color unless `gray=True`; default
    size [442, 331] unless overridden via `shape`.
    """
    name = 'cms.mat'
    dct = get_data(name, subset=DATA_SUBSET, url=URL_CAM + name)
    im = np.rot90(dct['im'], k=3)
    return convert(im, shape, gray=gray)
def cache_key(self, repo: str, branch: str, task: Task, git_repo: Repo) -> str:
    """Return the key used for storing results in cache."""
    return "{repo}_{branch}_{hash}_{task}".format(
        repo=self.repo_id(repo),
        branch=branch,
        hash=self.current_git_hash(repo, branch, git_repo),
        task=task.hash,
    )
def humanize_filesize(filesize: int) -> Tuple[str, str]:
    """Return a human readable (size, unit) pair for *filesize* in bytes.

    Example: 2048 -> ('2.0', 'KB').
    """
    for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
        if filesize < 1024.0:
            return '{:3.1f}'.format(filesize), unit + 'B'
        filesize /= 1024.0
    # Previously values >= 1024 ZB fell off the loop and returned None.
    return '{:3.1f}'.format(filesize), 'YB'
def contents_match(self, path):
    """Determine whether the file content matches the saved checksum.

    If no checksum has been stored yet, the content cannot be verified,
    so the files are treated as not the same. Otherwise return True iff
    the hashsums match.

    :param path: the file to check.
    """
    checksum = file_hash(path)
    stored_checksum = unitdata.kv().get('hardening:%s' % path)
    if not stored_checksum:
        log('Checksum for %s has not been calculated.' % path, level=DEBUG)
        return False
    if stored_checksum != checksum:
        log('Checksum mismatch for %s.' % path, level=DEBUG)
        return False
    return True
def datasets(self, libref: str = '') -> str:
    """Query a libref; the results show information about it, including
    its members.

    :param libref: the libref to query.
    :return: submitted results in batch mode; otherwise results are
        displayed/printed and None is returned.
    """
    code = "proc datasets"
    if libref:
        code += " dd=" + libref
    code += "; quit;"

    if self.nosub:
        print(code)
        return

    if self.results.lower() == 'html':
        ll = self._io.submit(code, "html")
        if self.batch:
            return ll
        self.DISPLAY(self.HTML(ll['LST']))
    else:
        ll = self._io.submit(code, "text")
        log = ll['LOG'].rsplit(";*\';*\";*/;\n")[0]
        if self.batch:
            return log
        print(log)
def _decode_linode_plan_label(label):
    """Decode a user-supplied Linode plan label into the Linode API format.

    label
        The label, or name, of the plan to decode.
        Example: `Linode 2048` decodes to `Linode 2GB`.

    Returns the PLANID for the decoded label; raises SaltCloudException
    for labels that cannot be decoded.
    """
    sizes = avail_sizes()

    if label not in sizes:
        if 'GB' in label:
            # Looks like the new-style label but isn't a known size.
            raise SaltCloudException(
                'Invalid Linode plan ({}) specified - call avail_sizes() for all available options'.format(label)
            )
        else:
            plan = label.split()
            if len(plan) != 2:
                raise SaltCloudException(
                    'Invalid Linode plan ({}) specified - call avail_sizes() for all available options'.format(label)
                )
            plan_type = plan[0]
            try:
                plan_size = int(plan[1])
            # BUG FIX: int('<non-numeric>') raises ValueError, not TypeError,
            # so the graceful fallback below was unreachable.
            except (ValueError, TypeError):
                plan_size = 0
                log.debug('Failed to decode Linode plan label in Cloud Profile: %s', label)

            if plan_type == 'Linode' and plan_size == 1024:
                plan_type = 'Nanode'

            plan_size = plan_size / 1024
            new_label = "{} {}GB".format(plan_type, plan_size)

            if new_label not in sizes:
                raise SaltCloudException(
                    'Invalid Linode plan ({}) specified - call avail_sizes() for all available options'.format(new_label)
                )

            log.warning(
                'An outdated Linode plan label was detected in your Cloud '
                'Profile (%s). Please update the profile to use the new '
                'label format (%s) for the requested Linode plan size.',
                label, new_label
            )
            label = new_label

    return sizes[label]['PLANID']
def sort_dict(self, data, key):
    """Sort a list of dictionaries by dictionary *key*; empty or None
    input yields []."""
    if not data:
        return []
    return sorted(data, key=itemgetter(key))
def allow_exception(self, exc_class):
    """Allow raising this class of exceptions from commands.

    By default, a server-side exception is re-raised client-side as an
    ExternalError. Registering *exc_class* here makes instances of that
    class be reraised directly instead. The exception must be creatable
    with a single string parameter and should have a ``msg`` property.

    Args:
        exc_class (class): the exception class to allow through.
    """
    self._allowed_exceptions[exc_class.__name__] = exc_class
def battlecry_requires_target(self):
    """True if the play action of the card requires a target."""
    if self.has_combo and self.controller.combo:
        if PlayReq.REQ_TARGET_FOR_COMBO in self.requirements:
            return True
    return any(req in self.requirements for req in TARGETING_PREREQUISITES)
def signed_int_to_unsigned_hex(signed_int: int) -> str:
    """Convert a signed int to a 64-bit unsigned hex string (no '0x' prefix).

    Examples:
        1662740067609015813 => '17133d482ba4f605'
        -5270423489115668655 => 'b6dbb1c2b362bf51'

    :param signed_int: an int to convert
    :returns: unsigned hex string
    """
    # Reinterpret the signed 64-bit value as unsigned via a struct
    # round-trip. The old trailing-'L' strip was Python 2 dead code:
    # Python 3's hex() never emits an 'L' suffix.
    return hex(struct.unpack('Q', struct.pack('q', signed_int))[0])[2:]
def keys(cls, name, hash_key, range_key=None, throughput=None):
    """Create an index that projects only key attributes."""
    projection = cls.KEYS
    return cls(projection, name, hash_key, range_key, throughput=throughput)
def get_property_dict(entity_proto):
    """Convert a datastore.Entity proto to a dict of property name -> Value.

    Args:
        entity_proto: datastore.Entity proto message.
    """
    return {p.key: p.value for p in entity_proto.property}
def unregister_event(self, event_name, handler):
    """Unregister a callable previously registered for *event_name*.

    Silently ignores handlers that were never registered.

    :param event_name: the name of the event (see knack.events).
    :param handler: the callback that was used to register the event.
    """
    handlers = self._event_handlers[event_name]
    try:
        handlers.remove(handler)
    except ValueError:
        pass
def hashes(self):
    """Yield hashes of all permutations of the URL in canonical form."""
    for variant in self.url_permutations(self.canonical):
        yield self.digest(variant)
def get_block_by_hash(self, block_hash: Hash32) -> BaseBlock:
    """Return the block identified by *block_hash*."""
    validate_word(block_hash, title="Block Hash")
    header = self.get_block_header_by_hash(block_hash)
    return self.get_block_by_header(header)
def remote_file(self, branch='master', filename=''):
    """Read a remote file from the Git server.

    Args:
        branch (str): Git branch to find the file in.
        filename (str): Name of file to retrieve, relative to the root
            of the repository.

    Returns:
        str: Contents of the remote file.

    Raises:
        FileNotFoundError: Requested file missing.
    """
    LOG.info('Retrieving "%s" from "%s".', filename, self.git_short)

    try:
        file_blob = self.project.files.get(file_path=filename, ref=branch)
    except gitlab.exceptions.GitlabGetError:
        file_blob = None
    LOG.debug('GitLab file response:\n%s', file_blob)

    if not file_blob:
        msg = 'Project "{0}" is missing file "{1}" in "{2}" branch.'.format(self.git_short, filename, branch)
        LOG.warning(msg)
        raise FileNotFoundError(msg)

    file_contents = b64decode(file_blob.content).decode()
    LOG.debug('Remote file contents:\n%s', file_contents)
    return file_contents
def get_instantiated_service(self, name):
    """Return the instantiated service registered under *name*.

    Raises UninstantiatedServiceException when no such service has been
    instantiated.
    """
    if name in self.instantiated_services:
        return self.instantiated_services[name]
    raise UninstantiatedServiceException
def register_area(self, area_code, index, userdata):
    """Share a memory area with the server, making the block visible to
    the clients.
    """
    # The original computed ctypes.sizeof(userdata) twice; once is enough.
    size = ctypes.sizeof(userdata)
    logger.info("registering area %s, index %s, size %s" % (area_code, index, size))
    return self.library.Srv_RegisterArea(self.pointer, area_code, index,
                                         ctypes.byref(userdata), size)
def Search(self, text, wholewords=0, titleonly=0):
    """Perform a full-text search on the archive.

    :param text: the word to look for.
    :param wholewords: search for whole words only.
    :param titleonly: restrict the search to page titles.
    :return: a tuple (partial_results, results_dict), or None when there
        is no search text or no open file.
    """
    # `text` truthiness already excludes '' — the old `text != ''` check
    # was redundant.
    if text and self.file:
        return extra.search(self.file, text, wholewords, titleonly)
    return None
def stop(self):
    """Stop the container gracefully.

    First all entrypoints are asked to ``stop()`` so no new worker
    threads start; then the container waits for active workers to
    complete, stops dependency providers and subextensions, and finally
    kills any remaining managed threads.
    """
    if self._died.ready():
        _log.debug('already stopped %s', self)
        return

    if self._being_killed:
        # a kill is in progress; wait for it to finish instead.
        _log.debug('already being killed %s', self)
        try:
            self._died.wait()
        except:
            pass  # don't re-raise the death exception here
        return

    _log.debug('stopping %s', self)
    with _log_time('stopped %s', self):
        self.entrypoints.all.stop()
        self._worker_pool.waitall()
        self.dependencies.all.stop()
        self.subextensions.all.stop()
        self._kill_managed_threads()
        self.started = False
        if not self._died.ready():
            self._died.send(None)
def add_tunnel_port(self, name, tunnel_type, remote_ip, local_ip=None, key=None, ofport=None):
    """Create a tunnel port.

    :param name: Port name to be created
    :param tunnel_type: Type of tunnel (gre or vxlan)
    :param remote_ip: Remote IP address of tunnel
    :param local_ip: Local IP address of tunnel
    :param key: Key of GRE or VNI of VxLAN
    :param ofport: Requested OpenFlow port number
    """
    # Build the options column explicitly instead of with the fragile
    # `'%(var)s' % locals()` pattern, which silently depends on local
    # variable names.
    options = 'remote_ip={}'.format(remote_ip)
    if key:
        options += ',key={}'.format(key)
    if local_ip:
        options += ',local_ip={}'.format(local_ip)

    args = ['Interface', name, 'type=%s' % tunnel_type, 'options:%s' % options]
    if ofport:
        args.append('ofport_request={}'.format(ofport))

    command_add = ovs_vsctl.VSCtlCommand('add-port', (self.br_name, name))
    command_set = ovs_vsctl.VSCtlCommand('set', args)
    self.run_command([command_add, command_set])
def disable(self, retain_port=False):
    """Teardown DHCP for this network.

    Disable DHCP by updating the remote server, then destroy any local
    device and namespace (skipped when *retain_port* is true).
    """
    self.update_server(disabled=True)
    if retain_port:
        # Keep the local device/namespace intact.
        return
    self.update_device(disabled=True)
    if self.conf.dhcp_delete_namespaces and self.network.namespace:
        ns_ip = ip_lib.IPWrapper(self.root_helper, self.network.namespace)
        try:
            ns_ip.netns.delete(self.network.namespace)
        except RuntimeError:
            msg = _('Failed trying to delete namespace: %s')
            LOG.exception(msg, self.network.namespace)
def getStreamNetworkAsWkt(self, session, withNodes=True):
    """Retrieve the stream network geometry in Well Known Text format.

    Args:
        session (:mod:`sqlalchemy.orm.session.Session`): SQLAlchemy
            session object bound to a PostGIS enabled database.
        withNodes (bool, optional): Include nodes. Defaults to True.

    Returns:
        str: Well Known Text string.
    """
    # NOTE(review): 'GEOMCOLLECTION' is not the standard WKT keyword
    # ('GEOMETRYCOLLECTION'); kept as-is since consumers may rely on it.
    wkt_list = []
    for link in self.streamLinks:
        wkt_link = link.getAsWkt(session)
        if wkt_link:
            wkt_list.append(wkt_link)
        if withNodes:
            for node in link.nodes:
                wkt_node = node.getAsWkt(session)
                if wkt_node:
                    wkt_list.append(wkt_node)
    return 'GEOMCOLLECTION ({0})'.format(', '.join(wkt_list))
def filter_product_filename_generator(obs_info, nn):
    """Generate image and sourcelist filenames for filter products.

    Parameters
    ----------
    obs_info : list
        [proposal_id, visit_id, instrument, detector, filter].
    nn : string
        Single-exposure image number (unused here; kept for signature
        parity with single_exposure_product_filename_generator()).

    Returns
    -------
    product_filename_dict : dictionary
        Generated "image" and "source catalog" filenames.
    """
    # Renamed the local that previously shadowed the builtin `filter`.
    proposal_id, visit_id, instrument, detector, filter_name = obs_info[:5]
    image_name = "hst_{}_{}_{}_{}_{}.fits".format(
        proposal_id, visit_id, instrument, detector, filter_name)
    return {
        "image": image_name,
        "source catalog": image_name.replace(".fits", ".cat"),
    }
def iscsi_resource(self):
    """Reference to the BIOS iSCSI resource instance.

    Calculated when first queried; reset on refresh.
    """
    path = utils.get_subresource_path_by(self, ["Oem", "Hpe", "Links", "iScsi"])
    return iscsi.ISCSIResource(self._conn, path,
                               redfish_version=self.redfish_version)
def get_logger(self, name="deployment-logger", level=logging.DEBUG):
    """Return a logger that logs to stdout at *level*.

    Idempotent: repeated calls with the same name no longer stack
    duplicate stream handlers (which caused each line to be logged
    multiple times).
    """
    logger = logging.getLogger(name)
    if not logger.handlers:
        fmt = logging.Formatter("%(asctime)s %(funcName)s "
                                "%(levelname)s: %(message)s")
        handler = logging.StreamHandler(stream=sys.stdout)
        handler.setLevel(level)
        handler.setFormatter(fmt)
        logger.addHandler(handler)
    logger.setLevel(level)
    return logger
def ascent(self):
    """Return the total ascent of the workout in meters (sum of the
    positive altitude deltas between consecutive points)."""
    points = self.altitude_points()
    return sum((max(0.0, after - before)
                for before, after in zip(points, points[1:])), 0.0)
def _load(self):
    """Load the MODIS RSR data for the requested band."""
    # Shortwave and EOS-Aqua responses are stored with a 1/1000 scale.
    if self.is_sw or self.platform_name == 'EOS-Aqua':
        scale = 0.001
    else:
        scale = 1.0
    self.rsr = read_modis_response(self.requested_band_filename, scale)
    if self._sort:
        self.sort()
def _create_body(self, name, flavor=None, volume=None, databases=None, users=None, version=None, type=None): if flavor is None: flavor = 1 flavor_ref = self.api._get_flavor_ref(flavor) if volume is None: volume = 1 if databases is None: databases = [] if users is None: users = [] body = {"instance": { "name": name, "flavorRef": flavor_ref, "volume": {"size": volume}, "databases": databases, "users": users, }} if type is not None or version is not None: required = (type, version) if all(required): body['instance']['datastore'] = {"type": type, "version": version} else: raise exc.MissingCloudDatabaseParameter("Specifying a datastore" " requires both the datastore type as well as the version.") return body
Used to create the dict required to create a Cloud Database instance.
def renew_local_branch(branch, start_point, remote=False):
    """Make a fresh local *branch* from *start_point*.

    start_point is a git "commit-ish" (branch, tag, commit). Any
    existing local branch of that name is removed first. If *remote* is
    true the new branch is pushed to origin.
    """
    if branch in branches():
        # can't delete the branch while it is checked out
        checkout(start_point)
        delete(branch, force=True, remote=remote)
    result = new_local_branch(branch, start_point)
    if remote:
        publish(branch)
    return result
def example_method(self, i3s_output_list, i3s_config):
    """Return a response dict for i3bar.

    An empty 'full_text' is not displayed; put text there to show
    something. See http://i3wm.org/docs/i3bar-protocol.html.
    """
    return {
        'cached_until': time() + self.cache_timeout,
        'full_text': self.format.format(output='example'),
    }
def get_structure_with_nodes(self):
    """Return a copy of the structure with the voronoi nodes inserted;
    the node species is the DummySpecie "X"."""
    decorated = Structure.from_sites(self.structure)
    for node in self.vnodes:
        decorated.append("X", node.frac_coords)
    return decorated
def create(self, article, attachment, inline=False, file_name=None, content_type=None):
    """Create an attachment attached to an article.

    :param article: Numeric article id or :class:`Article` object.
    :param attachment: File object or os path to file.
    :param inline: If true, show in the admin UI for inline attachments
        (referencable from the article HTML); otherwise list among the
        article's attachments. Default `False`.
    :param file_name: optional filename for the upload.
    :param content_type: optional content type, e.g. `image/png`.
    :return: :class:`ArticleAttachment` object.
    """
    request = HelpdeskAttachmentRequest(self)
    return request.post(self.endpoint.create,
                        article=article,
                        attachments=attachment,
                        inline=inline,
                        file_name=file_name,
                        content_type=content_type)
def get_repo_revision():
    """Return a mercurial revision string like `hg identify` does.

    Format: "rev1:short-id1[+];rev2:short-id2[+]". Returns '' when no
    repository is found, mercurial is unavailable, or anything else goes
    wrong.
    """
    repopath = _findrepo()
    if not repopath:
        return ''
    try:
        import mercurial.hg, mercurial.ui, mercurial.scmutil
        from mercurial.node import short as hexfunc
    except ImportError:
        pass
    else:
        ui = mercurial.ui.ui()
        repo = mercurial.hg.repository(ui, repopath)
        parents = repo[None].parents()
        # Py3 fix: filter() returns a lazy object that is always truthy,
        # so the old `filter(None, repo.status()) and "+" or ""` always
        # produced "+". any() restores "dirty working copy only".
        changed = "+" if any(repo.status()) else ""
        return ';'.join(['%s:%s%s' % (p.rev(), hexfunc(p.node()), changed)
                         for p in parents])
    return ''
def singledispatch(*, nargs=None, nouts=None, ndefs=None):
    """singledispatch decorator combining functools.singledispatch and func."""
    def wrapper(f):
        dispatcher = SingleDispatchFunction(f, nargs=nargs, nouts=nouts, ndefs=ndefs)
        return wraps(f)(dispatcher)
    return wrapper
def rmSelf(f):
    """Decorator: call *f* with its first positional argument removed.

    :param f: function to wrap.
    :returns: wrapper that drops its first positional argument.
    """
    from functools import wraps

    # wraps() preserves f's name/docstring on the wrapper (the original
    # decorator lost them).
    @wraps(f)
    def new_f(*args, **kwargs):
        return f(*args[1:], **kwargs)
    return new_f
def parse_summary(content, reference_id=None):
    """Extract the summary from the `content` of the cable.

    Returns ``None`` if no summary can be found.

    `content`
        The content of the cable.
    `reference_id`
        The reference identifier of the cable.
    """
    summary = None
    end_match = _END_SUMMARY_PATTERN.search(content)
    if end_match:
        end_of_summary = end_match.start()
        start_match = (_START_SUMMARY_PATTERN.search(content, 0, end_of_summary)
                       or _ALTERNATIVE_START_SUMMARY_PATTERN.search(content, 0, end_of_summary))
        if start_match:
            summary = content[start_match.end():end_of_summary]
        elif reference_id not in _CABLES_WITH_MALFORMED_SUMMARY:
            logger.debug('Found "end of summary" but no start in "%s", content: "%s"' % (reference_id, content[:end_of_summary]))
    else:
        # No explicit end marker: fall back to the one-shot pattern.
        fallback = _PARSE_SUMMARY_PATTERN.search(content)
        if fallback:
            summary = content[fallback.start(1):fallback.end(1)]
    if summary:
        summary = _CLEAN_SUMMARY_CLS_PATTERN.sub(u'', summary)
        summary = _CLEAN_SUMMARY_PATTERN.sub(u' ', summary)
        summary = _CLEAN_SUMMARY_WS_PATTERN.sub(u' ', summary)
        summary = summary.strip()
    return summary
def attach_framebuffer(self, screen_id, framebuffer):
    """Set the graphics update target for a screen.

    in screen_id of type int
    in framebuffer of type :class:`IFramebuffer`
    return id_p of type str
    """
    if not isinstance(screen_id, baseinteger):
        raise TypeError("screen_id can only be an instance of type baseinteger")
    if not isinstance(framebuffer, IFramebuffer):
        raise TypeError("framebuffer can only be an instance of type IFramebuffer")
    return self._call("attachFramebuffer", in_p=[screen_id, framebuffer])
def create_archiver(typename):
    """Return the registered archiver for *typename*.

    :API: public

    Supported typenames include 'tar', 'tgz', 'tbz2', 'zip' and 'jar'
    (a light way of zipping inputs into a jar without a Manifest; see
    JarTool / JarTask for advanced usage).

    :raises ValueError: if no archiver is registered for *typename*.
    """
    archiver = _ARCHIVER_BY_TYPE.get(typename)
    if not archiver:
        raise ValueError('No archiver registered for {!r}'.format(typename))
    return archiver
def resume_writing(self):
    """Called by the transport when the send buffer has room again."""
    if self._can_send.is_set():
        return
    self._can_send.set()
    self.transport.resume_reading()
def patch(self, request, format=None):
    """Update an existing Channel."""
    data = request.data.copy()
    try:
        ct = ChatType.objects.get(id=data.pop("chat_type"))
        data["chat_type"] = ct
    except ChatType.DoesNotExist:
        return typeNotFound404
    if not self.is_path_unique(data["id"], data["publish_path"], ct.publish_path):
        return notUnique400
    try:
        c = Channel.objects.get(id=data.pop("id"))
    except Channel.DoesNotExist:
        return channelNotFound404
    # Apply all remaining fields to the channel.
    for key, value in data.items():
        setattr(c, key, value)
    c.save()
    self.handle_webhook(c)
    return Response(
        {
            "text": "Channel saved.",
            "method": "PATCH",
            "saved": ChannelCMSSerializer(c).data,
        },
        200,
    )
def take_profit_replace(self, accountID, orderID, **kwargs):
    """Shortcut to replace a pending Take Profit Order in an Account.

    Args:
        accountID: the ID of the Account.
        orderID: the ID of the Take Profit Order to replace.
        kwargs: arguments to create a TakeProfitOrderRequest.

    Returns:
        v20.response.Response with the results of the request.
    """
    replacement = TakeProfitOrderRequest(**kwargs)
    return self.replace(accountID, orderID, order=replacement)
def parse_file(file):
    """Yield a dict of information for each subtag described by an open
    IANA subtag-registry file."""
    record = []
    for raw in file:
        line = raw.rstrip('\n')
        if line == '%%':
            # Record separator: flush the accumulated record.
            yield from parse_item(record)
            record.clear()
        elif line.startswith(' '):
            # Continuation line: fold into the previous field.
            record[-1] += line[1:]
        else:
            record.append(line)
    # The final record has no trailing separator.
    yield from parse_item(record)
def lookup(alias):
    """Find a matcher callable associated to the given alias.

    If no exact match exists, tries a normalized form and finally the
    normalized form with underscores removed; returns None otherwise.
    """
    if alias in matchers:
        return matchers[alias]
    norm = normalize(alias)
    if norm in normalized:
        return matchers[normalized[norm]]
    if '_' in alias:
        return lookup(normalize(alias).replace('_', ''))
    return None
def assign_ranks_to_grid(grid, ranks):
    """Replace each binary-string cell of a 2D *grid* (cells may also be
    lists of binary strings) with the rank of its cluster from *ranks*.

    "0b0" and "-0b1" always map to ranks 0 and -1. Unlike the original,
    the caller's *ranks* dict is no longer mutated.
    """
    # Work on a private copy so the caller's dict keeps its contents.
    lookup = dict(ranks)
    lookup["0b0"] = 0
    lookup["-0b1"] = -1
    assignments = deepcopy(grid)
    for i, row in enumerate(grid):
        for j, cell in enumerate(row):
            if type(cell) is list:
                assignments[i][j] = [lookup[b] for b in cell]
            else:
                assignments[i][j] = lookup[cell]
    return assignments
def reset(self):
    """Reset all state variables to their initial values."""
    self.v = self.c
    self.u = self.b * self.v
    self.fired = 0.0
    self.current = self.bias
def init_app(self, app, path='templates.yaml'):
    """Initialize the Ask app: set configuration, load templates from
    *path*, and map the Ask route to a Flask view.

    Configuration is read from Flask's config: ASK_APPLICATION_ID
    (application ID verification; default None = disabled with a
    warning), ASK_VERIFY_REQUESTS (Alexa request verification; default
    True, keep enabled in production), ASK_VERIFY_TIMESTAMP_DEBUG
    (skip timestamp verification while debugging; default False) and
    ASK_PRETTY_DEBUG_LOGS (pretty-print debug logs; default False).
    """
    if self._route is None:
        raise TypeError("route is a required argument when app is not None")
    self.app = app
    app.ask = self
    app.add_url_rule(self._route, view_func=self._flask_view_func, methods=['POST'])
    app.jinja_loader = ChoiceLoader([app.jinja_loader, YamlLoader(app, path)])
def extended_capabilities(self):
    """Return the connected emulator's extended capabilities as a list.

    Allocates a 32-byte ctypes buffer, has the J-Link DLL fill it via
    ``JLINKARM_GetEmuCapsEx``, and converts the bytes to Python ints.

    Returns:
        ``list`` of 32 integers encoding the extended capability bits.
    """
    buf = (ctypes.c_uint8 * 32)()  # zero-initialized byte buffer for the DLL
    self._dll.JLINKARM_GetEmuCapsEx(buf, 32)
    return list(buf)
Gets the capabilities of the connected emulator as a list. Args: self (JLink): the ``JLink`` instance Returns: List of 32 integers which define the extended capabilities based on their value and index within the list.
def _do_lumping(self):
    """Perform the PCCA+ lumping by optimizing the transformation matrix A.

    Sets the following member variables:
      A_ : the transformation matrix
      chi_ : the (fuzzy) membership matrix
      microstate_mapping_ : argmax assignment of microstates to macrostates
    """
    # Keep only the leading eigenvectors, one per requested macrostate.
    right_eigenvectors = self.right_eigenvectors_[:, :self.n_macrostates]
    # Pick representative microstates spanning the eigenvector simplex.
    index = index_search(right_eigenvectors)
    A = right_eigenvectors[index, :]
    A = inv(A)
    A = fill_A(A, right_eigenvectors)
    if self.do_minimization:
        A = self._optimize_A(A)  # optional refinement of A
    self.A_ = fill_A(A, right_eigenvectors)
    self.chi_ = dot(right_eigenvectors, self.A_)
    # Hard assignment: most probable macrostate for each microstate.
    self.microstate_mapping_ = np.argmax(self.chi_, 1)
Perform PCCA+ algorithm by optimizing transformation matrix A. Creates the following member variables: ------- A : ndarray The transformation matrix. chi : ndarray The membership matrix microstate_mapping : ndarray Mapping from microstates to macrostates.
def as_stream(self):
    """Convert this selector into the single DataStream it matches.

    Only valid for a singular selector; otherwise ArgumentError is raised.
    """
    if not self.singular:
        raise ArgumentError("Attempted to convert a non-singular selector to a data stream, it matches multiple", selector=self)
    system_only = self.match_spec == DataStreamSelector.MatchSystemOnly
    return DataStream(self.match_type, self.match_id, system_only)
Convert this selector to a DataStream. This function will only work if this is a singular selector that matches exactly one DataStream.
def oracle_approximating(self):
    """Shrink the sample covariance using Oracle Approximating Shrinkage.

    Stores the shrinkage coefficient on ``self.delta`` as a side effect.

    :return: shrunk sample covariance matrix
    :rtype: np.ndarray
    """
    observations = np.nan_to_num(self.X.values)
    shrunk, self.delta = covariance.oas(observations)
    return self.format_and_annualise(shrunk)
Calculate the Oracle Approximating Shrinkage estimate :return: shrunk sample covariance matrix :rtype: np.ndarray
def create_supercut_in_batches(composition, outputfile, padding):
    """Create video clips in groups of BATCH_SIZE, concatenate the batch
    files, and write the finished supercut to *outputfile*.

    A batch that fails to render is skipped (best-effort, preserving the
    original behaviour) instead of aborting the whole supercut.

    :param composition: list of clip descriptions for create_supercut
    :param outputfile: path of the final video file
    :param padding: padding passed through to create_supercut
    """
    total_clips = len(composition)
    start_index = 0
    end_index = BATCH_SIZE
    batch_comp = []
    while start_index < total_clips:
        filename = outputfile + '.tmp' + str(start_index) + '.mp4'
        try:
            create_supercut(composition[start_index:end_index], filename, padding)
            batch_comp.append(filename)
            gc.collect()
        except Exception:
            # Best-effort: skip a failed batch. (Original used a bare
            # `except:` and a stray no-op `next` statement; both branches
            # advanced the window identically, so the advance is hoisted.)
            pass
        start_index += BATCH_SIZE
        end_index += BATCH_SIZE

    # Concatenate the rendered batch files into the final output.
    clips = [VideoFileClip(filename) for filename in batch_comp]
    video = concatenate(clips)
    video.to_videofile(outputfile, codec="libx264", temp_audiofile='temp-audio.m4a', remove_temp=True, audio_codec='aac')

    # Remove temporary batch files and per-batch log files.
    for filename in batch_comp:
        os.remove(filename)
    cleanup_log_files(outputfile)
Create & concatenate video clips in groups of size BATCH_SIZE and output finished video file to output directory.
def updated_dimensions(self):
    """Inform montblanc about the problem's dimension sizes.

    NOTE(review): reads the module-level ``args`` namespace and the
    ``lm_coords`` sequence — assumes both exist at call time; verify
    against the enclosing script.
    """
    return [("ntime", args.ntime), ("nchan", args.nchan), ("na", args.na), ("npsrc", len(lm_coords))]
Inform montblanc about dimension sizes
def string_to_response(content_type):
    """Decorator factory wrapping a view-like function that returns a string.

    The string result is marshalled into an HttpResponse with the given
    Content-Type; a view that already returns an HttpResponse is passed
    through. An HttpBadRequestException raised by the view is turned into
    an HttpResponseBadRequest.
    """
    def outer_wrapper(req_function):
        @wraps(req_function)
        def newreq(request, *args, **kwargs):
            try:
                result = req_function(request, *args, **kwargs)
                if issubclass(result.__class__, HttpResponse):
                    response = result
                else:
                    response = HttpResponse()
                    response.write(result)
                    response['Content-Length'] = str(len(response.content))
                    response['Content-Type'] = content_type
            except HttpBadRequestException as bad_request:
                response = HttpResponseBadRequest(bad_request.message)
            return response
        return newreq
    return outer_wrapper
Wrap a view-like function that returns a string and marshalls it into an HttpResponse with the given Content-Type If the view raises an HttpBadRequestException, it will be converted into an HttpResponseBadRequest.
def handle_update(self, args):
    """Callback invoked by the main event loop for an event from this keypad.

    *args* is ``[component, action, *params]`` as strings. Dispatches to
    the matching component's handler; returns False for unknown components.
    """
    component = int(args[0])
    action = int(args[1])
    params = [int(x) for x in args[2:]]
    _LOGGER.debug("Updating %d(%s): c=%d a=%d params=%s" % (
        self._integration_id, self._name, component, action, params))
    if component not in self._components:
        return False
    return self._components[component].handle_update(action, params)
The callback invoked by the main event loop if there's an event from this keypad.
def all(cls, client, **kwargs):
    """Fetch all option positions, following pagination.

    Optional kwargs:
        max_date: stop once the newest fetched record is older than this
        max_fetches: cap on the number of page requests
    """
    max_date = kwargs.get('max_date')
    max_fetches = kwargs.get('max_fetches')
    url = 'https://api.robinhood.com/options/positions/'
    data = client.get(url, params={})
    results = data["results"]
    if is_max_date_gt(max_date, results[-1]['updated_at'][0:10]):
        return results
    if max_fetches == 1:
        return results
    fetches = 1
    while data["next"]:
        fetches += 1
        data = client.get(data["next"])
        results.extend(data["results"])
        if is_max_date_gt(max_date, results[-1]['updated_at'][0:10]):
            return results
        if max_fetches and (fetches >= max_fetches):
            return results
    return results
fetch all option positions
def get_redshift(self, dist):
    """Return the redshift for the given distance(s).

    Tries a fast "nearby" interpolant first (building it lazily on the
    first call), falls back to a "faraway" interpolant for distances the
    first could not resolve, and finally computes any remainder directly
    with ``_redshift``. Accepts scalars or arrays; the return matches the
    input form.

    Raises:
        ValueError: if any distance is non-finite or <= 0.
    """
    # Normalize input to an array, remembering whether it was scalar.
    dist, input_is_array = ensurearray(dist)
    try:
        zs = self.nearby_d2z(dist)
    except TypeError:
        # Interpolant not built yet; build lazily, then retry.
        self.setup_interpolant()
        zs = self.nearby_d2z(dist)
    # NaNs mark distances outside the nearby interpolant's range.
    replacemask = numpy.isnan(zs)
    if replacemask.any():
        zs[replacemask] = self.faraway_d2z(dist[replacemask])
    # Any remaining NaNs fall outside both interpolants: compute directly.
    replacemask = numpy.isnan(zs)
    if replacemask.any():
        if not (dist > 0.).all() and numpy.isfinite(dist).all():
            raise ValueError("distance must be finite and > 0")
        zs[replacemask] = _redshift(dist[replacemask], cosmology=self.cosmology)
    return formatreturn(zs, input_is_array)
Returns the redshift for the given distance.
def phase_type(self, value):
    """Setter for the horizontal waveform compression mode.

    One of ``"normal"``, ``"resync"``, ``"resync2"``.
    """
    self._params.phase_type = value
    # Direct assignment disables the overwrite-protection lock.
    self._overwrite_lock.disable()
compresses the waveform horizontally; one of ``"normal"``, ``"resync"``, ``"resync2"``
def product(target, prop1, prop2, **kwargs):
    r"""Calculate the product of multiple property values.

    Parameters
    ----------
    target : OpenPNM Object
        The object this model is associated with; provides access to the
        named properties.
    prop1 : string
        The name of the first property.
    prop2 : string
        The name of the second property.

    Notes
    -----
    Further properties beyond ``prop1`` and ``prop2`` may be supplied as
    extra keyword arguments (e.g. ``prop3='pore.foo'``).
    """
    props = [prop1, prop2] + list(kwargs.values())
    result = target[props[0]]
    for prop in props[1:]:
        result = result * target[prop]
    return result
r""" Calculates the product of multiple property values Parameters ---------- target : OpenPNM Object The object which this model is associated with. This controls the length of the calculated array, and also provides access to other necessary properties. prop1 : string The name of the first argument prop2 : string The name of the second argument Notes ----- Additional properties can be specified beyond just ``prop1`` and ``prop2`` by including additional arguments in the function call (i.e. ``prop3 = 'pore.foo'``).
def getPeers(self, offset=0, limit=1000):
    """Return a page of peers using an SQL offset and limit.

    Rows are ordered by URL; each is wrapped in a peer datamodel object.

    :param offset: number of rows to skip
    :param limit: maximum number of rows to return
    :return: list of ``peers.Peer`` objects
    """
    select = models.Peer.select().order_by(
        models.Peer.url).limit(limit).offset(offset)
    return [peers.Peer(p.url, record=p) for p in select]
Get the list of peers using an SQL offset and limit. Returns a list of peer datamodel objects in a list.
def prepare_sort_key(self):
    """Create this converter's sort key.

    Triggered by view_function._sort_converters when converters are
    sorted; cannot run in the constructor because Django models might not
    be ready yet. String-based converter types ("app.Model") are resolved
    to model classes here.

    Raises:
        ImproperlyConfigured: on a malformed type string or unknown model.
    """
    if isinstance(self.convert_type, str):
        try:
            app_name, model_name = self.convert_type.split('.')
        except ValueError:
            raise ImproperlyConfigured('"{}" is not a valid converter type. String-based converter types must be specified in "app.Model" format.'.format(self.convert_type))
        try:
            self.convert_type = apps.get_model(app_name, model_name)
        except LookupError as e:
            raise ImproperlyConfigured('"{}" is not a valid model name. {}'.format(self.convert_type, e))
    # Key: deeper MRO (more-derived models) first, then declaration order;
    # both negated so an ascending sort yields that precedence.
    self.sort_key = (
        -1 * len(inspect.getmro(self.convert_type)),
        -1 * self.source_order
    )
Triggered by view_function._sort_converters when our sort key should be created. This can't be called in the constructor because Django models might not be ready yet.
def list_streams(self, types=None, inactive=False):
    """List the calling user's streams.

    :param types: optional list of stream types to filter on; defaults to
        no filtering. (Was a mutable ``[]`` default — fixed to ``None``.)
    :param inactive: include inactive streams when True
    :return: tuple of (HTTP status code, response body)
    """
    if types is None:
        types = []
    req_hook = 'pod/v1/streams/list'
    json_query = {
        "streamTypes": types,
        "includeInactiveStreams": inactive
    }
    req_args = json.dumps(json_query)
    status_code, response = self.__rest__.POST_query(req_hook, req_args)
    self.logger.debug('%s: %s' % (status_code, response))
    return status_code, response
list user streams
def _evaluate_usecols(usecols, names): if callable(usecols): return {i for i, name in enumerate(names) if usecols(name)} return usecols
Check whether or not the 'usecols' parameter is a callable. If so, enumerates the 'names' parameter and returns a set of indices for each entry in 'names' that evaluates to True. If not a callable, returns 'usecols'.
def to_flat(coord):
    """Convert a Minigo (row, col) coordinate to a flattened index.

    ``None`` (a pass move) maps to the sentinel index ``N * N``.
    """
    if coord is None:
        return go.N * go.N
    row, col = coord
    return go.N * row + col
Converts from a Minigo coordinate to a flattened coordinate.
def remove(path):
    """Delete *path*, switching between shutil.rmtree and os.remove.

    Directories are removed recursively, files are unlinked, and a missing
    path is a silent no-op.
    """
    if os.path.isdir(path):
        shutil.rmtree(path)
    elif os.path.isfile(path):
        os.remove(path)
Wrapper that switches between os.remove and shutil.rmtree depending on whether the provided path is a file or directory.
def as_minimized(values: List[float], maximized: List[bool]) -> List[float]:
    """Return the values vector with maximized entries negated.

    Entries whose corresponding flag in *maximized* is True are multiplied
    by -1 so that every objective can be treated as a minimization.
    """
    out = []
    for val, flip in zip(values, maximized):
        out.append(val * -1. if flip else val)
    return out
Return the values vector converted to minimization form: entries whose corresponding flag in ``maximized`` is True are negated, so every objective can be treated as a minimization.
def load_data():
    """Load the Boston housing dataset, split 75/25, and standardize.

    Features and targets are each standardized with scalers fit on the
    training split only.

    :return: (X_train, X_test, y_train, y_test) as standardized arrays
    """
    boston = load_boston()
    split = train_test_split(boston.data, boston.target, random_state=99, test_size=0.25)
    X_train, X_test, y_train, y_test = split
    scaler_X, scaler_y = StandardScaler(), StandardScaler()
    X_train = scaler_X.fit_transform(X_train)
    X_test = scaler_X.transform(X_test)
    y_train = scaler_y.fit_transform(y_train[:, None])[:, 0]
    y_test = scaler_y.transform(y_test[:, None])[:, 0]
    return X_train, X_test, y_train, y_test
Load the Boston housing dataset, split it into train/test sets (75/25), and standardize both the features and the targets using scalers fit on the training split.
def _should_ignore(self, path): for ignore in self.options.ignores: if fnmatch.fnmatch(path, ignore): return True return False
Return True iff path should be ignored.
def preview(self, obj, request=None):
    """Generate the HTML to display for the object's thumbnail image.

    Returns an ``<img>`` tag for the generated thumbnail, or '' when there
    is no source image, ``easy_thumbnails`` is not installed, or thumbnail
    generation fails (the exception text is returned instead when
    ``thumbnail_show_exceptions`` is set).

    :param obj: an object with a thumbnail_field defined
    :param request: unused here; accepted for admin-callable compatibility
    :return: HTML string for image display (possibly empty)
    """
    source = self.get_thumbnail_source(obj)
    if source:
        try:
            # Imported lazily so the admin still loads without the
            # optional easy_thumbnails dependency.
            from easy_thumbnails.files import get_thumbnailer
        except ImportError:
            logger.warning(
                _(
                    '`easy_thumbnails` is not installed and required for '
                    'icekit.admin_tools.mixins.ThumbnailAdminMixin'
                )
            )
            return ''
        try:
            thumbnailer = get_thumbnailer(source)
            thumbnail = thumbnailer.get_thumbnail(self.thumbnail_options)
            return '<img class="thumbnail" src="{0}" />'.format(
                thumbnail.url)
        except Exception as ex:
            # Generation failed (bad image, storage error, ...): log and
            # optionally surface the exception text in the admin.
            logger.warning(
                _(u'`easy_thumbnails` failed to generate a thumbnail image'
                  u' for {0}'.format(source)))
            if self.thumbnail_show_exceptions:
                return 'Thumbnail exception: {0}'.format(ex)
    return ''
Generate the HTML to display for the image. :param obj: An object with a thumbnail_field defined. :return: HTML for image display.
def _CreateShapePointFolder(self, shapes_folder, shape):
    """Create a KML Folder containing a placemark for every shape point.

    Args:
      shapes_folder: a KML Shape Folder ElementTree.Element instance
      shape: the shape to plot

    Returns:
      The created Folder ElementTree.Element instance.
    """
    folder = self._CreateFolder(shapes_folder, shape.shape_id + ' Shape Points', visible=False)
    # Placemarks are numbered from 1 in shape-point order.
    for seq, (lat, lon, dist) in enumerate(shape.points, start=1):
        placemark = self._CreatePlacemark(folder, str(seq))
        point = ET.SubElement(placemark, 'Point')
        ET.SubElement(point, 'coordinates').text = '%.6f,%.6f' % (lon, lat)
    return folder
Create a KML Folder containing all the shape points in a shape. The folder contains placemarks for each shapepoint. Args: shapes_folder: A KML Shape Folder ElementTree.Element instance shape: The shape to plot. Returns: The Folder ElementTree.Element instance or None.
def any_match(self, urls):
    """Check whether any of the given URLs has a matching host.

    :param urls: an iterable containing URLs
    :returns: True if any URL's hostname is contained in this matcher
    :raises InvalidURLError: if the sequence contains an invalid URL
    """
    for url in urls:
        if urlparse(url).hostname in self:
            return True
    return False
Check if any of the given URLs has a matching host. :param urls: an iterable containing URLs :returns: True if any host has a listed match :raises InvalidURLError: if there are any invalid URLs in the sequence
def QA_fetch_get_sh_margin(date):
    """Return Shanghai margin-trading data for one trading day.

    Arguments:
        date {str YYYY-MM-DD} -- date format

    Returns:
        pandas.DataFrame -- margin data with normalized column names, or
        None when *date* is not a Shanghai trading day.
    """
    if date not in trade_date_sse:
        return None
    frame = pd.read_excel(_sh_url.format(QA_util_date_str2int(date)), 1)
    frame = frame.assign(date=date).assign(sse='sh')
    frame.columns = ['code', 'name', 'leveraged_balance', 'leveraged_buyout', 'leveraged_payoff', 'margin_left', 'margin_sell', 'margin_repay', 'date', 'sse']
    return frame
return shanghai margin data Arguments: date {str YYYY-MM-DD} -- date format Returns: pandas.DataFrame -- res for margin data
def list_resources(self, device_id):
    """List all resources registered to a connected device.

    :param str device_id: the ID of the device (required)
    :returns: list of ``Resource`` objects for the device
    :rtype: list
    """
    endpoints_api = self._get_api(mds.EndpointsApi)
    raw_resources = endpoints_api.get_endpoint_resources(device_id)
    return list(map(Resource, raw_resources))
List all resources registered to a connected device. .. code-block:: python >>> for r in api.list_resources(device_id): print(r.name, r.observable, r.uri) None,True,/3/0/1 Update,False,/5/0/3 ... :param str device_id: The ID of the device (Required) :returns: A list of :py:class:`Resource` objects for the device :rtype: list
def file_handler(self, handler_type, path, prefixed_path, source_storage):
    """Queue or directly perform a copy/link of one static file.

    In faster mode, deduplicates on ``prefixed_path`` and enqueues the
    kwargs of the superclass's ``copy_file``/``link_file`` for later
    processing; otherwise dispatches to the superclass immediately.

    NOTE(review): the extent of the dedup branch was reconstructed —
    enqueue/counter are assumed to apply only to first-seen paths, by
    analogy with Django's ``collect()``; confirm against upstream.

    :param handler_type: 'link' to symlink, anything else to copy
    :param path: source-relative file path
    :param prefixed_path: destination path (with storage prefix)
    :param source_storage: storage the file is read from
    """
    if self.faster:
        # Only process each destination path once.
        if prefixed_path not in self.found_files:
            self.found_files[prefixed_path] = (source_storage, path)
            self.task_queue.put({
                'handler_type': handler_type,
                'path': path,
                'prefixed_path': prefixed_path,
                'source_storage': source_storage
            })
            self.counter += 1
    else:
        if handler_type == 'link':
            super(Command, self).link_file(path, prefixed_path, source_storage)
        else:
            super(Command, self).copy_file(path, prefixed_path, source_storage)
Create a dict with all kwargs of the `copy_file` or `link_file` method of the super class and add it to the queue for later processing.
def login_form_factory(Form, app):
    """Return an extended login form class derived from *Form*.

    The subclass forces the "remember me" field to start unchecked.

    :param Form: base login form class to extend
    :param app: application instance (not used by the subclass itself)
    :return: the extended LoginForm class
    """
    class LoginForm(Form):
        # Subclassed only to override the initial "remember" value.
        def __init__(self, *args, **kwargs):
            super(LoginForm, self).__init__(*args, **kwargs)
            self.remember.data = False
    return LoginForm
Return extended login form.
def purge(self):
    """Periodically clean old transactions until the stop event is set.

    Intended to run on its own thread: wakes every EXCHANGE_LIFETIME
    seconds and asks the message layer to purge expired state.
    """
    while not self.stopped.isSet():
        # wait() doubles as an interruptible sleep: it returns early when
        # the stop event is set, allowing prompt shutdown.
        self.stopped.wait(timeout=defines.EXCHANGE_LIFETIME)
        self._messageLayer.purge()
Clean old transactions
def delete(self):
    """Delete this multipart upload and its parts.

    Shrinks the owning bucket's accounted size, removes all Part rows
    belonging to this multipart upload, then deletes the upload row
    itself.
    """
    # Release the reserved space from the bucket's size accounting.
    self.bucket.size -= self.size
    # Remove child parts first so no orphaned rows remain.
    Part.query_by_multipart(self).delete()
    self.query.filter_by(upload_id=self.upload_id).delete()
Delete a multipart object.
def process_xml(xml_str):
    """Return a processor with Statements extracted from a Sparser XML.

    Parameters
    ----------
    xml_str : str
        The XML string obtained by reading content with Sparser in 'xml'
        output mode.

    Returns
    -------
    SparserXMLProcessor with extracted Statements, or None when the XML
    cannot be parsed.
    """
    try:
        tree = ET.XML(xml_str, parser=UTB())
    except ET.ParseError as err:
        logger.error('Could not parse XML string')
        logger.error(err)
        return None
    return _process_elementtree(tree)
Return processor with Statements extracted from a Sparser XML. Parameters ---------- xml_str : str The XML string obtained by reading content with Sparser, using the 'xml' output mode. Returns ------- sp : SparserXMLProcessor A SparserXMLProcessor which has extracted Statements as its statements attribute.
async def deactivate(cls, access_key: str) -> dict:
    """Deactivate the keypair identified by *access_key*.

    Deactivated keypairs cannot make API requests unless re-activated by
    an administrator; this operation itself requires admin privilege.

    :param access_key: access key of the keypair to deactivate
    :return: the ``modify_keypair`` result dict from the GraphQL response
    """
    # GraphQL mutation; only is_active is changed — the other props are
    # sent as None (presumably "leave unchanged"; verify against the API).
    q = 'mutation($access_key: String!, $input: ModifyKeyPairInput!) {' + \
        ' modify_keypair(access_key: $access_key, props: $input) {' \
        ' ok msg' \
        ' }' \
        '}'
    variables = {
        'access_key': access_key,
        'input': {
            'is_active': False,
            'is_admin': None,
            'resource_policy': None,
            'rate_limit': None,
        },
    }
    rqst = Request(cls.session, 'POST', '/admin/graphql')
    rqst.set_json({
        'query': q,
        'variables': variables,
    })
    async with rqst.fetch() as resp:
        data = await resp.json()
    return data['modify_keypair']
Deactivates this keypair. Deactivated keypairs cannot make any API requests unless activated again by an administrator. You need an admin privilege for this operation.
def tween(self, t):
    """Evaluate the configured tween at progress *t* (0..1).

    Dispatches on ``self.method`` through the zero-, one-, or two-parameter
    method tables; returns None when *t* is None.
    """
    if t is None:
        return None
    name = self.method
    if name in self.method_to_tween:
        return self.method_to_tween[name](t)
    if name in self.method_1param:
        return self.method_1param[name](t, self.param1)
    if name in self.method_2param:
        return self.method_2param[name](t, self.param1, self.param2)
    raise Exception("Unsupported tween method {0}".format(name))
t is number between 0 and 1 to indicate how far the tween has progressed
def _get_prepped_model_field(model_obj, field):
    """Return a model field's value prepared for the database.

    Resolves *field* by name via the model's ``_meta`` and runs the raw
    attribute value through ``get_db_prep_save`` with the default
    connection.

    :param model_obj: a Django model instance
    :param field: name of the field to prepare
    :return: the DB-prepared value
    """
    field = model_obj._meta.get_field(field)
    value = field.get_db_prep_save(getattr(model_obj, field.attname), connection)
    return value
Gets the value of a field of a model obj that is prepared for the db.