code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def visualize( logdir, outdir, num_agents, num_episodes, checkpoint=None, env_processes=True): config = utility.load_config(logdir) with tf.device('/cpu:0'): batch_env = utility.define_batch_env( lambda: _create_environment(config, outdir), num_agents, env_processes) graph = utility....
Recover checkpoint and render videos from it. Args: logdir: Logging directory of the trained algorithm. outdir: Directory to store rendered videos in. num_agents: Number of environments to simulate in parallel. num_episodes: Total number of episodes to simulate. checkpoint: Checkpoint name to loa...
def _load_properties(property_name, config_option, set_default=False, default=None): if not property_name: log.debug("No property specified in function, trying to load from salt configuration") try: options = __salt__['config.option']('cassandra') except BaseException as e: ...
Load properties for the cassandra module from config or pillar. :param property_name: The property to load. :type property_name: str or list of str :param config_option: The name of the config option. :type config_option: str :param set_default: Should a default be set if not found in config. ...
def join(self, timeout=None): if self.__wait_for_finishing_thread: if not timeout: while True: self.__wait_for_finishing_thread.join(0.5) if not self.__wait_for_finishing_thread.isAlive(): break else: ...
Blocking wait for the execution to finish :param float timeout: Maximum time to wait or None for infinitely :return: True if the execution finished, False if no state machine was started or a timeout occurred :rtype: bool
def _unpack_model(self, om): buses = om.case.connected_buses branches = om.case.online_branches gens = om.case.online_generators cp = om.get_cost_params() return buses, branches, gens, cp
Returns data from the OPF model.
def info_hash(self):
    """Return the SHA-1 hex digest of the torrent's ``info`` section.

    Also known as the torrent hash. Returns ``None`` when the torrent
    has no ``info`` section.
    """
    info_section = self._struct.get('info')
    if not info_section:
        return None
    return sha1(Bencode.encode(info_section)).hexdigest()
Hash of torrent file info section. Also known as torrent hash.
def fetchItem(self, ekey, cls=None, **kwargs): if isinstance(ekey, int): ekey = '/library/metadata/%s' % ekey for elem in self._server.query(ekey): if self._checkAttrs(elem, **kwargs): return self._buildItem(elem, cls, ekey) clsname = cls.__name__ if cls e...
Load the specified key to find and build the first item with the specified tag and attrs. If no tag or attrs are specified then the first item in the result set is returned. Parameters: ekey (str or int): Path in Plex to fetch items from. If an int is passed ...
def filesys_decode(path):
    """Ensure *path* is decoded to text; return None when no candidate encoding works.

    Tries the filesystem encoding first, then falls back to UTF-8.
    """
    if isinstance(path, six.text_type):
        return path
    primary = sys.getfilesystemencoding() or 'utf-8'
    for encoding in (primary, 'utf-8'):
        try:
            return path.decode(encoding)
        except UnicodeDecodeError:
            continue
Ensure that the given path is decoded, NONE when no expected encoding works
def get_events_attendees(self, event_ids): query = urllib.urlencode({'key': self._api_key, 'event_id': ','.join(event_ids)}) url = '{0}?{1}'.format(RSVPS_URL, query) data = self._http_get_json(url) rsvps = data['results'] return [(rsvp['event']['...
Get the attendees of the identified events. Parameters ---------- event_ids List of IDs of events to get attendees for. Returns ------- List of tuples of (event id, ``pythonkc_meetups.types.MeetupMember``). Exceptions ---------- * Py...
def status_message(self): msg = None if self.last_ddns_response in response_messages.keys(): return response_messages.get(self.last_ddns_response) if 'good' in self.last_ddns_response: ip = re.search(r'(\d{1,3}\.?){4}', self.last_ddns_response).group() msg = "...
Return friendly response from API based on response code.
def get_objgrpwr(self, goea_results):
    """Return a GrpWr object that writes grouped GOEA results."""
    sort_obj = self.get_sortobj(goea_results)
    return GrpWr(sort_obj, self.pval_fld, ver_list=self.ver_list)
Get a GrpWr object to write grouped GOEA results.
def provider(cls, note, provider=None, name=False): def decorator(provider): if inspect.isgeneratorfunction(provider): provider = cls.generator_provider.bind( provider, support_name=name) return decorator(provider) cls.register(note...
Register a provider, either a Provider class or a generator. Provider class:: from jeni import Injector as BaseInjector from jeni import Provider class Injector(BaseInjector): pass @Injector.provider('hello') class HelloProvider(Pro...
def get_primary_mac_address(): log = logging.getLogger(mod_logger + '.get_primary_mac_address') log.debug('Attempting to determine the MAC address for eth0...') try: mac_address = netifaces.ifaddresses('eth0')[netifaces.AF_LINK][0]['addr'] except Exception: _, ex, trace = sys.exc_info() ...
Determines the MAC address to use for querying the AWS meta data service for network related queries :return: (str) MAC address for the eth0 interface :raises: AWSMetaDataError
def create_filter(self, name=None, description=None, jql=None, favourite=None): data = {} if name is not None: data['name'] = name if description is not None: data['description'] = description if jql is not None: data['jql'] = jql...
Create a new filter and return a filter Resource for it. :param name: name of the new filter :type name: str :param description: useful human readable description of the new filter :type description: str :param jql: query string that defines the filter :type jql: str ...
def show_explorer(self):
    """Show, raise and refresh the explorer dock widget, if one exists."""
    widget = self.dockwidget
    if widget is None:
        return
    if widget.isHidden():
        widget.show()
    widget.raise_()
    widget.update()
Show the explorer
def solvePerfForesight(solution_next, DiscFac, LivPrb, CRRA, Rfree, PermGroFac):
    """Solve one period of a perfect-foresight consumption-saving problem.

    Parameters
    ----------
    solution_next : ConsumerSolution
        Solution to next period's one-period problem.
    DiscFac : float
        Intertemporal discount factor for future utility.
    LivPrb : float
        Survival probability to the next period.
    CRRA : float
        Coefficient of relative risk aversion.
    Rfree : float
        Risk-free interest factor.
    PermGroFac : float
        Permanent income growth factor.

    Returns
    -------
    The period's solution produced by ``ConsPerfForesightSolver.solve()``.
    """
    solver = ConsPerfForesightSolver(
        solution_next, DiscFac, LivPrb, CRRA, Rfree, PermGroFac)
    return solver.solve()
Solves a single period consumption-saving problem for a consumer with perfect foresight. Parameters ---------- solution_next : ConsumerSolution The solution to next period's one period problem. DiscFac : float Intertemporal discount factor for future utility. LivPrb : float ...
def A(self, ID):
    """Return the Antiscia of the object identified by *ID*.

    The result is built via ``self.G`` under the derived ID ``A_<ID>``.
    """
    antiscia_obj = self.chart.getObject(ID).antiscia()
    return self.G('A_%s' % ID, antiscia_obj.lat, antiscia_obj.lon)
Returns the Antiscia of an object.
def cwd(self, newdir):
    """Send the FTP CWD command and record the new working directory.

    :param newdir: directory to change to.
    """
    # NOTE(review): the "Workding" typo is preserved from the original log message.
    logger.debug('Sending FTP cwd command. New Workding Directory: {}'.format(newdir))
    self.client.cwd(newdir)
    self.state['current_dir'] = self.client.pwd()
Send the FTP CWD command :param newdir: Directory to change to
def merge_contextual(self, other): for k in self.keys(): for item in self[k]: for other_item in other.get(k, []): if isinstance(other_item, six.text_type): continue for otherk in other_item.keys(): ...
Merge in contextual info from a template Compound.
def prj_add_dep(self, *args, **kwargs): if not self.cur_prj: return dialog = DepAdderDialog(project=self.cur_prj) dialog.exec_() deps = dialog.deps for dep in deps: depdata = djitemdata.DepartmentItemData(dep) treemodel.TreeItem(depdata, self.p...
Add more departments to the project. :returns: None :rtype: None :raises: None
def kdf(size, password, salt, opslimit=OPSLIMIT_SENSITIVE, memlimit=MEMLIMIT_SENSITIVE, encoder=nacl.encoding.RawEncoder): return encoder.encode( nacl.bindings.crypto_pwhash_alg(size, password, salt, opslimit, memlimit, ...
Derive a ``size`` bytes long key from a caller-supplied ``password`` and ``salt`` pair using the argon2i memory-hard construct. the enclosing module provides the constants - :py:const:`.OPSLIMIT_INTERACTIVE` - :py:const:`.MEMLIMIT_INTERACTIVE` - :py:const:`.OPSLIMIT_MODERATE` ...
def login(self):
    """Perform cookie-based user login.

    POSTs the stored credentials to the session URL and raises (via
    ``raise_for_status``) on a non-success response.
    """
    credentials = {'name': self._username, 'password': self._password}
    response = super(CookieSession, self).request(
        'POST', self._session_url, data=credentials)
    response.raise_for_status()
Perform cookie based user login.
def make_ascii(word):
    """Convert unicode-specific characters in *word* to their closest ASCII equivalents.

    Characters with no ASCII equivalent after NFKD decomposition are dropped.
    """
    if sys.version_info < (3, 0, 0):
        text = unicode(word)
    else:
        text = str(word)
    decomposed = unicodedata.normalize('NFKD', text)
    return decomposed.encode('ascii', 'ignore').decode('utf-8')
Converts unicode-specific characters to their equivalent ascii
def getCellStr(self, x, y): c = self.board.getCell(x, y) if c == 0: return '.' if self.__azmode else ' .' elif self.__azmode: az = {} for i in range(1, int(math.log(self.board.goal(), 2))): az[2 ** i] = chr(i + 96) if c not in az: ...
return a string representation of the cell located at x,y.
def floating_point( element_name, attribute=None, required=True, alias=None, default=0.0, omit_empty=False, hooks=None ): value_parser = _number_parser(float) return _PrimitiveValue( element_name, value_parser, attribute, re...
Create a processor for floating point values. See also :func:`declxml.boolean`
def __get_switch_arr(work_sheet, row_num):
    """Collect header names for filter columns whose cell in *row_num* equals 1.

    For each column letter in ``FILTER_COLUMNS``, if the cell at *row_num*
    holds ``1`` (int or str), the first comma-separated token of that
    column's header (row 1) is added to the result.
    """
    headers = []
    for column in FILTER_COLUMNS:
        if work_sheet['{0}{1}'.format(column, row_num)].value in [1, '1']:
            header_value = work_sheet['{0}1'.format(column)].value
            headers.append(header_value.strip().split(',')[0])
    return headers
If the value of the column of the row is `1`, it will be added to the array.
def get_parser(description, input_desc): parser = ArgumentParser(description=description) parser.add_argument( dest='input_file', help=input_desc ) parser.add_argument( '-r', '--readers', choices=['reach', 'sparser', 'trips'], help='List of readers to be used....
Get a parser that is generic to reading scripts. Parameters ---------- description : str A description of the tool, usually about one line long. input_desc: str A string describing the nature of the input file used by the reading tool. Returns ------- parser : argpa...
def get_sessions(self, app_path=None):
    """Return active sessions for one application path, or for all applications.

    :param app_path: configured application path to filter on; when None,
        sessions for every registered application are returned.
    """
    if app_path is not None:
        return self._tornado.get_sessions(app_path)
    sessions = []
    for path in self._tornado.app_paths:
        sessions.extend(self._tornado.get_sessions(path))
    return sessions
Gets all currently active sessions for applications. Args: app_path (str, optional) : The configured application path for the application to return sessions for. If None, return active sessions for all applications. (default: None) Returns: ...
def get_remote_member(self, member=None): cluster_params = self.get_standby_cluster_config() if cluster_params: name = member.name if member else 'remote_master:{}'.format(uuid.uuid1()) data = {k: v for k, v in cluster_params.items() if k in RemoteMember.allowed_keys()} ...
In the case of a standby cluster this will tell us which remote master to stream from. Config can be either patroni config or cluster.config.data
def get_number_unit(number):
    """Return the decimal unit (place value) of *number*'s least significant digit.

    E.g. 3.14 -> 0.01, 0.5 -> 0.1, 7 -> 1.0.
    """
    whole, frac = str(float(number)).split('.')
    if float(frac) == 0:
        return float(1)
    return float('0.' + '0' * (len(frac) - 1) + '1')
get the unit of number
def _create_drawables(self, tokensource): lineno = charno = maxcharno = 0 for ttype, value in tokensource: while ttype not in self.styles: ttype = ttype.parent style = self.styles[ttype] value = value.expandtabs(4) lines = value.splitlines(...
Create drawables for the token content.
def get_protocol_from_name(name):
    """Return the protocol class registered under *name*.

    :type name: str
    :param name: the protocol name.
    :rtype: Protocol
    :raises ValueError: if *name* is not a known protocol.
    """
    protocol_cls = protocol_map.get(name)
    if not protocol_cls:
        raise ValueError('Unsupported protocol "%s".' % name)
    return protocol_cls
Returns the protocol class for the protocol with the given name. :type name: str :param name: The name of the protocol. :rtype: Protocol :return: The protocol class.
def _get_sqla_coltype_class_from_str(coltype: str, dialect: Dialect) -> Type[TypeEngine]:
    """Return the SQLAlchemy type class for *coltype* in the given dialect.

    Dialects disagree on case (e.g. SQLite uses upper case, MySQL lower
    case), so try upper case first and fall back to lower case.
    """
    known_types = dialect.ischema_names
    try:
        return known_types[coltype.upper()]
    except KeyError:
        return known_types[coltype.lower()]
Returns the SQLAlchemy class corresponding to a particular SQL column type in a given dialect. Performs an upper- and lower-case search. For example, the SQLite dialect uses upper case, and the MySQL dialect uses lower case.
def sam2fastq(line):
    """Build the four FASTQ lines for one SAM alignment record.

    :param line: a SAM record already split into fields (list); uses the
        query name (0), sequence (9) and quality string (10).
    :return: list of the four FASTQ lines.
    """
    name, seq, qual = line[0], line[9], line[10]
    return ['@%s' % name, seq, '+%s' % name, qual]
print fastq from sam
def hasmethod(obj, meth):
    """Return True when *obj* has an attribute named *meth* that is callable."""
    # getattr with a None default collapses the hasattr + getattr pair;
    # callable(None) is False, so missing attributes yield False.
    return callable(getattr(obj, meth, None))
Checks if an object, obj, has a callable method, meth return True or False
def index_model(index_name, adapter): model = adapter.model log.info('Indexing {0} objects'.format(model.__name__)) qs = model.objects if hasattr(model.objects, 'visible'): qs = qs.visible() if adapter.exclude_fields: qs = qs.exclude(*adapter.exclude_fields) docs = iter_qs(qs, ad...
Index all objects for a given model
def eq(self, r1, r2):
    """Return True when *r1* and *r2* are registers holding known, equal values."""
    if not (is_register(r1) and is_register(r2)):
        return False
    left, right = self.regs[r1], self.regs[r2]
    if left is None or right is None:
        # unknown register contents can never be proven equal
        return False
    return left == right
True if values of r1 and r2 registers are equal
def create(self, name, type, mains=None, libs=None, description=None, interface=None, is_public=None, is_protected=None): data = { 'name': name, 'type': type } self._copy_if_defined(data, description=description, mains=mains, l...
Create a Job.
def process_task(self): if _debug: ServerSSM._debug("process_task") if self.state == SEGMENTED_REQUEST: self.segmented_request_timeout() elif self.state == AWAIT_RESPONSE: self.await_response_timeout() elif self.state == SEGMENTED_RESPONSE: self.segmen...
This function is called when the client has failed to send all of the segments of a segmented request, the application has taken too long to complete the request, or the client failed to ack the segments of a segmented response.
def git_status_all_repos(cat, hard=True, origin=False, clean=True): log = cat.log log.debug("gitter.git_status_all_repos()") all_repos = cat.PATHS.get_all_repo_folders() for repo_name in all_repos: log.info("Repo in: '{}'".format(repo_name)) sha_beg = get_sha(repo_name) log.debug...
Perform a 'git status' in each data repository.
def setup(self, environ): request = wsgi_request(environ) cfg = request.cache.cfg loop = request.cache._loop self.store = create_store(cfg.data_store, loop=loop) pubsub = self.store.pubsub(protocol=Protocol()) channel = '%s_webchat' % self.name ensure_future(pubsu...
Called once only to setup the WSGI application handler. Check :ref:`lazy wsgi handler <wsgi-lazy-handler>` section for further information.
def _linkToParent(self, feature, parentName): parentParts = self.byFeatureName.get(parentName) if parentParts is None: raise GFF3Exception( "Parent feature does not exist: {}".format(parentName), self.fileName) for parentPart in parentParts: ...
Link a feature with its children
def error_cutout_ma(self):
    """Return a 2D masked-array cutout of the ``error`` image, or None if absent.

    The cutout is ``self._error[self._slice]`` masked with ``self._total_mask``.
    """
    if self._error is None:
        return None
    cutout = self._error[self._slice]
    return np.ma.masked_array(cutout, mask=self._total_mask)
A 2D `~numpy.ma.MaskedArray` cutout from the input ``error`` image. The mask is `True` for pixels outside of the source segment (labeled region of interest), masked pixels from the ``mask`` input, or any non-finite ``data`` values (e.g. NaN or inf). If ``error`` is `None`, then...
def words(self, quantity: int = 5) -> List[str]:
    """Return a list of *quantity* random words (default 5).

    :param quantity: number of words to generate.
    :return: list of words, e.g. ['science', 'network', 'god'].
    """
    pool = self._data['words'].get('normal')
    return [self.random.choice(pool) for _ in range(quantity)]
Generate a list of random words. :param quantity: Quantity of words. Default is 5. :return: Word list. :Example: [science, network, god, octopus, love]
def optimize(self, sess, batch_index): feed_dict = { self._batch_index: batch_index, self._per_device_batch_size: self._loaded_per_device_batch_size, self._max_seq_len: self._loaded_max_seq_len, } for tower in self._towers: feed_dict.update(tower.l...
Run a single step of SGD. Runs a SGD step over a slice of the preloaded batch with size given by self._loaded_per_device_batch_size and offset given by the batch_index argument. Updates shared model weights based on the averaged per-device gradients. Args: ...
def rotateAroundVector(v1, w, theta_deg):
    """Rotate vector *v1* by *theta_deg* degrees about the axis *w*.

    Implements the Rodrigues rotation formula
    (https://en.wikipedia.org/wiki/Axis%E2%80%93angle_representation,
    "Rotating a vector"); *w* is assumed to be a unit vector.

    Notes: rotating the x axis 90 degrees about y gives -z;
    rotating the x axis 90 degrees about z gives +y.
    """
    theta = np.radians(theta_deg)
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    parallel_part = w * np.dot(w, v1) * (1 - cos_t)
    return v1 * cos_t + np.cross(w, v1) * sin_t + parallel_part
Rotate vector v1 by an angle theta around w Taken from https://en.wikipedia.org/wiki/Axis%E2%80%93angle_representation (see Section "Rotating a vector") Notes: Rotating the x axis 90 degrees about the y axis gives -z Rotating the x axis 90 degrees about the z axis gives +y
def in_tree(self, name):
    r"""Test whether node *name* is present in the tree.

    :param name: node name to search for.
    :rtype: boolean
    :raises: RuntimeError (Argument \`name\` is not valid)
    """
    if self._validate_node_name(name):
        raise RuntimeError("Argument `name` is not valid")
    return name in self._db
r""" Test if a node is in the tree. :param name: Node name to search for :type name: :ref:`NodeName` :rtype: boolean :raises: RuntimeError (Argument \`name\` is not valid)
def _get(self, locator, expected_condition, params=None, timeout=None, error_msg="", driver=None, **kwargs): from selenium.webdriver.support.ui import WebDriverWait if not isinstance(locator, WebElement): error_msg += "\nLocator of type <{}> with selector <{}> with params <{params}>".format(...
Get elements based on locator with optional parameters. Uses selenium.webdriver.support.expected_conditions to determine the state of the element(s). :param locator: element identifier :param expected_condition: expected condition of element (ie. visible, clickable, etc) :param params:...
def get_schema_version_from_xml(xml):
    """Return the ``schemaVersion`` attribute from an OpenMalaria scenario document.

    :param xml: an open file, or the XML content as a string.
    :return: the attribute value, or None when the document cannot be
        parsed or has no such attribute.
    """
    if isinstance(xml, six.string_types):
        xml = StringIO(xml)
    try:
        document = ElementTree.parse(xml)
    except ParseError:
        return None
    return document.getroot().attrib.get('schemaVersion', None)
Get schemaVersion attribute from OpenMalaria scenario file xml - open file or content of xml document to be processed
def dataset_path(cache=None, cachefile="~/.io3d_cache.yaml", get_root=False): local_data_dir = local_dir if cachefile is not None: cache = cachef.CacheFile(cachefile) if cache is not None: local_data_dir = cache.get_or_save_default("local_dataset_dir", local_dir) if get_root: loc...
Get dataset path. :param cache: CacheFile object :param cachefile: cachefile path, default '~/.io3d_cache.yaml' :return: path to dataset
def get_track_by_mbid(self, mbid):
    """Look up a track by its MusicBrainz ID and return a Track object."""
    doc = _Request(self, "track.getInfo", {"mbid": mbid}).execute(True)
    artist = _extract(doc, "name", 1)
    title = _extract(doc, "name")
    return Track(artist, title, self)
Looks up a track by its MusicBrainz ID
def clear_ddata(self):
    """Reset the working copy of data to free memory.

    Harmless with respect to the reference copy and the treatment dict:
    every key is reset to None and the 'uptodate' flag is cleared.
    """
    self._ddata = {key: None for key in self._get_keys_ddata()}
    self._ddata['uptodate'] = False
Clear the working copy of data Harmless, as it preserves the reference copy and the treatment dict Use only to free some memory
def toDict(self):
    """Return a dict describing the title's alignments.

    @return: a C{dict} with the per-alignment dicts, the subject title,
        and the subject length.
    """
    alignments = [alignment.toDict() for alignment in self]
    return {
        'titleAlignments': alignments,
        'subjectTitle': self.subjectTitle,
        'subjectLength': self.subjectLength,
    }
Get information about the title's alignments as a dictionary. @return: A C{dict} representation of the title's aligments.
def resolve_one(self, correlation_id, key):
    """Resolve a single connection's parameters by key.

    :param correlation_id: (optional) transaction id to trace execution
        through the call chain (not used here; kept for interface parity).
    :param key: key that uniquely identifies the connection.
    :return: the first matching connection, or None when none is found.
    """
    for item in self._items:
        # `is not None` instead of the original `!= None`: an identity
        # check cannot be subverted by a custom __ne__ on connection objects.
        if item.key == key and item.connection is not None:
            return item.connection
    return None
Resolves a single connection parameters by its key. :param correlation_id: (optional) transaction id to trace execution through call chain. :param key: a key to uniquely identify the connection. :return: a resolved connection.
def _u_distance_correlation_sqr_naive(x, y, exponent=1):
    """Naive bias-corrected (U-statistic) distance correlation between x and y."""
    stats = _distance_sqr_stats_naive_generic(
        x,
        y,
        matrix_centered=_u_distance_matrix,
        product=u_product,
        exponent=exponent,
    )
    return stats.correlation_xy
Bias-corrected distance correlation estimator between two matrices.
def make_key_hippie(obj, typed=True): ftype = type if typed else lambda o: None if is_hashable(obj): return obj, ftype(obj) if isinstance(obj, set): obj = sorted(obj) if isinstance(obj, (list, tuple)): return tuple(make_key_hippie(e, typed) for e in obj) if isinstance(obj, di...
Return hashable structure from non-hashable structure using hippie means dict and set are sorted and their content subjected to same hippie means. Note that the key identifies the current content of the structure.
def read_whole_packet(self):
    """Read a single packet and return its payload bytes.

    Must be called with the transport's read pointer at the beginning of
    a packet: ``_read_packet`` consumes the header, and the remaining
    ``self._size - header size`` bytes are read as the payload.
    """
    self._read_packet()
    payload_size = self._size - _header.size
    return readall(self, payload_size)
Reads single packet and returns bytes payload of the packet Can only be called when transport's read pointer is at the beginning of the packet.
def samaccountname(self, base_dn, distinguished_name): mappings = self.samaccountnames(base_dn, [distinguished_name]) try: return mappings[distinguished_name] except KeyError: logging.info("%s - unable to retrieve object from AD by DistinguishedName", ...
Retrieve the sAMAccountName for a specific DistinguishedName :param str base_dn: The base DN to search within :param list distinguished_name: The base DN to search within :param list attributes: Object attributes to populate, defaults to all :return: A populated ADUser object :...
def datetime_match(data, dts):
    """Match datetimes in time columns for data filtering.

    :param data: the data to filter (must support ``isin``).
    :param dts: a datetime or list of datetimes.
    :raises TypeError: when any element of *dts* is not a datetime.
    """
    candidates = dts if islistable(dts) else [dts]
    if any(not isinstance(entry, datetime.datetime) for entry in candidates):
        raise TypeError("`time` can only be filtered by datetimes")
    return data.isin(candidates)
matching of datetimes in time columns for data filtering
def load(self): private = self.is_private() with open_tls_file(self.file_path, 'r', private=private) as fh: if private: self.x509 = crypto.load_privatekey(self.encoding, fh.read()) else: self.x509 = crypto.load_certificate(self.encoding, fh.read())...
Load from a file and return an x509 object
def all_methods(cls):
    """Return the sorted, kebab-cased names of all static methods defined on *cls*."""
    names = []
    for attr in cls.__dict__.values():
        if isinstance(attr, staticmethod):
            # unwrap the staticmethod to reach the function's __name__
            names.append(attr.__get__(cls).__name__.replace("_", "-"))
    return sorted(names)
Return the names of all available binning methods
def segment(self, value=None, scope=None, metric_scope=None, **selection): SCOPES = { 'hits': 'perHit', 'sessions': 'perSession', 'users': 'perUser', } segments = self.meta.setdefault('segments', []) if value and len(selection): raise V...
Return a new query, limited to a segment of all users or sessions. Accepts segment objects, filtered segment objects and segment names: ```python query.segment(account.segments['browser']) query.segment('browser') query.segment(account.segments['browser'].any('Chrome', 'Firefox...
def _validate_recurse_directive_types(current_schema_type, field_schema_type, context): type_hints = context['type_equivalence_hints'].get(field_schema_type) type_hints_inverse = context['type_equivalence_hints_inverse'].get(field_schema_type) allowed_current_types = {field_schema_type} if type_hints an...
Perform type checks on the enclosing type and the recursed type for a recurse directive. Args: current_schema_type: GraphQLType, the schema type at the current location field_schema_type: GraphQLType, the schema type at the inner scope context: dict, various per-compilation data (e.g. decla...
def parse_instruction(string, location, tokens): mnemonic_str = tokens.get("mnemonic") operands = [op for op in tokens.get("operands", [])] instr = ArmInstruction( string, mnemonic_str["ins"], operands, arch_info.architecture_mode ) if "cc" in mnemonic_str: in...
Parse an ARM instruction.
def slurp(path, encoding='UTF-8'):
    """Read file *path* and return its entire contents as a unicode string.

    Parameters
    ----------
    path : str
        File path on disk.
    encoding : str, default `UTF-8`, optional
        Encoding of the file.

    Returns
    -------
    The text read from the file.
    """
    with io.open(path, 'r', encoding=encoding) as handle:
        return handle.read()
Reads file `path` and returns the entire contents as a unicode string By default assumes the file is encoded as UTF-8 Parameters ---------- path : str File path to file on disk encoding : str, default `UTF-8`, optional Encoding of the file Returns ------- The txt read...
def sort_by_tag(self, tag):
    """Return a new AmpalContainer sorted by the given tag on each component.

    Parameters
    ----------
    tag : str
        Key of the tag used for sorting.
    """
    def tag_value(component):
        return component.tags[tag]
    return AmpalContainer(sorted(self, key=tag_value))
Sorts the `AmpalContainer` by a tag on the component objects. Parameters ---------- tag : str Key of tag used for sorting.
def categorical__int(self, column_name, output_column_prefix):
    """Interpret an integer column as a categorical variable by casting it to str."""
    transformation = _ColumnFunctionTransformation(
        features=[column_name],
        output_column_prefix=output_column_prefix,
        transform_function=lambda col: col.astype(str),
        transform_function_name="astype(str)")
    return [transformation]
Interprets an integer column as a categorical variable.
def get_config(self, service, setting):
    """Look up *setting* in the configuration of *service*.

    Falls back to the ``<setting>_DEFAULT`` attribute on this object when
    either the service or the setting is missing.
    """
    try:
        return self.get_service(service)[setting]
    except KeyError:
        default_attr = setting + '_DEFAULT'
        return getattr(self, default_attr)
Access the configuration for a given service and setting. If the service is not found, return a default value.
def TransposeTable(table):
    """Transpose a list of lists, padding shorter rows with None.

    >>> TransposeTable([[11, 12, 13], [21, 22], [31, 32, 33, 34]])
    [[11, 21, 31], [12, 22, 32], [13, None, 33], [None, None, 34]]
    """
    width = max(len(row) for row in table)
    return [
        [row[col] if col < len(row) else None for row in table]
        for col in range(width)
    ]
Transpose a list of lists, using None to extend all input lists to the same length. For example: >>> TransposeTable( [ [11, 12, 13], [21, 22], [31, 32, 33, 34]]) [ [11, 21, 31], [12, 22, 32], [13, None, 33], [None, None, 34]]
def add_zone(self, spatial_unit, container_id, name='', description='', visible=True, reuse=0, drop_behavior_type=None): if not isinstance(spatial_unit, abc_mapping_primitives.SpatialUnit): raise InvalidArgument('zone is not a SpatialUnit') if not isinstance(reuse, int): raise In...
container_id is a targetId that the zone belongs to
def add(self, elem):
    """Add a JWK object to this set.

    :param elem: the JWK object to add.
    :raises TypeError: if *elem* is not a JWK.
    """
    if not isinstance(elem, JWK):
        raise TypeError('Only JWK objects are valid elements')
    # call the base set.add directly, bypassing any subclass override
    set.add(self, elem)
Adds a JWK object to the set :param elem: the JWK object to add. :raises TypeError: if the object is not a JWK.
def extendMarkdown(self, md, md_globals):
    """Register the del/ins/mark inline patterns with the Markdown instance."""
    # registration order matches the original: del, ins, mark
    for tag, regex in (('del', DEL_RE), ('ins', INS_RE), ('mark', MARK_RE)):
        md.inlinePatterns.add(tag, SimpleTagPattern(regex, tag), '<not_strong')
Modifies inline patterns.
def listen(self): self.listening = True if self.threading: from threading import Thread self.listen_thread = Thread(target=self.listen_loop) self.listen_thread.daemon = True self.listen_thread.start() self.scheduler_thread = Thread(target=self....
Starts the listen loop. If threading is enabled, then the loop will be started in its own thread. Args: None Returns: None
def convertwaiveredfits(waiveredObject, outputFileName=None, forceFileOutput=False, convertTo='multiExtension', verbose=False): if convertTo == 'multiExtension': func = toMultiExtensionFits else: rais...
Convert the input waivered FITS object to various formats. The default conversion format is multi-extension FITS. Generate an output file in the desired format if requested. Parameters: waiveredObject input object representing a waivered FITS file; either...
def removeSessionWithKey(self, key):
    """Remove the persistent session identified by *key*, if it exists.

    @type key: L{bytes}
    @param key: the persistent session identifier.
    """
    matching = self.store.query(
        PersistentSession, PersistentSession.sessionKey == key)
    matching.deleteFromStore()
Remove a persistent session, if it exists. @type key: L{bytes} @param key: The persistent session identifier.
def reset(self):
    """Reset sampling state so that sampling can restart from scratch.

    Clears the per-stratum sampled flags and zeroes the sampled counts.
    """
    self._sampled = [np.repeat(False, size) for size in self.sizes_]
    self._n_sampled = np.zeros(self.n_strata_, dtype=int)
Reset the instance to begin sampling from scratch
def _log_exception(self, exception): self._io.error(str(exception).strip().split(os.linesep))
Logs an exception. :param Exception exception: The exception. :rtype: None
def create_stack(self, name): deployment = find_exact(self.api.deployments, name=name) if not deployment: try: self.api.client.post( '/api/deployments', data={'deployment[name]': name}, ) exce...
Creates stack if necessary.
def geom_xys(geom): if geom.has_z: geom = wkt.loads(geom.to_wkt()) assert not geom.has_z if hasattr(geom, "geoms"): geoms = geom.geoms else: geoms = [geom] for g in geoms: arr = g.array_interface_base['data'] for pair in zip(arr[::2], arr[1::2]): ...
Given a shapely geometry, generate a flattened series of 2D points as x,y tuples
def parse_redis_url(url): redis_config = { "DB": 0, "PASSWORD": None, "HOST": "localhost", "PORT": 6379, "SSL": False } if not url: return redis_config url = urlparse.urlparse(url) path = url.path[1:] path = path.split('?', 2)[0] if path: ...
Parses a redis URL.
def list_containers(self): data = run_cmd(["machinectl", "list", "--no-legend", "--no-pager"], return_output=True) output = [] reg = re.compile(r"\s+") for line in data.split("\n"): stripped = line.strip() if stripped: parts ...
list all available nspawn containers :return: collection of instances of :class:`conu.backend.nspawn.container.NspawnContainer`
def site_symbols(self):
    """Return the run-length-compressed element symbols of the first structure.

    Similar to the 6th line in a vasp 5+ Xdatcar.
    """
    symbols = [site.specie.symbol for site in self.structures[0]]
    return [group_key for group_key, _ in itertools.groupby(symbols)]
Sequence of symbols associated with the Xdatcar. Similar to 6th line in vasp 5+ Xdatcar.
def is_git_repo(repo_dir):
    """Return True if *repo_dir* is inside a git repository.

    Runs ``git rev-parse`` and interprets a command failure as "not a repo".
    """
    try:
        execute_git_command(['git', 'rev-parse'], repo_dir=repo_dir)
    except exceptions.SimplGitCommandError:
        return False
    return True
Return True if the directory is inside a git repo.
def get_entities_query(namespace, workspace, etype, page=1, page_size=100, sort_direction="asc", filter_terms=None): params = { "page" : page, "pageSize" : page_size, "sortDirection" : sort_direction } if filter_terms: params['fil...
Paginated version of get_entities_with_type. Args: namespace (str): project to which workspace belongs workspace (str): Workspace name Swagger: https://api.firecloud.org/#!/Entities/entityQuery
def init_i18n (loc=None): if 'LOCPATH' in os.environ: locdir = os.environ['LOCPATH'] else: locdir = os.path.join(get_install_data(), 'share', 'locale') i18n.init(configdata.name.lower(), locdir, loc=loc) import logging logging.addLevelName(logging.CRITICAL, _('CRITICAL')) logging...
Initialize i18n with the configured locale dir. The environment variable LOCPATH can also specify a locale dir. @return: None
def register_module_classes(yaml: ruamel.yaml.YAML, modules: Optional[Iterable[Any]] = None) -> ruamel.yaml.YAML: if modules is None: modules = [] classes_to_register = set() for module in modules: module_classes = [member[1] for member in inspect.getmembers(module, inspect.isclass)] ...
Register all classes in the given modules with the YAML object. This is a simple helper function.
def is_higher_permission(level1, level2): return (is_publish_permission(level1) and not is_publish_permission(level2) or (is_edit_permission(level1) and not is_publish_permission(level2) and not is_edit_permission(level2)) or (is_showon_permission(level1...
Return True if the level1 is higher than level2
def iteryaml(self, *args, **kwargs):
    """Yield each data structure from ``iterstruct`` serialized as a YAML string."""
    from rowgenerators.rowpipe.json import VTEncoder
    import yaml

    # NOTE(review): kwargs['cls'] is populated but never passed to
    # yaml.safe_dump — preserved as-is; confirm intent with the author.
    kwargs.setdefault('cls', VTEncoder)
    for struct in self.iterstruct:
        yield yaml.safe_dump(struct)
Yields the data structures from iterstruct as YAML strings
def create_parser(): parser = argparse.ArgumentParser(description=DESCRIPTION, formatter_class=argparse.RawDescriptionHelpFormatter) parser.add_argument('-v', '--verbose', action="count", default=0, help="Increase logging level (goes error, warn, info, debug)") parser.add_argument('-l', '--logfile', help="T...
Create the argument parser for iotile.
def get(self, eid):
    """Return the complete record (dict) of the entity with the given eID."""
    response = self._http_req('connections/%u' % eid)
    decoded = response['decoded']
    self.debug(0x01, decoded)
    return decoded
Returns a dict with the complete record of the entity with the given eID
def read(self):
    """Read and decrypt the data file into ``self.data``; empty dict if missing."""
    if not path.exists(self.filepath):
        self.data = dict()
        return
    with open(self.filepath, 'rb') as infile:
        decrypted = self.fernet.decrypt(infile.read())
        # NOTE(review): yaml.load without an explicit Loader can construct
        # arbitrary objects; consider yaml.safe_load if the data allows it.
        self.data = yaml.load(decrypted)
Reads and decrypts data from the filesystem
def _find_matching_expectation(self, args, kwargs): for expectation in self._expectations: if expectation.satisfy_exact_match(args, kwargs): return expectation for expectation in self._expectations: if expectation.satisfy_custom_matcher(args, kwargs): ...
Return a matching expectation. Returns the first expectation that matches the ones declared. Tries one with specific arguments first, then falls back to an expectation that allows arbitrary arguments. :return: The matching ``Expectation``, if one was found. :rtype: Expectation, None
def profile_update(self, profile): if profile.get('install_json') is None: print( '{}{}Missing install_json parameter for profile {}.'.format( c.Style.BRIGHT, c.Fore.YELLOW, profile.get('profile_name') ) ) self.profile_update_ar...
Update an existing profile with new parameters or remove deprecated parameters. Args: profile (dict): The dictionary containting the profile settings.
def stop(self) -> None:
    """Stop the analysis as soon as possible.

    NOTE(review): the source was flattened to one line; this reconstruction
    assumes both the callback invocation and the reset to None are guarded
    by the same condition — confirm against the original layout.
    """
    if self._stop and not self._posted_kork:
        self._stop()
        self._stop = None
Stops the analysis as soon as possible.
def _build_implicit_prefetches( self, model, prefetches, requirements ): for source, remainder in six.iteritems(requirements): if not remainder or isinstance(remainder, six.string_types): continue related_field = get_model_field(model, ...
Build a prefetch dictionary based on internal requirements.
def setup_standalone_signals(instance):
    """Wire the prefs dialog (standalone mode) so closing it quits the app.

    Connects the window's delete-event and the close button's click to
    ``Gtk.main_quit``, blocking the default destroy handler on the button.
    """
    window = instance.get_widget('config-window')
    window.connect('delete-event', Gtk.main_quit)

    close_button = instance.get_widget('button1')
    close_button.handler_block_by_func(instance.gtk_widget_destroy)
    close_button.connect('clicked', Gtk.main_quit)
    return instance
Called when prefs dialog is running in standalone mode. It makes the delete event of dialog and click on close button finish the application.
def put_acl(Bucket, ACL=None, AccessControlPolicy=None, GrantFullControl=None, GrantRead=None, GrantReadACP=None, GrantWrite=None, GrantWriteACP=None, region=None, key=None, keyid=None, profile=None): try: conn = _get_co...
Given a valid config, update the ACL for a bucket. Returns {updated: true} if the ACL was updated and returns {updated: False} if the ACL was not updated. CLI Example: .. code-block:: bash salt myminion boto_s3_bucket.put_acl my_bucket 'public' \\ GrantFullControl='e...
def do_mkdir(self, path):
    """Create a new directory under the current path.

    :param path: argument list; only its first element (the new directory
        name) is used. The cached directory listing is refreshed afterwards.
    """
    target = path[0]
    self.n.makeDirectory(self.current_path + target)
    self.dirs = self.dir_complete()
create a new directory
def sg_prod(tensor, opt):
    r"""Compute the product of elements across an axis of a tensor.

    See `tf.reduce_prod()` in tensorflow.

    Args:
        tensor: A `Tensor` (automatically given by chain).
        opt:
            axis: an integer or tuple/list of integers; the axis to reduce.
            keep_dims: if True, retains reduced dimensions with length 1.
            name: optional name for the operation.
    """
    return tf.reduce_prod(
        tensor, axis=opt.axis, keep_dims=opt.keep_dims, name=opt.name)
r"""Computes the product of elements across axis of a tensor. See `tf.reduce_prod()` in tensorflow. Args: tensor: A `Tensor` (automatically given by chain). opt: axis : A tuple/list of integers or an integer. The axis to reduce. keep_dims: If true, retains reduced dimensions with l...
def install_pyenv(name, user=None):
    """Install pyenv if it is not already installed.

    :param name: state name.
    :param user: the user to install pyenv as, if any.
    :return: a salt state return dict.
    """
    ret = {'name': name, 'result': None, 'comment': '', 'changes': {}}
    if __opts__['test']:
        ret['comment'] = 'pyenv is set to be installed'
        return ret
    return _check_and_install_python(ret, user)
Install pyenv if not installed. Allows you to require pyenv be installed prior to installing the plugins. Useful if you want to install pyenv plugins via the git or file modules and need them installed before installing any rubies. Use the pyenv.root configuration option to set the path for pyenv if yo...
def run_score(self): diffs = 0 lines = 0 for file in self.files: try: results = self._check(file) except Error as e: termcolor.cprint(e.msg, "yellow", file=sys.stderr) continue diffs += results.diffs ...
Run checks on self.files, printing raw percentage to stdout.