code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def f2tc(f,base=25): try: f = int(f) except: return "--:--:--:--" hh = int((f / base) / 3600) mm = int(((f / base) / 60) - (hh*60)) ss = int((f/base) - (hh*3600) - (mm*60)) ff = int(f - (hh*3600*base) - (mm*60*base) - (ss*base)) return "{:02d}:{:02d}:{:02d}:{:02d}".format(hh,...
Converts frames to timecode
def undecorate(cls, function):
    """Strip the validator decoration from ``function``.

    Returns the underlying function when ``function`` has been wrapped
    by a validator; otherwise returns ``function`` unchanged.
    """
    if not cls.is_function_validated(function):
        return function
    return cls.get_function_validator(function).function
Remove validator decoration from a function. The `function` argument is the function to be cleaned from the validator decorator.
def remove_event(self, func_name: str, event: str) -> None: event_funcs_copy = self._events[event].copy() for func in self._event_funcs(event): if func.__name__ == func_name: event_funcs_copy.remove(func) if self._events[event] == event_funcs_copy: err_msg...
Removes a subscribed function from a specific event. :param func_name: The name of the function to be removed. :type func_name: str :param event: The name of the event. :type event: str :raise EventDoesntExist: if func_name doesn't exist in the event.
def archs(self, _args):
    """List the target architectures available to be built for."""
    header = ('{Style.BRIGHT}Available target architectures are:'
              '{Style.RESET_ALL}'.format(Style=Out_Style))
    print(header)
    for arch in self.ctx.archs:
        print(' {}'.format(arch.arch))
List the target architectures available to be built for.
def mean_values(self):
    """Return the mean parameter value vector, respecting log transform.

    Returns
    -------
    mean_values : pandas.Series
    """
    vals = self.pst.parameter_data.parval1.copy()
    if self.istransformed:
        # log-transformed parameters are reported in log10 space
        vals[self.log_indexer] = np.log10(vals[self.log_indexer])
    return vals
the mean value vector while respecting log transform Returns ------- mean_values : pandas.Series
def getThirdPartyLibCmakeFlags(self, libs): fmt = PrintingFormat.singleLine() if libs[0] == '--multiline': fmt = PrintingFormat.multiLine() libs = libs[1:] platformDefaults = True if libs[0] == '--nodefaults': platformDefaults = False libs = libs[1:] details = self.getThirdpartyLibs(libs, includeP...
Retrieves the CMake invocation flags for building against the Unreal-bundled versions of the specified third-party libraries
def serialize(self, attr, obj, accessor=None, **kwargs): if self._CHECK_ATTRIBUTE: value = self.get_value(obj, attr, accessor=accessor) if value is missing_ and hasattr(self, 'default'): default = self.default value = default() if callable(default) else de...
Pulls the value for the given key from the object, applies the field's formatting and returns the result. :param str attr: The attribute or key to get from the object. :param str obj: The object to pull the key from. :param callable accessor: Function used to pull values from ``obj``. ...
def open(fn, expand_includes=True, include_comments=False, include_position=False, **kwargs): p = Parser(expand_includes=expand_includes, include_comments=include_comments, **kwargs) ast = p.parse_file(fn) m = MapfileToDict(include_position=include_position, include_comm...
Load a Mapfile from the supplied filename into a Python dictionary. Parameters ---------- fn: string The path to the Mapfile, or partial Mapfile expand_includes: boolean Load any ``INCLUDE`` files in the MapFile include_comments: boolean Include or discard comment strings ...
def get_connectable_volume_templates(self, start=0, count=-1, filter='', query='', sort=''): uri = self.URI + "/connectable-volume-templates" get_uri = self._client.build_query_uri(start=start, count=count, filter=filter, query=query, sort=sort, uri=uri) ...
Gets the storage volume templates that are available on the specified networks based on the storage system port's expected network connectivity. If there are no storage volume templates that meet the specified connectivity criteria, an empty collection will be returned. Returns: lis...
def parse_groups(self, group, params): if group['GroupName'] in self.groups: return api_client = params['api_client'] group['id'] = group.pop('GroupId') group['name'] = group.pop('GroupName') group['arn'] = group.pop('Arn') group['users'] = self.__fetch_group_...
Parse a single IAM group and fetch additional information
def transaction_fail(self, name): if not name: raise ValueError("Transaction name cannot be empty") if self.transaction_count > 0: logger.debug("{}. Failing transaction {}".format(self.transaction_count, name)) if self.transaction_count == 1: self._tra...
rollback a transaction if currently in one e -- Exception() -- if passed in, bubble up the exception by re-raising it
def sample(self, hash, limit=None, offset=None):
    """Fetch the sample identified by ``hash``.

    Returns an object representing the sample, or an empty object when
    no sample with that hash is found.
    """
    endpoint = self._uris['sample'].format(hash)
    query = {'limit': limit, 'offset': offset}
    return self.get_parse(endpoint, query)
Return an object representing the sample identified by the input hash, or an empty object if that sample is not found
def strict_deps_for_target(self, target, predicate=None): if self._native_build_settings.get_strict_deps_value_for_target(target): strict_deps = target.strict_dependencies(DependencyContext()) if predicate: filtered_deps = list(filter(predicate, strict_deps)) else: filtered_deps = ...
Get the dependencies of `target` filtered by `predicate`, accounting for 'strict_deps'. If 'strict_deps' is on, instead of using the transitive closure of dependencies, targets will only be able to see their immediate dependencies declared in the BUILD file. The 'strict_deps' setting is obtained from the r...
def rdf_source(self, aformat="turtle"):
    """Serialize the graph in the requested format.

    Unsupported formats yield an explanatory message instead of raising.
    """
    if aformat and aformat not in self.SUPPORTED_FORMATS:
        return "Sorry. Allowed formats are %s" % str(self.SUPPORTED_FORMATS)
    if aformat == "dot":
        return self.__serializedDot()
    return self.rdflib_graph.serialize(format=aformat)
Serialize graph using the format required
def create_primary_zone_by_upload(self, account_name, zone_name, bind_file): zone_properties = {"name": zone_name, "accountName": account_name, "type": "PRIMARY"} primary_zone_info = {"forceImport": True, "createType": "UPLOAD"} zone_data = {"properties": zone_properties, "primaryCreateInfo": pr...
Creates a new primary zone by uploading a bind file Arguments: account_name -- The name of the account that will contain this zone. zone_name -- The name of the zone. It must be unique. bind_file -- The file to upload.
def _md5_compare(self, file_path, checksum, block_size=2 ** 13): with closing(self._tqdm(desc="MD5 checksumming", total=getsize(file_path), unit="B", unit_scale=True)) as progress: md5 = hashlib.md5() with open(file_path, "rb") as f: while ...
Compare a given MD5 checksum with one calculated from a file.
def unmarshal_event(self, data: str, response_type): js = json.loads(data) js['raw_object'] = js['object'] if js['type'].lower() == 'error': return js if response_type is not None: js['object'] = self._api_client.deserialize( response=SimpleNamespa...
Return the K8s response `data` in JSON format.
def find_filter_class(filtername):
    """Look up a filter by name; return None when no match is found."""
    if filtername in FILTERS:
        return FILTERS[filtername]
    # fall back to plugin-provided filters
    matches = (cls for name, cls in find_plugin_filters() if name == filtername)
    return next(matches, None)
Lookup a filter by name. Return None if not found.
def verify_database(trusted_consensus_hash, consensus_block_height, untrusted_working_dir, trusted_working_dir, start_block=None, expected_snapshots={}): db = BlockstackDB.get_readwrite_instance(trusted_working_dir) consensus_impl = virtualchain_hooks return virtualchain.state_engine_verify(trusted_consensu...
Verify that a database is consistent with a known-good consensus hash. Return True if valid. Return False if not
async def get_default(cls):
    """Fetch the 'default' Fabric for the MAAS and wrap it in this class."""
    raw = await cls._handler.read(id=cls._default_fabric_id)
    return cls(raw)
Get the 'default' Fabric for the MAAS.
def normalize_shape(shape):
    """Normalize the ``shape`` argument to a tuple of ints.

    A single integer becomes a 1-tuple; any iterable of numbers is
    coerced element-wise to int. ``None`` is rejected with TypeError.
    """
    if shape is None:
        raise TypeError('shape is None')
    if isinstance(shape, numbers.Integral):
        return (int(shape),)
    return tuple(int(s) for s in shape)
Convenience function to normalize the `shape` argument.
def create_files(filedef, cleanup=True): cwd = os.getcwd() tmpdir = tempfile.mkdtemp() try: Filemaker(tmpdir, filedef) if not cleanup: pass os.chdir(tmpdir) yield tmpdir finally: os.chdir(cwd) if cleanup: shutil.rmtree(tmpdir, ignor...
Context manager that creates a directory structure from a yaml description.
def get_appended_name(name, columns): loop = 0 while name in columns: loop += 1 if loop > 10: logger_misc.warn("get_appended_name: Too many loops: Tried to get appended name but something looks wrong") break tmp = name + "-" + str(loop) if tmp not in colum...
Append numbers to a name until it no longer conflicts with the other names in a column. Necessary to avoid overwriting columns and losing data. Loop a preset amount of times to avoid an infinite loop. There shouldn't ever be more than two or three identical variable names in a table. :param str name: Varia...
def tracking_save(sender, instance, raw, using, update_fields, **kwargs): if _has_changed(instance): if instance._original_fields['pk'] is None: _create_create_tracking_event(instance) else: _create_update_tracking_event(instance) if _has_changed_related(instance): ...
Post save, detect creation or changes and log them. We need post_save to have the object for a create.
def clear_state(self): self.state = {} self.state['steps'] = [] self.state['current_step'] = None self.state['scope'] = [] self.state['counters'] = {} self.state['strings'] = {} for step in self.matchers: self.state[step] = {} self.state[st...
Clear the recipe state.
def get_prev_block_hash(block_representation, coin_symbol='btc', api_key=None):
    """Return the previous block hash for the given block representation."""
    overview = get_block_overview(
        block_representation=block_representation,
        coin_symbol=coin_symbol,
        txn_limit=1,
        api_key=api_key,
    )
    return overview['prev_block']
Takes a block_representation and returns the previous block hash
def DbAddServer(self, argin): self._log.debug("In DbAddServer()") if len(argin) < 3 or not len(argin) % 2: self.warn_stream("DataBase::AddServer(): incorrect number of input arguments ") th_exc(DB_IncorrectArguments, "incorrect no. of input arguments, needs at ...
Create a device server process entry in database :param argin: Str[0] = Full device server name Str[1] = Device(s) name Str[2] = Tango class name Str[n] = Device name Str[n + 1] = Tango class name :type: tango.DevVarStringArray :return: :rtype: tango.DevV...
def transfer(self, transfer_payload=None, *, from_user, to_user): if self.persist_id is None: raise EntityNotYetPersistedError(('Entities cannot be transferred ' 'until they have been ' 'persisted')) ...
Transfer this entity to another owner on the backing persistence layer Args: transfer_payload (dict): Payload for the transfer from_user (any): A user based on the model specified by the persistence layer to_user (any): A user based on the model speci...
def cluster_path(cls, project, instance, cluster):
    """Return a fully-qualified cluster resource string."""
    template = "projects/{project}/instances/{instance}/clusters/{cluster}"
    return google.api_core.path_template.expand(
        template, project=project, instance=instance, cluster=cluster
    )
Return a fully-qualified cluster string.
def symmetrized(self):
    """Return the tensor symmetrized over all index permutations.

    Computed as the average of the transposes of this tensor with
    respect to every permutation of its axes.
    """
    perms = list(itertools.permutations(range(self.rank)))
    total = sum(np.transpose(self, ind) for ind in perms)
    return total / len(perms)
Returns a generally symmetrized tensor, calculated by taking the sum of the tensor and its transpose with respect to all possible permutations of indices
def count(self):
    """Return the number of hits matching the query and filters.

    Uses the cached response when one is present; otherwise issues a
    count request against Elasticsearch.
    """
    if hasattr(self, '_response'):
        return self._response.hits.total
    es = connections.get_connection(self._using)
    body = self.to_dict(count=True)
    return es.count(index=self._index, body=body, **self._params)['count']
Return the number of hits matching the query and filters. Note that only the actual number is returned.
def update_room(room): if room.custom_server: return def _update_room(xmpp): muc = xmpp.plugin['xep_0045'] muc.joinMUC(room.jid, xmpp.requested_jid.user) muc.configureRoom(room.jid, _set_form_values(xmpp, room, muc.getRoomConfig(room.jid))) current_plugin.logger.info('Updatin...
Updates a MUC room on the XMPP server.
def typecounter(table, field):
    """Count how many values of each Python type appear in ``field``.

    Returns a Counter mapping type name to occurrence count.
    """
    counter = Counter()
    for v in values(table, field):
        try:
            counter[v.__class__.__name__] += 1
        except IndexError:
            # best-effort: skip values that cannot be counted
            pass
    return counter
Count the number of values found for each Python type. >>> import petl as etl >>> table = [['foo', 'bar', 'baz'], ... ['A', 1, 2], ... ['B', u'2', '3.4'], ... [u'B', u'3', u'7.8', True], ... ['D', u'xyz', 9.0], ... ['E...
def absolute_uri(self, location=None, scheme=None, **query): if not is_absolute_uri(location): if location or location is None: location = self.full_path(location, **query) if not scheme: scheme = self.is_secure and 'https' or 'http' base = '%s...
Builds an absolute URI from ``location`` and variables available in this request. If no ``location`` is specified, the relative URI is built from :meth:`full_path`.
def min(self): if self.is_quantized or self.base_dtype in ( bool, string, complex64, complex128, ): raise TypeError("Cannot find minimum value of %s." % self) try: return np.finfo(self.as_numpy_dtype()).min except: ...
Returns the minimum representable value in this data type. Raises: TypeError: if this is a non-numeric, unordered, or quantized type.
def flat_list(input_list):
    r"""Flatten nested lists of arbitrary depth into a single flat list."""
    if not isinstance(input_list, list):
        return [input_list]
    return [item for sub in input_list for item in flat_list(sub)]
r""" Given a list of nested lists of arbitrary depth, returns a single level or 'flat' list.
def putParamset(self, paramset, data={}): try: if paramset in self._PARAMSETS and data: self._proxy.putParamset(self._ADDRESS, paramset, data) self.updateParamsets() return True else: return False except Exception as...
Some devices act upon changes to paramsets. A "putted" paramset must not contain all keys available in the specified paramset, just the ones which are writable and should be changed.
def gateways_info(): data = netifaces.gateways() results = {'default': {}} with suppress(KeyError): results['ipv4'] = data[netifaces.AF_INET] results['default']['ipv4'] = data['default'][netifaces.AF_INET] with suppress(KeyError): results['ipv6'] = data[netifaces.AF_INET6] ...
Returns gateways data.
def library_hierarchy_depth(self): current_library_hierarchy_depth = 1 library_root_state = self.get_next_upper_library_root_state() while library_root_state is not None: current_library_hierarchy_depth += 1 library_root_state = library_root_state.parent.get_next_upper_li...
Calculates the library hierarchy depth. Counting starts at the current library state, so if there is no upper library state, the depth is one. :return: library hierarchy depth :rtype: int
def token_cache_pkgs(source=None, release=None):
    """Determine additional packages needed for token caching.

    @param source: source string for charm
    @param release: release of OpenStack currently deployed
    @returns list of packages that enable token caching
    """
    if enable_memcache(source=source, release=release):
        return ['memcached', 'python-memcache']
    return []
Determine additional packages needed for token caching @param source: source string for charm @param release: release of OpenStack currently deployed @returns List of package to enable token caching
def verify_connectivity(config): logger.debug("Verifying Connectivity") ic = InsightsConnection(config) try: branch_info = ic.get_branch_info() except requests.ConnectionError as e: logger.debug(e) logger.debug("Failed to connect to satellite") return False except Loo...
Verify connectivity to satellite server
def _inject_patched_examples(self, existing_item, patched_item): for key, _ in patched_item.examples.items(): patched_example = patched_item.examples[key] existing_examples = existing_item.examples if key in existing_examples: existing_examples[key].fields.upd...
Injects patched examples into original examples.
def validate_argmin_with_skipna(skipna, args, kwargs):
    """Validate 'skipna' for numpy-dispatched 'Series.argmin'.

    When 'Series.argmin' is called via numpy, the third positional
    parameter is 'out', so 'skipna' may actually hold an ndarray or None
    rather than a bool; resolve it, validate the remaining arguments,
    and return the resolved boolean skipna.
    """
    skipna, args = process_skipna(skipna, args)
    validate_argmin(args, kwargs)
    return skipna
If 'Series.argmin' is called via the 'numpy' library, the third parameter in its signature is 'out', which takes either an ndarray or 'None', so check if the 'skipna' parameter is either an instance of ndarray or is None, since 'skipna' itself should be a boolean
def next(self): if self._selfiter is None: warnings.warn( "Calling 'next' directly on a query is deprecated. " "Perhaps you want to use iter(query).next(), or something " "more expressive like store.findFirst or store.findOrCreate?", De...
This method is deprecated, a holdover from when queries were iterators, rather than iterables. @return: one element of massaged data.
def rollback(self, release):
    """Roll the app back to the given release version.

    Returns the most recent release after the rollback request.
    """
    self._h._http_resource(
        method='POST',
        resource=('apps', self.name, 'releases'),
        data={'rollback': release},
    )
    return self.releases[-1]
Rolls back the release to the given version.
def most_recent_submission(project, group):
    """Return the most recent submission for the given group and project."""
    query = Submission.query_by(project=project, group=group)
    return query.order_by(Submission.created_at.desc()).first()
Return the most recent submission for the user and project id.
def params_for(prefix, kwargs):
    """Extract the parameters in ``kwargs`` that belong to ``prefix``.

    Useful to obtain parameters that belong to an sklearn submodule.

    Examples
    --------
    >>> kwargs = {'encoder__a': 3, 'encoder__b': 4, 'decoder__a': 5}
    >>> params_for('encoder', kwargs)
    {'a': 3, 'b': 4}
    """
    if not prefix.endswith('__'):
        prefix += '__'
    cut = len(prefix)
    result = {}
    for key, val in kwargs.items():
        if key.startswith(prefix):
            result[key[cut:]] = val
    return result
Extract parameters that belong to a given sklearn module prefix from ``kwargs``. This is useful to obtain parameters that belong to a submodule. Examples -------- >>> kwargs = {'encoder__a': 3, 'encoder__b': 4, 'decoder__a': 5} >>> params_for('encoder', kwargs) {'a': 3, 'b': 4}
def joint_entropy_calc(classes, table, POP): try: result = 0 for i in classes: for index, j in enumerate(classes): p_prime = table[i][j] / POP[i] if p_prime != 0: result += p_prime * math.log(p_prime, 2) return -result excep...
Calculate joint entropy. :param classes: confusion matrix classes :type classes : list :param table: confusion matrix table :type table : dict :param POP: population :type POP : dict :return: joint entropy as float
def create_placement_group(self, name, strategy='cluster'):
    """Create a new placement group in the currently connected region.

    :param name: name of the new placement group
    :param strategy: placement strategy of the group (default 'cluster')
    """
    params = {'GroupName': name, 'Strategy': strategy}
    return self.get_status('CreatePlacementGroup', params, verb='POST')
Create a new placement group for your account. This will create the placement group within the region you are currently connected to. :type name: string :param name: The name of the new placement group :type strategy: string :param strategy: The placement strategy of th...
def _Enum(docstring, *names): enums = dict(zip(names, range(len(names)))) reverse = dict((value, key) for key, value in enums.iteritems()) enums['reverse_mapping'] = reverse enums['__doc__'] = docstring return type('Enum', (object,), enums)
Utility to generate enum classes used by annotations. Args: docstring: Docstring for the generated enum class. *names: Enum names. Returns: A class that contains enum names as attributes.
def setdatastrs(self, label, unit, format, coord_sys):
    """Set the dataset standard string attributes.

    Args::
        label      dataset label (attribute 'long_name')
        unit       dataset unit (attribute 'units')
        format     dataset format (attribute 'format')
        coord_sys  dataset coordinate system (attribute 'coordsys')
    """
    # Delegate to the C-level SDsetdatastrs; _checkErr raises on a
    # non-zero status.
    status = _C.SDsetdatastrs(self._id, label, unit, format, coord_sys)
    _checkErr('setdatastrs', status, 'cannot execute')
Set the dataset standard string type attributes. Args:: label dataset label (attribute 'long_name') unit dataset unit (attribute 'units') format dataset format (attribute 'format') coord_sys dataset coordinate system (attribute 'coordsys') ...
def get_local_ip_address(target):
    """Return the local IP address used to reach ``target``.

    "Connects" a UDP socket to ``target`` (no packets are actually sent)
    and reads the source address the OS selected. Returns '' when the
    address cannot be determined, preserving the best-effort contract.
    """
    try:
        # 'with' guarantees the socket is closed even when connect fails
        # (the original leaked the socket on error).
        with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
            s.connect((target, 8000))
            return s.getsockname()[0]
    except Exception:
        # Original swallowed all errors; keep that behavior but scoped.
        return ''
Get the local ip address to access one specific target.
def hideEvent(self, event):
    """Reimplemented to disconnect signal handlers and the event filter."""
    super(CallTipWidget, self).hideEvent(event)
    # Once the call tip is hidden, stop tracking the cursor position and
    # stop filtering the text edit's events.
    self._text_edit.cursorPositionChanged.disconnect(
        self._cursor_position_changed)
    self._text_edit.removeEventFilter(self)
Reimplemented to disconnect signal handlers and event filter.
def importpath(path, error_text=None): result = None attrs = [] parts = path.split('.') exception = None while parts: try: result = __import__('.'.join(parts), {}, {}, ['']) except ImportError as e: if exception is None: exception = e ...
Import value by specified ``path``. Value can represent module, class, object, attribute or method. If ``error_text`` is not None and import will raise ImproperlyConfigured with user friendly text.
def list_principals(): ret = {} cmd = __execute_kadmin('list_principals') if cmd['retcode'] != 0 or cmd['stderr']: ret['comment'] = cmd['stderr'].splitlines()[-1] ret['result'] = False return ret ret = {'principals': []} for i in cmd['stdout'].splitlines()[1:]: ret['p...
Get all principals CLI Example: .. code-block:: bash salt 'kde.example.com' kerberos.list_principals
def parse(self, **global_args): if self.build_file not in ParseContext._parsed: butcher_context = {} for str_to_exec in self._strs_to_exec: ast = compile(str_to_exec, '<string>', 'exec') exec_function(ast, butcher_context) with ParseContext.act...
Entry point to parsing a BUILD file. Args: **global_args: Variables to include in the parsing environment.
def set_type(self, type):
    """Set the frame type that we are querying."""
    # Record the type both as a command-line option and on the instance,
    # then refresh the derived output name.
    self.add_var_opt('type', str(type))
    self.__type = str(type)
    self.__set_output()
sets the frame type that we are querying
def logger(self): if self._logger: return self._logger else: log_builder = p_logging.ProsperLogger( self.PROGNAME, self.config.get_option('LOGGING', 'log_path'), config_obj=self.config ) if self.verbose: ...
uses "global logger" for logging
def dispatch(self, *args, **kwargs):
    """Check that signup is allowed before doing any other processing."""
    if self.registration_allowed():
        return super(RegistrationView, self).dispatch(*args, **kwargs)
    return HttpResponseRedirect(force_text(self.disallowed_url))
Check that user signup is allowed before even bothering to dispatch or do other processing.
def _get_path_from_parent(self, parent): if hasattr(self, 'get_path_from_parent'): return self.get_path_from_parent(parent) if self.model is parent: return [] model = self.concrete_model chain = model._meta.get_base_chain(parent) or [] chain.reverse() chain.append(model) path...
Return a list of PathInfos containing the path from the parent model to the current model, or an empty list if parent is not a parent of the current model.
def Prod(a, axis, keep_dims):
    """Prod reduction op.

    ``axis`` may arrive as an ndarray of axes, which numpy expects as a
    tuple. Returns the result wrapped in a 1-tuple.
    """
    if isinstance(axis, np.ndarray):
        axis = tuple(axis)
    return np.prod(a, axis=axis, keepdims=keep_dims),
Prod reduction op.
def normalizeGlyphTopMargin(value):
    """Normalize a glyph top margin.

    * ``value`` must be an :ref:`type-int-float` or ``None``.
    * The returned value is the same type as the input value.
    """
    if value is None or isinstance(value, (int, float)):
        return value
    raise TypeError("Glyph top margin must be an :ref:`type-int-float`, "
                    "not %s." % type(value).__name__)
Normalizes glyph top margin. * **value** must be a :ref:`type-int-float` or `None`. * Returned value is the same type as the input value.
def parse_remote(cls, filename): blob_file = cls._URL_FORMAT.search(filename) return cls._REMOTE_FILE("blob", storage=blob_file.group("storage"), container=blob_file.group("container"), blob=blob_file.group("...
Parses a remote filename into blob information.
def relocate(source, destination, move=False):
    """Adjust a virtual environment's settings and optionally move it.

    Args:
        source (str): Path to the existing virtual environment.
        destination (str): Desired path of the virtual environment.
        move (bool): Whether to actually move the files. Default False.
    """
    venv = api.VirtualEnvironment(source)
    if move:
        venv.move(destination)
    else:
        venv.relocate(destination)
    return None
Adjust the virtual environment settings and optional move it. Args: source (str): Path to the existing virtual environment. destination (str): Desired path of the virtual environment. move (bool): Whether or not to actually move the files. Default False.
def add_to_sources(self, action, doc_source):
    """Store a document source locally, keyed by index, type and id."""
    by_index = self.sources.setdefault(action["_index"], {})
    by_type = by_index.setdefault(action["_type"], {})
    by_type[action["_id"]] = doc_source
Store sources locally
def override_build_kwarg(workflow, k, v, platform=None):
    """Override a build kwarg for all worker builds on ``platform``."""
    workspace = workflow.plugin_workspace.setdefault(OrchestrateBuildPlugin.key, {})
    per_platform = workspace.setdefault(WORKSPACE_KEY_OVERRIDE_KWARGS, {})
    per_platform.setdefault(platform, {})[k] = v
Override a build-kwarg for all worker builds
def fave(self, deviationid, folderid=""): if self.standard_grant_type is not "authorization_code": raise DeviantartError("Authentication through Authorization Code (Grant Type) is required in order to connect to this endpoint.") post_data = {} post_data['deviationid'] = deviationid ...
Add deviation to favourites :param deviationid: Id of the Deviation to favourite :param folderid: Optional UUID of the Collection folder to add the favourite into
def save_metadata(self, metadata): if metadata in (None, {}): return None if SYSTEM_METADATA in metadata: raise StoreException("Not allowed to store %r in metadata" % SYSTEM_METADATA) path = self.temporary_object_path(str(uuid.uuid4())) with open(path, 'w') as fd:...
Save metadata to the store.
def values_for_column(self, column_name, limit=10000): cols = {col.column_name: col for col in self.columns} target_col = cols[column_name] tp = self.get_template_processor() qry = ( select([target_col.get_sqla_col()]) .select_from(self.get_from_clause(tp)) ...
Runs query against sqla to retrieve some sample values for the given column.
def setsockopt(self, *sockopts): if type(sockopts[0]) in (list, tuple): for sock_opt in sockopts[0]: level, option, value = sock_opt self.connection.sockopts.add((level, option, value)) else: level, option, value = sockopts self.connect...
Add socket options to set
def replace_pattern(tokens, new_pattern):
    """Replace matching patterns in a RegexLexer token dictionary.

    Every tuple entry whose token equals ``new_pattern``'s token is
    swapped for ``new_pattern`` itself, in place.
    """
    target_token = new_pattern[1]
    for state in tokens.values():
        for idx, entry in enumerate(state):
            if isinstance(entry, tuple) and entry[1] == target_token:
                state[idx] = new_pattern
Given a RegexLexer token dictionary 'tokens', replace all patterns that match the token specified in 'new_pattern' with 'new_pattern'.
def drawPolyline(self, points): for i, p in enumerate(points): if i == 0: if not (self.lastPoint == Point(p)): self.draw_cont += "%g %g m\n" % JM_TUPLE(Point(p) * self.ipctm) self.lastPoint = Point(p) else: self.draw...
Draw several connected line segments.
def links(self, base_link, current_page) -> dict: max_pages = self.max_pages - 1 if \ self.max_pages > 0 else self.max_pages base_link = '/%s' % (base_link.strip("/")) self_page = current_page prev = current_page - 1 if current_page is not 0 else None prev_link ...
Return JSON paginate links
def close(self): log.debug("Closing socket connection for %s:%d" % (self.host, self.port)) if self._sock: try: self._sock.shutdown(socket.SHUT_RDWR) except socket.error: pass self._sock.close() self._sock = None else...
Shutdown and close the connection socket
def format_national_number_with_carrier_code(numobj, carrier_code): country_code = numobj.country_code nsn = national_significant_number(numobj) if not _has_valid_country_calling_code(country_code): return nsn region_code = region_code_for_country_code(country_code) metadata = PhoneMetadata....
Format a number in national format for dialing using the specified carrier. The carrier-code will always be used regardless of whether the phone number already has a preferred domestic carrier code stored. If carrier_code contains an empty string, returns the number in national format without any carri...
def add_key_path(key_proto, *path_elements): for i in range(0, len(path_elements), 2): pair = path_elements[i:i+2] elem = key_proto.path.add() elem.kind = pair[0] if len(pair) == 1: return id_or_name = pair[1] if isinstance(id_or_name, (int, long)): elem.id = id_or_name elif is...
Add path elements to the given datastore.Key proto message. Args: key_proto: datastore.Key proto message. *path_elements: list of ancestors to add to the key. (kind1, id1/name1, ..., kindN, idN/nameN), the last 2 elements represent the entity key, if no terminating id/name: they key w...
def publish(self):
    """Publish last changes and return the new zone serial."""
    response = self.put('/REST/Zone/%s' % (self.zone,),
                        data={'publish': True})
    return response.content['data']['serial']
Publish last changes.
def addHost(self, name=None):
    """Add a new host node to the topology.

    When ``name`` is omitted, generate 'h<N>' names until an unused one
    is found.
    """
    if name is None:
        while True:
            candidate = 'h' + str(self.__hnum)
            self.__hnum += 1
            if candidate not in self.__nxgraph:
                name = candidate
                break
    self.__addNode(name, Host)
    return name
Add a new host node to the topology.
def parse_instancepath(self, tup_tree): self.check_node(tup_tree, 'INSTANCEPATH') k = kids(tup_tree) if len(k) != 2: raise CIMXMLParseError( _format("Element {0!A} has invalid number of child elements " "{1!A} (expecting two child elements " ...
Parse an INSTANCEPATH element and return the instance path it represents as a CIMInstanceName object. :: <!ELEMENT INSTANCEPATH (NAMESPACEPATH, INSTANCENAME)>
def download(self, path, file):
    """Download the remote file at ``path`` into the local ``file``.

    Raises YaDiskException when the server does not answer 200.
    """
    resp = self._sendRequest("GET", path)
    if resp.status_code != 200:
        raise YaDiskException(resp.status_code, resp.content)
    with open(file, "wb") as f:
        f.write(resp.content)
Download remote file to disk.
def bind(self, study, **kwargs): if self.default is None: raise ArcanaError( "Attempted to bind '{}' to {} but only acquired specs with " "a default value should be bound to studies{})".format( self.name, study)) if self._study is not None:...
Returns a copy of the AcquiredSpec bound to the given study Parameters ---------- study : Study A study to bind the fileset spec to (should happen in the study __init__)
def parse_xml_file(self, fileobj, id_generator=None):
    """Parse an open XML file and return the resulting usage id."""
    root = etree.parse(fileobj).getroot()
    return self._usage_id_from_node(root, None, id_generator)
Parse an open XML file, returning a usage id.
def from_api(cls, **kwargs): vals = cls.get_non_empty_vals({ cls._to_snake_case(k): v for k, v in kwargs.items() }) remove = [] for attr, val in vals.items(): try: vals[attr] = cls._parse_property(attr, val) except HelpScoutValidationEx...
Create a new instance from API arguments. This will switch camelCase keys into snake_case for instantiation. It will also identify any ``Instance`` or ``List`` properties, and instantiate the proper objects using the values. The end result being a fully Objectified and Pythonified API ...
def discover_settings(conf_base=None): settings = { 'zmq_prefix': '', 'libzmq_extension': False, 'no_libzmq_extension': False, 'skip_check_zmq': False, 'build_ext': {}, 'bdist_egg': {}, } if sys.platform.startswith('win'): settings['have_sys_un_h'] = F...
Discover custom settings for ZMQ path
def detect_regions(bam_in, bed_file, out_dir, prefix): bed_file = _reorder_columns(bed_file) counts_reads_cmd = ("coverageBed -s -counts -b {bam_in} " "-a {bed_file} | sort -k4,4 " "> {out_dir}/loci.cov") with utils.chdir(out_dir): run(counts_reads_cmd...
Detect regions using first CoRaL module
def assert_valid_input(cls, tag):
    """Check that ``tag`` is a valid BeautifulSoup tag or document.

    Raises:
        TypeError: when ``tag`` is not a BeautifulSoup ``Tag``.
    """
    if not cls.is_tag(tag):
        # fixed 'recieved' typo in the user-facing error message
        raise TypeError(
            "Expected a BeautifulSoup 'Tag', but instead received type {}".format(type(tag))
        )
Check if valid input tag or document.
def _cast_to_type(self, value): if not isinstance(value, dict): self.fail('invalid', value=value) return value
Raise error if the value is not a dict
def cleanup(output_root):
    """Remove the generated output path, whether it is a file or directory."""
    if not os.path.exists(output_root):
        return
    if os.path.isdir(output_root):
        rmtree(output_root)
    else:
        os.remove(output_root)
Remove any reST files which were generated by this extension
def _push_frontier(self, early_frontier: Dict[ops.Qid, int], late_frontier: Dict[ops.Qid, int], update_qubits: Iterable[ops.Qid] = None ) -> Tuple[int, int]: if update_qubits is None: update_qubits = set(earl...
Inserts moments to separate two frontiers. After insertion n_new moments, the following holds: for q in late_frontier: early_frontier[q] <= late_frontier[q] + n_new for q in update_qubits: early_frontier[q] the identifies the same moment as before ...
def field_exists(self, well_x, well_y, field_x, field_y):
    """Check if the field exists in the ScanFieldArray."""
    # 'is not None' is the correct identity check; '!= None' invokes
    # __ne__ and can misbehave for objects overriding comparison.
    return self.field(well_x, well_y, field_x, field_y) is not None
Check if field exists ScanFieldArray.
def _get_candidates(self): candidates = np.where(self.dpp_vector == 0) return None if len(candidates[0]) == 0 else candidates[0]
Finds the pipelines that are not yet tried. Returns: np.array: Indices corresponding to columns in ``dpp_matrix`` that haven't been tried on ``X``. ``None`` if all pipelines have been tried on X.
async def send_frame(self, frame):
    """Send a frame to the API via the connection.

    Lazily (re)establishes the session first when disconnected:
    connect, negotiate the API version, sync UTC time and enable the
    house status monitor.
    """
    if not self.connection.connected:
        await self.connect()
        await self.update_version()
        await set_utc(pyvlx=self)
        await house_status_monitor_enable(pyvlx=self)
    self.connection.write(frame)
Send frame to API via connection.
def get_enrollment(self, id):
    """Retrieve an enrollment; useful to check its type and metadata.

    Args:
        id (str): The id of the enrollment to fetch.

    See: https://auth0.com/docs/api/management/v2#!/Guardian/get_enrollments_by_id
    """
    return self.client.get(self._url('enrollments/{}'.format(id)))
Retrieves an enrollment. Useful to check its type and related metadata. Args: id (str): The id of the device account to update See: https://auth0.com/docs/api/management/v2#!/Guardian/get_enrollments_by_id
def validate_request_certificate(headers, data): if 'SignatureCertChainUrl' not in headers or \ 'Signature' not in headers: log.error('invalid request headers') return False cert_url = headers['SignatureCertChainUrl'] sig = base64.b64decode(headers['Signature']) cert = _get_certif...
Ensure that the certificate and signature specified in the request headers are truely from Amazon and correctly verify. Returns True if certificate verification succeeds, False otherwise. :param headers: Dictionary (or sufficiently dictionary-like) map of request headers. :param data: Raw POST...
def handle_exception(self, exception):
    """Dispatch an unhandled exception to redirect or render handling.

    Redirects when the exception allows it (``can_redirect``, default
    True) and a ``redirect_uri`` is set; otherwise renders it.
    """
    allowed = getattr(exception, "can_redirect", True)
    target = getattr(self, "redirect_uri", None)
    if allowed and target:
        return self.redirect_exception(exception)
    return self.render_exception(exception)
Handle a unspecified exception and return the correct method that should be used for handling it. If the exception has the `can_redirect` property set to False, it is rendered to the browser. Otherwise, it will be redirected to the location provided in the `RedirectUri` object that is ...
def __clean_and_tokenize(self, doc_list): doc_list = filter( lambda x: x is not None and len(x) <= GitSuggest.MAX_DESC_LEN, doc_list, ) cleaned_doc_list = list() tokenizer = RegexpTokenizer(r"[a-zA-Z]+") stopwords = self.__get_words_to_ignore() dic...
Method to clean and tokenize the document list. :param doc_list: Document list to clean and tokenize. :return: Cleaned and tokenized document list.
def cacheback(lifetime=None, fetch_on_miss=None, cache_alias=None, job_class=None, task_options=None, **job_class_kwargs): if job_class is None: job_class = FunctionJob job = job_class(lifetime=lifetime, fetch_on_miss=fetch_on_miss, cache_alias=cache_alias, task_options...
Decorate function to cache its return value. :lifetime: How long to cache items for :fetch_on_miss: Whether to perform a synchronous fetch when no cached result is found :cache_alias: The Django cache alias to store the result into. :job_class: The class to use for running the cache...
def from_str(cls, s): r if '\x1b[' in s: try: tokens_and_strings = parse(s) except ValueError: return FmtStr(Chunk(remove_ansi(s))) else: chunks = [] cur_fmt = {} for x in tokens_and_strin...
r""" Return a FmtStr representing input. The str() of a FmtStr is guaranteed to produced the same FmtStr. Other input with escape sequences may not be preserved. >>> fmtstr("|"+fmtstr("hey", fg='red', bg='blue')+"|") '|'+on_blue(red('hey'))+'|' >>> fmtstr('|\x1b[31m\x1b...
def run(path, code=None, params=None, ignore=None, select=None, **meta): complexity = params.get('complexity', 10) no_assert = params.get('no_assert', False) show_closures = params.get('show_closures', False) visitor = ComplexityVisitor.from_code(code, no_assert=no_assert) blocks...
Check code with Radon. :return list: List of errors.
def _Pluralize(value, unused_context, args): if len(args) == 0: s, p = '', 's' elif len(args) == 1: s, p = '', args[0] elif len(args) == 2: s, p = args else: raise AssertionError if value > 1: return p else: return s
Formatter to pluralize words.