code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def _read_atlas_zonefile( zonefile_path, zonefile_hash ):
    """Read an atlas zone file from disk and optionally verify it.

    :param zonefile_path: path to the zone file on disk
    :param zonefile_hash: expected hash, or None to skip verification
    :returns: the raw zone file bytes, or None if verification failed
    """
    with open(zonefile_path, "rb") as f:
        data = f.read()
    if zonefile_hash is not None:
        if not verify_zonefile( data, zonefile_hash ):
            log.debug("Corrupt zonefile '%s'" % zonefile_hash)
            return None
    return data
Read and verify an atlas zone file
def triangulate(points):
    """Connect xy(z) point tuples into the smallest possible Delaunay
    triangles.

    :param points: list of xy or xyz point tuples
    :returns: list of triangle polygons; when the inputs carried a third
        z value the output triangles carry it too
    """
    seen = set()
    # de-duplicate on the xy part; `seen.add` returns None, so the second
    # clause is always truthy and only serves to record the key
    uniqpoints = [ p for p in points if str( p[:2] ) not in seen and not seen.add( str( p[:2] ) )]
    classpoints = [_Point(*point[:2]) for point in uniqpoints]
    triangle_ids = tesselator.computeDelaunayTriangulation(classpoints)
    # map vertex indices back onto the original (possibly xyz) tuples
    triangles = [[uniqpoints[i] for i in triangle] for triangle in triangle_ids]
    return triangles
Connects an input list of xy tuples with lines forming a set of smallest possible Delaunay triangles between them. Arguments: - **points**: A list of xy or xyz point tuples to triangulate. Returns: - A list of triangle polygons. If the input coordinate points contained a third z value then the output triangles will also have these z values.
def save(self):
    """Insert or update this document.

    The private ``_new`` flag decides: existing documents get an update
    (primary key popped from a copy of the data), new ones an insert.

    :returns: True on success
    :raises Exception: if the server reply reports errors
    """
    if not self._new:
        data = self._data.copy()
        ID = data.pop(self.primaryKey)
        reply = r.table(self.table).get(ID) \
            .update(data, durability=self.durability, non_atomic=self.non_atomic) \
            .run(self._conn)
    else:
        reply = r.table(self.table) \
            .insert(self._data, durability=self.durability, upsert=self.upsert) \
            .run(self._conn)
    self._new = False
    # the server generates a key on insert when none was supplied
    if "generated_keys" in reply and reply["generated_keys"]:
        self._data[self.primaryKey] = reply["generated_keys"][0]
    if "errors" in reply and reply["errors"] > 0:
        raise Exception("Could not insert entry: %s" % reply["first_error"])
    return True
If an id exists in the database, we assume we'll update it, and if not then we'll insert it. This could be a problem with creating your own id's on new objects, however luckily, we keep track of if this is a new object through a private _new variable, and use that to determine if we insert or update.
def energy(self):
    """Calculate the length of the route stored in ``self.state``.

    The route is treated as a closed loop: the leg from the last stop
    back to the first is included.
    """
    route = self.state
    distances = self.distance_matrix
    return sum(distances[route[stop - 1]][route[stop]]
               for stop in range(len(route)))
Calculates the length of the route.
def get_unspents(address, blockchain_client=BlockchainInfoClient()):
    """Get the unspent outputs for *address*, dispatching on the type of
    *blockchain_client*.

    NOTE(review): the default client instance is created once and shared
    across calls -- presumably it is stateless; confirm.

    :raises Exception: for unsupported or non-client objects
    """
    if isinstance(blockchain_client, BlockcypherClient):
        return blockcypher.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, BlockchainInfoClient):
        return blockchain_info.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, ChainComClient):
        return chain_com.get_unspents(address, blockchain_client)
    elif isinstance(blockchain_client, (BitcoindClient, AuthServiceProxy)):
        return bitcoind.get_unspents(address, blockchain_client)
    elif hasattr(blockchain_client, "get_unspents"):
        # duck-typed fallback: any object exposing get_unspents()
        return blockchain_client.get_unspents( address )
    elif isinstance(blockchain_client, BlockchainClient):
        raise Exception('That blockchain interface is not supported.')
    else:
        raise Exception('A BlockchainClient object is required')
Gets the unspent outputs for a given address.
def increase_last(self, k):
    """Increase the most recently recorded result by *k*.

    Silently does nothing when no result has been recorded yet.
    """
    last = self._last_idx
    if last is None:
        return
    self.results[last] += k
Increase the last result by k.
def compile(file):
    """Compile a Python program into byte code.

    NOTE: intentionally shadows the builtin ``compile``.

    :param file: file to be compiled
    :raises Failure: if compilation fails, e.g. on a SyntaxError
    """
    log(_("compiling {} into byte code...").format(file))
    try:
        py_compile.compile(file, doraise=True)
    except py_compile.PyCompileError as e:
        log(_("Exception raised: "))
        for line in e.msg.splitlines():
            log(line)
        raise Failure(_("{} raised while compiling {} (rerun with --log for more details)").format(e.exc_type_name, file))
Compile a Python program into byte code :param file: file to be compiled :raises check50.Failure: if compilation fails e.g. if there is a SyntaxError
def _scatter_list(self, data, owner):
    """Distribute a list from the *owner* rank to all ranks cyclically.

    :param data: list of picklable items (meaningful on *owner* only)
    :param owner: rank that holds the data
    :returns: the sublist assigned to this rank (cyclic layout)
    """
    rank = self.comm.rank
    size = self.comm.size
    subject_submatrices = []
    # all ranks must agree on the number of scatter rounds
    nblocks = self.comm.bcast(len(data) if rank == owner else None, root=owner)
    for idx in range(0, nblocks, size):
        padded = None
        extra = max(0, idx+size - nblocks)
        if data is not None:
            padded = data[idx:idx+size]
            if extra > 0:
                # pad the final chunk so every rank receives a slot
                padded = padded + [None]*extra
        mytrans = self.comm.scatter(padded, root=owner)
        if mytrans is not None:
            subject_submatrices += [mytrans]
    return subject_submatrices
Distribute a list from one rank to other ranks in a cyclic manner Parameters ---------- data: list of pickle-able data owner: rank that owns the data Returns ------- A list containing the data in a cyclic layout across ranks
def _change_mode(self, mode, major, minor):
    """Change mode of operation, with sanity checks.

    :raises RuntimeError: if a different mode was already selected
    """
    if self._mode:
        if self._mode != mode:
            raise RuntimeError('Can\'t change mode (from %s to %s)' % (self._mode, mode))
    self._require_version(major=major, minor=minor)
    self._mode = mode
    # reset all flag sets whenever the mode is (re)selected
    self.ticket_flags = YubiKeyConfigBits(0x0)
    self.config_flags = YubiKeyConfigBits(0x0)
    self.extended_flags = YubiKeyConfigBits(0x0)
    if mode != 'YUBIKEY_OTP':
        # non-OTP modes are selected via a ticket flag of the same name
        self.ticket_flag(mode, True)
Change mode of operation, with some sanity checks.
def job_from_file(job_ini, job_id, username, **kw):
    """Create a full job profile from a job config file.

    :param job_ini: path to a job.ini file
    :param job_id: ID of the created job
    :param username: the user who will own this job profile and results
    :param kw: extra parameters, including `calculation_mode` and
        `exposure_file`
    :returns: an oqparam instance
    """
    hc_id = kw.get('hazard_calculation_id')
    try:
        oq = readinput.get_oqparam(job_ini, hc_id=hc_id)
    except Exception:
        # mark the job as failed in the db before propagating
        logs.dbcmd('finish', job_id, 'failed')
        raise
    if 'calculation_mode' in kw:
        oq.calculation_mode = kw.pop('calculation_mode')
    if 'description' in kw:
        oq.description = kw.pop('description')
    if 'exposure_file' in kw:
        fnames = kw.pop('exposure_file').split()
        if fnames:
            oq.inputs['exposure'] = fnames
        elif 'exposure' in oq.inputs:
            del oq.inputs['exposure']
    logs.dbcmd('update_job', job_id,
               dict(calculation_mode=oq.calculation_mode,
                    description=oq.description,
                    user_name=username,
                    hazard_calculation_id=hc_id))
    return oq
Create a full job profile from a job config file. :param job_ini: Path to a job.ini file :param job_id: ID of the created job :param username: The user who will own this job profile and all results :param kw: Extra parameters including `calculation_mode` and `exposure_file` :returns: an oqparam instance
def pixelate(x, severity=1):
    """Pixelate an image by shrinking it and resizing back to full size.

    Args:
      x: numpy array, uint8 image in [0, 255].
      severity: integer in 1..5; higher gives a stronger shrink factor.

    Returns:
      numpy array with uint8 pixels, pixelated.
    """
    c = [0.6, 0.5, 0.4, 0.3, 0.25][severity - 1]
    shape = x.shape
    x = tfds.core.lazy_imports.PIL_Image.fromarray(x.astype(np.uint8))
    # PIL size order is (width, height) = (cols, rows)
    x = x.resize((int(shape[1] * c), int(shape[0] * c)))
    x = x.resize((shape[1], shape[0]))
    return np.asarray(x)
Pixelate images. Conduct pixelating corruptions to images by first shrinking the images and then resizing to original size. Args: x: numpy array, uncorrupted image, assumed to have uint8 pixel in [0,255]. severity: integer, severity of corruption. Returns: numpy array, image with uint8 pixels in [0,255]. Applied pixelating corruption.
def constant_coefficients(d, timelines, constant=True, independent=0):
    """Generate constant coefficients for a proportional-hazards model.

    Thin wrapper over ``time_varying_coefficients`` using a normal
    random generator.

    :param d: dimension of the dataset
    :param timelines: the observational times
    :param constant: True for constant coefficients
    :param independent: number of coefficients to zero out, or a list of
        covariate indices to make independent of survival
    :returns: a (t, d+1) matrix of coefficients
    """
    return time_varying_coefficients(d, timelines, constant, independent=independent, randgen=random.normal)
Proportional hazards model. d: the dimension of the dataset timelines: the observational times constant: True for constant coefficients independent: the number of coefficients to set to 0 (covariate is independent of survival), or a list of covariates to make independent. returns a matrix (t,d+1) of coefficients
def may_be_null_is_nullable():
    """Return True if may_be_null means "nullable" rather than "NULL may
    be passed in".

    Probes argument 8 of GLib's spawn_sync.  Can still be wrong if the
    installed typelib is older than the linked libgirepository.
    https://bugzilla.gnome.org/show_bug.cgi?id=660879#c47
    """
    repo = GIRepository()
    repo.require("GLib", "2.0", 0)
    info = repo.find_by_name("GLib", "spawn_sync")
    return not info.get_arg(8).may_be_null
If may_be_null returns nullable or if NULL can be passed in. This can still be wrong if the specific typelib is older than the linked libgirepository. https://bugzilla.gnome.org/show_bug.cgi?id=660879#c47
def list_parameters(self, parameter_type=None, page_size=None):
    """List the parameters visible to this client, in lexicographical
    order.

    :param str parameter_type: optional parameter type filter
    :param int page_size: optional pagination page size
    :rtype: :class:`.Parameter` iterator
    """
    params = {'details': True}
    if parameter_type is not None:
        params['type'] = parameter_type
    if page_size is not None:
        params['limit'] = page_size
    return pagination.Iterator(
        client=self._client,
        path='/mdb/{}/parameters'.format(self._instance),
        params=params,
        response_class=mdb_pb2.ListParametersResponse,
        items_key='parameter',
        item_mapper=Parameter,
    )
Lists the parameters visible to this client. Parameters are returned in lexicographical order. :param str parameter_type: The type of parameter :rtype: :class:`.Parameter` iterator
def fit(self, pairs, y, calibration_params=None):
    """Learn the MMC model, then calibrate the decision threshold on the
    training pairs.

    :param pairs: 3D array of point pairs, or 2D array of pair indices
        when a preprocessor is used
    :param y: labels, -1 for dissimilar pairs and 1 for similar pairs
    :param calibration_params: dict of kwargs forwarded to
        `calibrate_threshold`, or None for the defaults
    :returns: self
    """
    calibration_params = (calibration_params if calibration_params is not
                          None else dict())
    self._validate_calibration_params(**calibration_params)
    self._fit(pairs, y)
    self.calibrate_threshold(pairs, y, **calibration_params)
    return self
Learn the MMC model. The threshold will be calibrated on the trainset using the parameters `calibration_params`. Parameters ---------- pairs : array-like, shape=(n_constraints, 2, n_features) or (n_constraints, 2) 3D Array of pairs with each row corresponding to two points, or 2D array of indices of pairs if the metric learner uses a preprocessor. y : array-like, of shape (n_constraints,) Labels of constraints. Should be -1 for dissimilar pair, 1 for similar. calibration_params : `dict` or `None` Dictionary of parameters to give to `calibrate_threshold` for the threshold calibration step done at the end of `fit`. If `None` is given, `calibrate_threshold` will use the default parameters. Returns ------- self : object Returns the instance.
def _send_to_timeseries(self, message): logging.debug("MESSAGE=" + str(message)) result = None try: ws = self._get_websocket() ws.send(json.dumps(message)) result = ws.recv() except (websocket.WebSocketConnectionClosedException, Exception) as e: logging.debug("Connection failed, will try again.") logging.debug(e) ws = self._get_websocket(reuse=False) ws.send(json.dumps(message)) result = ws.recv() logging.debug("RESULT=" + str(result)) return result
Establish or reuse socket connection and send the given message to the timeseries service.
def install_package(self, client, package):
    """Install *package* on the instance over SSH.

    :param client: SSH client connection
    :param package: name of the package to install
    :returns: the command output on success
    :raises IpaDistroException: if the install command fails
    """
    install_cmd = "{sudo} '{install} {package}'".format(
        sudo=self.get_sudo_exec_wrapper(),
        install=self.get_install_cmd(),
        package=package
    )
    try:
        out = ipa_utils.execute_ssh_command(
            client,
            install_cmd
        )
    except Exception as error:
        raise IpaDistroException(
            'An error occurred installing package {package} '
            'on instance: {error}'.format(
                package=package,
                error=error
            )
        )
    else:
        return out
Install package on instance.
def GuinierPorodGuinier(q, G, Rg1, alpha, Rg2):
    """Empirical Guinier-Porod-Guinier scattering curve.

    Delegates to ``GuinierPorodMulti``; the cross-over amplitudes and
    separation points are determined there from smoothness conditions.

    :param q: independent variable
    :param G: factor of the first Guinier branch
    :param Rg1: first radius of gyration
    :param alpha: power-law exponent
    :param Rg2: second radius of gyration

    Literature: B. Hammouda, J. Appl. Crystallogr. (2010) 43, 716-719.
    """
    return GuinierPorodMulti(q, G, Rg1, alpha, Rg2)
Empirical Guinier-Porod-Guinier scattering Inputs: ------- ``q``: independent variable ``G``: factor for the first Guinier-branch ``Rg1``: the first radius of gyration ``alpha``: the power-law exponent ``Rg2``: the second radius of gyration Formula: -------- ``G*exp(-q^2*Rg1^2/3)`` if ``q<q_sep1``. ``A*q^alpha`` if ``q_sep1 <= q <=q_sep2``. ``G2*exp(-q^2*Rg2^2/3)`` if ``q_sep2<q``. The parameters ``A``,``G2``, ``q_sep1``, ``q_sep2`` are determined from conditions of smoothness at the cross-overs. Literature: ----------- B. Hammouda: A new Guinier-Porod model. J. Appl. Crystallogr. (2010) 43, 716-719.
def add_interval(self, start, end, data=None):
    """Insert an interval into the tree.

    ``self.single_interval`` acts as a small-tree optimization:
      * None  -> tree is empty
      * tuple -> exactly one interval, stored lazily
      * 0     -> sentinel: general tree mode is in use
    Sorting of the internal "mid" structure is not maintained here; fix
    it up after all intervals are inserted.
    """
    if (end - start) <= 0:
        # ignore empty or negative-length intervals
        return
    if self.single_interval is None:
        # first interval: defer building the real tree
        self.single_interval = (start, end, data)
    elif self.single_interval == 0:
        # already in general mode
        self._add_interval(start, end, data)
    else:
        # second interval: flush the deferred one, switch to general mode
        self._add_interval(*self.single_interval)
        self.single_interval = 0
        self._add_interval(start, end, data)
Inserts an interval to the tree. Note that when inserting we do not maintain appropriate sorting of the "mid" data structure. This should be done after all intervals are inserted.
def record_to_objects(self):
    """Load source-table schema rows from the stored file into ORM
    records.

    First pass clears columns of already-known tables; the second pass
    creates missing tables and adds the columns.
    """
    from ambry.orm import SourceTable
    bsfile = self.record
    failures = set()
    # NOTE(review): `failures` is never populated, so the
    # ConfigurationError below is unreachable -- looks like a latent
    # bug; confirm the intended behavior.
    for row in bsfile.dict_row_reader:
        st = self._dataset.source_table(row['table'])
        if st:
            # reset columns of existing tables before re-adding them
            st.columns[:] = []
    self._dataset.commit()
    for row in bsfile.dict_row_reader:
        st = self._dataset.source_table(row['table'])
        if not st:
            st = self._dataset.new_source_table(row['table'])
        if 'datatype' not in row:
            row['datatype'] = 'unknown'
        del row['table']
        st.add_column(**row)
    if failures:
        raise ConfigurationError('Failed to load source schema, missing sources: {} '.format(failures))
    self._dataset.commit()
Write from the stored file data to the source records
def init_db_conn(connection_name, connection_string, scopefunc=None):
    """Initialize a database connection and register it in the pool.

    :param connection_name: key under which the connection is stored
    :param connection_string: SQLAlchemy connection URL
    :param scopefunc: optional scope function for the scoped session
    """
    engine = create_engine(connection_string)
    session = scoped_session(sessionmaker(), scopefunc=scopefunc)
    session.configure(bind=engine)
    pool.connections[connection_name] = Connection(engine, session)
Initialize a postgresql connection by each connection string defined in the configuration file
def _collect_headers(self):
    """Collect header descriptions from the model's column attributes.

    Relationship properties are expanded via ``_collect_relationship``;
    plain columns may be merged with a many-to-one foreign key.
    """
    res = []
    for prop in self.get_sorted_columns():
        main_infos = self._get_prop_infos(prop)
        if self._is_excluded(prop, main_infos):
            continue
        if isinstance(prop, RelationshipProperty):
            main_infos = self._collect_relationship(main_infos, prop, res)
            if not main_infos:
                print("Maybe there's missing some informations \ about a relationship")
                continue
        else:
            main_infos = self._merge_many_to_one_field_from_fkey(
                main_infos, prop, res
            )
            if not main_infos:
                continue
        # a collector may return one header or a list of them
        if isinstance(main_infos, (list, tuple)):
            res.extend(main_infos)
        else:
            res.append(main_infos)
    return res
Collect headers from the models attribute info col
def get_name(self, plugin):
    """Return the name under which *plugin* was registered, or None if
    it is not registered."""
    for registered_name, registered in self._name2plugin.items():
        if plugin == registered:
            return registered_name
    return None
Return name for registered plugin or None if not registered.
def default_memcache_timeout_policy(key):
    """Default memcache timeout policy: defer to ``_memcache_timeout``
    on the Model class.

    Args:
      key: Key instance.

    Returns:
      Memcache timeout to use (integer), or None.
    """
    timeout = None
    if key is not None and isinstance(key, model.Key):
        modelclass = model.Model._kind_map.get(key.kind())
        if modelclass is not None:
            policy = getattr(modelclass, '_memcache_timeout', None)
            if policy is not None:
                # the policy may be a fixed number or a callable taking
                # the key (NOTE: `long` implies Python 2)
                if isinstance(policy, (int, long)):
                    timeout = policy
                else:
                    timeout = policy(key)
    return timeout
Default memcache timeout policy. This defers to _memcache_timeout on the Model class. Args: key: Key instance. Returns: Memcache timeout to use (integer), or None.
def create_sequence_readers(sources: List[str], target: str, vocab_sources: List[vocab.Vocab],
                            vocab_target: vocab.Vocab) -> Tuple[List[SequenceReader], SequenceReader]:
    """Create source readers (with EOS) and a target reader (with BOS).

    :param sources: File names of source data and factors.
    :param target: File name of the target data.
    :param vocab_sources: The source vocabularies.
    :param vocab_target: The target vocabulary.
    :return: The source sequence readers and the target sequence reader.
    """
    source_sequence_readers = [SequenceReader(source, vocab, add_eos=True)
                               for source, vocab in zip(sources, vocab_sources)]
    target_sequence_reader = SequenceReader(target, vocab_target, add_bos=True)
    return source_sequence_readers, target_sequence_reader
Create source readers with EOS and target readers with BOS. :param sources: The file names of source data and factors. :param target: The file name of the target data. :param vocab_sources: The source vocabularies. :param vocab_target: The target vocabulary. :return: The source sequence readers and the target reader.
def _send(self, *messages): if not self.transport: return False messages = [message.encode('ascii') for message in messages] data = b'' while messages: message = messages.pop(0) if len(data + message) + 1 > self.parent.cfg.maxudpsize: self.transport.sendto(data) data = b'' data += message + b'\n' if data: self.transport.sendto(data)
Send message.
def raise_exception(self, exception, tup=None):
    """Report an exception back to Storm as an error message + sync.

    :param exception: a Python exception instance.
    :param tup: the Tuple being processed when the exception occurred,
        if any.
    """
    if tup:
        message = (
            "Python {exception_name} raised while processing Tuple "
            "{tup!r}\n{traceback}"
        )
    else:
        message = "Python {exception_name} raised\n{traceback}"
    message = message.format(
        exception_name=exception.__class__.__name__,
        tup=tup,
        traceback=format_exc()
    )
    self.send_message({"command": "error", "msg": str(message)})
    # signal readiness for the next message
    self.send_message({"command": "sync"})
Report an exception back to Storm via logging. :param exception: a Python exception. :param tup: a :class:`Tuple` object.
def set_state_view(self, request):
    """Change an experiment's state (permission-checked POST endpoint).

    State 0 marks the experiment as ended (sets ``end_date``); any
    other state clears ``end_date``.
    """
    if not request.user.has_perm('experiments.change_experiment'):
        return HttpResponseForbidden()
    try:
        state = int(request.POST.get("state", ""))
    except ValueError:
        return HttpResponseBadRequest()
    try:
        experiment = Experiment.objects.get(name=request.POST.get("experiment"))
    except Experiment.DoesNotExist:
        return HttpResponseBadRequest()
    experiment.state = state
    if state == 0:
        experiment.end_date = timezone.now()
    else:
        experiment.end_date = None
    experiment.save()
    return HttpResponse()
Changes the experiment state
def basic_qos(self, prefetch_size, prefetch_count, a_global):
    """Request a specific quality of service (AMQP basic.qos).

    :param prefetch_size: prefetch window in octets (0 means no specific
        limit; ignored when no-ack is set)
    :param prefetch_count: prefetch window in whole messages (0 means no
        limit; ignored when no-ack is set)
    :param a_global: apply to the entire connection instead of only the
        current channel
    """
    args = AMQPWriter()
    args.write_long(prefetch_size)
    args.write_short(prefetch_count)
    args.write_bit(a_global)
    self._send_method((60, 10), args)
    # wait for basic.qos-ok (class 60, method 11)
    return self.wait(allowed_methods=[
        (60, 11),
    ])
Specify quality of service This method requests a specific quality of service. The QoS can be specified for the current channel or for all channels on the connection. The particular properties and semantics of a qos method always depend on the content class semantics. Though the qos method could in principle apply to both peers, it is currently meaningful only for the server. PARAMETERS: prefetch_size: long prefetch window in octets The client can request that messages be sent in advance so that when the client finishes processing a message, the following message is already held locally, rather than needing to be sent down the channel. Prefetching gives a performance improvement. This field specifies the prefetch window size in octets. The server will send a message in advance if it is equal to or smaller in size than the available prefetch size (and also falls into other prefetch limits). May be set to zero, meaning "no specific limit", although other prefetch limits may still apply. The prefetch-size is ignored if the no-ack option is set. RULE: The server MUST ignore this setting when the client is not processing any messages - i.e. the prefetch size does not limit the transfer of single messages to a client, only the sending in advance of more messages while the client still has one or more unacknowledged messages. prefetch_count: short prefetch window in messages Specifies a prefetch window in terms of whole messages. This field may be used in combination with the prefetch-size field; a message will only be sent in advance if both prefetch windows (and those at the channel and connection level) allow it. The prefetch- count is ignored if the no-ack option is set. RULE: The server MAY send less data in advance than allowed by the client's specified prefetch windows but it MUST NOT send more. a_global: boolean apply to entire connection By default the QoS settings apply to the current channel only. If this field is set, they are applied to the entire connection.
def calc_A(Ys):
    """Return the matrix A: the sum of the outer products Y·Y' over the
    given list of 3-vectors."""
    A = 0
    for Y in Ys:
        column = np.reshape(Y, (3, 1))
        row = np.reshape(Y, (1, 3))
        A = A + np.dot(column, row)
    return A
Return the matrix A from a list of Y vectors.
def set_stream_id(self, stream_id):
    """Set the stream id associated with this data point.

    :param stream_id: stream id string or None; any leading '/' is
        stripped before storing
    """
    stream_id = validate_type(stream_id, type(None), *six.string_types)
    if stream_id is not None:
        stream_id = stream_id.lstrip('/')
    self._stream_id = stream_id
Set the stream id associated with this data point
def from_gps(cls, gps, Name = None):
    """Instantiate a Time element initialized to the given GPS time.

    Note: the element holds a reference to *gps*, not a copy; later
    mutation of the GPS time object is reflected in what gets written.

    :param gps: the GPS time value
    :param Name: optional Name attribute for the element
    """
    self = cls(AttributesImpl({u"Type": u"GPS"}))
    if Name is not None:
        self.Name = Name
    self.pcdata = gps
    return self
Instantiate a Time element initialized to the value of the given GPS time. The Name attribute will be set to the value of the Name parameter if given. Note: the new Time element holds a reference to the GPS time, not a copy of it. Subsequent modification of the GPS time object will be reflected in what gets written to disk.
def is_base_and_derived(based, derived):
    """Return True if *based* is a (possibly indirect) base class of
    *derived*, or of any class in the *derived* tuple.

    :param based: class declaration to test as base
    :param derived: class declaration or tuple of class declarations
    """
    assert isinstance(based, class_declaration.class_t)
    assert isinstance(derived, (class_declaration.class_t, tuple))
    if isinstance(derived, class_declaration.class_t):
        all_derived = ([derived])
    else:
        all_derived = derived
    for derived_cls in all_derived:
        for base_desc in derived_cls.recursive_bases:
            if base_desc.related_class == based:
                return True
    return False
returns True, if there is "base and derived" relationship between classes, False otherwise
def adjust_doy_calendar(source, target):
    """Interpolate *source* from its dayofyear range onto the dayofyear
    range implied by *target*'s calendar.

    :param source: xarray.DataArray with `dayofyear` coordinates
    :param target: xarray.DataArray with a `time` coordinate
    :returns: interpolated array, or *source* unchanged when the ranges
        already match
    """
    doy_max_source = source.dayofyear.max()
    doy_max = infer_doy_max(target)
    if doy_max_source == doy_max:
        return source
    return _interpolate_doy_calendar(source, doy_max)
Interpolate from one set of dayofyear range to another calendar. Interpolate an array defined over a `dayofyear` range (say 1 to 360) to another `dayofyear` range (say 1 to 365). Parameters ---------- source : xarray.DataArray Array with `dayofyear` coordinates. target : xarray.DataArray Array with `time` coordinate. Returns ------- xarray.DataArray Interpolated source array over coordinates spanning the target `dayofyear` range.
def validate_int(datum, **kwargs):
    """Check that *datum* is an Int32-sized integer (bools excluded), or
    a date/time/datetime value (logicalType timestamp support).

    Int32 range: -2147483648 <= datum <= 2147483647.
    NOTE: `long` implies Python 2.

    :param datum: data being validated
    :param kwargs: unused
    """
    return (
        (isinstance(datum, (int, long, numbers.Integral))
         and INT_MIN_VALUE <= datum <= INT_MAX_VALUE
         and not isinstance(datum, bool))
        or isinstance(
            datum, (datetime.time, datetime.datetime, datetime.date)
        )
    )
Check that the data value is a non floating point number with size less that Int32. Also support for logicalType timestamp validation with datetime. Int32 = -2147483648<=datum<=2147483647 conditional python types (int, long, numbers.Integral, datetime.time, datetime.datetime, datetime.date) Parameters ---------- datum: Any Data being validated kwargs: Any Unused kwargs
def generate_ul(self, a_list):
    """Decide whether a 'ul' element should be generated around
    *a_list*: true when the list is non-empty and its first element is a
    Rule or a LabelDecl."""
    if len(a_list) == 0:
        return False
    head = a_list[0]
    return isinstance(head, Rule) or isinstance(head, LabelDecl)
Determines if we should generate the 'ul' around the list 'a_list'
def consult_robots_txt(self, request: HTTPRequest) -> bool:
    """Consult robots.txt (fetched as needed) for *request*.

    Coroutine.  Returns True when fetching is allowed, or when no
    robots.txt checker is configured.
    """
    if not self._robots_txt_checker:
        return True
    result = yield from self._robots_txt_checker.can_fetch(request)
    return result
Consult by fetching robots.txt as needed. Args: request: The request to be made to get the file. Returns: True if can fetch Coroutine
def import_cfg(file_name, **kwargs):
    """Import curves and surfaces from a libconfig-format file.

    Requires the ``libconf`` package.  Pass ``jinja2=True`` to enable
    Jinja2 template processing; ``delta`` sets the evaluation delta.

    :param file_name: name of the input file
    :returns: a list of rational spline geometries
    :raises GeomdlException: if libconf is missing or reading fails
    """
    def callback(data):
        # resolved lazily: libconf is imported below, before this runs
        return libconf.loads(data)
    try:
        import libconf
    except ImportError:
        raise exch.GeomdlException("Please install 'libconf' package to use libconfig format: pip install libconf")
    delta = kwargs.get('delta', -1.0)
    use_template = kwargs.get('jinja2', False)
    file_src = exch.read_file(file_name)
    return exch.import_dict_str(file_src=file_src, delta=delta, callback=callback, tmpl=use_template)
Imports curves and surfaces from files in libconfig format. .. note:: Requires `libconf <https://pypi.org/project/libconf/>`_ package. Use ``jinja2=True`` to activate Jinja2 template processing. Please refer to the documentation for details. :param file_name: name of the input file :type file_name: str :return: a list of rational spline geometries :rtype: list :raises GeomdlException: an error occurred reading the file
def get_dyndns_records(login, password):
    """Get the set of dynamic DNS records associated with this account.

    :raises ApiError: if the server's response cannot be parsed
    """
    params = dict(action='getdyndns', sha=get_auth_key(login, password))
    response = requests.get('http://freedns.afraid.org/api/', params=params, timeout=timeout)
    # one record per line, fields separated by '|'
    # NOTE(review): .content is bytes on Python 3; this split/str handling
    # assumes Python 2 -- confirm the supported interpreter.
    raw_records = (line.split('|') for line in response.content.split())
    try:
        records = frozenset(DnsRecord(*record) for record in raw_records)
    except TypeError:
        raise ApiError("Couldn't parse the server's response", response.content)
    return records
Gets the set of dynamic DNS records associated with this account
def begin(self):
    """Start the sensor by verifying its identity registers.

    :returns: True if the expected manufacturer (0x0054) and device
        (0x0400) IDs are read back, False otherwise.
    """
    mid = self._device.readU16BE(MCP9808_REG_MANUF_ID)
    did = self._device.readU16BE(MCP9808_REG_DEVICE_ID)
    self._logger.debug('Read manufacturer ID: {0:04X}'.format(mid))
    self._logger.debug('Read device ID: {0:04X}'.format(did))
    return mid == 0x0054 and did == 0x0400
Start taking temperature measurements. Returns True if the device is initialized, False otherwise.
def _dash_f_e_to_dict(self, info_filename, tree_filename):
    """Parse RAxML ``-f e`` output (model params fitted to a tree).

    The -f e output format differs from a default run, hence this
    dedicated parser.

    :returns: dict with 'likelihood', 'ml_tree' and per-partition params
    """
    with open(info_filename) as fl:
        models, likelihood, partition_params = self._dash_f_e_parser.parseFile(fl).asList()
    with open(tree_filename) as fl:
        tree = fl.read()
    d = {'likelihood': likelihood, 'ml_tree': tree, 'partitions': {}}
    for model, params in zip(models, partition_params):
        subdict = {}
        index, name, _, alpha, rates, freqs = params
        subdict['alpha'] = alpha
        subdict['name'] = name
        subdict['rates'] = rates
        subdict['frequencies'] = freqs
        subdict['model'] = model
        d['partitions'][index] = subdict
    return d
Raxml provides an option to fit model params to a tree, selected with -f e. The output is different and needs a different parser.
def update_by_token(self, token, **kwargs):
    """Update the session identified by any known token type.

    :param token: code / access token / refresh token / ...
    :param kwargs: keyword arguments forwarded to :meth:`update`
    """
    session_id = self.handler.sid(token)
    return self.update(session_id, **kwargs)
Update the session info. Any type of known token can be used. :param token: code/access token/refresh token/... :param kwargs: Keyword arguments
def handle_input(self, code):
    """Compile Coconut interpreter input, prompting for continuation
    lines when the input appears incomplete.

    :returns: the compiled block, or None when input was aborted or
        failed to compile
    """
    if not self.prompt.multiline:
        if not should_indent(code):
            # single-line input: try to compile it immediately
            try:
                return self.comp.parse_block(code)
            except CoconutException:
                pass
        # keep reading continuation lines until a blank line or EOF
        while True:
            line = self.get_input(more=True)
            if line is None:
                return None
            elif line:
                code += "\n" + line
            else:
                break
    try:
        return self.comp.parse_block(code)
    except CoconutException:
        logger.display_exc()
        return None
Compile Coconut interpreter input.
def DbGetAliasAttribute(self, argin):
    """Get the attribute name for the given alias.

    :param argin: sequence whose first element is the attribute alias
    :returns: the attribute name (dev_name/att_name); empty when the
        alias is not found in the database
    """
    self._log.debug("In DbGetAliasAttribute()")
    alias_name = argin[0]
    return self.db.get_alias_attribute(alias_name)
Get the attribute name from the given alias. If the given alias is not found in database, returns an empty string :param argin: The attribute alias :type: tango.DevString :return: The attribute name (dev_name/att_name) :rtype: tango.DevString
def dispatch(self, request, *args, **kwargs):
    """Entry point called by the routing engine.

    Sets up the wizard name, prefix, storage backend and step helper,
    delegates to the normal view dispatch, then lets the storage update
    the response (e.g. to add cookies).
    """
    self.wizard_name = self.get_wizard_name()
    self.prefix = self.get_prefix()
    self.storage = get_storage(self.storage_name, self.prefix, request,
                               getattr(self, 'file_storage', None))
    self.steps = StepsHelper(self)
    response = super(WizardView, self).dispatch(request, *args, **kwargs)
    # give the storage a chance to persist itself onto the response
    self.storage.update_response(response)
    return response
This method gets called by the routing engine. The first argument is `request` which contains a `HttpRequest` instance. The request is stored in `self.request` for later use. The storage instance is stored in `self.storage`. After processing the request using the `dispatch` method, the response gets updated by the storage engine (for example add cookies).
def _validate_dtype(self, dtype):
    """Validate the passed dtype, normalizing via ``pandas_dtype``.

    :raises NotImplementedError: for compound (void, kind 'V') dtypes
    :returns: the normalized dtype, or None when None was passed
    """
    if dtype is not None:
        dtype = pandas_dtype(dtype)
        if dtype.kind == 'V':
            raise NotImplementedError("compound dtypes are not implemented"
                                      " in the {0} constructor"
                                      .format(self.__class__.__name__))
    return dtype
validate the passed dtype
def removeUnreferencedIDs(referencedIDs, identifiedElements):
    """Remove 'id' attributes that are not referenced anywhere.

    Elements whose tag is listed in ``keepTags`` keep their ids.  Also
    bumps the module-level counter ``_num_ids_removed``.

    :returns: the number of id attributes removed
    """
    global _num_ids_removed
    keepTags = ['font']
    num = 0
    for id in identifiedElements:
        node = identifiedElements[id]
        if id not in referencedIDs and node.nodeName not in keepTags:
            node.removeAttribute('id')
            _num_ids_removed += 1
            num += 1
    return num
Removes the unreferenced ID attributes. Returns the number of ID attributes removed
def get_username(sciper):
    """Return the LDAP username for the given sciper number.

    :raises EpflLdapException: if no username matches the sciper
    """
    attribute = 'uid'
    response = LDAP_search(
        pattern_search='(uniqueIdentifier={})'.format(sciper),
        attribute=attribute
    )
    try:
        username = get_attribute(response, attribute)
    except Exception:
        raise EpflLdapException("No username corresponds to sciper {}".format(sciper))
    return username
Return username of user
def get_n_tail(tmax, tail_temps):
    """Determine the number of tail checks included in the best-fit
    segment: how many leading entries of *tail_temps* are <= *tmax*.

    Assumes *tail_temps* is sorted ascending (grounded in the original
    fallback scan, which keeps the last temperature <= tmax).

    :param tmax: maximum temperature of the segment
    :param tail_temps: sequence of tail-check temperatures
    :returns: count of included tail checks (0 when none qualify)
    """
    if len(tail_temps) == 0:
        # no tail checks at all -> nothing can be included
        return 0
    if tmax < tail_temps[0]:
        return 0
    temps = list(tail_temps)
    try:
        t_index = temps.index(tmax)
    except ValueError:  # was a bare except: narrowed to what .index raises
        # tmax is not an exact tail-check temperature: fall back to the
        # last temperature that does not exceed it
        adj_tmax = temps[0]
        for temp in temps:
            if temp <= tmax:
                adj_tmax = temp
        t_index = temps.index(adj_tmax)
    # the original built tail_temps[0:t_index+1] only to take its length
    return t_index + 1
determines number of included tail checks in best fit segment
def _get_drive_type_and_speed(self):
    """Inspect physical drives and summarize drive types and speeds.

    :returns: dict possibly containing 'has_rotational', 'has_ssd' and
        'rotational_drive_<speed>_rpm' keys (values 'true'), or None
        when no details were gathered.
    """
    disk_details = self._get_physical_drive_resource()
    drive_hdd = False
    drive_ssd = False
    drive_details = {}
    # only these spindle speeds are reported individually
    speed_const_list = [4800, 5400, 7200, 10000, 15000]
    if disk_details:
        for item in disk_details:
            value = item['MediaType']
            if value == "HDD":
                drive_hdd = True
                speed = item['RotationalSpeedRpm']
                if speed in speed_const_list:
                    var = 'rotational_drive_' + str(speed) + '_rpm'
                    drive_details.update({var: 'true'})
            else:
                # anything non-HDD is treated as SSD
                drive_ssd = True
    if drive_hdd:
        drive_details.update({'has_rotational': 'true'})
    if drive_ssd:
        drive_details.update({'has_ssd': 'true'})
    return drive_details if len(drive_details.keys()) > 0 else None
Gets the disk drive type. :returns: A dictionary with the following keys: - has_rotational: True/False. It is True if atleast one rotational disk is attached. - has_ssd: True/False. It is True if at least one SSD disk is attached. - drive_rotational_<speed>_rpm: These are set to true as per the speed of the rotational disks. :raises: IloCommandNotSupportedError if the PhysicalDrives resource doesn't exist. :raises: IloError, on an error from iLO.
def binary_fraction(self,query='mass_A >= 0'):
    """Binary fraction of stars passing *query*, with a Poisson-style
    error estimate.

    NOTE(review): raises ZeroDivisionError when the query selects no
    stars, and the error term is not finite when there are no binaries
    -- confirm callers guarantee non-empty selections.

    :param query: pandas query string applied to ``self.stars``
    :returns: (fraction, fraction / sqrt(n_binaries))
    """
    subdf = self.stars.query(query)
    nbinaries = (subdf['mass_B'] > 0).sum()
    frac = nbinaries/len(subdf)
    return frac, frac/np.sqrt(nbinaries)
Binary fraction of stars passing given query :param query: Query to pass to stars ``DataFrame``.
def submit_populator_batch(self, column_name, batch):
    """Submit a populator batch for a custom dimension.

    Validates the dimension name, submits the batch in small chunks via
    HTTP, and returns the batch GUID.

    :raises ValueError: on an invalid column name
    :raises RuntimeError: if the server reports an error
    """
    if not set(column_name).issubset(_allowedCustomDimensionChars):
        raise ValueError('Invalid custom dimension name "%s": must only contain letters, digits, and underscores' % column_name)
    if len(column_name) < 3 or len(column_name) > 20:
        raise ValueError('Invalid value "%s": must be between 3-20 characters' % column_name)
    url = '%s/api/v5/batch/customdimensions/%s/populators' % (self.base_url, column_name)
    resp_json_dict = self._submit_batch(url, batch)
    if resp_json_dict.get('error') is not None:
        raise RuntimeError('Error received from server: %s' % resp_json_dict['error'])
    return resp_json_dict['guid']
Submit a populator batch Submit a populator batch as a series of HTTP requests in small chunks, returning the batch GUID, or raising exception on error.
def printd(*args, **kwargs):
    """Forward to ``print`` only when the global debug flag is enabled.

    :returns: True when something was printed, False otherwise
    """
    global settings
    if not settings['PRINT_DEBUG_STATE']:
        return False
    print(*args, **kwargs)
    return True
Print if PRINT_DEBUG_STATE is True
def mpixel(self, z, n=0):
    """Mandelbrot iteration at coordinate *z* (recursive form).

    :returns: a color for points that escape (|z| > 2) after *n* steps,
        or None for points still bounded after ``self.max_iter`` steps.
    """
    z = z * z + self.c
    if (abs(z) > 2.0):
        return self.color(n)
    n += 1
    if (n > self.max_iter):
        return None
    return self.mpixel(z, n)
Iteration in Mandlebrot coordinate z.
def _check_reset_and_type_change(self, name, orig_ctr):
    """Warn on hparam overwrite; forbid re-registering *name* with a
    different type.

    :param name: hyperparameter name being set
    :param orig_ctr: the container (categorical/discrete/float/int) the
        caller is inserting into
    :raises ValueError: if *name* already exists in a different-type
        container
    """
    if name in orig_ctr:
        tf.logging.warning("Overwriting hparam %s", name)
    ctr_names = [
        (self._categorical_params, "categorical"),
        (self._discrete_params, "discrete"),
        (self._float_params, "float"),
        (self._int_params, "int"),
    ]
    ctrs, names = list(zip(*ctr_names))
    orig_name = names[ctrs.index(orig_ctr)]
    for ctr, ctr_name in ctr_names:
        if ctr is orig_ctr:
            continue
        if name in ctr:
            raise ValueError("Setting hyperparameter %s as type %s, but a "
                             "hyperparemeter of the same name was originally "
                             "registered as type %s" % (name, ctr_name, orig_name))
Check if name is in orig_ctr or in one of the other type containers.
def _loadFromRow(self, result, fields, cursor):
    """Load attribute values from a database row.

    :param result: the row tuple
    :param fields: attribute names to set, in column order
    :param cursor: DB-API cursor (used for column type info)

    User classes are instantiated (but not loaded) for mapped fields.
    NOTE: ``has_key`` implies Python 2.
    """
    position = 0
    for elem in fields:
        value = result[position]
        valueType = cursor.description[position][1]
        # NOTE(review): `(value is not True or value is not False)` is
        # always True for any value, so every BOOLEAN column gets
        # normalized -- the intended operator was likely `and`; confirm.
        if hasattr(self._dbModule, 'BOOLEAN') and \
           valueType == self._dbModule.BOOLEAN and \
           (value is not True or value is not False):
            value = value and True or False
        if value and self._userClasses.has_key(elem):
            userClass = self._userClasses[elem]
            value = userClass(value)
        self._values[elem] = value
        position += 1
Load from a database row, described by fields. ``fields`` should be the attribute names that will be set. Note that userclasses will be created (but not loaded).
def _fS1(self, pos_pairs, A): dim = pos_pairs.shape[2] diff = pos_pairs[:, 0, :] - pos_pairs[:, 1, :] return np.einsum('ij,ik->jk', diff, diff)
The gradient of the similarity constraint function w.r.t. A. f = \sum_{ij}(x_i-x_j)A(x_i-x_j)' = \sum_{ij}d_ij*A*d_ij' df/dA = d(d_ij*A*d_ij')/dA Note that d_ij*A*d_ij' = tr(d_ij*A*d_ij') = tr(d_ij'*d_ij*A) so, d(d_ij*A*d_ij')/dA = d_ij'*d_ij
def connect_input(self, wire):
    """Probe *wire*: register this object as one of the wire's sinks and
    remember the wire as our input."""
    wire.sinks.append(self)
    self._input = wire
Probe the specified wire.
def pckfrm(pck, ids):
    """Find the set of reference frame class ID codes of all frames in a
    binary PCK file (CSPICE pckfrm_c wrapper).

    http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckfrm_c.html

    :param pck: Name of the PCK file.
    :param ids: SpiceCell (dtype 2) that receives the frame class IDs,
        filled in place.
    """
    pck = stypes.stringToCharP(pck)
    assert isinstance(ids, stypes.SpiceCell)
    assert ids.dtype == 2
    libspice.pckfrm_c(pck, ctypes.byref(ids))
Find the set of reference frame class ID codes of all frames in a specified binary PCK file. http://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/pckfrm_c.html :param pck: Name of PCK file. :type pck: str :param ids: Set of frame class ID codes of frames in PCK file. :type ids: SpiceCell
def insert(self, optional_root_locations_path):
    """Insert a path of optional root Locations into the tree.

    Locations absent from the children map are "simple" optionals and
    may only appear as the final element of the path.

    :param optional_root_locations_path: list of optional root Locations
    :raises AssertionError: if a simple optional is followed by further
        locations
    """
    encountered_simple_optional = False
    parent_location = self._root_location
    for optional_root_location in optional_root_locations_path:
        if encountered_simple_optional:
            raise AssertionError(u'Encountered simple optional root location {} in path, but'
                                 u'further locations are present. This should not happen: {}'
                                 .format(optional_root_location, optional_root_locations_path))
        if optional_root_location not in self._location_to_children:
            # simple optional: must terminate the path
            encountered_simple_optional = True
        else:
            self._location_to_children[parent_location].add(optional_root_location)
            parent_location = optional_root_location
Insert a path of optional Locations into the tree. Each OptionalTraversalTree object contains child Location objects as keys mapping to other OptionalTraversalTree objects. Args: optional_root_locations_path: list of optional root Locations all except the last of which must be present in complex_optional_roots
def check(name, port=None, **kwargs):
    """
    Check that the minion can open a connection to ``name`` on ``port``.

    name
        host name or ip address to test connection to

    port
        The port to test the connection on

    kwargs
        Additional parameters (proto, family, timeout) passed through to
        ``network.connect``.
    """
    ret = {'name': name,
           'result': True,
           'changes': {},
           'comment': ''}

    kwargs.setdefault('test', __opts__.get('test', False))

    if kwargs['test']:
        # Test mode: report intent without touching the network.
        ret['comment'] = 'The connection will be tested'
        return ret

    results = __salt__['network.connect'](name, port, **kwargs)
    ret['result'] = results['result']
    ret['comment'] = results['comment']
    return ret
Checks if there is an open connection from the minion to the defined host on a specific port. name host name or ip address to test connection to port The port to test the connection on kwargs Additional parameters, parameters allowed are: proto (tcp or udp) family (ipv4 or ipv6) timeout .. code-block:: yaml testgoogle: firewall.check: - name: 'google.com' - port: 80 - proto: 'tcp'
def next_media_partname(self, ext):
    """Return a |PackURI| for the next free ``/ppt/media/media<N>.<ext>``.

    Sequence numbers start at 1, and gaps left by removed parts are
    reused before a new number is appended. *ext* is used as the
    extension on the returned partname.
    """
    used = sorted(
        part.partname.idx
        for part in self.iter_parts()
        if part.partname.startswith('/ppt/media/media')
    )
    next_idx = len(used) + 1
    for position, used_idx in enumerate(used, start=1):
        if position < used_idx:
            # Found a gap in the sequence; reuse it.
            next_idx = position
            break
    return PackURI('/ppt/media/media%d.%s' % (next_idx, ext))
Return |PackURI| instance for next available media partname. Partname is first available, starting at sequence number 1. Empty sequence numbers are reused. *ext* is used as the extension on the returned partname.
def codon2aa(codon, trans_table):
    """Translate a single codon to its amino acid using *trans_table*."""
    sequence = Seq(''.join(codon), IUPAC.ambiguous_dna)
    return sequence.translate(table=trans_table)[0]
convert codon to amino acid
def parse(self, name):
    """Parse a distribution string such as ``"Fedora 23"``.

    :param name: distribution string
    :type name: string
    :raises ValueError: if the string is not a recognized distribution
    :return: self, with the parsed signature stored
    """
    stripped = name.strip()
    groups = self._parseFedora(stripped)
    if not groups:
        raise ValueError("Distribution name '%s' not recognized" % stripped)
    self._signature = DistributionNameSignature("Fedora", groups.group(1))
    return self
Parse distribution string :param name: distribution string, e.g. "Fedora 23" :type name: string
def emit(self, record):
    """Write *record* as a journal event.

    MESSAGE comes from the formatted record; PRIORITY, LOGGER,
    THREAD_NAME, PROCESS_NAME and CODE_* fields are filled in
    automatically. Any extra attributes on the record (including a
    MESSAGE_ID, if present) are forwarded as additional journal fields.
    """
    try:
        msg = self.format(record)
        pri = self.map_priority(record.levelno)
        # Start from the handler-level extras; record attributes are
        # merged in below and take precedence over these defaults.
        extras = self._extra.copy()
        if record.exc_text:
            extras['EXCEPTION_TEXT'] = record.exc_text
        if record.exc_info:
            extras['EXCEPTION_INFO'] = record.exc_info
        if record.args:
            extras['CODE_ARGS'] = str(record.args)
        # record.__dict__ wins over handler extras on key collisions.
        extras.update(record.__dict__)
        self.send(msg,
                  PRIORITY=format(pri),
                  LOGGER=record.name,
                  THREAD_NAME=record.threadName,
                  PROCESS_NAME=record.processName,
                  CODE_FILE=record.pathname,
                  CODE_LINE=record.lineno,
                  CODE_FUNC=record.funcName,
                  **extras)
    except Exception:
        # Never let logging raise into the application; delegate to the
        # standard logging error hook instead.
        self.handleError(record)
Write `record` as a journal event. MESSAGE is taken from the message provided by the user, and PRIORITY, LOGGER, THREAD_NAME, CODE_{FILE,LINE,FUNC} fields are appended automatically. In addition, record.MESSAGE_ID will be used if present.
def _dayofmonth(hardday, month, year): newday = hardday daysinmonth = calendar.monthrange(year, month)[1] if newday < 0: newday = daysinmonth + hardday + 1 newday = max(1, min(newday, daysinmonth)) return newday
Returns a valid day of the month given the desired value. Negative values are interpreted as offset backwards from the last day of the month, with -1 representing the last day of the month. Out-of-range values are clamped to the first or last day of the month.
def _read_mode_tcpao(self, size, kind): key_ = self._read_unpack(1) rkey = self._read_unpack(1) mac_ = self._read_fileng(size - 2) data = dict( kind=kind, length=size, keyid=key_, rnextkeyid=rkey, mac=mac_, ) return data
Read Authentication option. Positional arguments: * size - int, length of option * kind - int, 29 (TCP Authentication Option) Returns: * dict -- extracted Authentication (AO) option Structure of TCP AOopt [RFC 5925]: +------------+------------+------------+------------+ | Kind=29 | Length | KeyID | RNextKeyID | +------------+------------+------------+------------+ | MAC ... +-----------------------------------... ...-----------------+ ... MAC (con't) | ...-----------------+ Octets Bits Name Description 0 0 tcp.ao.kind Kind (29) 1 8 tcp.ao.length Length 2 16 tcp.ao.keyid KeyID 3 24 tcp.ao.rnextkeyid RNextKeyID 4 32 tcp.ao.mac Message Authentication Code
def _type_container(self, value, _type): ' apply type to all values in the list ' if value is None: return [] elif not isinstance(value, list): raise ValueError("expected list type, got: %s" % type(value)) else: return sorted(self._type_single(item, _type) for item in value)
apply type to all values in the list
def setOverlayNeighbor(self, eDirection, ulFrom, ulTo):
    """Set an overlay's neighbor; the "to" overlay's reverse link is set
    to point back to the "from" overlay. If an overlay's neighbor is set
    to invalid, both ends are cleared.
    """
    return self.function_table.setOverlayNeighbor(eDirection, ulFrom, ulTo)
Sets an overlay's neighbor. This will also set the neighbor of the "to" overlay to point back to the "from" overlay. If an overlay's neighbor is set to invalid both ends will be cleared
def init_app(self, app=None, blueprint=None, additional_blueprints=None):
    """Wire this api into a flask application.

    :param Application app: a flask application
    :param blueprint: optional blueprint to register routes on
    :param additional_blueprints: extra blueprints to register on the app
    """
    if app is not None:
        self.app = app
    if blueprint is not None:
        self.blueprint = blueprint

    # Register every queued resource route.
    for resource in self.resources:
        self.route(resource['resource'],
                   resource['view'],
                   *resource['urls'],
                   url_rule_options=resource['url_rule_options'])

    if self.blueprint is not None:
        self.app.register_blueprint(self.blueprint)

    if additional_blueprints is not None:
        for extra_blueprint in additional_blueprints:
            self.app.register_blueprint(extra_blueprint)

    self.app.config.setdefault('PAGE_SIZE', 30)
Update flask application with our api :param Application app: a flask application
def add_option(self, opt_name, otype, hidden=False):
    """Add an option to the object.

    :param opt_name: option name
    :type opt_name: str
    :param otype: option type
    :type otype: subclass of :class:`.GenericType`
    :param hidden: if True the option will be hidden
    :type hidden: bool
    :raises ValueError: if the option already exists
    """
    if self.has_option(opt_name):
        raise ValueError("The option is already present !")
    option = ValueOption.FromType(opt_name, otype)
    option.hidden = hidden
    self._options[opt_name] = option
Add an option to the object :param opt_name: option name :type opt_name: str :param otype: option type :type otype: subclass of :class:`.GenericType` :param hidden: if True the option will be hidden :type hidden: bool
def cli(dirty, stash):
    """
    This is a tool that handles all the tasks to build a Python
    application.

    This tool is installed as a setuptools entry point, which means it
    should be accessible from your terminal once this application is
    installed in develop mode.
    """
    _setup_logging()
    LOGGER.info('EPAB %s', __version__)
    LOGGER.info('Running in %s', os.getcwd())
    CTX.repo = epab.utils.Repo()
    CTX.repo.ensure()
    CTX.stash = stash
    # Make sure our bookkeeping files never get committed.
    for filename in _GIT_IGNORE:
        epab.utils.add_to_gitignore(filename)
    # Unless explicitly allowed via --dirty, refuse to run on a dirty
    # repository.
    if not dirty and CTX.repo.is_dirty():
        LOGGER.error('Repository is dirty')
        sys.exit(-1)
This is a tool that handles all the tasks to build a Python application This tool is installed as a setuptools entry point, which means it should be accessible from your terminal once this application is installed in develop mode.
def bake(self):
    """Resolve absolute times for every key.

    ``get_absolute_time`` stores each key's absolute time in the
    KeyFrame dictionary (under ``__abs_time__``); afterwards the object
    is flagged as baked.
    """
    self.unbake()
    for key_name in self.dct:
        self.get_absolute_time(key_name)
    self.is_baked = True
Find absolute times for all keys. Absolute time is stored in the KeyFrame dictionary as the variable __abs_time__.
def set_affiliation(self, mucjid, jid, affiliation, *, reason=None):
    """Change the affiliation of `jid` with the MUC at bare JID `mucjid`.

    :param mucjid: The bare JID identifying the MUC.
    :param jid: The bare JID of the entity whose affiliation shall be
        changed.
    :param affiliation: The new affiliation for the entity.
    :param reason: Optional reason for the affiliation change.
    :raises ValueError: if `mucjid` is not a bare JID, or `jid` or
        `affiliation` is :data:`None`.

    The coroutine returns once the server has acknowledged the change;
    server-side errors surface from :meth:`client.send`.
    """
    if mucjid is None or not mucjid.is_bare:
        raise ValueError("mucjid must be bare JID")

    if jid is None:
        raise ValueError("jid must not be None")

    if affiliation is None:
        raise ValueError("affiliation must not be None")

    # Affiliation changes are performed with a muc#admin IQ set.
    iq = aioxmpp.stanza.IQ(
        type_=aioxmpp.structs.IQType.SET,
        to=mucjid
    )
    iq.payload = muc_xso.AdminQuery(
        items=[
            muc_xso.AdminItem(jid=jid,
                              reason=reason,
                              affiliation=affiliation)
        ]
    )

    # Completes when the IQ result (or error) arrives.
    yield from self.client.send(iq)
Change the affiliation of an entity with a MUC. :param mucjid: The bare JID identifying the MUC. :type mucjid: :class:`~aioxmpp.JID` :param jid: The bare JID of the entity whose affiliation shall be changed. :type jid: :class:`~aioxmpp.JID` :param affiliation: The new affiliation for the entity. :type affiliation: :class:`str` :param reason: Optional reason for the affiliation change. :type reason: :class:`str` or :data:`None` Change the affiliation of the given `jid` with the MUC identified by the bare `mucjid` to the given new `affiliation`. Optionally, a `reason` can be given. If you are joined in the MUC, :meth:`Room.muc_set_affiliation` may be more convenient, but it is possible to modify the affiliations of a MUC without being joined, given sufficient privileges. Setting the different affiliations requires different privileges of the local user. The details can be checked in :xep:`0045` and are enforced solely by the server, not local code. The coroutine returns when the change in affiliation has been acknowledged by the server. If the server returns an error, an appropriate :class:`aioxmpp.errors.XMPPError` subclass is raised.
def durations(self) -> List[datetime.timedelta]:
    """Return the duration of each interval in our list, in order."""
    return [interval.duration() for interval in self.intervals]
Returns a list of ``datetime.timedelta`` objects representing the durations of each interval in our list.
def get_repeated_menu_item(
    self, current_page, current_site, apply_active_classes,
    original_menu_tag, request=None, use_absolute_page_urls=False,
):
    """Return a copy of this item customised for 'repeated' display.

    The copy carries the repeated-item text, the resolved URL (absolute
    or site-relative), an active class when this page is the current
    one, and never exposes children or a sub menu.
    """
    item = copy(self)
    item.text = self.get_text_for_repeated_menu_item(
        request, current_site, original_menu_tag
    )

    if use_absolute_page_urls:
        item.href = self.get_full_url(request=request)
    else:
        item.href = self.relative_url(current_site)

    item.active_class = ''
    if apply_active_classes and self == current_page:
        item.active_class = settings.ACTIVE_CLASS

    item.has_children_in_menu = False
    item.sub_menu = None
    return item
Return something that can be used to display a 'repeated' menu item for this specific page.
def can_delete_post(self, post, user):
    """Return whether *user* may delete the given forum *post*.

    Deletion is allowed for superusers, for authors holding the
    'can_delete_own_posts' permission on the post's forum, and for
    anyone holding 'can_delete_posts' on that forum.
    """
    checker = self._get_checker(user)
    is_author = self._is_post_author(post, user)
    forum = post.topic.forum

    if user.is_superuser:
        return True
    if is_author and checker.has_perm('can_delete_own_posts', forum):
        return True
    return checker.has_perm('can_delete_posts', forum)
Given a forum post, checks whether the user can delete the latter.
def payload(self):
    """Extract and return the deserialized object, or None on failure."""
    try:
        rdf_cls = self.classes.get(self.name)
        if not rdf_cls:
            return None
        value = rdf_cls.FromSerializedString(self.data)
        value.age = self.embedded_age
        return value
    except TypeError:
        # Deserialization failed; treat as "no payload".
        return None
Extracts and returns the serialized object.
def make_posthook(self):
    """Run the post hook inside the project directory, if one is set."""
    # Removed leftover development artifacts: two debug print() calls and
    # a committed 'import ipdb; ipdb.set_trace()' breakpoint that would
    # halt (or crash, if ipdb is absent) any non-interactive run.
    if self.posthook:
        os.chdir(self.project_name)
        self.posthook()
Run the post hook into the project directory.
def fill_A(A, right_eigenvectors):
    """Construct a feasible initial guess for transformation matrix A.

    Parameters
    ----------
    A : ndarray
        Possibly non-feasible transformation matrix.
    right_eigenvectors : ndarray
        Right eigenvectors of the transition matrix.

    Returns
    -------
    A : ndarray
        Feasible transformation matrix.
    """
    # Removed the unused 'num_micro, num_eigen = right_eigenvectors.shape'
    # unpacking; neither value was referenced.
    feasible = A.copy()
    # Make rows 1..n sum to zero through their first-column entry.
    feasible[1:, 0] = -1 * feasible[1:, 1:].sum(1)
    # First row derived from the right eigenvectors (column-wise minima,
    # negated) -- the PCCA-style feasibility construction.
    feasible[0] = -1 * dot(right_eigenvectors[:, 1:].real, feasible[1:]).min(0)
    # Normalize so the first row sums to one.
    feasible /= feasible[0].sum()
    return feasible
Construct feasible initial guess for transformation matrix A. Parameters ---------- A : ndarray Possibly non-feasible transformation matrix. right_eigenvectors : ndarray Right eigenvectors of transition matrix Returns ------- A : ndarray Feasible transformation matrix.
def maximize(self, reaction):
    """Solve the model, maximizing the given reaction.

    If reaction is a dictionary object, each entry is interpreted as a
    weight on the objective for that reaction (non-existent reactions
    have zero weight).
    """
    objective = self.flux_expr(reaction)
    self._prob.set_objective(objective)
    self._solve()
Solve the model by maximizing the given reaction. If reaction is a dictionary object, each entry is interpreted as a weight on the objective for that reaction (non-existent reaction will have zero weight).
def options(allow_partial=False, read=False):
    """Return the parsed command-line options namespace.

    :param bool allow_partial: if True, ignore unrecognized arguments and
        allow the options to be re-parsed on a later call; also skips the
        overwrite check.
    :param bool read: if True, do not create or overwrite ``config.json``
        (for scripts that only read from the run directory).
    :return argparse.Namespace: values parsed by the module's parser.
    """
    global _options
    if allow_partial:
        # Partial parse: tolerate unknown args and do not cache the
        # result, so a later full parse can still happen.
        opts, extras = _options_parser.parse_known_args()
        if opts.run_dir:
            mkdirp(opts.run_dir)
        return opts
    if _options is None:
        # -h is added only for the full parse so partial parses never
        # trigger help/exit.
        _options_parser.add_argument('-h', '--help', action='help', default=argparse.SUPPRESS, help='show this help message and exit')
        _options = _options_parser.parse_args()
        if _options.run_dir:
            mkdirp(_options.run_dir, overwrite=_options.overwrite or read)
            if not read:
                # Persist the effective options (minus bookkeeping flags)
                # for reproducibility.
                options_dump = vars(_options)
                del options_dump['overwrite']
                del options_dump['config']
                dump_pretty(options_dump, 'config.json')
    return _options
Get the object containing the values of the parsed command line options. :param bool allow_partial: If `True`, ignore unrecognized arguments and allow the options to be re-parsed next time `options` is called. This also suppresses overwrite checking (the check is performed the first time `options` is called with `allow_partial=False`). :param bool read: If `True`, do not create or overwrite a `config.json` file, and do not check whether such file already exists. Use for scripts that read from the run directory rather than/in addition to writing to it. :return argparse.Namespace: An object storing the values of the options specified to the parser returned by `get_options_parser()`.
def _config_options(self): self._config_sortable(self._sortable) self._config_drag_cols(self._drag_cols)
Apply options set in attributes to Treeview
def get_template_by_name(name, **kwargs):
    """Get a specific resource template, by name.

    :raises HydraError: if no template with that name exists.
    """
    query = db.DBSession.query(Template).filter(
        Template.name == name
    ).options(
        joinedload_all('templatetypes.typeattrs.default_dataset.metadata')
    )
    try:
        return query.one()
    except NoResultFound:
        log.info("%s is not a valid identifier for a template", name)
        raise HydraError('Template "%s" not found' % name)
Get a specific resource template, by name.
def bar_(self, label=None, style=None, opts=None, options=None):
    """Get a bar chart.

    :param label: optional chart label
    :param style: optional style
    :param opts: optional chart options
    :param options: optional extra options dict (defaults to empty)
    """
    # Fixed mutable default argument: 'options={}' was shared across
    # calls, so one caller mutating it would affect every later call.
    if options is None:
        options = {}
    try:
        return self._get_chart("bar", style=style, opts=opts, label=label, options=options)
    except Exception as e:
        self.err(e, self.bar_, "Can not draw bar chart")
Get a bar chart
def engage(self, height: float = None, offset: float = None):
    """Raise the Magnetic Module's magnets.

    - With ``height``: raise to that distance in mm from the magnets'
      home position.
    - Otherwise, use the loaded labware's default engage height,
      optionally adjusted by ``offset`` (in mm; positive is higher,
      negative lower).

    :param height: The height to raise the magnets to, in mm from home.
    :param offset: Offset relative to the labware default height, in mm.
    :raises ValueError: if no height is given and the loaded labware
        (or no labware) has no default engage height.
    """
    # Compare against None explicitly so an explicit height/offset of 0
    # is honored instead of silently falling through to the labware
    # default (previously 'if height:' treated 0 as "not given").
    if height is not None:
        dist = height
    elif self.labware and self.labware.magdeck_engage_height is not None:
        dist = self.labware.magdeck_engage_height
        if offset is not None:
            dist += offset
    else:
        raise ValueError(
            "Currently loaded labware {} does not have a known engage "
            "height; please specify explicitly with the height param"
            .format(self.labware))
    self._module.engage(dist)
Raise the Magnetic Module's magnets. The destination of the magnets can be specified in several different ways, based on internally stored default heights for labware: - If neither `height` nor `offset` is specified, the magnets will raise to a reasonable default height based on the specified labware. - If `height` is specified, it should be a distance in mm from the home position of the magnets. - If `offset` is specified, it should be an offset in mm from the default position. A positive number moves the magnets higher and a negative number moves the magnets lower. Only certain labwares have defined engage heights for the Magnetic Module. If a labware that does not have a defined engage height is loaded on the Magnetic Module (or if no labware is loaded), then `height` must be specified. :param height: The height to raise the magnets to, in mm from home. :param offset: An offset relative to the default height for the labware in mm
def log_init(level):
    """Set up the root logger to emit all records on a stream handler.

    This is used to set up logging when testing.
    """
    root = logging.getLogger()
    handler = logging.StreamHandler()
    handler.setFormatter(
        logging.Formatter('%(asctime)s %(name)s %(levelname)s %(message)s'))
    root.addHandler(handler)
    root.setLevel(level)
Set up a logger that catches all channels and logs it to stdout. This is used to set up logging when testing.
def _get_document_data(database, document):
    """A safer version of ``Xapian.document.get_data``.

    Wraps the Xapian call and, on `Xapian.DatabaseModifiedError`,
    reopens the database and retries the read once.

    Required arguments:
    `database` -- The database to be read
    `document` -- An instance of an Xapian.document object
    """
    try:
        return document.get_data()
    except xapian.DatabaseModifiedError:
        # The index changed under us; reopen and retry once.
        database.reopen()
        return document.get_data()
A safer version of Xapian.document.get_data Simply wraps the Xapian version and catches any `Xapian.DatabaseModifiedError`, attempting a `database.reopen` as needed. Required arguments: `database` -- The database to be read `document` -- An instance of an Xapian.document object
def format_info(info_list):
    """Align a table of bytes cells into a list of padded bytes lines.

    Every column except the last is right-padded with spaces to that
    column's maximum width; cells are then joined with single spaces and
    each line is newline-terminated. Unlike the previous version, the
    input rows are NOT modified in place.

    :param info_list: list of rows, each a list of ``bytes`` cells (all
        rows are assumed to have the same number of columns).
    :return: list of ``bytes`` lines.
    """
    if not info_list:
        return []

    column_count = len(info_list[0])
    widths = [max(len(row[i]) for row in info_list)
              for i in range(column_count)]

    lines = []
    for row in info_list:
        # Pad everything but the last cell to its column width.
        padded = [cell + b' ' * (widths[i] - len(cell))
                  for i, cell in enumerate(row[:-1])]
        lines.append(b' '.join(padded + [row[-1]]) + b'\n')
    return lines
Turn a 2-dimension list of bytes into a 1-dimension list of bytes with correct spacing
def _html_image(page):
    """Return an HTML img tag for the page's image, or None if it has none."""
    source = _image(page)
    if not source:
        return None
    alt = page.data.get('label') or page.data.get('title')
    return ('<img src="%s"' % source
            + ' alt="%s" title="%s" ' % (alt, alt)
            + 'align="right" width="240">')
returns HTML img tag
def init(port=8813, numRetries=10, host="localhost", label="default"):
    """Establish a connection to a TraCI-Server and store it under the
    given label. This method is not thread-safe. It accesses the
    connection pool concurrently.

    :return: the result of ``getVersion()`` on the new connection.
    """
    _connections[label] = connect(port, numRetries, host)
    # Make the new connection the active one.
    switch(label)
    return getVersion()
Establish a connection to a TraCI-Server and store it under the given label. This method is not thread-safe. It accesses the connection pool concurrently.
def _send(self, command):
    """Send a raw line to the server.

    :param command: line to send.
    :type command: unicode
    """
    # Log before encoding: concatenating the text prefix with the
    # already-encoded bytes ('>> ' + b'...') raises TypeError on
    # Python 3.
    log.debug('>> ' + command)
    self.conn.oqueue.put(command.encode('utf-8'))
Sends a raw line to the server. :param command: line to send. :type command: unicode
def _ReadEventDataIntoEvent(self, event):
    """Reads the data into the event.

    This function is intended to offer backwards compatible event
    behavior: every attribute of the event data identified by the event
    is copied onto the event itself. Only applies to session storage.

    Args:
      event (EventObject): event.
    """
    if self._storage_type != definitions.STORAGE_TYPE_SESSION:
        return

    event_data_identifier = event.GetEventDataIdentifier()
    if not event_data_identifier:
        return

    lookup_key = event_data_identifier.CopyToString()
    event_data = self._event_data[lookup_key]
    for name, value in event_data.GetAttributes():
        setattr(event, name, value)
Reads the data into the event. This function is intended to offer backwards compatible event behavior. Args: event (EventObject): event.
def get_results(self) -> Iterable[PluginScanResult]:
    """Return the results of previously queued scan commands; new
    commands cannot be queued once this is called.

    Yields each result as it comes off the result queue. Unexpected
    worker errors arrive as regular results (see the plugin's
    exception-result type).
    """
    # Post one None sentinel per generic worker so each terminates.
    for _ in range(self._get_current_processes_nb()):
        self._task_queue.put(None)

    # Same for the per-hostname worker queues.
    for hostname, hostname_queue in self._hostname_queues_dict.items():
        for i in range(len(self._processes_dict[hostname])):
            hostname_queue.put(None)

    received_task_results = 0
    # Each worker reports one final None marker in addition to its task
    # results, hence tasks + processes.
    expected_task_results = self._queued_tasks_nb + self._get_current_processes_nb()
    while received_task_results != expected_task_results:
        result = self._result_queue.get()
        self._result_queue.task_done()
        received_task_results += 1
        if result is None:
            # Worker termination marker - not an actual scan result.
            pass
        else:
            yield result

    # Drain/join all queues and worker processes before returning.
    self._task_queue.join()
    self._result_queue.join()
    for hostname_queue in self._hostname_queues_dict.values():
        hostname_queue.join()
    for process_list in self._processes_dict.values():
        for process in process_list:
            process.join()
Return the result of previously queued scan commands; new commands cannot be queued once this is called. Returns: The results of all the scan commands previously queued. Each result will be an instance of the scan corresponding command's PluginScanResult subclass. If there was an unexpected error while running the scan command, it will be a 'PluginRaisedExceptionScanResult' instance instead.
def chocolatey_version():
    """
    Returns the version of Chocolatey installed on the minion, caching
    the result in ``__context__``.

    CLI Example:

    .. code-block:: bash

        salt '*' chocolatey.chocolatey_version
    """
    if 'chocolatey._version' not in __context__:
        cmd = [_find_chocolatey(__context__, __salt__), '-v']
        __context__['chocolatey._version'] = __salt__['cmd.run'](
            cmd, python_shell=False)
    return __context__['chocolatey._version']
Returns the version of Chocolatey installed on the minion. CLI Example: .. code-block:: bash salt '*' chocolatey.chocolatey_version
def register_command(parent_command, name):
    """Create and register a command with a parent command.

    Args
    ----
    parent_command : Command
        The parent command.
    name : str
        Name given to the created Command instance.

    Returns a decorator that wraps the function into a Command, registers
    it as a subcommand of ``parent_command``, and returns the new Command
    so the decorated name remains usable.
    """
    def wrapper(func):
        c = command(name)(func)
        parent_command.add_subcommand(c)
        # Bug fix: wrapper previously returned nothing, so the decorated
        # name was silently rebound to None.
        return c
    return wrapper
Create and register a command with a parent command. Args ---- parent_command : Command The parent command. name : str Name given to the created Command instance. Example ------- .. testcode:: mygit = Command(name='status') @register_command(mygit, 'status') def status(): print 'Nothing to commit.' .. doctest:: :hide: >>> mygit.init() >>> mygit.parse_args(['status']) Nothing to commit.
def select_many_with_correspondence(
        self, collection_selector=identity, result_selector=KeyedElement):
    """Project each element into an intermediate sequence, flatten the
    results into one sequence, and pair each flattened element with its
    originating source element via ``result_selector`` (``KeyedElement``
    namedtuples by default).

    Note: This method uses deferred execution.

    Raises:
        ValueError: If this Queryable has been closed.
        TypeError: If either selector is not callable.
    """
    if self.closed():
        raise ValueError("Attempt to call "
            "select_many_with_correspondence() on a closed Queryable.")

    for selector, role in ((collection_selector, "projector"),
                           (result_selector, "selector")):
        if not is_callable(selector):
            raise TypeError("select_many_with_correspondence() parameter "
                "{0}={1} is not callable".format(role, repr(selector)))

    return self._create(
        self._generate_select_many_with_correspondence(collection_selector,
                                                       result_selector))
Projects each element of a sequence to an intermediate new sequence, and flattens the resulting sequence, into one sequence and uses a selector function to incorporate the corresponding source for each item in the result sequence. Note: This method uses deferred execution. Args: collection_selector: A unary function mapping each element of the source iterable into an intermediate sequence. The single argument of the collection_selector is the value of an element from the source sequence. The return value should be an iterable derived from that element value. The default collection_selector, which is the identity function, assumes that each element of the source sequence is itself iterable. result_selector: An optional binary function mapping the elements in the flattened intermediate sequence together with their corresponding source elements to elements of the result sequence. The two positional arguments of the result_selector are, first the source element corresponding to an element from the intermediate sequence, and second the actual element from the intermediate sequence. The return value should be the corresponding value in the result sequence. If no result_selector function is provided, the elements of the result sequence are KeyedElement namedtuples. Returns: A Queryable over a generated sequence whose elements are the result of applying the one-to-many collection_selector to each element of the source sequence, concatenating the results into an intermediate sequence, and then mapping each of those elements through the result_selector which incorporates the corresponding source element into the result sequence. Raises: ValueError: If this Queryable has been closed. TypeError: If projector or selector are not callable.
def _FindInFileEntry(self, file_entry, find_specs, search_depth):
  """Searches for matching file entries within the file entry.

  Args:
    file_entry (FileEntry): file entry.
    find_specs (list[FindSpec]): find specifications.
    search_depth (int): number of location path segments to compare.

  Yields:
    PathSpec: path specification of a matching file entry.
  """
  sub_find_specs = []
  for find_spec in find_specs:
    match, location_match = find_spec.Matches(file_entry, search_depth)
    if match:
      yield file_entry.path_spec

    # Pass the spec down to children unless the location comparison
    # failed outright or the spec is already at its maximum depth.
    if location_match != False and not find_spec.AtMaximumDepth(search_depth):
      sub_find_specs.append(find_spec)

  if not sub_find_specs:
    return

  search_depth += 1

  try:
    for sub_file_entry in file_entry.sub_file_entries:
      for matching_path_spec in self._FindInFileEntry(
          sub_file_entry, sub_find_specs, search_depth):
        yield matching_path_spec
  except errors.AccessError:
    # Skip subdirectories we cannot access and continue the search.
    pass
Searches for matching file entries within the file entry. Args: file_entry (FileEntry): file entry. find_specs (list[FindSpec]): find specifications. search_depth (int): number of location path segments to compare. Yields: PathSpec: path specification of a matching file entry.
def decrease_exponent_to(self, new_exp):
    """Return an equivalent number re-encoded with a lower exponent.

    Multiplying the encoded value by ``EncodedNumber.BASE`` once per
    exponent decrement leaves the decoded value unchanged, so the
    exponent can be ratcheted down (e.g. to match exponents before
    addition, or to hide precision information).

    Args:
        new_exp (int): the desired exponent; must not exceed the
            current exponent.

    Returns:
        Instance with the same plaintext and the desired exponent.

    Raises:
        ValueError: You tried to increase the exponent.
    """
    if new_exp > self.exponent:
        raise ValueError('New exponent %i should be more negative than '
                         'old exponent %i' % (new_exp, self.exponent))
    factor = pow(EncodedNumber.BASE, self.exponent - new_exp)
    result = self * factor
    result.exponent = new_exp
    return result
Return an EncryptedNumber with same value but lower exponent. If we multiply the encoded value by :attr:`EncodedNumber.BASE` and decrement :attr:`exponent`, then the decoded value does not change. Thus we can almost arbitrarily ratchet down the exponent of an `EncryptedNumber` - we only run into trouble when the encoded integer overflows. There may not be a warning if this happens. When adding `EncryptedNumber` instances, their exponents must match. This method is also useful for hiding information about the precision of numbers - e.g. a protocol can fix the exponent of all transmitted `EncryptedNumber` instances to some lower bound(s). Args: new_exp (int): the desired exponent. Returns: EncryptedNumber: Instance with the same plaintext and desired exponent. Raises: ValueError: You tried to increase the exponent.
def _apply_filters(self): filters = [] for f in self._filters: filters.append('{}{}{}'.format(f['name'], f['operator'], f['value'])) self.tcex.log.debug(u'filters: {}'.format(filters)) if filters: self._request.add_payload('filters', ','.join(filters))
Apply any filters added to the resource.