code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def unset_sentry_context(self, tag):
    """Remove a context tag from sentry.

    No-op when no sentry client is configured or the tag is absent.

    :param tag: The context tag to remove
    :type tag: :class:`str`
    """
    client = self.sentry_client
    if not client:
        return
    client.tags.pop(tag, None)
Remove a context tag from sentry :param tag: The context tag to remove :type tag: :class:`str`
def _query_helper(self, by=None): if by is None: primary_keys = self.table.primary_key.columns.keys() if len(primary_keys) > 1: warnings.warn("WARNING: MORE THAN 1 PRIMARY KEY FOR TABLE %s. " "USING THE FIRST KEY %s." % ...
Internal helper for preparing queries.
def add(self, logical_id, deployment_preference_dict): if logical_id in self._resource_preferences: raise ValueError("logical_id {logical_id} previously added to this deployment_preference_collection".format( logical_id=logical_id)) self._resource_preferences[logical_id] = De...
Add this deployment preference to the collection :raise ValueError if an existing logical id already exists in the _resource_preferences :param logical_id: logical id of the resource where this deployment preference applies :param deployment_preference_dict: the input SAM template deployment pr...
def _merge_many_to_one_field_from_fkey(self, main_infos, prop, result): if prop.columns[0].foreign_keys and prop.key.endswith('_id'): rel_name = prop.key[0:-3] for val in result: if val["name"] == rel_name: val["label"] = main_infos['label'] ...
Find the relationship associated with this fkey and set the title :param dict main_infos: The already collected datas about this column :param obj prop: The property mapper of the relationship :param list result: The actual collected headers :returns: a main_infos dict or None
def get_ldict_keys(ldict, flatten_keys=False, **kwargs):
    """Get first-level keys from a list of dicts.

    Non-dict entries are ignored.  When ``flatten_keys`` is true, each dict
    is first passed through ``flatten`` (with ``**kwargs``) so nested keys
    are collected too.

    :param ldict: list of (possibly mixed) items to scan for dicts
    :param flatten_keys: flatten each dict before collecting its keys
    :returns: de-duplicated list of keys (order not guaranteed)
    """
    keys = set()
    for entry in ldict:
        if not isinstance(entry, dict):
            continue
        if flatten_keys:
            entry = flatten(entry, **kwargs)
        keys.update(entry.keys())
    return list(keys)
Get first level keys from a list of dicts
def qnwgamma(n, a=1.0, b=1.0, tol=3e-14): return _make_multidim_func(_qnwgamma1, n, a, b, tol)
Computes nodes and weights for gamma distribution Parameters ---------- n : int or array_like(float) A length-d iterable of the number of nodes in each dimension a : scalar or array_like(float) : optional(default=ones(d)) Shape parameter of the gamma distribution parameter. Must be pos...
def permission_denied(request, template_name=None, extra_context=None): if template_name is None: template_name = ('403.html', 'authority/403.html') context = { 'request_path': request.path, } if extra_context: context.update(extra_context) return HttpResponseForbidden(loader...
Default 403 handler. Templates: `403.html` Context: request_path The path of the requested URL (e.g., '/app/pages/bad_page/')
def reset(self):
    """Reset the throttling duration.

    Cancels any pending delayed call, clears the handler reference, and
    then invokes the wait-done callback.
    """
    handler = self._call_later_handler
    if handler is not None:
        handler.cancel()
        self._call_later_handler = None
    self._wait_done_cb()
Resetting the duration for throttling
def object_build_function(node, member, localname): args, varargs, varkw, defaults = inspect.getargspec(member) if varargs is not None: args.append(varargs) if varkw is not None: args.append(varkw) func = build_function( getattr(member, "__name__", None) or localname, args, defau...
create astroid for a living function object
def send_frame(self, cmd, headers=None, body=''):
    """Encode and send a stomp frame through the underlying transport.

    :param str cmd: the protocol command
    :param dict headers: a map of headers to include in the frame
    :param body: the content of the message
    """
    frame = utils.Frame(cmd, headers, body)
    self.transport.transmit(frame)
Encode and send a stomp frame through the underlying transport. :param str cmd: the protocol command :param dict headers: a map of headers to include in the frame :param body: the content of the message
def consolidate_metadata(store, metadata_key='.zmetadata'): store = normalize_store_arg(store) def is_zarr_key(key): return (key.endswith('.zarray') or key.endswith('.zgroup') or key.endswith('.zattrs')) out = { 'zarr_consolidated_format': 1, 'metadata': { ...
Consolidate all metadata for groups and arrays within the given store into a single resource and put it under the given key. This produces a single object in the backend store, containing all the metadata read from all the zarr-related keys that can be found. After metadata have been consolidated, use ...
def run(self, cmd):
    """Profile a single executable statement in the ``__main__`` namespace.

    :param cmd: statement string to execute under the profiler
    :returns: whatever ``runctx`` returns
    """
    import __main__
    namespace = vars(__main__)
    # Run with __main__'s dict as both globals and locals, mirroring exec().
    return self.runctx(cmd, namespace, namespace)
Profile a single executable statement in the main namespace.
def _getOverlay(self, readDataInstance, sectionHdrsInstance): if readDataInstance is not None and sectionHdrsInstance is not None: try: offset = sectionHdrsInstance[-1].pointerToRawData.value + sectionHdrsInstance[-1].sizeOfRawData.value readDataInstance.s...
Returns the overlay data from the PE file. @type readDataInstance: L{ReadData} @param readDataInstance: A L{ReadData} instance containing the PE file data. @type sectionHdrsInstance: L{SectionHeaders} @param sectionHdrsInstance: A L{SectionHeaders} instance containing t...
def strip_empty_values(obj): if isinstance(obj, dict): new_obj = {} for key, val in obj.items(): new_val = strip_empty_values(val) if new_val is not None: new_obj[key] = new_val return new_obj or None elif isinstance(obj, (list, tuple, set)): ...
Recursively strips empty values.
def update_from(self, res_list):
    """Update the Name-URI cache from the provided resource list.

    Goes through the resource list and updates the cache entry for each
    resource, reading name and URI from the manager-configured property
    names (missing properties yield ``None``).  Other cache entries
    remain unchanged.

    :param res_list: iterable of resources with a ``properties`` dict
    """
    # Hoist the property-name lookups out of the loop; they are invariant.
    name_prop = self._manager._name_prop
    uri_prop = self._manager._uri_prop
    for res in res_list:
        props = res.properties
        self.update(props.get(name_prop), props.get(uri_prop))
Update the Name-URI cache from the provided resource list. This is done by going through the resource list and updating any cache entries for non-empty resource names in that list. Other cache entries remain unchanged.
def OnDoubleClick(self, event): node = HotMapNavigator.findNodeAtPosition(self.hot_map, event.GetPosition()) if node: wx.PostEvent( self, SquareActivationEvent( node=node, point=event.GetPosition(), map=self ) )
Double click on a given square in the map
def batch_run(self, *commands): original_retries = self.repeat_commands self.repeat_commands = 1 for _ in range(original_retries): for command in commands: cmd = command[0] args = command[1:] cmd(*args) self.repeat_commands = or...
Run batch of commands in sequence. Input is positional arguments with (function pointer, *args) tuples. This method is useful for executing commands to multiple groups with retries, without having too long delays. For example, - Set group 1 to red and brightness to 10%...
def delete_network(self, network):
    """Enqueue a network delete operation.

    Wraps the network id in a :class:`MechResource` tagged as a NETWORK
    DELETE and places it on the provisioning queue for asynchronous
    processing.

    :param network: network dict; only ``network['id']`` is read here.
    """
    n_res = MechResource(network['id'], a_const.NETWORK_RESOURCE, a_const.DELETE)
    self.provision_queue.put(n_res)
Enqueue network delete
def _rearrange_output_for_package(self, target_workdir, java_package): package_dir_rel = java_package.replace('.', os.path.sep) package_dir = os.path.join(target_workdir, package_dir_rel) safe_mkdir(package_dir) for root, dirs, files in safe_walk(target_workdir): if root == package_dir_rel: ...
Rearrange the output files to match a standard Java structure. Antlr emits a directory structure based on the relative path provided for the grammar file. If the source root of the file is different from the Pants build root, then the Java files end up with undesired parent directories.
def date_string_to_date(p_date): result = None if p_date: parsed_date = re.match(r'(\d{4})-(\d{2})-(\d{2})', p_date) if parsed_date: result = date( int(parsed_date.group(1)), int(parsed_date.group(2)), int(parsed_date.group(3)) ...
Given a date in YYYY-MM-DD, returns a Python date object. Throws a ValueError if the date is invalid.
def RemoveWifiConnection(self, dev_path, connection_path): dev_obj = dbusmock.get_object(dev_path) settings_obj = dbusmock.get_object(SETTINGS_OBJ) connections = dev_obj.Get(DEVICE_IFACE, 'AvailableConnections') main_connections = settings_obj.ListConnections() if connection_path not in connections ...
Remove the specified WiFi connection. You have to specify the device to remove the connection from, and the path of the Connection. Please note that this does not set any global properties.
def lookup(ctx, path): regions = parse_intervals(path, as_context=ctx.obj['semantic']) _report_from_regions(regions, ctx.obj)
Determine which tests intersect a source interval.
def _GetPathSegmentSeparator(self, path): if path.startswith('\\') or path[1:].startswith(':\\'): return '\\' if path.startswith('/'): return '/' if '/' and '\\' in path: forward_count = len(path.split('/')) backward_count = len(path.split('\\')) if forward_count > backward_cou...
Given a path give back the path separator as a best guess. Args: path (str): path. Returns: str: path segment separator.
def round_sf(number, digits): units = None try: num = number.magnitude units = number.units except AttributeError: num = number try: if (units != None): rounded_num = round(num, digits - int(floor(log10(abs(num)))) - 1) * units else: rounde...
Returns inputted value rounded to number of significant figures desired. :param number: Value to be rounded :type number: float :param digits: number of significant digits to be rounded to. :type digits: int
def _pseudodepths_wenner(configs, spacing=1, grid=None): if grid is None: xpositions = (configs - 1) * spacing else: xpositions = grid.get_electrode_positions()[configs - 1, 0] z = np.abs(np.max(xpositions, axis=1) - np.min(xpositions, axis=1)) * -0.11 x = np.mean(xpositions, axis=1) ...
Given distances between electrodes, compute Wenner pseudo depths for the provided configuration The pseudodepth is computed after Roy & Apparao, 1971, as 0.11 times the distance between the two outermost electrodes. It's not really clear why the Wenner depths are different from the Dipole-Dipole de...
def user_object( element_name, cls, child_processors, required=True, alias=None, hooks=None ): converter = _user_object_converter(cls) processor = _Aggregate(element_name, converter, child_processors, required, alias) return _processor_wrap_if_hooks(processor,...
Create a processor for user objects. :param cls: Class object with a no-argument constructor or other callable no-argument object. See also :func:`declxml.dictionary`
def _get_client(): client = salt.cloud.CloudClient( os.path.join(os.path.dirname(__opts__['conf_file']), 'cloud'), pillars=copy.deepcopy(__pillar__.get('cloud', {})) ) return client
Return a cloud client
def toc_directive(self, maxdepth=1): articles_directive_content = TC.toc.render( maxdepth=maxdepth, article_list=self.sub_article_folders, ) return articles_directive_content
Generate toctree directive text. :param table_of_content_header: :param header_bar_char: :param header_line_length: :param maxdepth: :return:
def rethreshold(self, new_threshold, new_threshold_type='MAD'): for family in self.families: rethresh_detections = [] for d in family.detections: if new_threshold_type == 'MAD' and d.threshold_type == 'MAD': new_thresh = (d.threshold / ...
Remove detections from the Party that are below a new threshold. .. Note:: threshold can only be set higher. .. Warning:: Works in place on Party. :type new_threshold: float :param new_threshold: New threshold level :type new_threshold_type: str :param new_...
def _match_setters(self, query): q = query.decode('utf-8') for name, parser, response, error_response in self._setters: try: parsed = parser(q) logger.debug('Found response in setter of %s' % name) except ValueError: continue ...
Try to find a match
def get_all_items_of_delivery_note(self, delivery_note_id): return self._iterate_through_pages( get_function=self.get_items_of_delivery_note_per_page, resource=DELIVERY_NOTE_ITEMS, **{'delivery_note_id': delivery_note_id} )
Get all items of delivery note This will iterate over all pages until it gets all elements. So if the rate limit exceeded it will throw an Exception and you will get nothing :param delivery_note_id: the delivery note id :return: list
def one_to_many(df, unitcol, manycol): subset = df[[manycol, unitcol]].drop_duplicates() for many in subset[manycol].unique(): if subset[subset[manycol] == many].shape[0] > 1: msg = "{} in {} has multiple values for {}".format(many, manycol, unitcol) raise AssertionError(msg) ...
Assert that a many-to-one relationship is preserved between two columns. For example, a retail store will have distinct departments, each with several employees. If each employee may only work in a single department, then the relationship of the department to the employees is one to many. Para...
def render_generator(self, context, result): context.response.encoding = 'utf8' context.response.app_iter = ( (i.encode('utf8') if isinstance(i, unicode) else i) for i in result if i is not None ) return True
Attempt to serve generator responses through stream encoding. This allows for direct use of cinje template functions, which are generators, as returned views.
def debug(self):
    """Launch a post-mortem debug shell at the site of the error.

    Prefers ``ipdb`` when it is installed, falling back to the stdlib
    ``pdb`` otherwise.  Only the import itself is guarded: in the
    original, the whole ``post_mortem`` call sat inside the ``try``, so
    an ImportError raised *inside* an ipdb session was swallowed and the
    session silently restarted under pdb.
    """
    try:
        debugger = __import__('ipdb')
    except ImportError:
        debugger = __import__('pdb')
    debugger.post_mortem(self.traceback)
Launch a postmortem debug shell at the site of the error.
def get_sla_template_path(service_type=ServiceTypes.ASSET_ACCESS): if service_type == ServiceTypes.ASSET_ACCESS: name = 'access_sla_template.json' elif service_type == ServiceTypes.CLOUD_COMPUTE: name = 'compute_sla_template.json' elif service_type == ServiceTypes.FITCHAIN_COMPUTE: n...
Get the template for a ServiceType. :param service_type: ServiceTypes :return: Path of the template, str
def convert(name):
    """Convert a CamelCase string to snake_case.

    Parameters
    ----------
    name : str
        CamelCase string

    Returns
    -------
    name : str
        Converted name
    """
    # First pass separates a capitalised word from whatever precedes it;
    # second pass splits remaining lower/upper boundaries (handles acronyms).
    first_pass = re.compile(r'(.)([A-Z][a-z]+)').sub(r'\1_\2', name)
    second_pass = re.compile(r'([a-z0-9])([A-Z])').sub(r'\1_\2', first_pass)
    return second_pass.lower()
Convert CamelCase to underscore Parameters ---------- name : str Camelcase string Returns ------- name : str Converted name
def get_task(self, id, client=None): client = self._require_client(client) task = Task(taskqueue=self, id=id) try: response = client.connection.api_request(method='GET', path=task.path, _target_object=task) task._set_properties(response) return task ex...
Gets a named task from taskqueue If the task isn't found (backend 404), raises a :class:`gcloud.exceptions.NotFound`. :type id: string :param id: A task name to get :type client: :class:`gcloud.taskqueue.client.Client` or ``NoneType`` :param client: Optional. The client...
def _parse_hparams(hparams): prefixes = ["agent_", "optimizer_", "runner_", "replay_buffer_"] ret = [] for prefix in prefixes: ret_dict = {} for key in hparams.values(): if prefix in key: par_name = key[len(prefix):] ret_dict[par_name] = hparams.get(key) ret.append(ret_dict) re...
Split hparams, based on key prefixes. Args: hparams: hyperparameters Returns: Tuple of hparams for respectably: agent, optimizer, runner, replay_buffer.
def opt_strip(prefix, opts):
    """Return a copy of *opts* with *prefix* removed from each option name.

    :param prefix: the leading string to strip from option names
    :param opts: dict of option name -> value; names are expected to
        start with ``prefix``
    :returns: dict keyed by the stripped option names
    """
    stripped = {}
    for name, value in opts.items():
        if name.startswith(prefix):
            name = name[len(prefix):]
        stripped[name] = value
    return stripped
Given a dict of opts that start with prefix, remove the prefix from each of them.
def get_settings(self, link): return reverse( 'servicesettings-detail', kwargs={'uuid': link.service.settings.uuid}, request=self.context['request'])
URL of service settings
def get_pubmed_record(pmid):
    """Get a PubMed record from a PubMed ID.

    Queries NCBI via the Entrez ``esummary`` endpoint (network call).

    :param pmid: PubMed identifier to look up.
    :returns: the parsed record as returned by ``Entrez.read``.
    """
    handle = Entrez.esummary(db="pubmed", id=pmid)
    record = Entrez.read(handle)
    return record
Get PubMed record from PubMed ID.
def prep(config=None, path=None): if config is None: config = parse() if path is None: path = os.getcwd() root = config.get('root', 'path') root = os.path.join(path, root) root = os.path.realpath(root) os.environ['SCIDASH_HOME'] = root if sys.path[0] != root: sys.path...
Prepare to read the configuration information.
def link_property(prop, cls_object): register = False cls_name = cls_object.__name__ if cls_name and cls_name != 'RdfBaseClass': new_name = "%s_%s" % (prop._prop_name, cls_name) else: new_name = prop._prop_name new_prop = types.new_class(new_name, (prop...
Generates a property class linked to the rdfclass args: prop: unlinked property class cls_name: the name of the rdf_class with which the property is associated cls_object: the rdf_class
def __hammingDistance(s1, s2): l1 = len(s1) l2 = len(s2) if l1 != l2: raise ValueError("Hamming distance requires strings of same size.") return sum(ch1 != ch2 for ch1, ch2 in zip(s1, s2))
Finds the Hamming distance between two strings. @param s1: string @param s2: string @return: the distance @raise ValueError: if the lengths of the strings differ
def records(self): compounds = ModelList() seen_labels = set() tagged_tokens = [(CONTROL_RE.sub('', token), tag) for token, tag in self.tagged_tokens] for parser in self.parsers: for record in parser.parse(tagged_tokens): p = record.serialize() ...
Return a list of records for this sentence.
def dim(self, dim): contrast = 0 if not dim: if self._vccstate == SSD1306_EXTERNALVCC: contrast = 0x9F else: contrast = 0xCF
Adjusts contrast to dim the display if dim is True, otherwise sets the contrast to normal brightness if dim is False.
def components(self): with self._mutex: if not self._components: self._components = [c for c in self.children if c.is_component] return self._components
The list of components in this manager, if any. This information can also be found by listing the children of this node that are of type @ref Component. That method is more useful as it returns the tree entries for the components.
def extend_request_args(self, args, item_cls, item_type, key, parameters, orig=False): try: item = self.get_item(item_cls, item_type, key) except KeyError: pass else: for parameter in parameters: if orig: ...
Add a set of parameters and their value to a set of request arguments. :param args: A dictionary :param item_cls: The :py:class:`oidcmsg.message.Message` subclass that describes the item :param item_type: The type of item, this is one of the parameter names in the :py:cl...
def ip2long(ip): if not validate_ip(ip): return None quads = ip.split('.') if len(quads) == 1: quads = quads + [0, 0, 0] elif len(quads) < 4: host = quads[-1:] quads = quads[:-1] + [0, ] * (4 - len(quads)) + host lngip = 0 for q in quads: lngip = (lngip <<...
Convert a dotted-quad ip address to a network byte order 32-bit integer. >>> ip2long('127.0.0.1') 2130706433 >>> ip2long('127.1') 2130706433 >>> ip2long('127') 2130706432 >>> ip2long('127.0.0.256') is None True :param ip: Dotted-quad ip address (eg. '127.0.0.1'). :type ip...
def build_index(self, idx_name, _type='default'): "Build the index related to the `name`." indexes = {} has_non_string_values = False for key, item in self.data.items(): if idx_name in item: value = item[idx_name] if not isinstance(value, six.s...
Build the index related to the `name`.
def __continue_session(self):
    """Re-authenticate when the session may have timed out.

    Checks the time elapsed since the last HTTP request; when it meets or
    exceeds the session timeout, attempts to authenticate again.
    """
    now = time.time()
    diff = abs(now - self.last_request_time)
    # session_timeout is expressed in minutes; convert to seconds.
    timeout_sec = self.session_timeout * 60
    if diff >= timeout_sec:
        self.__log('Session timed out, attempting to authenticate')
        self.authenticate()
Check if the time since the last HTTP request is under the session timeout limit. If it's been too long since the last request attempt to authenticate again.
def update(self, sequence):
    """Update the set with the given iterable sequence, then return the
    index of the last element inserted.

    Example:
        >>> oset = OrderedSet([1, 2, 3])
        >>> oset.update([3, 1, 5, 1, 4])
        4
        >>> print(oset)
        OrderedSet([1, 2, 3, 5, 4])
    """
    last_index = None
    try:
        for element in sequence:
            last_index = self.add(element)
    except TypeError:
        # A non-iterable argument surfaces as a TypeError from the loop.
        raise ValueError(
            "Argument needs to be an iterable, got %s" % type(sequence)
        )
    return last_index
Update the set with the given iterable sequence, then return the index of the last element inserted. Example: >>> oset = OrderedSet([1, 2, 3]) >>> oset.update([3, 1, 5, 1, 4]) 4 >>> print(oset) OrderedSet([1, 2, 3, 5, 4])
def verify_calling_thread(self, should_be_emulation, message=None): if should_be_emulation == self._on_emulation_thread(): return if message is None: message = "Operation performed on invalid thread" raise InternalError(message)
Verify if the calling thread is or is not the emulation thread. This method can be called to make sure that an action is being taken in the appropriate context such as not blocking the event loop thread or modifying an emulate state outside of the event loop thread. If the verification...
def _add_months(self, date, months): year = date.year + (date.month + months - 1) // 12 month = (date.month + months - 1) % 12 + 1 return datetime.date(year=year, month=month, day=1)
Add ``months`` months to ``date``. Unfortunately we can't use timedeltas to add months because timedelta counts in days and there's no foolproof way to add N months in days without counting the number of days per month.
def _build_raw_headers(self, headers: Dict) -> Tuple: raw_headers = [] for k, v in headers.items(): raw_headers.append((k.encode('utf8'), v.encode('utf8'))) return tuple(raw_headers)
Convert a dict of headers to a tuple of tuples Mimics the format of ClientResponse.
def register(self, obj): for method in dir(obj): if not method.startswith('_'): fct = getattr(obj, method) try: getattr(fct, '__call__') except AttributeError: pass else: loggi...
register all methods for of an object as json rpc methods obj - object with methods
def service_reload(service_name, restart_on_failure=False, **kwargs):
    """Reload a system service, optionally falling back to restart if
    reload fails.

    :param service_name: the name of the service managed by the init system
    :param restart_on_failure: when True, issue a restart if the reload
        reports failure
    :param kwargs: extra arguments forwarded to the underlying ``service``
        call (init-system specific)
    :returns: the result of the last ``service`` invocation
    """
    reloaded = service('reload', service_name, **kwargs)
    if reloaded:
        return reloaded
    if restart_on_failure:
        return service('restart', service_name, **kwargs)
    return reloaded
Reload a system service, optionally falling back to restart if reload fails. The specified service name is managed via the system level init system. Some init systems (e.g. upstart) require that additional arguments be provided in order to directly control service instances whereas other init syste...
def get_min_sec_from_morning(self): mins = [] for timerange in self.timeranges: mins.append(timerange.get_sec_from_morning()) return min(mins)
Get the first second from midnight where a timerange is effective :return: smallest amount of second from midnight of all timerange :rtype: int
def pwm_max_score(self): if self.max_score is None: score = 0 for row in self.pwm: score += log(max(row) / 0.25 + 0.01) self.max_score = score return self.max_score
Return the maximum PWM score. Returns ------- score : float Maximum PWM score.
def get_area_def(self, dsid): msg = self._get_message(self._msg_datasets[dsid]) try: return self._area_def_from_msg(msg) except (RuntimeError, KeyError): raise RuntimeError("Unknown GRIB projection information")
Get area definition for message. If latlong grid then convert to valid eqc grid.
def Overlay(child, parent): for arg in child, parent: if not isinstance(arg, collections.Mapping): raise DefinitionError("Trying to merge badly defined hints. Child: %s, " "Parent: %s" % (type(child), type(parent))) for attr in ["fix", "format", "problem", "summary"]: if no...
Adds hint attributes to a child hint if they are not defined.
def sort_by(self, fieldName, reverse=False): return self.__class__( sorted(self, key = lambda item : self._get_item_value(item, fieldName), reverse=reverse) )
sort_by - Return a copy of this collection, sorted by the given fieldName. The fieldName is accessed the same way as other filtering, so it supports custom properties, etc. @param fieldName <str> - The name of the field on which to sort by @param reverse <bool> Default False...
def _build_calmar_data(self): assert self.initial_weight_name is not None data = pd.DataFrame() data[self.initial_weight_name] = self.initial_weight * self.filter_by for variable in self.margins_by_variable: if variable == 'total_population': continue ...
Builds the data dictionary used as calmar input argument
def write(self, data): data_off = 0 while data_off < len(data): left = len(self._buf) - self._pos if left <= 0: self._write_packet(final=False) else: to_write = min(left, len(data) - data_off) self._buf[self._pos:self._p...
Writes given bytes buffer into the stream Function returns only when entire buffer is written
def validate_protocol(protocol): if not re.match(PROTOCOL_REGEX, protocol): raise ValueError(f'invalid protocol: {protocol}') return protocol.lower()
Validate a protocol, a string, and return it.
def export_node(bpmn_graph, export_elements, node, nodes_classification, order=0, prefix="", condition="", who="", add_join=False): node_type = node[1][consts.Consts.type] if node_type == consts.Consts.start_event: return BpmnDiagramGraphCsvExport.export_start_event(bpmn_...
General method for node exporting :param bpmn_graph: an instance of BpmnDiagramGraph class, :param export_elements: a dictionary object. The key is a node ID, value is a dictionary of parameters that will be used in exported CSV document, :param node: networkx.Node object, ...
def _trim_buffer_garbage(rawmessage, debug=True):
    """Remove leading bytes from a byte stream.

    A proper message byte stream begins with 0x02; everything before the
    first start byte is dropped one byte at a time, optionally logging
    each trim.

    :param rawmessage: the raw byte buffer to clean
    :param debug: log buffer contents while trimming
    :returns: the buffer starting at the first message start byte (or empty)
    """
    while rawmessage:
        if rawmessage[0] == MESSAGE_START_CODE_0X02:
            break
        if debug:
            _LOGGER.debug('Buffer content: %s', binascii.hexlify(rawmessage))
            _LOGGER.debug('Trimming leading buffer garbage')
        rawmessage = rawmessage[1:]
    return rawmessage
Remove leading bytes from a byte stream. A proper message byte stream begins with 0x02.
def state(self, state): logger.debug('client changing to state=%s', ClientState.Names[state]) self._state = state
Change the state of the client. This is one of the values defined in ClientStates.
def _record_first_run(): info = {'pid': _get_shell_pid(), 'time': time.time()} mode = 'wb' if six.PY2 else 'w' with _get_not_configured_usage_tracker_path().open(mode) as tracker: json.dump(info, tracker)
Records shell pid to tracker file.
def strain_in_plane(self, **kwargs): if self._strain_out_of_plane is not None: return ((self._strain_out_of_plane / -2.) * (self.unstrained.c11(**kwargs) / self.unstrained.c12(**kwargs) ) ) else: return 1 - self.unstrained.a(**kwargs) / ...
Returns the in-plane strain assuming no lattice relaxation, which is positive for tensile strain and negative for compressive strain.
def packet_in_handler(self, evt): msg = evt.msg dpid = msg.datapath.id req_pkt = packet.Packet(msg.data) req_igmp = req_pkt.get_protocol(igmp.igmp) if req_igmp: if self._querier.dpid == dpid: self._querier.packet_in_handler(req_igmp, msg) e...
PacketIn event handler. when the received packet was IGMP, proceed it. otherwise, send a event.
def xack(self, stream, group_name, id, *ids):
    """Acknowledge one or more messages for a given consumer group.

    :param stream: name of the stream
    :param group_name: name of the consumer group
    :param id: first message ID to acknowledge
    :param ids: any additional message IDs to acknowledge
    :returns: the result of the underlying XACK command execution
    """
    return self.execute(b'XACK', stream, group_name, id, *ids)
Acknowledge a message for a given consumer group
def asset_asset_swap( self, asset1_id, asset1_transfer_spec, asset2_id, asset2_transfer_spec, fees): btc_transfer_spec = TransferParameters( asset1_transfer_spec.unspent_outputs, asset1_transfer_spec.to_script, asset1_transfer_spec.change_script, 0) return self.transfer( ...
Creates a transaction for swapping an asset for another asset. :param bytes asset1_id: The ID of the first asset. :param TransferParameters asset1_transfer_spec: The parameters of the first asset being transferred. It is also used for paying fees and/or receiving change if any. :par...
def resolve(self, space_id=None, environment_id=None): proxy_method = getattr( self._client, base_path_for(self.link_type) ) if self.link_type == 'Space': return proxy_method().find(self.id) elif environment_id is not None: return proxy_met...
Resolves link to a specific resource.
def start(st_reg_number): weights = [9, 8, 7, 6, 5, 4, 3, 2] digit_state_registration = st_reg_number[-1] if len(st_reg_number) != 9: return False sum_total = 0 for i in range(0, 8): sum_total = sum_total + weights[i] * int(st_reg_number[i]) if sum_total % 11 == 0: return...
Checks the number validity for the Paraiba state
def mul_table(self, other): other = coerceBigInt(other) if not other: return NotImplemented other %= orderG2() if not self._table: self._table = lwnafTable() librelic.ep2_mul_pre_lwnaf(byref(self._table), byref(self)) result = G2Element() ...
Fast multiplication using a the LWNAF precomputation table.
def __get_node_by_name(self, name): try: for entry in filter(lambda x: x.name == name, self.nodes()): return entry except StopIteration: raise ValueError("Attempted to retrieve a non-existing tree node with name: {name}" "".format(name...
Returns a first TreeNode object, which name matches the specified argument :raises: ValueError (if no node with specified name is present in the tree)
def quote_edge(identifier):
    """Return DOT edge statement node_id from string, quote if needed.

    Splits ``node[:port[:compass]]`` and quotes the node and port parts
    (the compass point never needs quoting).

    >>> quote_edge('spam')
    'spam'

    >>> quote_edge('spam spam:eggs eggs')
    '"spam spam":"eggs eggs"'

    >>> quote_edge('spam:eggs:s')
    'spam:eggs:s'
    """
    node, _, rest = identifier.partition(':')
    parts = [quote(node)]
    if rest:
        port, _, compass = rest.partition(':')
        parts.append(quote(port))
        if compass:
            parts.append(compass)
    return ':'.join(parts)
Return DOT edge statement node_id from string, quote if needed. >>> quote_edge('spam') 'spam' >>> quote_edge('spam spam:eggs eggs') '"spam spam":"eggs eggs"' >>> quote_edge('spam:eggs:s') 'spam:eggs:s'
def text_search(self, search, *, limit=0, table='assets'): return backend.query.text_search(self.connection, search, limit=limit, table=table)
Return an iterator of assets that match the text search Args: search (str): Text search string to query the text index limit (int, optional): Limit the number of returned documents. Returns: iter: An iterator of assets that match the text search.
def _bind_length_handlers(tids, user_handler, lns): for tid in tids: for ln in lns: type_octet = _gen_type_octet(tid, ln) ion_type = _TID_VALUE_TYPE_TABLE[tid] if ln == 1 and ion_type is IonType.STRUCT: handler = partial(_ordered_struct_start_handler, part...
Binds a set of handlers with the given factory. Args: tids (Sequence[int]): The Type IDs to bind to. user_handler (Callable): A function that takes as its parameters :class:`IonType`, ``length``, and the ``ctx`` context returning a co-routine. lns (Sequence[int]): Th...
def main(): parser = argparse.ArgumentParser( description='Relocate a virtual environment.' ) parser.add_argument( '--source', help='The existing virtual environment.', required=True, ) parser.add_argument( '--destination', help='The location for which...
Relocate a virtual environment.
def resized(self, dl, targ, new_path, resume = True, fn=None): return dl.dataset.resize_imgs(targ, new_path, resume=resume, fn=fn) if dl else None
Return a copy of this dataset resized
def run_config_diagnostics(config_path=CONFIG_PATH): config = read_config(config_path) missing_sections = set() malformed_entries = defaultdict(set) for section, expected_section_keys in SECTION_KEYS.items(): section_content = config.get(section) if not section_content: missi...
Run diagnostics on the configuration file. Args: config_path (str): Path to the configuration file. Returns: str, Set[str], dict(str, Set[str]): The path to the configuration file, a set of missing sections and a dict that maps each section to the entries that have either missing or emp...
def _HasExpectedLineLength(self, file_object): original_file_position = file_object.tell() line_reader = self._CreateLineReader(file_object) for _ in range(0, 20): sample_line = line_reader.readline(self._maximum_line_length + 1) if len(sample_line) > self._maximum_line_length: file_obje...
Determines if a file begins with lines of the expected length. As we know the maximum length of valid lines in the DSV file, the presence of lines longer than this indicates that the file will not be parsed successfully, without reading excessive data from a large file. Args: file_object (dfvfs....
def load(stream=None): if stream: loads(stream.read()) else: data = pkgutil.get_data(insights.__name__, _filename) return loads(data) if data else None
Loads filters from a stream, normally an open file. If one is not passed, filters are loaded from a default location within the project.
def open_in_browser(file_location): if not os.path.isfile(file_location): file_location = os.path.join(os.getcwd(), file_location) if not os.path.isfile(file_location): raise IOError("\n\nFile not found.") if sys.platform == "darwin": file_location = "file:///"+file_location new ...
Attempt to open file located at file_location in the default web browser.
def make_auth_headers(self, content_type):
    """Build request headers with a Basic authorization header added.

    :param content_type: content type forwarded to ``make_headers``
    :returns: header dict including ``Authorization: Basic <credentials>``
    """
    headers = self.make_headers(content_type)
    headers['Authorization'] = f'Basic {self.get_auth_string()}'
    return headers
Add authorization header.
def _syllabifyPhones(phoneList, syllableList): numPhoneList = [len(syllable) for syllable in syllableList] start = 0 syllabifiedList = [] for end in numPhoneList: syllable = phoneList[start:start + end] syllabifiedList.append(syllable) start += end return syllabifiedList
Given a phone list and a syllable list, syllabify the phones Typically used by findBestSyllabification which first aligns the phoneList with a dictionary phoneList and then uses the dictionary syllabification to syllabify the input phoneList.
def get_sortobj(self, goea_results, **kws): nts_goea = MgrNtGOEAs(goea_results).get_goea_nts_prt(**kws) goids = set(nt.GO for nt in nts_goea) go2nt = {nt.GO:nt for nt in nts_goea} grprobj = Grouper("GOEA", goids, self.hdrobj, self.grprdflt.gosubdag, go2nt=go2nt) grprobj.prt_summa...
Return a Grouper object, given a list of GOEnrichmentRecord.
def config_(name: str, local: bool, package: str, section: str, key: Optional[str]): cfg = config.read_configs(package, name, local=local) if key: with suppress(NoOptionError, NoSectionError): echo(cfg.get(section, key)) else: with suppress(NoSectionError): ...
Extract or list values from config.
def migrate_config_file( self, config_file_path, always_update=False, current_file_type=None, output_file_name=None, output_file_type=None, create=True, update_defaults=True, dump_kwargs=None, include_bootstrap=True, ): current_...
Migrates a configuration file. This is used to help you update your configurations throughout the lifetime of your application. It is probably best explained through example. Examples: Assume we have a JSON config file ('/path/to/config.json') like the following...
def _gerrit_user_to_author(props, username="unknown"): username = props.get("username", username) username = props.get("name", username) if "email" in props: username += " <%(email)s>" % props return username
Convert Gerrit account properties to Buildbot author format. Missing values are handled by falling back from "name" to "username" to the supplied default.
def update(self, title, key):
    """Update this key.

    :param str title: (required), title of the key
    :param str key: (required), text of the key file
    :returns: bool -- True only when the PATCH succeeded
    """
    # Both fields are required; bail out before touching the API.
    if not (title and key):
        return False
    payload = dumps({'title': title, 'key': key})
    json = self._json(self._patch(self._api, data=payload), 200)
    if not json:
        return False
    self._update_(json)
    return True
Update this key. :param str title: (required), title of the key :param str key: (required), text of the key file :returns: bool
def remove_path(path): if path is None or not os.path.exists(path): return if platform.system() == 'Windows': os.chmod(path, stat.S_IWRITE) try: if os.path.isdir(path): shutil.rmtree(path) elif os.path.isfile(path): shutil.os.remove(path) except OS...
Remove *path* from the file system. If *path* is None, do nothing.
def get_all_available_leaves(self, language=None, forbidden_item_ids=None):
    """Get all available leaves.

    Currently a straight delegation to ``get_all_leaves`` with the same
    filters applied.
    """
    return self.get_all_leaves(
        forbidden_item_ids=forbidden_item_ids,
        language=language,
    )
Get all available leaves.
def _sanitize_usecols(usecols): if usecols is None: return None try: pats = usecols.split(',') pats = [p.strip() for p in pats if p] except AttributeError: usecols = [int(c) for c in usecols] usecols.sort() return tuple(usecols) cols = [] for pat in pa...
Make a tuple of sorted integer column indices and return it. Return None if *usecols* is None.
def and_evaluator(conditions, leaf_evaluator):
    """AND together the evaluation results of *conditions*.

    Args:
        conditions: List of conditions, e.g. [operand_1, operand_2].
        leaf_evaluator: Function called to evaluate leaf condition values.

    Returns:
        False as soon as any condition evaluates to False; None when no
        condition is False but at least one evaluates to None; True when
        every condition evaluates to True.
    """
    any_null = False
    for entry in conditions:
        outcome = evaluate(entry, leaf_evaluator)
        if outcome is False:
            return False
        any_null = any_null or (outcome is None)
    return None if any_null else True
Evaluates a list of conditions as if the evaluator had been applied to each entry and the results AND-ed together. Args: conditions: List of conditions ex: [operand_1, operand_2]. leaf_evaluator: Function which will be called to evaluate leaf condition values. Returns: Boolean: - True if all o...
def update_machine_state(state_path):
    """Apply the Salt state declaration at *state_path* to this machine.

    Dumps the current Juju context to the salt grains file, then runs
    ``salt-call`` locally against the given state template.
    """
    # Refresh grains first so the state template sees current Juju data.
    charmhelpers.contrib.templating.contexts.juju_state_to_yaml(
        salt_grains_path)
    command = ['salt-call', '--local', 'state.template', state_path]
    subprocess.check_call(command)
Update the machine state using the provided state declaration.
def create_new_account(data_dir, password, **geth_kwargs): if os.path.exists(password): geth_kwargs['password'] = password command, proc = spawn_geth(dict( data_dir=data_dir, suffix_args=['account', 'new'], **geth_kwargs )) if os.path.exists(password): stdoutdata,...
Creates a new Ethereum account on geth. This is useful for testing when you want to stress interaction (transfers) between Ethereum accounts. This command communicates with ``geth`` command over terminal interaction. It creates keystore folder and new account there. This function only works a...
def _choose_rest_version(self): versions = self._list_available_rest_versions() versions = [LooseVersion(x) for x in versions if x in self.supported_rest_versions] if versions: return max(versions) else: raise PureError( "Library is incompatible wi...
Return the newest REST API version supported by target array.