code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def encrypt_account(self, id):
    """Make sure that certain fields of account *id* are encrypted.

    Re-stores every secured field through the parser's secure setter.

    :param id: the account/section identifier in the parser
    :return: self, to allow call chaining
    """
    for field in self.secured_field_names:
        current = self.parser.get(id, field)
        self.parser.set_secure(id, field, current)
    return self
Make sure that certain fields are encrypted.
def get_clamav_conf(filename):
    """Initialize ClamAV configuration from *filename*.

    Returns a ClamavConfig when the file exists; otherwise logs a
    warning and returns None.
    """
    if not os.path.isfile(filename):
        log.warn(LOG_PLUGIN, "No ClamAV config file found at %r.", filename)
        return None
    return ClamavConfig(filename)
Initialize clamav configuration.
def _parse(root): if root.tag == "nil-classes": return [] elif root.get("type") == "array": return [_parse(child) for child in root] d = {} for child in root: type = child.get("type") or "string" if child.get("nil"): value = None elif type == "boolean": value = True if child.text.lower() == "true" else False elif type == "dateTime": value = iso8601.parse_date(child.text) elif type == "decimal": value = decimal.Decimal(child.text) elif type == "integer": value = int(child.text) else: value = child.text d[child.tag] = value return d
Recursively convert an Element into python data types
def _check_vbox_port_forwarding(self): result = yield from self._execute("showvminfo", [self._vmname, "--machinereadable"]) for info in result.splitlines(): if '=' in info: name, value = info.split('=', 1) if name.startswith("Forwarding") and value.strip('"').startswith("GNS3VM"): return True return False
Checks if the NAT port forwarding rule exists. :returns: boolean
def delete_feed(self, pid):
    """Delete a feed, identified by its local id *pid*.

    Raises IOTException / LinkException (propagated from the point
    deletion) on infrastructure or communication problems.
    """
    logger.info('delete_feed(pid="%s") [lid=%s]', pid, self.__lid)
    return self.__delete_point(R_FEED, pid)
Delete a feed, identified by its local id. Raises [IOTException](./Exceptions.m.html#IoticAgent.IOT.Exceptions.IOTException) containing the error if the infrastructure detects a problem Raises [LinkException](../Core/AmqpLink.m.html#IoticAgent.Core.AmqpLink.LinkException) if there is a communications problem between you and the infrastructure `pid` (required) (string) local identifier of your feed you want to delete
def active():
    '''
    Return current active profile

    CLI Example:

    .. code-block:: bash

        salt '*' tuned.active
    '''
    result = __salt__['cmd.run']('tuned-adm active')
    # BUG FIX: re.compile was called with the undefined name `r` (the
    # pattern literal was lost).  Rebuild a pattern that extracts the
    # profile from output such as "Current active profile: virtual-guest".
    pattern = re.compile(r'Current active profile: (?P<profile>[\w-]+)')
    match = re.match(pattern, result)
    return '{0}'.format(match.group('profile'))
Return current active profile CLI Example: .. code-block:: bash salt '*' tuned.active
def _set_tz(values, tz, preserve_UTC=False, coerce=False):
    """Coerce the values to a DatetimeIndex if *tz* is set; preserve the
    input shape if possible.

    Parameters
    ----------
    values : ndarray
    tz : string/pickled tz object
    preserve_UTC : boolean, preserve the UTC of the result
    coerce : if we do not have a passed timezone, coerce to M8[ns] ndarray
    """
    if tz is not None:
        # keep the original Index name through the ravel/rebuild round-trip
        name = getattr(values, 'name', None)
        values = values.ravel()
        tz = timezones.get_timezone(_ensure_decoded(tz))
        values = DatetimeIndex(values, name=name)
        if values.tz is None:
            # stored values are UTC; localize then convert to the target tz
            values = values.tz_localize('UTC').tz_convert(tz)
        if preserve_UTC:
            if tz == 'UTC':
                # NOTE(review): returning a plain list here drops the Index
                # type — presumably relied on by the caller; confirm.
                values = list(values)
    elif coerce:
        # no timezone requested: coerce to a datetime64[ns] ndarray
        values = np.asarray(values, dtype='M8[ns]')
    return values
coerce the values to a DatetimeIndex if tz is set preserve the input shape if possible Parameters ---------- values : ndarray tz : string/pickled tz object preserve_UTC : boolean, preserve the UTC of the result coerce : if we do not have a passed timezone, coerce to M8[ns] ndarray
def search_metadata(self, query):
    """Submit a metadata search query to the server.

    :param query: the search query string
    :return: an ItemGroup with the matching items
    :raises: APIError if the API request is not successful
    """
    params = urlencode((('metadata', query),))
    resp = self.api_request('/catalog/search?' + params)
    return ItemGroup(resp['items'], self)
Submit a search query to the server and retrieve the results :type query: String :param query: the search query :rtype: ItemGroup :returns: the search results :raises: APIError if the API request is not successful
def pddet(A):
    """Log-determinant of a positive definite (symmetric) matrix.

    Computed via the Cholesky factor: log|A| = 2 * sum(log(diag(L))).
    """
    chol = jitchol(A)
    return 2 * sum(np.log(np.diag(chol)))
Log-determinant of a positive definite matrix; only symmetric matrices though
def _get_rank(self, team): rank = None rank_field = team('span[class="pollrank"]') if len(rank_field) > 0: rank = re.findall(r'\(\d+\)', str(rank_field))[0] rank = int(rank.replace('(', '').replace(')', '')) return rank
Find the team's rank when applicable. If a team is ranked, it will showup in a separate <span> tag with the actual rank embedded between parentheses. When a team is ranked, the integer value representing their ranking should be returned. For teams that are not ranked, None should be returned. Parameters ---------- team : PyQuery object A PyQuery object of a team's HTML tag in the boxscore. Returns ------- int Returns an integer representing the team's ranking when applicable, or None if the team is not ranked.
def Genra(request):
    """Return a JSON dict of Dept and its grade for the requested school."""
    course = Course(school=request.GET['school'])
    return JsonResponse(course.getGenra(), safe=False)
Generate dict of Dept and its grade.
def fromstr(cls, s, *, strict=True):
    """Construct a JID out of a string containing it.

    :param s: The string to parse ("localpart@domain/resource").
    :param strict: Whether to enable strict parsing (see JID docs).
    :return: The parsed JID.
    """
    bare, slash, resource = s.partition("/")
    if not slash:
        resource = None
    localpart, at_sign, domain = bare.partition("@")
    if not at_sign:
        # no '@': the whole bare string is the domain
        localpart, domain = None, localpart
    return cls(localpart, domain, resource, strict=strict)
Construct a JID out of a string containing it. :param s: The string to parse. :type s: :class:`str` :param strict: Whether to enable strict parsing. :type strict: :class:`bool` :raises: See :class:`JID` :return: The parsed JID :rtype: :class:`JID` See the :class:`JID` class level documentation for the semantics of `strict`.
def _get_autoreload_programs(self,cfg_file): cfg = RawConfigParser() cfg.readfp(cfg_file) reload_progs = [] for section in cfg.sections(): if section.startswith("program:"): try: if cfg.getboolean(section,"autoreload"): reload_progs.append(section.split(":",1)[1]) except NoOptionError: pass return reload_progs
Get the set of programs to auto-reload when code changes. Such programs will have autoreload=true in their config section. This can be affected by config file sections or command-line arguments, so we need to read it out of the merged config.
def _reldiff(a, b): a = float(a) b = float(b) aa = abs(a) ba = abs(b) if a == 0.0 and b == 0.0: return 0.0 elif a == 0 or b == 0.0: return float('inf') return abs(a - b) / min(aa, ba)
Computes the relative difference of two floating-point numbers rel = abs(a-b)/min(abs(a), abs(b)) If a == 0 and b == 0, then 0.0 is returned Otherwise if a or b is 0.0, inf is returned.
def initialize(self):
    """Set up initial ensime-vim editor settings."""
    vim = self._vim
    if 'EnErrorStyle' not in vim.vars:
        vim.vars['EnErrorStyle'] = 'EnError'
    vim.command('highlight EnErrorStyle ctermbg=red gui=underline')
    vim.command('set omnifunc=EnCompleteFunc')
    # jump to package declarations with <Space> inside package_info buffers
    vim.command(
        'autocmd FileType package_info nnoremap <buffer> <Space> :call EnPackageDecl()<CR>')
    vim.command('autocmd FileType package_info setlocal splitright')
Sets up initial ensime-vim editor settings.
def get_definition(self):
    """Return the code element whose name perfectly matches
    context.exact_match.

    Variables are checked first; executables serve as the fallback.
    """
    target = self.context.exact_match
    match = self._bracket_exact_var(target)
    if match is not None:
        return match
    return self._bracket_exact_exec(target)
Checks variable and executable code elements based on the current context for a code element whose name matches context.exact_match perfectly.
def schedule_in(secs, target=None, args=(), kwargs=None):
    """Insert a greenlet into the scheduler to run after *secs* seconds.

    Thin wrapper over schedule_at using the current time; see
    schedule_at for the decorator / preloaded-arguments usage.

    :param secs: seconds to wait before running the target
    :param target: function or greenlet to schedule
    :param args: positional arguments (functions only)
    :param kwargs: keyword arguments (functions only)
    :returns: the ``target`` argument
    """
    deadline = time.time() + secs
    return schedule_at(deadline, target, args, kwargs)
insert a greenlet into the scheduler to run after a set time If provided a function, it is wrapped in a new greenlet :param secs: the number of seconds to wait before running the target :type unixtime: int or float :param target: what to schedule :type target: function or greenlet :param args: arguments for the function (only used if ``target`` is a function) :type args: tuple :param kwargs: keyword arguments for the function (only used if ``target`` is a function) :type kwargs: dict or None :returns: the ``target`` argument This function can also be used as a decorator: >>> @schedule_in(30) >>> def f(): ... print 'hello from f' and args/kwargs can also be preloaded: >>> @schedule_in(30, args=('world',)) >>> def f(name): ... print 'hello %s' % name
def get_stats(self):
    """Get statistics for this NIO.

    :returns: NIO statistics (string with packets in, packets out,
        bytes in, bytes out)
    """
    reply = yield from self._hypervisor.send("nio get_stats {}".format(self._name))
    return reply[0]
Gets statistics for this NIO. :returns: NIO statistics (string with packets in, packets out, bytes in, bytes out)
def _get_data(data_source, context): try: data = data_source.source(context) if data is None: raise ValueError("'None' returned from " "data source '{n}'".format(n=context.name)) elif not isinstance(data, np.ndarray): raise TypeError("Data source '{n}' did not " "return a numpy array, returned a '{t}'".format( t=type(data))) elif data.shape != context.shape or data.dtype != context.dtype: raise ValueError("Expected data of shape '{esh}' and " "dtype '{edt}' for data source '{n}', but " "shape '{rsh}' and '{rdt}' was found instead".format( n=context.name, esh=context.shape, edt=context.dtype, rsh=data.shape, rdt=data.dtype)) return data except Exception as e: ex = ValueError("An exception occurred while " "obtaining data from data source '{ds}'\n\n" "{e}\n\n" "{help}".format(ds=context.name, e=str(e), help=context.help())) raise ex, None, sys.exc_info()[2]
Get data from the data source, checking the return values
def system_error(self, msg_id=None, message=None, description=None,
                 validation_timeout=False, exc_info=None, **kw):
    """Add an error message for an unexpected exception in validator code,
    and move it to the front of the error message list.

    If `exc_info` is supplied, the error will be logged.  If the error is
    a validation timeout, it is re-raised unless `msg_id` is
    "validation_timeout".
    """
    if exc_info:
        # timeouts are re-raised unless the caller explicitly expects them
        if (isinstance(exc_info[1], validator.ValidationTimeout) and
                msg_id != 'validation_timeout'):
            raise exc_info[1]
        log.error('Unexpected error during validation: %s: %s'
                  % (exc_info[0].__name__, exc_info[1]),
                  exc_info=exc_info)
    full_id = ('validator', 'unexpected_exception')
    if msg_id:
        full_id += (msg_id,)
    self.error(full_id,
               message or 'An unexpected error has occurred.',
               description or ('Validation was unable to complete successfully due '
                               'to an unexpected error.',
                               'The error has been logged, but please consider '
                               'filing an issue report here: '
                               'https://bit.ly/1POrYYU'),
               tier=1, **kw)
    # move the error we just appended to the front of the list
    self.errors.insert(0, self.errors.pop())
Add an error message for an unexpected exception in validator code, and move it to the front of the error message list. If `exc_info` is supplied, the error will be logged. If the error is a validation timeout, it is re-raised unless `msg_id` is "validation_timeout".
def getGroupsURL(certfile, group):
    """Build the GMS URL for listing *group* membership from a certfile.

    Reads the PEM certificate, assembles the issuer DN (first occurrence
    of each attribute only, components reversed into a comma-separated
    DN string) and URL-quotes it onto the GMS service path.
    """
    gms = "https://" + _SERVER + _GMS
    certfile.seek(0)
    x509 = crypto.load_certificate(crypto.FILETYPE_PEM, certfile.read())
    seen = []
    dn = ""
    sep = ""
    for attr_name, attr_value in x509.get_issuer().get_components():
        if attr_name in seen:
            # keep only the first occurrence of each attribute name
            continue
        seen.append(attr_name)
        dn = attr_name + "=" + attr_value + sep + dn
        sep = ","
    return gms + "/" + group + "/" + urllib.quote(dn)
given a certfile load a list of groups that user is a member of
def async_refresh_state(self):
    """Request each state to provide a status update."""
    _LOGGER.debug('Setting up extended status')
    # extended get/set (0x2E 0x00) with cmd2=0x02 requests the full status
    status_cmd = ExtendedSend(address=self._address,
                              commandtuple=COMMAND_EXTENDED_GET_SET_0X2E_0X00,
                              cmd2=0x02,
                              userdata=Userdata())
    status_cmd.set_crc()
    _LOGGER.debug('Sending ext status: %s', status_cmd)
    self._send_msg(status_cmd)
    _LOGGER.debug('Sending temp status request')
    self.temperature.async_refresh_state()
Request each state to provide status update.
def encrypt(self, plaintext, nonce, encoder=encoding.RawEncoder):
    """Encrypt *plaintext* with *nonce* using the precomputed shared key,
    returning the ciphertext encoded with *encoder*.

    .. warning:: The nonce MUST only be used once for any given key;
        reuse compromises the privacy of the encrypted messages.

    :param plaintext: [:class:`bytes`] message to encrypt
    :param nonce: [:class:`bytes`] nonce, exactly NONCE_SIZE bytes
    :param encoder: encoder applied to nonce and ciphertext
    :rtype: [:class:`nacl.utils.EncryptedMessage`]
    """
    if len(nonce) != self.NONCE_SIZE:
        raise ValueError("The nonce must be exactly %s bytes long" %
                         self.NONCE_SIZE)
    ciphertext = libnacl.crypto_box_afternm(plaintext, nonce, self._shared_key)
    return EncryptedMessage._from_parts(
        encoder.encode(nonce),
        encoder.encode(ciphertext),
        encoder.encode(nonce + ciphertext),
    )
Encrypts the plaintext message using the given `nonce` and returns the ciphertext encoded with the encoder. .. warning:: It is **VITALLY** important that the nonce is a nonce, i.e. it is a number used only once for any given key. If you fail to do this, you compromise the privacy of the messages encrypted. :param plaintext: [:class:`bytes`] The plaintext message to encrypt :param nonce: [:class:`bytes`] The nonce to use in the encryption :param encoder: The encoder to use to encode the ciphertext :rtype: [:class:`nacl.utils.EncryptedMessage`]
def checkpoint(self, interval):
    """Enable periodic checkpointing of RDDs of this DStream.

    @param interval: time in seconds between checkpoints of the
        generated RDDs
    """
    self.is_checkpointed = True
    duration = self._ssc._jduration(interval)
    self._jdstream.checkpoint(duration)
    return self
Enable periodic checkpointing of RDDs of this DStream @param interval: time in seconds, after each period of that, generated RDD will be checkpointed
def validate(self):
    """Validate fields of the PE structure to determine whether the
    loaded file has a valid PE format.

    Checks the MZ signature, the e_lfanew bound, the PE signature, and
    prints a warning for a suspicious NumberOfRvaAndSizes.

    @raise PEException: if an invalid value is found in the PE instance.
    """
    if self.dosHeader.e_magic.value != consts.MZ_SIGNATURE:
        # BUG FIX: the message formatted self.dosHeader.magic, which does
        # not exist — use e_magic, the field actually being checked.
        raise excep.PEException("Invalid MZ signature. Found %d instead of %d." %
                                (self.dosHeader.e_magic.value, consts.MZ_SIGNATURE))
    if self.dosHeader.e_lfanew.value > len(self):
        raise excep.PEException("Invalid e_lfanew value. Probably not a PE file.")
    if self.ntHeaders.signature.value != consts.PE_SIGNATURE:
        # BUG FIX: report ntHeaders.signature (the tested field), not the
        # misspelled 'optionaHeader' path that raised AttributeError.
        raise excep.PEException("Invalid PE signature. Found %d instead of %d." %
                                (self.ntHeaders.signature.value, consts.PE_SIGNATURE))
    if self.ntHeaders.optionalHeader.numberOfRvaAndSizes.value > 0x10:
        # BUG FIX: 'optionaHeader' typo corrected; print() call works on
        # both Python 2 and 3.
        print(excep.PEWarning("Suspicious value for NumberOfRvaAndSizes: %d." %
                              self.ntHeaders.optionalHeader.numberOfRvaAndSizes.value))
Performs validations over some fields of the PE structure to determine if the loaded file has a valid PE format. @raise PEException: If an invalid value is found into the PE instance.
def generate_headers(self, token):
    """Generate auth headers carrying the encoded *token*.

    Adds a plain header and/or an httponly cookie depending on config.
    """
    encoded = self.encode_token(token)
    headers = {}
    if self.config["header"]:
        headers[self.config["header"]] = encoded
    if self.config["cookie"]:
        headers["Set-Cookie"] = dump_cookie(
            self.config["cookie"], encoded,
            httponly=True, max_age=self.config["expiration"])
    return headers
Generate auth headers
def get_instances(self):
    """Retrieve the list of the currently registered component instances.

    :return: A sorted list of (name, factory name, state) tuples.
    """
    with self.__instances_lock:
        snapshot = [
            (name, stored.factory_name, stored.state)
            for name, stored in self.__instances.items()
        ]
    return sorted(snapshot)
Retrieves the list of the currently registered component instances :return: A list of (name, factory name, state) tuples.
def _parse_motion_sensor(self, sensor_xml):
    """Parse a motion sensor XML node into a MotionSensor.

    TODO: occupancy groups, their assignments and (un)occupied state
    are not handled yet.
    """
    name = sensor_xml.get('Name')
    integration_id = int(sensor_xml.get('IntegrationID'))
    return MotionSensor(self._lutron, name=name, integration_id=integration_id)
Parses a motion sensor object. TODO: We don't actually do anything with these yet. There's a lot of info that needs to be managed to do this right. We'd have to manage the occupancy groups, what's assigned to them, and when they go (un)occupied. We'll handle this later.
def get_db_references(cls, entry):
    """Get list of `models.DbReference` from XML node *entry*.

    :param entry: XML node entry
    :return: list of :class:`pyuniprot.manager.models.DbReference`
    """
    return [
        models.DbReference(identifier=ref.attrib['id'],
                           type_=ref.attrib['type'])
        for ref in entry.iterfind("./dbReference")
    ]
get list of `models.DbReference` from XML node entry :param entry: XML node entry :return: list of :class:`pyuniprot.manager.models.DbReference`
def valid(self):
    """Return valid certificates: not revoked, not expired, and whose
    validity period has already started.
    """
    now = timezone.now()
    criteria = {'revoked': False, 'expires__gt': now, 'valid_from__lt': now}
    return self.filter(**criteria)
Return valid certificates.
def getGraphFieldList(self, graph_name):
    """Return list of names of fields for graph with name *graph_name*.

    @param graph_name: Graph Name
    @return: List of field names for graph.
    """
    return self._getGraph(graph_name, True).getFieldList()
Returns list of names of fields for graph with name graph_name. @param graph_name: Graph Name @return: List of field names for graph.
def request_type(self):
    """Retrieve the request type from `xenon.proto.xenon_pb2`.

    Static methods without a request use Empty; non-static methods
    without a request have no type (None).
    """
    if not self.uses_request:
        return getattr(xenon_pb2, 'Empty') if self.static else None
    return getattr(xenon_pb2, self.request_name)
Retrieve the type of the request, by fetching it from `xenon.proto.xenon_pb2`.
def decrypt_file(filename, set_env=True, override_env=False):
    """Decrypt a JSON file mapping key names to encrypted secrets.

    :param filename: filename of the JSON file
    :param set_env: if True, an environment variable is set for each key
    :param override_env: if True, existing environment variables with the
        same name are overwritten; if False they are left untouched
    :return: dict containing the decrypted keys
    """
    # use a context manager instead of leaking the file handle
    with open(filename) as fp:
        data = json.load(fp)
    results = {}
    # .items() works on both Python 2 and 3 (iteritems() was py2-only)
    for key, encrypted in data.items():
        plaintext = decrypt_secret(encrypted)
        results[key] = plaintext
        if set_env:
            if key in os.environ and not override_env:
                # BUG FIX: this was 'break', which stopped setting env vars
                # for ALL remaining keys; only this key must be skipped.
                continue
            os.environ[str(key)] = plaintext
    return results
Decrypts a JSON file containing encrypted secrets. This file should contain an object mapping the key names to encrypted secrets. This encrypted file can be created using `credkeep.encrypt_file` or the commandline utility. :param filename: filename of the JSON file :param set_env: If True, an environment variable representing the key is created. :param override_env: If True, an existing environment variable with the same key name will be overridden with the new decrypted value. If False, the environment variable will not be set. :return: Dict containing the decrypted keys
def _process_response(self, response): assert self._state == self._STATE_RUNNING, "Should be running if processing response" cols = None data = [] for r in response: if not cols: cols = [(f, r._fields[f].db_type) for f in r._fields] data.append([getattr(r, f) for f in r._fields]) self._data = data self._columns = cols self._state = self._STATE_FINISHED
Update the internal state with the data from the response
def _GetSocket(self):
    """Establish a connection to an nsrlsvr instance.

    Returns:
      socket connected to an nsrlsvr instance, or None if a connection
      cannot be established (the error is logged).
    """
    address = (self._host, self._port)
    try:
        return socket.create_connection(address, self._SOCKET_TIMEOUT)
    except socket.error as exception:
        logger.error(
            'Unable to connect to nsrlsvr with error: {0!s}.'.format(exception))
Establishes a connection to an nsrlsvr instance. Returns: socket._socketobject: socket connected to an nsrlsvr instance or None if a connection cannot be established.
def analysis2working(self, a):
    """Convert back from the analysis color space to the working space."""
    unswapped = self.swap_polar_HSVorder[self.analysis_space](a)
    return self.colorspace.convert(self.analysis_space, self.working_space,
                                   unswapped)
Convert back from the analysis color space to the working space.
def main():
    """Example application that opens a serial alarm device and prints
    messages to the terminal."""
    try:
        serial_device = SerialDevice(interface=SERIAL_DEVICE)
        device = AlarmDecoder(serial_device)
        # route every received message through the print handler
        device.on_message += handle_message
        with device.open(baudrate=BAUDRATE):
            while True:
                time.sleep(1)
    except Exception as ex:
        print('Exception:', ex)
Example application that opens a serial device and prints messages to the terminal.
def get_votes(self):
    """Get all votes for this election.

    Returns the union of the vote querysets of every CandidateElection
    tied to this election, or None when there are none.
    """
    candidate_elections = CandidateElection.objects.filter(election=self)
    votes = None
    for ce in candidate_elections:
        ce_votes = ce.votes.all()
        # BUG FIX: 'None | queryset' raises TypeError on the first
        # iteration; seed with the first queryset instead.
        votes = ce_votes if votes is None else votes | ce_votes
    return votes
Get all votes for this election.
def set_privilege(self, name, value=None):
    """Configure the user privilege value in EOS.

    Args:
        name (str): The name of the user to create
        value (int): The privilege value to assign (0 to 15);
            defaults to 1 when omitted

    Returns:
        True if the operation was successful otherwise False

    Raises:
        TypeError: if the value is not in the valid range
    """
    if value is None:
        cmd = 'username %s privilege 1' % name
    else:
        if not isprivilege(value):
            raise TypeError('priviledge value must be between 0 and 15')
        cmd = 'username %s privilege %s' % (name, value)
    return self.configure(cmd)
Configures the user privilege value in EOS Args: name (str): The name of the user to create value (int): The privilege value to assign to the user. Valid values are in the range of 0 to 15 Returns: True if the operation was successful otherwise False Raises: TypeError: if the value is not in the valid range
def get_full_basenames(bases, basenames):
    """Resolve the base nodes and partial names of a class to full names.

    :param bases: astroid nodes representing what a class inherits from
    :param basenames: the corresponding partial names
    :returns: iterable of full names
    """
    return (get_full_basename(base, partial)
            for base, partial in zip(bases, basenames))
Resolve the base nodes and partial names of a class to full names. :param bases: The astroid node representing something that a class inherits from. :type bases: iterable(astroid.NodeNG) :param basenames: The partial name of something that a class inherits from. :type basenames: iterable(str) :returns: The full names. :rtype: iterable(str)
def auto_kwargs(function):
    """Modify *function* to support kwargs by only passing along the
    kwargs for parameters it accepts."""
    supported = introspect.arguments(function)

    @wraps(function)
    def call_function(*args, **kwargs):
        accepted = {key: value for key, value in kwargs.items()
                    if key in supported}
        return function(*args, **accepted)
    return call_function
Modifies the provided function to support kwargs by only passing along kwargs for parameters it accepts
def is_admin(controller, client, actor):
    """Determine whether the actor issuing a command is an admin.

    Checks the [admins] config section for either a full-mask key that
    equals the actor, or a nick=host pair matching the actor.
    """
    config = controller.config
    if not config.has_section("admins"):
        logging.debug("Ignoring is_admin check - no [admins] config found.")
        return False
    for key, val in config.items("admins"):
        if actor == User(key):
            logging.debug("is_admin: %r matches admin %r", actor, key)
            return True
        if actor.nick.lower() == key.lower() and actor.host.lower() == val.lower():
            logging.debug("is_admin: %r matches admin %r=%r", actor, key, val)
            return True
    logging.debug("is_admin: %r is not an admin.", actor)
    return False
Used to determine whether someone issuing a command is an admin. By default, checks to see if there's a line of the type nick=host that matches the command's actor in the [admins] section of the config file, or a key that matches the entire mask (e.g. "foo@bar" or "foo@bar=1").
def _get_filter(sdk_filter, attr_map):
    """Normalise an SDK filter structure into API filter keys.

    :param sdk_filter: {field: constraint, field: {operator: constraint}, ...}
    :return: {field__operator: constraint, ...}
    """
    if not isinstance(sdk_filter, dict):
        raise CloudValueError('filter value must be a dictionary, was %r' % (sdk_filter,))
    # NOTE: pop mutates the caller's dict (existing behaviour)
    custom = sdk_filter.pop('custom_attributes', {})
    new_filter = _normalise_key_values(filter_obj=sdk_filter, attr_map=attr_map)
    for key, value in _normalise_key_values(filter_obj=custom).items():
        new_filter['custom_attributes__%s' % key] = value
    return new_filter
Common functionality for filter structures :param sdk_filter: {field:constraint, field:{operator:constraint}, ...} :return: {field__operator: constraint, ...}
def requestCheckDockerIo(origAppliance, imageName, tag):
    """Check docker.io (registry v2 schema) for the existence of an image.

    Fetches an access token first, then issues a HEAD on the manifest.

    :param str origAppliance: full image url the user specified, e.g. "ubuntu:latest"
    :param str imageName: image path without the tag, e.g. "ubuntu"
    :param str tag: the registry tag, e.g. "latest"
    :return: origAppliance when a match is found
    :raises ApplianceImageNotFound: otherwise
    """
    # official images live under the implicit "library/" namespace
    if '/' not in imageName:
        imageName = 'library/' + imageName
    token_url = 'https://auth.docker.io/token?service=registry.docker.io&scope=repository:{repo}:pull'.format(repo=imageName)
    requests_url = 'https://registry-1.docker.io/v2/{repo}/manifests/{tag}'.format(repo=imageName, tag=tag)
    bearer = requests.get(token_url).json()["token"]
    response = requests.head(requests_url,
                             headers={'Authorization': 'Bearer {}'.format(bearer)})
    if response.ok:
        return origAppliance
    raise ApplianceImageNotFound(origAppliance, requests_url, response.status_code)
Checks docker.io to see if an image exists using the requests library. URL is based on the docker v2 schema. Requires that an access token be fetched first. :param str origAppliance: The full url of the docker image originally specified by the user (or the default). e.g. "ubuntu:latest" :param str imageName: The image, including path and excluding the tag. e.g. "ubuntu" :param str tag: The tag used at that docker image's registry. e.g. "latest" :return: Return True if match found. Raise otherwise.
def returner(ret):
    """Return salt job data to a memcache data store.

    The payload is indexed under both '<jid>:<minion>' and
    '<fun>:<minion>'; minion and jid are appended to the index lists.
    """
    serv = _get_serv(ret)
    minion, jid, fun = ret['id'], ret['jid'], ret['fun']
    payload = salt.utils.json.dumps(ret)
    serv.set('{0}:{1}'.format(jid, minion), payload)
    serv.set('{0}:{1}'.format(fun, minion), payload)
    _append_list(serv, 'minions', minion)
    _append_list(serv, 'jids', jid)
Return data to a memcache data store
def import_csv(file_name, **kwargs):
    """Read control points from a CSV file into a 1-dimensional list.

    The first line (column headings) is skipped.  A different value
    separator can be given via the ``separator`` keyword argument
    (default: comma).

    :param file_name: file name of the text file
    :type file_name: str
    :return: list of control points
    :rtype: list
    :raises GeomdlException: an error occurred reading the file
    """
    separator = kwargs.get('separator', ",")
    # skip_lines=1 drops the heading row — the only difference from import_txt
    content = exch.read_file(file_name, skip_lines=1)
    return exch.import_text_data(content, separator)
Reads control points from a CSV file and generates a 1-dimensional list of control points. It is possible to use a different value separator via ``separator`` keyword argument. The following code segment illustrates the usage of ``separator`` keyword argument. .. code-block:: python :linenos: # By default, import_csv uses 'comma' as the value separator ctrlpts = exchange.import_csv("control_points.csv") # Alternatively, it is possible to import a file containing tab-separated values ctrlpts = exchange.import_csv("control_points.csv", separator="\\t") The only difference of this function from :py:func:`.exchange.import_txt()` is skipping the first line of the input file which generally contains the column headings. :param file_name: file name of the text file :type file_name: str :return: list of control points :rtype: list :raises GeomdlException: an error occurred reading the file
def drawAxis(self, painter):
    """Draw the axis lines and notch labels for this system."""
    axis_pen = QPen(self.axisColor())
    axis_pen.setWidth(4)
    painter.setPen(axis_pen)
    painter.drawLines(self._buildData['axis_lines'])
    # horizontal-grid notches are right-aligned at the top of their rect
    for area, label in self._buildData['grid_h_notches']:
        painter.drawText(area, Qt.AlignTop | Qt.AlignRight, label)
    for area, label in self._buildData['grid_v_notches']:
        painter.drawText(area, Qt.AlignCenter, label)
Draws the axis for this system.
def sscan(self, key, cursor=0, match=None, count=None):
    """Incrementally iterate Set elements (Redis SSCAN).

    :param key: set key
    :param cursor: iteration cursor; 0 starts a new iteration
    :param match: optional glob-style pattern filter (MATCH)
    :param count: optional per-batch size hint (COUNT)
    :return: future resolving to (new_cursor, elements)
    """
    tokens = [key, cursor]
    # Build optional arguments with explicit ifs instead of the original
    # `cond and tokens.extend(...)` expression-statement trick.
    if match is not None:
        tokens.extend([b'MATCH', match])
    if count is not None:
        tokens.extend([b'COUNT', count])
    fut = self.execute(b'SSCAN', *tokens)
    return wait_convert(fut, lambda obj: (int(obj[0]), obj[1]))
Incrementally iterate Set elements.
def invalidate(cls, inst, name):
    """Invalidate a lazy attribute *name* on instance *inst*.

    This obviously violates the lazy contract.  A subclass of lazy may
    however have a contract where invalidation is appropriate.
    """
    inst_cls = inst.__class__
    if not hasattr(inst, '__dict__'):
        raise AttributeError("'%s' object has no attribute '__dict__'"
                             % (inst_cls.__name__,))
    # apply private-name mangling the same way the compiler would
    if name.startswith('__') and not name.endswith('__'):
        name = '_%s%s' % (inst_cls.__name__, name)
    if not isinstance(getattr(inst_cls, name), cls):
        raise AttributeError("'%s.%s' is not a %s attribute"
                             % (inst_cls.__name__, name, cls.__name__))
    inst.__dict__.pop(name, None)
Invalidate a lazy attribute. This obviously violates the lazy contract. A subclass of lazy may however have a contract where invalidation is appropriate.
def _compute_diff(existing, expected):
    """Compute the differences between the existing and the expected
    SNMP config."""
    diff = {}
    # plain string-valued settings
    for key in ('location', 'contact', 'chassis_id'):
        if existing.get(key) != expected.get(key):
            _create_diff(diff, _valid_str, key,
                         existing.get(key), expected.get(key))
    # dict-valued settings
    for key in ('community',):
        if existing.get(key) != expected.get(key):
            _create_diff(diff, _valid_dict, key,
                         existing.get(key), expected.get(key))
    return diff
Computes the differences between the existing and the expected SNMP config.
def _gen_hash(self, password, salt): h = hashlib.sha1() h.update(salt) h.update(password) return h.hexdigest()
Generate password hash.
def parse_dest(*args, **kwargs):
    """Select the dest name for an option registration.

    An explicit `dest` kwarg wins; otherwise the name is derived from
    the first long flag ('--foo-bar' -> 'foo_bar'), falling back to the
    first flag ('-x' -> 'x').
    """
    explicit = kwargs.get('dest')
    if explicit:
        return explicit
    flag = next((candidate for candidate in args if candidate.startswith('--')),
                args[0])
    return flag.lstrip('-').replace('-', '_')
Select the dest name for an option registration. If an explicit `dest` is specified, returns that and otherwise derives a default from the option flags where '--foo-bar' -> 'foo_bar' and '-x' -> 'x'.
def hash_name(name, script_pubkey, register_addr=None):
    """Generate the hash160 over a name and hex-string script pubkey.

    When *register_addr* is given, its string form is appended to the
    hashed payload.
    """
    payload = b40_to_bin(name) + unhexlify(script_pubkey)
    if register_addr is not None:
        payload += str(register_addr)
    return hex_hash160(payload)
Generate the hash over a name and hex-string script pubkey
def _create_metric_extractor(metric_name):
    """Return a function extracting *metric_name* from a session or group.

    Args:
        metric_name: tensorboard.hparams.MetricName protobuffer
            identifying the metric to extract.

    Returns:
        A function taking a SessionGroup or Session protobuffer and
        returning the metric's value, or None when it doesn't exist.
    """
    def extractor_fn(session_or_group):
        found = _find_metric_value(session_or_group, metric_name)
        return found.value if found else None
    return extractor_fn
Returns function that extracts a metric from a session group or a session. Args: metric_name: tensorboard.hparams.MetricName protobuffer. Identifies the metric to extract from the session group. Returns: A function that takes a tensorboard.hparams.SessionGroup or tensorborad.hparams.Session protobuffer and returns the value of the metric identified by 'metric_name' or None if the value doesn't exist.
def to_param_dict(self):
    """Flatten self.value into indexed parameter names.

    Sorted to ensure the order is consistent for testing.
    """
    flattened = {}
    for index, entry in enumerate(self.value):
        for key, value in entry.items():
            name = '{param_name}[{index}][{key}]'.format(
                param_name=self.param_name, index=index, key=key)
            flattened[name] = value
    return OrderedDict(sorted(flattened.items()))
Sorts to ensure Order is consistent for Testing
def get_objanno(fin_anno, anno_type=None, **kws):
    """Read annotations in GAF, GPAD, Entrez gene2go, or id2gos text format.

    The format is auto-detected from the filename unless *anno_type* is
    given explicitly; unknown formats raise RuntimeError.
    """
    anno_type = get_anno_desc(fin_anno, anno_type)
    if anno_type == 'gene2go':
        return Gene2GoReader(fin_anno, **kws)
    if anno_type == 'gaf':
        return GafReader(fin_anno,
                         hdr_only=kws.get('hdr_only', False),
                         prt=kws.get('prt', sys.stdout),
                         allow_missing_symbol=kws.get('allow_missing_symbol', False))
    if anno_type == 'gpad':
        return GpadReader(fin_anno, kws.get('hdr_only', False))
    if anno_type == 'id2gos':
        return IdToGosReader(fin_anno)
    raise RuntimeError('UNEXPECTED ANNOTATION FILE FORMAT: {F} {D}'.format(
        F=fin_anno, D=anno_type))
Read annotations in GAF, GPAD, Entrez gene2go, or text format.
async def change_url(self, url: str, description: str = None): await self._change(url=url, description=description)
change the url of that attachment |methcoro| Args: url: url you want to change description: *optional* description for your attachment Raises: ValueError: url must not be None APIException
def get_pwm_list(pwm_id_list, pseudocountProb=0.0001):
    """Get a list of Attract PWM's.

    # Arguments
        pwm_id_list: list of ids from the `PWM_id` metadata column
        pseudocountProb: pseudocount probability added to each PWM

    # Returns
        List of `concise.utils.pwm.PWM` instances.
    """
    motifs = load_motif_db(ATTRACT_PWM)
    # keys carry extra whitespace-separated info; index by the first token
    motifs = {key.split()[0]: matrix for key, matrix in motifs.items()}
    return [PWM(motifs[str(pwm_id)] + pseudocountProb, name=pwm_id)
            for pwm_id in pwm_id_list]
Get a list of Attract PWM's. # Arguments pwm_id_list: List of id's from the `PWM_id` column in `get_metadata()` table pseudocountProb: Added pseudocount probabilities to the PWM # Returns List of `concise.utils.pwm.PWM` instances.
def compute_qpi(self):
    """Compute model data with current parameters.

    Returns
    -------
    qpi: qpimage.QPImage
        Modeled phase data.

    Notes
    -----
    The model image might deviate from the fitted image because of
    interpolation during the fitting process.
    """
    kwargs = self.model_kwargs.copy()
    kwargs.update(radius=self.radius,
                  sphere_index=self.sphere_index,
                  center=[self.posx_offset, self.posy_offset])
    qpi = self.sphere_method(**kwargs)
    # flat background at the negative phase offset
    qpi.set_bg_data(bg_data=np.ones(qpi.shape) * -self.pha_offset,
                    which_data="phase")
    return qpi
Compute model data with current parameters Returns ------- qpi: qpimage.QPImage Modeled phase data Notes ----- The model image might deviate from the fitted image because of interpolation during the fitting process.
def retry(retry_count=5, delay=2):
    """Decorator retrying the wrapped function a few times with delays.

    The exception from the last attempt is raised if every call fails.

    :param retry_count: positive number of attempts
    :param delay: seconds slept between retries
    """
    if retry_count <= 0:
        raise ValueError("retry_count have to be positive")

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            remaining = retry_count
            while True:
                try:
                    return func(*args, **kwargs)
                except Exception:
                    remaining -= 1
                    if remaining < 1:
                        # final attempt failed: propagate
                        raise
                    time.sleep(delay)
        return wrapper
    return decorator
Use as decorator to retry functions few times with delays Exception will be raised if last call fails :param retry_count: int count of retries in case of failures. It must be a positive number :param delay: int delay between retries
def database_path(self):
    """Full database path: the default data directory + the filename.

    The special name ":memory:" is passed through unchanged.
    """
    filename = self.database_filename
    if filename == ":memory:":
        return ":memory:"
    return path.abspath(path.join(__file__, "../..", "..", "data", filename))
Full database path. Includes the default location + the database filename.
def draw(graph, fname):
    """Render *graph* with graphviz dot and save it into *fname*."""
    agraph = networkx.nx_agraph.to_agraph(graph)
    agraph.draw(fname, prog='dot')
Draw a graph and save it into a file
def processBED(fh, genome_alig, window_size, window_centre,
               mi_seqs=MissingSequenceHandler.TREAT_AS_ALL_GAPS,
               species=None, verbose=False):
    """Process a BED file and produce a conservation profile using a
    whole-genome alignment.

    :param fh: BED input stream
    :param genome_alig: whole-genome alignment used to compute scores
    :param window_size: length of the profile
    :param window_centre: which part of each interval sits at the centre
        of the profile (see WINDOW_CENTRE_OPTIONS)
    :param mi_seqs: how to treat sequences with no data for a column
    :param verbose: if True, output progress messages to stderr
    :return: list of per-position mean conservation values
    """
    mean_profile = [RollingMean() for _ in range(window_size)]
    for elem in BEDIterator(fh, verbose=verbose, scoreType=float,
                            sortedby=ITERATOR_SORTED_START):
        transform_locus(elem, window_centre, window_size)
        merge_profile(mean_profile,
                      conservtion_profile_pid(elem, genome_alig, mi_seqs, species))
    return [m.mean for m in mean_profile]
Process BED file, produce profile of conservation using whole genome alig. :param fh: :param genome_alig: the whole-genome alignment to use to compute conservation scores :param window_size: length of the profile. :param window_center: which part of each interval to place at the center of the profile. Acceptable values are in the module constant WINDOW_CENTRE_OPTIONS. :param miss_seqs: how to treat sequence with no actual sequence data for the column. :param verbose: if True, output progress messages to stderr. :return:
def chemical_element(self, name_only: bool = True) -> Union[dict, str]:
    """Generate a random chemical element.

    :param name_only: If False, a dict with name, symbol and atomic
        number is returned instead of just the name.
    :return: Name of chemical element or dict.
    :rtype: dict or str
    """
    record = self.random.choice(self._data['chemical_element'])
    # records are stored as "Name|Symbol|AtomicNumber"
    name, symbol, atomic_number = (part.strip() for part in record.split('|'))
    if name_only:
        return name
    return {'name': name, 'symbol': symbol, 'atomic_number': atomic_number}
Generate a random chemical element. :param name_only: If False then will be returned dict. :return: Name of chemical element or dict. :rtype: dict or str :Example: {'name': 'Sulfur', 'symbol': 'S', 'atomic_number': '16'}
def pymodule(line, cell=None):
    """Create and subsequently auto-import a python module (magic)."""
    parser = _commands.CommandParser.create('pymodule')
    parser.add_argument('-n', '--name',
                        help='the name of the python module to create and import')
    parser.set_defaults(func=_pymodule_cell)
    return _utils.handle_magic_line(line, cell, parser)
Creates and subsequently auto-imports a python module.
def get_visible_child(self, parent, locator, params=None, timeout=None):
    """Get a child element that is both present AND visible in the DOM.

    With timeout 0 returns the WebElement or None; otherwise waits up
    to *timeout* (default: self._explicit_wait) and raises
    TimeoutException when the element is not found.

    :param parent: parent-element
    :param locator: locator tuple
    :param params: (optional) locator params
    :param timeout: (optional) time to wait for element
    :return: WebElement instance
    """
    # the trailing True requests visibility in addition to presence
    return self.get_present_child(parent, locator, params, timeout, True)
Get child-element both present AND visible in the DOM. If timeout is 0 (zero) return WebElement instance or None, else we wait and retry for timeout and raise TimeoutException should the element not be found. :param parent: parent-element :param locator: locator tuple :param params: (optional) locator params :param timeout: (optional) time to wait for element (default: self._explicit_wait) :return: WebElement instance
def synchronized(sync_lock):
    """A decorator synchronizing multi-process access to a resource.

    The wrapped callable runs entirely while holding *sync_lock*.
    """
    def wrapper(func):
        @functools.wraps(func)
        def inner_wrapper(*args, **kwargs):
            with sync_lock:
                return func(*args, **kwargs)
        return inner_wrapper
    return wrapper
A decorator synchronizing multi-process access to a resource.
def all_ends_of_turn(self, root):
    """Simulate from *root* and yield ends of turn until every branch has
    reached mana drain.

    Arguments:
        root: a start state with no parent.

    Mana-drain ends are yielded but not simulated further.
    Warning: with random fill enabled this generates essentially forever
    due to the huge number of possibilities.
    """
    if root.parent:
        raise ValueError('Unexpectedly received a node with a parent for'
                         ' root:\n{}'.format(root))
    jobs = [root]
    while jobs:
        # Expand a randomly chosen pending state; order does not affect
        # completeness of the search.
        random_job_index = random.randint(0, len(jobs) - 1)
        start_eot = jobs.pop(random_job_index)
        # ends_of_one_state takes the original root under a different
        # keyword than intermediate end-of-turn states.
        if start_eot is root:
            kw_root = {'root': start_eot}
        else:
            kw_root = {'root_eot': start_eot}
        for eot in self.ends_of_one_state(**kw_root):
            # Mana-drain ends are terminal: yield but do not requeue.
            if not eot.is_mana_drain:
                jobs.append(eot)
            yield eot
Simulate the root and continue generating ends of turn until everything has reached mana drain. Warning on random fill: If random fill is used together with this method, it will generate basically forever due to the huge number of possibilities it introduces. Arguments: root: a start state with no parent Note on mana drain: Generates but does not continue simulation of mana drains. Note on run time: This simulates a complete turn for each eot provided, rather than just one branch at a time. The method will only stop generating when all possibilities have been simulated or filtered.
def describe(**kwargs):
    """Decorator customizing the rest API that transmute generates, e.g.
    choosing certain arguments to be query or body parameters, or a
    different HTTP method.

    Accepts the same keyword arguments as TransmuteAttributes; ``paths``
    and ``methods`` may be given as a single string or a list of strings.
    """
    # Normalize single-string values to one-element lists for these keys.
    for key in ("paths", "methods"):
        value = kwargs.get(key)
        if isinstance(value, string_type):
            kwargs[key] = [value]
    attrs = TransmuteAttributes(**kwargs)

    def decorator(func):
        # Merge with attributes set by an earlier @describe application.
        if hasattr(func, "transmute"):
            func.transmute = func.transmute | attrs
        else:
            func.transmute = attrs
        return func
    return decorator
describe is a decorator to customize the rest API that transmute generates, such as choosing certain arguments to be query parameters or body parameters, or a different method. :param list(str) paths: the path(s) for the handler to represent (using swagger's syntax for a path) :param list(str) methods: the methods this function should respond to. if none is set, transmute defaults to a GET. :param list(str) query_parameters: the names of arguments that should be query parameters. By default, all arguments are query or path parameters for a GET request. :param body_parameters: the names of arguments that should be body parameters. By default, all arguments are either body or path parameters for a non-GET request. in the case of a single string, the whole body is validated against a single object. :type body_parameters: List[str] or str :param list(str) header_parameters: the arguments that should be passed into the header. :param list(str) path_parameters: the arguments that are specified by the path. By default, arguments that are found in the path are used first before the query_parameters and body_parameters. :param list(str) parameter_descriptions: descriptions for each parameter, keyed by attribute name. this will appear in the swagger documentation.
def is_admin(self, user):
    """Return True if *user* is an admin in the organization, else False.

    :param user: the user to look up among this organization's memberships
    :return: bool
    """
    # .exists() issues a cheap LIMIT-1 query and already returns a bool,
    # replacing the `True if qs else False` anti-idiom that forced the
    # whole queryset to be evaluated just for its truthiness.
    return self.organization_users.filter(user=user, is_admin=True).exists()
Returns True if user is an admin in the organization, otherwise False
def next_batch(self, n=1):
    """Return (and remove) the next requests that should be dispatched.

    The newest *n* entries are popped from the end of the queue and
    returned newest-first (reversed tail order).

    :param n: maximum number of requests to return (default 1)
    :return: list of up to *n* requests; empty if n <= 0 or queue is empty
    """
    # Guard n <= 0 explicitly: queue[-0:] slices the WHOLE queue and
    # queue[:-0] empties it, which would silently drain everything.
    if n <= 0 or not self.queue:
        return []
    batch = list(reversed(self.queue[-n:]))
    self.queue = self.queue[:-n]
    return batch
Return the next requests that should be dispatched.
def _merge_csv_model(models, pc, csvs):
    """Add csv data to each column of every table in each model.

    :param dict models: Model metadata, keyed by model name
    :param pc: paleo/chron indicator, passed through to table merging
    :param csvs: csv data keyed by filename
    :return dict models: Metadata with csv values merged in
    """
    logger_csvs.info("enter merge_csv_model")
    try:
        for _name, _model in models.items():
            # All three table types are merged the same way when present.
            for _table_key in ("summaryTable", "ensembleTable", "distributionTable"):
                if _table_key in _model:
                    models[_name][_table_key] = _merge_csv_table(_model[_table_key], pc, csvs)
    except Exception as e:
        # Bug fix: was `.error("...{}",format(e))` — a comma in place of a
        # dot, so the message was never formatted. Use lazy %-style args.
        logger_csvs.error("merge_csv_model: %s", e)
    logger_csvs.info("exit merge_csv_model")
    return models
Add csv data to each column in chron model :param dict models: Metadata :return dict models: Metadata
async def is_change_done(self, zone, change_id):
    """Check if a DNS change has completed.

    Args:
        zone (str): DNS zone of the change.
        change_id (str): Identifier of the change.
    Returns:
        Boolean
    """
    managed_zone = self.get_managed_zone(zone)
    url = (f'{self._base_url}/managedZones/{managed_zone}'
           f'/changes/{change_id}')
    response = await self.get_json(url)
    # The change is complete once the API reports the "done" status.
    return response['status'] == self.DNS_CHANGES_DONE
Check if a DNS change has completed. Args: zone (str): DNS zone of the change. change_id (str): Identifier of the change. Returns: Boolean
def parse_bossURI(self, uri):
    """Parse a bossDB URI and handle malformed-URI errors.

    Arguments:
        uri (str): URI of the form bossdb://<collection>/<experiment>/<channel>

    Returns:
        Resource

    Raises:
        ValueError: if the URI does not have exactly three path components.
    """
    parts = uri.split("://")[1].split("/")
    # Bug fix: was `len(parts) is 3` — identity comparison with an int
    # literal is implementation-dependent (and a SyntaxWarning); use ==.
    if len(parts) == 3:
        return self.get_channel(parts[2], parts[0], parts[1])
    raise ValueError("Cannot parse URI " + uri + ".")
Parse a bossDB URI and handle malformed-URI errors. Arguments: uri (str): URI of the form bossdb://<collection>/<experiment>/<channel> Returns: Resource
def _execute(self, execute_inputs, execute_outputs, backward_execution=False):
    """Call the custom execute function of the state's script.py.

    :param execute_inputs: input data handed to the user script
    :param execute_outputs: output data handed to the user script
    :param backward_execution: if True, run the script backwards; no
        outcome is produced in that case
    :return: the Outcome selected by the script; Outcome(-2, "preempted")
        if the state was preempted; Outcome(-1, "aborted") if the script
        returned an outcome unknown to this state
    """
    # (Re)build the user's script module before executing it.
    self._script.build_module()
    outcome_item = self._script.execute(self, execute_inputs, execute_outputs, backward_execution)
    # Backward execution intentionally yields no outcome.
    if backward_execution:
        return
    if self.preempted:
        return Outcome(-2, "preempted")
    # The script may return either an outcome id (dict key) ...
    if outcome_item in self.outcomes:
        return self.outcomes[outcome_item]
    # ... or an outcome name; resolve it to the stored Outcome object.
    for outcome_id, outcome in self.outcomes.items():
        if outcome.name == outcome_item:
            return self.outcomes[outcome_id]
    logger.error("Returned outcome of {0} not existing: {1}".format(self, outcome_item))
    return Outcome(-1, "aborted")
Calls the custom execute function of the script.py of the state
def process(self, key, val):
    """Try to look for *key* in all required and optional fields.

    Each field's ``check(key, val)`` is consulted in order (required
    fields first); the first field that accepts the pair stops the search.
    """
    for group in (self.fields, self.optional):
        for field in group:
            if field.check(key, val):
                return
Try to look for `key` in all required and optional fields. If found, set the `val`.
def reset(self, old_scene=None, screen=None):
    """Reset the scene ready for playing.

    :param old_scene: The previous version of this Scene that was running
        before the application reset - e.g. due to a screen resize.
    :param screen: New screen to use if old_scene is not None.
    """
    for effect in self._effects:
        effect.reset()
    if not old_scene:
        return
    # Carry over state from old effects that support cloning; effects
    # without a clone() method are simply skipped.
    for previous_effect in old_scene.effects:
        try:
            previous_effect.clone(screen, self)
        except AttributeError:
            pass
Reset the scene ready for playing. :param old_scene: The previous version of this Scene that was running before the application reset - e.g. due to a screen resize. :param screen: New screen to use if old_scene is not None.
def get(self, dash_id):
    """Read dashboard content.

    Args:
        dash_id: dashboard id.

    Returns:
        A dict containing the content of that dashboard, not including
        the meta info.
    """
    raw = r_db.hmget(config.DASH_CONTENT_KEY, dash_id)[0]
    content = json.loads(raw)
    return build_response(dict(data=content, code=200))
Read dashboard content. Args: dash_id: dashboard id. Returns: A dict containing the content of that dashboard, not include the meta info.
def _read_field(self): ftype = self._input[self._pos] self._pos += 1 reader = self.field_type_map.get(ftype) if reader: return reader(self) raise Reader.FieldError('Unknown field type %s', ftype)
Read a single byte for field type, then read the value.
def save(self, data, fname, header=None):
    """Save data on fname.

    :param data: numpy array or list of lists
    :param fname: path name
    :param header: header to use
    """
    write_csv(fname, data, self.sep, self.fmt, header)
    # Record the logical file name; file-like objects expose .name.
    name = getattr(fname, 'name', fname)
    self.fnames.add(name)
Save data on fname. :param data: numpy array or list of lists :param fname: path name :param header: header to use
def auth_string(self):
    """Build a 'Token <base64>' authorization header value from the
    username and token.

    :return: str of the form 'Token <b64(username:token)>'
    """
    credentials = '{username}:{token}'.format(username=self.username, token=self.token)
    # Bug fix: b64encode() requires bytes and returns bytes on Python 3;
    # the original passed a str and interpolated the bytes repr into the
    # header. Encode before and decode after so the result is a clean str.
    encoded = b64encode(credentials.encode('utf-8')).decode('ascii')
    return 'Token {b64}'.format(b64=encoded)
Authenticate based on username and token which is base64-encoded
def formfield_for_manytomany(self, db_field, request, **kwargs):
    """Filter the disposable authors.

    For the 'authors' M2M field, restrict the selectable queryset to
    authors who are staff or already have entries; other fields fall
    through to the default admin behaviour.
    """
    if db_field.name == 'authors':
        # distinct() is required because the entries__isnull join can
        # produce duplicate author rows.
        kwargs['queryset'] = Author.objects.filter(
            Q(is_staff=True) | Q(entries__isnull=False)
        ).distinct()
    return super(EntryAdmin, self).formfield_for_manytomany(
        db_field, request, **kwargs)
Filter the disposable authors.
def is_valid_program(self, p):
    """Check whether program *p* makes a syntactically valid tree.

    Walking the stack, the running sum of arities must stay strictly
    below the count of atoms seen so far (so every function always has
    enough arguments), and the total arity plus one must equal the
    program length. Empty programs are invalid.
    """
    running_arity = 0
    for position, atom in enumerate(p):
        running_arity += atom.arity[atom.in_type]
        # Accumulated arity must stay below accumulated length.
        if running_arity >= position + 1:
            return False
    return len(p) > 0 and running_arity + 1 == len(p)
checks whether program p makes a syntactically valid tree. checks that the accumulated program length is always greater than the accumulated arities, indicating that the appropriate number of arguments is always present for functions. It then checks that the sum of arities +1 exactly equals the length of the stack, indicating that there are no missing arguments.
def max_heapify(arr, end, simulation, iteration):
    """Max heapify helper for max_heap_sort.

    Builds a max heap over arr[0..end], then swaps the root (maximum)
    into position *end*. When *simulation* is True, prints the array
    after every swap and counts iterations.

    :return: the updated iteration counter
    """
    last_parent = (end - 1) // 2
    # Sift every parent down, starting from the last parent node.
    for start in range(last_parent, -1, -1):
        node = start
        while node <= last_parent:
            child = 2 * node + 1
            # Pick the larger of the two children (if the right one exists).
            if child + 1 <= end and arr[child] < arr[child + 1]:
                child += 1
            if arr[child] <= arr[node]:
                break
            arr[node], arr[child] = arr[child], arr[node]
            node = child
            if simulation:
                iteration = iteration + 1
                print("iteration", iteration, ":", *arr)
    # Move the heap maximum into its final sorted position.
    arr[0], arr[end] = arr[end], arr[0]
    return iteration
Max heapify helper for max_heap_sort
def setup_oauth_client(self, url=None):
    """Set up an OAuth1 client for requests to the pump server.

    If *url* contains a scheme, its host selects the target server;
    otherwise the client's own server is used. For the client's own
    server the full resource-owner credentials from the store are used
    (and the result is cached on ``self.oauth``); for other servers only
    the consumer key/secret from the server cache are used.

    :param url: optional URL whose host selects the target server
    :return: an OAuth1 auth object for use with requests
    """
    if url and "://" in url:
        server, endpoint = self._deconstruct_url(url)
    else:
        server = self.client.server
    # Make sure consumer credentials exist for this server.
    if server not in self._server_cache:
        self._add_client(server)
    if server == self.client.server:
        self.oauth = OAuth1(
            client_key=self.store["client-key"],
            client_secret=self.store["client-secret"],
            resource_owner_key=self.store["oauth-access-token"],
            resource_owner_secret=self.store["oauth-access-secret"],
        )
        return self.oauth
    else:
        # Foreign server: only two-legged consumer credentials available.
        return OAuth1(
            client_key=self._server_cache[server].key,
            client_secret=self._server_cache[server].secret,
        )
Sets up client for requests to pump
def go_right(self):
    """Go to right: shift the view by 20% of the current interval width."""
    start, end = self._interval
    width = end - start
    self.shift(width * .2)
Go to right.
def group_keys(self):
    """Return an iterator over member names for groups only (arrays and
    other member types are skipped).

    Example: for a group with sub-groups 'foo' and 'bar' and datasets
    'baz' and 'quux', ``sorted(g.group_keys())`` yields ['bar', 'foo'].
    """
    for member in sorted(listdir(self._store, self._path)):
        # Only yield names whose storage path holds a group.
        if contains_group(self._store, self._key_prefix + member):
            yield member
Return an iterator over member names for groups only. Examples -------- >>> import zarr >>> g1 = zarr.group() >>> g2 = g1.create_group('foo') >>> g3 = g1.create_group('bar') >>> d1 = g1.create_dataset('baz', shape=100, chunks=10) >>> d2 = g1.create_dataset('quux', shape=200, chunks=20) >>> sorted(g1.group_keys()) ['bar', 'foo']
def _ensure_object_id(cls, id):
    """Coerce *id* to an ObjectId where possible.

    ObjectId instances pass through untouched; strings matching the
    OBJECTIDEXPR pattern are wrapped in an ObjectId; anything else is
    returned unchanged.
    # NOTE(review): `basestring` implies this code still supports
    # Python 2 — confirm before modernizing. `id` shadows the builtin.
    """
    if isinstance(id, ObjectId):
        return id
    if isinstance(id, basestring) and OBJECTIDEXPR.match(id):
        return ObjectId(id)
    return id
Checks whether the given id is an ObjectId instance, and if not wraps it.
def _dstr(degrees, places=1, signed=False): r if isnan(degrees): return 'nan' sgn, d, m, s, etc = _sexagesimalize_to_int(degrees, places) sign = '-' if sgn < 0.0 else '+' if signed else '' return '%s%02ddeg %02d\' %02d.%0*d"' % (sign, d, m, s, places, etc)
r"""Convert floating point `degrees` into a sexagesimal string. >>> _dstr(181.875) '181deg 52\' 30.0"' >>> _dstr(181.875, places=3) '181deg 52\' 30.000"' >>> _dstr(181.875, signed=True) '+181deg 52\' 30.0"' >>> _dstr(float('nan')) 'nan'
def kem(request):
    """Return the most-similar terms for a keyword from the per-language
    embedding model as a JSON response.

    GET params: keyword, lang, optional ontology (JSON boolean selecting
    the ontology model over the origin model), optional num (result
    count, default 10).
    """
    keyword = request.GET['keyword']
    lang = request.GET['lang']
    # Use the ontology-enriched model only when the 'ontology' parameter
    # is present and parses (lower-cased) to a JSON true value.
    ontology = 'ontology' if 'ontology' in request.GET and bool(json.loads(request.GET['ontology'].lower())) else 'origin'
    result = multilanguage_model[lang][ontology].most_similar(keyword, int(request.GET['num']) if 'num' in request.GET else 10)
    # safe=False because `result` is not guaranteed to be a dict.
    return JsonResponse(result, safe=False)
Due to the base-directory settings of Django, the model_path needs to be different when testing this section.
def what():
    """Print current topics.

    NOTE(review): reads `self` and `lib` from the enclosing scope — this
    is a closure defined inside another function/method, not a free
    function. Exits with USER_ERROR when no topic is active.
    """
    if not self.isactive():
        lib.echo("No topic")
        sys.exit(lib.USER_ERROR)
    lib.echo(os.environ.get("BE_TOPICS", "This is a bug"))
Print current topics
def add(self, widget, condition=lambda: 42):
    """Add a widget to the window; the widget will auto-render.

    Returns the widget so callers can keep a reference:
        self.my_widget = self.add(my_widget)

    :param widget: a BaseWidget instance
    :param condition: callable gating the widget's rendering
    """
    assert isinstance(widget, BaseWidget)
    assert callable(condition)
    self._widgets.append((widget, condition))
    return widget
Add a widget to the widows. The widget will auto render. You can use the function like that if you want to keep the widget accecible : self.my_widget = self.add(my_widget)
def set_preferred(node):
    """Set the preferred NUMA node for the current thread.

    Memory is preferably allocated on this node before falling back to
    other nodes; the default is the node the process runs on (local
    policy).

    @param node: node idx
    @type node: C{int}
    @raise ValueError: if node is outside [0, get_max_node()]
    """
    if not 0 <= node <= get_max_node():
        raise ValueError(node)
    libnuma.numa_set_preferred(node)
Sets the preferred node for the current thread to node. The preferred node is the node on which memory is preferably allocated before falling back to other nodes. The default is to use the node on which the process is currently running (local policy). @param node: node idx @type node: C{int}
def setup():
    """Return the attributes of this family.

    Notes
    ----------
    - scale notes whether family has a variance parameter (sigma)
    - shape notes whether family has a tail thickness parameter (nu)
    - skewness notes whether family has a skewness parameter (gamma)
    - mean_transform transforms the location parameter
    - cythonized notes whether the family has cythonized routines

    Returns
    ----------
    - model name, link function, scale, shape, skewness, mean_transform, cythonized
    """
    # (name, link, scale, shape, skewness, mean_transform, cythonized)
    return ("Poisson", np.exp, False, False, False, np.log, True)
Returns the attributes of this family Notes ---------- - scale notes whether family has a variance parameter (sigma) - shape notes whether family has a tail thickness parameter (nu) - skewness notes whether family has a skewness parameter (gamma) - mean_transform is a function which transforms the location parameter - cythonized notes whether the family has cythonized routines Returns ---------- - model name, link function, scale, shape, skewness, mean_transform, cythonized
def common(self):
    """Return the set of parameters that are common to all types of keys.

    :return: Dictionary with 'kty' plus any of 'use'/'kid'/'alg' that are
        set to truthy values.
    """
    params = {"kty": self.kty}
    # Optional members are included only when truthy.
    for attr in ("use", "kid", "alg"):
        value = getattr(self, attr)
        if value:
            params[attr] = value
    return params
Return the set of parameters that are common to all types of keys. :return: Dictionary
def start(self):
    """Start processing streams.

    Schedules a periodic callback, fired every batch_duration seconds,
    that steps every registered dstream; registers the callback's stop
    with the context's stop hooks; marks this context as active.
    """
    def cb():
        time_ = time.time()
        log.debug('Step {}'.format(time_))
        # Advance every discretized stream by one batch tick.
        for d in self._dstreams:
            d._step(time_)
    # PeriodicCallback expects its interval in milliseconds.
    self._pcb = PeriodicCallback(cb, self.batch_duration * 1000.0)
    self._pcb.start()
    # Make sure the callback is torn down when the context stops.
    self._on_stop_cb.append(self._pcb.stop)
    StreamingContext._activeContext = self
Start processing streams.
def Cpu():
    """Get the number of available CPUs.

    :return: str(cpu count), or 'Unknown' if the count cannot be determined
    """
    try:
        return str(multiprocessing.cpu_count())
    except Exception as e:
        # cpu_count() can raise NotImplementedError on exotic platforms.
        # Bug fix: the original log message contained a stray quote
        # ("count' ") — cleaned up here.
        logger.error("Can't access CPU count: " + str(e))
        return 'Unknown'
Get number of available CPUs
def migrate_into_triple(belstr: str) -> str:
    """Migrate a BEL1 assertion into a BEL 2.0.0 SRO triple.

    :param belstr: BEL1 assertion string
    :return: the migrated assertion rendered as a subject-relation-object
        triple string
    """
    # Parse against the 2.0.0 grammar; the AST is stored on the shared
    # `bo` object before migration.
    bo.ast = bel.lang.partialparse.get_ast_obj(belstr, "2.0.0")
    return migrate_ast(bo.ast).to_triple()
Migrate BEL1 assertion into BEL 2.0.0 SRO triple
def init():
    """Install gettext with the default parameters.

    Only installs once: skipped if `_` is already present in builtins.
    """
    if "_" not in builtins.__dict__:
        os.environ["LANGUAGE"] = inginious.input.get_lang()
        # Bug fix (dead branch): both arms of the former
        # `if inginious.DEBUG:` conditional made this exact same call,
        # so the conditional was useless and has been collapsed.
        gettext.install("messages", get_lang_dir_path())
Install gettext with the default parameters
def add_path(self, path):
    """Add a new path to the set of searchable paths.

    :param path: candidate path
    :return: the path if it exists on disk and was added, else None
    """
    # Only paths that actually exist are recorded.
    if not os.path.exists(path):
        return None
    self.paths.add(path)
    return path
Adds a new path to the list of searchable paths :param path: new path