code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def data_filler_customer(self, number_of_rows, cursor, conn):
customer_data = []
try:
for i in range(0, number_of_rows):
customer_data.append((
rnd_id_generator(self), self.faker.first_name(), self.faker.last_name(), self.faker.address(),
... | creates and fills the table with customer |
def get_notifications(self, start=None, stop=None, *args, **kwargs):
filter_kwargs = {}
if start is not None:
filter_kwargs['%s__gte' % self.notification_id_name] = start + 1
if stop is not None:
filter_kwargs['%s__lt' % self.notification_id_name] = stop + 1
objec... | Returns all records in the table. |
def fingerprint_relaxation(T, p0, obs, tau=1, k=None, ncv=None):
r
T = _types.ensure_ndarray_or_sparse(T, ndim=2, uniform=True, kind='numeric')
n = T.shape[0]
if not is_reversible(T):
raise ValueError('Fingerprint calculation is not supported for nonreversible transition matrices. ')
p0 = _t... | r"""Dynamical fingerprint for relaxation experiment.
The dynamical fingerprint is given by the implied time-scale
spectrum together with the corresponding amplitudes.
Parameters
----------
T : (M, M) ndarray or scipy.sparse matrix
Transition matrix
obs1 : (M,) ndarray
Observabl... |
def get_block_from_consensus( self, consensus_hash ):
query = 'SELECT block_id FROM snapshots WHERE consensus_hash = ?;'
args = (consensus_hash,)
con = self.db_open(self.impl, self.working_dir)
rows = self.db_query_execute(con, query, args, verbose=False)
res = None
for r... | Get the block number with the given consensus hash.
Return None if there is no such block. |
def send_mass_template_mail(subject_template, body_template, recipients, context=None):
if context:
subject, body = render_mail_template(subject_template, body_template, context)
else:
subject, body = subject_template, body_template
message_tuples = [(subject, body, conf.get('DEFAULT_FROM_EM... | Renders an email subject and body using the given templates and context,
then sends it to the given recipients list.
The emails are sent one-by-one. |
def bookmark_list():
client = get_client()
bookmark_iterator = client.bookmark_list()
def get_ep_name(item):
ep_id = item["endpoint_id"]
try:
ep_doc = client.get_endpoint(ep_id)
return display_name_or_cname(ep_doc)
except TransferAPIError as err:
i... | Executor for `globus bookmark list` |
def _load_audio_file(self):
self._step_begin(u"load audio file")
audio_file = AudioFile(
file_path=self.task.audio_file_path_absolute,
file_format=None,
rconf=self.rconf,
logger=self.logger
)
audio_file.read_samples_from_file()
self... | Load audio in memory.
:rtype: :class:`~aeneas.audiofile.AudioFile` |
def save_method(elements, module_path):
for elem, signature in elements.items():
if isinstance(signature, dict):
save_method(signature, module_path + (elem,))
elif isinstance(signature, Class):
save_method(signature.fields, module_path + (elem,))
elif signature.ismeth... | Recursively save methods with module name and signature. |
def link(self):
if self.linked:
return self
self.linked = True
included_modules = []
for include in self.includes.values():
included_modules.append(include.link().surface)
self.scope.add_surface('__includes__', tuple(included_modules))
self.scope.a... | Link all the types in this module and all included modules. |
def _get(self, word1, word2):
    """Return a possible next word after ``word1`` and ``word2``, or ``None``
    if there's no possibility.
    """
    sanitized = (self._sanitize(word1), self._sanitize(word2))
    key = self._WSEP.join(sanitized).lower()
    followers = self._db.get(key)
    if followers is None:
        return None
    # Pick one follower uniformly at random.
    return sample(followers, 1)[0]
def hue(self, hue):
    """Set the group hue.

    :param hue: Hue in decimal percent (0.0-1.0).
    :raises ValueError: if ``hue`` lies outside the 0-1 range.
    """
    if hue < 0 or hue > 1:
        raise ValueError("Hue must be a percentage "
                         "represented as decimal 0-1.0")
    self._hue = hue
    # Build the protocol command for this hue and push it out.
    command = self.command_set.hue(hue)
    self.send(command)
def text(self, path, compression=None, lineSep=None):
    """Saves the content of the DataFrame in a text file at the specified path.
    The text files will be encoded as UTF-8.

    :param path: the path in any Hadoop supported file system
    :param compression: compression codec to use when saving to file
    :param lineSep: line separator passed through to the writer options
    """
    opts = dict(compression=compression, lineSep=lineSep)
    self._set_opts(**opts)
    self._jwrite.text(path)
def render_context_with_title(self, context):
    """Render a page title and insert it into the context.

    Renders ``self.page_title`` as a Django template against ``context`` and
    stores the result under the ``page_title`` key. An existing
    ``page_title`` entry is left untouched.

    :param context: dict used both as template context and output container
    :return: the (possibly updated) context dict
    """
    if "page_title" in context:
        return context
    rendered = template.Template(
        encoding.force_text(self.page_title)
    ).render(template.Context(context))
    context["page_title"] = rendered
    return context
def _generate_badge(self, subject, status):
url = 'https://img.shields.io/badge/%s-%s-brightgreen.svg' \
'?style=flat&maxAge=3600' % (subject, status)
logger.debug("Getting badge for %s => %s (%s)", subject, status, url)
res = requests.get(url)
if res.status_code != 200:
... | Generate SVG for one badge via shields.io.
:param subject: subject; left-hand side of badge
:type subject: str
:param status: status; right-hand side of badge
:type status: str
:return: badge SVG
:rtype: str |
def handshake(self):
    """Set up the connection with the Serf agent and perform the
    initial handshake.
    """
    # Open the socket lazily on first use.
    sock = self._socket
    if sock is None:
        sock = self._connect()
        self._socket = sock
    return self.call('handshake', {"Version": 1}, expect_body=False)
def get_repository_owner_and_name() -> Tuple[str, str]:
    """Checks the origin remote to get the owner and name of the remote
    repository.

    :return: A tuple of the owner and name.
    :raises HvcsRepoParseError: if the origin URL does not end in
        ``<owner>/<name>.git``.
    """
    check_repo()
    url = repo.remote('origin').url
    # FIX: the '.' before 'git' was previously unescaped, so it matched any
    # character (e.g. 'owner/repogit' parsed as ('owner', 'rep')).
    parts = re.search(r'([^/:]+)/([^/]+)\.git$', url)
    if not parts:
        raise HvcsRepoParseError
    debug('get_repository_owner_and_name', parts)
    return parts.group(1), parts.group(2)
def assert_is_instance(obj, cls, msg_fmt="{msg}"):
    """Fail if an object is not an instance of a class or tuple of classes.

    ``msg_fmt`` may reference ``{msg}`` (default message), ``{obj}`` (the
    object under test) and ``{types}`` (tuple of expected classes).
    """
    if isinstance(obj, cls):
        return
    msg = "{!r} is an instance of {!r}, expected {!r}".format(
        obj, obj.__class__, cls
    )
    types = cls if isinstance(cls, tuple) else (cls,)
    fail(msg_fmt.format(msg=msg, obj=obj, types=types))
def add_service_port(service, port):
    """Add a new port to the specified service.

    .. versionadded:: 2016.11.0

    CLI Example:

    .. code-block:: bash

        salt '*' firewalld.add_service_port zone 80
    """
    known_services = get_services(permanent=True)
    if service not in known_services:
        raise CommandExecutionError('The service does not exist.')
    cmd = '--permanent --service={0} --add-port={1}'.format(service, port)
    return __firewall_cmd(cmd)
def _remember_avatarness(
self, character, graph, node,
is_avatar=True, branch=None, turn=None,
tick=None
):
branch = branch or self.branch
turn = turn or self.turn
tick = tick or self.tick
self._avatarness_cache.store(
character,
... | Use this to record a change in avatarness.
Should be called whenever a node that wasn't an avatar of a
character now is, and whenever a node that was an avatar of a
character now isn't.
``character`` is the one using the node as an avatar,
``graph`` is the character the node is... |
def _check(self, file):
if not os.path.exists(file):
raise Error("file \"{}\" not found".format(file))
_, extension = os.path.splitext(file)
try:
check = self.extension_map[extension[1:]]
except KeyError:
magic_type = magic.from_file(file)
... | Run appropriate check based on `file`'s extension and return it,
otherwise raise an Error |
def resource_to_url(resource, request=None, quote=False):
    """Converts the given resource to a URL.

    :param request: Request object (required for the host name part of the
      URL). If this is not given, the current request is used.
    :param bool quote: If set, the URL returned will be quoted.
    """
    if request is None:
        request = get_current_request()
    converter = get_current_registry().getAdapter(request,
                                                  IResourceUrlConverter)
    return converter.resource_to_url(resource, quote=quote)
def compute_gas_limit_bounds(parent: BlockHeader) -> Tuple[int, int]:
    """Compute the boundaries for the block gas limit based on the parent block."""
    adjustment = parent.gas_limit // GAS_LIMIT_ADJUSTMENT_FACTOR
    # The lower bound may never drop below the protocol-wide minimum.
    return (
        max(GAS_LIMIT_MINIMUM, parent.gas_limit - adjustment),
        parent.gas_limit + adjustment,
    )
def _search_indicators_page_generator(self, search_term=None,
enclave_ids=None,
from_time=None,
to_time=None,
indicator_types=None,
... | Creates a generator from the |search_indicators_page| method that returns each successive page.
:param str search_term: The term to search for. If empty, no search term will be applied. Otherwise, must
be at least 3 characters.
:param list(str) enclave_ids: list of enclave ids used to res... |
def has_foreign_key(self, name):
    """Returns whether this table has a foreign key constraint with the given name.

    :param name: The constraint name
    :type name: str

    :rtype: bool
    """
    return self._normalize_identifier(name) in self._fk_constraints
def cart_to_polar(arr_c):
if arr_c.shape[-1] == 1:
arr_p = arr_c.copy()
elif arr_c.shape[-1] == 2:
arr_p = np.empty_like(arr_c)
arr_p[..., 0] = vector_mag(arr_c)
arr_p[..., 1] = np.arctan2(arr_c[..., 1], arr_c[..., 0])
elif arr_c.shape[-1] == 3:
arr_p = np.empty_like(... | Return cartesian vectors in their polar representation.
Parameters
----------
arr_c: array, shape (a1, a2, ..., d)
Cartesian vectors, with last axis indexing the dimension.
Returns
-------
arr_p: array, shape of arr_c
Polar vectors, using (radius, inclination, azimuth) conventi... |
def mock_decorator_with_params(*oargs, **okwargs):
    """Optionally mock a decorator that takes parameters

    E.g.:

    @blah(stuff=True)
    def things():
        pass
    """
    def inner(fn, *iargs, **ikwargs):
        # Pass real callables straight through; replace anything else with a
        # Mock so attribute access keeps working.
        return fn if hasattr(fn, '__call__') else Mock()
    return inner
def first_return():
    """Generate a random walk and return its length upto the moment
    that the walker first returns to the origin.

    It is mathematically provable that the walker will eventually return,
    meaning that the function call will halt, although it may take
    a *very* long time and your computer may run out of memory!
    """
    # Pipeline: drop the starting point, keep taking steps while the walker
    # is away from Origin, then materialize the remaining steps into a list.
    # NOTE(review): assumes `randwalk`, `drop`, `takewhile`, `Origin` and the
    # `>>` pipe operator come from the surrounding stream DSL — confirm.
    walk = randwalk() >> drop(1) >> takewhile(lambda v: v != Origin) >> list
    return len(walk)
def clean(self):
    """Run all of the cleaners added by the user.

    Generator-based coroutine: yields from ``asyncio.wait`` so all cleaner
    callables run concurrently before this resumes. Does nothing when no
    cleaners were registered.
    """
    if self.cleaners:
        # NOTE(review): the `loop` argument to asyncio.wait was deprecated in
        # Python 3.8 and removed in 3.10 — confirm the supported Python range.
        yield from asyncio.wait([x() for x in self.cleaners],
                                loop=self.loop)
def __set_token_expired(self, value):
    """Internal helper for oauth code.

    Store the absolute expiry time computed as now plus ``value`` seconds.
    """
    expires_at = datetime.datetime.now() + datetime.timedelta(seconds=value)
    self._token_expired = expires_at
def fetch(self):
params = values.of({})
payload = self._version.fetch(
'GET',
self._uri,
params=params,
)
return AvailablePhoneNumberCountryInstance(
self._version,
payload,
account_sid=self._solution['account_sid'],... | Fetch a AvailablePhoneNumberCountryInstance
:returns: Fetched AvailablePhoneNumberCountryInstance
:rtype: twilio.rest.api.v2010.account.available_phone_number.AvailablePhoneNumberCountryInstance |
def NotEqualTo(self, value):
    """Sets the type of the WHERE clause as "not equal to".

    Args:
      value: The value to be used in the WHERE condition.

    Returns:
      The query builder that this WHERE builder links to.
    """
    condition = self._CreateSingleValueCondition(value, '!=')
    self._awql = condition
    return self._query_builder
def merge(directory=None, revisions='', message=None, branch_label=None,
rev_id=None):
if alembic_version >= (0, 7, 0):
config = current_app.extensions['migrate'].migrate.get_config(
directory)
command.merge(config, revisions, message=message,
branch_label... | Merge two revisions together. Creates a new migration file |
def handle_os_exceptions():
try:
yield
except ObjectException:
exc_type, exc_value, _ = exc_info()
raise _OS_EXCEPTIONS.get(exc_type, OSError)(exc_value)
except (OSError, same_file_error, UnsupportedOperation):
raise
except Exception:
exc_type, exc_value, _ = exc_... | Handles pycosio exceptions and raise standard OS exceptions. |
def deep_del(data, fn):
    """Create dict copy with removed items.

    Recursively remove items where fn(value) is True.

    :param dict data: mapping to filter (not mutated)
    :param fn: predicate; entries whose value makes ``fn(value)`` true are
        dropped, including whole sub-dicts
    :returns: dict: New dict with matching items removed.
    """
    # FIX: use .items() instead of the Python-2-only .iteritems() so the
    # helper works on both Python 2 and 3.
    result = {}
    for key, value in data.items():
        if fn(value):
            continue
        if isinstance(value, dict):
            result[key] = deep_del(value, fn)
        else:
            result[key] = value
    return result
def get_search_scores(query, choices, ignore_case=True, template='{}',
valid_only=False, sort=False):
query = query.replace(' ', '')
pattern = get_search_regex(query, ignore_case)
results = []
for choice in choices:
r = re.search(pattern, choice)
if query and r:
... | Search for query inside choices and return a list of tuples.
Returns a list of tuples of text with the enriched text (if a template is
provided) and a score for the match. Lower scores imply a better match.
Parameters
----------
query : str
String with letters to search in each choice (in ... |
def prepare_inputseries(self, ramflag: bool = True) -> None:
    """Call method |Element.prepare_inputseries| of all handled
    |Element| objects."""
    # Iterate with a progress bar over the handled elements.
    for handled in printtools.progressbar(self):
        handled.prepare_inputseries(ramflag)
def data_in_label(intvl_in, dtype_in_time, dtype_in_vert=False):
    """Create string label specifying the input data of a calculation."""
    # Collapse doubled underscores that appear when a component is empty.
    label = '_'.join(['from', intvl_in, dtype_in_time]).replace('__', '_')
    if dtype_in_vert:
        label = '_'.join([label, dtype_in_vert]).replace('__', '_')
    return label
def crypto_secretstream_xchacha20poly1305_init_push(state, key):
ensure(
isinstance(state, crypto_secretstream_xchacha20poly1305_state),
'State must be a crypto_secretstream_xchacha20poly1305_state object',
raising=exc.TypeError,
)
ensure(
isinstance(key, bytes),
'Key... | Initialize a crypto_secretstream_xchacha20poly1305 encryption buffer.
:param state: a secretstream state object
:type state: crypto_secretstream_xchacha20poly1305_state
:param key: must be
:data:`.crypto_secretstream_xchacha20poly1305_KEYBYTES` long
:type key: bytes
:return: header
... |
def get_forwarding_information_base(self, filter=''):
    """Gets the forwarding information base data for a logical interconnect.
    A maximum of 100 entries is returned. Optional filtering criteria might
    be specified.

    Args:
        filter (list or str): Filtering criteria using supported attributes.

    Returns:
        The collection fetched through the REST helper.
    """
    base_uri = self.data["uri"]
    uri = "{}{}".format(base_uri, self.FORWARDING_INFORMATION_PATH)
    return self._helper.get_collection(uri, filter=filter)
def is_installable_dir(path):
    """Return True if `path` is a directory containing a setup.py file."""
    return (
        os.path.isdir(path)
        and os.path.isfile(os.path.join(path, "setup.py"))
    )
def fixed_indexer(self):
    """indexer for fixed status

    Returns
    -------
    numpy.ndarray
        Boolean mask (``Series.values``) marking parameters whose
        ``partrans`` is "fixed" or "tied".
    """
    transforms = self.pst.parameter_data.partrans
    mask = transforms.apply(lambda t: t in ("fixed", "tied"))
    return mask.values
def scan(cls, path):
    """Scan directory for templates.

    Entries that do not form a valid Template are skipped; an unreadable or
    missing directory yields an empty list.
    """
    templates = []
    try:
        for entry in listdir(path):
            try:
                templates.append(Template(entry, op.join(path, entry)))
            except ValueError:
                # Not a template — skip it.
                pass
    except OSError:
        # Unreadable or missing directory: return what we have (usually []).
        pass
    return templates
def _generate_sequences_for_texts(self, l1, t1, l2, t2, ngrams):
self._reverse_substitutes = dict((v, k) for k, v in
self._substitutes.items())
sequences = []
covered_spans = [[], []]
for ngram in ngrams:
sequences.extend(self._generat... | Generates and outputs aligned sequences for the texts `t1` and `t2`
from `ngrams`.
:param l1: label of first witness
:type l1: `str`
:param t1: text content of first witness
:type t1: `str`
:param l2: label of second witness
:type l2: `str`
:param t2: tex... |
def kill_processes(self):
LOGGER.critical('Max shutdown exceeded, forcibly exiting')
processes = self.active_processes(False)
while processes:
for proc in self.active_processes(False):
if int(proc.pid) != int(os.getpid()):
LOGGER.warning('Killing %... | Gets called on shutdown by the timer when too much time has gone by,
calling the terminate method instead of nicely asking for the consumers
to stop. |
def checkout_with_fetch(git_folder, refspec, repository="origin"):
    """Fetch the refspec, and checkout FETCH_HEAD.

    Beware that you will be in detached head mode.
    """
    _LOGGER.info("Trying to fetch and checkout %s", refspec)
    git = Repo(str(git_folder)).git
    git.fetch(repository, refspec)
    git.checkout("FETCH_HEAD")
    _LOGGER.info("Fetch and checkout success for %s", refspec)
def xyz(self, arrnx3):
if not self.children:
if not arrnx3.shape[0] == 1:
raise ValueError(
'Trying to set position of {} with more than one'
'coordinate: {}'.format(
self, arrnx3))
self.pos = np.squeeze(arrn... | Set the positions of the particles in the Compound, excluding the Ports.
This function does not set the position of the ports.
Parameters
----------
arrnx3 : np.ndarray, shape=(n,3), dtype=float
The new particle positions |
def get_formats(function_types=None):
if function_types is None:
return {k: v['display'] for k, v in _converter_map.items()}
ftypes = [x.lower() for x in function_types]
ftypes = set(ftypes)
ret = []
for fmt, v in _converter_map.items():
if v['valid'] is None or ftypes <= v['valid']:... | Returns the available formats mapped to display name.
This is returned as an ordered dictionary, with the most common
at the top, followed by the rest in alphabetical order
If a list is specified for function_types, only those formats
supporting the given function types will be returned. |
def extract_original_links(base_url, bs4):
valid_url = convert_invalid_url(base_url)
url = urlparse(valid_url)
base_url = '{}://{}'.format(url.scheme, url.netloc)
base_url_with_www = '{}://www.{}'.format(url.scheme, url.netloc)
links = extract_links(bs4)
result_links = [anchor for anchor in link... | Extracting links that contains specific url from BeautifulSoup object
:param base_url: `str` specific url that matched with the links
:param bs4: `BeautifulSoup`
:return: `list` List of links |
def remove(self, priority, observer, callble):
    """Remove one observer, which had priority and callble."""
    self.flush()
    # Walk backwards so deletions do not shift indices we have yet to visit.
    for index in reversed(range(len(self))):
        p, o, c = self[index]
        if priority == p and observer == o and callble == c:
            del self._poc[index]
def GetAccounts(self):
    """Return the client accounts associated with the user's manager account.

    Returns:
      list List of ManagedCustomer data objects.
    """
    service = self.client.GetService('ManagedCustomerService')
    selector = {'fields': ['CustomerId', 'CanManageClients']}
    response = service.get(selector)
    return response['entries']
def send_ether_over_wpa(self, pkt, **kwargs):
    """Send an Ethernet packet using the WPA channel

    Extra arguments will be ignored, and are just left for compatibility
    """
    payload = LLC() / SNAP() / pkt[Ether].payload
    dest = pkt.dst
    if dest != "ff:ff:ff:ff:ff:ff":
        # Unicast frames may only go to the single associated client.
        assert dest == self.client
        self.send_wpa_to_client(payload)
    else:
        self.send_wpa_to_group(payload, dest)
def add(self, pattern, start):
"Recursively adds a linear pattern to the AC automaton"
if not pattern:
return [start]
if isinstance(pattern[0], tuple):
match_nodes = []
for alternative in pattern[0]:
end_nodes = self.add(alternative, start=star... | Recursively adds a linear pattern to the AC automaton |
def identical(self, o):
    """Used to make exact comparisons between two StridedIntervals. Usually
    it is only used in test cases.

    :param o: The other StridedInterval to compare with.
    :return: True if they are exactly same, False otherwise.
    """
    return all(
        getattr(self, attr) == getattr(o, attr)
        for attr in ("bits", "stride", "lower_bound", "upper_bound")
    )
def connect_output(self, node):
    """Connect another node to our output.

    This downstream node will automatically be triggered when we update
    our output.

    Args:
        node (SGNode): The node that should receive our output
    """
    if len(self.outputs) == self.max_outputs:
        raise TooManyOutputsError(
            "Attempted to connect too many nodes to the output of a node",
            max_outputs=self.max_outputs,
            stream=self.stream,
        )
    self.outputs.append(node)
def full_load(self):
self.parse_data_directories()
class RichHeader(object):
pass
rich_header = self.parse_rich_header()
if rich_header:
self.RICH_HEADER = RichHeader()
self.RICH_HEADER.checksum = rich_header.get('checksum', None)
self.RICH... | Process the data directories.
This method will load the data directories which might not have
been loaded if the "fast_load" option was used. |
def is_scalar(value: Any) -> bool:
    """Whether to treat a value as a scalar.

    Any non-iterable, string, or 0-D array
    """
    if getattr(value, 'ndim', None) == 0:
        return True
    if isinstance(value, (str, bytes)):
        return True
    return not isinstance(value, (Iterable, ) + dask_array_type)
def add(self, element):
    """Adds an element to the HyperLogLog. Datatype cardinality will
    be updated when the object is saved.

    :param element: the element to add
    :type element: str
    """
    if isinstance(element, six.string_types):
        self._adds.add(element)
    else:
        raise TypeError("Hll elements can only be strings")
def remove_zero_points(self):
points_of_interest = np.where((np.linalg.norm(self.point_cloud.data, axis=0) != 0.0) &
(np.linalg.norm(self.normal_cloud.data, axis=0) != 0.0) &
(np.isfinite(self.normal_cloud.data[0,:])))[0]
self.... | Remove all elements where the norms and points are zero.
Note
----
This returns nothing and updates the NormalCloud in-place. |
def create(cls, name, md5_password=None, connect_retry=120,
session_hold_timer=180, session_keep_alive=60):
json = {'name': name,
'connect': connect_retry,
'session_hold_timer': session_hold_timer,
'session_keep_alive': session_keep_alive}
i... | Create a new BGP Connection Profile.
:param str name: name of profile
:param str md5_password: optional md5 password
:param int connect_retry: The connect retry timer, in seconds
:param int session_hold_timer: The session hold timer, in seconds
:param int session_keep_alive: The... |
def events(cls, filters):
current = filters.pop('current', False)
current_params = []
if current:
current_params = [('current', 'true')]
filter_url = uparse.urlencode(sorted(list(filters.items())) + current_params)
events = cls.json_get('%s/events?%s' % (cls.api_url, ... | Retrieve events details from status.gandi.net. |
def runTemplate(id, data={}):
conn = Qubole.agent()
path = str(id) + "/run"
res = conn.post(Template.element_path(path), data)
cmdType = res['command_type']
cmdId = res['id']
cmdClass = eval(cmdType)
cmd = cmdClass.find(cmdId)
while not Command.is_done(cmd... | Run an existing Template and waits for the Result.
Prints result to stdout.
Args:
`id`: ID of the template to run
`data`: json data containing the input_vars
Returns:
An integer as status (0: success, 1: failure) |
def _replace_property(property_key, property_value, resource, logical_id):
if property_key and property_value:
resource.get(PROPERTIES_KEY, {})[property_key] = property_value
elif property_key or property_value:
LOG.info("WARNING: Ignoring Metadata for Resource %s. Metadata conta... | Replace a property with an asset on a given resource
This method will mutate the template
Parameters
----------
property str
The property to replace on the resource
property_value str
The new value of the property
resource dict
Dictio... |
def define_log_renderer(fmt, fpath, quiet):
    """the final log processor that structlog requires to render."""
    # A human-friendly console renderer is used only when neither an explicit
    # format nor a log file was requested and stderr is an interactive tty.
    wants_console = (
        not fmt
        and fpath is None
        and sys.stderr.isatty()
        and not quiet
    )
    if wants_console:
        return structlog.dev.ConsoleRenderer()
    return structlog.processors.JSONRenderer()
def run_task(factory, **kwargs):
context = TaskContext(factory, **kwargs)
pstats_dir = kwargs.get("pstats_dir", os.getenv(PSTATS_DIR))
if pstats_dir:
import cProfile
import tempfile
import pydoop.hdfs as hdfs
hdfs.mkdir(pstats_dir)
fd, pstats_fn = tempfile.mkstemp(suf... | \
Run a MapReduce task.
Available keyword arguments:
* ``raw_keys`` (default: :obj:`False`): pass map input keys to context
as byte strings (ignore any type information)
* ``raw_values`` (default: :obj:`False`): pass map input values to context
as byte strings (ignore any type information)... |
def run_strelka(job, tumor_bam, normal_bam, univ_options, strelka_options, split=True):
if strelka_options['chromosomes']:
chromosomes = strelka_options['chromosomes']
else:
chromosomes = sample_chromosomes(job, strelka_options['genome_fai'])
num_cores = min(len(chromosomes), univ_options['m... | Run the strelka subgraph on the DNA bams. Optionally split the results into per-chromosome
vcfs.
:param dict tumor_bam: Dict of bam and bai for tumor DNA-Seq
:param dict normal_bam: Dict of bam and bai for normal DNA-Seq
:param dict univ_options: Dict of universal options used by almost all tools
... |
def _to_str(dumped_val, encoding='utf-8', ordered=True):
_dict = OrderedDict if ordered else dict
if isinstance(dumped_val, dict):
return OrderedDict((k, _to_str(v, encoding)) for k,v in dumped_val.items())
elif isinstance(dumped_val, (list, tuple)):
return [_to_str(v, encoding) for v in dum... | Convert bytes in a dump value to str, allowing json encode |
def _check_psutil(self, instance):
    """Gather metrics about connections states and interfaces counters
    using psutil facilities
    """
    tags = instance.get('tags', [])
    if self._collect_cx_state:
        self._cx_state_psutil(tags=tags)
    self._cx_counters_psutil(tags=tags)
def set_primary_parameters(self, **kwargs):
given = sorted(kwargs.keys())
required = sorted(self._PRIMARY_PARAMETERS)
if given == required:
for (key, value) in kwargs.items():
setattr(self, key, value)
else:
raise ValueError(
'When ... | Set all primary parameters at once. |
def encode_chain_list(in_strings):
    """Convert a list of strings to a byte array, NULL-padding each entry
    to the fixed per-chain width.

    :param in_strings: the input strings
    :return: the encoded bytes (each id padded to CHAIN_LEN bytes)
    """
    # Hoist the constants and build via join: the old `out_bytes +=` loop
    # with a per-byte inner loop copied the growing buffer on every append
    # (quadratic in total output size).
    chain_len = mmtf.utils.constants.CHAIN_LEN
    null_byte = mmtf.utils.constants.NULL_BYTE.encode('ascii')
    chunks = []
    for in_s in in_strings:
        encoded = in_s.encode('ascii')
        chunks.append(encoded + null_byte * (chain_len - len(in_s)))
    return b"".join(chunks)
def function(self, addr=None, name=None, create=False, syscall=False, plt=None):
if addr is not None:
try:
f = self._function_map.get(addr)
if plt is None or f.is_plt == plt:
return f
except KeyError:
if create:
... | Get a function object from the function manager.
Pass either `addr` or `name` with the appropriate values.
:param int addr: Address of the function.
:param str name: Name of the function.
:param bool create: Whether to create the function or not if the function does not exist.
... |
def process_global(name, val=None, setval=False):
    """Access and set global variables for the current process."""
    proc = current_process()
    # Stash the registry on the process object the first time it is needed.
    if not hasattr(proc, '_pulsar_globals'):
        proc._pulsar_globals = {'lock': Lock()}
    store = proc._pulsar_globals
    if setval:
        store[name] = val
    else:
        return store.get(name)
def retire_asset_ddo(self, did):
response = self.requests_session.delete(f'{self.url}/{did}', headers=self._headers)
if response.status_code == 200:
logging.debug(f'Removed asset DID: {did} from metadata store')
return response
raise AquariusGenericError(f'Unable to remov... | Retire asset ddo of Aquarius.
:param did: Asset DID string
:return: API response (depends on implementation) |
def check_server_running(pid):
    """Determine if the given process is running.

    The current process never counts as "the server running". Signal 0
    probes for existence without actually delivering a signal.
    """
    if pid == os.getpid():
        return False
    try:
        os.kill(pid, 0)
    except OSError as err:
        if err.errno != errno.ESRCH:
            raise
        return False
    return True
def info(msg, *args, **kw):
    """Print sys message to stdout.

    System messages should inform about the flow of the script. This should
    be a major milestones during the build.
    """
    if args or kw:
        msg = msg.format(*args, **kw)
    shell.cprint('-- <32>{}<0>'.format(msg))
def _AppendRecord(self):
if not self.values:
return
cur_record = []
for value in self.values:
try:
value.OnSaveRecord()
except SkipRecord:
self._ClearRecord()
return
except SkipValue:
continue
cur_record.append(value.value)
if len(cur_record)... | Adds current record to result if well formed. |
def register_serialization_method(self, name, serialize_func):
    """Register a custom serialization method that can be
    used via schema configuration

    :param name: method name; must not collide with a built-in method
    :param serialize_func: callable implementing the serialization
    :raises ValueError: if ``name`` would shadow a default method
    """
    if name in self._default_serialization_methods:
        # FIX: the %s placeholder was never interpolated, so the error
        # message printed a literal "%s" instead of the method name.
        raise ValueError(
            "Can't replace original %s serialization method" % name
        )
    self._serialization_methods[name] = serialize_func
def loads(string, filename=None, includedir=''):
    """Load the contents of ``string`` to a Python object

    The returned object is a subclass of ``dict`` that exposes string keys as
    attributes as well.

    Example::

        config = libconf.loads('window: { title: "libconfig example"; };')
        config['window']['title']  # -> 'libconfig example'
    """
    try:
        f = io.StringIO(string)
    except TypeError:
        # FIX: typo in the error message ("must by" -> "must be").
        raise TypeError("libconf.loads() input string must be unicode")
    return load(f, filename=filename, includedir=includedir)
def get_mmax(self, mfd_conf, msr, rake, area):
if mfd_conf['Maximum_Magnitude']:
self.mmax = mfd_conf['Maximum_Magnitude']
else:
self.mmax = msr.get_median_mag(area, rake)
if ('Maximum_Magnitude_Uncertainty' in mfd_conf and
mfd_conf['Maximum_Magnitude_Unce... | Gets the mmax for the fault - reading directly from the config file
or using the msr otherwise
:param dict mfd_config:
Configuration file (see setUp for paramters)
:param msr:
Instance of :class:`nhlib.scalerel`
:param float rake:
Rake of the fault ... |
def profile(model_specification, results_directory, process):
model_specification = Path(model_specification)
results_directory = Path(results_directory)
out_stats_file = results_directory / f'{model_specification.name}'.replace('yaml', 'stats')
command = f'run_simulation("{model_specification}", "{resu... | Run a simulation based on the provided MODEL_SPECIFICATION and profile
the run. |
def count(self, q):
    """Shorthand for counting the results of a specific query.

    ## Arguments
    * `q` (str): The query to count. This will be executed as:
        `"SELECT COUNT(*) %s" % q`.

    ## Returns
    * `count` (int): The resulting count.
    """
    output = self.quick("SELECT COUNT(*) %s" % q)
    # The count value sits on the second line of the raw output.
    return int(output.split("\n")[1])
def as_fs(self):
fs = []
fs.append("cpe:2.3:")
for i in range(0, len(CPEComponent.ordered_comp_parts)):
ck = CPEComponent.ordered_comp_parts[i]
lc = self._get_attribute_components(ck)
if len(lc) > 1:
errmsg = "Incompatible version {0} with form... | Returns the CPE Name as formatted string of version 2.3.
:returns: CPE Name as formatted string
:rtype: string
:exception: TypeError - incompatible version |
def get(self, id, service='facebook', type='analysis'):
    """Get a given Pylon task

    :param id: The ID of the task
    :type id: str
    :param service: The PYLON service (facebook)
    :type service: str
    :return: dict of REST API output with headers attached
    :rtype: :class:`~datasift.request.DictResponse`
    """
    endpoint = service + '/task/' + type + '/' + id
    return self.request.get(endpoint)
def ping(self, timeout=0, **kwargs):
def rand_id(size=8, chars=string.ascii_uppercase + string.digits):
return ''.join(random.choice(chars) for _ in range(size))
payload = rand_id()
self.ws.ping(payload)
opcode, data = self.recv_raw(timeout, [websocket.ABNF.OPCODE_PONG], **kw... | THIS DOES NOT WORK, UWSGI DOES NOT RESPOND TO PINGS |
def GetRadioButtonSelect(selectList, title="Select", msg=""):
root = tkinter.Tk()
root.title(title)
val = tkinter.IntVar()
val.set(0)
if msg != "":
tkinter.Label(root, text=msg).pack()
index = 0
for item in selectList:
tkinter.Radiobutton(root, text=item, variable=val,
... | Create radio button window for option selection
title: Window name
msg: Label of the radio button
return (selectedItem, selectedIndex) |
def create_rack(self):
    """Get an instance of rack services facade."""
    credentials = (self.user, self.password, self.user_ldap)
    return Rack(self.networkapi_url, *credentials)
def get_queryset(self):
    """Check that the queryset is defined and call it."""
    queryset = self.queryset
    if queryset is None:
        raise ImproperlyConfigured(
            "'%s' must define 'queryset'" % self.__class__.__name__)
    return queryset()
def compile_all():
print("Compiling for PyQt4: style.qrc -> pyqt_style_rc.py")
os.system("pyrcc4 -py3 style.qrc -o pyqt_style_rc.py")
print("Compiling for PyQt5: style.qrc -> pyqt5_style_rc.py")
os.system("pyrcc5 style.qrc -o pyqt5_style_rc.py")
print("Compiling for PySide: style.qrc -> pyside_style... | Compile style.qrc using rcc, pyside-rcc and pyrcc4 |
def _time_from_iso8601_time_naive(value):
if len(value) == 8:
fmt = _TIMEONLY_NO_FRACTION
elif len(value) == 15:
fmt = _TIMEONLY_W_MICROS
else:
raise ValueError("Unknown time format: {}".format(value))
return datetime.datetime.strptime(value, fmt).time() | Convert a zoneless ISO8601 time string to naive datetime time
:type value: str
:param value: The time string to convert
:rtype: :class:`datetime.time`
:returns: A datetime time object created from the string
:raises ValueError: if the value does not match a known format. |
def from_request(cls, request):
request_headers = HeaderDict()
other_headers = ['CONTENT_TYPE', 'CONTENT_LENGTH']
for header, value in iteritems(request.META):
is_header = header.startswith('HTTP_') or header in other_headers
normalized_header = cls._normalize_django_head... | Generate a HeaderDict based on django request object meta data. |
def generateCertificate(cls):
key = generate_key()
cert = generate_certificate(key)
return cls(key=key, cert=cert) | Create and return an X.509 certificate and corresponding private key.
:rtype: RTCCertificate |
def _CSI(self, cmd):
sys.stdout.write('\x1b[')
sys.stdout.write(cmd) | Control sequence introducer |
def recurrence(self, recurrence):
if not is_valid_recurrence(recurrence):
raise KeyError("'%s' is not a valid recurrence value" % recurrence)
self._recurrence = recurrence | See `recurrence`. |
def safe_wraps(wrapper, *args, **kwargs):
while isinstance(wrapper, functools.partial):
wrapper = wrapper.func
return functools.wraps(wrapper, *args, **kwargs) | Safely wraps partial functions. |
def folderitems(self):
items = super(AnalysisRequestAnalysesView, self).folderitems()
self.categories.sort()
return items | XXX refactor if possible to non-classic mode |
def get_ip_address():
try:
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
ip_address = s.getsockname()[0]
except socket_error as sockerr:
if sockerr.errno != errno.ENETUNREACH:
raise sockerr
ip_address = socket.gethostbyname(socket.getfqdn())
finally:
... | Simple utility to get host IP address. |
def validate_password(entry, username, check_function, password=None, retries=1, save_on_success=True, prompt=None, **check_args):
if password is None:
password = get_password(entry, username, prompt)
for _ in xrange(retries + 1):
if check_function(username, password, **check_args):
... | Validate a password with a check function & retry if the password is incorrect.
Useful for after a user has changed their password in LDAP, but their local keychain entry is then out of sync.
:param str entry: The keychain entry to fetch a password from.
:param str username: The username to authenti... |
def start(self):
resp = self.post('start')
if resp.is_fail():
return None
if 'result' not in resp.data:
return None
result = resp.data['result']
return {
'user': result['user'],
'ws_host': result['ws_host'],
} | Gets the rtm ws_host and user information
Returns:
None if request failed,
else a dict containing "user"(User) and "ws_host" |
def _validate_alias_file_path(alias_file_path):
if not os.path.exists(alias_file_path):
raise CLIError(ALIAS_FILE_NOT_FOUND_ERROR)
if os.path.isdir(alias_file_path):
raise CLIError(ALIAS_FILE_DIR_ERROR.format(alias_file_path)) | Make sure the alias file path is neither non-existant nor a directory
Args:
The alias file path to import aliases from. |
def pytype_to_deps(t):
res = set()
for hpp_dep in pytype_to_deps_hpp(t):
res.add(os.path.join('pythonic', 'types', hpp_dep))
res.add(os.path.join('pythonic', 'include', 'types', hpp_dep))
return res | python -> pythonic type header full path. |
def build_input_table(cls, name='inputTableName', input_name='input'):
obj = cls(name)
obj.exporter = 'get_input_table_name'
obj.input_name = input_name
return obj | Build an input table parameter
:param name: parameter name
:type name: str
:param input_name: bind input port name
:param input_name: str
:return: input description
:rtype: ParamDef |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.