code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def wait_for_servers(session, servers):
nclient = nova.Client(NOVA_VERSION, session=session,
region_name=os.environ['OS_REGION_NAME'])
while True:
deployed = []
undeployed = []
for server in servers:
c = nclient.servers.get(server.id)
if c.addresses != {} and c.status == 'ACTIVE':
deployed.append(server)
if c.status == 'ERROR':
undeployed.append(server)
logger.info("[nova]: Polling the Deployment")
logger.info("[nova]: %s deployed servers" % len(deployed))
logger.info("[nova]: %s undeployed servers" % len(undeployed))
if len(deployed) + len(undeployed) >= len(servers):
break
time.sleep(3)
return deployed, undeployed | Wait for the servers to be ready.
Note(msimonin): we don't garantee the SSH connection to be ready. |
def set_attribute(self, obj, attr, value):
if isinstance(obj, MutableMapping):
obj[attr] = value
else:
setattr(obj, attr, value) | Set value of attribute in given object instance.
Reason for existence of this method is the fact that 'attribute' can
be also a object's key if it is a dict or any other kind of mapping.
Args:
obj (object): object instance to modify
attr (str): attribute (or key) to change
value: value to set |
def points(self):
if self._points is None:
_points = self.soma.points.tolist()
for n in self.neurites:
_points.extend(n.points.tolist())
self._points = np.array(_points)
return self._points | Return unordered array with all the points in this neuron |
def macaroon_ops(self, macaroons):
if len(macaroons) == 0:
raise ValueError('no macaroons provided')
storage_id, ops = _decode_macaroon_id(macaroons[0].identifier_bytes)
root_key = self.root_keystore_for_ops(ops).get(storage_id)
if root_key is None:
raise VerificationError(
'macaroon key not found in storage')
v = Verifier()
conditions = []
def validator(condition):
conditions.append(condition)
return True
v.satisfy_general(validator)
try:
v.verify(macaroons[0], root_key, macaroons[1:])
except Exception as exc:
raise six.raise_from(
VerificationError('verification failed: {}'.format(str(exc))),
exc,
)
if (self.ops_store is not None
and len(ops) == 1
and ops[0].entity.startswith('multi-')):
ops = self.ops_store.get_ops(ops[0].entity)
return ops, conditions | This method makes the oven satisfy the MacaroonOpStore protocol
required by the Checker class.
For macaroons minted with previous bakery versions, it always
returns a single LoginOp operation.
:param macaroons:
:return: |
def decode_buffer(buffer: dict) -> np.ndarray:
buf = np.frombuffer(buffer['data'], dtype=buffer['dtype'])
return buf.reshape(buffer['shape']) | Translate a DataBuffer into a numpy array.
:param buffer: Dictionary with 'data' byte array, 'dtype', and 'shape' fields
:return: NumPy array of decoded data |
def _get_json(self, url):
self.log.info(u"/GET " + url)
r = requests.get(url)
if hasattr(r, 'from_cache'):
if r.from_cache:
self.log.info("(from cache)")
if r.status_code != 200:
throw_request_err(r)
return r.json() | Get json from url |
def margin(self, axis):
if self._slice.ndim < 2:
msg = (
"Scale Means marginal cannot be calculated on 1D cubes, as"
"the scale means already get reduced to a scalar value."
)
raise ValueError(msg)
dimension_index = 1 - axis
margin = self._slice.margin(axis=axis)
if len(margin.shape) > 1:
index = [
0 if d.dimension_type == DT.MR else slice(None)
for d in self._slice.dimensions
]
margin = margin[index]
total = np.sum(margin)
values = self.values[dimension_index]
if values is None:
return None
return np.sum(values * margin) / total | Return marginal value of the current slice scaled means.
This value is the the same what you would get from a single variable
(constituting a 2D cube/slice), when the "non-missing" filter of the
opposite variable would be applied. This behavior is consistent with
what is visible in the front-end client. |
def repr2(obj_, **kwargs):
kwargs['nl'] = kwargs.pop('nl', kwargs.pop('newlines', False))
val_str = _make_valstr(**kwargs)
return val_str(obj_) | Attempt to replace repr more configurable
pretty version that works the same in both 2 and 3 |
def _convert_token(self, token):
token = token.copy()
if "expiresOn" in token and "expiresIn" in token:
token["expiresOn"] = token['expiresIn'] + time.time()
return {self._case.sub(r'\1_\2', k).lower(): v
for k, v in token.items()} | Convert token fields from camel case.
:param dict token: An authentication token.
:rtype: dict |
def _set_new_object(self, new_obj, inherited_obj, new_class, superclass,
qualifier_repo, propagated, type_str):
assert isinstance(new_obj, (CIMMethod, CIMProperty, CIMParameter))
if inherited_obj:
inherited_obj_qual = inherited_obj.qualifiers
else:
inherited_obj_qual = None
if propagated:
assert superclass is not None
new_obj.propagated = propagated
if propagated:
assert inherited_obj is not None
new_obj.class_origin = inherited_obj.class_origin
else:
assert inherited_obj is None
new_obj.class_origin = new_class.classname
self._resolve_qualifiers(new_obj.qualifiers,
inherited_obj_qual,
new_class,
superclass,
new_obj.name, type_str,
qualifier_repo,
propagate=propagated) | Set the object attributes for a single object and resolve the
qualifiers. This sets attributes for Properties, Methods, and
Parameters. |
def cli(env, identifier, crt, csr, icc, key, notes):
template = {'id': identifier}
if crt:
template['certificate'] = open(crt).read()
if key:
template['privateKey'] = open(key).read()
if csr:
template['certificateSigningRequest'] = open(csr).read()
if icc:
template['intermediateCertificate'] = open(icc).read()
if notes:
template['notes'] = notes
manager = SoftLayer.SSLManager(env.client)
manager.edit_certificate(template) | Edit SSL certificate. |
def decyear2dt(t):
year = int(t)
rem = t - year
base = datetime(year, 1, 1)
dt = base + timedelta(seconds=(base.replace(year=base.year+1) - base).total_seconds() * rem)
return dt | Convert decimal year to datetime |
def download_attachments(self):
if not self._parent.has_attachments:
log.debug(
'Parent {} has no attachments, skipping out early.'.format(
self._parent.__class__.__name__))
return False
if not self._parent.object_id:
raise RuntimeError(
'Attempted to download attachments of an unsaved {}'.format(
self._parent.__class__.__name__))
url = self.build_url(self._endpoints.get('attachments').format(
id=self._parent.object_id))
response = self._parent.con.get(url)
if not response:
return False
attachments = response.json().get('value', [])
self.untrack = True
self.add({self._cloud_data_key: attachments})
self.untrack = False
return True | Downloads this message attachments into memory.
Need a call to 'attachment.save' to save them on disk.
:return: Success / Failure
:rtype: bool |
def _mkdirs_impacket(path, share='C$', conn=None, host=None, username=None, password=None):
if conn is None:
conn = get_conn(host, username, password)
if conn is False:
return False
comps = path.split('/')
pos = 1
for comp in comps:
cwd = '\\'.join(comps[0:pos])
try:
conn.listPath(share, cwd)
except (smbSessionError, smb3SessionError):
log.exception('Encountered error running conn.listPath')
conn.createDirectory(share, cwd)
pos += 1 | Recursively create a directory structure on an SMB share
Paths should be passed in with forward-slash delimiters, and should not
start with a forward-slash. |
def attention_bias_batch(batch_coordinates_q,
batch_coordinates_k=None,
condition_fn=None):
if batch_coordinates_k is None:
batch_coordinates_k = batch_coordinates_q
def to_float(bc):
bc = tf.squeeze(bc, 1)
bc = tf.to_float(bc)
return bc
bc_v = tf.expand_dims(to_float(batch_coordinates_q), 1)
bc_h = tf.expand_dims(to_float(batch_coordinates_k), 0)
bias_batch = bc_h - bc_v
bias_batch = condition_fn(bias_batch)
bias_batch *= -1e9
return bias_batch | Generate a mask to prevent the batch to attend to each others.
Args:
batch_coordinates_q: Int-like Tensor of shape [length_q, 1] containing the
coordinates of the batches
batch_coordinates_k: Int-like Tensor of shape [length_k, 1] containing the
coordinates of the batches. If None, do self-attention.
condition_fn: Callable defining the attention mask.
Returns:
Float-like Tensor of shape [length_q, length_k] containing either 0 or
-infinity (-1e9). |
def normalize_cmd(self, command):
command = command.rstrip()
command += self.RETURN
return command | Normalize CLI commands to have a single trailing newline.
:param command: Command that may require line feed to be normalized
:type command: str |
def walk_dir(dir_path, walk_after, recurse=None, archive_mtime=None):
if recurse is None:
recurse = Settings.recurse
result_set = set()
if not recurse:
return result_set
for root, _, filenames in os.walk(dir_path):
for filename in filenames:
filename_full = os.path.join(root, filename)
try:
results = walk_file(filename_full, walk_after, recurse,
archive_mtime)
result_set = result_set.union(results)
except Exception:
print("Error with file: {}".format(filename_full))
raise
return result_set | Recursively optimize a directory. |
def draw_pin(text, background_color='green', font_color='white'):
image = Image.new('RGB', (120, 20))
draw = ImageDraw.Draw(image)
draw.rectangle([(1, 1), (118, 18)], fill=color(background_color))
draw.text((10, 4), text, fill=color(font_color))
return image | Draws and returns a pin with the specified text and color scheme |
def count_values(tokens):
ntoks = 0
for tok in tokens:
if tok in ('=', '/', '$', '&'):
if ntoks > 0 and tok == '=':
ntoks -= 1
break
elif tok in whitespace + ',':
continue
else:
ntoks += 1
return ntoks | Identify the number of values ahead of the current token. |
def get_stripe_dashboard_url(self):
if not self.stripe_dashboard_item_name or not self.id:
return ""
else:
return "{base_url}{item}/{id}".format(
base_url=self._get_base_stripe_dashboard_url(),
item=self.stripe_dashboard_item_name,
id=self.id,
) | Get the stripe dashboard url for this object. |
def format_page(self, page, link_resolver, output):
debug('Formatting page %s' % page.link.ref, 'formatting')
if output:
actual_output = os.path.join(output,
'html')
if not os.path.exists(actual_output):
os.makedirs(actual_output)
else:
actual_output = None
page.format(self.formatter, link_resolver, actual_output) | Called by `project.Project.format_page`, to leave full control
to extensions over the formatting of the pages they are
responsible of.
Args:
page: tree.Page, the page to format.
link_resolver: links.LinkResolver, object responsible
for resolving links potentially mentioned in `page`
output: str, path to the output directory. |
def get_list_dimensions(_list):
if isinstance(_list, list) or isinstance(_list, tuple):
return [len(_list)] + get_list_dimensions(_list[0])
return [] | Takes a nested list and returns the size of each dimension followed
by the element type in the list |
def tryOrder(self, commit: Commit):
canOrder, reason = self.canOrder(commit)
if canOrder:
self.logger.trace("{} returning request to node".format(self))
self.doOrder(commit)
else:
self.logger.debug("{} cannot return request to node: {}".format(self, reason))
return canOrder | Try to order if the Commit message is ready to be ordered. |
def forwards(apps, schema_editor):
Work = apps.get_model('spectator_events', 'Work')
for work in Work.objects.all():
if not work.slug:
work.slug = generate_slug(work.pk)
work.save() | Re-save all the Works because something earlier didn't create their slugs. |
def _create_scheduled_actions(conn, as_name, scheduled_actions):
if scheduled_actions:
for name, action in six.iteritems(scheduled_actions):
if 'start_time' in action and isinstance(action['start_time'], six.string_types):
action['start_time'] = datetime.datetime.strptime(
action['start_time'], DATE_FORMAT
)
if 'end_time' in action and isinstance(action['end_time'], six.string_types):
action['end_time'] = datetime.datetime.strptime(
action['end_time'], DATE_FORMAT
)
conn.create_scheduled_group_action(as_name, name,
desired_capacity=action.get('desired_capacity'),
min_size=action.get('min_size'),
max_size=action.get('max_size'),
start_time=action.get('start_time'),
end_time=action.get('end_time'),
recurrence=action.get('recurrence')
) | Helper function to create scheduled actions |
def partition_pairs(neurites, neurite_type=NeuriteType.all):
return map(_bifurcationfunc.partition_pair,
iter_sections(neurites,
iterator_type=Tree.ibifurcation_point,
neurite_filter=is_type(neurite_type))) | Partition pairs at bifurcation points of a collection of neurites.
Partition pait is defined as the number of bifurcations at the two
daughters of the bifurcating section |
def _multiple_field(cls):
klassdict = cls.__dict__
try:
return klassdict["_entitylist_multifield"][0]
except (KeyError, IndexError, TypeError):
from . import fields
multifield_tuple = tuple(fields.find(cls, multiple=True))
assert len(multifield_tuple) == 1
multifield = multifield_tuple[0]
assert issubclass(multifield.type_, Entity)
cls._entitylist_multifield = multifield_tuple
return multifield_tuple[0] | Return the "multiple" TypedField associated with this EntityList.
This also lazily sets the ``_entitylist_multiplefield`` value if it
hasn't been set yet. This is set to a tuple containing one item because
if we set the class attribute to the TypedField, we would effectively
add a TypedField descriptor to the class, which we don't want.
Raises:
AssertionError: If there is more than one multiple TypedField
or the the TypedField type_ is not a subclass of Entity. |
def import_from_setting(setting_name, fallback):
path = getattr(settings, setting_name, None)
if path:
try:
return import_string(path)
except ImportError:
raise ImproperlyConfigured('%s: No such path.' % path)
else:
return fallback | Return the resolution of an import path stored in a Django setting.
:arg setting_name: The name of the setting holding the import path
:arg fallback: An alternate object to use if the setting is empty or
doesn't exist
Raise ImproperlyConfigured if a path is given that can't be resolved. |
def get_user(self, user_id, password):
self.con.execute('SELECT uid, pwHash FROM archive_users WHERE userId = %s;', (user_id,))
results = self.con.fetchall()
if len(results) == 0:
raise ValueError("No such user")
pw_hash = results[0]['pwHash']
if not passlib.hash.bcrypt.verify(password, pw_hash):
raise ValueError("Incorrect password")
self.con.execute('SELECT name FROM archive_roles r INNER JOIN archive_user_roles u ON u.roleId=r.uid '
'WHERE u.userId = %s;', (results[0]['uid'],))
role_list = [row['name'] for row in self.con.fetchall()]
return mp.User(user_id=user_id, roles=role_list) | Retrieve a user record
:param user_id:
the user ID
:param password:
password
:return:
A :class:`meteorpi_model.User` if everything is correct
:raises:
ValueError if the user is found but password is incorrect or if the user is not found. |
def logout(self):
self.client.write('exit\r\n')
self.client.read_all()
self.client.close() | Logout from the remote server. |
def tdev(data, rate=1.0, data_type="phase", taus=None):
phase = input_to_phase(data, rate, data_type)
(taus, md, mde, ns) = mdev(phase, rate=rate, taus=taus)
td = taus * md / np.sqrt(3.0)
tde = td / np.sqrt(ns)
return taus, td, tde, ns | Time deviation.
Based on modified Allan variance.
.. math::
\\sigma^2_{TDEV}( \\tau ) = { \\tau^2 \\over 3 }
\\sigma^2_{MDEV}( \\tau )
Note that TDEV has a unit of seconds.
Parameters
----------
data: np.array
Input data. Provide either phase or frequency (fractional,
adimensional).
rate: float
The sampling rate for data, in Hz. Defaults to 1.0
data_type: {'phase', 'freq'}
Data type, i.e. phase or frequency. Defaults to "phase".
taus: np.array
Array of tau values, in seconds, for which to compute statistic.
Optionally set taus=["all"|"octave"|"decade"] for automatic
tau-list generation.
Returns
-------
(taus, tdev, tdev_error, ns): tuple
Tuple of values
taus: np.array
Tau values for which td computed
tdev: np.array
Computed time deviations (in seconds) for each tau value
tdev_errors: np.array
Time deviation errors
ns: np.array
Values of N used in mdev_phase()
Notes
-----
http://en.wikipedia.org/wiki/Time_deviation |
def build_latent_variables(self):
lvs_to_build = []
lvs_to_build.append(['Noise Sigma^2', fam.Flat(transform='exp'), fam.Normal(0,3), -1.0])
for lag in range(self.X.shape[1]):
lvs_to_build.append(['l lag' + str(lag+1), fam.FLat(transform='exp'), fam.Normal(0,3), -1.0])
lvs_to_build.append(['tau', fam.Flat(transform='exp'), fam.Normal(0,3), -1.0])
return lvs_to_build | Builds latent variables for this kernel
Returns
----------
- A list of lists (each sub-list contains latent variable information) |
def import_eit_fzj(self, filename, configfile, correction_file=None,
timestep=None, **kwargs):
df_emd, dummy1, dummy2 = eit_fzj.read_3p_data(
filename,
configfile,
**kwargs
)
if correction_file is not None:
eit_fzj_utils.apply_correction_factors(df_emd, correction_file)
if timestep is not None:
df_emd['timestep'] = timestep
self._add_to_container(df_emd)
print('Summary:')
self._describe_data(df_emd) | EIT data import for FZJ Medusa systems |
def register(key, initializer: callable, param=None):
get_current_scope().container.register(key, initializer, param) | Adds resolver to global container |
def uniqueTags(tagList):
ret = []
alreadyAdded = set()
for tag in tagList:
myUid = tag.getUid()
if myUid in alreadyAdded:
continue
ret.append(tag)
return TagCollection(ret) | uniqueTags - Returns the unique tags in tagList.
@param tagList list<AdvancedTag> : A list of tag objects. |
def many_psds(k=2,fs=1.0, b0=1.0, N=1024):
psd=[]
for j in range(k):
print j
x = noise.white(N=2*4096,b0=b0,fs=fs)
f, tmp = noise.numpy_psd(x,fs)
if j==0:
psd = tmp
else:
psd = psd + tmp
return f, psd/k | compute average of many PSDs |
def _get_file_iterator(self, file_obj):
file_obj.seek(0)
return iter(lambda: file_obj.read(self.read_bs), '') | For given `file_obj` return iterator, which will read the file in
`self.read_bs` chunks.
Args:
file_obj (file): File-like object.
Return:
iterator: Iterator reading the file-like object in chunks. |
def _get_reference(document_path, reference_map):
try:
return reference_map[document_path]
except KeyError:
msg = _BAD_DOC_TEMPLATE.format(document_path)
raise ValueError(msg) | Get a document reference from a dictionary.
This just wraps a simple dictionary look-up with a helpful error that is
specific to :meth:`~.firestore.client.Client.get_all`, the
**public** caller of this function.
Args:
document_path (str): A fully-qualified document path.
reference_map (Dict[str, .DocumentReference]): A mapping (produced
by :func:`_reference_info`) of fully-qualified document paths to
document references.
Returns:
.DocumentReference: The matching reference.
Raises:
ValueError: If ``document_path`` has not been encountered. |
def uddc(udfunc, x, dx):
x = ctypes.c_double(x)
dx = ctypes.c_double(dx)
isdescr = ctypes.c_int()
libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr))
return bool(isdescr.value) | SPICE private routine intended solely for the support of SPICE
routines. Users should not call this routine directly due to the
volatile nature of this routine.
This routine calculates the derivative of 'udfunc' with respect
to time for 'et', then determines if the derivative has a
negative value.
Use the @spiceypy.utils.callbacks.SpiceUDFUNS dectorator to wrap
a given python function that takes one parameter (float) and
returns a float. For example::
@spiceypy.utils.callbacks.SpiceUDFUNS
def udfunc(et_in):
pos, new_et = spice.spkpos("MERCURY", et_in, "J2000", "LT+S", "MOON")
return new_et
deriv = spice.uddf(udfunc, et, 1.0)
https://naif.jpl.nasa.gov/pub/naif/toolkit_docs/C/cspice/uddc_c.html
:param udfunc: Name of the routine that computes the scalar value of interest.
:type udfunc: ctypes.CFunctionType
:param x: Independent variable of 'udfunc'.
:type x: float
:param dx: Interval from 'x' for derivative calculation.
:type dx: float
:return: Boolean indicating if the derivative is negative.
:rtype: bool |
def initialize_block(self, block_header):
state_view = \
BlockWrapper.state_view_for_block(
self._block_cache.block_store.chain_head,
self._state_view_factory)
settings_view = SettingsView(state_view)
self._min_wait_time = settings_view.get_setting(
"sawtooth.consensus.min_wait_time", self._min_wait_time, int)
self._max_wait_time = settings_view.get_setting(
"sawtooth.consensus.max_wait_time", self._max_wait_time, int)
self._valid_block_publishers = settings_view.get_setting(
"sawtooth.consensus.valid_block_publishers",
self._valid_block_publishers,
list)
block_header.consensus = b"Devmode"
self._start_time = time.time()
self._wait_time = random.uniform(
self._min_wait_time, self._max_wait_time)
return True | Do initialization necessary for the consensus to claim a block,
this may include initiating voting activates, starting proof of work
hash generation, or create a PoET wait timer.
Args:
block_header (BlockHeader): the BlockHeader to initialize.
Returns:
True |
def remove_templates(self):
self.hosts.remove_templates()
self.contacts.remove_templates()
self.services.remove_templates()
self.servicedependencies.remove_templates()
self.hostdependencies.remove_templates()
self.timeperiods.remove_templates() | Clean useless elements like templates because they are not needed anymore
:return: None |
def factor_rank_autocorrelation(factor_data, period=1):
grouper = [factor_data.index.get_level_values('date')]
ranks = factor_data.groupby(grouper)['factor'].rank()
asset_factor_rank = ranks.reset_index().pivot(index='date',
columns='asset',
values='factor')
if isinstance(period, int):
asset_shifted = asset_factor_rank.shift(period)
else:
shifted_idx = utils.add_custom_calendar_timedelta(
asset_factor_rank.index, -pd.Timedelta(period),
factor_data.index.levels[0].freq)
asset_shifted = asset_factor_rank.reindex(shifted_idx)
asset_shifted.index = asset_factor_rank.index
autocorr = asset_factor_rank.corrwith(asset_shifted, axis=1)
autocorr.name = period
return autocorr | Computes autocorrelation of mean factor ranks in specified time spans.
We must compare period to period factor ranks rather than factor values
to account for systematic shifts in the factor values of all names or names
within a group. This metric is useful for measuring the turnover of a
factor. If the value of a factor for each name changes randomly from period
to period, we'd expect an autocorrelation of 0.
Parameters
----------
factor_data : pd.DataFrame - MultiIndex
A MultiIndex DataFrame indexed by date (level 0) and asset (level 1),
containing the values for a single alpha factor, forward returns for
each period, the factor quantile/bin that factor value belongs to, and
(optionally) the group the asset belongs to.
- See full explanation in utils.get_clean_factor_and_forward_returns
period: string or int, optional
Period over which to calculate the turnover. If it is a string it must
follow pandas.Timedelta constructor format (e.g. '1 days', '1D', '30m',
'3h', '1D1h', etc).
Returns
-------
autocorr : pd.Series
Rolling 1 period (defined by time_rule) autocorrelation of
factor values. |
def asgray(im):
if im.ndim == 2:
return im
elif im.ndim == 3 and im.shape[2] in (3, 4):
return im[..., :3].mean(axis=-1)
else:
raise ValueError('Invalid image format') | Takes an image and returns its grayscale version by averaging the color
channels. if an alpha channel is present, it will simply be ignored. If a
grayscale image is given, the original image is returned.
Parameters
----------
image : ndarray, ndim 2 or 3
RGB or grayscale image.
Returns
-------
gray_image : ndarray, ndim 2
Grayscale version of image. |
def unique(seq, idfunc=None):
if idfunc is None:
idfunc = lambda x: x
preserved_type = type(seq)
seen = {}
result = []
for item in seq:
marker = idfunc(item)
if marker in seen:
continue
seen[marker] = 1
result.append(item)
return preserved_type(result) | Unique a list or tuple and preserve the order
@type idfunc: Function or None
@param idfunc: If idfunc is provided it will be called during the
comparison process. |
def _process_json(response_body):
data = json.loads(response_body)
uwpassword = UwPassword(uwnetid=data["uwNetID"],
kerb_status=data["kerbStatus"],
interval=None,
last_change=None,
last_change_med=None,
expires_med=None,
interval_med=None,
minimum_length=int(data["minimumLength"]),
time_stamp=parse(data["timeStamp"]),)
if "lastChange" in data:
uwpassword.last_change = parse(data["lastChange"])
if "interval" in data:
uwpassword.interval = timeparse(data["interval"])
if "lastChangeMed" in data:
uwpassword.last_change_med = parse(data["lastChangeMed"])
if "expiresMed" in data:
uwpassword.expires_med = parse(data["expiresMed"])
if "intervalMed" in data:
uwpassword.interval_med = timeparse(data["intervalMed"])
if "netidStatus" in data:
netid_status = []
for status in data["netidStatus"]:
netid_status.append(status)
uwpassword.netid_status = netid_status
return uwpassword | Returns a UwPassword objects |
def download_source_gafs(group_metadata, target_dir, exclusions=[], base_download_url=None):
gaf_urls = [ (data, data["source"]) for data in group_metadata["datasets"] if data["type"] == "gaf" and data["dataset"] not in exclusions ]
click.echo("Found {}".format(", ".join( [ kv[0]["dataset"] for kv in gaf_urls ] )))
downloaded_paths = []
for dataset_metadata, gaf_url in gaf_urls:
dataset = dataset_metadata["dataset"]
path = download_a_dataset_source(group_metadata["id"], dataset_metadata, target_dir, gaf_url, base_download_url=base_download_url)
if dataset_metadata["compression"] == "gzip":
unzipped = os.path.splitext(path)[0]
unzip(path, unzipped)
path = unzipped
else:
zipup(path)
downloaded_paths.append((dataset_metadata, path))
return downloaded_paths | This looks at a group metadata dictionary and downloads each GAF source that is not in the exclusions list.
For each downloaded file, keep track of the path of the file. If the file is zipped, it will unzip it here.
This function returns a list of tuples of the dataset dictionary mapped to the downloaded source path. |
def enhex(d, separator=''):
v = binascii.hexlify(d).decode('ascii')
if separator:
return separator.join(
v[i:i+2]
for i in range(0, len(v), 2)
)
else:
return v | Convert bytes to their hexadecimal representation, optionally joined by a
given separator.
Args:
d(bytes): The data to convert to hexadecimal representation.
separator(str): The separator to insert between hexadecimal tuples.
Returns:
str: The hexadecimal representation of ``d``.
Examples:
>>> from pwny import *
>>> enhex(b'pwnypack')
'70776e797061636b'
>>> enhex(b'pwnypack', separator=' ')
'70 77 6e 79 70 61 63 6b' |
def patch(
self,
id,
name=None,
description=None,
whitelisted_container_task_types=None,
whitelisted_executable_task_types=None,
):
request_url = self._client.base_api_url + self.detail_url.format(id=id)
data_to_patch = {}
if name is not None:
data_to_patch["name"] = name
if description is not None:
data_to_patch["description"] = description
if whitelisted_container_task_types is not None:
data_to_patch[
"whitelisted_container_task_types"
] = whitelisted_container_task_types
if whitelisted_executable_task_types is not None:
data_to_patch[
"whitelisted_executable_task_types"
] = whitelisted_executable_task_types
response = self._client.session.patch(request_url, data=data_to_patch)
self.validate_request_success(
response_text=response.text,
request_url=request_url,
status_code=response.status_code,
expected_status_code=HTTP_200_OK,
)
return self.response_data_to_model_instance(response.json()) | Partially updates a task whitelist on the saltant server.
Args:
id (int): The ID of the task whitelist.
name (str, optional): The name of the task whitelist.
description (str, optional): A description of the task whitelist.
whitelisted_container_task_types (list, optional): A list of
whitelisted container task type IDs.
whitelisted_executable_task_types (list, optional): A list
of whitelisted executable task type IDs.
Returns:
:class:`saltant.models.task_whitelist.TaskWhitelist`:
A task whitelist model instance representing the task
whitelist just updated. |
def validate_query_params(self, strict=True):
if not (self.api_key or default_api_key):
raise ValueError('API key is missing')
if strict and self.query_params_mode not in (None, 'and', 'or'):
raise ValueError('query_params_match should be one of "and"/"or"')
if not self.person.is_searchable:
raise ValueError('No valid name/username/phone/email in request')
if strict and self.person.unsearchable_fields:
raise ValueError('Some fields are unsearchable: %s'
% self.person.unsearchable_fields) | Check if the request is valid and can be sent, raise ValueError if
not.
`strict` is a boolean argument that defaults to True which means an
exception is raised on every invalid query parameter, if set to False
an exception is raised only when the search request cannot be performed
because required query params are missing. |
def update_account_info(self):
request = self._get_request()
return request.post(self.ACCOUNT_UPDATE_URL, {
'callback_url': self.account.callback_url
}) | Update current account information
At the moment you can only update your callback_url.
Returns:
An Account object |
def Close(self):
if self._connection:
self._cursor = None
self._connection.close()
self._connection = None
try:
os.remove(self._temp_file_path)
except (IOError, OSError):
pass
self._temp_file_path = '' | Closes the database file object.
Raises:
IOError: if the close failed.
OSError: if the close failed. |
def indent(text, num=4):
str_indent = ' ' * num
return str_indent + ('\n' + str_indent).join(text.splitlines()) | Indet the given string |
def map_get(self, key, mapkey):
op = SD.get(mapkey)
sdres = self.lookup_in(key, op)
return self._wrap_dsop(sdres, True) | Retrieve a value from a map.
:param str key: The document ID
:param str mapkey: Key within the map to retrieve
:return: :class:`~.ValueResult`
:raise: :exc:`IndexError` if the mapkey does not exist
:raise: :cb_exc:`NotFoundError` if the document does not exist.
.. seealso:: :meth:`map_add` for an example |
def functions(self):
return [v for v in self.globals.values()
if isinstance(v, values.Function)] | A list of functions declared or defined in this module. |
def digest(self, data=None):
if self.digest_finalized:
return self.digest_out.raw[:self.digest_size]
if data is not None:
self.update(data)
self.digest_out = create_string_buffer(256)
length = c_long(0)
result = libcrypto.EVP_DigestFinal_ex(self.ctx, self.digest_out,
byref(length))
if result != 1:
raise DigestError("Unable to finalize digest")
self.digest_finalized = True
return self.digest_out.raw[:self.digest_size] | Finalizes digest operation and return digest value
Optionally hashes more data before finalizing |
def shellsort(inlist):
n = len(inlist)
svec = copy.deepcopy(inlist)
ivec = range(n)
gap = n / 2
while gap > 0:
for i in range(gap, n):
for j in range(i - gap, -1, -gap):
while j >= 0 and svec[j] > svec[j + gap]:
temp = svec[j]
svec[j] = svec[j + gap]
svec[j + gap] = temp
itemp = ivec[j]
ivec[j] = ivec[j + gap]
ivec[j + gap] = itemp
gap = gap / 2
return svec, ivec | Shellsort algorithm. Sorts a 1D-list.
Usage: lshellsort(inlist)
Returns: sorted-inlist, sorting-index-vector (for original list) |
def share_of_standby(df, resolution='24h', time_window=None):
p_sb = standby(df, resolution, time_window)
df = df.resample(resolution).mean()
p_tot = df.sum()
p_standby = p_sb.sum()
share_standby = p_standby / p_tot
res = share_standby.iloc[0]
return res | Compute the share of the standby power in the total consumption.
Parameters
----------
df : pandas.DataFrame or pandas.Series
Power (typically electricity, can be anything)
resolution : str, default='d'
Resolution of the computation. Data will be resampled to this resolution (as mean) before computation
of the minimum.
String that can be parsed by the pandas resample function, example ='h', '15min', '6h'
time_window : tuple with start-hour and end-hour, default=None
Specify the start-time and end-time for the analysis.
Only data within this time window will be considered.
Both times have to be specified as string ('01:00', '06:30') or as datetime.time() objects
Returns
-------
fraction : float between 0-1 with the share of the standby consumption |
async def fetchrow(self, *, timeout=None):
r
self._check_ready()
if self._exhausted:
return None
recs = await self._exec(1, timeout)
if len(recs) < 1:
self._exhausted = True
return None
return recs[0] | r"""Return the next row.
:param float timeout: Optional timeout value in seconds.
:return: A :class:`Record` instance. |
def resolve_template(template):
"Accepts a template object, path-to-template or list of paths"
if isinstance(template, (list, tuple)):
return loader.select_template(template)
elif isinstance(template, basestring):
try:
return loader.get_template(template)
except TemplateDoesNotExist:
return None
else:
return template | Accepts a template object, path-to-template or list of paths |
def _cbCvtReply(self, msg, returnSignature):
if msg is None:
return None
if returnSignature != _NO_CHECK_RETURN:
if not returnSignature:
if msg.signature:
raise error.RemoteError(
'Unexpected return value signature')
else:
if not msg.signature or msg.signature != returnSignature:
msg = 'Expected "%s". Received "%s"' % (
str(returnSignature), str(msg.signature))
raise error.RemoteError(
'Unexpected return value signature: %s' %
(msg,))
if msg.body is None or len(msg.body) == 0:
return None
if len(msg.body) == 1 and not msg.signature[0] == '(':
return msg.body[0]
else:
return msg.body | Converts a remote method call reply message into an appropriate
callback
value. |
def describe(self):
for stage, corunners in self.get_deployers():
print self.name, "STAGE ", stage
for d in corunners:
print d.__class__.__name__, ",".join(
[p[1].__name__ for p in d.phases]
) | Iterates through the deployers but doesn't run anything |
def recall():
a = TpPd(pd=0x3)
b = MessageType(mesType=0xb)
c = RecallType()
d = Facility()
packet = a / b / c / d
return packet | RECALL Section 9.3.18a |
def mozjpeg(ext_args):
args = copy.copy(_MOZJPEG_ARGS)
if Settings.destroy_metadata:
args += ["-copy", "none"]
else:
args += ["-copy", "all"]
args += ['-outfile']
args += [ext_args.new_filename, ext_args.old_filename]
extern.run_ext(args)
return _JPEG_FORMAT | Create argument list for mozjpeg. |
def _send_loop(self):
while True:
message, response_queue = self._send_queue.get()
if message is self.STOP:
break
try:
self._response_queues.put(response_queue)
self._socket.send(message)
except Exception:
log.exception("Exception sending message %s", message) | Service self._send_queue, sending requests to server |
def correct_structure(self, atol=1e-8):
return np.allclose(self.structure.lattice.matrix,
self.prim.lattice.matrix, atol=atol) | Determine if the structure matches the standard primitive structure.
The standard primitive will be different between seekpath and pymatgen
high-symmetry paths, but this is handled by the specific subclasses.
Args:
atol (:obj:`float`, optional): Absolute tolerance used to compare
the input structure with the primitive standard structure.
Returns:
bool: ``True`` if the structure is the same as the standard
primitive, otherwise ``False``. |
def find_nonzero_constrained_reactions(model):
lower_bound, upper_bound = helpers.find_bounds(model)
return [rxn for rxn in model.reactions if
0 > rxn.lower_bound > lower_bound or
0 < rxn.upper_bound < upper_bound] | Return list of reactions with non-zero, non-maximal bounds. |
def version(self):
lines = iter(self._invoke('version').splitlines())
version = next(lines).strip()
return self._parse_version(version) | Return the underlying version |
def _apply_uncertainty_to_mfd(self, mfd, value):
if self.uncertainty_type == 'abGRAbsolute':
a, b = value
mfd.modify('set_ab', dict(a_val=a, b_val=b))
elif self.uncertainty_type == 'bGRRelative':
mfd.modify('increment_b', dict(value=value))
elif self.uncertainty_type == 'maxMagGRRelative':
mfd.modify('increment_max_mag', dict(value=value))
elif self.uncertainty_type == 'maxMagGRAbsolute':
mfd.modify('set_max_mag', dict(value=value))
elif self.uncertainty_type == 'incrementalMFDAbsolute':
min_mag, bin_width, occur_rates = value
mfd.modify('set_mfd', dict(min_mag=min_mag, bin_width=bin_width,
occurrence_rates=occur_rates)) | Modify ``mfd`` object with uncertainty value ``value``. |
def pprint(self):
strings = []
for key in sorted(self.keys()):
values = self[key]
for value in values:
strings.append("%s=%s" % (key, value))
return "\n".join(strings) | Print tag key=value pairs. |
def authorize(self, username, arguments=[],
authen_type=TAC_PLUS_AUTHEN_TYPE_ASCII, priv_lvl=TAC_PLUS_PRIV_LVL_MIN,
rem_addr=TAC_PLUS_VIRTUAL_REM_ADDR, port=TAC_PLUS_VIRTUAL_PORT):
with self.closing():
packet = self.send(
TACACSAuthorizationStart(username,
TAC_PLUS_AUTHEN_METH_TACACSPLUS,
priv_lvl, authen_type, arguments,
rem_addr=rem_addr, port=port),
TAC_PLUS_AUTHOR
)
reply = TACACSAuthorizationReply.unpacked(packet.body)
logger.debug('\n'.join([
reply.__class__.__name__,
'recv header <%s>' % packet.header,
'recv body <%s>' % reply
]))
reply_arguments = dict([
arg.split(six.b('='), 1)
for arg in reply.arguments or []
if arg.find(six.b('=')) > -1]
)
user_priv_lvl = int(reply_arguments.get(
six.b('priv-lvl'), TAC_PLUS_PRIV_LVL_MAX))
if user_priv_lvl < priv_lvl:
reply.status = TAC_PLUS_AUTHOR_STATUS_FAIL
return reply | Authorize with a TACACS+ server.
:param username:
:param arguments: The authorization arguments
:param authen_type: TAC_PLUS_AUTHEN_TYPE_ASCII,
TAC_PLUS_AUTHEN_TYPE_PAP,
TAC_PLUS_AUTHEN_TYPE_CHAP
:param priv_lvl: Minimal Required priv_lvl.
:param rem_addr: AAA request source, default to TAC_PLUS_VIRTUAL_REM_ADDR
:param port: AAA port, default to TAC_PLUS_VIRTUAL_PORT
:return: TACACSAuthenticationReply
:raises: socket.timeout, socket.error |
def _cast_to_type(self, value):
try:
return float(value)
except (ValueError, TypeError):
self.fail('invalid', value=value) | Convert the value to a float and raise error on failures |
def filter(self, func):
results = OrderedDict()
for name, group in self:
if func(group):
results[name] = group
return self.__class__(results) | Filter out Groups based on filtering function.
The function should get a FeatureCollection and return True to leave in the Group and False to take it out. |
def tab_completion_docstring(self_or_cls):
elements = ['%s=Boolean' %k for k in list(Store.renderers.keys())]
for name, p in self_or_cls.params().items():
param_type = p.__class__.__name__
elements.append("%s=%s" % (name, param_type))
return "params(%s)" % ', '.join(['holoviews=Boolean'] + elements) | Generates a docstring that can be used to enable tab-completion
of resources. |
def render(value):
if not value:
return r'^$'
if value[0] != beginning:
value = beginning + value
if value[-1] != end:
value += end
return value | This function finishes the url pattern creation by adding starting
character ^ end possibly by adding end character at the end
:param value: naive URL value
:return: raw string |
def _handle_lrr(self, data):
msg = LRRMessage(data)
if not self._ignore_lrr_states:
self._lrr_system.update(msg)
self.on_lrr_message(message=msg)
return msg | Handle Long Range Radio messages.
:param data: LRR message to parse
:type data: string
:returns: :py:class:`~alarmdecoder.messages.LRRMessage` |
def rm(ctx, cluster_id):
session = create_session(ctx.obj['AWS_PROFILE_NAME'])
client = session.client('emr')
try:
result = client.describe_cluster(ClusterId=cluster_id)
target_dns = result['Cluster']['MasterPublicDnsName']
flag = click.prompt(
"Are you sure you want to terminate {0}: {1}? [y/Y]".format(
cluster_id, target_dns), type=str, default='n')
if flag.lower() == 'y':
result = client.terminate_job_flows(JobFlowIds=[cluster_id])
except ClientError as e:
click.echo(e, err=True) | Terminate a EMR cluster |
def check_environment_presets():
presets = [x for x in os.environ.copy().keys() if x.startswith('NOVA_') or
x.startswith('OS_')]
if len(presets) < 1:
return True
else:
click.echo("_" * 80)
click.echo("*WARNING* Found existing environment variables that may "
"cause conflicts:")
for preset in presets:
click.echo(" - %s" % preset)
click.echo("_" * 80)
return False | Checks for environment variables that can cause problems with supernova |
def reload(self):
'Generate histrow for each row and then reverse-sort by length.'
self.rows = []
self.discreteBinning()
for c in self.nonKeyVisibleCols:
c._cachedValues = collections.OrderedDict() | Generate histrow for each row and then reverse-sort by length. |
def BuildCloudMetadataRequests():
amazon_collection_map = {
"/".join((AMAZON_URL_BASE, "instance-id")): "instance_id",
"/".join((AMAZON_URL_BASE, "ami-id")): "ami_id",
"/".join((AMAZON_URL_BASE, "hostname")): "hostname",
"/".join((AMAZON_URL_BASE, "public-hostname")): "public_hostname",
"/".join((AMAZON_URL_BASE, "instance-type")): "instance_type",
}
google_collection_map = {
"/".join((GOOGLE_URL_BASE, "instance/id")): "instance_id",
"/".join((GOOGLE_URL_BASE, "instance/zone")): "zone",
"/".join((GOOGLE_URL_BASE, "project/project-id")): "project_id",
"/".join((GOOGLE_URL_BASE, "instance/hostname")): "hostname",
"/".join((GOOGLE_URL_BASE, "instance/machine-type")): "machine_type",
}
return CloudMetadataRequests(requests=_MakeArgs(amazon_collection_map,
google_collection_map)) | Build the standard set of cloud metadata to collect during interrogate. |
def get_interface_detail_request(last_interface_name,
last_interface_type):
request_interface = ET.Element(
'get-interface-detail',
xmlns="urn:brocade.com:mgmt:brocade-interface-ext"
)
if last_interface_name != '':
last_received_int = ET.SubElement(request_interface,
"last-rcvd-interface")
last_int_type_el = ET.SubElement(last_received_int,
"interface-type")
last_int_type_el.text = last_interface_type
last_int_name_el = ET.SubElement(last_received_int,
"interface-name")
last_int_name_el.text = last_interface_name
return request_interface | Creates a new Netconf request based on the last received
interface name and type when the hasMore flag is true |
async def _recv_loop(self):
while self._connected:
try:
data = await self._recv()
except asyncio.CancelledError:
break
except Exception as e:
if isinstance(e, (IOError, asyncio.IncompleteReadError)):
msg = 'The server closed the connection'
self._log.info(msg)
elif isinstance(e, InvalidChecksumError):
msg = 'The server response had an invalid checksum'
self._log.info(msg)
else:
msg = 'Unexpected exception in the receive loop'
self._log.exception(msg)
await self.disconnect()
if self._recv_queue.empty():
self._recv_queue.put_nowait(None)
break
try:
await self._recv_queue.put(data)
except asyncio.CancelledError:
break | This loop is constantly putting items on the queue as they're read. |
def unassign_gradebook_column_from_gradebook(self, gradebook_column_id, gradebook_id):
mgr = self._get_provider_manager('GRADING', local=True)
lookup_session = mgr.get_gradebook_lookup_session(proxy=self._proxy)
lookup_session.get_gradebook(gradebook_id)
self._unassign_object_from_catalog(gradebook_column_id, gradebook_id) | Removes a ``GradebookColumn`` from a ``Gradebook``.
arg: gradebook_column_id (osid.id.Id): the ``Id`` of the
``GradebookColumn``
arg: gradebook_id (osid.id.Id): the ``Id`` of the
``Gradebook``
raise: NotFound - ``gradebook_column_id`` or ``gradebook_id``
not found or ``gradebook_column_id`` not assigned to
``gradebook_id``
raise: NullArgument - ``gradebook_column_id`` or
``gradebook_id`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure
*compliance: mandatory -- This method must be implemented.* |
def handle_unsubscribe_request(cls, request, message, dispatch, hash_is_valid, redirect_to):
if hash_is_valid:
Subscription.cancel(
dispatch.recipient_id or dispatch.address, cls.alias, dispatch.messenger
)
signal = sig_unsubscribe_success
else:
signal = sig_unsubscribe_failed
signal.send(cls, request=request, message=message, dispatch=dispatch)
return redirect(redirect_to) | Handles user subscription cancelling request.
:param Request request: Request instance
:param Message message: Message model instance
:param Dispatch dispatch: Dispatch model instance
:param bool hash_is_valid: Flag indicating that user supplied request signature is correct
:param str redirect_to: Redirection URL
:rtype: list |
def directed_bipartition(seq, nontrivial=False):
bipartitions = [
(tuple(seq[i] for i in part0_idx), tuple(seq[j] for j in part1_idx))
for part0_idx, part1_idx in directed_bipartition_indices(len(seq))
]
if nontrivial:
return bipartitions[1:-1]
return bipartitions | Return a list of directed bipartitions for a sequence.
Args:
seq (Iterable): The sequence to partition.
Returns:
list[tuple[tuple]]: A list of tuples containing each of the two
parts.
Example:
>>> directed_bipartition((1, 2, 3)) # doctest: +NORMALIZE_WHITESPACE
[((), (1, 2, 3)),
((1,), (2, 3)),
((2,), (1, 3)),
((1, 2), (3,)),
((3,), (1, 2)),
((1, 3), (2,)),
((2, 3), (1,)),
((1, 2, 3), ())] |
def set_current_time(self, t):
method = "set_current_time"
A = None
metadata = {method: t}
send_array(self.socket, A, metadata)
A, metadata = recv_array(
self.socket, poll=self.poll, poll_timeout=self.poll_timeout,
flags=self.zmq_flags) | Set current time of simulation |
def decode(self, encoding='utf-8', errors='strict'):
original_class = getattr(self, 'original_class')
return original_class(super(ColorBytes, self).decode(encoding, errors)) | Decode using the codec registered for encoding. Default encoding is 'utf-8'.
errors may be given to set a different error handling scheme. Default is 'strict' meaning that encoding errors
raise a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' as well as any other name
registered with codecs.register_error that is able to handle UnicodeDecodeErrors.
:param str encoding: Codec.
:param str errors: Error handling scheme. |
def get_functions_auth_string(self, target_subscription_id):
self._initialize_session()
function_auth_variables = [
constants.ENV_FUNCTION_TENANT_ID,
constants.ENV_FUNCTION_CLIENT_ID,
constants.ENV_FUNCTION_CLIENT_SECRET
]
if all(k in os.environ for k in function_auth_variables):
auth = {
'credentials':
{
'client_id': os.environ[constants.ENV_FUNCTION_CLIENT_ID],
'secret': os.environ[constants.ENV_FUNCTION_CLIENT_SECRET],
'tenant': os.environ[constants.ENV_FUNCTION_TENANT_ID]
},
'subscription': target_subscription_id
}
elif type(self.credentials) is ServicePrincipalCredentials:
auth = {
'credentials':
{
'client_id': os.environ[constants.ENV_CLIENT_ID],
'secret': os.environ[constants.ENV_CLIENT_SECRET],
'tenant': os.environ[constants.ENV_TENANT_ID]
},
'subscription': target_subscription_id
}
else:
raise NotImplementedError(
"Service Principal credentials are the only "
"supported auth mechanism for deploying functions.")
return json.dumps(auth, indent=2) | Build auth json string for deploying
Azure Functions. Look for dedicated
Functions environment variables or
fall back to normal Service Principal
variables. |
def matches(text, what):
return text.find(what) > -1 if is_string(what) else what.match(text) | Check if ``what`` occurs in ``text`` |
def ValidateKey(cls, key_path):
for prefix in cls.VALID_PREFIXES:
if key_path.startswith(prefix):
return
if key_path.startswith('HKEY_CURRENT_USER\\'):
raise errors.FormatError(
'HKEY_CURRENT_USER\\ is not supported instead use: '
'HKEY_USERS\\%%users.sid%%\\')
raise errors.FormatError(
'Unupported Registry key path: {0:s}'.format(key_path)) | Validates this key against supported key names.
Args:
key_path (str): path of a Windows Registry key.
Raises:
FormatError: when key is not supported. |
def unique(iterable, key=identity):
seen = set()
for item in iterable:
item_key = key(item)
if item_key not in seen:
seen.add(item_key)
yield item | Yields all the unique values in an iterable maintaining order |
def _loopreport(self):
while 1:
eventlet.sleep(0.2)
ac2popenlist = {}
for action in self.session._actions:
for popen in action._popenlist:
if popen.poll() is None:
lst = ac2popenlist.setdefault(action.activity, [])
lst.append(popen)
if not action._popenlist and action in self._actionmayfinish:
super(RetoxReporter, self).logaction_finish(action)
self._actionmayfinish.remove(action)
self.screen.draw_next_frame(repeat=False) | Loop over the report progress |
def up_capture(self, benchmark, threshold=0.0, compare_op="ge"):
slf, bm = self.upmarket_filter(
benchmark=benchmark,
threshold=threshold,
compare_op=compare_op,
include_benchmark=True,
)
return slf.geomean() / bm.geomean() | Upside capture ratio.
Measures the performance of `self` relative to benchmark
conditioned on periods where `benchmark` is gt or ge to
`threshold`.
Upside capture ratios are calculated by taking the fund's
monthly return during the periods of positive benchmark
performance and dividing it by the benchmark return.
[Source: CFA Institute]
Parameters
----------
benchmark : {pd.Series, TSeries, 1d np.ndarray}
The benchmark security to which `self` is compared.
threshold : float, default 0.
The threshold at which the comparison should be done.
`self` and `benchmark` are "filtered" to periods where
`benchmark` is gt/ge `threshold`.
compare_op : {'ge', 'gt'}
Comparison operator used to compare to `threshold`.
'gt' is greater-than; 'ge' is greater-than-or-equal.
Returns
-------
float
Note
----
This metric uses geometric, not arithmetic, mean return. |
async def create_authenticator_async(self, connection, debug=False, loop=None, **kwargs):
self.loop = loop or asyncio.get_event_loop()
self._connection = connection
self._session = SessionAsync(connection, loop=self.loop, **kwargs)
try:
self._cbs_auth = c_uamqp.CBSTokenAuth(
self.audience,
self.token_type,
self.token,
int(self.expires_at),
self._session._session,
self.timeout,
self._connection.container_id)
self._cbs_auth.set_trace(debug)
except ValueError:
await self._session.destroy_async()
raise errors.AMQPConnectionError(
"Unable to open authentication session on connection {}.\n"
"Please confirm target hostname exists: {}".format(
connection.container_id, connection.hostname)) from None
return self._cbs_auth | Create the async AMQP session and the CBS channel with which
to negotiate the token.
:param connection: The underlying AMQP connection on which
to create the session.
:type connection: ~uamqp.async_ops.connection_async.ConnectionAsync
:param debug: Whether to emit network trace logging events for the
CBS session. Default is `False`. Logging events are set at INFO level.
:type debug: bool
:param loop: A user specified event loop.
:type loop: ~asycnio.AbstractEventLoop
:rtype: uamqp.c_uamqp.CBSTokenAuth |
def contains(self, other):
if self.is_valid_range(other):
if not self:
return not other
elif not other or other.startsafter(self) and other.endsbefore(self):
return True
else:
return False
elif self.is_valid_scalar(other):
is_within_lower = True
if not self.lower_inf:
lower_cmp = operator.le if self.lower_inc else operator.lt
is_within_lower = lower_cmp(self.lower, other)
is_within_upper = True
if not self.upper_inf:
upper_cmp = operator.ge if self.upper_inc else operator.gt
is_within_upper = upper_cmp(self.upper, other)
return is_within_lower and is_within_upper
else:
raise TypeError(
"Unsupported type to test for inclusion '{0.__class__.__name__}'".format(
other)) | Return True if this contains other. Other may be either range of same
type or scalar of same type as the boundaries.
>>> intrange(1, 10).contains(intrange(1, 5))
True
>>> intrange(1, 10).contains(intrange(5, 10))
True
>>> intrange(1, 10).contains(intrange(5, 10, upper_inc=True))
False
>>> intrange(1, 10).contains(1)
True
>>> intrange(1, 10).contains(10)
False
Contains can also be called using the ``in`` operator.
>>> 1 in intrange(1, 10)
True
This is the same as the ``self @> other`` in PostgreSQL.
:param other: Object to be checked whether it exists within this range
or not.
:return: ``True`` if `other` is completely within this range, otherwise
``False``.
:raises TypeError: If `other` is not of the correct type. |
def SetEnvironmentVariable(self, name, value):
if isinstance(value, py2to3.STRING_TYPES):
value = self._PathStripPrefix(value)
if value is not None:
self._environment_variables[name.upper()] = value | Sets an environment variable in the Windows path helper.
Args:
name (str): name of the environment variable without enclosing
%-characters, e.g. SystemRoot as in %SystemRoot%.
value (str): value of the environment variable. |
def _cursor(self):
if self._conn is None:
self._conn = sqlite3.connect(self.filename,
check_same_thread=False)
return self._conn.cursor() | Asserts that the connection is open and returns a cursor |
def parse_quadrant_measurement(quad_azimuth):
def rotation_direction(first, second):
return np.cross(_azimuth2vec(first), _azimuth2vec(second))
quad_azimuth = quad_azimuth.strip()
try:
first_dir = quadrantletter_to_azimuth(quad_azimuth[0].upper())
sec_dir = quadrantletter_to_azimuth(quad_azimuth[-1].upper())
except KeyError:
raise ValueError('{} is not a valid azimuth'.format(quad_azimuth))
angle = float(quad_azimuth[1:-1])
direc = rotation_direction(first_dir, sec_dir)
azi = first_dir + direc * angle
if abs(direc) < 0.9:
raise ValueError('{} is not a valid azimuth'.format(quad_azimuth))
if azi < 0:
azi += 360
elif azi > 360:
azi -= 360
return azi | Parses a quadrant measurement of the form "AxxB", where A and B are cardinal
directions and xx is an angle measured relative to those directions.
In other words, it converts a measurement such as E30N into an azimuth of
60 degrees, or W10S into an azimuth of 260 degrees.
For ambiguous quadrant measurements such as "N30S", a ValueError is raised.
Parameters
-----------
quad_azimuth : string
An azimuth measurement in quadrant form.
Returns
-------
azi : float
An azimuth in degrees clockwise from north.
See Also
--------
parse_azimuth |
def _generate_api_config_with_root(self, request):
actual_root = self._get_actual_root(request)
generator = api_config.ApiConfigGenerator()
api = request.body_json['api']
version = request.body_json['version']
lookup_key = (api, version)
service_factories = self._backend.api_name_version_map.get(lookup_key)
if not service_factories:
return None
service_classes = [service_factory.service_class
for service_factory in service_factories]
config_dict = generator.get_config_dict(
service_classes, hostname=actual_root)
for config in config_dict.get('items', []):
lookup_key_with_root = (
config.get('name', ''), config.get('version', ''), actual_root)
self._config_manager.save_config(lookup_key_with_root, config)
return config_dict | Generate an API config with a specific root hostname.
This uses the backend object and the ApiConfigGenerator to create an API
config specific to the hostname of the incoming request. This allows for
flexible API configs for non-standard environments, such as localhost.
Args:
request: An ApiRequest, the transformed request sent to the Discovery API.
Returns:
A string representation of the generated API config. |
def _refresh(self, _):
from google.appengine.api import app_identity
try:
token, _ = app_identity.get_access_token(self._scopes)
except app_identity.Error as e:
raise exceptions.CredentialsError(str(e))
self.access_token = token | Refresh self.access_token.
Args:
_: (ignored) A function matching httplib2.Http.request's signature. |
def parse_table_properties(doc, table, prop):
"Parse table properties."
if not table:
return
style = prop.find(_name('{{{w}}}tblStyle'))
if style is not None:
table.style_id = style.attrib[_name('{{{w}}}val')]
doc.add_style_as_used(table.style_id) | Parse table properties. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.