code
stringlengths
51
2.38k
docstring
stringlengths
4
15.2k
def wait_for_servers(session, servers): nclient = nova.Client(NOVA_VERSION, session=session, region_name=os.environ['OS_REGION_NAME']) while True: deployed = [] undeployed = [] for server in servers: c = nclient.servers.get(server.id) if ...
Wait for the servers to be ready. Note(msimonin): we don't guarantee that the SSH connection is ready.
def set_attribute(self, obj, attr, value):
    """Set *attr* on *obj*, treating mappings as key stores.

    Reason for this method's existence: 'attribute' can also be a key when
    the target object is a dict or any other kind of mapping.

    Args:
        obj (object): object instance (or mapping) to modify
        attr (str): attribute name or mapping key to change
        value: value to store
    """
    if isinstance(obj, MutableMapping):
        obj[attr] = value
        return
    setattr(obj, attr, value)
Set value of attribute in given object instance. Reason for existence of this method is the fact that 'attribute' can be also a object's key if it is a dict or any other kind of mapping. Args: obj (object): object instance to modify attr (str): attribute (or key) to cha...
def points(self): if self._points is None: _points = self.soma.points.tolist() for n in self.neurites: _points.extend(n.points.tolist()) self._points = np.array(_points) return self._points
Return unordered array with all the points in this neuron
def macaroon_ops(self, macaroons): if len(macaroons) == 0: raise ValueError('no macaroons provided') storage_id, ops = _decode_macaroon_id(macaroons[0].identifier_bytes) root_key = self.root_keystore_for_ops(ops).get(storage_id) if root_key is None: raise Verifica...
This method makes the oven satisfy the MacaroonOpStore protocol required by the Checker class. For macaroons minted with previous bakery versions, it always returns a single LoginOp operation. :param macaroons: :return:
def decode_buffer(buffer: dict) -> np.ndarray:
    """Translate a DataBuffer into a numpy array.

    :param buffer: Dictionary with 'data' byte array, 'dtype', and
        'shape' fields
    :return: NumPy array of decoded data
    """
    flat = np.frombuffer(buffer['data'], dtype=buffer['dtype'])
    return flat.reshape(buffer['shape'])
Translate a DataBuffer into a numpy array. :param buffer: Dictionary with 'data' byte array, 'dtype', and 'shape' fields :return: NumPy array of decoded data
def _get_json(self, url): self.log.info(u"/GET " + url) r = requests.get(url) if hasattr(r, 'from_cache'): if r.from_cache: self.log.info("(from cache)") if r.status_code != 200: throw_request_err(r) return r.json()
Get json from url
def margin(self, axis): if self._slice.ndim < 2: msg = ( "Scale Means marginal cannot be calculated on 1D cubes, as" "the scale means already get reduced to a scalar value." ) raise ValueError(msg) dimension_index = 1 - axis mar...
Return marginal value of the current slice scaled means. This value is the the same what you would get from a single variable (constituting a 2D cube/slice), when the "non-missing" filter of the opposite variable would be applied. This behavior is consistent with what is visible in the ...
def repr2(obj_, **kwargs): kwargs['nl'] = kwargs.pop('nl', kwargs.pop('newlines', False)) val_str = _make_valstr(**kwargs) return val_str(obj_)
Attempt to replace repr more configurable pretty version that works the same in both 2 and 3
def _convert_token(self, token): token = token.copy() if "expiresOn" in token and "expiresIn" in token: token["expiresOn"] = token['expiresIn'] + time.time() return {self._case.sub(r'\1_\2', k).lower(): v for k, v in token.items()}
Convert token fields from camel case. :param dict token: An authentication token. :rtype: dict
def _set_new_object(self, new_obj, inherited_obj, new_class, superclass, qualifier_repo, propagated, type_str): assert isinstance(new_obj, (CIMMethod, CIMProperty, CIMParameter)) if inherited_obj: inherited_obj_qual = inherited_obj.qualifiers else: ...
Set the object attributes for a single object and resolve the qualifiers. This sets attributes for Properties, Methods, and Parameters.
def cli(env, identifier, crt, csr, icc, key, notes): template = {'id': identifier} if crt: template['certificate'] = open(crt).read() if key: template['privateKey'] = open(key).read() if csr: template['certificateSigningRequest'] = open(csr).read() if icc: template['i...
Edit SSL certificate.
def decyear2dt(t):
    """Convert a decimal year (e.g. 2001.5) to a datetime.

    The fractional part is scaled by the actual length of that year in
    seconds, so leap years are handled correctly.
    """
    whole_year = int(t)
    fraction = t - whole_year
    year_start = datetime(whole_year, 1, 1)
    year_seconds = (year_start.replace(year=whole_year + 1) - year_start).total_seconds()
    return year_start + timedelta(seconds=year_seconds * fraction)
Convert decimal year to datetime
def download_attachments(self): if not self._parent.has_attachments: log.debug( 'Parent {} has no attachments, skipping out early.'.format( self._parent.__class__.__name__)) return False if not self._parent.object_id: raise RuntimeE...
Downloads this message attachments into memory. Need a call to 'attachment.save' to save them on disk. :return: Success / Failure :rtype: bool
def _mkdirs_impacket(path, share='C$', conn=None, host=None, username=None, password=None): if conn is None: conn = get_conn(host, username, password) if conn is False: return False comps = path.split('/') pos = 1 for comp in comps: cwd = '\\'.join(comps[0:pos]) try: ...
Recursively create a directory structure on an SMB share Paths should be passed in with forward-slash delimiters, and should not start with a forward-slash.
def attention_bias_batch(batch_coordinates_q, batch_coordinates_k=None, condition_fn=None): if batch_coordinates_k is None: batch_coordinates_k = batch_coordinates_q def to_float(bc): bc = tf.squeeze(bc, 1) bc = tf.to_float(bc) return bc bc_v = tf....
Generate a mask to prevent the batch to attend to each others. Args: batch_coordinates_q: Int-like Tensor of shape [length_q, 1] containing the coordinates of the batches batch_coordinates_k: Int-like Tensor of shape [length_k, 1] containing the coordinates of the batches. If None, do self-attent...
def normalize_cmd(self, command):
    """Normalize a CLI command to have a single trailing line ending.

    :param command: Command that may require a line feed to be normalized
    :type command: str
    :return: the command stripped of trailing whitespace, with
        ``self.RETURN`` appended
    """
    stripped = command.rstrip()
    return stripped + self.RETURN
Normalize CLI commands to have a single trailing newline. :param command: Command that may require line feed to be normalized :type command: str
def walk_dir(dir_path, walk_after, recurse=None, archive_mtime=None): if recurse is None: recurse = Settings.recurse result_set = set() if not recurse: return result_set for root, _, filenames in os.walk(dir_path): for filename in filenames: filename_full = os.path.jo...
Recursively optimize a directory.
def draw_pin(text, background_color='green', font_color='white'): image = Image.new('RGB', (120, 20)) draw = ImageDraw.Draw(image) draw.rectangle([(1, 1), (118, 18)], fill=color(background_color)) draw.text((10, 4), text, fill=color(font_color)) return image
Draws and returns a pin with the specified text and color scheme
def count_values(tokens):
    """Identify the number of values ahead of the current token.

    Counting stops at the first '=', '/', '$' or '&' token; a '=' that
    arrives after at least one counted token claims that token back (it
    was a name for the next assignment, not a value).  Whitespace and
    comma tokens are skipped.
    """
    terminators = ('=', '/', '$', '&')
    skippable = whitespace + ','
    total = 0
    for token in tokens:
        if token in terminators:
            if total > 0 and token == '=':
                total -= 1
            break
        if token in skippable:
            continue
        total += 1
    return total
Identify the number of values ahead of the current token.
def get_stripe_dashboard_url(self): if not self.stripe_dashboard_item_name or not self.id: return "" else: return "{base_url}{item}/{id}".format( base_url=self._get_base_stripe_dashboard_url(), item=self.stripe_dashboard_item_name, id=self.id, )
Get the stripe dashboard url for this object.
def format_page(self, page, link_resolver, output): debug('Formatting page %s' % page.link.ref, 'formatting') if output: actual_output = os.path.join(output, 'html') if not os.path.exists(actual_output): os.makedirs(actual_...
Called by `project.Project.format_page`, to leave full control to extensions over the formatting of the pages they are responsible of. Args: page: tree.Page, the page to format. link_resolver: links.LinkResolver, object responsible for resolving links pot...
def get_list_dimensions(_list):
    """Return the size of each dimension of a nested list/tuple.

    Takes a nested list (or tuple) and returns a list with the length of
    each nesting level, probing the first element at each level.  A
    non-sequence returns ``[]``.

    Robustness fix: an empty list/tuple previously raised IndexError on
    ``_list[0]``; it now reports length 0 with no inner dimensions.
    """
    if isinstance(_list, (list, tuple)):
        if not _list:
            return [0]
        return [len(_list)] + get_list_dimensions(_list[0])
    return []
Takes a nested list and returns the size of each dimension followed by the element type in the list
def tryOrder(self, commit: Commit): canOrder, reason = self.canOrder(commit) if canOrder: self.logger.trace("{} returning request to node".format(self)) self.doOrder(commit) else: self.logger.debug("{} cannot return request to node: {}".format(self, reason)) ...
Try to order if the Commit message is ready to be ordered.
def forwards(apps, schema_editor): Work = apps.get_model('spectator_events', 'Work') for work in Work.objects.all(): if not work.slug: work.slug = generate_slug(work.pk) work.save()
Re-save all the Works because something earlier didn't create their slugs.
def _create_scheduled_actions(conn, as_name, scheduled_actions): if scheduled_actions: for name, action in six.iteritems(scheduled_actions): if 'start_time' in action and isinstance(action['start_time'], six.string_types): action['start_time'] = datetime.datetime.strptime( ...
Helper function to create scheduled actions
def partition_pairs(neurites, neurite_type=NeuriteType.all): return map(_bifurcationfunc.partition_pair, iter_sections(neurites, iterator_type=Tree.ibifurcation_point, neurite_filter=is_type(neurite_type)))
Partition pairs at bifurcation points of a collection of neurites. A partition pair is defined as the number of bifurcations at the two daughters of the bifurcating section
def _multiple_field(cls): klassdict = cls.__dict__ try: return klassdict["_entitylist_multifield"][0] except (KeyError, IndexError, TypeError): from . import fields multifield_tuple = tuple(fields.find(cls, multiple=True)) assert len(multifield_tup...
Return the "multiple" TypedField associated with this EntityList. This also lazily sets the ``_entitylist_multiplefield`` value if it hasn't been set yet. This is set to a tuple containing one item because if we set the class attribute to the TypedField, we would effectively add a Typed...
def import_from_setting(setting_name, fallback): path = getattr(settings, setting_name, None) if path: try: return import_string(path) except ImportError: raise ImproperlyConfigured('%s: No such path.' % path) else: return fallback
Return the resolution of an import path stored in a Django setting. :arg setting_name: The name of the setting holding the import path :arg fallback: An alternate object to use if the setting is empty or doesn't exist Raise ImproperlyConfigured if a path is given that can't be resolved.
def get_user(self, user_id, password): self.con.execute('SELECT uid, pwHash FROM archive_users WHERE userId = %s;', (user_id,)) results = self.con.fetchall() if len(results) == 0: raise ValueError("No such user") pw_hash = results[0]['pwHash'] if not passlib.hash.bcry...
Retrieve a user record :param user_id: the user ID :param password: password :return: A :class:`meteorpi_model.User` if everything is correct :raises: ValueError if the user is found but password is incorrect or if the user is not found.
def logout(self): self.client.write('exit\r\n') self.client.read_all() self.client.close()
Logout from the remote server.
def tdev(data, rate=1.0, data_type="phase", taus=None): phase = input_to_phase(data, rate, data_type) (taus, md, mde, ns) = mdev(phase, rate=rate, taus=taus) td = taus * md / np.sqrt(3.0) tde = td / np.sqrt(ns) return taus, td, tde, ns
Time deviation. Based on modified Allan variance. .. math:: \\sigma^2_{TDEV}( \\tau ) = { \\tau^2 \\over 3 } \\sigma^2_{MDEV}( \\tau ) Note that TDEV has a unit of seconds. Parameters ---------- data: np.array Input data. Provide either phase or frequency (fractio...
def build_latent_variables(self): lvs_to_build = [] lvs_to_build.append(['Noise Sigma^2', fam.Flat(transform='exp'), fam.Normal(0,3), -1.0]) for lag in range(self.X.shape[1]): lvs_to_build.append(['l lag' + str(lag+1), fam.FLat(transform='exp'), fam.Normal(0,3), -1.0]) lvs_to...
Builds latent variables for this kernel Returns ---------- - A list of lists (each sub-list contains latent variable information)
def import_eit_fzj(self, filename, configfile, correction_file=None, timestep=None, **kwargs): df_emd, dummy1, dummy2 = eit_fzj.read_3p_data( filename, configfile, **kwargs ) if correction_file is not None: eit_fzj_utils.appl...
EIT data import for FZJ Medusa systems
def register(key, initializer: callable, param=None): get_current_scope().container.register(key, initializer, param)
Adds resolver to global container
def uniqueTags(tagList):
    '''
        uniqueTags - Returns the unique tags in tagList.

        @param tagList list<AdvancedTag> : A list of tag objects.
    '''
    ret = []
    alreadyAdded = set()
    for tag in tagList:
        myUid = tag.getUid()
        if myUid in alreadyAdded:
            continue
        # BUG FIX: the uid was never recorded, so every tag was kept and
        # duplicates were never filtered out.
        alreadyAdded.add(myUid)
        ret.append(tag)
    return TagCollection(ret)
uniqueTags - Returns the unique tags in tagList. @param tagList list<AdvancedTag> : A list of tag objects.
def many_psds(k=2, fs=1.0, b0=1.0, N=1024):
    """Compute the average of *k* PSDs of white noise.

    :param k: number of PSDs to average
    :param fs: sampling frequency passed to the noise generator
    :param b0: noise power coefficient
    :param N: nominal length parameter
        NOTE(review): N is currently unused -- the noise length is the
        hard-coded ``2*4096``; confirm whether N should be wired in.
    :return: (frequency array, averaged PSD)
    """
    psd = []
    for j in range(k):
        # BUG FIX: `print j` is Python-2-only syntax and is a SyntaxError
        # under Python 3; use the print() function instead.
        print(j)
        x = noise.white(N=2 * 4096, b0=b0, fs=fs)
        f, tmp = noise.numpy_psd(x, fs)
        if j == 0:
            psd = tmp
        else:
            psd = psd + tmp
    return f, psd / k
compute average of many PSDs
def _get_file_iterator(self, file_obj): file_obj.seek(0) return iter(lambda: file_obj.read(self.read_bs), '')
For given `file_obj` return iterator, which will read the file in `self.read_bs` chunks. Args: file_obj (file): File-like object. Return: iterator: Iterator reading the file-like object in chunks.
def _get_reference(document_path, reference_map): try: return reference_map[document_path] except KeyError: msg = _BAD_DOC_TEMPLATE.format(document_path) raise ValueError(msg)
Get a document reference from a dictionary. This just wraps a simple dictionary look-up with a helpful error that is specific to :meth:`~.firestore.client.Client.get_all`, the **public** caller of this function. Args: document_path (str): A fully-qualified document path. reference_map ...
def uddc(udfunc, x, dx): x = ctypes.c_double(x) dx = ctypes.c_double(dx) isdescr = ctypes.c_int() libspice.uddc_c(udfunc, x, dx, ctypes.byref(isdescr)) return bool(isdescr.value)
SPICE private routine intended solely for the support of SPICE routines. Users should not call this routine directly due to the volatile nature of this routine. This routine calculates the derivative of 'udfunc' with respect to time for 'et', then determines if the derivative has a negative value. ...
def initialize_block(self, block_header): state_view = \ BlockWrapper.state_view_for_block( self._block_cache.block_store.chain_head, self._state_view_factory) settings_view = SettingsView(state_view) self._min_wait_time = settings_view.get_setting( ...
Do initialization necessary for the consensus to claim a block, this may include initiating voting activates, starting proof of work hash generation, or create a PoET wait timer. Args: block_header (BlockHeader): the BlockHeader to initialize. Returns: True
def remove_templates(self): self.hosts.remove_templates() self.contacts.remove_templates() self.services.remove_templates() self.servicedependencies.remove_templates() self.hostdependencies.remove_templates() self.timeperiods.remove_templates()
Clean useless elements like templates because they are not needed anymore :return: None
def factor_rank_autocorrelation(factor_data, period=1): grouper = [factor_data.index.get_level_values('date')] ranks = factor_data.groupby(grouper)['factor'].rank() asset_factor_rank = ranks.reset_index().pivot(index='date', columns='asset', ...
Computes autocorrelation of mean factor ranks in specified time spans. We must compare period to period factor ranks rather than factor values to account for systematic shifts in the factor values of all names or names within a group. This metric is useful for measuring the turnover of a factor. If the ...
def asgray(im):
    """Return the grayscale version of *im* by averaging color channels.

    A 2-D (already grayscale) image is returned unchanged.  For 3-D
    images with 3 or 4 channels, only the first three (RGB) channels are
    averaged; an alpha channel is simply ignored.

    Raises:
        ValueError: if the array is neither 2-D nor 3/4-channel 3-D.
    """
    if im.ndim == 2:
        return im
    if im.ndim == 3 and im.shape[2] in (3, 4):
        return im[..., :3].mean(axis=-1)
    raise ValueError('Invalid image format')
Takes an image and returns its grayscale version by averaging the color channels. if an alpha channel is present, it will simply be ignored. If a grayscale image is given, the original image is returned. Parameters ---------- image : ndarray, ndim 2 or 3 RGB or grayscale image. Return...
def unique(seq, idfunc=None):
    """Return *seq* with duplicates removed, preserving order and the
    original container type (list or tuple).

    @type idfunc: Function or None
    @param idfunc: Optional key function applied to each item to decide
        whether two items are duplicates; defaults to the identity.
    """
    key = idfunc if idfunc is not None else (lambda value: value)
    observed = {}
    kept = []
    for element in seq:
        token = key(element)
        if token not in observed:
            observed[token] = 1
            kept.append(element)
    return type(seq)(kept)
Unique a list or tuple and preserve the order @type idfunc: Function or None @param idfunc: If idfunc is provided it will be called during the comparison process.
def _process_json(response_body): data = json.loads(response_body) uwpassword = UwPassword(uwnetid=data["uwNetID"], kerb_status=data["kerbStatus"], interval=None, last_change=None, last_change_med=Non...
Return a UwPassword object built from the response body.
def download_source_gafs(group_metadata, target_dir, exclusions=[], base_download_url=None): gaf_urls = [ (data, data["source"]) for data in group_metadata["datasets"] if data["type"] == "gaf" and data["dataset"] not in exclusions ] click.echo("Found {}".format(", ".join( [ kv[0]["dataset"] for kv in gaf_urls ]...
This looks at a group metadata dictionary and downloads each GAF source that is not in the exclusions list. For each downloaded file, keep track of the path of the file. If the file is zipped, it will unzip it here. This function returns a list of tuples of the dataset dictionary mapped to the downloaded source...
def enhex(d, separator=''):
    """Convert bytes to their hexadecimal representation, optionally
    joined by a given separator.

    Args:
        d(bytes): The data to convert to hexadecimal representation.
        separator(str): The separator to insert between hexadecimal tuples.

    Returns:
        str: The hexadecimal representation of ``d``.
    """
    hexed = binascii.hexlify(d).decode('ascii')
    if not separator:
        return hexed
    pairs = (hexed[pos:pos + 2] for pos in range(0, len(hexed), 2))
    return separator.join(pairs)
Convert bytes to their hexadecimal representation, optionally joined by a given separator. Args: d(bytes): The data to convert to hexadecimal representation. separator(str): The separator to insert between hexadecimal tuples. Returns: str: The hexadecimal representation of ``d``. ...
def patch( self, id, name=None, description=None, whitelisted_container_task_types=None, whitelisted_executable_task_types=None, ): request_url = self._client.base_api_url + self.detail_url.format(id=id) data_to_patch = {} if name is not None: ...
Partially updates a task whitelist on the saltant server. Args: id (int): The ID of the task whitelist. name (str, optional): The name of the task whitelist. description (str, optional): A description of the task whitelist. whitelisted_container_task_types (list,...
def validate_query_params(self, strict=True): if not (self.api_key or default_api_key): raise ValueError('API key is missing') if strict and self.query_params_mode not in (None, 'and', 'or'): raise ValueError('query_params_match should be one of "and"/"or"') if not s...
Check if the request is valid and can be sent, raise ValueError if not. `strict` is a boolean argument that defaults to True which means an exception is raised on every invalid query parameter, if set to False an exception is raised only when the search request cannot be ...
def update_account_info(self): request = self._get_request() return request.post(self.ACCOUNT_UPDATE_URL, { 'callback_url': self.account.callback_url })
Update current account information At the moment you can only update your callback_url. Returns: An Account object
def Close(self): if self._connection: self._cursor = None self._connection.close() self._connection = None try: os.remove(self._temp_file_path) except (IOError, OSError): pass self._temp_file_path = ''
Closes the database file object. Raises: IOError: if the close failed. OSError: if the close failed.
def indent(text, num=4):
    """Indent every line of *text* by *num* spaces and return the result."""
    pad = ' ' * num
    lines = text.splitlines()
    if not lines:
        # An empty string still gets one indented (empty) line.
        return pad
    return '\n'.join(pad + line for line in lines)
Indent the given string.
def map_get(self, key, mapkey): op = SD.get(mapkey) sdres = self.lookup_in(key, op) return self._wrap_dsop(sdres, True)
Retrieve a value from a map. :param str key: The document ID :param str mapkey: Key within the map to retrieve :return: :class:`~.ValueResult` :raise: :exc:`IndexError` if the mapkey does not exist :raise: :cb_exc:`NotFoundError` if the document does not exist. .. seeal...
def functions(self): return [v for v in self.globals.values() if isinstance(v, values.Function)]
A list of functions declared or defined in this module.
def digest(self, data=None): if self.digest_finalized: return self.digest_out.raw[:self.digest_size] if data is not None: self.update(data) self.digest_out = create_string_buffer(256) length = c_long(0) result = libcrypto.EVP_DigestFinal_ex(self.ctx, self....
Finalizes digest operation and return digest value Optionally hashes more data before finalizing
def shellsort(inlist): n = len(inlist) svec = copy.deepcopy(inlist) ivec = range(n) gap = n / 2 while gap > 0: for i in range(gap, n): for j in range(i - gap, -1, -gap): while j >= 0 and svec[j] > svec[j + gap]: temp = svec[j] ...
Shellsort algorithm. Sorts a 1D-list. Usage: lshellsort(inlist) Returns: sorted-inlist, sorting-index-vector (for original list)
def share_of_standby(df, resolution='24h', time_window=None): p_sb = standby(df, resolution, time_window) df = df.resample(resolution).mean() p_tot = df.sum() p_standby = p_sb.sum() share_standby = p_standby / p_tot res = share_standby.iloc[0] return res
Compute the share of the standby power in the total consumption. Parameters ---------- df : pandas.DataFrame or pandas.Series Power (typically electricity, can be anything) resolution : str, default='d' Resolution of the computation. Data will be resampled to this resolution (as mean) ...
async def fetchrow(self, *, timeout=None): r self._check_ready() if self._exhausted: return None recs = await self._exec(1, timeout) if len(recs) < 1: self._exhausted = True return None return recs[0]
r"""Return the next row. :param float timeout: Optional timeout value in seconds. :return: A :class:`Record` instance.
def resolve_template(template): "Accepts a template object, path-to-template or list of paths" if isinstance(template, (list, tuple)): return loader.select_template(template) elif isinstance(template, basestring): try: return loader.get_template(template) except TemplateD...
Accepts a template object, path-to-template or list of paths
def _cbCvtReply(self, msg, returnSignature): if msg is None: return None if returnSignature != _NO_CHECK_RETURN: if not returnSignature: if msg.signature: raise error.RemoteError( 'Unexpected return value signature') ...
Converts a remote method call reply message into an appropriate callback value.
def describe(self): for stage, corunners in self.get_deployers(): print self.name, "STAGE ", stage for d in corunners: print d.__class__.__name__, ",".join( [p[1].__name__ for p in d.phases] )
Iterates through the deployers but doesn't run anything
def recall(): a = TpPd(pd=0x3) b = MessageType(mesType=0xb) c = RecallType() d = Facility() packet = a / b / c / d return packet
RECALL Section 9.3.18a
def mozjpeg(ext_args): args = copy.copy(_MOZJPEG_ARGS) if Settings.destroy_metadata: args += ["-copy", "none"] else: args += ["-copy", "all"] args += ['-outfile'] args += [ext_args.new_filename, ext_args.old_filename] extern.run_ext(args) return _JPEG_FORMAT
Create argument list for mozjpeg.
def _send_loop(self): while True: message, response_queue = self._send_queue.get() if message is self.STOP: break try: self._response_queues.put(response_queue) self._socket.send(message) except Exception: ...
Service self._send_queue, sending requests to server
def correct_structure(self, atol=1e-8):
    """Determine if the structure matches the standard primitive structure.

    Args:
        atol (:obj:`float`, optional): Absolute tolerance used for the
            element-wise comparison of the two lattice matrices.

    Returns:
        bool: True when ``self.structure`` and ``self.prim`` share the
        same lattice matrix within ``atol``.
    """
    ours = self.structure.lattice.matrix
    reference = self.prim.lattice.matrix
    return np.allclose(ours, reference, atol=atol)
Determine if the structure matches the standard primitive structure. The standard primitive will be different between seekpath and pymatgen high-symmetry paths, but this is handled by the specific subclasses. Args: atol (:obj:`float`, optional): Absolute tolerance used to compare ...
def find_nonzero_constrained_reactions(model): lower_bound, upper_bound = helpers.find_bounds(model) return [rxn for rxn in model.reactions if 0 > rxn.lower_bound > lower_bound or 0 < rxn.upper_bound < upper_bound]
Return list of reactions with non-zero, non-maximal bounds.
def version(self): lines = iter(self._invoke('version').splitlines()) version = next(lines).strip() return self._parse_version(version)
Return the underlying version
def _apply_uncertainty_to_mfd(self, mfd, value): if self.uncertainty_type == 'abGRAbsolute': a, b = value mfd.modify('set_ab', dict(a_val=a, b_val=b)) elif self.uncertainty_type == 'bGRRelative': mfd.modify('increment_b', dict(value=value)) elif self.uncertain...
Modify ``mfd`` object with uncertainty value ``value``.
def pprint(self):
    """Render tag key=value pairs, one per line, with keys sorted."""
    pairs = []
    for key in sorted(self.keys()):
        pairs.extend("%s=%s" % (key, item) for item in self[key])
    return "\n".join(pairs)
Print tag key=value pairs.
def authorize(self, username, arguments=[], authen_type=TAC_PLUS_AUTHEN_TYPE_ASCII, priv_lvl=TAC_PLUS_PRIV_LVL_MIN, rem_addr=TAC_PLUS_VIRTUAL_REM_ADDR, port=TAC_PLUS_VIRTUAL_PORT): with self.closing(): packet = self.send( TACACSAuthorizationStart(u...
Authorize with a TACACS+ server. :param username: :param arguments: The authorization arguments :param authen_type: TAC_PLUS_AUTHEN_TYPE_ASCII, TAC_PLUS_AUTHEN_TYPE_PAP, TAC_PLUS_AUTHEN_TYPE_CHAP :param priv_lvl: ...
def _cast_to_type(self, value): try: return float(value) except (ValueError, TypeError): self.fail('invalid', value=value)
Convert the value to a float and raise error on failures
def filter(self, func): results = OrderedDict() for name, group in self: if func(group): results[name] = group return self.__class__(results)
Filter out Groups based on filtering function. The function should get a FeatureCollection and return True to leave in the Group and False to take it out.
def tab_completion_docstring(self_or_cls): elements = ['%s=Boolean' %k for k in list(Store.renderers.keys())] for name, p in self_or_cls.params().items(): param_type = p.__class__.__name__ elements.append("%s=%s" % (name, param_type)) return "params(%s)" % ', '.join(['hol...
Generates a docstring that can be used to enable tab-completion of resources.
def render(value):
    """Finish the URL pattern by anchoring it.

    Prepends the starting character ``^`` and possibly appends the end
    character, using the module-level ``beginning``/``end`` markers.

    :param value: naive URL value
    :return: raw string pattern (``r'^$'`` for an empty value)
    """
    if not value:
        return r'^$'
    prefix = '' if value[0] == beginning else beginning
    suffix = '' if value[-1] == end else end
    return prefix + value + suffix
This function finishes the url pattern creation by adding starting character ^ end possibly by adding end character at the end :param value: naive URL value :return: raw string
def _handle_lrr(self, data): msg = LRRMessage(data) if not self._ignore_lrr_states: self._lrr_system.update(msg) self.on_lrr_message(message=msg) return msg
Handle Long Range Radio messages. :param data: LRR message to parse :type data: string :returns: :py:class:`~alarmdecoder.messages.LRRMessage`
def rm(ctx, cluster_id): session = create_session(ctx.obj['AWS_PROFILE_NAME']) client = session.client('emr') try: result = client.describe_cluster(ClusterId=cluster_id) target_dns = result['Cluster']['MasterPublicDnsName'] flag = click.prompt( "Are you sure you want to t...
Terminate a EMR cluster
def check_environment_presets(): presets = [x for x in os.environ.copy().keys() if x.startswith('NOVA_') or x.startswith('OS_')] if len(presets) < 1: return True else: click.echo("_" * 80) click.echo("*WARNING* Found existing environment variables that may " ...
Checks for environment variables that can cause problems with supernova
def reload(self): 'Generate histrow for each row and then reverse-sort by length.' self.rows = [] self.discreteBinning() for c in self.nonKeyVisibleCols: c._cachedValues = collections.OrderedDict()
Generate histrow for each row and then reverse-sort by length.
def BuildCloudMetadataRequests(): amazon_collection_map = { "/".join((AMAZON_URL_BASE, "instance-id")): "instance_id", "/".join((AMAZON_URL_BASE, "ami-id")): "ami_id", "/".join((AMAZON_URL_BASE, "hostname")): "hostname", "/".join((AMAZON_URL_BASE, "public-hostname")): "public_hostname", ...
Build the standard set of cloud metadata to collect during interrogate.
def get_interface_detail_request(last_interface_name, last_interface_type): request_interface = ET.Element( 'get-interface-detail', xmlns="urn:brocade.com:mgmt:brocade-interface-ext" ) if last_interface_name != '': last_rec...
Creates a new Netconf request based on the last received interface name and type when the hasMore flag is true
async def _recv_loop(self): while self._connected: try: data = await self._recv() except asyncio.CancelledError: break except Exception as e: if isinstance(e, (IOError, asyncio.IncompleteReadError)): msg = 'T...
This loop is constantly putting items on the queue as they're read.
def unassign_gradebook_column_from_gradebook(self, gradebook_column_id, gradebook_id): mgr = self._get_provider_manager('GRADING', local=True) lookup_session = mgr.get_gradebook_lookup_session(proxy=self._proxy) lookup_session.get_gradebook(gradebook_id) self._unassign_object_from_catalo...
Removes a ``GradebookColumn`` from a ``Gradebook``. arg: gradebook_column_id (osid.id.Id): the ``Id`` of the ``GradebookColumn`` arg: gradebook_id (osid.id.Id): the ``Id`` of the ``Gradebook`` raise: NotFound - ``gradebook_column_id`` or ``gradebook_id`` ...
def handle_unsubscribe_request(cls, request, message, dispatch, hash_is_valid, redirect_to): if hash_is_valid: Subscription.cancel( dispatch.recipient_id or dispatch.address, cls.alias, dispatch.messenger ) signal = sig_unsubscribe_success else: ...
Handles user subscription cancelling request. :param Request request: Request instance :param Message message: Message model instance :param Dispatch dispatch: Dispatch model instance :param bool hash_is_valid: Flag indicating that user supplied request signature is correct :par...
def directed_bipartition(seq, nontrivial=False): bipartitions = [ (tuple(seq[i] for i in part0_idx), tuple(seq[j] for j in part1_idx)) for part0_idx, part1_idx in directed_bipartition_indices(len(seq)) ] if nontrivial: return bipartitions[1:-1] return bipartitions
Return a list of directed bipartitions for a sequence. Args: seq (Iterable): The sequence to partition. Returns: list[tuple[tuple]]: A list of tuples containing each of the two parts. Example: >>> directed_bipartition((1, 2, 3)) # doctest: +NORMALIZE_WHITESPACE [(...
def set_current_time(self, t): method = "set_current_time" A = None metadata = {method: t} send_array(self.socket, A, metadata) A, metadata = recv_array( self.socket, poll=self.poll, poll_timeout=self.poll_timeout, flags=self.zmq_flags)
Set current time of simulation
def decode(self, encoding='utf-8', errors='strict'): original_class = getattr(self, 'original_class') return original_class(super(ColorBytes, self).decode(encoding, errors))
Decode using the codec registered for encoding. Default encoding is 'utf-8'. errors may be given to set a different error handling scheme. Default is 'strict' meaning that encoding errors raise a UnicodeDecodeError. Other possible values are 'ignore' and 'replace' as well as any other name regi...
def get_functions_auth_string(self, target_subscription_id): self._initialize_session() function_auth_variables = [ constants.ENV_FUNCTION_TENANT_ID, constants.ENV_FUNCTION_CLIENT_ID, constants.ENV_FUNCTION_CLIENT_SECRET ] if all(k in os.environ for k ...
Build auth json string for deploying Azure Functions. Look for dedicated Functions environment variables or fall back to normal Service Principal variables.
def matches(text, what): return text.find(what) > -1 if is_string(what) else what.match(text)
Check if ``what`` occurs in ``text``
def ValidateKey(cls, key_path): for prefix in cls.VALID_PREFIXES: if key_path.startswith(prefix): return if key_path.startswith('HKEY_CURRENT_USER\\'): raise errors.FormatError( 'HKEY_CURRENT_USER\\ is not supported instead use: ' 'HKEY_USERS\\%%users.sid%%\\') raise ...
Validates this key against supported key names. Args: key_path (str): path of a Windows Registry key. Raises: FormatError: when key is not supported.
def unique(iterable, key=identity): seen = set() for item in iterable: item_key = key(item) if item_key not in seen: seen.add(item_key) yield item
Yields all the unique values in an iterable maintaining order
def _loopreport(self): while 1: eventlet.sleep(0.2) ac2popenlist = {} for action in self.session._actions: for popen in action._popenlist: if popen.poll() is None: lst = ac2popenlist.setdefault(action.activity, []) ...
Loop over the report progress
def up_capture(self, benchmark, threshold=0.0, compare_op="ge"): slf, bm = self.upmarket_filter( benchmark=benchmark, threshold=threshold, compare_op=compare_op, include_benchmark=True, ) return slf.geomean() / bm.geomean()
Upside capture ratio. Measures the performance of `self` relative to benchmark conditioned on periods where `benchmark` is gt or ge to `threshold`. Upside capture ratios are calculated by taking the fund's monthly return during the periods of positive benchmark performa...
async def create_authenticator_async(self, connection, debug=False, loop=None, **kwargs): self.loop = loop or asyncio.get_event_loop() self._connection = connection self._session = SessionAsync(connection, loop=self.loop, **kwargs) try: self._cbs_auth = c_uamqp.CBSTokenAuth( ...
Create the async AMQP session and the CBS channel with which to negotiate the token. :param connection: The underlying AMQP connection on which to create the session. :type connection: ~uamqp.async_ops.connection_async.ConnectionAsync :param debug: Whether to emit network trace...
def contains(self, other): if self.is_valid_range(other): if not self: return not other elif not other or other.startsafter(self) and other.endsbefore(self): return True else: return False elif self.is_valid_scalar(other...
Return True if this contains other. Other may be either range of same type or scalar of same type as the boundaries. >>> intrange(1, 10).contains(intrange(1, 5)) True >>> intrange(1, 10).contains(intrange(5, 10)) True >>> intrange(1, 10).contains(intr...
def SetEnvironmentVariable(self, name, value): if isinstance(value, py2to3.STRING_TYPES): value = self._PathStripPrefix(value) if value is not None: self._environment_variables[name.upper()] = value
Sets an environment variable in the Windows path helper. Args: name (str): name of the environment variable without enclosing %-characters, e.g. SystemRoot as in %SystemRoot%. value (str): value of the environment variable.
def _cursor(self): if self._conn is None: self._conn = sqlite3.connect(self.filename, check_same_thread=False) return self._conn.cursor()
Asserts that the connection is open and returns a cursor
def parse_quadrant_measurement(quad_azimuth): def rotation_direction(first, second): return np.cross(_azimuth2vec(first), _azimuth2vec(second)) quad_azimuth = quad_azimuth.strip() try: first_dir = quadrantletter_to_azimuth(quad_azimuth[0].upper()) sec_dir = quadrantletter_to_azimuth(...
Parses a quadrant measurement of the form "AxxB", where A and B are cardinal directions and xx is an angle measured relative to those directions. In other words, it converts a measurement such as E30N into an azimuth of 60 degrees, or W10S into an azimuth of 260 degrees. For ambiguous quadrant measure...
def _generate_api_config_with_root(self, request): actual_root = self._get_actual_root(request) generator = api_config.ApiConfigGenerator() api = request.body_json['api'] version = request.body_json['version'] lookup_key = (api, version) service_factories = self._backend.api_name_version_map.get...
Generate an API config with a specific root hostname. This uses the backend object and the ApiConfigGenerator to create an API config specific to the hostname of the incoming request. This allows for flexible API configs for non-standard environments, such as localhost. Args: request: An ApiRequ...
def _refresh(self, _): from google.appengine.api import app_identity try: token, _ = app_identity.get_access_token(self._scopes) except app_identity.Error as e: raise exceptions.CredentialsError(str(e)) self.access_token = token
Refresh self.access_token. Args: _: (ignored) A function matching httplib2.Http.request's signature.
def parse_table_properties(doc, table, prop): "Parse table properties." if not table: return style = prop.find(_name('{{{w}}}tblStyle')) if style is not None: table.style_id = style.attrib[_name('{{{w}}}val')] doc.add_style_as_used(table.style_id)
Parse table properties.