code stringlengths 51 2.38k | docstring stringlengths 4 15.2k |
|---|---|
def row_to_dict(row):
    """Convert a table row to a dictionary.

    String-typed cells (byte or unicode dtype kinds 'S'/'U') are coerced
    to native ``str``; every other value is passed through unchanged.

    :param row: row object exposing ``colnames`` and item access.
    :return: dict mapping column name to value.
    """
    o = {}
    for colname in row.colnames:
        value = row[colname]
        # BUG FIX: the original required isinstance(value, np.string_),
        # which only matches *byte* strings, so unicode ('U') cells were
        # never converted even though the kind check allowed them.
        # Checking the dtype kind directly covers both.
        if getattr(value, "dtype", None) is not None and value.dtype.kind in ('S', 'U'):
            o[colname] = str(value)
        else:
            o[colname] = value
    return o
def get_compression_extension(self):
    """Find the filename extension for the 'docker save' output, which
    may or may not be compressed.

    Raises OsbsValidationException if the extension cannot be
    determined due to a configuration error.

    :returns: str including leading dot, or else None if no compression
    """
    build_request = BuildRequest(build_json_store=self.os_conf.get_build_json_store())
    postbuild_plugins = build_request.inner_template.get('postbuild_plugins', [])
    extension_for = {'gzip': '.gz', 'lzma': '.xz'}
    for plugin in postbuild_plugins:
        if plugin.get('name') != 'compress':
            continue
        method = plugin.get('args', {}).get('method', 'gzip')
        if method in extension_for:
            return extension_for[method]
        raise OsbsValidationException("unknown compression method '%s'"
                                      % method)
    return None
def add_argument_to(self, parser):
    """Used by cli to add this as an argument to argparse parser.

    Args:
        parser: parser to add this argument to
    """
    from devassistant.cli.devassistant_argparse import DefaultIffUsedActionFactory
    action = self.kwargs.get('action', '')
    # a ['default_iff_used', <value>] marker is replaced by a generated
    # argparse action class
    if isinstance(action, list) and action[0] == 'default_iff_used':
        self.kwargs['action'] = DefaultIffUsedActionFactory.generate_action(action[1])
    self.kwargs.pop('preserved', None)
    try:
        parser.add_argument(*self.flags, **self.kwargs)
    except Exception as ex:
        problem = "Error while adding argument '{name}': {error}".format(
            name=self.name, error=repr(ex))
        raise exceptions.ExecutionException(problem)
def prepare_ec(oo, sizes, M):
    """Prepare EC input and convert from contig_id to an index.

    :param oo: ordered sequence of contig ids.
    :param sizes: object with a ``sizes`` mapping of contig id to size.
    :param M: square matrix indexed by contig id.
    :return: tuple of (index range, size array, reindexed submatrix).
    """
    tour = range(len(oo))
    size_of = sizes.sizes
    tour_sizes = np.array([size_of[contig_id] for contig_id in oo])
    # select rows then columns so the matrix follows the tour order
    tour_M = M[oo, :][:, oo]
    return tour, tour_sizes, tour_M
def operator_si(u):
    """operator_si operator.

    Erodes ``u`` with each structuring element in the module-level bank
    (``_P2`` for 2-D input, ``_P3`` for 3-D) and takes the pointwise
    maximum over the results, reusing a shared scratch buffer ``_aux``.

    :param u: 2-D or 3-D array.
    :return: array of the same shape as ``u``.
    :raise ValueError: if ``u`` is not 2-D or 3-D.
    """
    global _aux
    # choose the structuring-element bank matching the dimensionality
    if np.ndim(u) == 2:
        P = _P2
    elif np.ndim(u) == 3:
        P = _P3
    else:
        raise ValueError("u has an invalid number of dimensions "
                         "(should be 2 or 3)")
    # (re)allocate the shared scratch buffer when the input shape changes
    if u.shape != _aux.shape[1:]:
        _aux = np.zeros((len(P),) + u.shape)
    for _aux_i, P_i in zip(_aux, P):
        _aux_i[:] = binary_erosion(u, P_i)
    return _aux.max(0)
def do_size(self, w, h):
    """Record size and render the image at that size.

    With ``w`` of None the native region size (self.rw, self.rh) is
    used; otherwise the region is scaled to w-by-h. The rendered image
    is stored on ``self.image``.
    """
    if w is None:
        self.sw, self.sh = self.rw, self.rh
    else:
        self.sw, self.sh = w, h
    image = Image.new("RGB", (self.sw, self.sh), self.gen.background_color)
    for y in range(self.sh):
        for x in range(self.sw):
            # map output pixel back into the source region
            src_x = int((x * self.rw) // self.sw + self.rx)
            src_y = int((y * self.rh) // self.sh + self.ry)
            color = self.gen.pixel(src_x, src_y)
            if color is not None:
                image.putpixel((x, y), color)
    self.image = image
def copyNamespace(self):
    """Do a copy of the namespace.

    :return: a new xmlNs wrapping the copied namespace.
    :raise treeError: if the underlying xmlCopyNamespace() call fails.
    """
    result = libxml2mod.xmlCopyNamespace(self._o)
    if result is None:
        raise treeError('xmlCopyNamespace() failed')
    return xmlNs(_obj=result)
def get_activity_ids_by_objective_banks(self, objective_bank_ids):
    """Gets the list of ``Activity Ids`` corresponding to a list of
    ``ObjectiveBanks``.

    arg:    objective_bank_ids (osid.id.IdList): list of objective
            bank ``Ids``
    return: (osid.id.IdList) - list of activity ``Ids``
    raise:  NullArgument - ``objective_bank_ids`` is ``null``
    raise:  OperationFailed - unable to complete request
    raise:  PermissionDenied - authorization failure
    *compliance: mandatory -- This method must be implemented.*
    """
    activities = self.get_activities_by_objective_banks(objective_bank_ids)
    return IdList([activity.get_id() for activity in activities])
def dprintx(passeditem, special=False):
    """Print Text if DEBUGALL set, optionally with PrettyPrint.

    Args:
        passeditem (str): item to print
        special (bool): determines if item prints with PrettyPrint
            or regular print.
    """
    if not DEBUGALL:
        return
    if special:
        from pprint import pprint
        pprint(passeditem)
    else:
        print("%s%s%s" % (C_TI, passeditem, C_NORM))
def get_fake(locale=None):
    """Return a shared faker factory used to generate fake data.

    Factories are cached per locale on the ``Faker`` class so repeated
    calls with the same locale reuse one instance.

    :param locale: locale name; defaults to ``Faker.default_locale``.
    """
    if locale is None:
        locale = Faker.default_locale
    attr = '_fake_' + locale
    # BUG FIX: the original tested ``hasattr(Maker, ...)`` — an undefined
    # name — and stored a single shared ``Faker._fake``, so the per-locale
    # attribute it checked was never set and the factory was rebuilt on
    # every call.
    if not hasattr(Faker, attr):
        setattr(Faker, attr, faker.Factory.create(locale))
    return getattr(Faker, attr)
def _kill_managed_threads(self):
    """Kill any currently executing managed threads.

    See :meth:`ServiceContainer.spawn_managed_thread`
    """
    threads = list(self._managed_threads.items())
    if threads:
        _log.warning('killing %s managed thread(s)', len(threads))
    for green_thread, identifier in threads:
        _log.warning('killing managed thread `%s`', identifier)
        green_thread.kill()
def run_dynamic_structure_factor(self,
                                 Qpoints,
                                 T,
                                 atomic_form_factor_func=None,
                                 scattering_lengths=None,
                                 freq_min=None,
                                 freq_max=None):
    """Run dynamic structure factor calculation.

    Convenience wrapper: initialises the calculation via
    ``init_dynamic_structure_factor`` with the same arguments, then
    runs it.  See the detail of parameters at
    Phonopy.init_dynamic_structure_factor().
    """
    self.init_dynamic_structure_factor(
        Qpoints,
        T,
        atomic_form_factor_func=atomic_form_factor_func,
        scattering_lengths=scattering_lengths,
        freq_min=freq_min,
        freq_max=freq_max)
    self._dynamic_structure_factor.run()
def sam_list(sam):
    """Get the set of mapped read ids from SAM input.

    :param sam: iterable of SAM file handles (or line iterables).
    :return: set of read ids whose FLAG field is neither 4 (segment
        unmapped) nor 8 (next segment unmapped).
    """
    # NOTE: the original shadowed the builtins `list`, `file`, `id`
    # and `map`; locals renamed and the list+set replaced by one set.
    mapped = set()
    for handle in sam:
        for line in handle:
            # skip header lines
            if line.startswith('@'):
                continue
            fields = line.strip().split()
            read_id, flag = fields[0], int(fields[1])
            if flag != 4 and flag != 8:
                mapped.add(read_id)
    return mapped
def fill_between(self, canvas, X, lower, upper, color=None, label=None, **kwargs):
    """Fill along the xaxis between lower and upper.

    the kwargs are plotting library specific kwargs!
    """
    # abstract: concrete plotting backends must override this
    raise NotImplementedError(
        "Implement all plot functions in AbstractPlottingLibrary "
        "in order to use your own plotting library")
def _generate_password():
uuid_str = six.text_type(uuid.uuid4()).encode("UTF-8")
return hashlib.sha1(uuid_str).hexdigest() | Create a random password
The password is made by taking a uuid and passing it though sha1sum.
We may change this in future to gain more entropy.
This is based on the tripleo command os-make-password |
def run(self):
    """Print .new configuration files that were installed."""
    self.find_new()
    for new_file in self.news:
        print("{0}".format(new_file))
    print("")
    self.msg.template(78)
    print("| Installed {0} new configuration files:".format(len(self.news)))
    self.msg.template(78)
    self.choices()
def scramble_date(self, value, format='%d %b %Y'):
    """Return a random date.

    With an empty value the date is drawn from the last year up to now;
    otherwise it is drawn from the 365 days preceding the parsed value.
    """
    if value == '':
        # faker understands these relative specifiers directly
        start_date, end_date = '-1y', 'now'
    else:
        end_date = datetime.datetime.strptime(value, format).date()
        start_date = end_date - datetime.timedelta(days=365)
    return fake.date_time_between(
        start_date=start_date, end_date=end_date).strftime(format).upper()
def report_privilege_information():
    """Report all privilege information assigned to the current process."""
    privileges = get_privilege_information()
    print("found {0} privileges".format(privileges.count))
    for privilege in privileges:
        print(privilege)
def is_hidden(path):
    """Check if file is hidden.

    A file is considered hidden when its basename starts with a dot,
    when the Windows hidden attribute is set, or when the macOS
    ``UF_HIDDEN`` flag is set.

    :param path: path as ``str`` or ``bytes``.
    :return: True when the file is hidden.
    """
    hidden = False
    f = os.path.basename(path)
    # dotfile convention applies on every platform; handle bytes paths too
    if f[:1] in ('.', b'.'):
        hidden = True
    elif _PLATFORM == 'windows':
        FILE_ATTRIBUTE_HIDDEN = 0x2
        if PY35:
            # Python 3.5+ exposes st_file_attributes on Windows
            results = os.lstat(path)
            hidden = bool(results.st_file_attributes & FILE_ATTRIBUTE_HIDDEN)
        else:
            # fall back to the Win32 API; A/W variant must match the
            # bytes/str type of the path
            if isinstance(path, bytes):
                attrs = ctypes.windll.kernel32.GetFileAttributesA(path)
            else:
                attrs = ctypes.windll.kernel32.GetFileAttributesW(path)
            # -1 == INVALID_FILE_ATTRIBUTES (call failed)
            hidden = attrs != -1 and attrs & FILE_ATTRIBUTE_HIDDEN
    elif _PLATFORM == "osx":
        results = os.lstat(path)
        hidden = bool(results.st_flags & stat.UF_HIDDEN)
    return hidden
def read(self, timeout=None):
    """Read from the transport. If no data is available, should return None.

    The timeout is ignored as this returns only data that has already
    been buffered locally.
    """
    if not hasattr(self, '_sock'):
        return None
    # NOTE: the timeout is used only to (re)arm the heartbeat timer;
    # the read below never blocks on it.
    if timeout:
        if self._heartbeat_timeout:
            self._heartbeat_timeout.delete()
        self._heartbeat_timeout = \
            event.timeout(timeout, self._sock_read_cb, self._sock)
    elif self._heartbeat_timeout:
        # no timeout requested: disarm any existing heartbeat timer
        self._heartbeat_timeout.delete()
        self._heartbeat_timeout = None
    return self._sock.read()
def convert_context_to_csv(self, context):
    """Convert the context dictionary into a CSV file.

    Builds a header row from the date headers, one row per summary
    entry for the current export key, and a totals row per summary
    group.
    """
    header = ['Name']
    header += [day.strftime('%m/%d/%Y') for day in context['date_headers']]
    header.append('Total')
    content = [header]
    summary = context['summaries'].get(self.export, [])
    for rows, totals in summary:
        for name, user_id, hours in rows:
            content.append([name] + list(hours))
        content.append(['Totals'] + list(totals))
    return content
def add_header(self, key, value, **params):
    """Add a header to the collection, including potential parameters.

    Args:
        key (str): The name of the header
        value (str): The value to store under that key
        params: Option parameters to be appended to the value,
            automatically formatting them in a standard way
    """
    key = self.escape(key)
    # headers are stored case-insensitively under the folded key
    ci_key = key.casefold()
    formatted = [
        '%s="%s"' % (self.escape(name), self.de_quote(self.escape(val)))
        for name, val in sorted(params.items())
    ]
    if formatted:
        value = "%s; %s" % (value, ' '.join(formatted))
    self._header_data[ci_key] = (key, value)
def wrap_handler(cls, handler, protocol, **kwargs):
    """Wrap a request handler with the matching protocol handler.

    :param handler: the handler callable to wrap.
    :param protocol: one of the ``Resource.Protocol`` values.
    :return: the wrapping function.
    :raise Exception: when the protocol is not recognised.
    """
    def _wrapper(request, *args, **kwargs):
        instance = cls(request=request, **kwargs)
        if protocol == Resource.Protocol.http:
            return instance._wrap_http(handler, request=request, **kwargs)
        elif protocol == Resource.Protocol.websocket:
            return instance._wrap_ws(handler, request=request, **kwargs)
        elif protocol == Resource.Protocol.amqp:
            # BUG FIX: the original passed the undefined name
            # ``view_type`` here, raising NameError for every AMQP
            # request; the wrapped handler is what must be forwarded.
            return instance._wrap_amqp(handler, *args, **kwargs)
        else:
            raise Exception('Communication protocol not specified')
    return _wrapper
def node_is_subclass(cls, *subclass_names):
    """Checks if cls node has parent with subclass_name."""
    if not isinstance(cls, (ClassDef, Instance)):
        return False
    for base in cls.bases:
        # inference may fail for a base; skip it and try the next one
        try:
            for candidate in base.inferred():
                if candidate.qname() in subclass_names:
                    return True
                # recurse so indirect ancestors are found as well
                if candidate != cls and node_is_subclass(candidate, *subclass_names):
                    return True
        except InferenceError:
            continue
    return False
def create_partitions(self, topic_partitions, timeout_ms=None, validate_only=False):
    """Create additional partitions for an existing topic.

    :param topic_partitions: A map of topic name strings to NewPartition objects.
    :param timeout_ms: Milliseconds to wait for new partitions to be
        created before the broker returns.
    :param validate_only: If True, don't actually create new partitions.
        Default: False
    :return: Appropriate version of CreatePartitionsResponse class.
    :raise NotImplementedError: for broker API versions other than 0.
    """
    version = self._matching_api_version(CreatePartitionsRequest)
    timeout_ms = self._validate_timeout(timeout_ms)
    if version == 0:
        request = CreatePartitionsRequest[version](
            topic_partitions=[self._convert_create_partitions_request(topic_name, new_partitions) for topic_name, new_partitions in topic_partitions.items()],
            timeout=timeout_ms,
            validate_only=validate_only
        )
    else:
        raise NotImplementedError(
            "Support for CreatePartitions v{} has not yet been added to KafkaAdminClient."
            .format(version))
    # partition changes must go to the controller broker
    return self._send_request_to_controller(request)
def show_graph(self, format='svg'):
    """Render this Pipeline as a DAG.

    Parameters
    ----------
    format : {'svg', 'png', 'jpeg'}
        Image format to render with. Default is 'svg'.
    """
    g = self.to_simple_graph(AssetExists())
    if format in ('svg', 'png', 'jpeg'):
        return getattr(g, format)
    raise AssertionError("Unknown graph format %r." % format)
def run_init_tables(*args):
    """Run to init tables."""
    print('--')
    # creation order preserved from the original explicit call list
    tables = (
        TabPost, TabTag, TabMember, TabWiki, TabLink, TabEntity,
        TabPostHist, TabWikiHist, TabCollect, TabPost2Tag, TabRel,
        TabEvaluation, TabUsage, TabReply, TabUser2Reply, TabRating,
        TabEntity2User, TabLog,
    )
    for table in tables:
        create_table(table)
def validate(self, proxy_ip, client_ip):
    """Looks up the proxy identified by its IP, then verifies that
    the given client IP may be introduced by that proxy.

    :param proxy_ip: The IP address of the proxy.
    :param client_ip: The IP address of the supposed client.
    :returns: True if the proxy is permitted to introduce the
              client; False if the proxy doesn't exist or isn't
              permitted to introduce the client.
    """
    proxy = self.pseudo_proxy
    if not proxy:
        # no pseudo-proxy configured: look up the real proxy by IP
        if proxy_ip not in self.proxies:
            return False
        proxy = self.proxies[proxy_ip]
    return client_ip in proxy
def ifft(a, n=None, axis=-1, norm=None):
    """Compute the one-dimensional inverse discrete Fourier Transform.

    This function computes the inverse of the one-dimensional *n*-point
    discrete Fourier transform computed by `fft`; ``ifft(fft(a)) == a``
    to within numerical accuracy.  The transform itself is delegated to
    ``mkl_fft.ifft``.

    Parameters
    ----------
    a : array_like
        Input array, can be complex.
    n : int, optional
        Length of the transformed axis of the output.  Shorter input is
        zero padded at the end; longer input is cropped.  Defaults to
        the input length along `axis`.
    axis : int, optional
        Axis over which to compute the inverse DFT.  Default: last axis.
    norm : {None, "ortho"}, optional
        Normalization mode (see `numpy.fft`).  Default is None.

    Returns
    -------
    out : complex ndarray
        The truncated or zero-padded input, transformed along `axis`.

    Examples
    --------
    >>> np.fft.ifft([0, 4, 0, 0])
    array([ 1.+0.j,  0.+1.j, -1.+0.j,  0.-1.j])
    """
    unitary = _unitary(norm)
    output = mkl_fft.ifft(a, n, axis)
    if unitary:
        # mkl applies the full 1/n factor; multiplying by sqrt(n)
        # leaves the symmetric 1/sqrt(n) "ortho" normalization
        output *= sqrt(output.shape[axis])
    return output
def add_source(self, source):
    """Add a geocoding service to this instance.

    ``source`` is a (service_name, kwargs) pair: the named service
    class is resolved, instantiated with the kwargs and appended.
    """
    service_cls = self._get_service_by_name(source[0])
    self._sources.append(service_cls(**source[1]))
def _get_specifications(specifications):
    """Computes the list of strings corresponding to the given specifications.

    :param specifications: A string, a class or a list of specifications
    :return: A list of strings
    :raise ValueError: Invalid specification found
    """
    if not specifications or specifications is object:
        raise ValueError("No specifications given")
    elif inspect.isclass(specifications):
        if Provides.USE_MODULE_QUALNAME:
            # qualified-name mode: "<module>.<qualname>"
            if sys.version_info < (3, 3, 0):
                raise ValueError(
                    "Qualified name capability requires Python 3.3+"
                )

            # Get the name of the class
            if not specifications.__module__:
                return [specifications.__qualname__]

            return [
                "{0}.{1}".format(
                    specifications.__module__, specifications.__qualname__
                )
            ]
        else:
            # legacy mode: bare class name only
            return [specifications.__name__]
    elif is_string(specifications):
        # empty or whitespace-only strings are rejected
        specifications = specifications.strip()
        if not specifications:
            raise ValueError("Empty specification given")
        return [specifications]
    elif isinstance(specifications, (list, tuple)):
        # flatten nested lists/tuples recursively
        results = []
        for specification in specifications:
            results.extend(_get_specifications(specification))
        return results
    else:
        raise ValueError(
            "Unhandled specifications type : {0}".format(
                type(specifications).__name__
            )
        )
def filter_and_date(local_root, conf_rel_paths, commits):
    """Get commit Unix timestamps and first matching conf.py path. Exclude commits with no conf.py file.

    :raise CalledProcessError: Unhandled git command failure.
    :raise GitError: A commit SHA has not been fetched.

    :param str local_root: Local path to git root directory.
    :param iter conf_rel_paths: List of possible relative paths (to git root) of Sphinx conf.py (e.g. docs/conf.py).
    :param iter commits: List of commit SHAs.

    :return: Commit time (seconds since Unix epoch) for each commit and conf.py path. SHA keys and [int, str] values.
    :rtype: dict
    """
    dates_paths = dict()

    # First pass: find which commits contain one of the conf.py paths.
    for commit in commits:
        if commit in dates_paths:
            continue
        command = ['git', 'ls-tree', '--name-only', '-r', commit] + conf_rel_paths
        try:
            output = run_command(local_root, command)
        except CalledProcessError as exc:
            raise GitError('Git ls-tree failed on {0}'.format(commit), exc.output)
        if output:
            # timestamp filled in below; first listed path wins
            dates_paths[commit] = [None, output.splitlines()[0].strip()]

    # Second pass: fetch commit timestamps in batches of 50 SHAs.
    command_prefix = ['git', 'show', '--no-patch', '--pretty=format:%ct']
    for commits_group in chunk(dates_paths, 50):
        command = command_prefix + commits_group
        output = run_command(local_root, command)
        timestamps = [int(i) for i in RE_UNIX_TIME.findall(output)]
        for i, commit in enumerate(commits_group):
            dates_paths[commit][0] = timestamps[i]

    return dates_paths
def ec2_table(instances):
    """Build a nice looking table of information from a list of instances."""
    table = prettytable.PrettyTable(
        ['ID', 'State', 'Monitored', 'Image', 'Name', 'Type', 'SSH key', 'DNS'])
    table.align = 'l'
    for inst in instances:
        table.add_row([inst.id, inst.state, inst.monitored, inst.image_id,
                       inst.tags.get('Name', ''), inst.instance_type,
                       inst.key_name, inst.dns_name])
    return table
def read_until_done(self, command, timeout=None):
    """Yield messages read until we receive a 'DONE' command.

    Read messages of the given command until we receive a 'DONE' command. If a
    command different than the requested one is received, an AdbProtocolError
    is raised.

    Args:
        command: The command to expect, like 'DENT' or 'DATA'.
        timeout: The timeouts.PolledTimeout to use for this operation.

    Yields:
        Messages read, of type self.RECV_MSG_TYPE, see read_message().

    Raises:
        AdbProtocolError: If an unexpected command is read.
        AdbRemoteError: If a 'FAIL' message is read.
    """
    while True:
        message = self.read_message(timeout)
        if message.command == 'DONE':
            break
        message.assert_command_is(command)
        yield message
def _pname_and_metadata(in_file):
    """Retrieve metadata and project name from the input metadata CSV file.

    Uses the input file name for the project name and for back compatibility,
    accepts the project name as an input, providing no metadata.

    :param in_file: local path, remote objectstore path, or bare project name.
    :return: tuple of (safe project name, metadata dict, global vars dict,
        metadata file path or None).
    """
    if os.path.isfile(in_file):
        # local metadata file
        with open(in_file) as in_handle:
            md, global_vars = _parse_metadata(in_handle)
        base = os.path.splitext(os.path.basename(in_file))[0]
        md_file = in_file
    elif objectstore.is_remote(in_file):
        # remote metadata file (e.g. S3); no local md_file path to report
        with objectstore.open_file(in_file) as in_handle:
            md, global_vars = _parse_metadata(in_handle)
        base = os.path.splitext(os.path.basename(in_file))[0]
        md_file = None
    else:
        # back-compat: a bare project name, but a missing .csv is an error
        if in_file.endswith(".csv"):
            raise ValueError("Did not find input metadata file: %s" % in_file)
        base, md, global_vars = _safe_name(os.path.splitext(os.path.basename(in_file))[0]), {}, {}
        md_file = None
    return _safe_name(base), md, global_vars, md_file
def get_current_user_info(anchore_auth):
    """Return the metadata about the current user as supplied by the
    anchore.io service. Includes permissions and tier access.

    :param anchore_auth: dict with 'client_info_url', 'username' and
        'password' entries.
    :return: Dict of user metadata
    :raise requests.HTTPError: on any non-200 response from the service.
    """
    # removed unused locals (user_timeout, retries) that were never applied
    user_url = anchore_auth['client_info_url'] + '/' + anchore_auth['username']
    result = requests.get(user_url,
                          headers={'x-anchore-password': anchore_auth['password']})
    if result.status_code != 200:
        raise requests.HTTPError(
            'Error response from service: {}'.format(result.status_code))
    return json.loads(result.content)
def Random(self):
    """Chooses a random element from this PMF.

    Returns:
        float value from the Pmf

    Raises:
        ValueError: if the Pmf contains no values or its probabilities
            sum to less than 1 (unnormalized).
    """
    if len(self.d) == 0:
        raise ValueError('Pmf contains no values.')

    target = random.random()
    total = 0.0
    # walk the CDF until it crosses the random target
    for x, p in self.d.items():  # FIX: iteritems() is Python 2 only
        total += p
        if total >= target:
            return x

    # only reachable when probabilities sum to < 1; raise instead of the
    # original bare `assert False` (stripped under -O)
    raise ValueError('Pmf is not normalized; probabilities sum below 1.')
def inv(self):
    """Invert the transform.

    After calling this method, calling the instance will do the inverse
    transform. Calling this twice return the instance to the original
    transform.
    """
    # swap the axes and their shadow copies
    self.x, self.y = self.y, self.x
    self._x_, self._y_ = self._y_, self._x_
    # scale factors are simultaneously swapped and reciprocated
    self.xfac, self.yfac = 1 / self.yfac, 1 / self.xfac
    self._xfac_, self._yfac_ = 1 / self._yfac_, 1 / self._xfac_
    # NOTE(review): 1/conj(u) equals u/|u|^2 — this is the exact inverse
    # only if |_u| == 1 (a pure phase factor); confirm that invariant.
    self._u = 1 / self._u.conj()
def read_parameters(self):
    """Read a PARAMETERS.OUT file.

    The parsed namelists are cleaned, their ``nml_`` prefixes dropped,
    and the result is stored on ``self.config``.

    Returns
    -------
    dict
        A dictionary containing all the configuration used by MAGICC

    Raises
    ------
    FileNotFoundError
        If no PARAMETERS.OUT exists in ``self.out_dir``.
    """
    param_fname = join(self.out_dir, "PARAMETERS.OUT")
    if not exists(param_fname):
        raise FileNotFoundError("No PARAMETERS.OUT found")
    with open(param_fname) as nml_file:
        parameters = dict(f90nml.read(nml_file))
        for group in ["nml_years", "nml_allcfgs", "nml_outputcfgs"]:
            parameters[group] = dict(parameters[group])
            # normalise each raw namelist value
            for k, v in parameters[group].items():
                parameters[group][k] = _clean_value(v)
            # expose groups without their "nml_" prefix
            parameters[group.replace("nml_", "")] = parameters.pop(group)
        self.config = parameters
    return parameters
def get_last_doc(self):
    """Returns the last document stored in the Solr engine."""
    try:
        result = self.solr.search('*:*', sort='_ts desc', rows=1)
    except ValueError:
        return None
    for doc in result:
        # expose the unique key under the conventional '_id' name
        doc['_id'] = doc.pop(self.unique_key)
        return doc
def _create_symlink_cygwin(self, initial_path, final_path):
    """Use cygwin's ln to generate a symbolic link.

    :raise Exception: with stderr output if ln reports an error.
    :return: stripped stdout of the ln command.
    """
    ln_path = os.path.join(self._cygwin_bin_location, "ln.exe")
    cmd = [ln_path, "-s",
           self._get_cygwin_path(initial_path),
           self._get_cygwin_path(final_path)]
    process = Popen(cmd, stdout=PIPE, stderr=PIPE, shell=False)
    out, err = process.communicate()
    if err:
        print(err)
        raise Exception(err)
    return out.strip()
def _create_ema_callback(self):
    """Create a hook-only callback which maintain EMA of the queue size.

    Also tf.summary.scalar the EMA.
    """
    with self.cached_name_scope():
        # in TF there is no API to get queue capacity, so we can only summary the size
        size = tf.cast(self.queue.size(), tf.float32, name='queue_size')
        size_ema_op = add_moving_summary(size, collection=None, decay=0.5)[0].op
        # run the EMA update op every step, but never as a standalone trigger
        ret = RunOp(
            lambda: size_ema_op,
            run_before=False,
            run_as_trigger=False,
            run_step=True)
        ret.name_scope = "InputSource/EMA"
        return ret
def error(msg, log_file=None):
    """Print, output error message and raise RuntimeError.

    :param msg: error message text.
    :param log_file: optional log file the message is appended to.
    :raise RuntimeError: always, carrying ``msg``.
    """
    UtilClass.print_msg(msg + os.linesep)
    if log_file is not None:
        UtilClass.writelog(log_file, msg, 'append')
    raise RuntimeError(msg)
def expand_factor_conditions(s, env):
    """If env matches the expanded factor then return value else return ''.

    Example
    -------
    >>> s = 'py{33,34}: docformatter'
    >>> expand_factor_conditions(s, Env(name="py34", ...))
    "docformatter"
    >>> expand_factor_conditions(s, Env(name="py26", ...))
    ""
    """
    try:
        factor, value = re.split(r'\s*\:\s*', s)
    except ValueError:
        # no "factor: value" separator present — return the string as-is
        return s
    return value if matches_factor_conditions(factor, env) else ''
def creator_type(self, creator_type):
    """Sets the creator_type of this Event.

    :param creator_type: The creator_type of this Event.  # noqa: E501
    :type: list[str]
    :raise ValueError: if any value is outside USER/ALERT/SYSTEM.
    """
    allowed_values = ["USER", "ALERT", "SYSTEM"]
    invalid = set(creator_type) - set(allowed_values)
    if invalid:
        raise ValueError(
            "Invalid values for `creator_type` [{0}], must be a subset of [{1}]"
            .format(", ".join(map(str, invalid)),
                    ", ".join(map(str, allowed_values)))
        )
    self._creator_type = creator_type
def duplicate_txn_id(ipn_obj):
    """Returns True if a record with this transaction id exists and its
    payment_status has not changed.

    This function has been completely changed from its previous implementation
    where it used to specifically only check for a Pending->Completed
    transition.

    :param ipn_obj: the IPN model instance being checked.
    :return: bool
    """
    # most recent unflagged record with the same txn_id, excluding this one
    similars = (ipn_obj.__class__._default_manager
                .filter(txn_id=ipn_obj.txn_id)
                .exclude(id=ipn_obj.id)
                .exclude(flag=True)
                .order_by('-created_at')[:1])

    if len(similars) > 0:
        # it is a duplicate only when the status did not change
        return similars[0].payment_status == ipn_obj.payment_status
    return False
def get_uri(source):
    """Check a media source as a valid file or uri and return the proper uri."""
    import gst
    src_info = source_info(source)
    if src_info['is_file']:
        # local file: recurse with its uri form
        return get_uri(src_info['uri'])
    if gst.uri_is_valid(source):
        protocol = gst.uri_get_protocol(source)
        if not gst.uri_protocol_is_supported(gst.URI_SRC, protocol):
            raise IOError('Invalid URI source for Gstreamer')
        return source
    raise IOError('Failed getting uri for path %s: no such file' % source)
def is_same_service(self, endpoint):
    """Tests if this endpoint and the given one have the same framework UUID
    and service ID.

    :param endpoint: Another endpoint
    :return: True if both endpoints represent the same remote service
    """
    # keep short-circuit semantics: service IDs are only compared when
    # the framework UUIDs already match
    if self.get_framework_uuid() != endpoint.get_framework_uuid():
        return False
    return self.get_service_id() == endpoint.get_service_id()
def get_tuple_type_str_parts(s: str) -> Optional[Tuple[str, Optional[str]]]:
    """Takes a JSON ABI type string. For tuple type strings, returns the
    separated prefix and array dimension parts. For all other strings,
    returns ``None``.
    """
    match = TUPLE_TYPE_STR_RE.match(s)
    if match is None:
        return None
    # group 1: tuple prefix, group 2: array dimensions (may be None)
    return match.group(1), match.group(2)
def _verifyDiscoveryResults(self, resp_msg, endpoint=None):
    """Extract the information from an OpenID assertion message and
    verify it against the original.

    @param endpoint: The endpoint that resulted from doing discovery
    @param resp_msg: The id_res message object

    @returns: the verified endpoint
    """
    # dispatch on protocol version; anything not OpenID 2 is OpenID 1
    if resp_msg.getOpenIDNamespace() != OPENID2_NS:
        return self._verifyDiscoveryResultsOpenID1(resp_msg, endpoint)
    return self._verifyDiscoveryResultsOpenID2(resp_msg, endpoint)
def __exists_row_not_too_old(self, row):
    """Check if the given row exists and is not too old.

    :param row: sequence whose third field is a parseable timestamp
        string, or None when the row does not exist.
    :return: True when the row exists and its age in seconds does not
        exceed ``self.max_age``.
    """
    if row is None:
        return False
    record_time = dateutil.parser.parse(row[2])
    now = datetime.datetime.now(dateutil.tz.gettz())
    # BUG FIX: age must be now - record_time; the original computed
    # (record_time - now), which is negative for any past record, so the
    # max_age check never rejected stale rows.
    age = (now - record_time).total_seconds()
    if age > self.max_age:
        return False
    return True
def get_host_map(root):
    """Gets a mapping between CM hostId and Nagios host information.

    The key is the CM hostId.
    The value is an object containing the Nagios hostname and host address.
    """
    hosts_map = {
        host.hostId: {"hostname": NAGIOS_HOSTNAME_FORMAT % (host.hostname,),
                      "address": host.ipAddress}
        for host in root.get_all_hosts()
    }
    # clusters are addressed by their (quoted) name
    for cluster in root.get_all_clusters():
        hosts_map[cluster.name] = {"hostname": cluster.name,
                                   "address": quote(cluster.name)}
    # placeholder entry for Cloudera Manager itself
    hosts_map[CM_DUMMY_HOST] = {"hostname": CM_DUMMY_HOST,
                                "address": CM_DUMMY_HOST}
    return hosts_map
def table_dataset_database_table(
    table=None,
    include_attributes=None,
    rows_limit=None,
    print_progress=False,
):
    """Create a pyprel table contents list from a database table of the module
    dataset.

    Attributes to be included in the table can be specified; by default, all
    attributes are included. A limit on the number of rows included can be
    specified. Progress on building the table can be reported.

    :param table: dataset table (len()-able, iterable of rows, exposing a
        ``columns`` attribute).
    :param include_attributes: columns to include; all columns by default.
    :param rows_limit: maximum number of rows to include.
    :param print_progress: report fractional progress while building.
    :return: list of rows, the first entry being the column headers.
    """
    if print_progress:
        import shijian
        progress = shijian.Progress()
        progress.engage_quick_calculation_mode()
        number_of_rows = len(table)
    columns = include_attributes if include_attributes else table.columns
    table_contents = [columns]
    for index_row, row in enumerate(table):
        if rows_limit is not None and index_row >= rows_limit:
            break
        row_contents = []
        for column in columns:
            try:
                string_representation = str(row[column])
            except Exception:
                # FIX: narrowed from a bare except (which also swallowed
                # KeyboardInterrupt/SystemExit); fall back for values
                # whose str() raises
                string_representation = str(row[column].encode("utf-8"))
            row_contents.append(string_representation)
        table_contents.append(row_contents)
        if print_progress:
            print(progress.add_datum(
                fraction=float(index_row) / float(number_of_rows)))
    return table_contents
def _mark_in_progress(self, node_id):
    """Mark the node as 'in progress'.

    Callers must hold the lock.

    :param str node_id: The node ID to mark as in progress.
    """
    # remove first: a missing node raises KeyError before any state change
    self.queued.remove(node_id)
    self.in_progress.add(node_id)
def exchange_oauth2_member(access_token, base_url=OH_BASE_URL,
                           all_files=False):
    """Returns data for a specific user, including shared data files.

    :param access_token: This field is the user specific access_token.
    :param base_url: It is this URL `https://www.openhumans.org`.
    :param all_files: when True, follow pagination and accumulate every
        data file entry.
    """
    query = urlparse.urlencode({'access_token': access_token})
    url = urlparse.urljoin(
        base_url,
        '/api/direct-sharing/project/exchange-member/?{}'.format(query))
    member_data = get_page(url)
    returned = member_data.copy()
    if all_files:
        # walk the paginated results, concatenating the data entries
        while member_data['next']:
            member_data = get_page(member_data['next'])
            returned['data'] = returned['data'] + member_data['data']
    logging.debug('JSON data: {}'.format(returned))
    return returned
def _get_blob(self):
if not self.__blob:
self.__blob = self.repo.get_object(self.id)
return self.__blob | read blob on access only because get_object is slow |
def _init_forms(self):
    """Init forms for Add and Edit.

    Extends the base initialisation: when no explicit form was supplied,
    one is generated from the data model's columns via
    GeneralModelConverter.
    """
    super(BaseCRUDView, self)._init_forms()
    conv = GeneralModelConverter(self.datamodel)
    if not self.add_form:
        # auto-generate the Add form from the configured columns
        self.add_form = conv.create_form(
            self.label_columns,
            self.add_columns,
            self.description_columns,
            self.validators_columns,
            self.add_form_extra_fields,
            self.add_form_query_rel_fields,
        )
    if not self.edit_form:
        # auto-generate the Edit form from the configured columns
        self.edit_form = conv.create_form(
            self.label_columns,
            self.edit_columns,
            self.description_columns,
            self.validators_columns,
            self.edit_form_extra_fields,
            self.edit_form_query_rel_fields,
        )
def _aggregrate_scores(its,tss,num_sentences):
final = []
for i,el in enumerate(its):
for j, le in enumerate(tss):
if el[2] == le[2]:
assert el[1] == le[1]
final.append((el[1],i+j,el[2]))
_final = sorted(final, key = lambda tup: tup[1])[:num_sentences]
return sorted(_final, key = lambda tup: tup[0]) | rerank the two vectors by
min aggregrate rank, reorder |
def purge(name, delete_key=True):
    """Destroy the named VM.

    :param name: name of the virtual machine to purge.
    :param delete_key: also delete the minion's salt key afterwards.
    :return: 'good' on success, 'fail' when the VM cannot be found, or
        an error string when the purge command fails.
    """
    ret = {}
    client = salt.client.get_local_client(__opts__['conf_file'])
    data = vm_info(name, quiet=True)
    if not data:
        __jid_event__.fire_event({'error': 'Failed to find VM {0} to purge'.format(name)}, 'progress')
        return 'fail'
    # vm_info returns {host: info}; the single key is the hosting minion
    host = next(six.iterkeys(data))
    try:
        cmd_ret = client.cmd_iter(
            host,
            'virt.purge',
            [name, True],
            timeout=600)
    except SaltClientError as client_error:
        return 'Virtual machine {0} could not be purged: {1}'.format(name, client_error)
    for comp in cmd_ret:
        ret.update(comp)
    if delete_key:
        # remove the minion key so the name can be reused cleanly
        log.debug('Deleting key %s', name)
        skey = salt.key.Key(__opts__)
        skey.delete_key(name)
    __jid_event__.fire_event({'message': 'Purged VM {0}'.format(name)}, 'progress')
    return 'good'
def _sane_version_list(version):
v0 = str(version[0])
if v0:
try:
v0 = v0.lstrip("v").lstrip("V")
v0 = int(v0)
except ValueError:
v0 = None
if v0 is None:
version = [0, 0] + version
else:
version[0] = v0
try:
version[1] = int(version[1])
except ValueError:
version = [version[0], 0] + version[1:]
return version | Ensure the major and minor are int.
Parameters
----------
version: list
Version components
Returns
-------
version: list
List of components where first two components has been sanitised |
def is_service_selected(self, service):
    """Checks if the given service is selected by one of the ARs.

    This is used to make the whole line visible or not.
    """
    service_uid = api.get_uid(service)
    for arnum in range(self.ar_count):
        analyses = self.fieldvalues.get("Analyses-{}".format(arnum))
        if not analyses:
            continue
        if service_uid in map(self.get_service_uid_from, analyses):
            return True
    return False
def trim(self):
    """Return the just data defined by the PE headers, removing any
    overlayed data."""
    overlay_offset = self.get_overlay_data_start_offset()
    if overlay_offset is None:
        # no overlay detected: return a copy of all the data
        return self.__data__[:]
    return self.__data__[:overlay_offset]
def scan_elements(self):
    """Yield each of the elements from the collection, without pulling them
    all into memory.

    .. warning::
        This method is not available on the set collections provided
        by Python, and it may return the same element multiple times.
        See the `Redis SCAN documentation
        <http://redis.io/commands/scan#scan-guarantees>`_ for details.
    """
    # SSCAN iterates the set server-side in chunks; each raw member is
    # unpickled before being handed to the caller.
    for raw in self.redis.sscan_iter(self.key):
        yield self._unpickle(raw)
def could_scope_out(self):
    """Could bubble up from the current scope.

    :return: True if there is nothing being waited for, the waited-for
        target is an end-of-story marker, or we are breaking out of a loop.
    """
    if not self.waiting_for:
        return True
    if isinstance(self.waiting_for, callable.EndOfStory):
        return True
    return self.is_breaking_a_loop()
def iter_predict(self, eval_data, num_batch=None, reset=True, sparse_row_id_fn=None):
    """Iterate over predictions, yielding one tuple per evaluated batch.

    Yields ``(outputs, nbatch, eval_batch)`` where *outputs* is the list
    of module outputs with batch padding stripped, *nbatch* is the batch
    index and *eval_batch* is the batch drawn from *eval_data*.
    """
    # The module must be bound and initialised before prediction.
    assert self.binded and self.params_initialized
    if reset:
        eval_data.reset()
    for nbatch, eval_batch in enumerate(eval_data):
        # Stop early once the requested number of batches was consumed.
        if num_batch is not None and nbatch == num_batch:
            break
        self.prepare(eval_batch, sparse_row_id_fn=sparse_row_id_fn)
        self.forward(eval_batch, is_train=False)
        # Drop the rows that were only added to pad the last, smaller batch.
        pad = eval_batch.pad
        outputs = [out[0:out.shape[0]-pad] for out in self.get_outputs()]
        yield (outputs, nbatch, eval_batch) | Iterates over predictions.
Examples
--------
>>> for pred, i_batch, batch in module.iter_predict(eval_data):
... # pred is a list of outputs from the module
... # i_batch is a integer
... # batch is the data batch from the data iterator
Parameters
----------
eval_data : DataIter
Evaluation data to run prediction on.
num_batch : int
Default is ``None``, indicating running all the batches in the data iterator.
reset : bool
Default is ``True``, indicating whether we should reset the data iter before start
doing prediction.
sparse_row_id_fn : A callback function
The function takes `data_batch` as an input and returns a dict of
str -> NDArray. The resulting dict is used for pulling row_sparse
parameters from the kvstore, where the str key is the name of the param,
and the value is the row id of the param to pull. |
def setuptools_entry(dist, keyword, value):
    """Setuptools entry point for setting version and baking it into package.

    :param dist: the ``setuptools.dist.Distribution`` being configured
    :param keyword: the setup() keyword that triggered this entry point
    :param value: the keyword's value; a falsy value disables the hook
    """
    if not value:
        return
    version = get_version()
    if dist.metadata.version is not None:
        # An explicit version in setup.py conflicts with the derived one.
        s = "Ignoring explicit version='{0}' in setup.py, using '{1}' instead"
        warnings.warn(s.format(dist.metadata.version, version))
    dist.metadata.version = version

    # Chain our version-baking behaviour in front of any custom build_py /
    # sdist commands the project may already have registered.
    ExistingCustomBuildPy = dist.cmdclass.get('build_py', object)

    class KatVersionBuildPy(AddVersionToInitBuildPy, ExistingCustomBuildPy):
        pass  # behaviour comes entirely from the mixin bases

    dist.cmdclass['build_py'] = KatVersionBuildPy

    ExistingCustomSdist = dist.cmdclass.get('sdist', object)

    class KatVersionSdist(AddVersionToInitSdist, ExistingCustomSdist):
        pass  # behaviour comes entirely from the mixin bases

    dist.cmdclass['sdist'] = KatVersionSdist
def _extract(self, path, outdir, filter_func=None):
    """Extract from a zip file, with an optional filter.

    :param function filter_func: optional filter with the filename as the
      parameter.  Returns True if the file should be extracted.
    """
    with open_zip(path) as archive_file:
        for member in archive_file.namelist():
            # Refuse absolute or parent-relative entries: they could
            # escape the destination directory (zip-slip).
            if member.startswith(('/', '..')):
                raise ValueError('Zip file contains unsafe path: {}'.format(member))
            if not filter_func or filter_func(member):
                archive_file.extract(member, outdir)
def after_epoch(self, epoch_data: EpochData, **kwargs) -> None:
    """Compute the specified aggregations and save them to the given epoch data.

    :param epoch_data: epoch data to be processed
    """
    # Persist this hook's statistics first, then let the parent class run
    # any further after-epoch processing.
    self._save_stats(epoch_data)
    super().after_epoch(epoch_data=epoch_data, **kwargs) | Compute the specified aggregations and save them to the given epoch data.
:param epoch_data: epoch data to be processed |
def __diff_internal(self):
    """Differentiate the B-spline once.

    Returns ``((ci, Bi), (cip1, Bip1))`` — two (coefficient, Bspline)
    pairs whose weighted sum evaluates the first derivative.  Keeping
    the Bspline objects enables recursive higher-order differentiation.
    """
    assert self.p > 0, "order of Bspline must be > 0"
    t = self.knot_vector
    p = self.p
    # The derivative of a degree-p spline is expressed on two
    # degree-(p-1) splines built on knot vectors with one end knot dropped.
    Bi = Bspline( t[:-1], p-1 )
    Bip1 = Bspline( t[1:], p-1 )
    numer1 = +p
    numer2 = -p
    denom1 = t[p:-1] - t[:-(p+1)]
    denom2 = t[(p+1):] - t[1:-p]
    # Repeated knots give zero denominators; those terms are defined as 0,
    # so silence the divide-by-zero warnings and mask with np.where.
    with np.errstate(divide='ignore', invalid='ignore'):
        ci = np.where(denom1 != 0., (numer1 / denom1), 0.)
        cip1 = np.where(denom2 != 0., (numer2 / denom2), 0.)
    return ( (ci,Bi), (cip1,Bip1) ) | Differentiate a B-spline once, and return the resulting coefficients and Bspline objects.
This preserves the Bspline object nature of the data, enabling recursive implementation
of higher-order differentiation (see `diff`).
The value of the first derivative of `B` at a point `x` can be obtained as::
def diff1(B, x):
terms = B.__diff_internal()
return sum( ci*Bi(x) for ci,Bi in terms )
Returns:
tuple of tuples, where each item is (coefficient, Bspline object).
See:
`diff`: differentiation of any order >= 0 |
def _connect_mitogen_su(spec):
    """Return ContextService arguments for su as a first class connection."""
    # Pull every connection parameter off the play context spec.
    kwargs = {
        'username': spec.remote_user(),
        'password': spec.password(),
        'python_path': spec.python_path(),
        'su_path': spec.become_exe(),
        'connect_timeout': spec.timeout(),
        'remote_name': get_remote_name(spec),
    }
    return {'method': 'su', 'kwargs': kwargs}
def load(self, definitions):
    """Load the object by opening the URL."""
    url = self.location
    log.debug('importing (%s)', url)
    # Resolve relative locations against the importing document's URL.
    if '://' not in url:
        url = urljoin(definitions.url, url)
    imported = Definitions(url, definitions.options)
    if imported.root.match(Definitions.Tag, wsdlns):
        self.import_definitions(definitions, imported)
    elif imported.root.match(Schema.Tag, Namespace.xsdns):
        self.import_schema(definitions, imported)
    else:
        raise Exception('document at "%s" is unknown' % url)
def in1d_events(ar1, ar2):
    """Same as np.in1d, but exploits the fact that ar1 and ar2 are sorted
    and uses the C++ library, making it much faster.
    """
    # The C++ routine requires contiguous memory layout.
    first = np.ascontiguousarray(ar1)
    second = np.ascontiguousarray(ar2)
    flags = np.empty_like(first, dtype=np.uint8)
    return analysis_functions.get_in1d_sorted(first, second, flags)
def _check_dependencies(string):
    """Check the dependencies constructor.

    Looks to make sure that the dependencies are the first things defined.

    :return: True when a dependency block is present, False otherwise
    :raises ValueError: when dependencies exist but are not first
    """
    opener, closer = '(', ')'
    _check_enclosing_characters(string, opener, closer)
    if opener not in string:
        return False
    # Dependencies must come before anything else in the definition.
    if not string.startswith(opener):
        raise ValueError(DEPENDENCIES_NOT_FIRST)
    return True
def make_wsgi_app(matching, not_found_app=not_found_app):
    """Make a WSGI application from a Matching object.

    Other WSGI applications are registered on each 'case' argument; the
    request is dispatched to the matching case, or to *not_found_app*.
    """
    def wsgi_app(environ, start_response):
        environ['matcha.matching'] = matching
        try:
            case, matched = matching(environ)
        except NotMatched:
            # No route matched: delegate to the fallback application.
            return not_found_app(environ, start_response)
        environ['matcha.matched_dict'] = matched
        return case(environ, start_response)
    return wsgi_app
def sge(self, other):
    """Compare two equal-sized BinWords as signed integers.

    :return: True if ``self`` is greater than or equal to ``other``.
    """
    self._check_match(other)
    lhs = self.to_sint()
    rhs = other.to_sint()
    return lhs >= rhs
def get_step_by(self, **kwargs):
    """Get the first step that matches all the passed named arguments.

    Has the special argument ``index`` which is not present in the real
    step.

    Usage::

        config.get_step_by(name='not found')
        config.get_step_by(index=0)
        config.get_step_by(name="greeting", command='echo HELLO MORDOR')

    :param kwargs: attribute values the step must have
    :return: Step object or None
    :rtype: valohai_yaml.objs.Step|None
    """
    if not kwargs:
        return None
    wanted = kwargs.items()
    for position, step in enumerate(self.steps.values()):
        # Augment the serialized form with the step's position so that
        # ``index=...`` can be used as a match criterion too.
        candidate = dict(step.serialize(), index=position)
        if all(pair in candidate.items() for pair in wanted):
            return step
    return None
def bookmark_show(bookmark_id_or_name):
    """Executor for `globus bookmark show`."""
    client = get_client()
    bookmark = resolve_id_or_name(client, bookmark_id_or_name)
    # In verbose mode show the full record; otherwise print a single
    # "endpoint_id:path" line.
    if is_verbose():
        compact = None
    else:
        compact = "{}:{}".format(bookmark["endpoint_id"], bookmark["path"])
    formatted_print(
        bookmark,
        text_format=FORMAT_TEXT_RECORD,
        fields=(
            ("ID", "id"),
            ("Name", "name"),
            ("Endpoint ID", "endpoint_id"),
            ("Path", "path"),
        ),
        simple_text=compact,
    )
def correct_rytov_sc_input(radius_sc, sphere_index_sc, medium_index,
                           radius_sampling):
    """Inverse correction of refractive index and radius for Rytov.

    This method returns the inverse of :func:`correct_rytov_output`.

    Parameters
    ----------
    radius_sc: float
        Systematically corrected radius of the sphere [m]
    sphere_index_sc: float
        Systematically corrected refractive index of the sphere
    medium_index: float
        Refractive index of the surrounding medium
    radius_sampling: int
        Number of pixels used to sample the sphere radius when
        computing the Rytov field.

    Returns
    -------
    radius: float
        Fitted radius of the sphere [m]
    sphere_index: float
        Fitted refractive index of the sphere

    See Also
    --------
    correct_rytov_output: the inverse of this method
    """
    params = get_params(radius_sampling)
    na = params["na"]
    nb = params["nb"]
    # Invert the quadratic correction applied to the refractive index.
    prefac = medium_index / (2 * na)
    sm = 2 * na - nb - 1
    rt = nb**2 - 4 * na + 2 * nb + 1 + 4 / medium_index * na * sphere_index_sc
    sphere_index = prefac * (sm + np.sqrt(rt))
    # Undo the radius correction polynomial, which is evaluated at the
    # relative refractive index difference x = n/n_med - 1.
    x = sphere_index / medium_index - 1
    poly = params["ra"] * x**2 + params["rb"] * x + params["rc"]
    radius = radius_sc / poly
    return radius, sphere_index
def set_using_network_time(enable):
    """Set whether network time is on or off.

    :param enable: True to enable, False to disable. Can also use 'on' or 'off'
    :type: str bool
    :return: True if successful, False if not
    :rtype: bool
    :raises: CommandExecutionError on failure

    CLI Example:

    .. code-block:: bash

        salt '*' timezone.set_using_network_time True
    """
    state = salt.utils.mac_utils.validate_enabled(enable)
    salt.utils.mac_utils.execute_return_success(
        'systemsetup -setusingnetworktime {0}'.format(state))
    # Confirm the system actually reports the requested state.
    current = salt.utils.mac_utils.validate_enabled(get_using_network_time())
    return state == current
def fetch_rrlyrae_lc_params(**kwargs):
    """Fetch data from table 2 of Sesar 2010.

    This table includes observationally-derived parameters for all the
    Sesar 2010 lightcurves.
    """
    save_loc = _get_download_or_cache('table2.dat.gz', **kwargs)
    # One amplitude (A), epoch (0), error (E) and template (T) column per
    # ugriz band, preceded by the object id, RR Lyrae type and period.
    dtype = [('id', 'i'), ('type', 'S2'), ('P', 'f')]
    for band in 'ugriz':
        dtype.extend((band + suffix, 'f') for suffix in 'A0ET')
    return np.loadtxt(save_loc, dtype=dtype)
def calc_pident_ignore_gaps(a, b):
    """Calculate percent identity between two aligned sequences.

    Columns where either sequence has a gap ('-' or '.') are ignored.

    :param a: first aligned sequence (string or iterable of characters)
    :param b: second aligned sequence
    :return: percent identity in [0, 100]; 0 if there are no gap-free
        columns to compare
    """
    gaps = ('-', '.')
    matches = 0
    mismatches = 0
    for x, y in zip(a, b):
        # Skip any column containing a gap in either sequence.
        if x in gaps or y in gaps:
            continue
        if x == y:
            matches += 1
        else:
            mismatches += 1
    total = matches + mismatches
    # Explicit guard replaces the original bare `except:`; division by
    # zero was the only error the catch-all could legitimately swallow.
    if total == 0:
        return 0
    return float(matches) / total * 100
def field_data(self, field_data):
    """Set field_data.

    Deprecated in favor of a 'field-data' service.
    """
    # stacklevel=2 points the warning at the caller of this setter.
    warnings.warn("Runtime.field_data is deprecated", FieldDataDeprecationWarning, stacklevel=2)
    self._deprecated_per_instance_field_data = field_data | Set field_data.
Deprecated in favor of a 'field-data' service. |
def _update_url_map(self):
    """Assemble any dynamic or configurable URLs."""
    if HAS_WEBSOCKETS:
        # Only expose the websocket endpoint when the dependency exists.
        self.url_map['ws'] = WebsocketEndpoint
    webhook_path = self.apiopts.get('webhook_url', 'hook').lstrip('/')
    self.url_map[webhook_path] = Webhook
    app_path = self.apiopts.get('app_path', 'app').lstrip('/')
    self.url_map[app_path] = App
def setUp(self):
    """Prepare to run a test case.

    Clears artifacts left by previous runs, then initialises the harness
    service, golden devices, the DUT and the RF shield.
    """
    # HarnessCase itself is abstract; only concrete subclasses run setup.
    if self.__class__ is HarnessCase:
        return
    logger.info('Setting up')
    # Remove report and capture artifacts from earlier runs (Windows shell).
    logger.info('Deleting all .pdf')
    os.system('del /q "%HOMEDRIVE%%HOMEPATH%\\Downloads\\NewPdf_*.pdf"')
    logger.info('Deleting all .xlsx')
    os.system('del /q "%HOMEDRIVE%%HOMEPATH%\\Downloads\\ExcelReport*.xlsx"')
    logger.info('Deleting all .pcapng')
    os.system('del /q "%s\\Captures\\*.pcapng"' % settings.HARNESS_HOME)
    logger.info('Empty files in temps')
    os.system('del /q "%s\\Thread_Harness\\temp\\*.*"' % settings.HARNESS_HOME)
    os.system('mkdir %s' % self.result_dir)
    # Bring up the harness and all participating devices, in order.
    self._init_harness()
    self._init_devices()
    self._init_dut()
    self._init_rf_shield() | Prepare to run test case.
Start harness service, init golden devices, reset DUT and open browser. |
def addTable(D):
    """Add any table type to the given dataset.

    Uses interactive prompts to determine index locations and table type.

    Note: to add a whole model, use the addModel() function instead.

    :param dict D: Metadata (dataset)
    :return dict D: Metadata (dataset)
    """
    # Map menu choice -> table type name used for placement.
    _swap = {
        "1": "measurement",
        "2": "summary",
        "3": "ensemble",
        "4": "distribution"
    }
    print("What type of table would you like to add?\n"
          "1: measurement\n"
          "2: summary\n"
          "3: ensemble (under development)\n"
          "4: distribution (under development)\n"
          "\n Note: if you want to add a whole model, use the addModel() function")
    _ans = input(">")
    if _ans in ["1", "2"]:
        # Load the table values from a user-selected CSV and place them.
        print("Locate the CSV file with the values for this table: ")
        _path, _files = browse_dialog_file()
        _path = _confirm_file_path(_files)
        _values = read_csv_from_file(_path)
        _table = _build_table(_values)
        _placement = _prompt_placement(D, _swap[_ans])
        D = _put_table(D, _placement, _table)
    elif _ans in ["3", "4"]:
        print("I don't know how to do that yet.")
    else:
        print("That's not a valid option")
    return D
def get_messages(self):
    """Return a list of Message objects representing the messages being
    transported.
    """
    comments = self.data["comments"]["data"]
    return [Message(comment, self) for comment in comments]
def get_version(self, layer_id, version_id, expand=[]):
    """Get a specific version of a layer.

    NOTE(review): the mutable default for ``expand`` is kept for interface
    compatibility; it is only read here, never mutated.
    """
    url = self.client.get_url(
        'VERSION', 'GET', 'single',
        {'layer_id': layer_id, 'version_id': version_id})
    return self._get(url, expand=expand)
def read_configuration(
        filepath, find_others=False, ignore_option_errors=False):
    """Read given configuration file and returns options from it as a dict.

    :param str|unicode filepath: Path to configuration file
        to get options from.
    :param bool find_others: Whether to search for other configuration files
        which could be on in various places.
    :param bool ignore_option_errors: Whether to silently ignore
        options, values of which could not be resolved (e.g. due to
        exceptions in directives such as file:, attr:, etc.).
        If False exceptions are propagated as expected.
    :rtype: dict
    """
    from setuptools.dist import Distribution, _Distribution

    filepath = os.path.abspath(filepath)
    if not os.path.isfile(filepath):
        raise DistutilsFileError(
            'Configuration file %s does not exist.' % filepath)

    # Option values may contain paths relative to the configuration file,
    # so parse with the file's directory as the working directory.
    previous_directory = os.getcwd()
    os.chdir(os.path.dirname(filepath))
    try:
        dist = Distribution()
        if find_others:
            filenames = dist.find_config_files()
        else:
            filenames = []
        if filepath not in filenames:
            filenames.append(filepath)
        _Distribution.parse_config_files(dist, filenames=filenames)
        handlers = parse_configuration(
            dist, dist.command_options,
            ignore_option_errors=ignore_option_errors)
    finally:
        os.chdir(previous_directory)
    return configuration_to_dict(handlers)
def is_job_complete(job_id, conn=None):
    """Check whether the Brain.Jobs status of a job is completed.

    Completed is defined in statics as Done|Stopped|Error.

    :param job_id: <str> id for the job
    :param conn: (optional)<connection> to run on
    :return: <dict> the job document if complete, otherwise False
    """
    job = RBJ.get(job_id).run(conn)
    if not job:
        return False
    if job.get(STATUS_FIELD) not in COMPLETED:
        return False
    return job
def get_volume(self):
    """Return the current volume."""
    log.debug("getting volumne...")
    # DEVICE_URLS maps an action name to its (command, url) pair.
    request = DEVICE_URLS["get_volume"]
    return self._exec(request[0], request[1])
def complete_query(
    self,
    name,
    query,
    page_size,
    language_codes=None,
    company_name=None,
    scope=None,
    type_=None,
    retry=google.api_core.gapic_v1.method.DEFAULT,
    timeout=google.api_core.gapic_v1.method.DEFAULT,
    metadata=None,
):
    """Complete the given query prefix with keyword suggestions.

    Intended for use by a job-search auto-complete search box; returns a
    ``CompleteQueryResponse`` from the Talent API.
    """
    # Lazily wrap the transport method with the configured retry/timeout
    # defaults the first time this RPC is used, then cache the callable.
    if "complete_query" not in self._inner_api_calls:
        self._inner_api_calls[
            "complete_query"
        ] = google.api_core.gapic_v1.method.wrap_method(
            self.transport.complete_query,
            default_retry=self._method_configs["CompleteQuery"].retry,
            default_timeout=self._method_configs["CompleteQuery"].timeout,
            client_info=self._client_info,
        )
    # Build the protobuf request; ``type_`` maps to the reserved field
    # name ``type`` on the message.
    request = completion_service_pb2.CompleteQueryRequest(
        name=name,
        query=query,
        page_size=page_size,
        language_codes=language_codes,
        company_name=company_name,
        scope=scope,
        type=type_,
    )
    return self._inner_api_calls["complete_query"](
        request, retry=retry, timeout=timeout, metadata=metadata
    ) | Completes the specified prefix with keyword suggestions.
Intended for use by a job search auto-complete search box.
Example:
>>> from google.cloud import talent_v4beta1
>>>
>>> client = talent_v4beta1.CompletionClient()
>>>
>>> name = client.project_path('[PROJECT]')
>>>
>>> # TODO: Initialize `query`:
>>> query = ''
>>>
>>> # TODO: Initialize `page_size`:
>>> page_size = 0
>>>
>>> response = client.complete_query(name, query, page_size)
Args:
name (str): Required.
Resource name of project the completion is performed within.
The format is "projects/{project\_id}", for example,
"projects/api-test-project".
query (str): Required.
The query used to generate suggestions.
The maximum number of allowed characters is 255.
page_size (int): Required.
Completion result count.
The maximum allowed page size is 10.
language_codes (list[str]): Optional.
The list of languages of the query. This is the BCP-47 language code,
such as "en-US" or "sr-Latn". For more information, see `Tags for
Identifying Languages <https://tools.ietf.org/html/bcp47>`__.
For ``CompletionType.JOB_TITLE`` type, only open jobs with the same
``language_codes`` are returned.
For ``CompletionType.COMPANY_NAME`` type, only companies having open
jobs with the same ``language_codes`` are returned.
For ``CompletionType.COMBINED`` type, only open jobs with the same
``language_codes`` or companies having open jobs with the same
``language_codes`` are returned.
The maximum number of allowed characters is 255.
company_name (str): Optional.
If provided, restricts completion to specified company.
The format is "projects/{project\_id}/companies/{company\_id}", for
example, "projects/api-test-project/companies/foo".
scope (~google.cloud.talent_v4beta1.types.CompletionScope): Optional.
The scope of the completion. The defaults is ``CompletionScope.PUBLIC``.
type_ (~google.cloud.talent_v4beta1.types.CompletionType): Optional.
The completion topic. The default is ``CompletionType.COMBINED``.
retry (Optional[google.api_core.retry.Retry]): A retry object used
to retry requests. If ``None`` is specified, requests will not
be retried.
timeout (Optional[float]): The amount of time, in seconds, to wait
for the request to complete. Note that if ``retry`` is
specified, the timeout applies to each individual attempt.
metadata (Optional[Sequence[Tuple[str, str]]]): Additional metadata
that is provided to the method.
Returns:
A :class:`~google.cloud.talent_v4beta1.types.CompleteQueryResponse` instance.
Raises:
google.api_core.exceptions.GoogleAPICallError: If the request
failed for any reason.
google.api_core.exceptions.RetryError: If the request failed due
to a retryable error and retry attempts failed.
ValueError: If the parameters are invalid. |
def _get_stddev_deep_soil(self, mag, imt):
if mag > 7:
mag = 7
C = self.COEFFS_SOIL[imt]
return C['sigma0'] + C['magfactor'] * mag | Calculate and return total standard deviation for deep soil sites.
Implements formulae from the last column of table 4. |
def device_status(self):
    """Status of device."""
    status = {}
    status['active'] = self.device['active']
    status['offline'] = self.device['offline']
    status['last_update'] = self.last_update
    status['battery_level'] = self.battery_level
    return status
def _add_data(self, plotter_cls, *args, **kwargs):
    """Visualize this data array.

    Instantiates *plotter_cls* with the wrapped data array (``self._da``)
    plus any extra positional/keyword arguments, and returns the plotter.
    """
    return plotter_cls(self._da, *args, **kwargs) | Visualize this data array
Parameters
----------
%(Plotter.parameters.no_data)s
Returns
-------
psyplot.plotter.Plotter
The plotter that visualizes the data |
def easybake(css_in, html_in=sys.stdin, html_out=sys.stdout, last_step=None,
             coverage_file=None, use_repeatable_ids=False):
    """Process the given HTML file stream with the css stream."""
    html_doc = etree.parse(html_in)
    # Apply the CSS recipe to the parsed document, up to last_step if given.
    baker = Oven(css_in, use_repeatable_ids)
    baker.bake(html_doc, last_step)
    serialized = etree.tostring(html_doc, method="xml").decode('utf-8')
    print(serialized, file=html_out)
    if coverage_file:
        # Emit an lcov-style coverage record for the CSS rules that fired.
        print('SF:{}'.format(css_in.name), file=coverage_file)
        print(baker.get_coverage_report(), file=coverage_file)
        print('end_of_record', file=coverage_file)
def write(self, session, directory, name, replaceParamFile=None, **kwargs):
    """Wrapper for the GsshaPyFileObjectBase write method.

    Only delegates to the parent writer when raster content is present.
    """
    # Skip writing entirely when there is no raster payload at all.
    if self.raster is not None or self.rasterText is not None:
        super(RasterMapFile, self).write(session, directory, name, replaceParamFile, **kwargs) | Wrapper for GsshaPyFileObjectBase write method
def _authenticate(self):
opts = {'domain': self._domain}
opts.update(self._auth)
response = self._api.domain.info(opts)
self._validate_response(
response=response, message='Failed to authenticate')
self.domain_id = 1
return True | run any request against the API just to make sure the credentials
are valid
:return bool: success status
:raises Exception: on error |
def unhook_symbol(self, symbol_name):
    """Remove the hook on a symbol.

    Refuses to unhook symbols provided by the extern object, since that
    would leave the analysis unable to cope with calls to the symbol.

    :return: True on success, False if the symbol is unknown or external.
    """
    symbol = self.loader.find_symbol(symbol_name)
    if symbol is None:
        l.warning("Could not find symbol %s", symbol_name)
        return False
    if symbol.owner is self.loader._extern_object:
        l.warning("Refusing to unhook external symbol %s, replace it with another hook if you want to change it",
                  symbol_name)
        return False
    addr_and_size = self.simos.prepare_function_symbol(
        symbol_name, basic_addr=symbol.rebased_addr)
    self.unhook(addr_and_size[0])
    return True
def delete(self, json=None):
    """Send a DELETE request and return the JSON decoded result.

    Args:
        json (dict, optional): Object to encode and send in request.

    Returns:
        mixed: JSON decoded response data.
    """
    # Delegate to the shared request helper with this resource's endpoint.
    target = self.endpoint
    return self._call('delete', url=target, json=json)
def fill_altgoids(go2obj):
    """Given go2obj keyed by primary GO IDs, add entries for every
    alternate GO ID.

    Mutates *go2obj* in place so each alternate ID maps to its term object.
    """
    # Collect the alternate-id mapping first so the dict is never mutated
    # while its values are being iterated.
    alt_entries = {}
    for term in go2obj.values():
        for alt_id in term.alt_ids:
            alt_entries[alt_id] = term
    go2obj.update(alt_entries)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.