Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
367,800 | def validate_key(key: str):
if "//" in key:
raise DoubleSlashKeyError(key)
elif normpath(key) != key:
raise NonNormalisedKeyError(key) | Validates the given key.
:param key: the key to validate
:raises InvalidKeyError: raised if the given key is invalid |
367,801 | def _init_loaders(self) -> None:
for loader in settings.I18N_TRANSLATION_LOADERS:
loader_class = import_class(loader[])
instance = loader_class()
instance.on_update(self.update)
run(instance.load(**loader[])) | This creates the loaders instances and subscribes to their updates. |
367,802 | def _parse_remote_model(self, context):
if not context.remote_endpoints:
raise Exception(, jsonpickle.encode(context, unpicklable=False))
resource = context.remote_endpoints[0]
dictionary = jsonpickle.decode(resource.app_context.deployed_app_json)
holder = DeployDat... | parse the remote resource model and adds its full name
:type context: models.QualiDriverModels.ResourceRemoteCommandContext |
367,803 | def add_permission_view_menu(self, permission_name, view_menu_name):
if not (permission_name and view_menu_name):
return None
pv = self.find_permission_view_menu(
permission_name,
view_menu_name
)
if pv:
return pv
vm = self... | Adds a permission on a view or menu to the backend
:param permission_name:
name of the permission to add: 'can_add','can_edit' etc...
:param view_menu_name:
name of the view menu to add |
367,804 | def firmware_manifest_destroy(self, manifest_id, **kwargs):
kwargs[] = True
if kwargs.get():
return self.firmware_manifest_destroy_with_http_info(manifest_id, **kwargs)
else:
(data) = self.firmware_manifest_destroy_with_http_info(manifest_id, **kwargs)
... | Delete a manifest # noqa: E501
Delete a firmware manifest. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass asynchronous=True
>>> thread = api.firmware_manifest_destroy(manifest_id, asynchronous=True)
>>> r... |
367,805 | def get_copy(dict_, key, default=None):
value = dict_.get(key, default)
if value:
return deepcopy(value)
return value | Looks for a key in a dictionary, if found returns
a deepcopied value, otherwise returns default value |
367,806 | def _get_tables(self, base_dir):
table_dict = {}
for table in self.metadata[]:
if table[]:
relative_path = os.path.join(base_dir, self.metadata[], table[])
data_table = pd.read_csv(relative_path)
pii_fields = self._get_pii_fields(tabl... | Load the contents of meta_file and the corresponding data.
If fields containing Personally Identifiable Information are detected in the metadata
they are anonymized before asign them into `table_dict`.
Args:
base_dir(str): Root folder of the dataset files.
Returns:
... |
367,807 | def non_increasing(values):
return all(x >= y for x, y in zip(values, values[1:])) | True if values are not increasing. |
367,808 | def _edit(self, filename, line=None):
if self.custom_edit:
self.custom_edit_requested.emit(filename, line)
elif not self.editor:
self._append_plain_text(
)
else:
try:
filename = % filename
if l... | Opens a Python script for editing.
Parameters:
-----------
filename : str
A path to a local system file.
line : int, optional
A line of interest in the file. |
367,809 | async def ssh_exec(server, cmd, timeout=10, **ssh_kwargs):
conn = await asyncio.wait_for(asyncssh.connect(server, **ssh_kwargs),
timeout=timeout)
ret = await conn.run(cmd)
conn.close()
return ret | Execute a command on a given server using asynchronous SSH-connection.
The connection to the server is wrapped in :func:`asyncio.wait_for` and
given :attr:`timeout` is applied to it. If the server is not reachable
before timeout expires, :exc:`asyncio.TimeoutError` is raised.
:param str server: Addres... |
367,810 | def RemoveScanNode(self, path_spec):
scan_node = self._scan_nodes.get(path_spec, None)
if not scan_node:
return None
if scan_node.sub_nodes:
raise RuntimeError()
parent_scan_node = scan_node.parent_node
if parent_scan_node:
parent_scan_node.sub_nodes.remove(scan_node)
i... | Removes a scan node of a certain path specification.
Args:
path_spec (PathSpec): path specification.
Returns:
SourceScanNode: parent scan node or None if not available.
Raises:
RuntimeError: if the scan node has sub nodes. |
367,811 | def _config_net_topology(self, conf):
conf = self._init_net_specs(conf)
mgmts = self._select_mgmt_networks(conf)
self._validate_netconfig(conf)
allocated_subnets, conf = self._allocate_subnets(conf)
try:
self._add_mgmt_to_domains(conf, mgmts)
self... | Initialize and populate all the network related elements, like
reserving ips and populating network specs of the given confiiguration
spec
Args:
conf (dict): Configuration spec to initalize
Returns:
None |
367,812 | def get_sum_w2(self, ix, iy=0, iz=0):
if self.GetSumw2N() == 0:
raise RuntimeError(
"Attempting to access Sumw2 in histogram "
"where weights were not stored")
xl = self.nbins(axis=0, overflow=True)
yl = self.nbins(axis=1, overflow=True)
... | Obtain the true number of entries in the bin weighted by w^2 |
367,813 | def paginate_announcements_list(request, context, items):
if "start" in request.GET:
try:
start_num = int(request.GET.get("start"))
except ValueError:
start_num = 0
else:
start_num = 0
display_num = 10
end_num = start_num + display_num
prev... | ***TODO*** Migrate to django Paginator (see lostitems) |
367,814 | def _make_renderer(self, at_paths, at_encoding, **kwargs):
for eopt in ("file_encoding", "string_encoding"):
default = self._roptions.get(eopt, at_encoding.lower())
self._roptions[eopt] = kwargs.get(eopt, default)
pkey = "search_dirs"
paths = kwargs.get(pkey, []... | :param at_paths: Template search paths
:param at_encoding: Template encoding
:param kwargs: Keyword arguments passed to the template engine to
render templates with specific features enabled. |
367,815 | def get_object_info(self):
objectinfo = str(self.__class__).replace(">", "")
objectinfo = objectinfo.replace("class ", "")
objectinfo = objectinfo.replace("'", "")
objectinfo += " object at 0x%x>" % id(self)
return objectinfo | Returns object info in following form <module.class object at address> |
367,816 | def get_name_deadlines( self, name_rec, namespace_rec, block_number ):
if namespace_rec[] != NAMESPACE_READY:
return None
namespace_id = namespace_rec[]
namespace_lifetime_multiplier = get_epoch_namespace_lifetime_multiplier( block_number, namespace_id )
... | Get the expiry and renewal deadlines for a (registered) name.
NOTE: expire block here is NOT the block at which the owner loses the name, but the block at which lookups fail.
The name owner has until renewal_deadline to renew the name.
Return {'expire_block': ..., 'renewal_deadline': ...} on s... |
367,817 | def minion_publish(self, load):
if not self.__verify_minion_publish(load):
return {}
pub_load = {
: load[],
: salt.utils.args.parse_input(
load[],
no_parse=load.get(, [])),
: load.get(, ),
: loa... | Publish a command initiated from a minion, this method executes minion
restrictions so that the minion publication will only work if it is
enabled in the config.
The configuration on the master allows minions to be matched to
salt functions, so the minions can only publish allowed salt f... |
367,818 | def compute_tensor(self, x):
class_matrix = self.target_tensor // self.output_size
class_vector = class_matrix.reshape((-1,))
target_matrix = self.target_tensor % self.output_size
target_vector = target_matrix.reshape((-1,))
input_matrix = x.re... | :param x: (batch, time, vec) |
367,819 | def purge_db(self):
with self.engine.begin() as db:
purge_remote_checkpoints(db, self.user_id) | Purge all database records for the current user. |
367,820 | def delete(self):
headers = self._default_headers()
return self._request(self.name,
ok_status=None,
data=None,
headers=headers,
method="DELETE") | Delete template config for specified template name.
.. __: https://api.go.cd/current/#delete-a-template
Returns:
Response: :class:`gocd.api.response.Response` object |
367,821 | def _recompute_transform(self):
center = (self.convert_xunits(self.center[0]),
self.convert_yunits(self.center[1]))
width = self.width
height = self.height
trans = artist.Artist.get_transform(self)
self._patch_transform = transforms.Affine2D() \
.scale(width * 0.5 * self.scale, height * 0.5* sel... | NOTE: This cannot be called until after this has been added
to an Axes, otherwise unit conversion will fail. This
maxes it very important to call the accessor method and
not directly access the transformation member variable. |
367,822 | def hla_choices(orig_hla, min_parts=2):
yield orig_hla
try:
int(orig_hla[-1])
except ValueError:
yield orig_hla[:-1]
hla_parts = orig_hla.split(":")
for sub_i in range(len(hla_parts) - min_parts + 1):
yield ":".join(hla_parts[:len(hla_parts) - sub_i]) | Provide a range of options for HLA type, with decreasing resolution. |
367,823 | def process_belrdf(rdf_str, print_output=True):
g = rdflib.Graph()
try:
g.parse(data=rdf_str, format=)
except ParseError as e:
logger.error( % e)
return None
bp = BelRdfProcessor(g)
bp.get_complexes()
bp.get_activating_subs()
bp.get_modifications()
bp.ge... | Return a BelRdfProcessor for a BEL/RDF string.
Parameters
----------
rdf_str : str
A BEL/RDF string to be processed. This will usually come from reading
a .rdf file.
Returns
-------
bp : BelRdfProcessor
A BelRdfProcessor object which contains INDRA Statements in
... |
367,824 | def morlet(freq, s_freq, ratio=5, sigma_f=None, dur_in_sd=4, dur_in_s=None,
normalization=, zero_mean=False):
if sigma_f is None:
sigma_f = freq / ratio
else:
ratio = freq / sigma_f
sigma_t = 1 / (2 * pi * sigma_f)
if ratio < 5 and not zero_mean:
lg.info(t have z... | Create a Morlet wavelet.
Parameters
----------
freq : float
central frequency of the wavelet
s_freq : int
sampling frequency
ratio : float
ratio for a wavelet family ( = freq / sigma_f)
sigma_f : float
standard deviation of the wavelet in frequency domain
dur... |
367,825 | def post(cls, payload):
slpk_attr_name = "sequencing_library_prep_kit_id"
paired_bc_id_attr_name = "paired_barcode_id"
seq_reg = re.compile("^[ACGTN]+$")
if paired_bc_id_attr_name in payload:
try:
index1, index2 = payload[paired_bc_id_attr_name].upper... | A wrapper over Model.post() that handles the case where a Library has a PairedBarcode
and the user may have supplied the PairedBarcode in the form of index1-index2, i.e.
GATTTCCA-GGCGTCGA. This isn't the PairedBarcode's record name or a record ID, thus
Model.post() won't be able to figure out ... |
367,826 | def maketrans(fromstr, tostr):
if len(fromstr) != len(tostr):
raise ValueError, "maketrans arguments must have same length"
global _idmapL
if not _idmapL:
_idmapL = list(_idmap)
L = _idmapL[:]
fromstr = map(ord, fromstr)
for i in range(len(fromstr)):
L[fromstr[i]] = ... | maketrans(frm, to) -> string
Return a translation table (a string of 256 bytes long)
suitable for use in string.translate. The strings frm and to
must be of the same length. |
367,827 | def build_image(self, conf, pushing=False):
with conf.make_context() as context:
try:
stream = BuildProgressStream(conf.harpoon.silent_build)
with self.remove_replaced_images(conf) as info:
cached = NormalBuilder().build(conf, context, str... | Build this image |
367,828 | def get_dependencies(ireq, sources=None, parent=None):
if not isinstance(ireq, pip_shims.shims.InstallRequirement):
name = getattr(
ireq, "project_name",
getattr(ireq, "project", ireq.name),
)
version = getattr(ireq, "version", None)
if not version:
... | Get all dependencies for a given install requirement.
:param ireq: A single InstallRequirement
:type ireq: :class:`~pip._internal.req.req_install.InstallRequirement`
:param sources: Pipfile-formatted sources, defaults to None
:type sources: list[dict], optional
:param parent: The parent of this lis... |
367,829 | def words_for_language(language_code):
word_groups = word_groups_for_language(language_code)
words = []
for group in word_groups:
words.extend(word_groups[group].keys())
return words | Return the math words for a language code.
The language_code should be an ISO 639-2 language code.
https://www.loc.gov/standards/iso639-2/php/code_list.php |
367,830 | def _load_single_patient_cufflinks(self, patient, filter_ok):
data = pd.read_csv(patient.tumor_sample.cufflinks_path, sep="\t")
data["patient_id"] = patient.id
if filter_ok:
data = data[data["FPKM_status"] == "OK"]
return data | Load Cufflinks gene quantification given a patient
Parameters
----------
patient : Patient
filter_ok : bool, optional
If true, filter Cufflinks data to row with FPKM_status == "OK"
Returns
-------
data: Pandas dataframe
Pandas dataframe o... |
367,831 | def get_unread_message_count_between(parser, token):
try:
tag_name, arg = token.contents.split(None, 1)
except ValueError:
raise template.TemplateSyntaxError("%s tag requires arguments" % token.contents.split()[0])
m = re.search(r, arg)
if not m:
raise template.TemplateSynta... | Returns the unread message count between two users.
Syntax::
{% get_unread_message_count_between [user] and [user] as [var_name] %}
Example usage::
{% get_unread_message_count_between funky and wunki as message_count %} |
367,832 | def setup(self):
from javatools import cheetah
options = self.options
datadir = getattr(options, "html_copy_data", None)
if getattr(options, "html_data_copied", False) or not datadir:
return
datasrc = join(cheetah.__path... | copies default stylesheets and javascript files if necessary, and
appends them to the options |
367,833 | def Copy(self, field_number=None):
new_args = self._kwargs.copy()
if field_number is not None:
new_args["field_number"] = field_number
return ProtoRDFValue(
rdf_type=self.original_proto_type_name,
default=getattr(self, "default", None),
**new_args) | Returns descriptor copy, optionally changing field number. |
367,834 | def _path_to_baton_json(self, path: str) -> Dict:
entity = self._create_entity_with_path(path)
return self._entity_to_baton_json(entity) | Converts a path to the type of iRODS entity the mapper deals with, to its JSON representation.
:param path: the path to convert
:return: the JSON representation of the path |
367,835 | def header(self):
s metadata as it would appear in a PLY
header.
element %s %delementcomment \n'.join(lines) | Format this element's metadata as it would appear in a PLY
header. |
367,836 | def _copytoscratch(self, maps):
try:
for p in self.inputs:
self._scratch[p][:] = maps[p]
except ValueError:
invals = maps[list(self.inputs)[0]]
if isinstance(invals, numpy.ndarray):
shape ... | Copies the data in maps to the scratch space.
If the maps contain arrays that are not the same shape as the scratch
space, a new scratch space will be created. |
367,837 | def _get_grain(name, proxy=None):
grains = _retrieve_grains_cache(proxy=proxy)
if grains.get(, False) and grains.get(, {}):
return grains.get().get(name) | Retrieves the grain value from the cached dictionary. |
367,838 | def add_alignment_errors(self,ae):
self._target_context_errors = None
self._query_context_errors = None
self._alignment_errors.append(ae)
self._general_errors.add_alignment_errors(ae) | If you alread have thealignment errors, add them for profile construction. |
367,839 | def height(self):
if len(self.coords) <= 1:
return 0
return np.max(self.yy) - np.min(self.yy) | Get the height of a bounding box encapsulating the line. |
367,840 | def _determine_tool(files):
for file in files:
linker_ext = file.split()[-1]
if "sct" in linker_ext or "lin" in linker_ext:
yield (str(file),"uvision")
elif "ld" in linker_ext:
yield (str(file),"make_gcc_arm")
elif "icf" in linker_ext:
yield (... | Yields tuples in the form of (linker file, tool the file links for |
367,841 | def follow_path(file_path, buffering=-1, encoding=None, errors=):
if encoding is None:
encoding = locale.getpreferredencoding()
class FollowPathGenerator(object):
def __init__(self):
if os.path.isfile(file_path):
self.following_file = io.open(file_path, , buffer... | Similar to follow, but also looks up if inode of file is changed
e.g. if it was re-created.
Returned generator yields strings encoded by using encoding.
If encoding is not specified, it defaults to locale.getpreferredencoding()
>>> import io
>>> import os
>>> f = io.open('test_follow_path.txt'... |
367,842 | def get_class_from_settings_from_apps(settings_key):
cls_path = getattr(settings, settings_key, None)
if not cls_path:
raise NotImplementedError()
try:
app_label = cls_path.split()[-2]
model_name = cls_path.split()[-1]
except ValueError:
raise ImproperlyConfigured(... | Try and get a class from a settings path by lookin in installed apps. |
367,843 | def init(ctx):
git = ctx.obj[]
click.echo("Installing hooks...", nl=False)
for old in []:
path = os.path.join(git.path, , old)
if os.path.exists(path):
os.remove(path)
for new in [, ]:
git.install_hook(new, hooks_manager.get_hook(new))
click.echo("Done."... | Initialize the project for use with EasyCI. This installs the necessary
git hooks (pre-commit + pre-push) and add a config file if one does not
already exists. |
367,844 | def present_params(paramlist, spacing = 0, maxchars=90, linecont=", &"):
line = []
length = 0
result = []
for param in paramlist:
extra = len(list(param))
if length + extra + 2 + spacing > maxchars:
result.append(", ".join(line) + linecont)
line = ... | Creates the (paramlist) for a method call formatted nicely for calls
with lots of parameters. |
367,845 | def _create_filter(self, condition):
comparison = re.match(r"^(%s)(<[>=]?|>=?|!=|~)(.*)$" % self.ident_re, condition)
if comparison:
name, comparison, values = comparison.groups()
if values and values[0] in "+-":
raise FilterError("Comparison ope... | Create a filter object from a textual condition. |
367,846 | def serialize(self, private=True):
if private and not self.private_key:
raise ValueError("Cannot serialize a public key as private")
if private:
network_version = long_to_hex(
self.network.EXT_SECRET_KEY, 8)
else:
network_version = lo... | Serialize this key.
:param private: Whether or not the serialized key should contain
private information. Set to False for a public-only representation
that cannot spend funds but can create children. You want
private=False if you are, for example, running an e-commerce
... |
367,847 | def check_grid_mapping(self, ds):
ret_val = []
grid_mapping_variables = cfutil.get_grid_mapping_variables(ds)
for variable in ds.get_variables_by_attributes(grid_mapping=lambda x: x is not None):
grid_mapping = getattr(variable, , None)
defines_grid_ma... | 5.6 When the coordinate variables for a horizontal grid are not
longitude and latitude, it is required that the true latitude and
longitude coordinates be supplied via the coordinates attribute. If in
addition it is desired to describe the mapping between the given
coordinate variables a... |
367,848 | def parse(self, xmp):
tree = etree.fromstring(xmp)
rdf_tree = tree.find(RDF_NS + )
meta = defaultdict(dict)
for desc in rdf_tree.findall(RDF_NS + ):
for el in desc.getchildren():
ns, tag = self._parse_tag(el)
value = self._parse_value(... | Run parser and return a dictionary of all the parsed metadata. |
367,849 | def check_permission(permission, obj):
mtool = api.get_tool()
object = api.get_object(obj)
return mtool.checkPermission(permission, object) | Returns if the current user has rights for the permission passed in against
the obj passed in
:param permission: name of the permission
:param obj: the object to check the permission against for the current user
:return: 1 if the user has rights for this permission for the passed in obj |
367,850 | def visitLexerTerminal(self, ctx: jsgParser.LexerTerminalContext):
if ctx.LEXER_ID():
idtoken = as_token(ctx)
self._rulePattern += + idtoken +
self._ruleTokens.add(idtoken)
else:
self.add_string(ctx.getText()[1:-1], False) | terminal: LEXER_ID | STRING |
367,851 | def lag_plot(data, lag=1, kind="scatter", **kwds):
if lag != int(lag) or int(lag) <= 0:
raise ValueError("lag must be a positive integer")
lag = int(lag)
values = data.values
y1 = "y(t)"
y2 = "y(t + {0})".format(lag)
lags = pd.DataFrame({y1: values[:-lag].T.ravel(), y2: values[lag:... | Lag plot for time series.
Parameters
----------
data: pandas.Series
the time series to plot
lag: integer
The lag of the scatter plot, default=1
kind: string
The kind of plot to use (e.g. 'scatter', 'line')
**kwds:
Additional keywords passed to data.vgplot.scatter... |
367,852 | def _fetch_pageviews(self, storage, year, week, ip_users=False):
prefix =
if ip_users:
query_add = "AND !(bot:True) AND (id_user:0)"
prefix +=
else:
query_add = "AND !(bot:True) AND !(id_user:0)"
store = self.storage.get(prefix, year, week)
... | Fetch PageViews from Elasticsearch.
:param time_from: Staring at timestamp.
:param time_to: To timestamp |
367,853 | def _get_num_locations(d):
lengths = []
for key in EXCEL_GEO:
try:
if key != "siteName":
lengths.append(len(d[key]))
except Exception:
lengths.append(1)
try:
num = max(lengths)
except ValueError:
num = 0
return num | Find out how many locations are being parsed. Compare lengths of each
coordinate list and return the max
:param dict d: Geo metadata
:return int: Max number of locations |
367,854 | def _get_derived_feature_types(self, limit):
if self.test_mode:
graph = self.testgraph
else:
graph = self.graph
model = Model(graph)
raw = .join((self.rawdir, ))
LOG.info("determining some feature types based on relationships")
with open(... | Make a pass through the feature table in order to properly type
the FBal (allele) features, which are derived either from other
sequence features (which can be things like RNAi products)
or transgenic-transposons. We'll save the allele type into a hasmap.
:param limit:
:return: |
367,855 | async def save_changes(self, turn_context: TurnContext, force: bool = False) -> None:
if turn_context == None:
raise TypeError()
cached_state = turn_context.turn_state.get(self._context_service_key)
if force or (cached_state != None and cached_state.is_chan... | If it has changed, writes to storage the state object that is cached in the current context object for this turn.
:param turn_context: The context object for this turn.
:param force: Optional. True to save state to storage whether or not there are changes. |
367,856 | def rotate(self, angle, direction=, axis=None):
return Space(self).rotate(angle, direction, axis)[0] | Returns a new Place which is the same but rotated about a
given axis.
If the axis given is ``None``, the rotation will be computed
about the Place's centroid.
:param angle: Rotation angle (in radians)
:type angle: float
:param direction: Axis di... |
367,857 | def to_native_types(self, slicer=None, na_rep=, quoting=None,
**kwargs):
values = self.get_values()
if slicer is not None:
values = values[:, slicer]
mask = isna(values)
if not self.is_object and not quoting:
values = values.ast... | convert to our native types format, slicing if desired |
367,858 | def get_parser():
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
parser = ArgumentParser(description=__doc__,
formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument("-n",
dest="n", default=10, type=int,
... | Return the parser object for this script. |
367,859 | def _vec_alpha(self, donor_catchments):
return np.dot(linalg.inv(self._matrix_omega(donor_catchments)), self._vec_b(donor_catchments)) | Return vector alpha which is the weights for donor model errors
Methodology source: Kjeldsen, Jones & Morris 2014, eq 10
:param donor_catchments: Catchments to use as donors
:type donor_catchments: list of :class:`Catchment`
:return: Vector of donor weights
:rtype: :class:`nump... |
367,860 | def loop(self, intro=None):
self.fire("preloop")
if intro is not None:
self.intro = intro
if self.intro is not None:
self.stdout.write(self.intro + "\n")
self.stdout.flush()
stop = None
while not stop:
if self.use_rawinput:... | TODO as heck.
See Python's cmd.Cmd.cmdloop for some (somewhat horrifying)
example loops - that's what we're working similarly to. |
367,861 | def writeAMF3(self, data):
self.writeType(TYPE_AMF3)
self.context.getAMF3Encoder(self).writeElement(data) | Writes an element in L{AMF3<pyamf.amf3>} format. |
367,862 | def get_cachedir_csig(self):
try:
return self.cachedir_csig
except AttributeError:
pass
cachedir, cachefile = self.get_build_env().get_CacheDir().cachepath(self)
if not self.exists() and cachefile and os.path.exists(cachefile):
self.cachedir_... | Fetch a Node's content signature for purposes of computing
another Node's cachesig.
This is a wrapper around the normal get_csig() method that handles
the somewhat obscure case of using CacheDir with the -n option.
Any files that don't exist would normally be "built" by fetching
... |
367,863 | def _datatype_size(datatype, numElms):
sizes = {1: 1,
2: 2,
4: 4,
8: 8,
11: 1,
12: 2,
14: 4,
21: 4,
22: 8,
31: 8,
32: 16,
... | Gets datatype size
Parameters:
datatype : int
CDF variable data type
numElms : int
number of elements
Returns:
numBytes : int
The number of bytes for the data |
367,864 | def filter_resp(self, action_resp, filter_params):
if action_resp.status == STATUS_OK:
try:
return CommandsResponse(
STATUS_OK,
TextFilter.filter(action_resp.value, filter_params)
)
except FilterError as e:
... | Filter response of action. Used to make printed results more
specific
:param action_resp: named tuple (CommandsResponse)
containing response from action.
:param filter_params: params used after '|' specific for given filter
:return: filtered response. |
367,865 | def get_info(self, params={}):
res = self.invoke(zconstant.NS_ZIMBRA_ACC_URL,
sconstant.GetInfoRequest,
params)
return res | Gets mailbox info.
@param params: params to retrieve
@return: AccountInfo |
367,866 | def restore_scoped_package_version_from_recycle_bin(self, package_version_details, feed_id, package_scope, unscoped_package_name, package_version):
route_values = {}
if feed_id is not None:
route_values[] = self._serialize.url(, feed_id, )
if package_scope is not None:
... | RestoreScopedPackageVersionFromRecycleBin.
[Preview API] Restore a package version with an npm scope from the recycle bin to its feed.
:param :class:`<NpmRecycleBinPackageVersionDetails> <azure.devops.v5_0.npm.models.NpmRecycleBinPackageVersionDetails>` package_version_details:
:param str feed_i... |
367,867 | def export(self, name, columns, points):
logger.debug("Export {} stats to ZeroMQ".format(name))
data = dict(zip(columns, points))
if data == {}:
return False
message = [b(self.prefix),
b(name... | Write the points to the ZeroMQ server. |
367,868 | def get_logger(cls, *name, **kwargs):
return cls(getLogger(_normalize_name(name)),
kwargs.get(, None)) | Construct a new :class:`KvLoggerAdapter` which encapsulates
the :class:`logging.Logger` specified by ``name``.
:param name:
Any amount of symbols. Will be concatenated and normalized
to form the logger name. Can also be empty.
:param extra:
Additional conte... |
367,869 | def _indent(indent=0, quote=, indent_char=):
if indent > 0:
indent_string = .join((
str(quote),
(indent_char * (indent - len(quote)))
))
else:
indent_string = .join((
( * (-1 * (indent - len(quote)))),
str(quote))
)
if len... | Indent util function, compute new indent_string |
367,870 | def blend_alpha(image_fg, image_bg, alpha, eps=1e-2):
assert image_fg.shape == image_bg.shape
assert image_fg.dtype.kind == image_bg.dtype.kind
assert image_fg.dtype.name not in ["float128"]
assert image_bg.dtype.name not in ["float128"]
input_was_2d = (len(image_fg.shape) == 2)
... | Blend two images using an alpha blending.
In an alpha blending, the two images are naively mixed. Let ``A`` be the foreground image
and ``B`` the background image and ``a`` is the alpha value. Each pixel intensity is then
computed as ``a * A_ij + (1-a) * B_ij``.
dtype support::
* ``uint8``: y... |
367,871 | def product_id_change(self):
context = dict(self._context)
if not context:
context = {}
if context.get(, False):
if self.product_id and self.folio_id.partner_id:
self.name = self.product_id.name
self.price_unit = self.product_id.li... | - @param self: object pointer
- |
367,872 | def apply_to_image(self, image, reference=None, interpolation=):
if reference is None:
reference = image.clone()
tform_fn = utils.get_lib_fn( % (self._libsuffix, image._libsuffix))
reference = reference.clone(image.pixeltype)
img_ptr = tform_fn(self.pointer, image.... | Apply transform to an image
Arguments
---------
image : ANTsImage
image to which the transform will be applied
reference : ANTsImage
target space for transforming image
interpolation : string
type of interpolation to use
Returns
... |
367,873 | def fetch(self, url, open_graph=None, twitter_card=None, touch_icon=None,
favicon=None, all_images=None, parser=None, handle_file_content=None,
canonical=None):
open_graph = merge_settings(open_graph, self.open_graph)
twitter_card = merge_settings(twitter_c... | Retrieves content from the specified url, parses it, and returns
a beautifully crafted dictionary of important information about that
web page.
Priority tree is as follows:
1. OEmbed
2. Open Graph
3. Twitter Card
4. Other meta content (i.e. descri... |
367,874 | def _register_function(name: str, func, universe: bool, in_place: bool):
if name in mapped:
mapped_func = mapped[name]
raise PipelineNameError(.format(
name=name,
func_mod=mapped_func.__module__,
func_name=mapped_func.__name__
))
mapped[name] = f... | Register a transformation function under the given name.
:param name: Name to register the function under
:param func: A function
:param universe:
:param in_place:
:return: The same function, with additional properties added |
367,875 | def get_skeletons(self, component_info=None, data=None, component_position=None):
components = []
append_components = components.append
for _ in range(component_info.skeleton_count):
component_position, info = QRTPacket._get_exact(
RTSegmentCount, data, comp... | Get skeletons |
367,876 | def train(self, data, epochs, autostop=False):
self._data = data
if self.__ccore_som_pointer is not None:
return wrapper.som_train(self.__ccore_som_pointer, data, epochs, autostop)
self._sqrt_distances = self.__initialize_distances(self._size, self... | !
@brief Trains self-organized feature map (SOM).
@param[in] data (list): Input data - list of points where each point is represented by list of features, for example coordinates.
@param[in] epochs (uint): Number of epochs for training.
@param[in] autostop (bool): Automatic... |
367,877 | def main():
dem =
num_proc = 2
wp =
TauDEMWorkflow.watershed_delineation(num_proc, dem, workingdir=wp) | The simplest usage of watershed delineation based on TauDEM. |
367,878 | def to_detach(b:Tensors, cpu:bool=True):
"Recursively detach lists of tensors in `b `; put them on the CPU if `cpu=True`."
if is_listy(b): return [to_detach(o, cpu) for o in b]
if not isinstance(b,Tensor): return b
b = b.detach()
return b.cpu() if cpu else b | Recursively detach lists of tensors in `b `; put them on the CPU if `cpu=True`. |
367,879 | def get_fragment(self, offset):
fragment_len = 10
s = % (self.source[offset:offset + fragment_len])
if offset + fragment_len < len(self.source):
s +=
return s | Get the part of the source which is causing a problem. |
367,880 | def avg_bp_from_range(self, bp):
try:
if in bp:
maxlen = float(bp.split("-",1)[1])
minlen = float(bp.split("-",1)[0])
bp = ((maxlen - minlen)/2) + minlen
except TypeError:
pass
return(int(bp)) | Helper function - FastQC often gives base pair ranges (eg. 10-15)
which are not helpful when plotting. This returns the average from such
ranges as an int, which is helpful. If not a range, just returns the int |
367,881 | def get_column(name, model=None):
if in name:
m, name = name.split()
model = get_model(m)
if model:
return model.c.get(name) | get table column according to name, the name can be like `model.column` |
367,882 | def first_arg_to_level_name(arg):
try:
return int(arg)
except ValueError:
arg = arg.upper()
for level in LEVELS:
if level in arg:
return level
return None | Decide what level the argument specifies and return it. The argument
must contain (case-insensitive) one of the values in LEVELS or be an integer
constant. Otherwise None will be returned. |
def get_jira_key_from_scenario(scenario):
    """Extract Jira Test Case key from scenario tags.
    Two tag formats are allowed:
        @jira('PROJECT-32')
        @jira=PROJECT-32
    :param scenario: behave scenario
    :returns: Jira test case key, or None when no tag matches
    """
    # Reconstructed pattern (the quoted literal was mangled during
    # extraction): accepts both jira('KEY-123') and jira=KEY-123 forms.
    jira_regex = re.compile(r"jira[=\('\"]*([A-Z]+\-[0-9]+)[\)'\"]*")
    for tag in scenario.tags:
        match = jira_regex.search(tag)
        if match:
            return match.group(1)
    return None
def Search(self, artifact, os_name=None, cpe=None, label=None):
    """Whether the condition contains the specified values.

    Args:
      artifact: A string identifier for the artifact.
      os_name: An OS string.
      cpe: A CPE string.
      label: A label string.

    Returns:
      True if the values match the non-empty query attributes.
      Empty query attributes are ignored.
    """
    queries = ((artifact, self.artifact), (os_name, self.os_name),
               (cpe, self.cpe), (label, self.label))
    # An empty/None query value matches anything; otherwise require equality.
    return all(query == stored or not query for query, stored in queries)
def filter_entries(entries, filters, exclude):
    """Filters a list of host entries according to the given filters.

    :param entries: A list of host entries.
    :type entries: [:py:class:`HostEntry`]
    :param filters: Regexes that must match a `HostEntry`.
    :type filters: [``str``]
    :param exclude: Regexes that must NOT match a `HostEntry`.
    :type exclude: [``str``]
    :return: The entries matching every filter and no exclude pattern.
    """
    def _keep(entry):
        # Every positive filter must match ...
        if not all(entry.matches(f) for f in filters):
            return False
        # ... and no exclude pattern may match.
        return not any(entry.matches(e) for e in exclude)

    return [entry for entry in entries if _keep(entry)]
367,886 | def to_string(self, endpoints):
root = self._make_xml(endpoints)
tree = ElementTree.ElementTree(root)
ElementTree.register_namespace("", EDEF_NAMESPACE)
output = StringIO()
tree.write(
output,
... | Converts the given endpoint description beans into a string
:param endpoints: A list of EndpointDescription beans
:return: A string containing an XML document |
367,887 | def manage_initial_host_status_brok(self, b):
host_name = b.data[]
logger.debug("got initial host status: %s", host_name)
self.hosts_cache[host_name] = {
:
sanitize_name(b.data.get(, b.data.get(, ))),
}
if in b.data:
self.hosts_c... | Prepare the known hosts cache |
def enc(x, codec='ascii'):
    """Encodes a string for SGML/XML/HTML.

    Escapes the four markup-significant characters and then encodes,
    turning any remaining non-encodable characters into numeric character
    references.

    NOTE(review): the replace literals and default codec were stripped
    during extraction; '&', '<', '>', '"' escapes and the 'ascii' default
    are reconstructed — confirm against upstream.

    :param x: the text to escape
    :param codec: target encoding for the returned bytes
    :return: escaped, encoded bytes
    """
    # '&' must be escaped first so the other entities are not double-escaped.
    x = (x.replace('&', '&amp;')
          .replace('<', '&lt;')
          .replace('>', '&gt;')
          .replace('"', '&quot;'))
    return x.encode(codec, 'xmlcharrefreplace')
367,889 | def DeleteSnapshot(self,names=None):
if names is None: names = self.GetSnapshots()
requests_lst = []
for name in names:
name_links = [obj[] for obj in self.data[][] if obj[]==name][0]
requests_lst.append(
clc.v2.Requests(
clc.v2.API.Call(,
[obj[] for obj in name_links if obj[]==][0],
... | Removes an existing Hypervisor level snapshot.
Supply one or more snapshot names to delete them concurrently.
If no snapshot names are supplied will delete all existing snapshots.
>>> clc.v2.Server(alias='BTDI',id='WA1BTDIKRT02').DeleteSnapshot().WaitUntilComplete()
0 |
def deploy(provider=None):
    """Deploys your project.

    Reads the deployment config from DEPLOY_YAML (when present), resolves
    the provider class from PROVIDERS and delegates to its ``deploy()``.

    NOTE(review): the YAML key literal was stripped during extraction;
    'provider' is assumed — confirm against the deploy YAML schema. The
    ``provider`` argument is unused in the visible code; kept for
    interface compatibility.
    """
    if os.path.exists(DEPLOY_YAML):
        site = yaml.safe_load(_read_file(DEPLOY_YAML))
        provider_class = PROVIDERS[site['provider']]
        provider_class.deploy()
def mark_offer_as_win(self, offer_id):
    """Mark offer as win

    :param offer_id: the offer id
    :return Response
    """
    response = self._create_put_request(
        resource=OFFERS,
        command=WIN,
        billomat_id=offer_id,
    )
    return response
367,892 | def _read_uaa_cache(self):
self._cache_path = os.path.expanduser()
if not os.path.exists(self._cache_path):
return self._initialize_uaa_cache()
with open(self._cache_path, ) as data:
return json.load(data) | Read cache of UAA client/user details. |
367,893 | def validate_metadata(self, xml):
assert isinstance(xml, compat.text_types)
if len(xml) == 0:
raise Exception()
errors = []
root = OneLogin_Saml2_XML.validate_xml(xml, , self.__debug)
if isinstance(root, str):
errors.append(root)
else:
... | Validates an XML SP Metadata.
:param xml: Metadata's XML that will be validate
:type xml: string
:returns: The list of found errors
:rtype: list |
367,894 | def _new_conn(self):
self.num_connections += 1
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
raise SSLError("Can't connect to HTTPS URL... | Return a fresh :class:`httplib.HTTPSConnection`. |
367,895 | def search(d, recursive=True, store_meta=True):
1.2.340.500067.8.9.10.11012.13000001401516017181900000200
scans = col.defaultdict(lambda: col.defaultdict(lambda: col.defaultdict(list)))
for dirpath,dirnames,filenames in os.walk(os.path.expanduser(d)):
for f in filenames:
fullfile = ... | Search for DICOM files within a given directory and receive back a
dictionary of {StudyInstanceUID: {SeriesNumber: [files]}}
Example usage::
>>> import yaxil.dicom
>>> yaxil.dicom.search("~/dicoms").keys()
['1.2.340.500067.8.9.10.11012.13000001401516017181900000200']
:... |
367,896 | def graph_repo(repo_url, output_loc, format=):
log = logging.getLogger("graphgit")
local_repo = os.path.isabs(repo_url)
repo_name = repo_url[repo_url.rfind()+1:repo_url.rfind()] \
if not local_repo else repo_url[repo_url.rfind(os.sep)+1:]
log.info ("Processing git repository: %s" % repo_name)
... | generates a graph for a git repository |
367,897 | def hostgroup_exists(name=None, groupid=None, node=None, nodeids=None, **kwargs):
s docstring)
:param _connection_password: Optional - zabbix password (can also be set in opts or pillar, see modules docstring)
:return: True if at least one host group exists, False if not or on failure.
CLI Example:
... | Checks if at least one host group that matches the given filter criteria exists.
.. versionadded:: 2016.3.0
:param name: names of the host groups
:param groupid: host group IDs
:param node: name of the node the host groups must belong to (zabbix API < 2.4)
:param nodeids: IDs of the nodes the host... |
367,898 | def _range_check(self, value, min_value, max_value):
if value < min_value or value > max_value:
raise ValueError( % (self.__class__.__name__, value, min_value, max_value)) | Utility method to check that the given value is between min_value and max_value. |
367,899 | def _has_nested(self, relations, operator=, count=1, boolean=, extra=None):
relations = relations.split()
def closure(q):
if len(relations) > 1:
q.where_has(relations.pop(0), closure)
else:
q.has(relations.pop(0), operator, count, boolean... | Add nested relationship count conditions to the query.
:param relations: nested relations
:type relations: str
:param operator: The operator
:type operator: str
:param count: The count
:type count: int
:param boolean: The boolean value
:type boolean: s... |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.