Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
4,500 | def unzip_file(source_file, dest_dir=None, mkdir=False):
if dest_dir is None:
dest_dir, fname = os.path.split(source_file)
elif not os.path.isdir(dest_dir):
if mkdir:
preparedir(dest_dir)
else:
created = preparedir(dest_dir, False)
if not cre... | Unzip a compressed file.
Args:
source_file: Full path to a valid compressed file (e.g. c:/ladybug/testPts.zip)
dest_dir: Target folder to extract to (e.g. c:/ladybug).
Default is set to the same directory as the source file.
mkdir: Set to True to create the directory if doesn't ... |
4,501 | def collect(self, target):
statusobjects = set()
callables = set()
objs_from_this_obj = getattr(self, % target)()
if not is_iterable(objs_from_this_obj):
objs_from_this_obj = [objs_from_this_obj]
if is_iterable(objs_from_this_obj):
for i in (se... | Recursively collect all potential triggers/targets in this node and its children.
Define targets and triggers of this particular callable in :meth:`_give_triggers`
and :meth:`_give_targets`.
:param str target: valid values: ``'targets'`` and ``'triggers'`` |
4,502 | def run(self):
logger = getLogger(__name__)
try:
config_class_name = self.arguments[0]
except IndexError:
raise SphinxError(
.format(self.directive_name))
logger.debug(, self.directive_name,
config_cl... | Main entrypoint method.
Returns
-------
new_nodes : `list`
Nodes to add to the doctree. |
4,503 | def visit_extslice(self, node, parent):
newnode = nodes.ExtSlice(parent=parent)
newnode.postinit([self.visit(dim, newnode) for dim in node.dims])
return newnode | visit an ExtSlice node by returning a fresh instance of it |
4,504 | def add_f95_to_env(env):
try:
F95Suffixes = env[]
except KeyError:
F95Suffixes = []
try:
F95PPSuffixes = env[]
except KeyError:
F95PPSuffixes = []
DialectAddToEnv(env, "F95", F95Suffixes, F95PPSuffixes,
support_module = 1) | Add Builders and construction variables for f95 to an Environment. |
4,505 | def check_cluster(
cluster_config,
data_path,
java_home,
check_replicas,
batch_size,
minutes,
start_time,
end_time,
):
brokers = get_broker_list(cluster_config)
broker_files = find_files(data_path, brokers, minutes, start_time, end_time)
if not check_replicas:
... | Check the integrity of the Kafka log files in a cluster.
start_time and end_time should be in the format specified
by TIME_FORMAT_REGEX.
:param data_path: the path to the log folder on the broker
:type data_path: str
:param java_home: the JAVA_HOME of the broker
:type java_home: str
:param... |
4,506 | def read_config(config_path=default_config_path):
config_path = os.path.expanduser(config_path)
if not os.path.isfile(config_path):
raise OSError(errno.ENOENT,
"Artifactory configuration file not found: " %
config_path)
p = configparser.ConfigParser(... | Read configuration file and produce a dictionary of the following structure:
{'<instance1>': {'username': '<user>', 'password': '<pass>',
'verify': <True/False>, 'cert': '<path-to-cert>'}
'<instance2>': {...},
...}
Format of the file:
[https://artifactory-instance.... |
4,507 | def plot(self, ax=None, **kwargs):
ax, fig, plt = get_ax_fig_plt(ax)
yy = [len(v) for v in self.values]
ax.plot(self.binvals, yy, **kwargs)
return fig | Plot the histogram with matplotlib, returns `matplotlib` figure. |
4,508 | def _set_blob_properties(self, ud):
if ud.requires_non_encrypted_md5_put:
digest = blobxfer.util.base64_encode_as_string(ud.md5.digest())
else:
digest = None
blobxfer.operations.azure.blob.set_blob_properties(ud.entity, digest)
if blobxfer.util.i... | Set blob properties (md5, cache control)
:param Uploader self: this
:param blobxfer.models.upload.Descriptor ud: upload descriptor |
4,509 | def fetch(url, binary, outfile, noprint, rendered):
s content, and output it to the console.
contentbinarymimietext/htmlbinarycontentbinarycontentcontent'].encode("UTF-8")) | Fetch a specified URL's content, and output it to the console. |
4,510 | def _prepare_ws(self, w0, mmap, n_steps):
from ..dynamics import PhaseSpacePosition
if not isinstance(w0, PhaseSpacePosition):
w0 = PhaseSpacePosition.from_w(w0)
arr_w0 = w0.w(self._func_units)
self.ndim, self.norbits = arr_w0.shape
self.ndim = self.ndim//2... | Decide how to make the return array. If mmap is False, this returns a
full array of zeros, but with the correct shape as the output. If mmap
is True, return a pointer to a memory-mapped array. The latter is
particularly useful for integrating a large number of orbits or
integrating a lar... |
4,511 | def find_boundary_types(model, boundary_type, external_compartment=None):
if not model.boundary:
LOGGER.warning("There are no boundary reactions in this model. "
"Therefore specific types of boundary reactions such "
"as , or cannot be "
... | Find specific boundary reactions.
Arguments
---------
model : cobra.Model
A cobra model.
boundary_type : str
What boundary type to check for. Must be one of
"exchange", "demand", or "sink".
external_compartment : str or None
The id for the external compartment. If No... |
4,512 | def masked_within_block_local_attention_1d(q, k, v, block_length=64, name=None):
with tf.variable_scope(
name, default_name="within_local_attention_1d", values=[q, k, v]):
batch, heads, length, depth_k = common_layers.shape_list(q)
depth_v = common_layers.shape_list(v)[-1]
if isinstance(block_len... | Attention to the source and a neighborhood to the left within a block.
The sequence is divided into blocks of length block_length. Attention for a
given query position can only see memory positions less than or equal to the
query position in the corresponding block.
Args:
q: a Tensor with shape [batch, he... |
4,513 | def gpg_profile_put_key( blockchain_id, key_id, key_name=None, immutable=True, txid=None, key_url=None, use_key_server=True, key_server=None, proxy=None, wallet_keys=None, gpghome=None ):
if key_name is not None:
assert is_valid_keyname(key_name)
if key_server is None:
key_server = DEFAUL... | Put a local GPG key into a blockchain ID's global account.
If the URL is not given, the key will be replicated to the default PGP key server and to either immutable (if @immutable) or mutable data.
Return {'status': True, 'key_url': key_url, 'key_id': key fingerprint, ...} on success
Return {'error': ...} ... |
4,514 | def bucket(cls, bucket_name, connection=None):
connection = cls.connection if connection == None else connection
if bucket_name not in cls._buckets:
connection = "{connection}/{bucket_name}".format(connection=connection, bucket_name=bucket_name)
if cls.password:
... | Gives the bucket from couchbase server.
:param bucket_name: Bucket name to fetch.
:type bucket_name: str
:returns: couchbase driver's Bucket object.
:rtype: :class:`couchbase.client.Bucket`
:raises: :exc:`RuntimeError` If the credentials wasn't set. |
4,515 | def from_group(cls, group):
if not group:
return
tag_items = group.split(";")
return list(map(cls.parse, tag_items)) | Construct tags from the regex group |
4,516 | def from_extension(extension):
if not extension.startswith():
raise ValueError("Extensions must begin with a period.")
try:
return EXTENSION_TO_TYPE[extension.lower()]
except KeyError:
raise UnknownExtensionError(
"seqmagick does not know how to handle " +
... | Look up the BioPython file type corresponding with input extension.
Look up is case insensitive. |
4,517 | def IsSocket(self):
if self._stat_object is None:
self._stat_object = self._GetStat()
if self._stat_object is not None:
self.entry_type = self._stat_object.type
return self.entry_type == definitions.FILE_ENTRY_TYPE_SOCKET | Determines if the file entry is a socket.
Returns:
bool: True if the file entry is a socket. |
4,518 | def hash_data(data, hashlen=None, alphabet=None):
r
if alphabet is None:
alphabet = ALPHABET_27
if hashlen is None:
hashlen = HASH_LEN2
if isinstance(data, stringlike) and len(data) == 0:
text = (alphabet[0] * hashlen)
else:
hasher = hashlib.sha512()
... | r"""
Get a unique hash depending on the state of the data.
Args:
data (object): any sort of loosely organized data
hashlen (None): (default = None)
alphabet (None): (default = None)
Returns:
str: text - hash string
CommandLine:
python -m utool.util_hash hash_d... |
4,519 | def generate_project(self):
if not self.name or not self.destdir or \
not os.path.isdir(self.destdir):
raise ValueError("Empty or invalid property values: run with command")
_log("Generating project " % self.name)
_log("Destination directory is: " %... | Generate the whole project. Returns True if at least one
file has been generated, False otherwise. |
4,520 | def ext_pillar(minion_id, pillar, *args, **kwargs):
if minion_id == :
log.info(s no data to collect from NetBox for the Masterapi_url/api_tokensite_detailssite_prefixesproxy_usernameproxy_returnAuthorizationToken {}{api_url}/{app}/{endpoint}dcimdevicesnameerrorAPI query failed for "%s", status code: %d... | Query NetBox API for minion data |
4,521 | def deserialize(self, data):
ct_in_map = {
: self._form_loader,
: salt.utils.json.loads,
: salt.utils.yaml.safe_load,
: salt.utils.yaml.safe_load,
return ct_in_map[value](tornado.escape.native_str(data))
except KeyError:
... | Deserialize the data based on request content type headers |
4,522 | def createHeaderMenu(self, index):
menu = QtGui.QMenu(self)
act = menu.addAction("Hide " % self.columnOf(index))
act.triggered.connect( self.headerHideColumn )
menu.addSeparator()
act = menu.addAction()
act.setIcon(QtGui.QIcon(resources... | Creates a new header menu to be displayed.
:return <QtGui.QMenu> |
4,523 | def l2traceroute_result_input_session_id(self, **kwargs):
config = ET.Element("config")
l2traceroute_result = ET.Element("l2traceroute_result")
config = l2traceroute_result
input = ET.SubElement(l2traceroute_result, "input")
session_id = ET.SubElement(input, "session-id"... | Auto Generated Code |
4,524 | def parse(self, filename=None, file=None, debuglevel=0):
self.scope.push()
if not file:
file = filename
else:
if hasattr(file, ):
if filename is not None:
raise AssertionError(
... | Parse file.
kwargs:
filename (str): File to parse
debuglevel (int): Parser debuglevel |
4,525 | def undo(self):
if self.canundo():
undoable = self._undos.pop()
with self._pausereceiver():
try:
undoable.undo()
except:
self.clear()
raise
else:
... | Undo the last action. |
4,526 | def _qteRunQueuedMacro(self, macroName: str,
widgetObj: QtGui.QWidget=None,
keysequence: QtmacsKeysequence=None):
app = qteGetAppletFromWidget(widgetObj)
if app is not None:
if sip.isdeleted(app):
... | Execute the next macro in the macro queue.
This method is triggered by the ``timerEvent`` in conjunction
with the focus manager to ensure the event loop updates the
GUI in between any two macros.
.. warning:: Never call this method directly.
|Args|
* ``macroName`` (**... |
4,527 | def _parse(fileobj):
fileobj.seek(0)
try:
part = fileobj.read(2)
except UnicodeDecodeError:
part = ""
if part == "
shebang = shlex.split(fileobj.readline().strip())
if (platform.system() == "Windows" and
len(shebang) and
os.path.basen... | Parse fileobj for a shebang. |
4,528 | def get_face_mask(self, subdomain):
if subdomain is None:
return numpy.s_[:]
if subdomain not in self.subdomains:
self._mark_vertices(subdomain)
is_in = self.subdomains[subdomain]["vertices"][self.idx_hierarchy]
n... | Get faces which are fully in subdomain. |
4,529 | def circlescan(x0, y0, r1, r2):
if r1 < 0: raise ValueError("Initial radius must be non-negative")
if r2 < 0: raise ValueError("Final radius must be non-negative")
previous = []
rstep = 1 if r2 >= r1 else -1
for distance in range(r1, r2 + rstep, rstep):
if distanc... | Scan pixels in a circle pattern around a center point
:param x0: Center x-coordinate
:type x0: float
:param y0: Center y-coordinate
:type y0: float
:param r1: Initial radius
:type r1: float
:param r2: Final radius
:type r2: float
:returns: Coordinate generator
:rtype: function |
4,530 | def parseDockerAppliance(appliance):
appliance = appliance.lower()
if in appliance:
tag = appliance.split()[-1]
appliance = appliance[:-(len( + tag))]
else:
tag =
registryName =
imageName = appliance
if in appliance and in appliance.split... | Takes string describing a docker image and returns the parsed
registry, image reference, and tag for that image.
Example: "quay.io/ucsc_cgl/toil:latest"
Should return: "quay.io", "ucsc_cgl/toil", "latest"
If a registry is not defined, the default is: "docker.io"
If a tag is not defined, the defaul... |
4,531 | def prefetch_docker_image_on_private_agents(
image,
timeout=timedelta(minutes=5).total_seconds()):
agents = len(shakedown.get_private_agents())
app = {
"id": "/prefetch",
"instances": agents,
"container": {
"type": "DOCKER",
"docker": {"image"... | Given a docker image. An app with the image is scale across the private
agents to ensure that the image is prefetched to all nodes.
:param image: docker image name
:type image: str
:param timeout: timeout for deployment wait in secs (default: 5m)
:type password: int |
4,532 | def get_mode(device):
*
ret = {}
cmd = .format(device)
out = __salt__[](cmd, python_shell=False)
for line in out.splitlines():
comps = line.strip().split()
if comps[3] not in ret:
if comps[0].startswith():
if comps[1].startswith():
ret[... | Report whether the quota system for this device is on or off
CLI Example:
.. code-block:: bash
salt '*' quota.get_mode |
4,533 | def clearkml(self):
for layer in self.curlayers:
self.mpstate.map.remove_object(layer)
for layer in self.curtextlayers:
self.mpstate.map.remove_object(layer)
self.allayers = []
self.curlayers = []
self.alltextlayers = []
self.curt... | Clear the kmls from the map |
4,534 | def get(self, key, default=None):
if self.in_memory:
return self._memory_db.get(key, default)
else:
db = self._read_file()
return db.get(key, default) | Get key value, return default if key doesn't exist |
4,535 | def ds_discrete(self, d_min=None, d_max=None, pts=20, limit=1e-9,
method=):
rlinearlogarithmicR10linearlogarithmicR10R5R2.5ISO 3310-1 R40/3ISO 3310-1 R20ISO 3310-1 R20/3ISO 3310-1ISO 3310-1 R10ASTM E11
if method[0] not in (, ):
if d_min is None:
d_min = s... | r'''Create a particle spacing mesh to perform calculations with,
according to one of several ways. The allowable meshes are
'linear', 'logarithmic', a geometric series specified by a Renard
number such as 'R10', or the meshes available in one of several sieve
standards.
... |
4,536 | def create_png(cls_name, meth_name, graph, dir_name=):
m_name = .join(x for x in meth_name if x.isalnum())
name = .join((cls_name.split()[-1][:-1], , m_name))
graph.draw(name, dir_name) | Creates a PNG from a given :class:`~androguard.decompiler.dad.graph.Graph`.
:param str cls_name: name of the class
:param str meth_name: name of the method
:param androguard.decompiler.dad.graph.Graph graph:
:param str dir_name: output directory |
4,537 | def heartbeat_encode(self, type, autopilot, base_mode, custom_mode, system_status, mavlink_version=2):
return MAVLink_heartbeat_message(type, autopilot, base_mode, custom_mode, system_status, mavlink_version) | The heartbeat message shows that a system is present and responding.
The type of the MAV and Autopilot hardware allow the
receiving system to treat further messages from this
system appropriate (e.g. by laying out the user
interface based on the autopilot)... |
4,538 | def place_items_in_square(items, t):
rows = [(t, y, []) for y in range(t)]
for item in items:
x = item % t
y = item // t
inverse_length, _, row_contents = rows[y]
heapq.heappush(row_contents, (x, item))
rows[y] = inv... | Returns a list of rows that are stored as a priority queue to be
used with heapq functions.
>>> place_items_in_square([1,5,7], 4)
[(2, 1, [(1, 5), (3, 7)]), (3, 0, [(1, 1)])]
>>> place_items_in_square([1,5,7], 3)
[(2, 0, [(1, 1)]), (2, 1, [(2, 5)]), (2, 2, [(1, 7)])] |
4,539 | def get_and_set(self, value):
with self._reference.get_lock():
oldval = self._reference.value
self._reference.value = value
return oldval | Atomically sets the value to `value` and returns the old value.
:param value: The value to set. |
4,540 | def build_arch(self, arch):
env = self.get_recipe_env(arch, with_flags_in_cc=False)
for path in (
self.get_build_dir(arch.arch),
join(self.ctx.python_recipe.get_build_dir(arch.arch), ),
join(self.ctx.python_recipe.get_build_dir(arch.arch), )):
... | simple shared compile |
4,541 | def attributes_diagram(rel_objs, obj_labels, colors, markers, filename, figsize=(8, 8), xlabel="Forecast Probability",
ylabel="Observed Relative Frequency", ticks=np.arange(0, 1.05, 0.05), dpi=300,
title="Attributes Diagram", legend_params=None, inset_params=None,
... | Plot reliability curves against a 1:1 diagonal to determine if probability forecasts are consistent with their
observed relative frequency. Also adds gray areas to show where the climatological probabilities lie and what
areas result in a positive Brier Skill Score.
Args:
rel_objs (list): List of D... |
4,542 | def rolling_count(self, window_start, window_end):
agg_op =
return SArray(_proxy=self.__proxy__.builtin_rolling_apply(agg_op, window_start, window_end, 0)) | Count the number of non-NULL values of different subsets over this
SArray.
The subset that the count is executed on is defined as an inclusive
range relative to the position to each value in the SArray, using
`window_start` and `window_end`. For a better understanding of this,
s... |
4,543 | def derive_and_set_name_fields_and_slug(
self, set_name_sort=True, set_slug=True
):
super(PersonCreator, self).derive_and_set_name_fields_and_slug(
set_name_sort=False, set_slug=False)
person_names = [
name for name in [self.name_family, self.name_gi... | Override this method from `CreatorBase` to handle additional name
fields for Person creators.
This method is called during `save()` |
4,544 | def _inverse_i(self, y, i):
lb = self._lb[self._index(i)]
ub = self._ub[self._index(i)]
al = self._al[self._index(i)]
au = self._au[self._index(i)]
if 1 < 3:
if not lb <= y <= ub:
raise ValueError()
if y < lb + al:
return (... | return inverse of y in component i |
4,545 | def build(self, paths, tags=None, wheel_version=None):
if tags is None:
tags = {}
libkey = list(filter(lambda o: o in paths, (, )))[0]
if libkey == :
is_pure =
default_pyver = [IMPVER]
default_abi = [ABI]
default_arch = [ARCH... | Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel. |
4,546 | def fetch_html(self, msg_nums):
if not msg_nums:
raise Exception("Invalid Message Number!")
return self.__imap_fetch_content_type(msg_nums, self.HTML) | Given a message number that we found with imap_search,
get the text/html content.
@Params
msg_nums - message number to get html message for
@Returns
HTML content of message matched by message number |
4,547 | def main():
config = Common.open_file(F_CONFIG)
Common.clean_build(config[])
Common.make_dir(config[])
for language in config[]:
Common.make_dir(config[] + language) | d |
4,548 | def splitread(args):
p = OptionParser(splitread.__doc__)
p.add_option("-n", dest="n", default=76, type="int",
help="Split at N-th base position [default: %default]")
p.add_option("--rc", default=False, action="store_true",
help="Reverse complement second read [default: %default]... | %prog splitread fastqfile
Split fastqfile into two read fastqfiles, cut in the middle. |
4,549 | def mmap(func, iterable):
if sys.version_info[0] > 2:
return [i for i in map(func, iterable)]
else:
return map(func, iterable) | Wrapper to make map() behave the same on Py2 and Py3. |
4,550 | def nn_getsockopt(socket, level, option, value):
if memoryview(value).readonly:
raise TypeError()
size_t_size = ctypes.c_size_t(len(value))
rtn = _nn_getsockopt(socket, level, option, ctypes.addressof(value),
ctypes.byref(size_t_size))
return (rtn, size_t_size.value... | retrieve a socket option
socket - socket number
level - option level
option - option
value - a writable byte buffer (e.g. a bytearray) which the option value
will be copied to
returns - number of bytes copied or on error nunber < 0 |
4,551 | def _start_update_server(auth_token):
server = AccumulatorServer(("localhost", 0), _UpdateRequestHandler, auth_token)
thread = threading.Thread(target=server.serve_forever)
thread.daemon = True
thread.start()
return server | Start a TCP server to receive accumulator updates in a daemon thread, and returns it |
4,552 | def _prefix_from_ip_string(cls, ip_str):
try:
ip_int = cls._ip_int_from_string(ip_str)
except AddressValueError:
cls._report_invalid_netmask(ip_str)
try:
return cls._prefix_from_ip_int(ip_int)
except ValueE... | Turn a netmask/hostmask string into a prefix length
Args:
ip_str: The netmask/hostmask to be converted
Returns:
An integer, the prefix length.
Raises:
NetmaskValueError: If the input is not a valid netmask/hostmask |
4,553 | def _normalize_stmt_idx(self, block_addr, stmt_idx):
if type(stmt_idx) is int:
return stmt_idx
if stmt_idx == DEFAULT_STATEMENT:
vex_block = self.project.factory.block(block_addr).vex
return len(vex_block.statements)
raise AngrBackwardSlicingError(... | For each statement ID, convert 'default' to (last_stmt_idx+1)
:param block_addr: The block address.
:param stmt_idx: Statement ID.
:returns: New statement ID. |
4,554 | def build_data(self):
if len(self.dutinformation) > 0 and (self.dutinformation.get(0).build is not None):
return self.dutinformation.get(0).build.get_data()
return None | get build data.
:return: build data or None if not found |
4,555 | def _set_batch(self, batch, fg, bg, bgblend=1, nullChar=False):
for (x, y), char in batch:
self._set_char(x, y, char, fg, bg, bgblend) | Try to perform a batch operation otherwise fall back to _set_char.
If fg and bg are defined then this is faster but not by very
much.
if any character is None then nullChar is True
batch is a iterable of [(x, y), ch] items |
4,556 | def approve(
self,
allowed_address: Address,
allowance: TokenAmount,
):
log_details = {
: pex(self.node_address),
: pex(self.address),
: pex(allowed_address),
: allowance,
}
check... | Aprove `allowed_address` to transfer up to `deposit` amount of token.
Note:
For channel deposit please use the channel proxy, since it does
additional validations. |
4,557 | def verify_signature(message_path: str,
sigfile_path: str,
cert_path: str) -> None:
with tempfile.TemporaryDirectory() as pubkey_dir:
pubkey_contents = subprocess.check_output(
[, , , cert_path,
, ])
pubkey_file = os.path.join(p... | Verify the signature (assumed, of the hash file)
It is assumed that the public key for the signature is in the keyring
:param message_path: The path to the message file to check
:param sigfile_path: The path to the signature to check
:param cert_path: The path to the certificate to check the signature... |
4,558 | def get_context(pid_file, daemon=False):
port_file = get_context_file_name(pid_file)
if not os.path.exists(port_file):
return None
with open(port_file, "rt") as f:
json_data = f.read()
try:
data = json.loads(json_data)
except ValueError as e:
l... | Get context of running notebook.
A context file is created when notebook starts.
:param daemon: Are we trying to fetch the context inside the daemon. Otherwise do the death check.
:return: dict or None if the process is dead/not launcherd |
4,559 | def plot_spectra(self, nmax, convention=, unit=, base=10.,
maxcolumns=3, xscale=, yscale=, grid=True,
xlim=(None, None), ylim=(None, None), show=True,
title=True, axes_labelsize=None, tick_labelsize=None,
title_labelsize=None, ax=None, ... | Plot the spectra of the best-concentrated Slepian functions.
Usage
-----
x.plot_spectra(nmax, [convention, unit, base, maxcolumns, xscale,
yscale, grid, xlim, ylim, show, title,
axes_labelsize, tick_labelsize, title_labelsize,
... |
4,560 | def _get_ssh_public_key(self):
key = ipa_utils.generate_public_ssh_key(self.ssh_private_key_file)
return .format(
user=self.ssh_user,
key=key.decode()
) | Generate SSH public key from private key. |
4,561 | def allocate(self, dut_configuration_list, args=None):
dut_config_list = dut_configuration_list.get_dut_configuration()
try:
for dut_config in dut_config_list:
if not self.can_allocate(dut_config.get_requirements()):
raise AllocationError... | Allocates resources from available local devices.
:param dut_configuration_list: List of ResourceRequirements objects
:param args: Not used
:return: AllocationContextList with allocated resources |
4,562 | def serve(path=None, host=None, port=None, user_content=False, context=None,
username=None, password=None, render_offline=False,
render_wide=False, render_inline=False, api_url=None, title=None,
autorefresh=True, browser=False, quiet=None, grip_class=None):
app = create_app(path, ... | Starts a server to render the specified file or directory containing
a README. |
4,563 | def quick_plot(cmap, fname=None, fig=None, ax=None, N=10):
x = np.linspace(0, 10, N)
X, _ = np.meshgrid(x, x)
if ax is None:
fig = plt.figure()
ax = fig.add_subplot(111)
mappable = ax.pcolor(X, cmap=cmap)
ax.set_title(cmap.name, fontsize=14)
ax.set_xticks([])
ax.set_yt... | Show quick test of a colormap. |
4,564 | def get_realms_by_explosion(self, realms):
if getattr(self, , False):
self.add_error("Error: there is a loop in the realm definition %s" % self.get_name())
return None
self.rec_tag = True
self.realm_members = sorted(self.realm_members... | Get all members of this realm including members of sub-realms on multi-levels
:param realms: realms list, used to look for a specific one
:type realms: alignak.objects.realm.Realms
:return: list of members and add realm to realm_members attribute
:rtype: list |
4,565 | def reset(self):
"Close the current failed connection and prepare for a new one"
log.info("resetting client")
rpc_client = self._rpc_client
self._addrs.append(self._peer.addr)
self.__init__(self._addrs)
self._rpc_client = rpc_client
self._dispatcher.rpc_client = r... | Close the current failed connection and prepare for a new one |
4,566 | def atlas_zonefile_push_dequeue( zonefile_queue=None ):
ret = None
with AtlasZonefileQueueLocked(zonefile_queue) as zfq:
if len(zfq) > 0:
ret = zfq.pop(0)
return ret | Dequeue a zonefile's information to replicate
Return None if there are none queued |
4,567 | def generate_datafile_old(number_items=1000):
from utils import get_names, generate_dataset
from pprint import pprint
filename = "samples.py"
dataset = generate_dataset(number_items)
fo = open(filename, "wb")
fo.write("
fo.write("
fo.write("
fo.write("SAMPLES = ")
pprint(dat... | Create the samples.py file |
4,568 | def are_dicts_equivalent(*args, **kwargs):
if not args:
return False
if len(args) == 1:
return True
if not all(is_dict(x) for x in args):
return False
first_item = args[0]
for item in args[1:]:
if len(item) != len(first_item):
return False
... | Indicate if :ref:`dicts <python:dict>` passed to this function have identical
keys and values.
:param args: One or more values, passed as positional arguments.
:returns: ``True`` if ``args`` have identical keys/values, and ``False`` if not.
:rtype: :class:`bool <python:bool>`
:raises SyntaxError:... |
4,569 | def get_full_recirc_content(self, published=True):
q = self.get_query()
search = custom_search_model(Content, q, published=published, field_map={
"feature_type": "feature_type.slug",
"tag": "tags.slug",
"content-type": "_type"
})
return search | performs es search and gets all content objects |
4,570 | def main(self):
args = self.args
parsed_pytree, pypackages = self.parse_py_tree(pytree=args.pytree)
parsed_doctree = self.parse_doc_tree(doctree=args.doctree, pypackages=pypackages)
return self.compare_trees(parsed_pytree=parsed_pytree, parsed_doctree=parsed_doctree) | Parse package trees and report on any discrepancies. |
4,571 | def gradient_black(
self, text=None, fore=None, back=None, style=None,
start=None, step=1, reverse=False,
linemode=True, movefactor=2, rgb_mode=False):
gradargs = {
: step,
: fore,
: back,
: style,
: reverse... | Return a black and white gradient.
Arguments:
text : String to colorize.
This will always be greater than 0.
fore : Foreground color, background will be gradient.
back : Background color, foreground will be gradie... |
4,572 | def MakeDynamicPotentialFunc(kBT_Gamma, density, SpringPotnlFunc):
def PotentialFunc(xdata, Radius):
mass = ((4/3)*np.pi*((Radius*10**-9)**3))*density
yfit=(kBT_Gamma/mass)
Y = yfit*SpringPotnlFunc(xdata)
return Y
return PotentialFunc | Creates the function that calculates the potential given
the position (in volts) and the radius of the particle.
Parameters
----------
kBT_Gamma : float
Value of kB*T/Gamma
density : float
density of the nanoparticle
SpringPotnlFunc : function
Function which takes the v... |
4,573 | def build_damage_dt(dstore, mean_std=True):
oq = dstore[]
damage_states = [] + list(
dstore.get_attr(, ))
dt_list = []
for ds in damage_states:
ds = str(ds)
if mean_std:
dt_list.append(( % ds, F32))
dt_list.append(( % ds, F32))
else:
... | :param dstore: a datastore instance
:param mean_std: a flag (default True)
:returns:
a composite dtype loss_type -> (mean_ds1, stdv_ds1, ...) or
loss_type -> (ds1, ds2, ...) depending on the flag mean_std |
4,574 | def _get_previous_mz(self, mzs):
mzs = tuple(mzs)
if mzs in self.lru_cache:
return self.lru_cache[mzs]
mz_hash = "%s-%s-%s" % (hash(mzs), sum(mzs), len(mzs))
if mz_hash in self.hashes:
for mz_data in self.hashes[mz_hash]:
test_... | given an mz array, return the mz_data (disk location)
if the mz array was not previously written, write to disk first |
4,575 | def nonzero_monies(self):
return [copy.copy(m) for m in self._money_obs if m.amount != 0] | Get a list of the underlying ``Money`` instances that are not zero
Returns:
([Money]): A list of zero or more money instances. Currencies will be unique. |
4,576 | def get_db_uri(config, output_dir):
db_config = config.get("results_database", {"db_uri": "default"})
if db_config[] == :
return os.path.join(output_dir, "results.sqlite")
return db_config[] | Process results_database parameters in config to format them for
set database function
:param dict config: project configuration dict
:param str output_dir: output directory for results
:return: string for db uri |
4,577 | def year_origin_filter(year_predicate=None, origin_predicate=None):
def accept(cable_id, predicate):
year, origin = _YEAR_ORIGIN_PATTERN.match(
canonicalize_id(cable_id)).groups()
return predicate(year, origin)
if year_predicate and origin_predicate:
return partial(acc... | \
Returns a predicate for cable identifiers where `year_predicate` and
`origin_predicate` must hold true.
If `year_predicate` and `origin_predicate` is ``None`` the returned
predicate holds always true.
`year_predicate`
A predicate which returns ``True`` or ``False`` for a cable
y... |
4,578 | def collect_fragment(event, agora_host):
agora = Agora(agora_host)
graph_pattern = ""
for tp in __triple_patterns:
graph_pattern += .format(tp)
fragment, _, graph = agora.get_fragment_generator( % graph_pattern, stop_event=event, workers=4)
__extract_pattern_nodes(graph)
log.info( %... | Execute a search plan for the declared graph pattern and sends all obtained triples to the corresponding
collector functions (config |
4,579 | def parse_data_shape(data_shape_str):
ds = data_shape_str.strip().split()
if len(ds) == 1:
data_shape = (int(ds[0]), int(ds[0]))
elif len(ds) == 2:
data_shape = (int(ds[0]), int(ds[1]))
else:
raise ValueError("Unexpected data_shape: %s", data_shape_str)
return data_shape | Parse string to tuple or int |
4,580 | def combine_first(self, other):
out = ops.fillna(self, other, join="outer", dataset_join="outer")
return out | Combine two Datasets, default to data_vars of self.
The new coordinates follow the normal broadcasting and alignment rules
of ``join='outer'``. Vacant cells in the expanded coordinates are
filled with np.nan.
Parameters
----------
other : DataArray
Used to ... |
4,581 | def delete_all(self, filter, timeout=-1, force=False):
return self._helper.delete_all(filter=filter, force=force, timeout=timeout) | Deletes all Server Profile objects from the appliance that match the provided filter.
Filters are supported only for the following profile attributes: name, description, serialnumber, uuid,
mactype, wwntype, serialnumbertype, status, and state.
Examples:
>>> server_profile_client.... |
4,582 | def guggenheim_katayama(target, K2, n, temperature=,
critical_temperature=,
critical_pressure=):
r
T = target[temperature]
Pc = target[critical_pressure]
Tc = target[critical_temperature]
sigma_o = K2*Tc**(1/3)*Pc**(2/3)
value = sigma_o*(1-T/Tc)**n... | r"""
Missing description
Parameters
----------
target : OpenPNM Object
The object for which these values are being calculated. This
controls the length of the calculated array, and also provides
access to other necessary thermofluid properties.
K2 : scalar
Fluid sp... |
4,583 | def _auth(profile=None, api_version=1, **connection_args):
if profile:
prefix = profile +
else:
prefix =
def get(key, default=None):
return connection_args.get( + key,
__salt__[](prefix + key, default))
user = get(, )
pass... | Set up heat credentials, returns
`heatclient.client.Client`. Optional parameter
"api_version" defaults to 1.
Only intended to be used within heat-enabled modules |
4,584 | def generate_routes(config):
routes = []
for name, config in iteritems(config):
pattern = r % re.escape(config[].lstrip())
proxy = generate_proxy(
prefix=config[], base_url=config[],
verify_ssl=config.get(, True),
middleware=config.get(),
app... | Generate a list of urls that map to generated proxy views.
generate_routes({
'test_proxy': {
'base_url': 'https://google.com/',
'prefix': '/test_prefix/',
'verify_ssl': False,
'csrf_exempt: False',
'middleware': ['djproxy.proxy_middleware.AddXFF']... |
def users_getPresence(self, *, user: str, **kwargs) -> SlackResponse:
    """Get presence information for a user via ``users.getPresence``.

    Args:
        user (str): User to get presence info on. Defaults to the
            authed user, e.g. 'W1234567890'.

    Returns:
        SlackResponse: the API call result.
    """
    kwargs["user"] = user
    response = self.api_call(
        "users.getPresence", http_verb="GET", params=kwargs)
    return response
Args:
user (str): User to get presence info on. Defaults to the authed user.
e.g. 'W1234567890' |
def lnprior(self, X):
    """Uniform, bounded log-prior.

    Returns ``-inf`` when any component of ``X`` lies outside the box
    spanned by ``self._lower_left`` and ``self._upper_right``, and 0.0
    (log of a constant density) otherwise.
    """
    below = np.any(X < self._lower_left)
    above = np.any(X > self._upper_right)
    if below or above:
        return -np.inf
    return 0.0
4,587 | def persist(arg, depth=Ellipsis, on_mutable=None):
ed form; note that this respects laziness and itables.
* If x is a list/tuple type, yields a tuple of persist()s elements is controlled by the depth option;
the default behavior is to persist objects down to the point that a persistent object is found,
... | persist(x) yields a persistent version of x if possible, or yields x itself.
The transformations performed by persist(x) are as follows:
* If x is an immutable object, yields x.persist()
* If x is a set, yield a frozenset of of persist(u) for all u in x.
* If x is a numpy array, yield imm_array(x... |
4,588 | def render_to_message(self, extra_context=None, **kwargs):
if extra_context is None:
extra_context = {}
kwargs.setdefault(, {}).update(self.headers)
context = self.get_context_data(**extra_context)
return self.message_class(
subject=self.render... | Renders and returns an unsent message with the provided context.
Any extra keyword arguments passed will be passed through as keyword
arguments to the message constructor.
:param extra_context: Any additional context to use when rendering the
templated content.
:type extra_... |
4,589 | def parse(text, encoding=):
if isinstance(text, six.binary_type):
text = text.decode(encoding)
return Query(text, split_segments(text)) | Parse the querystring into a normalized form. |
4,590 | def get_url(self):
if not self.data[self.execute_name]:
raise InvalidConfig(extra_body=
.format(self.name))
if not self.data.get():
raise InvalidConfig(extra_body=
... | IFTTT Webhook url
:return: url
:rtype: str |
def ball_pick(n, d, rng=None):
    """Return ``n`` cartesian vectors picked uniformly inside the unit
    ball in ``d`` dimensions.

    The unit ball is the space enclosed by the unit sphere. Picking is
    done by rejection sampling in the cube of side 2 centred on the
    origin: a candidate is valid when its squared magnitude is below 1.
    """
    return rejection_pick(
        L=2.0, n=n, d=d,
        valid=lambda r: vector_mag_sq(r) < 1.0,
        rng=rng,
    )
arbitrary number of dimensions.
The unit ball is the space enclosed by the unit sphere.
The picking is done by rejection sampling in the unit cube.
In 3-dimensional space, the fraction `\pi / 6 \sim 0.52` points are valid.
Paramete... |
def get_records(self, name):
    """Return all cached records stored under ``name``.

    Args:
        name (string): Key the required models are stored under.

    Returns:
        The cached models for ``name`` (a dict-values view), or an
        empty list when nothing is cached under that key.
    """
    # Absent keys yield an empty list rather than raising.
    if name not in self._cache:
        return []
    return self._cache[name].values()
Args:
name (string): The name which the required models are stored under.
Returns:
list: A list of :class:`cinder_data.model.CinderModel` models. |
4,593 | def publish(self, message_type, message_payload):
payload = json.dumps(jsonpickle.Pickler(unpicklable=False).flatten(message_payload))
message = amqp.Message(payload)
message.properties["delivery_mode"] = 2
name = % (settings.ENVIRONMENT_STAGE, self.service_name.lower(), messa... | Publish the specified object that the function automatically converts
into a JSON string representation.
This function use the lowered class name of the service as the AMQP
routing key. For instance, if the class ``ExampleService`` inherits
from the base class ``BaseService``, the meth... |
def is_defined(self, objtxt, force_import=False):
    """Return True if ``objtxt`` names a defined object.

    The lookup is performed against ``self.locals``; when
    ``force_import`` is True the name may be imported first.
    """
    defined = isdefined(
        objtxt,
        force_import=force_import,
        namespace=self.locals,
    )
    return defined
4,595 | def findRequirements():
requirementsPath = os.path.join(REPO_DIR, "requirements.txt")
requirements = parse_file(requirementsPath)
if nupicPrereleaseInstalled():
requirements = [req for req in requirements if "nupic" not in req]
if htmresearchCorePrereleaseInstalled():
requirements = ... | Read the requirements.txt file and parse into requirements for setup's
install_requirements option. |
4,596 | def force_move(source, destination):
if not os.path.exists(destination):
raise RuntimeError(
.format(destination=destination))
destination_folder = os.path.join(destination, os.path.split(source)[-1])
if os.path.exists(destination_folder):
shutil.rmtree(destin... | Force the move of the source inside the destination even if the destination has already a folder with the
name inside. In the case, the folder will be replaced.
:param string source: path of the source to move.
:param string destination: path of the folder to move the source to. |
def get_by_username(cls, username):
    """Return the first stored user with the given ``username``, or None.

    NOTE(review): the previous docstring said "by email address", but
    the query filters on ``cls.username`` -- documented accordingly.
    """
    matches = cls.query().filter(cls.username == username)
    return matches.first()
4,598 | def __collectGarbage(self, ignored=None):
del ignored
collected = []
level0, level1, level2 = gc.get_count()
if level0 > 0:
collected.append(gc.collect(0))
if level1 > 0:
collected.append(gc.collect(1))
if level2 > 0... | Collects garbage |
def _listChunks(self):
    """Return the sorted indices of all chunks stored in ``self.dataDir``.

    File names that do not map to a chunk index (the mapping helper
    returns None for them) are ignored.
    """
    indices = (
        ChunkedFileStore._fileNameToChunkIndex(name)
        for name in os.listdir(self.dataDir)
    )
    return sorted(idx for idx in indices if idx is not None)
:return: sorted list of available chunk indices |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.