Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
366,500 | def create_tipo_equipamento(self):
return TipoEquipamento(
self.networkapi_url,
self.user,
self.password,
self.user_ldap) | Get an instance of tipo_equipamento services facade. |
366,501 | def default_interface(ifconfig=None, route_output=None):
global Parser
return Parser(ifconfig=ifconfig)._default_interface(route_output=route_output) | Return just the default interface device dictionary.
:param ifconfig: For mocking actual command output
:param route_output: For mocking actual command output |
366,502 | def update(self, data, ed):
if ed == :
encrypted_blocks = b
self.cache += data
if len(self.cache) < self.blocksize:
return b
for i in range(0, len(self.cache)-self.blocksize+1, self.blocksize):
self.IV = self.codebook.encry... | Processes the given ciphertext/plaintext
Inputs:
data: raw string of any length
ed: 'e' for encryption, 'd' for decryption
Output:
processed raw string block(s), if any
When the supplied data is not a multiple of the blocksize
of the cipher, then... |
366,503 | def is_vhost_alive(self, vhost):
return self._api_get(.format(
urllib.parse.quote_plus(vhost)
)) | Declares a test queue, then publishes and consumes a message.
Intended for use by monitoring tools.
:param vhost: The vhost name to check
:type vhost: str |
366,504 | def write(self, pkt):
if isinstance(pkt, bytes):
if not self.header_present:
self._write_header(pkt)
self._write_packet(pkt)
else:
pkt = pkt.__iter__()
for p in pkt:
if not self.header_present:
s... | Writes a Packet or bytes to a pcap file.
:param pkt: Packet(s) to write (one record for each Packet), or raw
bytes to write (as one record).
:type pkt: iterable[Packet], Packet or bytes |
366,505 | def all_low_level_calls(self):
if self._all_low_level_calls is None:
self._all_low_level_calls = self._explore_functions(lambda x: x.low_level_calls)
return self._all_low_level_calls | recursive version of low_level calls |
366,506 | def sample(self, hash, limit=None, offset=None):
uri = self._uris[].format(hash)
params = {: limit, : offset}
return self.get_parse(uri, params) | Return an object representing the sample identified by the input hash, or an empty object if that sample is not found |
366,507 | def _write_image_description(self):
if (not self._datashape or self._datashape[0] == 1 or
self._descriptionoffset <= 0):
return
colormapped = self._colormap is not None
if self._imagej:
isrgb = self._shape[-1] in (3, 4)
description = ... | Write metadata to ImageDescription tag. |
366,508 | def GetTimeOfDay(self):
normalized_timestamp = self._GetNormalizedTimestamp()
if normalized_timestamp is None:
return None, None, None
_, hours, minutes, seconds = self._GetTimeValues(normalized_timestamp)
return hours, minutes, seconds | Retrieves the time of day represented by the date and time values.
Returns:
tuple[int, int, int]: hours, minutes, seconds or (None, None, None)
if the date and time values do not represent a time of day. |
366,509 | def cubic_interpolate(x, y, precision=250, **kwargs):
n = len(x) - 1
a = y
b = [0] * (n + 1)
c = [0] * (n + 1)
d = [0] * (n + 1)
m = [0] * (n + 1)
z = [0] * (n + 1)
h = [x2 - x1 for x1, x2 in zip(x, x[1:])]
k = [a2 - a1 for a1, a2 in zip(a, a[1:])]
g = [k[i] / h[i... | Interpolate x, y using a cubic algorithm
https://en.wikipedia.org/wiki/Spline_interpolation |
366,510 | def voltage_based(feedin, generators, curtailment_timeseries, edisgo,
curtailment_key, **kwargs):
voltage_threshold = pd.Series(kwargs.get(, 0.0),
index=curtailment_timeseries.index)
solver = kwargs.get(, )
combined_analysis = kwargs.get(, False)
... | Implements curtailment methodology 'voltage-based'.
The curtailment that has to be met in each time step is allocated depending
on the exceedance of the allowed voltage deviation at the nodes of the
generators. The higher the exceedance, the higher the curtailment.
The optional parameter `voltage_thre... |
366,511 | def _unlock(self):
self.keyring_key = getpass.getpass(
)
try:
ref_pw = self.get_password(, )
assert ref_pw ==
except AssertionError:
self._lock()
raise ValueError("Incorrect Password") | Unlock this keyring by getting the password for the keyring from the
user. |
366,512 | def hook(self, pc):
def decorator(f):
self.add_hook(pc, f)
return f
return decorator | A decorator used to register a hook function for a given instruction address.
Equivalent to calling :func:`~add_hook`.
:param pc: Address of instruction to hook
:type pc: int or None |
366,513 | def _PrPz(r0, z0, r1, z1, r2, z2, r3, z3):
Pr = ((r0*z1 - z0*r1)*(r2 - r3) - (r0 - r1)*(r2*z3 - r3*z2)) / \
((r0 - r1)*(z2 - z3) - (z0 - z1)*(r2-r3))
Pz = ((r0*z1 - z0*r1)*(z2 - z3) - (z0 - z1)*(r2*z3 - r3*z2)) / \
((r0 - r1)*(z2 - z3) - (z0 - z1)*(r2-r3))
... | Intersection point for infinite lines.
Parameters
----------
r0 : float
z0 : float
r1 : float
z1 : float
r2 : float
z2 : float
r3 : float
z3 : float
Returns
----------
Pr : float
Pz : float
hit : bool |
366,514 | def validate_required_attributes(fully_qualified_name: str, spec: Dict[str, Any],
*attributes: str) -> List[RequiredAttributeError]:
return [
RequiredAttributeError(fully_qualified_name, spec, attribute)
for attribute in attributes
if attribute not in sp... | Validates to ensure that a set of attributes are present in spec |
366,515 | def get(self, id, domain=):
assert isinstance(id, (str, unicode))
assert isinstance(domain, (str, unicode))
if self.defines(id, domain):
return self.messages[domain][id]
if self.fallback_catalogue is not None:
return self.fallback_catalogue.get(id, doma... | Gets a message translation.
@rtype: str
@return: The message translation |
366,516 | async def get_pypi_version(self):
self._version_data["beta"] = self.beta
self._version_data["source"] = "PyPi"
info_version = None
last_release = None
try:
async with async_timeout.timeout(5, loop=self.loop):
response = await self.session.ge... | Get version published to PyPi. |
366,517 | def matched_interpreters(interpreters, constraints):
for interpreter in interpreters:
if any(interpreter.identity.matches(filt) for filt in constraints):
TRACER.log("Constraints on interpreters: %s, Matching Interpreter: %s"
% (constraints, interpreter.binary), V=3)
yield interpret... | Given some filters, yield any interpreter that matches at least one of them.
:param interpreters: a list of PythonInterpreter objects for filtering
:param constraints: A sequence of strings that constrain the interpreter compatibility for this
pex. Each string uses the Requirement-style format, e.g. 'CPython>=... |
366,518 | def from_wif_or_ewif_file(path: str, password: Optional[str] = None) -> SigningKeyType:
with open(path, ) as fh:
wif_content = fh.read()
regex = compile(, MULTILINE)
match = search(regex, wif_content)
if not match:
raise Exception()
... | Return SigningKey instance from Duniter WIF or EWIF file
:param path: Path to WIF of EWIF file
:param password: Password needed for EWIF file |
366,519 | def FlagsIntoString(self):
s =
for flag in self.FlagDict().values():
if flag.value is not None:
s += flag.serialize() +
return s | Returns a string with the flags assignments from this FlagValues object.
This function ignores flags whose value is None. Each flag
assignment is separated by a newline.
NOTE: MUST mirror the behavior of the C++ CommandlineFlagsIntoString
from http://code.google.com/p/google-gflags
Returns:
... |
366,520 | def merge_entries_with_common_prefixes(list_, number_of_needed_commons=6):
prefix = None
lists_to_merge = []
for entry in list_:
newPrefix,number = split_string_at_suffix(entry, numbers_into_suffix=True)
if entry == newPrefix or prefix != newPrefix:
lists_to_merge.appen... | Returns a list where sequences of post-fixed entries are shortened to their common prefix.
This might be useful in cases of several similar values,
where the prefix is identical for several entries.
If less than 'number_of_needed_commons' are identically prefixed, they are kept unchanged.
Example: ['tes... |
366,521 | def sample_qubo(self, qubo, **params):
linear = {i1: v for (i1, i2), v in uniform_iterator(qubo) if i1 == i2}
quadratic = {(i1, i2): v for (i1, i2), v in uniform_iterator(qubo) if i1 != i2}
return self._sample(, linear, quadratic, params) | Sample from the specified QUBO.
Args:
qubo (dict of (int, int):float): Coefficients of a quadratic unconstrained binary
optimization (QUBO) model.
**params: Parameters for the sampling method, specified per solver.
Returns:
:obj:`Future`
Exa... |
366,522 | def run_coral(clus_obj, out_dir, args):
if not args.bed:
raise ValueError("This module needs the bed file output from cluster subcmd.")
workdir = op.abspath(op.join(args.out, ))
safe_dirs(workdir)
bam_in = op.abspath(args.bam)
bed_in = op.abspath(args.bed)
reference = op.abspath(arg... | Run some CoRaL modules to predict small RNA function |
366,523 | def institutes(context, institute_id, json):
LOG.info("Running scout view institutes")
adapter = context.obj[]
if institute_id:
institute_objs = []
institute_obj = adapter.institute(institute_id)
if not institute_obj:
LOG.info("Institute %s does not exost", institut... | Show all institutes in the database |
366,524 | def convert_flatten(net, node, module, builder):
input_name, output_name = _get_input_output_name(net, node)
name = node[]
mode = 0
builder.add_flatten(name, mode, input_name, output_name) | Convert a flatten layer from mxnet to coreml.
Parameters
----------
network: net
A mxnet network object.
layer: node
Node to convert.
module: module
An module for MXNet
builder: NeuralNetworkBuilder
A neural network builder object. |
366,525 | def create_option_group(name, engine_name, major_engine_version,
option_group_description, tags=None, region=None,
key=None, keyid=None, profile=None):
res = __salt__[](name, tags, region, key, keyid,
profile)
... | Create an RDS option group
CLI example to create an RDS option group::
salt myminion boto_rds.create_option_group my-opt-group mysql 5.6 \
"group description" |
366,526 | def normalise_tensor(tensor):
tensor_norm = np.linalg.norm(tensor)
return tensor / tensor_norm, tensor_norm | Normalise the tensor by dividing it by its norm, defined such that
np.sqrt(X:X) |
366,527 | def xmoe2_v1_l4k_local_only():
hparams = xmoe2_v1_l4k()
hparams.decoder_layers = [
"local_att" if l == "att" else l for l in hparams.decoder_layers]
return hparams | With sequence length 4096. |
366,528 | def iterchunks(data, chunksize):
offt = 0
while offt < len(data):
yield data[offt:offt+chunksize]
offt += chunksize | iterate chunks of data |
366,529 | def move(self, module_path, configuration=True, module=True):
if module + configuration < 1:
raise ValueError("You must specify to move at least the module or the configuration of the submodule")
module_checkout_path = self._to_relative_path(self.repo, module_path)
... | Move the submodule to a another module path. This involves physically moving
the repository at our current path, changing the configuration, as well as
adjusting our index entry accordingly.
:param module_path: the path to which to move our module in the parent repostory's working tree,
... |
366,530 | def select_join(self, table, field, join_table, join_field):
sql = "select {table}.{field} from {table} left join {join_table} "\
"on {table}.{field}={join_table}.{join_field} "\
"where {join_table}.{join_field} is null;".format(table=table,
... | .. :py:method::
Usage::
>>> select_join('hospital', 'id', 'department', 'hospid')
select hospital.id from hospital left join department on hospital.id=department.hospid where department.hospid is null; |
366,531 | def addFilter(self, layer_id, where=None, outFields="*"):
import copy
f = copy.deepcopy(self._filterTemplate)
f[] = layer_id
f[] = outFields
if where is not None:
f[] = where
if f not in self._filter:
self._filter.append(f) | adds a layer definition filter |
366,532 | def EnablePlugins(self, plugin_includes):
super(SyslogParser, self).EnablePlugins(plugin_includes)
self._plugin_by_reporter = {}
for plugin in self._plugins:
self._plugin_by_reporter[plugin.REPORTER] = plugin | Enables parser plugins.
Args:
plugin_includes (list[str]): names of the plugins to enable, where None
or an empty list represents all plugins. Note that the default plugin
is handled separately. |
366,533 | def get_dist(self):
egg_info = self.egg_info_path().rstrip()
base_dir = os.path.dirname(egg_info)
metadata = pkg_resources.PathMetadata(base_dir, egg_info)
dist_name = os.path.splitext(os.path.basename(egg_info))[0]
return pkg_resources.Distribution(
os.path.... | Return a pkg_resources.Distribution built from self.egg_info_path |
366,534 | def _partition_species(composition, max_components=2):
def _partition(collection):
if len(collection) == 1:
yield [collection]
return
first = collection[0]
for smaller in _partition(collection[1:]):
... | Private method to split a list of species into
various partitions. |
366,535 | def chambers(self):
return set(sorted([d.chamber for d in self.documents.all()])) | Return distinct chambers. You probably want to prefetch
documents__chamber before calling that. |
366,536 | def rmdir(store, path=None):
path = normalize_storage_path(path)
if hasattr(store, ):
store.rmdir(path)
else:
_rmdir_from_keys(store, path) | Remove all items under the given path. If `store` provides a `rmdir` method,
this will be called, otherwise will fall back to implementation via the
`MutableMapping` interface. |
366,537 | def get_system_time():
minion-id
now = win32api.GetLocalTime()
meridian =
hours = int(now[4])
if hours == 12:
meridian =
elif hours == 0:
hours = 12
elif hours > 12:
hours = hours - 12
meridian =
return .format(hours, now[5], now[6], meridian) | Get the system time.
Returns:
str: Returns the system time in HH:MM:SS AM/PM format.
CLI Example:
.. code-block:: bash
salt 'minion-id' system.get_system_time |
366,538 | def init_process(self):
self.device = Device.objects.filter(protocol__daq_daemon=1, active=1, id=self.device_id).first()
if not self.device:
logger.error("Error init_process for %s" % self.device_id)
return False
self.dt_set = min(self.dt_set, self.device.polling... | init a standard daq process for a single device |
366,539 | def create_html(api_key, attrs):
gif = get_gif(api_key, attrs[])
if not in attrs.keys():
attrs[] = .format(gif[][])
html_out = .format(gif[][])
html_out += .format(
gif[][][][],
attrs[])
html_out +=
return html_out | Returns complete html tag string. |
366,540 | def doit(self, classes=None, recursive=True, **kwargs):
in_classes = (
(classes is None) or
any([isinstance(self, cls) for cls in classes]))
if in_classes:
new = self._doit(**kwargs)
else:
new = self
if (new == self) or recursive:
... | Rewrite (sub-)expressions in a more explicit form
Return a modified expression that is more explicit than the original
expression. The definition of "more explicit" is decided by the
relevant subclass, e.g. a :meth:`Commutator <.Commutator.doit>` is
written out according to its definiti... |
366,541 | def get_role(self, account_id, role_id):
url = ACCOUNTS_API.format(account_id) + "/roles/{}".format(role_id)
return CanvasRole(data=self._get_resource(url)) | Get information about a single role, for the passed Canvas account ID.
https://canvas.instructure.com/doc/api/roles.html#method.role_overrides.show |
366,542 | def _action(self, action):
if action <= 1:
self._outA()
if action <= 2:
self.theA = self.theB
if self.theA == ""\n\\/(,=:[?!&|/\\\n':
raise UnterminatedRegularExpression()
self._outA()
self.theB = s... | do something! What you do is determined by the argument:
1 Output A. Copy B to A. Get the next B.
2 Copy B to A. Get the next B. (Delete A).
3 Get the next B. (Delete B).
action treats a string as a single character. Wow!
action recognizes a regular expressio... |
366,543 | def stationary_distribution_from_backward_iteration(P, eps=1e-15):
r
A = P.transpose()
mu = 1.0 - eps
x0 = np.ones(P.shape[0])
y = backward_iteration(A, mu, x0)
pi = y / y.sum()
return pi | r"""Fast computation of the stationary vector using backward
iteration.
Parameters
----------
P : (M, M) scipy.sparse matrix
Transition matrix
eps : float (optional)
Perturbation parameter for the true eigenvalue.
Returns
-------
pi : (M,) ndarray
Stationary vec... |
366,544 | def main():
character1 = Character(, str=4,int=7,sta=50)
character2 = Character(, str=6,int=6,sta=70)
print(, character1)
print(, character2)
b = Battle(character1, character2)
print(b)
print(, character1)
print(, character2) | Prototype to see how an RPG simulation might be used
in the AIKIF framework.
The idea is to build a simple character and run a simulation
to see how it succeeds in a random world against another players
character
character
stats
world
locations |
366,545 | def handle(self, *args, **options):
verbosity = int(options.get(, 1))
timeout = options.get()
queue = get_queue(options.get())
job = queue.enqueue_call(args[0], args=args[1:], timeout=timeout)
if verbosity:
print( % job.id) | Queues the function given with the first argument with the
parameters given with the rest of the argument list. |
366,546 | def load_tasks_from_file(self, file_path):
file_name, module_path, objects = Loader.import_custom_python_file(file_path)
result = {}
for entry in objects:
try:
if issubclass(entry, BaseTask):
if entry.__name__ != BaseTask.__name__ and entr... | Imports specified python module and returns subclasses of BaseTask from it
:param file_path: a fully qualified file path for a python module to import CustomTasks from
:type file_path: `str`
:return: a dict of CustomTasks, where key is CustomTask.name, and value is a CustomClass task itself
... |
366,547 | def get_action(self, action):
func_name = action.replace(, )
if not hasattr(self, func_name):
return func | Get a callable action. |
366,548 | def _create_cv_ensemble(self, base_ensemble, idx_models_included, model_names=None):
fitted_models = numpy.empty(len(idx_models_included), dtype=numpy.object)
for i, idx in enumerate(idx_models_included):
model_name = self.base_estimators[idx][0] if model_names is None else model_na... | For each selected base estimator, average models trained on each fold |
366,549 | def f_to_dict(self, fast_access=False, short_names=False, nested=False,
copy=True, with_links=True):
return self._nn_interface._to_dict(self, fast_access=fast_access,
short_names=short_names,
nested=... | Returns a dictionary with pairings of (full) names as keys and instances/values.
:param fast_access:
If True, parameter values are returned instead of the instances.
Works also for results if they contain a single item with the name of the result.
:param short_names:
... |
366,550 | def _get_next_occurrence(haystack, offset, needles):
firstcharmap = dict([(n[0], n) for n in needles])
firstchars = firstcharmap.keys()
while offset < len(haystack):
if haystack[offset] in firstchars:
possible_needle = firstcharmap[haystack[... | Find next occurence of one of the needles in the haystack
:return: tuple of (index, needle found)
or: None if no needle was found |
366,551 | def unpack(self, key, value):
value, freshness = value
if not self.is_fresh(freshness):
raise KeyError(.format(key))
return value | Unpack and return value only if it is fresh. |
366,552 | def _getBasicOrbit(self, orbit=None):
if not self.sat.empty:
self._calcOrbits()
if orbit is not None:
if orbit == -1:
self.sat.data = self._fullDayData[self._orbit_break... | Load a particular orbit into .data for loaded day.
Parameters
----------
orbit : int
orbit number, 1 indexed, negative indexes allowed, -1 last orbit
Note
----
A day of data must be loaded before this routine functions properly.
If the last orbit of ... |
366,553 | def _add_workflow(mcs, field_name, state_field, attrs):
attrs[field_name] = StateProperty(state_field.workflow, field_name) | Attach a workflow to the attribute list (create a StateProperty). |
366,554 | def unmasked_for_shape_and_pixel_scale(cls, shape, pixel_scale, invert=False):
mask = np.full(tuple(map(lambda d: int(d), shape)), False)
if invert: mask = np.invert(mask)
return cls(array=mask, pixel_scale=pixel_scale) | Setup a mask where all pixels are unmasked.
Parameters
----------
shape : (int, int)
The (y,x) shape of the mask in units of pixels.
pixel_scale: float
The arc-second to pixel conversion factor of each pixel. |
366,555 | def appendbcolz(table, obj, check_names=True):
import bcolz
import numpy as np
if isinstance(obj, string_types):
ctbl = bcolz.open(obj, mode=)
else:
assert hasattr(obj, ) and hasattr(obj, ), \
% obj
ctbl = obj
dtype = ctbl.dtype
it = iter(table)
... | Append data into a bcolz ctable. The `obj` argument can be either an
existing ctable or the name of a directory were an on-disk ctable is
stored.
.. versionadded:: 1.1.0 |
366,556 | def filesize(value):
suffix =
for unit in , , , , , , , :
if abs(value) < 1024.0:
return "%3.1f%s%s" % (value, unit, suffix)
value /= 1024.0
return "%.1f%s%s" % (value, , suffix) | Display a human readable filesize |
366,557 | def _evaluatephiforces(Pot,R,z,phi=None,t=0.,v=None):
isList= isinstance(Pot,list)
nonAxi= _isNonAxi(Pot)
if nonAxi and phi is None:
raise PotentialError("The (list of) Potential instances is non-axisymmetric, but you did not provide phi")
dissipative= _isDissipative(Pot)
if dissipative... | Raw, undecorated function for internal use |
366,558 | def _enable_logpersist(self):
if not self._ad.is_rootable:
return
logpersist_warning = (
)
if not self._ad.adb.has_shell_command():
logging.warning(logpersist_warning, self)
r... | Attempts to enable logpersist daemon to persist logs. |
366,559 | def join(self, userId, groupId, groupName):
desc = {
"name": "CodeSuccessReslut",
"desc": " http 成功返回结果",
"fields": [{
"name": "code",
"type": "Integer",
"desc": "返回码,200 为正常。"
}, {
"name": ... | 将用户加入指定群组,用户将可以收到该群的消息,同一用户最多可加入 500 个群,每个群最大至 3000 人。 方法
@param userId:要加入群的用户 Id,可提交多个,最多不超过 1000 个。(必传)
@param groupId:要加入的群 Id。(必传)
@param groupName:要加入的群 Id 对应的名称。(必传)
@return code:返回码,200 为正常。
@return errorMessage:错误信息。 |
366,560 | def datediff(end, start):
sc = SparkContext._active_spark_context
return Column(sc._jvm.functions.datediff(_to_java_column(end), _to_java_column(start))) | Returns the number of days from `start` to `end`.
>>> df = spark.createDataFrame([('2015-04-08','2015-05-10')], ['d1', 'd2'])
>>> df.select(datediff(df.d2, df.d1).alias('diff')).collect()
[Row(diff=32)] |
366,561 | def _needSwapWH(self, oldDirection, newDirection ):
if abs(newDirection - oldDirection) == 0: return False
if abs(newDirection - oldDirection) % 180 == 0: return False
if abs(newDirection - oldDirection) % 90 == 0: return True
return False | !
\~english
return screen direction status
@return Boolean
@note No need to rotate if the screen orientation is 0 degrees and 180 degrees
\~chinese
返回屏幕方向状态
@return 布尔值
@note 如果屏幕方向是0度和180度就不需要旋转 |
366,562 | def learnPlaceCode(self,
runs,
dir=1,
periodic=False,
recurrent=True,
randomSpeed=False,
learnRecurrent=False):
self.plotting = False
self.simulate(10, 1, 1, 0, envelope=... | Traverses a sinusoidal trajectory across the environment, learning during
the process. A pair of runs across the environment (one in each direction)
takes 10 seconds if in a periodic larger environment, and 4 seconds in a
smaller nonperiodic environment.
:param runs: How many runs across the environmen... |
366,563 | def _setBorder(self, border, doc, xref):
return _fitz.Link__setBorder(self, border, doc, xref) | _setBorder(self, border, doc, xref) -> PyObject * |
366,564 | def dump_to_string(self, cnf, **opts):
tree = container_to_etree(cnf, **opts)
buf = BytesIO()
etree_write(tree, buf)
return buf.getvalue() | :param cnf: Configuration data to dump
:param opts: optional keyword parameters
:return: string represents the configuration |
366,565 | def system_status(self):
flag, timestamp, status = self._query((, (Integer, Float, Integer)))
return {
: datetime.datetime.fromtimestamp(timestamp),
: STATUS_TEMPERATURE[status & 0xf],
: STATUS_MAGNET[(status >> 4) & 0xf... | The system status codes. |
366,566 | def _prepare_record(self, group):
parents, dtypes = group.parents, group.types
if parents is None:
if group.data_location == v23c.LOCATION_ORIGINAL_FILE:
stream = self._file
else:
stream = self._tempfile
grp = group
... | compute record dtype and parents dict for this group
Parameters
----------
group : dict
MDF group dict
Returns
-------
parents, dtypes : dict, numpy.dtype
mapping of channels to records fields, records fiels dtype |
366,567 | def idna_encode (host):
if host and isinstance(host, unicode):
try:
host.encode()
return host, False
except UnicodeError:
uhost = host.encode().decode()
return uhost, uhost != host
return host, False | Encode hostname as internationalized domain name (IDN) according
to RFC 3490.
@raise: UnicodeError if hostname is not properly IDN encoded. |
366,568 | def _handle_clear(self, load):
s key.
Clear payload received with command %scmdcmd__master_statsfunsend_clearmaster_stats']:
stats = salt.utils.event.update_stats(self.stats, start, load)
self._post_stats(stats)
return ret | Process a cleartext command
:param dict load: Cleartext payload
:return: The result of passing the load to a function in ClearFuncs corresponding to
the command specified in the load's 'cmd' key. |
366,569 | def _set_vlan_add(self, v, load=False):
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=vlan_add.vlan_add, is_container=, presence=False, yang_name="vlan-add", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, exten... | Setter method for vlan_add, mapped from YANG variable /routing_system/evpn_config/evpn/evpn_instance/vlan/vlan_add (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vlan_add is considered as a private
method. Backends looking to populate this variable should
do... |
366,570 | def delete(self, skip_mapping=False):
for index in self.indexes:
index.destroy()
if not skip_mapping:
index.create_mapping() | Delete all entries from ElasticSearch. |
366,571 | def getAllKws(self):
kws_ele = []
kws_bl = []
for ele in self.all_elements:
if ele == or ele == :
continue
elif self.getElementType(ele).lower() == u:
kws_bl.append(ele)
else:
kws_ele.append(ele)
... | extract all keywords into two categories
kws_ele: magnetic elements
kws_bl: beamline elements
return (kws_ele, kws_bl) |
366,572 | def lists(self):
if self._lists is None:
self._lists = Lists(self.links[], pypump=self._pump)
return self._lists | :class:`Lists feed <pypump.models.feed.Lists>` with all lists
owned by the person.
Example:
>>> for list in pump.me.lists:
... print(list)
...
Acquaintances
Family
Coworkers
Friends |
366,573 | def get_labels(obj):
if Clustering.is_pyclustering_instance(obj.model):
return obj._labels_from_pyclusters
else:
return obj.model.labels_ | Retrieve the labels of a clustering.rst object
:param obj: the clustering.rst object
:return: the resulting labels |
366,574 | def ReadAPIAuditEntries(self,
username=None,
router_method_names=None,
min_timestamp=None,
max_timestamp=None,
cursor=None):
query =
conditions = []
values = []
where... | Returns audit entries stored in the database. |
366,575 | def chunks(lst, size):
for i in xrange(0, len(lst), size):
yield lst[i:i + size] | Yield successive n-sized chunks from lst. |
366,576 | def DisjoinCalendars(self, cutoff):
def TruncatePeriod(service_period, start, end):
service_period.start_date = max(service_period.start_date, start)
service_period.end_date = min(service_period.end_date, end)
dates_to_delete = []
for k in service_period.date_exceptions:
... | Forces the old and new calendars to be disjoint about a cutoff date.
This truncates the service periods of the old schedule so that service
stops one day before the given cutoff date and truncates the new schedule
so that service only begins on the cutoff date.
Args:
cutoff: The cutoff date as a... |
366,577 | def complete(
text: str, kw_cache: atom.Atom["PMap[int, Keyword]"] = __INTERN
) -> Iterable[str]:
assert text.startswith(":")
interns = kw_cache.deref()
text = text[1:]
if "/" in text:
prefix, suffix = text.split("/", maxsplit=1)
results = filter(
lambda kw: (kw.ns ... | Return an iterable of possible completions for the given text. |
366,578 | def xover_gen(self, range=None):
args = None
if range is not None:
args = utils.unparse_range(range)
code, message = self.command("XOVER", args)
if code != 224:
raise NNTPReplyError(code, message)
for line in self.info_gen(code, message):
... | Generator for the XOVER command.
The XOVER command returns information from the overview database for
the article(s) specified.
<http://tools.ietf.org/html/rfc2980#section-2.8>
Args:
range: An article number as an integer, or a tuple of specifying a
range o... |
366,579 | def __merge_by_signle_link(self):
minimum_single_distance = float();
indexes = None;
for index_cluster1 in range(0, len(self.__clusters)):
for index_cluster2 in range(index_cluster1 + 1, len(self.__clusters)):
candidate_minimum_distance = se... | !
@brief Merges the most similar clusters in line with single link type. |
async def start_serving(self,
                        connection_config: ConnectionConfig,
                        loop: Optional[asyncio.AbstractEventLoop] = None) -> None:
    """Start serving this endpoint and block until it is ready.

    Convenience wrapper: kicks off serving without waiting, then awaits
    readiness, so callers get a single awaitable "serve and be ready" step.

    Args:
        connection_config: configuration describing where/how to listen.
        loop: optional event loop to serve on; defaults to the running loop.
    """
    # Fire-and-forget start, then await the readiness signal.
    self.start_serving_nowait(connection_config, loop)
    await self.wait_until_serving()
366,581 | def _simple_chart(x=None, y=None, name=None, color=None, width=None, dash=None, opacity=None,
mode=, yaxis=1, fill=None, text=, style=,
markersize=6):
assert x is not None or y is not None, "x or y must be something"
if y is None:
y = x
x = None
if x ... | Draws connected dots.
Parameters
----------
x : array-like, optional
y : array-like, optional
label : array-like, optional
Returns
-------
Chart |
366,582 | def symlink_list(self, load):
if in load:
load.pop()
ret = {}
if not in load:
return {}
if not isinstance(load[], six.string_types):
load[] = six.text_type(load[])
for fsb in self.backends(load.pop(, None)):
... | Return a list of symlinked files and dirs |
366,583 | def _get_other_names(self, line):
m = re.search(self.compound_regex[][0], line, re.IGNORECASE)
if m:
self.other_names.append(m.group(1).strip()) | Parse and extract any other names that might be recorded for the compound
Args:
line (str): line of the msp file |
366,584 | def is_deaf(self):
if not self.is_consonant():
return False
if self.letter in self.forever_deaf:
return True
if self.letter in self.forever_sonorus:
return False
if self.__forsed_sonorus:
return False
if self.__forsed_sonor... | Глухая ли согласная. |
def type(self):
    """Whether this node is a method or a plain function.

    :returns: ``"method"`` if the first argument is named ``self`` and the
        enclosing scope is a class, ``"function"`` otherwise.
    :rtype: str
    """
    # A callable is only a method when it both takes `self` first AND is
    # defined directly inside a class body; collapse the nested checks.
    own_args = self.args.args
    if (own_args
            and own_args[0].name == "self"
            and isinstance(self.parent.scope(), ClassDef)):
        return "method"
    return "function"
366,586 | def compute_freq_cross(self, csd, asd, output=):
if output == :
coh_list = []
for i in range(len(csd)):
dat = ChanFreq()
dat.data = empty(1, dtype=)
dat.data[0] = empty((1, csd[i][].number_of()[0]),
dtype=)... | Compute cross-spectrum, gain, phase shift and/or coherence.
Parameters
----------
csd : list of dict with 'data' key as instance of ChanFreq
cross-spectral density, one channel
asd : list of dict with 'data' key as instance of ChanFreq
autospectral density, two c... |
366,587 | def _add_dict_values(self, d1, d2):
if d1 is None and d2 is None:
return None
d1 = d1 or {}
d2 = d2 or {}
added = {}
for key in set(list(d1.keys()) + list(d2.keys())):
added[key] = dict(d1.get(key, {}), **(d2.get(key, {})))
return added | Merges the values of two dictionaries, which are expected to be dictionaries, e.g
d1 = {'a': {'x': pqr}}
d2 = {'a': {'y': lmn}, 'b': {'y': rst}}
will return: {'a': {'x': pqr, 'y': lmn}, 'b': {'y': rst}}.
Collisions of the keys of the sub-dictionaries are not checked. |
366,588 | def geocode(
self,
query,
exactly_one=True,
timeout=DEFAULT_SENTINEL,
limit=None,
typeahead=False,
language=None,
):
query = self.format_string % query
params = self._geocode_params(query)
params[] =... | Return a location point by address.
:param str query: The address or query you wish to geocode.
:param bool exactly_one: Return one result or a list of results, if
available.
:param int timeout: Time, in seconds, to wait for the geocoding service
to respond before rais... |
366,589 | def save(self, path, table_format=, sep=,
table_ext=None, float_format=):
if type(path) is str:
path = path.rstrip()
path = Path(path)
path.mkdir(parents=True, exist_ok=True)
para_file_path = path / DEFAULT_FILE_NAMES[]
file_para = dict()
... | Saving the system to path
Parameters
----------
path : pathlib.Path or string
path for the saved data (will be created if necessary, data
within will be overwritten).
table_format : string
Format to save the DataFrames:
- 'pkl' : Bi... |
366,590 | def setText(self, text):
self._text = text
if text:
if self._textItem is None:
self._textItem = QGraphicsTextItem()
self._textItem.setParentItem(self)
self._textItem.setPlainText(text)
elif self._textItem:
self.scene... | Sets the text for this connection to the inputed text.
:param text | <str> |
366,591 | def _rt_parse_execs(self, statement, element, mode, lineparser):
if mode == "insert":
enew, start, end = self.xparser.parse_signature(statement, element, element)
if enew is not None:
enew.start, enew.end = lineparser.absolute_charindex(statement, start,... | As part of parse_line(), checks for new executable declarations in the statement. |
async def write(self, data):
    """Send a single character to the IP device.

    :param data: a one-character string; its ordinal is sent as one byte.
        NOTE(review): values with ord(data) > 255 raise ValueError — confirm
        callers only pass ASCII/Latin-1 characters.
    :return: None
    """
    # Encode the character as a single raw byte before writing.
    payload = bytes([ord(data)])
    self.writer.write(payload)
    await self.writer.drain()
366,593 | def by_median_home_value(self,
lower=-1,
upper=2 ** 31,
zipcode_type=ZipcodeType.Standard,
sort_by=SimpleZipcode.median_home_value.name,
ascending=False,
... | Search zipcode information by median home value. |
366,594 | def reblog(self, blogname, **kwargs):
url = "/v2/blog/{}/post/reblog".format(blogname)
valid_options = [, , ] + self._post_valid_options(kwargs.get(, None))
if in kwargs and kwargs[]:
kwargs[] = ",".join(kwargs[])
return self.send_api_request(, url, kw... | Creates a reblog on the given blogname
:param blogname: a string, the url of the blog you want to reblog to
:param id: an int, the post id that you are reblogging
:param reblog_key: a string, the reblog key of the post
:param comment: a string, a comment added to the reblogged post
... |
def sorted(list, cmp=None, reversed=False):
    """Return a sorted copy of *list*, leaving the input untouched.

    Args:
        list: any iterable to sort (note: shadows the builtin name, kept
            for backward compatibility with existing callers).
        cmp: optional two-argument comparator returning <0 / 0 / >0.
        reversed: when True, return the result in descending order.

    Returns:
        A new sorted ``list``.
    """
    # Local import keeps the fix self-contained for this block.
    from functools import cmp_to_key
    result = [x for x in list]
    if cmp is None:
        result.sort()
    else:
        # Python 3's list.sort() takes no positional comparator; the old
        # `result.sort(cmp)` call raised TypeError on every invocation.
        result.sort(key=cmp_to_key(cmp))
    if reversed:
        result.reverse()
    return result
366,596 | def get_opts(opts):
defaults = {
: None,
: Opt.random,
: Opt.preset,
: Opt.preset,
: Opt.preset,
: Opt.preset,
}
_opts = defaults.copy()
if opts is None:
opts = dict()
try:
for key, val in opts.copy().items():
if key ==... | Validate options and apply defaults for options not supplied.
:param opts: dictionary mapping str->str.
:return: dictionary mapping str->Opt. All possible keys are present. |
def is_dir_or_file(dirname):
    """Argparse type-checker: a path that must exist as a directory or file.

    Args:
        dirname: path string to validate.

    Returns:
        The unchanged path when it names an existing directory or file.

    Raises:
        argparse.ArgumentTypeError: when the path is neither.
    """
    # Guard-clause form of the original negated test (De Morgan).
    if os.path.isdir(dirname) or os.path.isfile(dirname):
        return dirname
    msg = "{0} is not a directory nor a file".format(dirname)
    raise argparse.ArgumentTypeError(msg)
366,598 | def prompt(self, error=):
if self.hidden:
return True
cmd = [self.label]
if self.default is not None:
cmd.append(.format(self.default))
elif not self.required:
cmd.append()
if self.type == :
cmd.append()
if self... | Prompts the user to set the value for this item.
:return <bool> | success |
366,599 | def generate(env):
global TeXLaTeXAction
if TeXLaTeXAction is None:
TeXLaTeXAction = SCons.Action.Action(TeXLaTeXFunction,
strfunction=TeXLaTeXStrFunction)
env.AppendUnique(LATEXSUFFIXES=SCons.Tool.LaTeXSuffixes)
generate_common(env)
from . import dvi
... | Add Builders and construction variables for TeX to an Environment. |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.