| Unnamed: 0 (int64, 0-389k) | code (string, lengths 26-79.6k) | docstring (string, lengths 1-46.9k) |
|---|---|---|
2,500 | def check_file_for_tabs(filename, verbose=True):
file_contains_tabs = False
with open(filename) as f:
lines = f.read().split("\n")
line_no = 1
for line in lines:
if "\t" in line:
file_contains_tabs = True
location = [
i for i in range(len(lin... | Identifies whether the file contains tabs and returns True if it
does. It also prints the location of the lines and columns. If
verbose is set to False, the location is not printed.
:param verbose: if true prints information about issues
:param filename: the filename
:rtype: True if there are tabs in th... |
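The code column above is truncated mid-comprehension; a minimal self-contained sketch of the same check (the exact report format is an assumption):

```python
def check_file_for_tabs(filename, verbose=True):
    # Hypothetical reconstruction of the truncated row above.
    file_contains_tabs = False
    with open(filename) as f:
        for line_no, line in enumerate(f, start=1):
            if "\t" in line:
                file_contains_tabs = True
                if verbose:
                    # 0-based columns at which tabs occur in this line.
                    location = [i for i, ch in enumerate(line) if ch == "\t"]
                    print(f"line {line_no}: tabs at columns {location}")
    return file_contains_tabs
```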
2,501 | def new_filename(data, file_kind, ext):
nb_key = file_kind + "number"
if nb_key not in data.keys():
data[nb_key] = -1
if not data["override externals"]:
file_exists = True
while file_exists:
data[nb_key] = data[nb_key] + 1
filename, name = _gen... | Returns an available filename.
:param file_kind: Name under which numbering is recorded, such as 'img' or
'table'.
:type file_kind: str
:param ext: Filename extension.
:type ext: str
:returns: (filename, rel_filepath) where filename is a path in the
filesystem ... |
2,502 | def indent(text: str, num: int = 2) -> str:
lines = text.splitlines()
return "\n".join(indent_iterable(lines, num=num)) | Indent a piece of text. |
2,503 | def ligolw_add(xmldoc, urls, non_lsc_tables_ok = False, verbose = False, contenthandler = DefaultContentHandler):
for n, url in enumerate(urls):
if verbose:
print >>sys.stderr, "%d/%d:" % (n + 1, len(urls)),
utils.load_url(url, verbose = verbose, xmldoc = xmldoc, contenthandler = contenthandler)
if not ... | An implementation of the LIGO LW add algorithm. urls is a list of
URLs (or filenames) to load, xmldoc is the XML document tree to
which they should be added. |
2,504 | def load_extra_emacs_page_navigation_bindings():
registry = ConditionalRegistry(Registry(), EmacsMode())
handle = registry.add_binding
handle(Keys.ControlV)(scroll_page_down)
handle(Keys.PageDown)(scroll_page_down)
handle(Keys.Escape, )(scroll_page_up)
handle(Keys.PageUp)(scroll_page_up)
... | Key bindings, for scrolling up and down through pages.
These are separate bindings, because GNU readline doesn't have them. |
2,505 | def terminate_bits(self, payload):
data_capacity = tables.data_capacity[self.version][self.error][0]
if len(payload) > data_capacity:
raise ValueError(
)
if len(payload) == data_capacity:
return None
elif l... | This method adds zeros to the end of the encoded data so that the
encoded data is of the correct length. It returns a binary string
containing the bits to be added. |
2,506 | def status(self):
status_list = []
for platform in self._config.platforms.instances:
instance_name = platform['name']
driver_name = self.name
provisioner_name = self._config.provisioner.name
scenario_name = self._config.scenario.name
status... | Collects the instances state and returns a list.
.. important::
Molecule assumes all instances were created successfully by
Ansible; otherwise, Ansible would return an error on create. This
may prove to be a bad assumption. However, configuring Molecule's
drive... |
2,507 | def walk_egg(egg_dir):
walker = sorted_walk(egg_dir)
base, dirs, files = next(walker)
if 'EGG-INFO' in dirs:
dirs.remove('EGG-INFO')
yield base, dirs, files
for bdf in walker:
yield bdf | Walk an unpacked egg's contents, skipping the metadata directory |
2,508 | def _downloaded_filename(self):
link = self._link() or self._finder.find_requirement(self._req, upgrade=False)
if link:
lower_scheme = link.scheme.lower()
if lower_scheme == or lower_scheme == :
... | Download the package's archive if necessary, and return its
filename.
--no-deps is implied, as we have reimplemented the bits that would
ordinarily do dependency resolution. |
2,509 | def operator(func=None, *, pipable=False):
def decorator(func):
bases = (Stream,)
name = func.__name__
module = func.__module__
extra_doc = func.__doc__
doc = extra_doc or f
signature = inspect.signature(func)
parameters = li... | Create a stream operator from an asynchronous generator
(or any function returning an asynchronous iterable).
Decorator usage::
@operator
async def random(offset=0., width=1.):
while True:
yield offset + width * random.random()
Decorator usage for pipable opera... |
2,510 | def list_runids(s3_client, full_path):
listing_finished = False
run_ids_buffer = []
last_continuation_token = None
(bucket, prefix) = split_full_path(full_path)
while not listing_finished:
options = clean_dict({
'Bucket': bucket,
'Prefix': prefix,
... | Return a list of all run ids inside an S3 folder. It does not respect
S3 pagination (`MaxKeys`): it returns **all** keys from the bucket
and won't list any prefixes whose objects are archived to AWS Glacier
Arguments:
s3_client - boto3 S3 client (not service)
full_path - full valid S3 path to events (such as enri... |
2,511 | def configure_settings(settings, environment_settings=True):
changes = 1
iterations = 0
while changes:
changes = 0
app_names = [] + list(settings[])
if environment_settings:
app_names.append()
for app_name in app_names:
import django_autoconfig.c... | Given a settings object, run automatic configuration of all
the apps in INSTALLED_APPS. |
2,512 | def _do_refresh_session(self):
if self._session and self._last_session_refresh + self._loop_wait > time.time():
return False
if self._session:
try:
self._client.session.renew(self._session)
except NotFound:
self._session = Non... | :returns: `!True` if it had to create new session |
2,513 | def precedence(item):
try:
mro = item.__class__.__mro__
except AttributeError:
return PRECEDENCE["Atom"]
for i in mro:
n = i.__name__
if n in PRECEDENCE_FUNCTIONS:
return PRECEDENCE_FUNCTIONS[n](item)
elif n in PRECEDENCE_VALUES:
return PR... | Returns the precedence of a given object. |
2,514 | def flatten(nested_iterable):
if not isinstance(nested_iterable, (list, tuple)):
yield nested_iterable
else:
for i in nested_iterable:
if isinstance(i, (list, tuple)):
for j in flatten(i):
yield j
else:
yi... | Flattens arbitrarily nested lists/tuples.
Code partially taken from https://stackoverflow.com/a/10824420.
Parameters
----------
nested_iterable
A list or tuple of arbitrarily nested values.
Yields
------
any
Non-list and non-tuple values in `nested_iterable`. |
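A quick usage check of the generator above:

```python
nested = [1, (2, [3, [4]]), 5]
assert list(flatten(nested)) == [1, 2, 3, 4, 5]
# A non-list/tuple argument is yielded unchanged:
assert list(flatten("abc")) == ["abc"]
```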
2,515 | def remove_override(self, key):
keys = key.split()
if len(keys) > 1:
raise NotImplementedError
elif key in self.overrides:
del self.overrides[key]
self._uncache(key) | Remove a setting override, if one exists. |
2,516 | def spielman_wr(self, norm=True):
wr = []
for r in range(self.nsites):
num = 0
den = 0
for i in range(N_CODON):
j = scipy.intersect1d(scipy.where(CODON_SINGLEMUT[i]==True)[0],
scipy.where(CODON_NONSYN[i]==True)[0])
... | Returns a list of site-specific omega values calculated from the `ExpCM`.
Args:
`norm` (bool)
If `True`, normalize the `omega_r` values by the ExpCM
gene-wide `omega`.
Returns:
`wr` (list)
list of `omeg... |
2,517 | def _match_line(self, city_name, lines):
for line in lines:
toponym = line.split()[0]
if toponym.lower() == city_name.lower():
return line.strip()
return None | The lookup is case insensitive and returns the first matching line,
stripped.
:param city_name: str
:param lines: list of str
:return: str |
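Illustrative usage, assuming lines whose first whitespace-separated token is the toponym (the sample data is made up):

```python
lines = ["London 51.51 -0.13\n", "Paris 48.85 2.35\n"]
# _match_line("paris", lines)  -> "Paris 48.85 2.35"  (case-insensitive)
# _match_line("Berlin", lines) -> None
```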
2,518 | def get_default_config(self):
config = super(rmqHandler, self).get_default_config()
config.update({
: ,
: ,
})
return config | Return the default config for the handler |
2,519 | def setImage(self,
img,
autoRange=True,
useAutoLevels=None,
levels=None,
axes=None,
pos=None,
scale=None,
transform=None,
):
if hasattr(img, ) and im... | Set the image to be displayed in the widget.
================== ===========================================================================
**Arguments:**
img (numpy array) the image to be displayed. See :func:`ImageItem.setImage` and
*notes* below.
... |
2,520 | def regularizer(name, regularization_fn, name_filter=):
regex = re.compile(name_filter)
def fn(var_name, variable, phase):
if phase is pt.Phase.train and regex.search(var_name):
with tf.name_scope(None, name, [variable]):
loss = regularization_fn(variable)
if loss is not None:
tf.... | Wraps a regularizer in a parameter-function.
Args:
name: The name scope for this regularizer.
regularization_fn: A function with signature:
fn(variable) -> loss `Tensor` or `None`.
name_filter: A regex that will be used to filter variables by name.
Returns:
A parameter modification function ... |
2,521 | def concat_batch_variantcalls(items, region_block=True, skip_jointcheck=False):
items = [utils.to_single_data(x) for x in items]
batch_name = _get_batch_name(items, skip_jointcheck)
variantcaller = _get_batch_variantcaller(items)
if not variantcaller and all(d.get("vrn_file") for d in items):
... | CWL entry point: combine variant calls from regions into single VCF. |
2,522 | def get_mean_and_stddevs(self, sites, rup, dists, imt, stddev_types):
C = self.COEFFS[imt]
C_PGA = self.COEFFS[PGA()]
imt_per = 0 if imt.name == else imt.period
pga_rock = self._get_pga_on_rock(C_PGA, rup, dists)
mean = (self._get_magnitude_scaling_ter... | See :meth:`superclass method
<.base.GroundShakingIntensityModel.get_mean_and_stddevs>`
for spec of input and result values. |
2,523 | def process_large_file(self, local_file, parent):
file_content_sender = FileUploader(self.settings.config, self.settings.data_service, local_file,
self.settings.watcher, self.settings.file_upload_post_processor)
remote_id = file_content_sender.upload(s... | Upload a single file using multiple processes to upload multiple chunks at the same time.
Updates local_file with its remote_id when done.
:param local_file: LocalFile: file we are uploading
:param parent: LocalFolder/LocalProject: parent of the file |
2,524 | def get_variant_type(variant_source):
file_type = get_file_type(variant_source)
variant_type = 'snv'
if file_type == 'vcf':
variants = VCF(variant_source)
elif file_type == 'gemini':
variants = GeminiQuery(variant_source)
gemini_query = "SELECT * from variants"
variants.run(gemini_que... | Try to find out what type of variants that exists in a variant source
Args:
variant_source (str): Path to variant source
source_mode (str): 'vcf' or 'gemini'
Returns:
variant_type (str): 'sv' or 'snv' |
2,525 | def twoQ_gates(self):
two_q_gates = []
for node in self.gate_nodes():
if len(node.qargs) == 2:
two_q_gates.append(node)
return two_q_gates | Get list of 2-qubit gates. Ignore snapshot, barriers, and the like. |
2,526 | def get_build_controllers(self, name=None):
query_parameters = {}
if name is not None:
query_parameters['name'] = self._serialize.query('name', name, 'str')
response = self._send(http_method=,
location_id=,
version=,
... | GetBuildControllers.
Gets controller, optionally filtered by name
:param str name:
:rtype: [BuildController] |
2,527 | def _assert_path_is_rw(self):
if not self.path:
raise ValueError("`path` argument must be set!")
if not os.path.exists(self.path):
raise IOError("`%s` not found." % self.path)
if not os.path.isdir(self.path):
raise IOError("`%s` is not a directory!"... | Make sure, that `self.path` exists, is directory a readable/writeable.
Raises:
IOError: In case that any of the assumptions failed.
ValueError: In case that `self.path` is not set. |
2,528 | def clone(self, name=None):
if name is None:
name = self.module_name + "_clone"
return MLP(
name=name,
output_sizes=self.output_sizes,
activation=self.activation,
activate_final=self.activate_final,
initializers=self.initializers,
partitioners=self.par... | Creates a new MLP with the same structure.
Args:
name: Optional string specifying the name of the new module. The default
name is constructed by appending "_clone" to the original name.
Returns:
A cloned `MLP` module. |
2,529 | def _GetShowID(self, showName):
self._GetTitleList()
self._GetIDList()
for index, showTitle in enumerate(self._showTitleList):
if showName == showTitle:
return self._showIDList[index]
return None | Get epguides show id for a given show name.
Attempts to match the given show name against a show title in
self._showTitleList and, if found, returns the corresponding index
in self._showIDList.
Parameters
----------
showName : string
Show name to get show ID for.
Returns
---... |
2,530 | def writePIDFile(self):
with self._jobStore.writeSharedFileStream() as f:
f.write(str(os.getpid()).encode()) | Write the pid of this process to a file in the jobstore.
Overwriting the current contents of pid.log is a feature, not a bug of this method.
Other methods will rely on always having the most current pid available.
So far there is no reason to store any old pids. |
2,531 | def proximity_metric(self, a, b):
res = 1
for ap, bp, n in zip(a.path_parts, b.path_parts, list(range(4))):
res += ap == bp
if n >= 3:
break
return res | Return the weight of the dependency from a to b. Higher weights
usually have shorter straighter edges. Return 1 if it has normal
weight. A value of 4 is usually good for ensuring that a related
pair of modules are drawn next to each other.
Returns an int between 1 (unknown, ... |
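A worked example of the weighting, using hypothetical module objects:

```python
class FakeModule:
    def __init__(self, path):
        self.path_parts = path.split("/")

a = FakeModule("pkg/gui/widgets/button")
b = FakeModule("pkg/gui/widgets/label")
# zip stops comparing after four parts; equal leading parts add 1 each:
# 'pkg' == 'pkg', 'gui' == 'gui', 'widgets' == 'widgets', names differ,
# so proximity_metric(a, b) == 1 + 3 == 4, the "related modules" weight.
```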
2,532 | def taskfileinfo_descriptor_data(tfi, role):
if role == QtCore.Qt.DisplayRole or role == QtCore.Qt.EditRole:
return tfi.descriptor | Return the data for descriptor
:param tfi: the :class:`jukeboxcore.filesys.TaskFileInfo` holds the data
:type tfi: :class:`jukeboxcore.filesys.TaskFileInfo`
:param role: item data role
:type role: QtCore.Qt.ItemDataRole
:returns: data for the descriptor
:rtype: depending on role
:raises: No... |
2,533 | def import_apps_submodule(submodule):
found_apps = []
for appconfig in apps.get_app_configs():
app = appconfig.name
if import_module_or_none('{}.{}'.format(app, submodule)) is not None:
found_apps.append(app)
return found_apps | Look for a submodule in a series of packages, e.g. ".pagetype_plugins" in all INSTALLED_APPS. |
2,534 | def promote(self, content):
for n, v in content.data:
if isinstance(v, list):
content.data = v
return
content.data = [] | Promote (replace) the content.data with the first attribute
of the current content.data that is a I{list}. Note: the
content.data may be empty or contain only _x attributes.
In either case, the content.data is assigned an empty list.
@param content: An array content.
@type conte... |
2,535 | def _set_pos(self, pos):
if self._canvas.height < self._max_height:
pos *= self._max_height - self._canvas.height + 1
pos = int(round(max(0, pos), 0))
self._canvas.scroll_to(pos) | Set current position for scroll bar. |
2,536 | def match(tgt, opts=None):
if not opts:
opts = __opts__
nodegroups = opts.get('nodegroups', {})
matchers = salt.loader.matchers(opts)
if not isinstance(tgt, six.string_types) and not isinstance(tgt, (list, tuple)):
log.error()
return False
log.debug(, opts[], tgt)
ref = {: ,
... | Runs the compound target check |
2,537 | def autofit(ts, maxp=5, maxd=2, maxq=5, sc=None):
assert sc != None, "Missing SparkContext"
jmodel = sc._jvm.com.cloudera.sparkts.models.ARIMA.autoFit(_py2java(sc, Vectors.dense(ts)), maxp, maxd, maxq)
return ARIMAModel(jmodel=jmodel, sc=sc) | Utility function to help in fitting an automatically selected ARIMA model based on approximate
Akaike Information Criterion (AIC) values. The model search is based on the heuristic
developed by Hyndman and Khandakar (2008) and described in [[http://www.jstatsoft
.org/v27/i03/paper]]. In contrast to the algo... |
2,538 | def _record_field_to_json(fields, row_value):
record = {}
isdict = isinstance(row_value, dict)
for subindex, subfield in enumerate(fields):
subname = subfield.name
if isdict:
subvalue = row_value.get(subname)
else:
subvalue = row_value[subindex]
... | Convert a record/struct field to its JSON representation.
Args:
fields ( \
Sequence[:class:`~google.cloud.bigquery.schema.SchemaField`], \
):
The :class:`~google.cloud.bigquery.schema.SchemaField`s of the
record's subfields to use for type conversion and field na... |
2,539 | async def handle_user_exception(self, error: Exception) -> Response:
if isinstance(error, HTTPException) and not self.trap_http_exception(error):
return await self.handle_http_exception(error)
handler = self._find_exception_handler(error)
if handler is None:
rai... | Handle an exception that has been raised.
This should forward :class:`~quart.exception.HTTPException` to
:meth:`handle_http_exception`, then attempt to handle the
error. If it cannot it should reraise the error. |
2,540 | def excepthook (self, etype, evalue, etb):
self.inner_excepthook (etype, evalue, etb)
if issubclass (etype, KeyboardInterrupt):
signal.signal (signal.SIGINT, signal.SIG_DFL)
os.kill (os.getpid (), signal.SIGINT) | Handle an uncaught exception. We always forward the exception on to
whatever `sys.excepthook` was present upon setup. However, if the
exception is a KeyboardInterrupt, we additionally kill ourselves with
an uncaught SIGINT, so that invoking programs know what happened. |
2,541 | def _maybe_parse_configurable_reference(self):
if self._current_token.value != '@':
return False, None
location = self._current_location()
self._advance_one_token()
scoped_name = self._parse_selector(allow_periods_in_scope=True)
evaluate = False
if self._current_token.value == '(':
... | Try to parse a configurable reference (@[scope/name/]fn_name[()]). |
2,542 | def size(dtype):
dtype = tf.as_dtype(dtype)
if hasattr(dtype, 'size'):
return dtype.size
return np.dtype(dtype).itemsize | Returns the number of bytes to represent this `dtype`. |
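Usage, assuming TensorFlow is available (`tf.as_dtype` accepts dtype-like values such as strings and NumPy dtypes):

```python
import tensorflow as tf

assert size(tf.float32) == 4  # tf.DType exposes .size directly
assert size("int64") == 8     # converted via tf.as_dtype first
```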
2,543 | def process_nxml_str(nxml_str, citation=None, offline=False,
output_fname=default_output_fname):
if offline:
if not try_offline:
logger.error()
return None
try:
api_ruler = reach_reader.get_api_ruler()
except ReachOfflineReadingEr... | Return a ReachProcessor by processing the given NXML string.
NXML is the format used by PubmedCentral for papers in the open
access subset.
Parameters
----------
nxml_str : str
The NXML string to be processed.
citation : Optional[str]
A PubMed ID passed to be used in the eviden... |
2,544 | def loudest_triggers_from_cli(opts, coinc_parameters=None,
sngl_parameters=None, bank_parameters=None):
bin_results = []
ifos = opts.sngl_trigger_files.keys()
bins_idx, bank_data = bank_bins_from_cli(opts)
bin_names = bins_idx.keys()
if opts... | Parses the CLI options related to find the loudest coincident or
single detector triggers.
Parameters
----------
opts : object
Result of parsing the CLI with OptionParser.
coinc_parameters : list
List of datasets in statmap file to retrieve.
sngl_parameters : list
List o... |
2,545 | def do_alarm_definition_patch(mc, args):
fields = {}
fields['alarm_id'] = args.id
if args.name:
fields['name'] = args.name
if args.description:
fields['description'] = args.description
if args.expression:
fields['expression'] = args.expression
if args.alarm_actions:
fields['alarm_actions'] = _arg_split_patch_updat... | Patch the alarm definition. |
2,546 | def _connect(self):
"Create a Unix domain socket connection"
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.settimeout(self.socket_timeout)
sock.connect(self.path)
return sock | Create a Unix domain socket connection |
2,547 | def der_cert(der_data):
if isinstance(der_data, str):
der_data = bytes(der_data, )
return x509.load_der_x509_certificate(der_data, default_backend()) | Load a DER encoded certificate
:param der_data: DER-encoded certificate
:return: A cryptography.x509.certificate instance |
2,548 | def shutil_rmtree_onerror(func: Callable[[str], None],
path: str,
exc_info: EXC_INFO_TYPE) -> None:
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
exc = exc_info[1]
raise exc | Error handler for ``shutil.rmtree``.
If the error is due to an access error (read only file)
it attempts to add write permission and then retries.
If the error is for another reason it re-raises the error.
Usage: ``shutil.rmtree(path, onerror=shutil_rmtree_onerror)``
See
https://stackove... |
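Usage, exactly as the docstring suggests (`build_dir` is a hypothetical path):

```python
import shutil

# Retries read-only deletions (common on Windows) instead of failing.
shutil.rmtree("build_dir", onerror=shutil_rmtree_onerror)
```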
2,549 | def output(self,pin,value):
self.mraa_gpio.Gpio.write(self.mraa_gpio.Gpio(pin), value) | Set the specified pin to the provided high/low value. Value should be
either 1 (ON or HIGH), or 0 (OFF or LOW) or a boolean. |
2,550 | def console_get_default_background(con: tcod.console.Console) -> Color:
return Color._new_from_cdata(
lib.TCOD_console_get_default_background(_console(con))
) | Return this console's default background color.
.. deprecated:: 8.5
Use :any:`Console.default_bg` instead. |
2,551 | def check_for_rerun_user_task(self):
data = self.current.input
if 'wf_meta' in data:
return
current_task = self.workflow.get_tasks(Task.READY)[0]
current_task_type = current_task.task_spec.__class__.__name__
pre_task = current_task.parent
pre_task_type = pre_... | Checks whether the user task needs to be re-run.
If necessary, the current task's and the pre-task's states are changed and they are re-run.
If wf_meta is not in data (there is no user interaction from the pre-task), the last completed task
type is a user task, the current step is not an EndEvent, and there is no lane change,
t... |
2,552 | def mag_cal_progress_encode(self, compass_id, cal_mask, cal_status, attempt, completion_pct, completion_mask, direction_x, direction_y, direction_z):
return MAVLink_mag_cal_progress_message(compass_id, cal_mask, cal_status, attempt, completion_pct, completion_mask, direction_x, directio... | Reports progress of compass calibration.
compass_id : Compass being calibrated (uint8_t)
cal_mask : Bitmask of compasses being calibrated (uint8_t)
cal_status : Status (see MAG_CAL_STATUS enum) (uint8_t)
atte... |
2,553 | def middleware_in_executor(middleware):
@wraps(middleware)
def _(environ, start_response):
loop = get_event_loop()
return loop.run_in_executor(None, middleware, environ, start_response)
return _ | Use this middleware to run a synchronous middleware in the event loop
executor.
Useful when using synchronous web-frameworks such as :django:`django <>`. |
2,554 | def and_terms(*args):
args = [arg if not isinstance(arg, list) else .join(arg) for arg in args]
return .format(.join(args)) | Connect given term strings or list(s) of term strings with an AND operator for querying.
Args:
An arbitrary number of either strings or lists of strings representing query terms.
Returns
A query string consisting of argument terms and'ed together. |
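The join and format literals were stripped during extraction; a minimal reconstruction, assuming `' AND '` as the separator and a parenthesized result:

```python
def and_terms(*args):
    # Flatten any list arguments into AND-joined sub-expressions first.
    args = [arg if not isinstance(arg, list) else " AND ".join(arg)
            for arg in args]
    return "({})".format(" AND ".join(args))

assert and_terms("alpha", ["beta", "gamma"]) == "(alpha AND beta AND gamma)"
```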
2,555 | def filter_by_pattern(self, pattern):
_filt_values, _filt_datetimes = self._filter_by_pattern(pattern)
if self._enumeration is None:
self._get_mutable_enumeration()
col_obj = self._enumeration[][self._collection_type]
collection = col_obj(self.header.duplicate(), _fi... | Filter the Data Collection based on a list of booleans.
Args:
pattern: A list of True/False values. Typically, this is a list
with a length matching the length of the Data Collections values
but it can also be a pattern to be repeated over the Data Collection.
... |
2,556 | def openXmlDocument(path=None, file_=None, data=None, url=None, mime_type=None):
if path is not None:
file_ = open(path, 'rb')
elif file_ is not None:
assert hasattr(file_, 'read')
elif url is not None:
file_ = urllib2.urlopen(url)
if mime_type is None:
mime_type = fil... | **Factory function**
Will guess what document type is best suited and return the appropriate
document type.
User must provide either ``path``, ``file_``, ``data`` or ``url`` parameter.
:param path: file path in the local filesystem to a document.
:param file_: a file (like) object to a document (m... |
2,557 | def _compute_base_term(self, C, rup, dists):
c1 = self.CONSTS[]
R = np.sqrt(dists.rrup ** 2 + self.CONSTS[] ** 2)
base_term = (C[] +
C[] * ((8.5 - rup.mag) ** 2) +
(C[] + self.CONSTS[] * (rup.mag - c1)) *
np.log(R))
... | Compute and return base model term, that is the first term in equation
1, page 74. The calculation of this term is explained in paragraph
'Base Model', page 75. |
2,558 | def p_generate_if_woelse(self, p):
p[0] = IfStatement(p[3], p[5], None, lineno=p.lineno(1))
p.set_lineno(0, p.lineno(1)) | generate_if : IF LPAREN cond RPAREN gif_true_item |
2,559 | def add_x509_key_descriptors(metadata, cert=None, add_encryption=True):
if cert is None or cert == '':
return metadata
try:
root = OneLogin_Saml2_XML.to_etree(metadata)
except Exception as e:
raise Exception( + str(e))
assert root.tag == % One... | Adds the x509 descriptors (sign/encryption) to the metadata
The same cert will be used for sign/encrypt
:param metadata: SAML Metadata XML
:type metadata: string
:param cert: x509 cert
:type cert: string
:param add_encryption: Determines if the KeyDescriptor[use="encry... |
2,560 | def __getitem_slice(self, slce):
scaled_indices = (self._step * n for n in slce.indices(self._len))
start_offset, stop_offset, new_step = scaled_indices
return newrange(self._start + start_offset,
self._start + stop_offset,
new_step) | Return a range which represents the requested slce
of the sequence represented by this range. |
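A worked example of the slice arithmetic for a range-like object with `_start=10`, `_step=3`, `_len=5` (values 10, 13, 16, 19, 22):

```python
slce = slice(1, 4)
start_offset, stop_offset, new_step = (3 * n for n in slce.indices(5))
# slce.indices(5) == (1, 4, 1) -> offsets (3, 12) and step 3, so the
# result is newrange(10 + 3, 10 + 12, 3), i.e. 13, 16, 19.
```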
2,561 | def validate_config(cls, config):
if "discovery" not in config:
raise ValueError("No discovery method defined.")
installed_balancers = Balancer.get_installed_classes().keys()
if not any([balancer in config for balancer in installed_balancers]):
raise ValueError... | Validates a config dictionary parsed from a cluster config file.
Checks that a discovery method is defined and that at least one of
the balancers in the config are installed and available. |
2,562 | def start(token,
control=False,
trigger=,
groups=None,
groups_pillar_name=None,
fire_all=False,
tag=):
if (not token) or (not token.startswith()):
time.sleep(2) | Listen to slack events and forward them to salt, new version |
2,563 | def comparable(self):
string_parts = []
if self.location is not None:
string_parts.append(.format(self.location))
if self.store_index is not None:
string_parts.append(.format(self.store_index))
return self._GetComparable(sub_comparable_string=.join(string_parts)) | str: comparable representation of the path specification. |
2,564 | def set_value(self, dry_wet: LeakSensorState):
if dry_wet == LeakSensorState.DRY:
self._update_subscribers(0x11)
else:
self._update_subscribers(0x13) | Set the state to wet or dry. |
2,565 | def replace_namespaced_deployment_scale(self, name, namespace, body, **kwargs):
kwargs[] = True
if kwargs.get():
return self.replace_namespaced_deployment_scale_with_http_info(name, namespace, body, **kwargs)
else:
(data) = self.replace_namespaced_deployment_scal... | replace scale of the specified Deployment
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.replace_namespaced_deployment_scale(name, namespace, body, async_req=True)
>>> result = thread.get()
... |
2,566 | def set_encode_key_value(self, value, store_type=PUBLIC_KEY_STORE_TYPE_BASE64):
if store_type == PUBLIC_KEY_STORE_TYPE_PEM:
PublicKeyBase.set_encode_key_value(self, value.exportKey().decode(), store_type)
else:
PublicKeyBase.set_encode_key_value(self, value.exportKey(), ... | Set the value based on the type of encoding supported by RSA. |
2,567 | def _segment_index(self, recarr, existing_index, start, new_segments):
idx_col = self._datetime64_index(recarr)
existing_index_arr = np.frombuffer(decompress(existing_index), dtype=INDEX_DTYPE)
if start > 0:
existing_index_arr = exis... | Generate index of datetime64 -> item offset.
Parameters:
-----------
new_data: new data being written (or appended)
existing_index: index field from the versions document of the previous version
start: first (0-based) offset of the new data
segments: list of offsets. Eac... |
2,568 | def parse_py(s, **kwargs):
nbf = current_nbformat
nbm = current_nbformat_minor
pattern = r
m = re.search(pattern,s)
if m is not None:
digits = m.group().split()
nbf = int(digits[0])
if len(digits) > 1:
nbm = int(digits[1])
return nbf, nbm, s | Parse a string into a (nbformat, string) tuple. |
2,569 | def next_page(self, max_=None):
result = type(self)()
result.after = After(self.last.value)
result.max_ = max_
return result | Return a query set which requests the page after this response.
:param max_: Maximum number of items to return.
:type max_: :class:`int` or :data:`None`
:rtype: :class:`ResultSetMetadata`
:return: A new request set up to request the next page.
Must be called on a result set whi... |
2,570 | def create(self, friendly_name=values.unset, sync_service_sid=values.unset):
data = values.of({'FriendlyName': friendly_name, 'SyncServiceSid': sync_service_sid, })
payload = self._version.create(
'POST',
self._uri,
data=data,
)
return DeploymentInstance(self._version, payload... | Create a new DeploymentInstance
:param unicode friendly_name: A human readable description for this Deployment.
:param unicode sync_service_sid: The unique identifier of the Sync service instance.
:returns: Newly created DeploymentInstance
:rtype: twilio.rest.preview.deployed_devices.f... |
2,571 | def migrate_database(adapter):
all_variants = adapter.get_variants()
nr_variants = all_variants.count()
nr_updated = 0
with progressbar(all_variants, label="Updating variants", length=nr_variants) as bar:
for variant in bar:
if in variant:
cont... | Migrate an old loqusdb instance to 1.0
Args:
adapter
Returns:
nr_updated(int): Number of variants that where updated |
2,572 | async def subscribe(self, *args, **kwargs):
if args:
args = list_or_args(args[0], args[1:])
new_channels = {}
new_channels.update(dict.fromkeys(map(self.encode, args)))
for channel, handler in iteritems(kwargs):
new_channels[self.encode(channel)] = handle... | Subscribe to channels. Channels supplied as keyword arguments expect
a channel name as the key and a callable as the value. A channel's
callable will be invoked automatically when a message is received on
that channel rather than producing a message via ``listen()`` or
``get_message()``. |
2,573 | def calculateHurst(self, series, exponent=None):
rescaledRange = list()
sizeRange = list()
rescaledRangeMean = list()
if(exponent is None):
exponent = self.bestExponent(len(series))
for i in range(0, exponent):
partsNumber = int(math.pow(2, i))
... | :type series: List
:type exponent: int
:rtype: float |
2,574 | def no_ssl_verification(self):
try:
from functools import partialmethod
except ImportError:
from functools import partial
class partialmethod(partial):
def __get__(self, instance, owner):
if instance is None:
... | Requests module fails due to Let's Encrypt SSL encryption. Will be fixed in a future release. |
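The truncated fallback defines a `partialmethod` backport for Python < 3.4; a sketch of the standard recipe (the body here is an assumption):

```python
from functools import partial

class partialmethod(partial):
    # Descriptor protocol: bind the wrapped function to the instance
    # on attribute access, the way a normal method would be bound.
    def __get__(self, instance, owner):
        if instance is None:
            return self
        return partial(self.func, instance,
                       *(self.args or ()), **(self.keywords or {}))
```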
2,575 | def train(self, ftrain):
self.coeffs = 0*self.coeffs
upoints, wpoints = self.getQuadraturePointsAndWeights()
try:
fpoints = [ftrain(u) for u in upoints]
except TypeError:
fpoints = ftrain
for ipoly in np.arange(self.N_poly):
inds =... | Trains the polynomial expansion.
:param numpy.ndarray/function ftrain: output values corresponding to the
quadrature points given by the getQuadraturePoints method to
which the expansion should be trained. Or a function that should be evaluated
at the quadrature points to gi... |
2,576 | def warning(self, message, *args, **kwargs):
self.system.warning(message, *args, **kwargs) | Log warning event.
Compatible with logging.warning signature. |
2,577 | def searchTriples(expnums,ccd):
import MOPfits,os
import MOPdbaccess
if len(expnums)!=3:
return(-1)
mysql=MOPdbaccess.connect(,,)
bucket=mysql.cursor()
proc_file = open("proc-these-files","w")
proc_file.write("
proc_file.write("
import string
... | Given a list of exposure numbers, find all the KBOs in that set of exposures |
2,578 | def sort_index(self, **kwargs):
axis = kwargs.pop("axis", 0)
index = self.columns if axis else self.index
ascending = kwargs.pop("ascending", True)
if ascending is None:
ascending = False
kwargs["ascending"] = ascending
de... | Sorts the data with respect to either the columns or the indices.
Returns:
DataManager containing the data sorted by columns or indices. |
2,579 | def __record(self, oid=None):
f = self.__getFileObj(self.dbf)
recordContents = self.__recStruct.unpack(f.read(self.__recStruct.size))
if recordContents[0] != b:
return None
record = []
for (name, typ, size, deci), value in zip(self.fields,... | Reads and returns a dbf record row as a list of values. |
2,580 | def get_song(self, netease=False):
song = self._playlist.get(True)
self.hash_sid[song['sid']] = True
self.get_netease_song(song, netease)
self._playingsong = song
return song | Get a song; the unified external interface. |
2,581 | def serve_forever(self):
loop = True
while loop:
loop = self.__serve_forever()
self.end() | Wrapper to the serve_forever function. |
2,582 | def find(cls, *args, **kwargs):
return cls.from_cursor(cls.collection.find(*args, **kwargs)) | Same as ``collection.find``, returns model object instead of dict. |
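A minimal sketch of the pattern, assuming `from_cursor` wraps each raw document and `collection` is a pymongo collection injected elsewhere:

```python
class Model(dict):
    collection = None  # set to a pymongo Collection at setup time

    @classmethod
    def from_cursor(cls, cursor):
        # Wrap every raw dict yielded by the cursor in the model class.
        return [cls(doc) for doc in cursor]

    @classmethod
    def find(cls, *args, **kwargs):
        return cls.from_cursor(cls.collection.find(*args, **kwargs))
```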
2,583 | def create(self, fields):
try:
cleaned_fields = {}
for key, value in fields.items():
if type(value) is dict:
try:
if value[] == :
fake_fk = self.fake_fk(value[])
... | Create the object only once.
So you need a loop to use it.
:param `fields`: dictionary of fields. |
2,584 | def transfer(self, receiver_address, amount, sender_account):
self._keeper.token.token_approve(receiver_address, amount,
sender_account)
self._keeper.token.transfer(receiver_address, amount, sender_account) | Transfer a number of tokens from `sender_account` to `receiver_address`
:param receiver_address: hex str ethereum address to receive this transfer of tokens
:param amount: int number of tokens to transfer
:param sender_account: Account instance to take the tokens from
:return: bool |
2,585 | def createMemoryParserCtxt(buffer, size):
ret = libxml2mod.xmlCreateMemoryParserCtxt(buffer, size)
if ret is None:raise parserError()
return parserCtxt(_obj=ret) | Create a parser context for an XML in-memory document. |
2,586 | def luks_cleartext_holder(self):
if not self.is_luks:
return None
for device in self._daemon:
if device.luks_cleartext_slave == self:
return device
return None | Get wrapper to the unlocked luks cleartext device. |
2,587 | def run(self, verbose=False):
log = []
modules_copy = dict(sys.modules)
for modname, module in modules_copy.items():
if modname == :
print(modname, module)
print(self.previous_modules)
if modname not in self.previous_modules:
... | Del user modules to force Python to deeply reload them.
Do not del modules which are considered system modules, i.e.
modules installed in subdirectories of the Python interpreter's binary.
Do not del C modules. |
2,588 | def _cleanup(self, kill, verbose):
if kill:
removed_indices = self.g.prune()
self.nout -= len(removed_indices)
if verbose and removed_indices:
print( % removed_indices)
for j in removed_indices:
self.inv_map.pop(j[0]) | Look for dead components (weight=0) and remove them
if enabled by ``kill``.
Resize storage. Recompute determinant and covariance. |
2,589 | def receive_message(
sock, operation, request_id, max_message_size=MAX_MESSAGE_SIZE):
header = _receive_data_on_socket(sock, 16)
length = _UNPACK_INT(header[:4])[0]
actual_op = _UNPACK_INT(header[12:])[0]
if operation != actual_op:
raise ProtocolError("Got opcode %r but expected "
... | Receive a raw BSON message or raise socket.error. |
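The 16-byte header unpacked above follows the MongoDB wire protocol; `_UNPACK_INT` is assumed to be a little-endian int32 unpacker, as in PyMongo:

```python
import struct

_UNPACK_INT = struct.Struct("<i").unpack  # little-endian int32

# Header layout (4 bytes each):
#   [0:4]   messageLength  -> length    = _UNPACK_INT(header[:4])[0]
#   [4:8]   requestID
#   [8:12]  responseTo
#   [12:16] opCode         -> actual_op = _UNPACK_INT(header[12:])[0]
```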
2,590 | def convert_sqlite_to_mysql(
self):
from fundamentals.renderer import list_of_dictionaries
from fundamentals.mysql import directory_script_runner
self.log.debug()
con = lite.connect(self.pathToSqlite)
con.row_factory = lite.Row
cur = con.cursor()
... | *copy the contents of the sqlite database into the mysql database*
See class docstring for usage |
2,591 | def loglike(self):
sum = logp_of_set(self.children)
if self.verbose > 2:
print_( + self._id + , sum)
return sum | The summed log-probability of all stochastic variables that depend on
self.stochastics, with self.stochastics removed. |
2,592 | def _extract_sender(
message: Message, resent_dates: List[Union[str, Header]] = None
) -> str:
if resent_dates:
sender_header = "Resent-Sender"
from_header = "Resent-From"
else:
sender_header = "Sender"
from_header = "From"
if sender_header in message:
... | Extract the sender from the message object given. |
2,593 | def grouped_insert(t, value):
collator = Collator.createInstance(Locale(t.lang) if t.lang else Locale())
if value.tail is not None:
val_prev = value.getprevious()
if val_prev is not None:
val_prev.tail = (val_prev.tail or ) + value.tail
else:
val_parent = val... | Insert value into the target tree 't' with correct grouping. |
2,594 | def start(self):
if self._already_running():
message =
sys.stderr.write(message % self.pid_file)
return 0
self.set_gid()
self.set_uid()
self.setup_logging()
self.daemonize()
try:
... | Start the daemon |
2,595 | def retrieve(self, id) :
_, _, task = self.http_client.get("/tasks/{id}".format(id=id))
return task | Retrieve a single task
Returns a single task available to the user according to the unique task ID provided.
If the specified task does not exist, this query will return an error.
:calls: ``get /tasks/{id}``
:param int id: Unique identifier of a Task.
:return: Dictionary that sup... |
2,596 | def generic_html(self, result, errors):
h1 = htmlize(type(result))
out = []
result = pre_process_json(result)
if not hasattr(result, ):
header = "<tr><th>Value</th></tr>"
if type(result) is list:
result = htmlize_list(result)... | Try to display any object in sensible HTML. |
2,597 | def _flush(self):
if self._recording:
raise Exception("Cannot flush data queue while recording!")
if self._saving_cache:
logging.warn("Flush when using cache means unsaved data will be lost and not returned!")
self._cmds_q.put(("reset_data_segment",))
... | Returns a list of all current data |
2,598 | async def sort(self, request, reverse=False):
return sorted(
self.collection, key=lambda o: getattr(o, self.columns_sort, 0), reverse=reverse) | Sort collection. |
2,599 | def execute(self, query_string, params=None):
cr = self.connection.cursor()
logger.info("SQL: %s (%s)", query_string, params)
self.last_query = (query_string, params)
t0 = time.time()
cr.execute(query_string, params or self.core.empty_params)
ms = (time.time() - t0) * 1000
logger.info("... | Executes a query. Returns the resulting cursor.
:query_string: the parameterized query string
:params: can be either a tuple or a dictionary, and must match the parameterization style of the
query
:return: a cursor object |