Unnamed: 0 int64 0 389k | code stringlengths 26 79.6k | docstring stringlengths 1 46.9k |
|---|---|---|
369,400 | def fake2db_mysql_initiator(self, host, port, password, username, number_of_rows, name=None, custom=None):
rows = number_of_rows
if name:
cursor, conn = self.database_caller_creator(host, port, password, username, name)
else:
cursor, conn = self.database_caller_... | Main handler for the operation |
369,401 | def save_config(
self, cmd="save configuration primary", confirm=False, confirm_response=""
):
return super(ExtremeExosBase, self).save_config(
cmd=cmd, confirm=confirm, confirm_response=confirm_response
) | Saves configuration. |
369,402 | def view_plugins(category=None):
if category is not None:
if category == :
return {
name: {"descript": klass.plugin_descript,
"regex": klass.file_regex}
for name, klass in _all_plugins[category].items()
}
return {
... | return a view of the loaded plugin names and descriptions
Parameters
----------
category : None or str
if str, apply for single plugin category
Examples
--------
>>> from pprint import pprint
>>> pprint(view_plugins())
{'decoders': {}, 'encoders': {}, 'parsers': {}}
>>> c... |
369,403 | def publish_message(self,
exchange,
routing_key,
properties,
body,
no_serialization=False,
no_encoding=False,
channel=None,
conn... | Publish a message to RabbitMQ on the same channel the original
message was received on.
By default, if you pass a non-string object to the body and the
properties have a supported ``content_type`` set, the body will be
auto-serialized in the specified ``content_type``.
If the p... |
369,404 | def set_editor_doc(self, doc, force_refresh=False):
if (self.locked and not force_refresh):
return
self.switch_to_editor_source()
self._last_editor_doc = doc
self.object_edit.setText(doc[])
if self.rich_help:
self.render_sphinx_doc(doc)
... | Use the help plugin to show docstring dictionary computed
with introspection plugin from the Editor plugin |
369,405 | def is_valid_ip_pattern(ip):
ip = ip.replace(, )
try:
socket.inet_aton(ip)
return True
except socket.error:
return False | Check whether a string matches the outline of an IPv4 address,
allowing "*" as a wildcard |
369,406 | def draw(self):
colors = self.colors[0:len(self.classes_)]
n_classes = len(colors)
return self.ax | Renders ROC-AUC plot.
Called internally by score, possibly more than once
Returns
-------
ax : the axis with the plotted figure |
369,407 | def fmt_number(p):
formatted = .format(p)
if not config.PRINT_FRACTIONS:
return formatted
fraction = Fraction(p)
nice = fraction.limit_denominator(128)
return (
str(nice) if (abs(fraction - nice) < constants.EPSILON and
nice.denominator in NICE_DENOMINATO... | Format a number.
It will be printed as a fraction if the denominator isn't too big and as a
decimal otherwise. |
369,408 | def clear_callbacks(obj):
callbacks = obj._callbacks
if isinstance(callbacks, dllist):
callbacks.clear()
obj._callbacks = None | Remove all callbacks from an object. |
369,409 | def get_option(self, key, subkey, in_path_none=False):
key, subkey = _lower_keys(key, subkey)
_entry_must_exist(self.gc, key, subkey)
df = self.gc[(self.gc["k1"] == key) & (self.gc["k2"] == subkey)]
if df["type"].values[0] == "bool":
return bool(df["value"].values[0... | Get the current value of the option.
:param str key: First identifier of the option.
:param str subkey: Second identifier of the option.
:param bool in_path_none: Allows for ``in_path`` values of
:data:`None` to be retrieved.
:return: Current value of the option (type varie... |
369,410 | def save_hdf(self,filename,path=):
self.orbpop_long.save_hdf(filename,.format(path))
self.orbpop_short.save_hdf(filename,.format(path)) | Save to .h5 file. |
369,411 | def entity_delete(args):
msg = "WARNING: this will delete {0} {1} in {2}/{3}".format(
args.entity_type, args.entity, args.project, args.workspace)
if not (args.yes or _confirm_prompt(msg)):
return
json_body=[{"entityType": args.entity_type,
"entityName": args.entity}]... | Delete entity in a workspace. |
369,412 | def t_heredocvar_ENCAPSED_AND_WHITESPACE(t):
r
t.lexer.lineno += t.value.count("\n")
t.lexer.pop_state()
return t | r'( [^\n\\${] | \\. | \$(?![A-Za-z_{]) | \{(?!\$) )+\n? | \\?\n |
369,413 | def append_to_arg_count(self, data):
assert data in
current = self._arg
if data == :
assert current is None or current ==
result = data
elif current is None:
result = data
else:
result = "%s%s" % (current, data)
... | Add digit to the input argument.
:param data: the typed digit as string |
369,414 | def validate(self):
validation = super(StudioEditableXBlockMixin, self).validate()
self.validate_field_data(validation, self)
return validation | Validates the state of this XBlock.
Subclasses should override validate_field_data() to validate fields and override this
only for validation not related to this block's field values. |
369,415 | def flash(self, duration=0.0):
for _ in range(2):
self.on = not self.on
time.sleep(duration) | Flash a group.
:param duration: How quickly to flash (in seconds). |
369,416 | def fetch_post_data(self):
post_data = {}
ext_dic = {}
for key in self.request.arguments:
if key.startswith():
ext_dic[key] = self.get_argument(key)
else:
post_data[key] = self.get_arguments(key)[0]
post_data[] = self.user... | fetch post accessed data. post_data, and ext_dic. |
369,417 | def process_entry(self, defect_entry):
self.perform_all_corrections(defect_entry)
self.delocalization_analysis(defect_entry)
corrections = {}
skip_charge_corrections = False
if "num_hole_vbm" in defect_entry.parameters.keys():
if (self.free_chg_cut... | Process a given Defect entry with qualifiers given from initialization of class.
Order of processing is:
1) perform all possible defect corrections with information given
2) consider delocalization analyses based on qualifier metrics
given initialization of class. If delocali... |
369,418 | def hashdata(self, subject):
_data = bytearray()
if isinstance(subject, six.string_types):
subject = subject.encode()
if self.type == SignatureType.BinaryDocument:
if isinstance(subject, (SKEData, IntegrityProtectedSKEData)):
_data... | All signatures are formed by producing a hash over the signature
data, and then using the resulting hash in the signature algorithm. |
369,419 | def local_filename(
self,
url=None,
filename=None,
decompress=False):
return common.build_local_filename(url, filename, decompress) | What local filename will we use within the cache directory
for the given URL/filename/decompress options. |
369,420 | def correspondent_id(self):
try:
return int(self._thread_element.attrib[])
except (ValueError, KeyError):
try:
return int(self.correspondent_profile.id)
except:
pass | :returns: The id assigned to the correspondent of this message. |
369,421 | def hourly_solar_radiation(self):
dir_norm, diff_horiz, glob_horiz = \
self._sky_condition.radiation_values(self._location)
dir_norm_data = self._get_daily_data_collections(
energyintensity.DirectNormalRadiation(), , dir_norm)
diff_horiz_data = self._get_daily_d... | Three data collections containing hourly direct normal, diffuse horizontal,
and global horizontal radiation. |
369,422 | def write_gtiff_file(f_name, n_rows, n_cols, data, geotransform, srs, nodata_value,
gdal_type=GDT_Float32):
UtilClass.mkdir(os.path.dirname(FileClass.get_file_fullpath(f_name)))
driver = gdal_GetDriverByName(str())
try:
ds = driver.Create(f_name, n_c... | Output Raster to GeoTiff format file.
Args:
f_name: output gtiff file name.
n_rows: Row count.
n_cols: Col count.
data: 2D array data.
geotransform: geographic transformation.
srs: coordinate system.
nodata_value: nodata value.... |
369,423 | def is_valid_catalog(catalog, validator=None):
catalog = readers.read_catalog(catalog)
if not validator:
if hasattr(catalog, "validator"):
validator = catalog.validator
else:
validator = create_validator()
jsonschema_res = validator.is_valid(catalog)
custom_... | Valida que un archivo `data.json` cumpla con el schema definido.
Chequea que el data.json tiene todos los campos obligatorios y que
tanto los campos obligatorios como los opcionales siguen la estructura
definida en el schema.
Args:
catalog (str o dict): Catálogo (dict, JSON o XLSX) a ser valid... |
369,424 | def _fix_next_url(next_url):
next_url = str(next_url)
parsed_url = urllib.parse.urlparse(next_url)
if not parsed_url.scheme or not parsed_url.netloc or not parsed_url.path:
raise ValueError(
" must be a valid API endpoint URL, minimally "
"containing a scheme, netloc an... | Remove max=null parameter from URL.
Patch for Webex Teams Defect: 'next' URL returned in the Link headers of
the responses contain an errant 'max=null' parameter, which causes the
next request (to this URL) to fail if the URL is requested as-is.
This patch parses the next_url to remove the max=null p... |
369,425 | def get_region(self, ip):
rec = self.get_all(ip)
return rec and rec.region | Get region |
369,426 | def choose_type(cls, content_type):
return cls.type_cls.SUBDIR if content_type in cls.subdir_types \
else cls.type_cls.FILE | Choose object type from content type. |
369,427 | def plot_eeg_erp_topo(all_epochs, colors=None):
all_evokeds = eeg_to_all_evokeds(all_epochs)
data = {}
for participant, epochs in all_evokeds.items():
for cond, epoch in epochs.items():
data[cond] = []
for participant, epochs in all_evokeds.items():
for cond, epoch in e... | Plot butterfly plot.
DOCS INCOMPLETE :( |
369,428 | def gcp_conn(service, service_type=, future_expiration_minutes=15):
def decorator(f):
@wraps(f)
def decorated_function(*args, **kwargs):
from cloudaux.gcp.auth import get_client
(conn_args, kwargs) = get_creds_from_kwargs(kwargs)
client_details,... | service_type: not currently used. |
369,429 | def get_target_dimensions(self):
if self.target_height is None:
self._calculate_target_dimensions()
return int(self.target_width), int(self.target_height) | Returns the target dimensions and calculates them if necessary.
The target dimensions are display independent.
:return: Target dimensions as a tuple (width, height)
:rtype: (int, int) |
369,430 | def __parse_config(self):
if self.should_parse_config and (self.args.config or self.config_file):
self.config = ConfigParser.SafeConfigParser()
self.config.read(self.args.config or self.config_file) | Invoke the config file parser. |
369,431 | def lkendalltau(x,y):
n1 = 0
n2 = 0
iss = 0
for j in range(len(x)-1):
for k in range(j,len(y)):
a1 = x[j] - x[k]
a2 = y[j] - y[k]
aa = a1 * a2
if (aa):
n1 = n1 + 1
n2 = n2 + 1
if aa ... | Calculates Kendall's tau ... correlation of ordinal data. Adapted
from function kendl1 in Numerical Recipies. Needs good test-routine.@@@
Usage: lkendalltau(x,y)
Returns: Kendall's tau, two-tailed p-value |
369,432 | def get_bel_versions() -> List[str]:
spec_dir = config["bel"]["lang"]["specifications"]
fn = f"{spec_dir}/versions.json"
with open(fn, "r") as f:
versions = json.load(f)
return versions | Get BEL Language versions supported
Get the list of all BEL Language versions supported. The file this depends
on is generated by belspec_yaml2json and is kept up to date using
`make update_ebnf` or `make update_parsers`. You can also run `belspec_yaml2json`
directly as it's added as a command by pip... |
369,433 | def export(self, top=True):
out = []
if top:
out.append(self._internal_name)
out.append(self._to_str(self.typical_or_extreme_period_name))
out.append(self._to_str(self.typical_or_extreme_period_type))
out.append(self._to_str(self.period_start_day))
ou... | Exports object to its string representation.
Args:
top (bool): if True appends `internal_name` before values.
All non list objects should be exported with value top=True,
all list objects, that are embedded in as fields inlist objects
should be expor... |
369,434 | def split_data(self):
try:
time_period1 = (slice(self.baseline_period[0], self.baseline_period[1]))
self.baseline_in = self.original_data.loc[time_period1, self.input_col]
self.baseline_out = self.original_data.loc[time_period1, self.output_col]
... | Split data according to baseline and projection time period values. |
369,435 | def check_engine(handle):
if handle == :
dump_engines()
sys.exit(0)
if handle not in engines.engines:
print( % (handle,), file=sys.stderr)
sys.exit(1) | Check availability of requested template engine. |
369,436 | def _checkpoint_and_erase(self, trial):
with warn_if_slow("save_to_disk"):
trial._checkpoint.value = ray.get(trial.runner.save.remote())
if len(trial.history) >= trial.keep_checkpoints_num:
ray.get(trial.runner.delete_checkpoint.remote(trial.history[-1]))
t... | Checkpoints the model and erases old checkpoints
if needed.
Parameters
----------
trial : trial to save |
369,437 | def sample_given_context(self, c, c_dims):
index = self.discrete_progress.sample_given_context(c, c_dims, self.space)
return self.space.rand_value(index).flatten()[list(set(range(len(self.space.cardinalities))) - set(c_dims))] | Sample the region with max progress among regions that have the same context
c: context value on c_dims dimensions
c_dims: w.r.t sensory space dimensions |
369,438 | def registration(uri):
logger.debug(
.format(uri))
StackInABox.update_uri(uri)
regex = re.compile(.format(uri),
re.I)
METHODS = [
responses.DELETE,
responses.GET,
responses.HEAD,
responses.OPTIONS,
... | Responses handler registration.
Registers a handler for a given URI with Responses
so that it can be intercepted and handed to
Stack-In-A-Box.
:param uri: URI used for the base of the HTTP requests
:returns: n/a |
369,439 | def normalize(X, mean=None, std=None):
if mean is None or std is None:
mean = X.mean(0)
std = X.std(0)
return (X - mean) / std | Normalize X. If mean OR std is None, normalizes
X to have mean 0 and std 1. |
369,440 | def column_max_width(self, column_number):
inner_widths = max_dimensions(self.table_data)[0]
outer_border = 2 if self.outer_border else 0
inner_border = 1 if self.inner_column_border else 0
padding = self.padding_left + self.padding_right
return column_max_width(inner_wi... | Return the maximum width of a column based on the current terminal width.
:param int column_number: The column number to query.
:return: The max width of the column.
:rtype: int |
369,441 | def inflate_analysis_group(self, identifier, definition):
providers_definition = definition.pop(, None)
checkers_definition = definition.pop(, None)
analysis_group = AnalysisGroup()
try:
first_plugin = self.inflate_plugin(identifier, definition)
if is... | Inflate a whole analysis group.
An analysis group is a section defined in the YAML file.
Args:
identifier (str): the group identifier.
definition (list/dict): the group definition.
Returns:
AnalysisGroup: an instance of AnalysisGroup.
Raises:
... |
369,442 | def enqueue_task(self, task):
data = dumps(task)
if self._async:
self.publisher_client.publish(self.topic_path, data=data)
logger.info(.format(task.id))
else:
unpickled_task = unpickle(data)
logger.info(
.format(unpickled_... | Enqueues a task directly. This is used when a task is retried or if
a task was manually created.
Note that this does not store the task. |
369,443 | def get_wegobject_by_id(self, id):
def creator():
res = crab_gateway_request(
self.client, , id
)
if res == None:
raise GatewayResourceNotFoundException()
return Wegobject(
res.IdentificatorWegobject,
... | Retrieve a `Wegobject` by the Id.
:param integer id: the Id of the `Wegobject`
:rtype: :class:`Wegobject` |
369,444 | def data_request(self, payload, timeout=TIMEOUT):
request_url = self.base_url + "/data_request"
return requests.get(request_url, timeout=timeout, params=payload) | Perform a data_request and return the result. |
369,445 | def libvlc_audio_set_mute(p_mi, status):
f = _Cfunctions.get(, None) or \
_Cfunction(, ((1,), (1,),), None,
None, MediaPlayer, ctypes.c_int)
return f(p_mi, status) | Set mute status.
@param p_mi: media player.
@param status: If status is true then mute, otherwise unmute @warning This function does not always work. If there are no active audio playback stream, the mute status might not be available. If digital pass-through (S/PDIF, HDMI...) is in use, muting may be unapplica... |
369,446 | def markdown_filter(value, typogrify=True, extensions=(, )):
match = re.match(r, value)
s, e = match.span(2)
pattern = re.compile(r % (e - s),
if typogrify:
return jinja_filters.typogrify(markdown(output, extensions=extensions))
else:
return markdown(output, extensions=e... | A smart wrapper around the ``markdown`` and ``typogrify`` functions that automatically removes leading
whitespace before every line. This is necessary because Markdown is whitespace-sensitive. Consider some Markdown
content in a template that looks like this:
.. codeblock:: html+jinja
<article>
... |
369,447 | def list_build_configurations_for_product(id=None, name=None, page_size=200, page_index=0, sort="", q=""):
data = list_build_configurations_for_product_raw(id, name, page_size, page_index, sort, q)
if data:
return utils.format_json_list(data) | List all BuildConfigurations associated with the given Product. |
369,448 | def get_timestamp(self, **kwargs):
timestamp = kwargs.get()
if not timestamp:
now = datetime.datetime.utcnow()
timestamp = now.strftime("%Y-%m-%dT%H:%M:%S") + ".%03d" % (now.microsecond / 1000) + "Z"
return timestamp | Retrieves the timestamp for a given set of data |
369,449 | def multiple_optima(gene_number=937, resolution=80, model_restarts=10, seed=10000, max_iters=300, optimize=True, plot=True):
length_scales = np.linspace(0.1, 60., resolution)
log_SNRs = np.linspace(-3., 4., resolution)
try:import pods
except ImportError:
print()
return
da... | Show an example of a multimodal error surface for Gaussian process
regression. Gene 939 has bimodal behaviour where the noisy mode is
higher. |
369,450 | def _populate_and_save_user_profile(self):
try:
app_label, class_name = django.conf.settings.AUTH_PROFILE_MODULE.split()
profile_model = apps.get_model(app_label, class_name)
profile, created = profile_model.objects.get_or_create(user=self._user)
save_pro... | Populates a User profile object with fields from the LDAP directory. |
369,451 | def listen_on_udp_port():
host = os.getenv(
"UDP_LISTEN_ON_HOST",
"127.0.0.1").strip().lstrip()
port = int(os.getenv(
"UDP_LISTEN_ON_PORT",
"17000").strip().lstrip())
backlog = int(os.getenv(
"UDP_LISTEN_BACKLOG",
"5").strip().lstrip())
size = int(os... | listen_on_udp_port
Run a simple server for processing messages over ``UDP``.
``UDP_LISTEN_ON_HOST`` - listen on this host ip address
``UDP_LISTEN_ON_PORT`` - listen on this ``UDP`` port
``UDP_LISTEN_SIZE`` - listen on to packets of this size
``UDP_LISTEN_SLEEP`` - sleep this number of seconds p... |
369,452 | def set_resource_type(resource, type_id, types={}, **kwargs):
ref_key = resource.ref_key
existing_attr_ids = []
for res_attr in resource.attributes:
existing_attr_ids.append(res_attr.attr_id)
if type_id in types:
type_i = types[type_id]
else:
type_i = db.DBSession.que... | Set this resource to be a certain type.
Type objects (a dictionary keyed on type_id) may be
passed in to save on loading.
This function does not call save. It must be done afterwards.
New resource attributes are added to the resource if the template
requires them. Resource attrib... |
369,453 | def download(self, bucket_name, object_name, filename=None):
client = self.get_conn()
bucket = client.get_bucket(bucket_name)
blob = bucket.blob(blob_name=object_name)
if filename:
blob.download_to_filename(filename)
self.log.info(, filename)
re... | Get a file from Google Cloud Storage.
:param bucket_name: The bucket to fetch from.
:type bucket_name: str
:param object_name: The object to fetch.
:type object_name: str
:param filename: If set, a local file path where the file should be written to.
:type filename: str |
369,454 | def _new_stream(self, idx):
if self.stream_weights_[idx]:
self.streams_[idx] = self.streamers[idx].iterate()
else:
self.streams_[idx] = None
self.stream_counts_[idx] = 0 | Randomly select and create a new stream.
Parameters
----------
idx : int, [0:n_streams - 1]
The stream index to replace |
369,455 | def move_into(self, destination_folder):
headers = self.headers
endpoint = + self.id +
payload = + destination_folder.id +
r = requests.post(endpoint, headers=headers, data=payload)
if check_response(r):
return_folder = r.json()
ret... | Move the Folder into a different folder.
This makes the Folder provided a child folder of the destination_folder.
Raises:
AuthError: Raised if Outlook returns a 401, generally caused by an invalid or expired access token.
Args:
destination_folder: A :class:`Folder <pyO... |
369,456 | def aggcv(rlist, show_stdv=True, show_progress=None, as_pandas=True):
cvmap = {}
idx = rlist[0].split()[0]
for line in rlist:
arr = line.split()
assert idx == arr[0]
for it in arr[1:]:
if not isinstance(it, STRING_TYPES):
it = it.decode()
... | Aggregate cross-validation results. |
369,457 | def plot_isotherm(self, T, zs, ws, Pmin=None, Pmax=None, methods=[], pts=50,
only_valid=True):
r
if not has_matplotlib:
raise Exception()
if Pmin is None:
if self.Pmin is not None:
Pmin = self.Pmin
else:
... | r'''Method to create a plot of the property vs pressure at a specified
temperature and composition according to either a specified list of
methods, or the user methods (if set), or all methods. User-selectable
number of points, and pressure range. If only_valid is set,
`test_method_v... |
369,458 | def block_verify( block_data ):
m = merkle.MerkleTree( block_data[] )
root_hash = str(m.root())
return root_hash == str(block_data[]) | Given block data (a dict with 'merkleroot' hex string and 'tx' list of hex strings--i.e.
a block compatible with bitcoind's getblock JSON RPC method), verify that the
transactions are consistent.
Return True on success
Return False if not. |
369,459 | def python(self, cmd):
python_bin = self.cmd_path()
cmd = .format(python_bin, cmd)
return self._execute(cmd) | Execute a python script using the virtual environment python. |
369,460 | def serializer_for(self, obj):
if obj is None:
return self._null_serializer
obj_type = type(obj)
serializer = self.lookup_default_serializer(obj_type, obj)
if serializer is None:
serializer = self.lookup_custom_serializer(obj... | Searches for a serializer for the provided object
Serializers will be searched in this order;
1-NULL serializer
2-Default serializers, like primitives, arrays, string and some default types
3-Custom registered types by user
4-Global serializer if registered ... |
369,461 | def _check_values(self, values):
assert isinstance(values, Iterable) and not \
isinstance(values, (str, dict, bytes, bytearray)), \
.format(type(values))
assert len(values) == len(self.datetimes), \
.format(
len(values), len(self.datetimes))
... | Check values whenever they come through the values setter. |
369,462 | def removeChild(self, child_id):
self.log.debug("Try to remove a child <Workitem %s> from current "
"<Workitem %s>",
child_id,
self)
self._removeChildren([child_id])
self.log.info("Successfully remove a child <Workite... | Remove a child from current workitem
:param child_id: the child workitem id/number
(integer or equivalent string) |
369,463 | def construct_error_message(driver_id, error_type, message, timestamp):
builder = flatbuffers.Builder(0)
driver_offset = builder.CreateString(driver_id.binary())
error_type_offset = builder.CreateString(error_type)
message_offset = builder.CreateString(message)
ray.core.generated.ErrorTableDat... | Construct a serialized ErrorTableData object.
Args:
driver_id: The ID of the driver that the error should go to. If this is
nil, then the error will go to all drivers.
error_type: The type of the error.
message: The error message.
timestamp: The time of the error.
R... |
369,464 | def find_suitable_encoding(self, char):
sorted_encodings = sorted(
self.codepages.items(),
key=self.__encoding_sort_func)
for encoding, _ in sorted_encodings:
if self.can_encode(encoding, char):
self.used_encodings.add(encodi... | The order of our search is a specific one:
1. code pages that we already tried before; there is a good
chance they might work again, reducing the search space,
and by re-using already used encodings we might also
reduce the number of codepage change instructiosn we have
... |
369,465 | def write_biom(self, sample_names, read_taxonomies, biom_file_io):
counts = []
observ_metadata = []
otu_ids = []
for otu_id, tax, count in self._iterate_otu_table_rows(read_taxonomies):
if len(count) != len(sample_names):
raise Exception("Programming ... | Write the OTU info to a biom IO output stream
Parameters
----------
sample_names: String
names of each sample (sample_ids for biom)
read_taxonomies: Array of hashes as per _iterate_otu_table_rows()
biom_file_io: io
open writeable stream to write b... |
369,466 | def check_schedule():
all_items = prefetch_schedule_items()
for validator, _type, _msg in SCHEDULE_ITEM_VALIDATORS:
if validator(all_items):
return False
all_slots = prefetch_slots()
for validator, _type, _msg in SLOT_VALIDATORS:
if validator(all_slots):
ret... | Helper routine to easily test if the schedule is valid |
369,467 | def _query_select_options(self, query, select_columns=None):
if select_columns:
_load_options = list()
for column in select_columns:
if "." in column:
model_relation = self.get_related_model(column.split(".")[0])
if not sel... | Add select load options to query. The goal
is to only SQL select what is requested
:param query: SQLAlchemy Query obj
:param select_columns: (list) of columns
:return: SQLAlchemy Query obj |
369,468 | def retry_on_exception(tries=6, delay=1, backoff=2, max_delay=32):
tries = math.floor(tries)
if tries < 1:
raise ValueError()
if delay < 0:
raise ValueError()
if backoff < 1:
raise ValueError()
if max_delay < delay:
raise ValueError()
def decorated_function_... | Decorator for implementing exponential backoff for retrying on failures.
tries: Max number of tries to execute the wrapped function before failing.
delay: Delay time in seconds before the FIRST retry.
backoff: Multiplier to extend the initial delay by for each retry.
max_delay: Max time in seconds to w... |
369,469 | def from_name(cls, name, all_fallback=True):
name = name.upper()
for vocation in cls:
if vocation.name in name or vocation.name[:-1] in name and vocation != cls.ALL:
return vocation
if all_fallback or name.upper() == "ALL":
return cls.ALL
... | Gets a vocation filter from a vocation's name.
Parameters
----------
name: :class:`str`
The name of the vocation.
all_fallback: :class:`bool`
Whether to return :py:attr:`ALL` if no match is found. Otherwise, ``None`` will be returned.
Returns
---... |
369,470 | def path(self, *args: typing.List[str]) -> typing.Union[None, str]:
if not self._project:
return None
return environ.paths.clean(os.path.join(
self._project.source_directory,
*args
)) | Creates an absolute path in the project source directory from the
relative path components.
:param args:
Relative components for creating a path within the project source
directory
:return:
An absolute path to the specified file or directory within the
... |
369,471 | def _updateCallSetIds(self, variantFile):
if len(self._callSetIdMap) == 0:
for sample in variantFile.header.samples:
self.addCallSetFromName(sample) | Updates the call set IDs based on the specified variant file. |
369,472 | def items(self):
"Returns a list of (key, value) pairs as 2-tuples."
return (list(self._pb.IntMap.items()) + list(self._pb.StringMap.items()) +
list(self._pb.FloatMap.items()) + list(self._pb.BoolMap.items())) | Returns a list of (key, value) pairs as 2-tuples. |
369,473 | def get_transitions(self, indexes):
assert indexes.shape[1] == self.state_buffer.shape[1], \
"Must have the same number of indexes as there are environments"
frame_batch_shape = (
[indexes.shape[0], indexes.shape[1]]
+ list(self.state_buffer.shape[2:... | Get dictionary of transition data |
369,474 | def get_data_csv(file_name, encoding=, file_contents=None, on_demand=False):
def yield_csv(csv_contents, csv_file):
try:
for line in csv_contents:
yield line
finally:
try:
csv_file.close()
except:
pass
def ... | Gets good old csv data from a file.
Args:
file_name: The name of the local file, or the holder for the
extension type when the file_contents are supplied.
encoding: Loads the file with the specified cell encoding.
file_contents: The file-like object holding contents of file_name... |
369,475 | def init_app(self, app):
if not hasattr(app, ):
app.extensions = {}
config = app.extensions.setdefault(, {})
config[] = {}
config[] = self.whoosheers
config[] = app.config.get(, ) or
config[] = app.config.get(, 2)
c... | Initialize the extension. It will create the `index_path_root`
directory upon initalization but it will **not** create the index.
Please use :meth:`reindex` for this.
:param app: The application instance for which the extension should
be initialized. |
369,476 | def recover_chain_id(storage: SQLiteStorage) -> ChainID:
action_init_chain = json.loads(storage.get_state_changes(limit=1, offset=0)[0])
assert action_init_chain[] ==
return action_init_chain[] | We can reasonably assume, that any database has only one value for `chain_id` at this point
in time. |
369,477 | def get_storage_id_for_state(state):
if global_config.get_config_value():
max_length = global_config.get_config_value()
max_length_of_state_name_in_folder_name = 255 - len(ID_NAME_DELIMITER + state.state_id)
if max_length is None or max_length == "None" or max_length > max_len... | Calculates the storage id of a state. This ID can be used for generating the file path for a state.
:param rafcon.core.states.state.State state: state the storage_id should is composed for |
369,478 | def loop(self):
pause_key = self.board.PAUSE
margins = {: 4, : 4, : 4}
atexit.register(self.showCursor)
try:
self.hideCursor()
while True:
self.clearScreen()
print(self.__str__(margins=margins))
if self.bo... | main game loop. returns the final score. |
369,479 | def parse_query(self, query):
tree = pypeg2.parse(query, Main, whitespace="")
return tree.accept(self.converter) | Parse query string using given grammar |
369,480 | def trimLeft(self, amount):
if amount == 0:
return
self.sequenceData = self.sequenceData[amount:]
self.sequenceQual = self.sequenceQual[amount:] | Trim this fastqSequence in-place by removing <amount> nucleotides from
the 5' end (left end).
:param amount: the number of nucleotides to trim from the left-side of
this sequence. |
369,481 | def connect(host, default_protocol=, **kwargs):
host = to_host(host)
conn = prepare(host, default_protocol, **kwargs)
account = host.get_account()
conn.connect(host.get_address(), host.get_tcp_port())
if account is not None:
conn.login(account)
return conn | Like :class:`prepare()`, but also connects to the host by calling
:class:`Protocol.connect()`. If the URL or host contain any login info, this
function also logs into the host using :class:`Protocol.login()`.
:type host: str or Host
:param host: A URL-formatted hostname or a :class:`Exscript.Host` obj... |
369,482 | def main(self, spin, data):
data = data.decode(self.encoding)
field = re.match(RFC_REG, data)
if not field:
return
prefix = self.extract_prefix(field.group())
command = field.group().upper()
args = self.extract_args(field.group())
... | The function which uses irc rfc regex to extract
the basic arguments from the msg. |
369,483 | def get_pathway(self, pathway_name=None, pathway_id=None, limit=None, as_df=False):
q = self.session.query(models.Pathway)
if pathway_name:
q = q.filter(models.Pathway.pathway_name.like(pathway_name))
if pathway_id:
q = q.filter(models.Pathway.pathway_id.like(p... | Get pathway
.. note::
Format of pathway_id is KEGG:X* or REACTOME:X* . X* stands for a sequence of digits
:param bool as_df: if set to True result returns as `pandas.DataFrame`
:param str pathway_name: pathway name
:param str pathway_id: KEGG or REACTOME identifier
... |
369,484 | def debug(self, debug_commands):
if isinstance(debug_commands, sc_debug.DebugCommand):
debug_commands = [debug_commands]
return self._client.send(debug=sc_pb.RequestDebug(debug=debug_commands)) | Run a debug command. |
369,485 | def get(self, key):
if not Log:
_late_import()
return FlatList(vals=[unwrap(coalesce(_datawrap(v), Null)[key]) for v in _get_list(self)]) | simple `select` |
369,486 | def _traverse_command(self, name, *args, **kwargs):
if not name in self.available_commands:
raise AttributeError("%s is not an available command for %s" %
(name, self.__class__.__name__))
attr = getattr(self.connection, "%s" % name)
key = sel... | Add the key to the args and call the Redis command. |
369,487 | def _translate(teleport_value):
if isinstance(teleport_value, dict):
return _translate_composite(teleport_value)
if teleport_value in PRIMITIVES:
return PRIMITIVES[teleport_value]
raise DeserializationError(
"Could not interpret %r as a teleport schema." % teleport_value) | Translate a teleport value in to a val subschema. |
369,488 | def registration_id_chunks(self, registration_ids):
try:
xrange
except NameError:
xrange = range
for i in xrange(0, len(registration_ids), self.FCM_MAX_RECIPIENTS):
yield registration_ids[i:i + self.FCM_MAX_RECIPIENTS] | Splits registration ids in several lists of max 1000 registration ids per list
Args:
registration_ids (list): FCM device registration ID
Yields:
generator: list including lists with registration ids |
369,489 | def metalarchives(song):
artist = normalize(song.artist)
title = normalize(song.title)
url =
url += f
soup = get_url(url, parser=)
if not soup:
return
song_id_re = re.compile(r)
ids = set(re.search(song_id_re, a) for sub in soup[] for a in sub)
if not ids:
re... | Returns the lyrics found in MetalArchives for the specified mp3 file or an
empty string if not found. |
369,490 | def warn_message(self, message, fh=None, prefix="[warn]:", suffix="..."):
msg = prefix + message + suffix
fh = fh or sys.stdout
if fh is sys.stdout:
termcolor.cprint(msg, color="yellow")
else:
fh.write(msg)
pass | print warn type message,
if file handle is `sys.stdout`, print color message
:param str message: message to print
:param file fh: file handle,default is `sys.stdout`
:param str prefix: message prefix,default is `[warn]`
:param str suffix: message suffix ,default is `...`
... |
369,491 | def filter_featured_apps(admin_apps, request):
featured_apps = []
for orig_app_spec in appsettings.DASHBOARD_FEATURED_APPS:
app_spec = orig_app_spec.copy()
if "verbose_name" in app_spec:
warnings.warn(
"DASHBOARD_FEATURED_APPS[][] = is d... | Given a list of apps return a set of pseudo-apps considered featured.
    Apps are considered featured if they are defined in the settings
property called `DASHBOARD_FEATURED_APPS` which contains a list of the apps
that are considered to be featured.
:param admin_apps: A list of apps.
:param request: D... |
369,492 | def get_interfaces_ip(self):
interfaces_ip = {}
ipv4_command = "show ip interface vrf all"
ipv6_command = "show ipv6 interface vrf all"
output_v4 = self._send_command(ipv4_command)
output_v6 = self._send_command(ipv6_command)
v4_interfaces = {}
for line ... | Get interface IP details. Returns a dictionary of dictionaries.
Sample output:
{
"Ethernet2/3": {
"ipv4": {
"4.4.4.4": {
"prefix_length": 16
}
},
"ipv6": {
"20... |
369,493 | def build(self, path=None, tag=None, quiet=False, fileobj=None,
nocache=False, rm=False, timeout=None,
custom_context=False, encoding=None, pull=False,
forcerm=False, dockerfile=None, container_limits=None,
decode=False, buildargs=None, gzip=False, shmsize=None,
... | Similar to the ``docker build`` command. Either ``path`` or ``fileobj``
needs to be set. ``path`` can be a local path (to a directory
containing a Dockerfile) or a remote URL. ``fileobj`` must be a
readable file-like object to a Dockerfile.
If you have a tar file for the Docker build co... |
369,494 | def ensure_dim(core, dim, dim_):
if dim is None:
dim = dim_
if not dim:
return core, 1
if dim_ == dim:
return core, int(dim)
if dim > dim_:
key_convert = lambda vari: vari[:dim_]
else:
key_convert = lambda vari: vari + (0,)*(dim-dim_)
new_core = {}
... | Ensure that dim is correct. |
369,495 | def load(self, callback=None, errback=None, reload=False):
if not reload and self.data:
raise NetworkException()
def success(result, *args):
self.data = result
self.id = result[]
self.name = result[]
self.report = self._rest.report(se... | Load network data from the API. |
369,496 | def send_venue(chat_id, latitude, longitude, title, address,
foursquare_id=None, reply_to_message_id=None, reply_markup=None, disable_notification=False,
**kwargs):
params = dict(
chat_id=chat_id,
latitude=latitude,
longitude=longitude,
title=... | Use this method to send information about a venue.
:param chat_id: Unique identifier for the target chat or username of the target channel (in the format @channelusername)
:param latitude: Latitude of location.
:param longitude: Longitude of location.
:param title: Name of the venue.
:param address... |
369,497 | def user_open(url_or_command):
from urllib.parse import urlparse
scheme = urlparse(url_or_command).scheme
if scheme == or scheme == :
import webbrowser
import os
savout = os.dup(1)
os.close(1)
os.open(os.devnull, os.O_RDWR)
try:
        ... | Open the specified parameter in the web browser if a URL is detected,
    otherwise pass the parameter to the shell as a subprocess. This function
    is intended to be used in on_leftclick/on_rightclick callbacks.
:param url_or_command: String containing URL or command |
369,498 | def mtanh(alpha, z):
z = scipy.asarray(z)
ez = scipy.exp(z)
enz = 1.0 / ez
return ((1 + alpha * z) * ez - enz) / (ez + enz) | Modified hyperbolic tangent function mtanh(z; alpha).
Parameters
----------
alpha : float
The core slope of the mtanh.
z : float or array
The coordinate of the mtanh. |
369,499 | def get_jids():
cb_ = _get_connection()
_verify_views()
ret = {}
for result in cb_.query(DESIGN_NAME, , include_docs=True):
ret[result.key] = _format_jid_instance(result.key, result.doc.value[])
return ret | Return a list of all job ids |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.